From 72c09c4f07ef5cebc6c4f288e8e5f32c5d186315 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 21 Jul 2020 09:29:32 +0530 Subject: [PATCH 001/188] First commit to rewrite terrascan in go language --- .pre-commit-config.yaml | 22 - .pre-commit-hooks.yaml | 6 - .pyup.yml | 4 - AUTHORS.rst | 15 - CONTRIBUTING.rst | 114 -- HISTORY.rst | 27 - LICENSE | 35 - MANIFEST.in | 11 - Makefile | 87 - README.md | 2 + README.rst | 222 --- TO_DO.md | 60 - docs/.gitignore | 3 - docs/Makefile | 177 -- docs/authors.rst | 1 - docs/conf.py | 275 --- docs/contributing.rst | 1 - docs/history.rst | 1 - docs/index.rst | 22 - docs/installation.rst | 51 - docs/make.bat | 242 --- docs/readme.rst | 1 - docs/usage.rst | 7 - requirements.txt | 1 - requirements_dev.txt | 3 - setup.cfg | 21 - setup.py | 79 - terrascan/__init__.py | 7 - terrascan/embedded/__init__.py | 0 .../embedded/terraform_validate/__init__.py | 1 - .../terraform_validate/terraform_validate.py | 1481 ----------------- terrascan/terrascan.py | 1125 ------------- tests/__init__.py | 3 - tests/infrastructure/fail/gcp_main.tf | 11 - tests/infrastructure/fail/main.tf | 239 --- tests/infrastructure/fail/s3_related.tf | 37 - tests/infrastructure/success/main.tf | 275 --- tests/test_terrascan.py | 37 - 38 files changed, 2 insertions(+), 4704 deletions(-) delete mode 100644 .pre-commit-config.yaml delete mode 100644 .pre-commit-hooks.yaml delete mode 100644 .pyup.yml delete mode 100644 AUTHORS.rst delete mode 100644 CONTRIBUTING.rst delete mode 100644 HISTORY.rst delete mode 100644 LICENSE delete mode 100644 MANIFEST.in delete mode 100644 Makefile create mode 100644 README.md delete mode 100644 README.rst delete mode 100644 TO_DO.md delete mode 100644 docs/.gitignore delete mode 100644 docs/Makefile delete mode 100644 docs/authors.rst delete mode 100755 docs/conf.py delete mode 100644 docs/contributing.rst delete mode 100644 docs/history.rst delete mode 100644 docs/index.rst delete mode 100644 docs/installation.rst delete mode 100644 docs/make.bat delete mode 100644 docs/readme.rst delete mode 100644 docs/usage.rst delete mode 100644 requirements.txt delete mode 100644 requirements_dev.txt delete mode 100644 setup.cfg delete mode 100644 setup.py delete mode 100644 terrascan/__init__.py delete mode 100644 terrascan/embedded/__init__.py delete mode 100644 terrascan/embedded/terraform_validate/__init__.py delete mode 100644 terrascan/embedded/terraform_validate/terraform_validate.py delete mode 100644 terrascan/terrascan.py delete mode 100644 tests/__init__.py delete mode 100644 tests/infrastructure/fail/gcp_main.tf delete mode 100644 tests/infrastructure/fail/main.tf delete mode 100644 tests/infrastructure/fail/s3_related.tf delete mode 100644 tests/infrastructure/success/main.tf delete mode 100644 tests/test_terrascan.py diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml deleted file mode 100644 index 43214e860..000000000 --- a/.pre-commit-config.yaml +++ /dev/null @@ -1,22 +0,0 @@ - repos: - # The below repo is configured for local testing purposes - # to test you can run the following command: - # `pre-commit run -v --files tests/infrastructure/success/main.tf` - - repo: local - hooks: - - id: terrascan - name: terrascan - entry: terrascan - language: python - types: [terraform] - pass_filenames: false - args: [-l=tests/infrastructure/success/] - verbose: true -# Here's an example on how to setup terrascan as a pre-commit -#- repo: /~https://github.com/cesar-rodriguez/terrascan -# rev: v0.1.2 -# hooks: -# - id: terrascan -# pass_filenames: 
false -# args: [-l=] ##NOTE: this needs to be changed to reference the location of your terraform files -# verbose: true diff --git a/.pre-commit-hooks.yaml b/.pre-commit-hooks.yaml deleted file mode 100644 index d130413ab..000000000 --- a/.pre-commit-hooks.yaml +++ /dev/null @@ -1,6 +0,0 @@ -- id: terrascan - name: terrascan - description: Executes terrascan on the current directory - entry: terrascan - language: python - types: [terraform] diff --git a/.pyup.yml b/.pyup.yml deleted file mode 100644 index c82beb1a3..000000000 --- a/.pyup.yml +++ /dev/null @@ -1,4 +0,0 @@ -# autogenerated pyup.io config file -# see https://pyup.io/docs/configuration/ for all available options - -update: all diff --git a/AUTHORS.rst b/AUTHORS.rst deleted file mode 100644 index 413034b21..000000000 --- a/AUTHORS.rst +++ /dev/null @@ -1,15 +0,0 @@ -======= -Credits -======= - -Development Lead ----------------- -* `Cesar Rodriguez `_ - cesar@cloudsecuritymusings.com - -Contributors ------------- - -* `sbalbach `_ -* `Zach Zeid `_ -* `Logan Rakai `_ -* `Guy Kisel `_ diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst deleted file mode 100644 index a52ae8233..000000000 --- a/CONTRIBUTING.rst +++ /dev/null @@ -1,114 +0,0 @@ -.. highlight:: shell - -============ -Contributing -============ - -Contributions are welcome, and they are greatly appreciated! Every -little bit helps, and credit will always be given. - -You can contribute in many ways: - -Types of Contributions ----------------------- - -Report Bugs -~~~~~~~~~~~ - -Report bugs at /~https://github.com/cesar-rodriguez/terrascan/issues. - -If you are reporting a bug, please include: - -* Your operating system name and version. -* Any details about your local setup that might be helpful in troubleshooting. -* Detailed steps to reproduce the bug. - -Fix Bugs -~~~~~~~~ - -Look through the GitHub issues for bugs. Anything tagged with "bug" -and "help wanted" is open to whoever wants to implement it. - -Implement Features -~~~~~~~~~~~~~~~~~~ - -Look through the GitHub issues for features. Anything tagged with "enhancement" -and "help wanted" is open to whoever wants to implement it. - -Write Documentation -~~~~~~~~~~~~~~~~~~~ - -terrascan could always use more documentation, whether as part of the -official terrascan docs, in docstrings, or even on the web in blog posts, -articles, and such. - -Submit Feedback -~~~~~~~~~~~~~~~ - -The best way to send feedback is to file an issue at /~https://github.com/cesar-rodriguez/terrascan/issues. - -If you are proposing a feature: - -* Explain in detail how it would work. -* Keep the scope as narrow as possible, to make it easier to implement. -* Remember that this is a volunteer-driven project, and that contributions - are welcome :) - -Get Started! ------------- - -Ready to contribute? Here's how to set up `terrascan` for local development. - -1. Fork the `terrascan` repo on GitHub. -2. Clone your fork locally:: - - $ git clone git@github.com:your_name_here/terrascan.git - -3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: - - $ mkvirtualenv terrascan - $ cd terrascan/ - $ python setup.py develop - -4. Create a branch for local development:: - - $ git checkout -b name-of-your-bugfix-or-feature - - Now you can make your changes locally. - -5. 
When you're done making changes, check that your changes pass flake8 and the tests, including testing other Python versions with tox:: - - $ flake8 terrascan tests - $ python setup.py test or py.test - $ tox - - To get flake8 and tox, just pip install them into your virtualenv. - -6. Commit your changes and push your branch to GitHub:: - - $ git add . - $ git commit -m "Your detailed description of your changes." - $ git push origin name-of-your-bugfix-or-feature - -7. Submit a pull request through the GitHub website. - -Pull Request Guidelines ------------------------ - -Before you submit a pull request, check that it meets these guidelines: - -1. The pull request should include tests. -2. If the pull request adds functionality, the docs should be updated. Put - your new functionality into a function with a docstring, and add the - feature to the list in README.rst. -3. The pull request should work for Python 2.6, 2.7, 3.3, 3.4 and 3.5, and for PyPy. Check - https://travis-ci.org/cesar-rodriguez/terrascan/pull_requests - and make sure that the tests pass for all supported Python versions. - -Tips ----- - -To run a subset of tests:: - - - $ python -m unittest tests.test_terrascan diff --git a/HISTORY.rst b/HISTORY.rst deleted file mode 100644 index 2348c180f..000000000 --- a/HISTORY.rst +++ /dev/null @@ -1,27 +0,0 @@ -======= -History -======= - -0.2.1 ------------ -* Bugfix: The pyhcl hard dependency in the requirements.txt file caused issues if a higher version was installed. This was fixed by using the ">=" operator. - -0.2.0 (2020-01-11) -------------------- -* Adds support for terraform 0.12+ - -0.1.2 (2020-01-05) ------------------- - -* Adds ability to setup terrascan as a pre-commit hook - -0.1.1 (2020-01-01) ------------------- - -* Updates dependent packages to latest versions -* Migrates CI to GitHub Actions from travis - -0.1.0 (2017-11-26) ------------------- - -* First release on PyPI. diff --git a/LICENSE b/LICENSE deleted file mode 100644 index e43991ff8..000000000 --- a/LICENSE +++ /dev/null @@ -1,35 +0,0 @@ - -GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Security and best practices test for terraform - Copyright (C) 2020 Accurics, Inc. - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . - -Also add information on how to contact you by electronic and paper mail. - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -. 
- - diff --git a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 965b2dda7..000000000 --- a/MANIFEST.in +++ /dev/null @@ -1,11 +0,0 @@ -include AUTHORS.rst -include CONTRIBUTING.rst -include HISTORY.rst -include LICENSE -include README.rst - -recursive-include tests * -recursive-exclude * __pycache__ -recursive-exclude * *.py[co] - -recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif diff --git a/Makefile b/Makefile deleted file mode 100644 index 2a0ff6365..000000000 --- a/Makefile +++ /dev/null @@ -1,87 +0,0 @@ -.PHONY: clean clean-test clean-pyc clean-build docs help -.DEFAULT_GOAL := help -define BROWSER_PYSCRIPT -import os, webbrowser, sys -try: - from urllib import pathname2url -except: - from urllib.request import pathname2url - -webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) -endef -export BROWSER_PYSCRIPT - -define PRINT_HELP_PYSCRIPT -import re, sys - -for line in sys.stdin: - match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) - if match: - target, help = match.groups() - print("%-20s %s" % (target, help)) -endef -export PRINT_HELP_PYSCRIPT -BROWSER := python -c "$$BROWSER_PYSCRIPT" - -help: - @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) - -clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts - - -clean-build: ## remove build artifacts - rm -fr build/ - rm -fr dist/ - rm -fr .eggs/ - find . -name '*.egg-info' -exec rm -fr {} + - find . -name '*.egg' -exec rm -f {} + - -clean-pyc: ## remove Python file artifacts - find . -name '*.pyc' -exec rm -f {} + - find . -name '*.pyo' -exec rm -f {} + - find . -name '*~' -exec rm -f {} + - find . -name '__pycache__' -exec rm -fr {} + - -clean-test: ## remove test and coverage artifacts - rm -fr .tox/ - rm -f .coverage - rm -fr htmlcov/ - -lint: ## check style with flake8 - flake8 terrascan tests - -test: ## run tests quickly with the default Python - - python setup.py test - -test-all: ## run tests on every Python version with tox - tox - -coverage: ## check code coverage quickly with the default Python - coverage run --source terrascan setup.py test - coverage report -m - coverage html - $(BROWSER) htmlcov/index.html - -docs: ## generate Sphinx HTML documentation, including API docs - rm -f docs/terrascan.rst - rm -f docs/modules.rst - sphinx-apidoc -o docs/ terrascan - $(MAKE) -C docs clean - $(MAKE) -C docs html - $(BROWSER) docs/_build/html/index.html - -servedocs: docs ## compile the docs watching for changes - watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . - -release: clean ## package and upload a release - python setup.py sdist upload - python setup.py bdist_wheel upload - -dist: clean ## builds source and wheel package - python setup.py sdist - python setup.py bdist_wheel - ls -l dist - -install: clean ## install the package to the active Python's site-packages - python setup.py install diff --git a/README.md b/README.md new file mode 100644 index 000000000..5a870eee0 --- /dev/null +++ b/README.md @@ -0,0 +1,2 @@ +# Terrascan + diff --git a/README.rst b/README.rst deleted file mode 100644 index b08620131..000000000 --- a/README.rst +++ /dev/null @@ -1,222 +0,0 @@ -.. image:: https://img.shields.io/pypi/v/terrascan.svg - :target: https://pypi.python.org/pypi/terrascan - :alt: pypi - -.. image:: https://img.shields.io/github/workflow/status/accurics/terrascan/Python%20package - :target: /~https://github.com/accurics/terrascan/actions - :alt: build - -.. 
image:: https://readthedocs.org/projects/terrascan/badge/?version=latest - :target: https://terrascan.readthedocs.io/en/latest/?badge=latest - :alt: docs - -.. image:: https://pyup.io/repos/github/accurics/terrascan/shield.svg - :target: https://pyup.io/repos/github/accurics/terrascan/ - :alt: updates - -.. image:: https://img.shields.io/pypi/pyversions/terrascan.svg - :target: https://pypi.python.org/pypi/terrascan - :alt: python versions - -.. image:: https://img.shields.io/badge/dynamic/json.svg?label=downloads&url=https%3A%2F%2Fpypistats.org%2Fapi%2Fpackages%2Fterrascan%2Frecent&query=data.last_month&colorB=brightgreen&suffix=%2FMonth - :target: https://pypistats.org/packages/terrascan - :alt: downloads - -========= -Terrascan -========= - -A collection of security and best practice tests for static code analysis of terraform_ code. - -.. _terraform: https://www.terraform.io - -* GitHub Repo: /~https://github.com/accurics/terrascan -* Documentation: https://terrascan.readthedocs.io. -* Tutorial: https://www.cloudsecuritymusings.com/blog/using-terrascan-for-static-code-analysis-of-your-infrastructure-code-part-1 -* Free software: GNU General Public License v3 - ---------------- -About Accurics ---------------- -Accurics enables organizations to protect their cloud native infrastructure in hybrid and multi-cloud environments. It seamlessly scans infrastructure as code for misconfigurations, monitors provisioned cloud infrastructure for configuration changes that introduce posture drift, and enables reverting to a secure posture. - -Learn more at https://www.accurics.com - --------- -Features --------- -Terrascan will perform tests on your terraform templates to ensure: - -- **Encryption** - - Server Side Encryption (SSE) enabled - - Use of AWS Key Management Service (KMS) with Customer Managed Keys (CMK) - - Use of SSL/TLS and proper configuration -- **Security Groups** - - Provisioning SGs in EC2-classic - - Ingress open to 0.0.0.0/0 -- **Public Exposure** - - Services with public exposure other than Gateways (NAT, VGW, IGW) -- **Logging & Monitoring** - - Access logs enabled to resources that support it - ----------- -Installing ----------- -Terrascan uses Python and depends on pyhcl and terraform-validate (a fork has -been included as part of terrascan that supports terraform 0.12+). -After installing python in your system you can follow these steps: - - $ pip install terrascan - ------------------ -Running the tests ------------------ -To run, execute terrascan.py as follows replacing with the location of your terraform templates: - - $ terrascan --location tests/infrastructure/success --vars tests/infrastructure/vars.json - -- **Returns 0 if no failures or errors; 4 otherwise** - - helps with use in a delivery pipeline - -- **Parameters**:: - - -h, --help show this help message and exit - -l LOCATION, --location LOCATION - location of terraform templates to scan - -v [VARS [VARS ...]], --vars [VARS [VARS ...]] - variables json or .tf file name - -o OVERRIDES, --overrides OVERRIDES - override rules file name - -r RESULTS, --results RESULTS - output results file name - -d [DISPLAYRULES], --displayRules [DISPLAYRULES] - display the rules used - -w [WARRANTY], --warranty [WARRANTY] - displays the warranty - -g [GPL], --gpl [GPL] - displays license information - -c CONFIG, --config CONFIG - logging configuration: error, warning, info, debug, or - none; default is error -- **Override file example** - -1. The first attribute is the name of the rule to be overridden. -2. 
The second attribute is the name of the resource to be overridden. -3. The third atttribute is the RR or RAR number that waives the failure. -This is required for high severity rules; can be an empty string for medium and low severity rules. - -.. code:: json - - { - "overrides": [ - [ - "aws_s3_bucket_server_side_encryption_configuration", - "noEncryptionWaived", - "RR-1234" - ], - [ - "aws_rds_cluster_encryption", - "rds_cluster_bad", - "RAR-98765" - ] - ] - } - -- **Example output**:: - - Logging level set to error. - ................ - ---------------------------------------------------------------------- - Ran 16 tests in 0.015s - - OK - - Processed 19 files in C:\DEV\terraforms\backends\10-network-analytics - - - Results (took 1.08 seconds): - - Failures: (2) - [high] [aws_dynamodb_table.encryption.server_side_encryption.enabled] should be 'True'. Is: 'False' in module 10-network-analytics, file C:\DEV\terraforms\backends\10-network-analytics\main.tf - [high] [aws_s3_bucket.noEncryption] should have property: 'server_side_encryption_configuration' in module 10-network-analytics, file C:\DEV\terraforms\backends\10-network-analytics\main.tf - - Errors: (0) - --------------------- -Using as pre-commit --------------------- -Terrascan can be used on pre-commit hooks to prevent accidental introduction of security weaknesses into your repository. -This requires having pre-commit_ installed. An example configuration is provided in the comments of the here_ file in this repository. - -.. _pre-commit: https://pre-commit.com/ -.. _here: .pre-commit-config.yaml - --------------- -Feature Status --------------- -Legend: - - `:heavy_minus_sign:` = test needs to be implemented - - `:heavy_check_mark:` = test implemented - - **blank** - N/A - -======================================== ====================== ====================== ====================== ====================== - Terraform resources Encryption Security Groups Public exposure Logging & Monitoring -======================================== ====================== ====================== ====================== ====================== - aws_alb `:heavy_check_mark:` `:heavy_check_mark:` - aws_alb_listener `:heavy_check_mark:` - aws_ami `:heavy_check_mark:` - aws_ami_copy `:heavy_check_mark:` - aws_api_gateway_domain_name `:heavy_check_mark:` - aws_cloudfront_distribution `:heavy_check_mark:` `:heavy_check_mark:` - aws_cloudtrail `:heavy_check_mark:` `:heavy_check_mark:` - aws_codebuild_project `:heavy_check_mark:` - aws_codepipeline `:heavy_check_mark:` - aws_db_instance `:heavy_check_mark:` `:heavy_check_mark:` - aws_db_security_group `:heavy_check_mark:` - aws_dms_endpoint `:heavy_check_mark:` - aws_dms_replication_instance `:heavy_check_mark:` `:heavy_check_mark:` - aws_dynamodb_table `:heavy_check_mark:` - aws_ebs_volume `:heavy_check_mark:` - aws_efs_file_system `:heavy_check_mark:` - aws_elasticache_security_group `:heavy_check_mark:` - aws_efs_file_system `:heavy_check_mark:` - aws_elasticache_security_group `:heavy_check_mark:` - aws_elastictranscoder_pipeline `:heavy_check_mark:` - aws_elb `:heavy_check_mark:` `:heavy_check_mark:` `:heavy_check_mark:` - aws_emr_cluster `:heavy_check_mark:` - aws_instance `:heavy_check_mark:` `:heavy_check_mark:` - aws_kinesis_firehose_delivery_stream `:heavy_check_mark:` `:heavy_check_mark:` - aws_lambda_function `:heavy_check_mark:` - aws_launch_configuration `:heavy_check_mark:` - aws_lb_ssl_negotiation_policy `:heavy_minus_sign:` - aws_load_balancer_backend_server_policy `:heavy_minus_sign:` - 
aws_load_balancer_listener_policy `:heavy_minus_sign:` - aws_load_balancer_policy `:heavy_minus_sign:` - aws_opsworks_application `:heavy_check_mark:` `:heavy_minus_sign:` - aws_opsworks_custom_layer `:heavy_minus_sign:` - aws_opsworks_ganglia_layer `:heavy_minus_sign:` - aws_opsworks_haproxy_layer `:heavy_minus_sign:` - aws_opsworks_instance `:heavy_minus_sign:` - aws_opsworks_java_app_layer `:heavy_minus_sign:` - aws_opsworks_memcached_layer `:heavy_minus_sign:` - aws_opsworks_mysql_layer `:heavy_minus_sign:` - aws_opsworks_nodejs_app_layer `:heavy_minus_sign:` - aws_opsworks_php_app_layer `:heavy_minus_sign:` - aws_opsworks_rails_app_layer `:heavy_minus_sign:` - aws_opsworks_static_web_layer `:heavy_minus_sign:` - aws_rds_cluster `:heavy_check_mark:` - aws_rds_cluster_instance `:heavy_check_mark:` - aws_redshift_cluster `:heavy_check_mark:` `:heavy_check_mark:` `:heavy_check_mark:` - aws_redshift_parameter_group `:heavy_minus_sign:` `:heavy_minus_sign:` - aws_redshift_security_group `:heavy_check_mark:` - aws_s3_bucket `:heavy_check_mark:` `:heavy_check_mark:` `:heavy_check_mark:` - aws_s3_bucket_object `:heavy_check_mark:` - aws_security_group `:heavy_check_mark:` `:heavy_check_mark:` - aws_security_group_rule `:heavy_check_mark:` `:heavy_check_mark:` - aws_ses_receipt_rule `:heavy_minus_sign:` - aws_sqs_queue `:heavy_check_mark:` - aws_ssm_maintenance_window_task `:heavy_check_mark:` - aws_ssm_parameter `:heavy_check_mark:` -======================================== ====================== ====================== ====================== ====================== - - diff --git a/TO_DO.md b/TO_DO.md deleted file mode 100644 index 4932a5919..000000000 --- a/TO_DO.md +++ /dev/null @@ -1,60 +0,0 @@ -To Do -====== -The following tests are planned to be implemented - -Identity and access management ------------------------------- -Checks for overly permissive permissions and bad practices. -Verifies that: -- For each of these types of policies that there are no NotActions: - - IAM policy - - IAM role trust relationship - - S3 bucket policy - - SNS topic policy - - SQS queue policy - - KMS policy -- For each of these types of policies that there are no NotPrincipals: - - IAM role trust relationship - - S3 bucket policy - - SNS topic policy - - SQS queue policy - - KMS policy -- For each of these types of policies that there are no wildcard actions: - - IAM policy - - IAM role trust relationship - - S3 bucket policy - - SQS queue policy - - KMS policy -- For each of these types of policies that there are no wildcard principals: - - Lambda permission - - S3 bucket policy - - SNS topic policy - - SQS queue policy - - KMS policy -- No policies attached to IAM users -- No inline policies on: - - IAM users - - IAM roles -- S3 bucket no public-read ACL -- S3 bucket no public-read-write ACL -- S3 bucket no authenticated-read ACL -- The AWS administrator managed policy shouldn't be attached to any resources -- AWS Managed policies can't be scanned -- No creation of IAM API keys - -Governance best practices -------------------------- -Checks against general governance best practices. -Verifies that: -- A specified number of tags are applied to all resources when supported. 
-- Autoscaling lifecycle actions are enabled to reduce uneccessary cost on unused resources -- There are no EC2 instance types provisioned for which AWS doesn't allow penetration testing: m1.small, t1.micro, or t2.nano -- There are no RDS instance types provisiones for which AWS doesn't allow penetration testing: small, micro -- Only approved AMIs are provisioned -- No S3 bucket names larger than 63 characters -- There are no hardcoded credentials in terraform templates - -Logging & Monitoring -------------------------- -- Check for enhanced monitoring on the resources that support it - diff --git a/docs/.gitignore b/docs/.gitignore deleted file mode 100644 index 21193c6e1..000000000 --- a/docs/.gitignore +++ /dev/null @@ -1,3 +0,0 @@ -/terrascan.rst -/terrascan.*.rst -/modules.rst diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 767cbe9e0..000000000 --- a/docs/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
- -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/terrascan.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/terrascan.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/terrascan" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/terrascan" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." 
- -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/authors.rst b/docs/authors.rst deleted file mode 100644 index e122f914a..000000000 --- a/docs/authors.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../AUTHORS.rst diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100755 index 1a535b7ae..000000000 --- a/docs/conf.py +++ /dev/null @@ -1,275 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# -# terrascan documentation build configuration file, created by -# sphinx-quickstart on Tue Jul 9 22:26:36 2013. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another -# directory, add these directories to sys.path here. If the directory is -# relative to the documentation root, use os.path.abspath to make it -# absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# Get the project root dir, which is the parent dir of this -cwd = os.getcwd() -project_root = os.path.dirname(cwd) - -# Insert the project root dir as the first element in the PYTHONPATH. -# This lets us ensure that the source package is imported, and that its -# version is used. -sys.path.insert(0, project_root) - -import terrascan - -# -- General configuration --------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'terrascan' -copyright = u"2017, Cesar Rodriguez" - -# The version info for the project you're documenting, acts as replacement -# for |version| and |release|, also used in various other places throughout -# the built documents. -# -# The short X.Y version. -version = terrascan.__version__ -# The full version, including alpha/beta/rc tags. -release = terrascan.__version__ - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-#language = None - -# There are two options for replacing |today|: either, you set today to -# some non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all -# documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - -# If true, keep warnings as "system message" paragraphs in the built -# documents. -#keep_warnings = False - - -# -- Options for HTML output ------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -html_theme = 'default' - -# Theme options are theme-specific and customize the look and feel of a -# theme further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as -# html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the -# top of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon -# of the docs. This file should be a Windows icon file (.ico) being -# 16x16 or 32x32 pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) -# here, relative to this directory. They are copied after the builtin -# static files, so a file named "default.css" will overwrite the builtin -# "default.css". -html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page -# bottom, using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -#html_sidebars = {} - -# Additional templates that should be rendered to pages, maps page names -# to template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. -# Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. -# Default is True. 
-#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages -# will contain a tag referring to it. The value of this option -# must be the base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'terrascandoc' - - -# -- Options for LaTeX output ------------------------------------------ - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - #'papersize': 'letterpaper', - - # The font size ('10pt', '11pt' or '12pt'). - #'pointsize': '10pt', - - # Additional stuff for the LaTeX preamble. - #'preamble': '', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass -# [howto/manual]). -latex_documents = [ - ('index', 'terrascan.tex', - u'terrascan Documentation', - u'Cesar Rodriguez', 'manual'), -] - -# The name of an image file (relative to this directory) to place at -# the top of the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings -# are parts, not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output ------------------------------------ - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'terrascan', - u'terrascan Documentation', - [u'Cesar Rodriguez'], 1) -] - -# If true, show URL addresses after external links. -#man_show_urls = False - - -# -- Options for Texinfo output ---------------------------------------- - -# Grouping the document tree into Texinfo files. List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ('index', 'terrascan', - u'terrascan Documentation', - u'Cesar Rodriguez', - 'terrascan', - 'One line description of project.', - 'Miscellaneous'), -] - -# Documents to append as an appendix to all manuals. -#texinfo_appendices = [] - -# If false, no module index is generated. -#texinfo_domain_indices = True - -# How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' - -# If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False diff --git a/docs/contributing.rst b/docs/contributing.rst deleted file mode 100644 index e582053ea..000000000 --- a/docs/contributing.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../CONTRIBUTING.rst diff --git a/docs/history.rst b/docs/history.rst deleted file mode 100644 index 250649964..000000000 --- a/docs/history.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../HISTORY.rst diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 319e7250d..000000000 --- a/docs/index.rst +++ /dev/null @@ -1,22 +0,0 @@ -Welcome to terrascan's documentation! -====================================== - -Contents: - -.. 
toctree:: - :maxdepth: 2 - - readme - installation - usage - modules - contributing - authors - history - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/installation.rst b/docs/installation.rst deleted file mode 100644 index d1494a8a5..000000000 --- a/docs/installation.rst +++ /dev/null @@ -1,51 +0,0 @@ -.. highlight:: shell - -============ -Installation -============ - - -Stable release --------------- - -To install terrascan, run this command in your terminal: - -.. code-block:: console - - $ pip install terrascan - -This is the preferred method to install terrascan, as it will always install the most recent stable release. - -If you don't have `pip`_ installed, this `Python installation guide`_ can guide -you through the process. - -.. _pip: https://pip.pypa.io -.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ - - -From sources ------------- - -The sources for terrascan can be downloaded from the `Github repo`_. - -You can either clone the public repository: - -.. code-block:: console - - $ git clone git://github.com/cesar-rodriguez/terrascan - -Or download the `tarball`_: - -.. code-block:: console - - $ curl -OL /~https://github.com/cesar-rodriguez/terrascan/tarball/master - -Once you have a copy of the source, you can install it with: - -.. code-block:: console - - $ python setup.py install - - -.. _Github repo: /~https://github.com/cesar-rodriguez/terrascan -.. _tarball: /~https://github.com/cesar-rodriguez/terrascan/tarball/master diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 43059fa69..000000000 --- a/docs/make.bat +++ /dev/null @@ -1,242 +0,0 @@ -@ECHO OFF - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set BUILDDIR=_build -set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . -set I18NSPHINXOPTS=%SPHINXOPTS% . -if NOT "%PAPER%" == "" ( - set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% - set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% -) - -if "%1" == "" goto help - -if "%1" == "help" ( - :help - echo.Please use `make ^` where ^ is one of - echo. html to make standalone HTML files - echo. dirhtml to make HTML files named index.html in directories - echo. singlehtml to make a single large HTML file - echo. pickle to make pickle files - echo. json to make JSON files - echo. htmlhelp to make HTML files and a HTML help project - echo. qthelp to make HTML files and a qthelp project - echo. devhelp to make HTML files and a Devhelp project - echo. epub to make an epub - echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter - echo. text to make text files - echo. man to make manual pages - echo. texinfo to make Texinfo files - echo. gettext to make PO message catalogs - echo. changes to make an overview over all changed/added/deprecated items - echo. xml to make Docutils-native XML files - echo. pseudoxml to make pseudoxml-XML files for display purposes - echo. linkcheck to check all external links for integrity - echo. doctest to run all doctests embedded in the documentation if enabled - goto end -) - -if "%1" == "clean" ( - for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i - del /q /s %BUILDDIR%\* - goto end -) - - -%SPHINXBUILD% 2> nul -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. 
Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "html" ( - %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/html. - goto end -) - -if "%1" == "dirhtml" ( - %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. - goto end -) - -if "%1" == "singlehtml" ( - %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. - goto end -) - -if "%1" == "pickle" ( - %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the pickle files. - goto end -) - -if "%1" == "json" ( - %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can process the JSON files. - goto end -) - -if "%1" == "htmlhelp" ( - %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run HTML Help Workshop with the ^ -.hhp project file in %BUILDDIR%/htmlhelp. - goto end -) - -if "%1" == "qthelp" ( - %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; now you can run "qcollectiongenerator" with the ^ -.qhcp project file in %BUILDDIR%/qthelp, like this: - echo.^> qcollectiongenerator %BUILDDIR%\qthelp\terrascan.qhcp - echo.To view the help file: - echo.^> assistant -collectionFile %BUILDDIR%\qthelp\terrascan.ghc - goto end -) - -if "%1" == "devhelp" ( - %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. - goto end -) - -if "%1" == "epub" ( - %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The epub file is in %BUILDDIR%/epub. - goto end -) - -if "%1" == "latex" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - if errorlevel 1 exit /b 1 - echo. - echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdf" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "latexpdfja" ( - %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex - cd %BUILDDIR%/latex - make all-pdf-ja - cd %BUILDDIR%/.. - echo. - echo.Build finished; the PDF files are in %BUILDDIR%/latex. - goto end -) - -if "%1" == "text" ( - %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The text files are in %BUILDDIR%/text. - goto end -) - -if "%1" == "man" ( - %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The manual pages are in %BUILDDIR%/man. - goto end -) - -if "%1" == "texinfo" ( - %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. 
The Texinfo files are in %BUILDDIR%/texinfo. - goto end -) - -if "%1" == "gettext" ( - %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The message catalogs are in %BUILDDIR%/locale. - goto end -) - -if "%1" == "changes" ( - %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes - if errorlevel 1 exit /b 1 - echo. - echo.The overview file is in %BUILDDIR%/changes. - goto end -) - -if "%1" == "linkcheck" ( - %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck - if errorlevel 1 exit /b 1 - echo. - echo.Link check complete; look for any errors in the above output ^ -or in %BUILDDIR%/linkcheck/output.txt. - goto end -) - -if "%1" == "doctest" ( - %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest - if errorlevel 1 exit /b 1 - echo. - echo.Testing of doctests in the sources finished, look at the ^ -results in %BUILDDIR%/doctest/output.txt. - goto end -) - -if "%1" == "xml" ( - %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The XML files are in %BUILDDIR%/xml. - goto end -) - -if "%1" == "pseudoxml" ( - %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml - if errorlevel 1 exit /b 1 - echo. - echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. - goto end -) - -:end diff --git a/docs/readme.rst b/docs/readme.rst deleted file mode 100644 index 72a335581..000000000 --- a/docs/readme.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../README.rst diff --git a/docs/usage.rst b/docs/usage.rst deleted file mode 100644 index 11cf52874..000000000 --- a/docs/usage.rst +++ /dev/null @@ -1,7 +0,0 @@ -===== -Usage -===== - -To use terrascan in a project:: - - import terrascan diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index 0bd82b44b..000000000 --- a/requirements.txt +++ /dev/null @@ -1 +0,0 @@ -pyhcl>=0.4.4 diff --git a/requirements_dev.txt b/requirements_dev.txt deleted file mode 100644 index 23841233b..000000000 --- a/requirements_dev.txt +++ /dev/null @@ -1,3 +0,0 @@ -flake8==3.8.3 -Sphinx==3.1.2 -pytest==5.4.3 diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 6e8c0e66c..000000000 --- a/setup.cfg +++ /dev/null @@ -1,21 +0,0 @@ -[bumpversion] -current_version = 0.2.1 -commit = True -tag = True - -[bumpversion:file:setup.py] -search = version='{current_version}' -replace = version='{new_version}' - -[bumpversion:file:terrascan/__init__.py] -search = __version__ = '{current_version}' -replace = __version__ = '{new_version}' - -[bdist_wheel] -universal = 1 - -[flake8] -exclude = docs - -[aliases] -# Define setup.py command aliases here diff --git a/setup.py b/setup.py deleted file mode 100644 index 8c8d73fef..000000000 --- a/setup.py +++ /dev/null @@ -1,79 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- - -"""The setup script.""" - -from setuptools import setup, find_packages - -readme = '''========= -Terrascan -========= - -.. image:: https://img.shields.io/pypi/v/terrascan.svg - :target: https://pypi.python.org/pypi/terrascan - :alt: pypi - -.. image:: https://img.shields.io/travis/cesar-rodriguez/terrascan.svg - :target: https://travis-ci.org/cesar-rodriguez/terrascan - :alt: build - -.. image:: https://readthedocs.org/projects/terrascan/badge/?version=latest - :target: https://terrascan.readthedocs.io/en/latest/?badge=latest - :alt: Documentation Status - -.. 
image:: https://pyup.io/repos/github/cesar-rodriguez/terrascan/shield.svg - :target: https://pyup.io/repos/github/cesar-rodriguez/terrascan/ - :alt: Updates - - -A collection of security and best practice tests for static code analysis of terraform_ templates using terraform_validate_. - -.. _terraform: https://www.terraform.io -.. _terraform_validate: /~https://github.com/elmundio87/terraform_validate - -* GitHub Repo: /~https://github.com/cesar-rodriguez/terrascan -* Documentation: https://terrascan.readthedocs.io. -* Free software: GNU General Public License v3 -''' - -with open('HISTORY.rst') as history_file: - history = history_file.read() - -requirements = [ - 'pyhcl>=0.4.4', -] - -setup( - name='terrascan', - version='0.2.1', - description="Best practices tests for terraform", - long_description=readme, - author="Cesar Rodriguez", - author_email='therasec@gmail.com', - url='/~https://github.com/cesar-rodriguez/terrascan', - download_url='/~https://github.com/cesar-rodriguez/terrascan' + - '/archive/v0.2.1.tar.gz', - packages=find_packages(where='.'), - entry_points={ - 'console_scripts': [ - 'terrascan = terrascan.terrascan:main' - ] - }, - include_package_data=True, - license="GNU General Public License v3", - zip_safe=False, - keywords='terrascan', - classifiers=[ - 'Development Status :: 2 - Pre-Alpha', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: GNU General Public License v3 (GPLv3)', - 'Natural Language :: English', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - ], - test_suite='tests', - tests_require=requirements, - setup_requires=requirements, - install_requires=requirements, -) diff --git a/terrascan/__init__.py b/terrascan/__init__.py deleted file mode 100644 index f09abfe60..000000000 --- a/terrascan/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Top-level package for terrascan.""" - -__author__ = """Cesar Rodriguez""" -__email__ = 'therasec@gmail.com' -__version__ = '0.1.0' diff --git a/terrascan/embedded/__init__.py b/terrascan/embedded/__init__.py deleted file mode 100644 index e69de29bb..000000000 diff --git a/terrascan/embedded/terraform_validate/__init__.py b/terrascan/embedded/terraform_validate/__init__.py deleted file mode 100644 index b206a3372..000000000 --- a/terrascan/embedded/terraform_validate/__init__.py +++ /dev/null @@ -1 +0,0 @@ -from .terraform_validate import * \ No newline at end of file diff --git a/terrascan/embedded/terraform_validate/terraform_validate.py b/terrascan/embedded/terraform_validate/terraform_validate.py deleted file mode 100644 index dbaa9ceef..000000000 --- a/terrascan/embedded/terraform_validate/terraform_validate.py +++ /dev/null @@ -1,1481 +0,0 @@ -import hcl -import os -import re -import traceback -import json -import logging -import sys - -class TerraformPropertyList: - - def __init__(self, validator): - self.properties = [] - self.validator = validator - - def tfproperties(self): - return self.properties - - def property(self, property_name): - propList = TerraformPropertyList(self.validator) - for prop in self.properties: - pvList = [] - if type(prop.property_value) is list: - pvList = prop.property_value - else: - pvList.append(prop.property_value) - - wasFound = False - for pv in pvList: - if type(pv) is dict and property_name in pv.keys(): - wasFound = True - propList.properties.append(TerraformProperty(prop.resource_type, - "{0}.{1}".format(prop.resource_name, prop.property_name), - 
property_name, - pv[property_name], - prop.moduleName, - prop.fileName)) - - if not wasFound and self.validator.raise_error_if_property_missing: - self.validator.preprocessor.add_failure( - "[{0}.{1}] should have property: '{2}'".format(prop.resource_type, "{0}.{1}".format(prop.resource_name, prop.property_name), property_name), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - return propList - - def should_equal_case_insensitive(self, expected_value): - self.should_equal(expected_value, True) - - def should_equal(self, expected_value, caseInsensitive=False): - for prop in self.properties: - - expected_value = self.int2str(expected_value) - prop.property_value = self.int2str(prop.property_value) - expected_value = self.bool2str(expected_value) - prop.property_value = self.bool2str(prop.property_value) - - if caseInsensitive: - # make both actual and expected lower case so case won't matter - pv = prop.property_value.lower() - ev = expected_value.lower() - else: - pv = prop.property_value - ev = expected_value - - if pv != ev: - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] should be '{3}'. Is: '{4}'".format(prop.resource_type, - prop.resource_name, - prop.property_name, - expected_value, - prop.property_value), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def should_not_equal_case_insensitive(self, expected_value): - self.should_not_equal(expected_value, True) - - def should_not_equal(self, expected_value, caseInsensitive=False): - for prop in self.properties: - - prop.property_value = self.int2str(prop.property_value) - expected_value = self.int2str(expected_value) - expected_value = self.bool2str(expected_value) - prop.property_value = self.bool2str(prop.property_value) - - if caseInsensitive: - # make both actual and expected lower case so case won't matter - pv = prop.property_value.lower() - ev = expected_value.lower() - else: - pv = prop.property_value - ev = expected_value - - if pv == ev: - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] should not be '{3}'. 
Is: '{4}'".format(prop.resource_type, - prop.resource_name, - prop.property_name, - expected_value, - prop.property_value), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def list_should_contain_any(self, values_list): - if type(values_list) is not list: - values_list = [values_list] - - for prop in self.properties: - property_value = prop.property_value - if type(property_value) is not list: - property_value = [property_value] - for pv in property_value: - if pv not in values_list: - if type(prop.property_value) is list: - prop.property_value = [str(x) for x in prop.property_value] # fix 2.6/7 - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] '{3}' should have been one of '{4}'.".format(prop.resource_type, - prop.resource_name, - prop.property_name, - prop.property_value, - values_list), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - break; - - def list_should_contain(self, values_list): - if type(values_list) is not list: - values_list = [values_list] - - for prop in self.properties: - - values_missing = [] - for value in values_list: - if value not in prop.property_value: - values_missing.append(value) - - if len(values_missing) != 0: - if type(prop.property_value) is list: - prop.property_value = [str(x) for x in prop.property_value] # fix 2.6/7 - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] '{3}' should contain '{4}'.".format(prop.resource_type, - prop.resource_name, - prop.property_name, - prop.property_value, - values_missing), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def list_should_not_contain(self, values_list): - if type(values_list) is not list: - values_list = [values_list] - - for prop in self.properties: - - values_missing = [] - for value in values_list: - if value in prop.property_value: - values_missing.append(value) - - if len(values_missing) != 0: - if type(prop.property_value) is list: - prop.property_value = [str(x) for x in prop.property_value] # fix 2.6/7 - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] '{3}' should not contain '{4}'.".format(prop.resource_type, - prop.resource_name, - prop.property_name, - prop.property_value, - values_missing), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def should_have_properties(self, properties_list): - if type(properties_list) is not list: - properties_list = [properties_list] - - for prop in self.properties: - property_names = prop.property_value.keys() - for required_property_name in properties_list: - if required_property_name not in property_names: - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] should have property: '{3}'".format(prop.resource_type, - prop.resource_name, - prop.property_name, - required_property_name), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def should_not_have_properties(self, properties_list): - if type(properties_list) is not list: - properties_list = [properties_list] - - for prop in self.properties: - property_names = prop.property_value.keys() - for excluded_property_name in properties_list: - if excluded_property_name in property_names: 
- self.validator.preprocessor.add_failure("[{0}.{1}.{2}] should not have property: '{3}'".format(prop.resource_type, - prop.resource_name, - prop.property_name, - excluded_property_name), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def find_property(self, regex): - lst = TerraformPropertyList(self.validator) - for prop in self.properties: - for nested_property in prop.property_value: - if self.validator.matches_regex_pattern(nested_property, regex): - lst.properties.append(TerraformProperty(prop.resource_type, - "{0}.{1}".format(prop.resource_name, prop.property_name), - nested_property, - prop.property_value[nested_property], - prop.moduleName, - prop.fileName)) - return lst - - def should_match_regex(self, regex): - for prop in self.properties: - if not self.validator.matches_regex_pattern(prop.property_value, regex): - self.validator.preprocessor.add_failure("[{0}.{1}] should match regex '{2}'".format(prop.resource_type, "{0}.{1}".format(prop.resource_name, prop.property_name), regex), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def should_contain_valid_json(self): - for prop in self.properties: - try: - json.loads(prop.property_value) - except: - self.validator.preprocessor.add_failure("[{0}.{1}.{2}] is not valid json".format(prop.resource_type, prop.resource_name, prop.property_name), - prop.moduleName, - prop.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - prop.resource_name) - - def bool2str(self, b): - if str(b).lower() in ["true"]: - return "True" - if str(b).lower() in ["false"]: - return "False" - return b - - def int2str(self, property_value): - if type(property_value) is int: - property_value = str(property_value) - return property_value - - -class TerraformProperty: - - def __init__(self, resource_type, resource_name, property_name, property_value, moduleName, fileName): - self.resource_type = resource_type - self.resource_name = resource_name - self.property_name = property_name - self.property_value = property_value - self.moduleName = moduleName - self.fileName = fileName - - -class TerraformResource: - - def __init__(self, typ, name, config, fileName, moduleName): - self.type = typ - self.name = name - self.config = config - self.fileName = fileName - self.moduleName = moduleName - - -class TerraformResourceList: - - def __init__(self, validator, requestedResourceType, resourceTypes, resources): - self.validator = validator - self.resource_list = [] - self.requestedResourceType = requestedResourceType - - resourcesByType = {} - for resourceName in resources: - resource = resources[resourceName] - resourceType = resource.type - resourcesByType[resourceType] = resourcesByType.get(resourceType, {}) - resourcesByType[resourceType][resourceName] = resource.config - - if type(requestedResourceType) is str: - resourceTypes = [] - for resourceType in resourcesByType: - if validator.matches_regex_pattern(resourceType, requestedResourceType): - resourceTypes.append(resourceType) - elif requestedResourceType is not None: - resourceTypes = requestedResourceType - - for resourceType in resourceTypes: - if resourceType in resourcesByType.keys(): - for resourceName in resourcesByType[resourceType]: - self.resource_list.append( - TerraformResource(resourceType, resourceName, resourcesByType[resourceType][resourceName], 
resources[resourceName].fileName, resources[resourceName].moduleName)) - - self.resource_types = resourceTypes - - def property(self, property_name): - lst = TerraformPropertyList(self.validator) - if len(self.resource_list) > 0: - for resource in self.resource_list: - if property_name in resource.config.keys(): - lst.properties.append(TerraformProperty(resource.type, resource.name, property_name, resource.config[property_name], resource.moduleName, resource.fileName)) - elif self.validator.raise_error_if_property_missing: - self.validator.preprocessor.add_failure("[{0}.{1}] should have property: '{2}'".format(resource.type, resource.name, property_name), - resource.moduleName, - resource.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - resource.name) - - return lst - - def find_property(self, regex): - lst = TerraformPropertyList(self.validator) - if len(self.resource_list) > 0: - for resource in self.resource_list: - for prop in resource.config: - if self.validator.matches_regex_pattern(prop, regex): - lst.properties.append(TerraformProperty(resource.type, - resource.name, - prop, - resource.config[prop], - resource.moduleName, - resource.fileName)) - return lst - - def with_property(self, property_name, regex): - lst = TerraformResourceList(self.validator, None, self.resource_types, {}) - - if len(self.resource_list) > 0: - for resource in self.resource_list: - for prop in resource.config: - if prop == property_name: - tf_property = TerraformProperty(resource.type, resource.name, property_name, resource.config[property_name], resource.moduleName, resource.fileName) - if self.validator.matches_regex_pattern(tf_property.property_value, regex): - lst.resource_list.append(resource) - - return lst - - def should_not_exist(self): - for terraformResource in self.resource_list: - if terraformResource.type == self.requestedResourceType: - self.validator.preprocessor.add_failure("[{0}] should not exist. 
Found in resource named {1}".format(self.requestedResourceType, terraformResource.name), - terraformResource.moduleName, - terraformResource.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - terraformResource.name) - - def should_have_properties(self, properties_list): - if type(properties_list) is not list: - properties_list = [properties_list] - - if len(self.resource_list) > 0: - for resource in self.resource_list: - property_names = resource.config.keys() - for required_property_name in properties_list: - if required_property_name not in property_names: - self.validator.preprocessor.add_failure("[{0}.{1}] should have property: '{2}'".format(resource.type, resource.name, required_property_name), - resource.moduleName, - resource.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - resource.name) - - def should_not_have_properties(self, properties_list): - if type(properties_list) is not list: - properties_list = [properties_list] - - if len(self.resource_list) > 0: - for resource in self.resource_list: - property_names = resource.config.keys() - for excluded_property_name in properties_list: - if excluded_property_name in property_names: - self.validator.preprocessor.add_failure("[{0}.{1}] should not have property: '{2}'".format(resource.type, resource.name, excluded_property_name), - resource.moduleName, - resource.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - resource.name) - - def name_should_match_regex(self, regex): - for resource in self.resource_list: - if not self.validator.matches_regex_pattern(resource.name, regex): - self.validator.preprocessor.add_failure("[{0}.{1}] name should match regex: '{2}'".format(resource.type, resource.name, regex), - resource.moduleName, - resource.fileName, - self.validator.severity, - self.validator.isRuleOverridden, - self.validator.overrides, - resource.name) - - -class Validator: - - # default severity is high - severity = "high" - preprocessor = None - - def __init__(self): - self.raise_error_if_property_missing = False - - def resources(self, typ): - resources = self.terraform.get('resource', {}) - - return TerraformResourceList(self, typ, None, resources) - - def error_if_property_missing(self): - self.raise_error_if_property_missing = True - - # generator that loops through all files to be scanned (stored internally in fileName; returns self (Validator) but sets self.fileName and self.terraform - def get_terraform_files(self, isRuleOverridden): - self.isRuleOverridden = isRuleOverridden - for self.fileName, self.terraform in self.preprocessor.modulesDict.items(): - yield self - - def matches_regex_pattern(self, variable, regex): - return not (self.get_regex_matches(regex, variable) is None) - - def get_regex_matches(self, regex, variable): - if regex[-1:] != "$": - regex = regex + "$" - - if regex[0] != "^": - regex = "^" + regex - - variable = str(variable) - if '\n' in variable: - return re.match(regex, variable, re.DOTALL) - return re.match(regex, variable) - - # this is only used by unit_test.py - def setTerraform(self, terraform): - self.terraform = terraform - self.fileName = "none.tf" - - -class PreProcessor: - - TF = ".tf" - UTF8 = "utf8" - IS_MODULE = "__isModule__" - PARENT = "__parent__" - MODULE_NAME = "__ModuleName__" - FILE_NAME = "__fileName__" - LOCALS = "locals" - VARIABLE = "variable" - OUTPUT = "output" - RESOURCE = "resource" - VALUE = "value" - MODULE = "module" - 
SOURCE = "source" - DEFAULT = "default" - REGEX_COLON_BRACKET = re.compile('.*:\s*\[.*', re.DOTALL) # any characters : whitespace [ any characters - - def __init__(self, jsonOutput): - self.jsonOutput = jsonOutput - self.variablesFromCommandLine = {} - self.hclDict = {} - self.modulesDict = {} - self.fileNames = {} - self.passNumber = 1 - self.dummyIndex = 0 - # on 1st pass replace var. with var$. - # on 2nd pass replace var. and var$. with var!. - self.braces = ["${", "@{", "!{"] - self.vars = ["var.", "var@.", "var!."] - self.locals = ["local.", "local@.", "local!."] - self.modules = ["module.", "module@.", "module!."] - self.terraform_workspaces = ["terraform.workspace", "terraform@.workspace", "terraform!.workspace"] - self.datas = ["data.", "data@.", "data!."] - self.variableFind = [self.braces[0], self.vars[0], self.locals[0], self.modules[0], self.terraform_workspaces[0], self.datas[0]] - self.variableErrorReplacement = [self.braces[1], self.vars[1], self.locals[1], self.modules[1], self.terraform_workspaces[1], self.datas[1]] - self.variableErrorReplacementPass2 = [self.braces[2], self.vars[2], self.locals[2], self.modules[2], self.terraform_workspaces[2], self.datas[2]] - self.replacements = [self.variableFind, self.variableErrorReplacement, self.variableErrorReplacementPass2] - self.replaceableVariablePrefixes = [self.braces[0], self.vars[0], self.locals[0], self.modules[0], self.terraform_workspaces[0], - self.braces[1], self.vars[1], self.locals[1], self.modules[1], self.terraform_workspaces[1], - self.braces[2], self.vars[2], self.locals[2], self.modules[2], self.terraform_workspaces[2]] - - def process(self, path, variablesJsonFilename=None): - inputVars = {} - if variablesJsonFilename is not None: - for fileName in variablesJsonFilename: - with open(fileName, "r", encoding="utf-8") as fp: - try: - variables_string = fp.read() - inputVarsDict = hcl.loads(variables_string) - inputVars = {**inputVars, **inputVarsDict} - except: - self.add_error_force(traceback.format_exc(), "---", fileName, "high") - - # prefix any input variable not containing '.' with 'var.' - for var in inputVars: - if "." not in var: - newVar = "var." 
+ var - self.variablesFromCommandLine[newVar] = inputVars[var] - else: - self.variablesFromCommandLine[var] = inputVars[var] - - self.root = None - self.readDir(path, self.hclDict) - - # all terraform files are now loaded into hclDict (indexed by subdirectory/fileName/terraform structure) - # process hclDict and load every module into modulesDict - self.getAllModules(self.hclDict, False) - # make second pass so variables depending on a previous definition should now be defined - logging.warning("------------------>>>starting pass 2...") - self.passNumber = 2 - self.variableFind += self.variableErrorReplacement - self.variableErrorReplacement = self.variableErrorReplacementPass2 + self.variableErrorReplacementPass2 - self.getAllModules(self.hclDict, False) - - def readDir(self, path, d): - for directory, subdirectories, files in os.walk(path): - if self.root is None: - self.root = directory - i = self.root.rfind(os.path.sep) - if i != -1: - self.root = self.root[i+1:] - # define root and mark this dictionary as a module since all directories in terraform are modules by default - self.hclDict[self.root] = {} - self.hclDict[self.root][self.IS_MODULE] = True - self.hclDict[self.root][self.PARENT] = None - self.hclDict[self.root][self.MODULE_NAME] = self.root - d = self.hclDict[self.root] - - for file in files: - if file[-3:].lower() == self.TF: - # terraform file (ends with .tf) - fileName = os.path.join(directory, file) - relativeFileName = fileName[len(path):] - with open(fileName, 'r', encoding='utf8') as fp: - try: - terraform_string = fp.read() - if len(terraform_string.strip()) > 0: - self.loadFileByDir(fileName, relativeFileName, d, d, terraform_string) - self.fileNames[fileName] = fileName - except: - self.add_error_force(traceback.format_exc(), "---", fileName, "high") - - # load file by directory, marking each directory as a module and setting parent directories - def loadFileByDir(self, fileName, path, hclSubDirDict, parentDir, terraform_string): - i = path.find("\\") - if i == -1: - # \ not found, try / - i = path.find("/") - if i == -1: - # end of subdirectories; path is a terraform filename; load terraform file into dictionary - hclSubDirDict[path] = hcl.loads(terraform_string) - hclSubDirDict[path][self.FILE_NAME] = fileName - # remove file name from end of path - t = self.getPreviousLevel(fileName, os.path.sep) - self.findModuleSources(hclSubDirDict[path], parentDir, t[0]) - return - if i == 0: - # found in first character, recursively try again skipping first character - self.loadFileByDir(fileName, path[1:], hclSubDirDict, parentDir, terraform_string) - else: - # get subdirectory - subdir = path[:i] - if hclSubDirDict.get(subdir) is None: - # subdirectory not defined in our dictionary yet so define it - hclSubDirDict[subdir] = {} - hclSubDir = hclSubDirDict[subdir] - # mark this dictionary as a module since all directories in terraform are modules by default - hclSubDir[self.IS_MODULE] = True - hclSubDir[self.PARENT] = parentDir - hclSubDir[self.MODULE_NAME] = subdir - else: - hclSubDir = hclSubDirDict[subdir] - # recursively process next subdirectory - i += 1 - self.loadFileByDir(fileName, path[i:], hclSubDirDict[subdir], hclSubDir, terraform_string) - - def findModuleSources(self, d, parentDir, currentFileName): - for key in d: - # only process module key - if key == self.MODULE: - modules = d[key] - # process all modules - for moduleName in modules: - module = modules[moduleName] - # find source parameter - for parameter in module: - if parameter == self.SOURCE: - 
sourcePath = module[parameter] - if not sourcePath.startswith("git::"): - self.createMissingFromSourcePath(sourcePath, parentDir, currentFileName) - - def createMissingFromSourcePath(self, sourcePath, d, currentFileName): - # source is local - while sourcePath != "": - t = self.getNextLevel(sourcePath, "/") - currentModule = t[0] - sourcePath = t[1] - if currentModule == "..": - # move up a level - t = self.getPreviousLevel(currentFileName, os.path.sep) - currentFileName = t[0] - if d.get(self.PARENT) is None: - # add parent - self.dummyIndex += 1 - parent = {} - parent[self.PARENT] = None - parent[self.IS_MODULE] = True - parent[self.MODULE_NAME] = "dummy" + str(self.dummyIndex) - parent[d[self.MODULE_NAME]] = d - d[self.PARENT] = parent - self.hclDict = parent - d = d[self.PARENT] - elif currentModule == ".": - # current directory; do nothing - pass - else: - # move down to currentModule level - currentFileName += os.path.sep + currentModule - md = d.get(currentModule, False) - if md is False: - # create new level - d[currentModule] = {} - md = d[currentModule] - md[self.PARENT] = d - md[self.MODULE_NAME] = currentModule - md[self.IS_MODULE] = True - - d = md - # read directory - self.readDir(currentFileName, d) - return d - - # get all modules for given dictionary d; pass isModule as True if in a module block - def getAllModules(self, d, isModule): - for key in d: - # ignore parent key - if key != self.PARENT: - value = d[key] - if type(value) is dict: - if isModule or self.isModule(value): - moduleName = key - # load module, resolve variables in it and add it to modules dictionary - moduleDict = self.getModule(moduleName) - if self.isModule(value): - moduleDict[self.PARENT] = d[moduleName][self.PARENT] - # recursively get all modules in the nested dictionary in value - self.getAllModules(value, key == "module") - - # get given moduleName from modulesDict; find & load it if not there yet - def getModule(self, moduleName, errorIfNotFound=True, dictToCopyFrom=None, tfDict=None): - moduleDict = self.modulesDict.get(moduleName) - if moduleDict is None or (moduleDict[self.VARIABLE] == {} and moduleDict[self.LOCALS] == {} and moduleDict[self.OUTPUT] == {}): - # not there yet, find it and load it - moduleDict = self.findModule(moduleName, self.hclDict, dictToCopyFrom, tfDict) - if moduleDict is None: - # couldn't find it, log it and create a dummy entry - if errorIfNotFound: - self.logMsg("error", "Couldn't find module " + moduleName) - moduleDict = self.createModuleEntry(moduleName) - elif self.passNumber > 1: - # module found on second pass, re-resolve variables - self.findModule(moduleName, self.hclDict, dictToCopyFrom, tfDict) - - return moduleDict - - # find given moduleName in given dictionary d; load module attributes and resolve variables in module; last two parameters are all or nothing - def findModule(self, moduleName, d, dictToCopyFrom=None, tfDict=None): - # use dictToCopyFrom if provided - if dictToCopyFrom is not None: - sourcePath = self.getSourcePath(dictToCopyFrom) - if sourcePath is not None and not sourcePath.startswith("git::"): - dd = self.getModuleDictFromSourcePath(sourcePath, tfDict) - if dd: - moduleDict = self.modulesDict.get(dd[self.MODULE_NAME]) - if moduleDict is None: - moduleDict = self.createModuleEntry(dd[self.MODULE_NAME]) - moduleDict[self.IS_MODULE] = True - self.loadModule(moduleName, dd, dictToCopyFrom) - if moduleName != dd[self.MODULE_NAME]: - self.loadModule(dd[self.MODULE_NAME], dd, dictToCopyFrom) - # source module found, replace variables and return 
it - m = self.loadModule(dd[self.MODULE_NAME], dd, dictToCopyFrom) - return m - return self.loadModule(moduleName, {}, dictToCopyFrom) - - for key in d: - # ignore parent key - if key != self.PARENT: - value = d[key] - if key == moduleName: - # module found, replace variables and return it - return self.loadModule(moduleName, value, dictToCopyFrom) - else: - if type(value) is dict: - if self.isModule(value): - # recursively find the module - m = self.findModule(moduleName, value) - if m is not None: - return m - # not found - return None - - def loadModule(self, moduleName, d, dictToCopyFrom): - self.logMsgAlways("warning", ">>>loading module " + moduleName) - - moduleDict = self.modulesDict.get(moduleName) - if moduleDict is None: - # create empty module entry - moduleDict = self.createModuleEntry(moduleName) - moduleDict[self.IS_MODULE] = self.hasTerraform(d) - - if dictToCopyFrom is not None: - mdv = moduleDict[self.VARIABLE] - # add/replace the passed in variables to the module's variables - for attr in dictToCopyFrom: - if attr != self.SOURCE: - # only replace on pass #1 if resolved - if self.passNumber == 2 or self.isResolved(dictToCopyFrom[attr]): - mdv[attr] = dictToCopyFrom[attr] - - # load all attributes for this module - self.loadModuleAttributes(moduleName, d, moduleDict, None) - # resolve variables for this module - self.resolveVariablesInModule(moduleName, moduleDict) - return moduleDict - - def isResolved(self, var): - if type(var) is str: - return self.isStrResolved(var) - elif type(var) is dict: - for key in var: - if not self.isResolved(var[key]): - return False - elif type(var) is list: - for value in var: - if not self.isResolved(value): - return False - else: - return False - return True - - def isStrResolved(self, var): - for varErrorReplacement in self.variableErrorReplacement: - if varErrorReplacement in var: - return False - - return True - - def createModuleEntry(self, moduleName): - self.modulesDict[moduleName] = {} - moduleDict = self.modulesDict[moduleName] - moduleDict[self.VARIABLE] = {} - moduleDict[self.LOCALS] = {} - moduleDict[self.OUTPUT] = {} - moduleDict[self.RESOURCE] = {} - moduleDict[self.IS_MODULE] = False - return moduleDict - - def loadModuleAttributes(self, moduleName, d, moduleDict, tfDict): - if self.isModule(d): - if tfDict is None: - tfDict = d - else: - # skip nested modules - return - - for key in sorted(d): - # ignore parent key - if key != self.PARENT: - value = d[key] - if key == self.LOCALS: - # get values for all local variables - for local in value: - # only replace on first pass or not already fully resolved - if self.passNumber == 1 or self.containsVariable(moduleDict[self.LOCALS][local]): - moduleDict[self.LOCALS][local] = value[local] - elif key == self.OUTPUT: - for output in value: - # only replace on first pass or not already fully resolved - if self.passNumber == 1 or self.containsVariable(moduleDict[self.OUTPUT][output]): - moduleDict[self.OUTPUT][output] = value[output][self.VALUE] - elif key == self.RESOURCE: - for resourceType in value: - resourceNames = value[resourceType] - for resourceName in resourceNames: - config = resourceNames[resourceName] - res = moduleDict[self.RESOURCE].get(resourceName, None) - # only replace on first pass or not already fully resolved - if self.passNumber == 1 or (res != None and self.containsVariable(res.config)): - moduleDict[self.RESOURCE][resourceName] = TerraformResource(resourceType, resourceName, config, d[self.FILE_NAME], moduleName) - elif key == self.VARIABLE: - ''' - value could 
be a string as in below case - condition { - test = "ArnEquals" - variable = "aws:SourceArn" - values = ["${var.services_entry_arn}"] - } - ''' - if type(value) is dict: - # initialize any default values for variables - for variable in value: - if value[variable].get(self.DEFAULT) is not None: - moduleDict[self.VARIABLE][variable] = value[variable][self.DEFAULT] - elif key == self.MODULE: - # loop through all modules - for mn in value: - # resolve parameter variables first - for parameter in value[mn]: - if parameter != self.SOURCE: - replacementValue = self.resolveVariableByType(value[mn][parameter], moduleName) - if replacementValue != value[mn][parameter]: - self.logMsgAlways("warning", "replaced module " + mn + " parameter " + parameter + " value " + str(value[mn][parameter]) + " with " + str(replacementValue)) - value[mn][parameter] = replacementValue - # get defined module; load it if not already there - md = self.getModule(mn, False, value[mn], tfDict) - # copy all outputs from source module (md) to containing module variable - for output in md[self.OUTPUT]: - # only copy if not already there - if moduleDict[self.VARIABLE].get(output) is None: - moduleDict[self.VARIABLE][output] = md[self.OUTPUT][output] - else: - if type(value) is dict: - # don't load any other nested modules - if not self.isModule(value): - self.loadModuleAttributes(moduleName, value, moduleDict, tfDict) - - def getSourcePath(self, parameterDict): - for parameter in parameterDict: - if parameter == self.SOURCE: - return parameterDict[parameter] - return None - - def getModuleDictFromSourcePath(self, sourcePath, d): - # source is local - while sourcePath != "": - t = self.getNextLevel(sourcePath, "/") - currentModule = t[0] - sourcePath = t[1] - if currentModule == "..": - # move up a level - d = d[self.PARENT] - elif currentModule == ".": - # current directory; do nothing - pass - else: - # move down to currentModule level - d = d.get(currentModule, False) - if d is False: - return False - return d - - # resolve variables (anything surrounded by ${}) in given moduleDict - def resolveVariablesInModule(self, moduleName, moduleDict): - self.shouldLogErrors = False - # resolve variables - for key in moduleDict[self.VARIABLE]: - value = moduleDict[self.VARIABLE][key] - replacementValue = self.resolveVariableByType(value, moduleName) - moduleDict[self.VARIABLE][key] = replacementValue - if replacementValue != value: - self.logMsgAlways("warning", "replaced variable " + key + " value " + str(value) + " with " + str(replacementValue)) - # resolve locals - for key in moduleDict[self.LOCALS]: - value = moduleDict[self.LOCALS][key] - replacementValue = self.resolveVariableByType(value, moduleName) - moduleDict[self.LOCALS][key] = replacementValue - if replacementValue != value: - self.logMsgAlways("warning", "replaced local variable " + key + " value " + str(value) + " with " + str(replacementValue)) - # resolve outputs - for key in moduleDict[self.OUTPUT]: - value = moduleDict[self.OUTPUT][key] - replacementValue = self.resolveVariableByType(value, moduleName) - moduleDict[self.OUTPUT][key] = replacementValue - if replacementValue != value: - self.logMsgAlways("warning", "replaced output variable " + key + " value " + str(value) + " with " + str(replacementValue)) - # resolve resources - self.shouldLogErrors = True - for key in moduleDict[self.RESOURCE]: - value = moduleDict[self.RESOURCE][key].config - replacementValue = self.resolveVariableByType(value, moduleName) - moduleDict[self.RESOURCE][key].config = 
replacementValue - if replacementValue != value: - self.logMsgAlways("warning", "replaced resource variable " + key + " value " + str(value) + " with " + str(replacementValue)) - - def resolveVariableByType(self, value, moduleName): - if type(value) is str: - return self.resolveVariableLine(value, moduleName) - elif type(value) is dict: - return self.resolveDictVariable(value, moduleName) - elif type(value) is list: - return self.resolveListVariable(value, moduleName) - elif type(value) is tuple: - return self.resolveTupleVariable(value, moduleName) - else: - return value - - def resolveDictVariable(self, value, moduleName): - returnValue = {} - for key in value: - returnValue[key] = self.resolveVariableByType(value[key], moduleName) - return returnValue - - def resolveListVariable(self, value, moduleName): - if len(value) == 0: - return value; - index = 0 - for v in value: - value[index] = self.resolveVariableByType(v, moduleName) - index += 1 - if value[0] in ("join", "merge", "concat", "coalesce", "element", "coalescelist"): - # supported function - return self.handleFunction(value) - return value - - def resolveTupleVariable(self, value, moduleName): - returnValue = tuple(self.resolveVariableByType(v, moduleName) for v in value) - if len(returnValue) == 3: - floatValue0 = self.getFloatValue(returnValue[0]) - floatValue2 = self.getFloatValue(returnValue[2]) - if type(floatValue0) == float and type(returnValue[1]) == str and type(floatValue2) == float: - if returnValue[1] == "+": - return floatValue0 + floatValue2 - elif returnValue[1] == "-": - return floatValue0 - floatValue2 - elif returnValue[1] == "*": - return floatValue0 * floatValue2 - elif returnValue[1] == "/": - return floatValue0 / floatValue2 - return returnValue - - def getFloatValue(self, value): - try: - return float(value) - except: - return value; - - def handleFunction(self, value): - # check if all variables have been resolved - if self.containsVariable(value, True): - # not fully resolved yet; return what we have so far - return value - return self.processFunction(value) - - def processFunction(self, value): - it = iter(value) - function = next(it, None) - if function == "join": - delimiter = next(it, None) - t = next(it, None) - return delimiter.join(v for v in t) - elif function == "merge": - d = {} - for v in it: - if type(v) is dict: - for key in v: - d[key] = v[key] - else: - d[v] = v - return d; - elif function == "concat": - d = [] - for v in it: - if type(v) is list: - for entry in v: - d.append(entry) - else: - d.append(v) - return d; - elif function == "element": - lst = next(it, None) - index = next(it, None) - if '*' in lst: - return value - return lst[index] - else: - # coalesce/coalescelist - d = {} - if value[len(value)-1] == "...": - # there is a single list that needs to be processed - for v in value[1]: - if v: - if type(v) is tuple: - return self.processFunction(v) - else: - return v - else: - for v in it: - if v: - if type(v) is tuple: - return self.processFunction(v) - else: - return v - # no non-empty entries; undefined what to do so return None - return None; - - - # returns True if given dictionary d contains a key of __isModule__ - def isModule(self, d): - for key in d: - if key == self.IS_MODULE: - return d[key] - return False - - # returns True if given dictionary d contains at least one terraform file - def hasTerraform(self, d): - for key in d: - if key.lower().endswith(self.TF): - return True - return False - - # resolve entire variable - def resolveVariableLine(self, value, moduleName): 
- if not self.containsVariable(value): - return value - # a variable needs to be replaced - t = self.findVariable(value, True) - var = t[0] - b = t[1] - e = t[2] - if var.startswith("["): - var = var[1:len(var)-1] - var = var.strip() - rv = self.resolveVariable(var, moduleName) - if b == 0 and (e == len(value) or e == -1): - # full replacement; don't merge since a string may not have been returned - newValue = rv[0] - else: - newValue = value[:b] + str(rv[0]) + value[e:] - # recursively resolve the variables since there may be more than one variable in this value - return self.resolveVariableByType(newValue, moduleName) - - # resolve innermost variable - def resolveVariable(self, value, moduleName, dictToCopyFrom=None, tfDict=None): - # find variable (possibly in brackets) - isOldTFvarStyle=False - v, b, e, insideBrackets, foundDelineator, foundDelineatorErrRepl = self.findVariable(value, False) - if len(v) > 1 and v[1] == "{": - isOldTFvarStyle = True - if not insideBrackets and isOldTFvarStyle: - # inside ${}; remove them - var = value[2:e-1] - else: - var = v - # update moduleName in case we switch modules and need to recurse more - replacementValue, moduleName, isHandledType = self.getReplacementValue(var, moduleName, isOldTFvarStyle, dictToCopyFrom, tfDict) - if replacementValue == var: - # couldn't find a replacement; change to our notation to mark it - if isHandledType: - self.logMsg("error", "Couldn't find a replacement for: " + self.getOrigVar(var) + " in " + moduleName) - else: - self.logMsg("debug", "Couldn't find a replacement for: " + self.getOrigVar(var) + " in " + moduleName) - if not isOldTFvarStyle: - # strip off replaceable variable - var = var[len(foundDelineator):] - replacementValue = value[:b] + foundDelineatorErrRepl + var - if insideBrackets: - replacementValue += "]" - if not isOldTFvarStyle and len(value) > 1 and value[1] == "{": - replacementValue += "}" - if isOldTFvarStyle and e > 0: - # remove closing brace - replacementValue += value[e-1:] - return (replacementValue, not insideBrackets) - - if type(replacementValue) is str: - if insideBrackets: - self.logMsgAlways("info", " replacing [" + var + "] with " + replacementValue) - # resolve the variable again since the replacement may also contain variables - return (value[:b] + self.resolveVariableLine(replacementValue, moduleName) + value[e:], not insideBrackets) - else: - if v == replacementValue: - # this prevents a loop - replacementValue = replacementValue.replace(foundDelineator, foundDelineatorErrRepl, 1) - self.logMsg("debug", "Couldn't find a replacement for: " + self.getOrigVar(var) + " (would have looped) in " + moduleName) - else: - self.logMsgAlways("info", " replacing ${" + var + "} with " + replacementValue) - # resolve the variable again since the replacement may also contain variables - return (self.resolveVariableLine(replacementValue, moduleName), not insideBrackets) - else: - if isOldTFvarStyle: - self.logMsgAlways("info", " replacing " + foundDelineator + var + "} with " + str(replacementValue)) - else: - self.logMsgAlways("info", " replacing " + var + " with " + str(replacementValue)) - - return (replacementValue, not insideBrackets) - - def getOrigVar(self, var): - if var.startswith(self.vars[1]) or var.startswith(self.vars[2]): - return self.vars[0] + var[len(self.vars[1]):] - elif var.startswith(self.locals[1]) or var.startswith(self.locals[2]): - return self.locals[0] + var[len(self.locals[1]):] - elif var.startswith(self.modules[1]) or var.startswith(self.modules[2]): - return 
self.modules[0] + var[len(self.modules[1]):] - else: - return var - - # check if given value contains a variable anywhere - def containsVariable(self, value, isAnyVar=False): - if type(value) is str: - t = self.findAnyVariableDelineatorsForVars(value, False, isAnyVar) - if t[0] == -1: - return False - return True - elif type(value) is dict: - return self.containsVariableDict(value, isAnyVar) - elif type(value) is list: - return self.containsVariableList(value, isAnyVar) - else: - # not a variable - return False - - def containsVariableDict(self, value, isAnyVar): - for key in value: - if self.containsVariable(value[key], isAnyVar): - return True - # no variables found - return False - - def containsVariableList(self, value, isAnyVar): - for v in value: - if self.containsVariable(v, isAnyVar): - return True - # no variables found - return False - - # find deepest nested variable in given value - def findVariable(self, value, isNested, previouslyFoundVar=None): - # pass 1: if unreplaceable, change $ to @ - # pass 2: if unreplaceable, change both $ & @ to ! - if type(value) is str: - isVar = False - val = value - if previouslyFoundVar: - insideBrackets = previouslyFoundVar[3] - else: - insideBrackets = False - if isNested and type(previouslyFoundVar) is str and "{" in previouslyFoundVar[0]: - # if this is a nested call and the outer call found ${, only look for the brace now - braceOnly = True - else: - braceOnly = False - - b, e, foundDelineator, foundDelineatorErrRepl = self.findVariableDelineatorsForVars(val, braceOnly, self.variableFind, self.variableErrorReplacement) - if b == -1: - return None - if b > 0: - partial = value[:b] - if partial[len(partial)-1] == "[": - # open bracket found before the variable - insideBrackets = True - partial = value[b:e] - if partial[len(partial)-1] == "]": - # close bracket found after the variable, remove from variable end - e -= 1 - - isVar = True - if "{" in foundDelineator and e == -1: - # problem - self.add_error("Matching close brace not found: " + value, "---", "---", "high") - return None - - foundVar = (value[b:e], b, e, insideBrackets, foundDelineator, foundDelineatorErrRepl) - - newSearchValue = foundVar[0] - bOffset = 0 - if isVar: - # remove delineator(s) - if newSearchValue.endswith("}"): - newSearchValue = newSearchValue[2:len(newSearchValue)-1] - bOffset = 2 - else: - newSearchValue = newSearchValue[len(foundDelineator):] - bOffset = len(foundDelineator) - else: - newSearchValue = newSearchValue[1:len(newSearchValue)-1] - if not insideBrackets: - # adjust beginning & ending since nested inside previouslyFoundVar - fv_length = len(previouslyFoundVar[4]) - b += fv_length - e += fv_length - bOffset = 0 - else: - bOffset = 1 - foundVar = (newSearchValue, b, e, insideBrackets, foundDelineator, foundDelineatorErrRepl) - if newSearchValue not in self.terraform_workspaces[0]: - # recursively find variable - fv = self.findVariable(newSearchValue, True, foundVar) - if fv is None: - # no variable found - return foundVar - if foundVar[0].endswith("}") and fv[1] == 0 and fv[2] == len(newSearchValue): - # return originally found variable which is the old style - return foundVar - if insideBrackets: - # use beginning & ending from previous - fv = (fv[0], b, e, fv[3], fv[4], fv[5]) - else: - fv = (fv[0], fv[1]+b+bOffset, fv[2]+b+bOffset, fv[3], fv[4], fv[5]) - return fv - else: - return foundVar - return previouslyFoundVar - - def findAnyVariableDelineatorsForVars(self, value, braceOnly, isAnyVar): - if isAnyVar: - for variableFind in 
self.replaceableVariablePrefixes: - t = self.findVariableDelineatorsForVars(value, braceOnly, [variableFind], [variableFind]) - if t[0] != -1: - return t - return -1, 0, None, None - return self.findVariableDelineatorsForVars(value, braceOnly, self.variableFind, self.variableErrorReplacement) - - def findVariableDelineatorsForVars(self, value, braceOnly, variableFind, variableErrorReplacement): - if braceOnly and value not in self.terraform_workspaces: - b, e = self.findVariableDelineators(value, variableFind[0], "}", variableErrorReplacement[0]) - if b > -1: - return b, e, variableFind[0], variableErrorReplacement[0] - else: - prevB = -1 - for varPrefix, varErrorReplacement in zip(variableFind, variableErrorReplacement): - if varPrefix[1] == "{": - closeVar = "}" - else: - closeVar = None - b, e = self.findVariableDelineators(value, varPrefix, closeVar, varErrorReplacement) - if b > -1: - if closeVar != None or b == 0 or (value[b-1] != "{"): - if b > prevB: - prevB = b; - prevE = e - prevVarPrefix = varPrefix - prevVarErrorReplacement = varErrorReplacement - if prevB != -1: - return prevB, prevE, prevVarPrefix, prevVarErrorReplacement - return -1, 0, None, None - - def findVariableDelineators(self, value, openVar, closeVar, varErrorReplacement=None): - ''' - This is valid: name-prefix = "sf-${module.common.account_name}-${local.pcas_vpc_type}-${local.env}-${module.common.region}" - This is not: name-prefix = "sf-module.common.account_name-local.pcas_vpc_type-local.env-module.common.region" - i.e. if combining variables into one variable, must use old ${} variable style - ''' - b = value.rfind(openVar) - if b == -1: - return -1, 0 - if openVar == "[": - # check if preceeded by : - matchObject = self.REGEX_COLON_BRACKET.search(value) - if matchObject != None: - return -1, 0 - if closeVar == None: - if openVar in self.terraform_workspaces: - return b, b + len(openVar) - # search for default "closeVars" - if value.find("[", b) == -1: - defaultCloseVars = [",", "'", ")", "}", "]"] - else: - # don't include ] if [ is in value - defaultCloseVars = [",", "'", ")", "}"] - prevE = 99999 - for closeVar in defaultCloseVars: - e = value.find(closeVar, b) - if e != -1 and e < prevE: - prevE = e - if prevE != 99999: - return b, prevE - # no close value found, use length of value - return b, len(value) - v = value[b+1:] - nested = 0 - for index, char in enumerate(v): - if char == closeVar: - if nested == 0: - return b, b+index+2 - # just closed a nested variable - nested -= 1 - if v[index:index+len(openVar)] == openVar or (varErrorReplacement != None and len(v) >= index+len(varErrorReplacement) and v[index:index+len(varErrorReplacement)] == varErrorReplacement): - # start of a nested variable - nested += 1 - # error: matching closeVar not found - return 0, -1 - - # find replacement value for given var in given moduleName - def getReplacementValue(self, var, moduleName, isOldTFvarStyle, dictToCopyFrom=None, tfDict=None): - replacementValue = None - if (var.startswith('"') and var.endswith('"')) or (var.startswith("'") and var.endswith("'")): - return var[1:len(var)-1], moduleName, True - subscript = None - b = var.find("[") - if b != -1: - v = var[:b] - e = var.find("]", b) - if e != -1: - subscript = var[b+1:e] - v += var[e+1:] - if subscript[0] == '"' or subscript[0] == "'": - # remove quotes - subscript = subscript[1:len(subscript)-1] - else: - v = var - - isHandledType = False - notHandled = ["?", "==", "!=", ">", "<", ">=", "<=", "&&", "||", "!", "+", "-", "*", "/", "%"] - moduleDict = 
self.modulesDict[moduleName] - if isOldTFvarStyle: - varsTuple = self.vars[0] - localsTuple = self.locals[0] - modulesTuple = self.modules[0] - terraform_workspacesTuple = self.terraform_workspaces[0] - else: - if self.passNumber == 1: - varsTuple = self.vars[0] - localsTuple = self.locals[0] - modulesTuple = self.modules[0] - terraform_workspacesTuple = self.terraform_workspaces[0] - else: - varsTuple = (self.vars[0], self.vars[1]) - localsTuple = (self.locals[0], self.locals[1]) - modulesTuple = (self.modules[0], self.modules[1]) - terraform_workspacesTuple = (self.terraform_workspaces[0], self.terraform_workspaces[1]) - if v.startswith(varsTuple): - # conditional statements, boolean statements, and math are not currently handled - if not any(x in v for x in notHandled): - isHandledType = True - v = v[self.getPrefixLength(v, varsTuple):] - index = v.find('.') - if index > -1: - subscript = v[index+1:] - v = v[:index] - replacementValue = moduleDict[self.VARIABLE].get(v) - elif v.startswith(localsTuple): - if not any(x in v for x in notHandled): - isHandledType = True - v = v[self.getPrefixLength(v, localsTuple):] - replacementValue = moduleDict[self.LOCALS].get(v) - elif v.startswith(modulesTuple): - if not any(x in v for x in notHandled): - isHandledType = True - # variable is in a different module - modulePrefixLength = self.getPrefixLength(v, modulesTuple) - e = v.find(".", modulePrefixLength) - if e == -1: - self.add_error("Error Resolving module variable: " + var + " expected ending '.' not found", moduleName, "---", "high") - else: - moduleName = v[modulePrefixLength:e] - md = self.getModule(moduleName, True, dictToCopyFrom, tfDict) - moduleOutputDict = md[self.OUTPUT] - e += 1 - remainingVar = v[e:] - while remainingVar != "": - t = self.getNextLevel(remainingVar, ".") - moduleOutputDict = moduleOutputDict.get(t[0]) - if moduleOutputDict is None: - self.logMsg("error", "Error resolving variable: " + var + " variable not found in module (no module source available?) 
in " + moduleName) - return var, moduleName, True - remainingVar = t[1] - if type(moduleOutputDict) is dict: - replacementValue = moduleOutputDict.get(self.VALUE, moduleOutputDict) - else: - replacementValue = moduleOutputDict - elif v.startswith(terraform_workspacesTuple): - isHandledType = True - - if type(replacementValue) is dict and subscript != None: - replacementValue = replacementValue.get(subscript, subscript) - - if replacementValue is None: - replacementValue = self.variablesFromCommandLine.get(var, var) - - return replacementValue, moduleName, isHandledType - - def getPrefixLength(self, var, varTuple): - if type(varTuple) is str: - return len(varTuple) - for t in varTuple: - if var.startswith(t): - return len(t) - - def getPreviousLevel(self, var, separator): - b = var.rfind(separator) - if b == -1: - b = len(var) - return (var[:b], var[b+1:]) - - def getNextLevel(self, var, separator): - b = var.find(separator) - if b == -1: - b = len(var) - return (var[:b], var[b+1:]) - - # add given failure in given fileName - def add_failure(self, failure, moduleName, fileName, severity, isRuleOverridden, overrides, resourceName): - waived = "" - waiver = self.overridden(isRuleOverridden, overrides, resourceName, severity) - if waiver is not None: - if severity == "high": - waived = "**waived by " + waiver + "**" - else: - waived = "**waived**" - self.jsonOutput["failures"].append( self.getFailureMsg(severity, waived, failure, moduleName, fileName) ) - - def overridden(self, isRuleOverridden, overrides, resourceName, severity): - if isRuleOverridden: - for override in overrides: - if override[1] == resourceName: - if severity == "high": - if len(override) == 3: - return override[2] - print("***Invalid override: " + ":".join(override)) - print("high severity rules must include RR or RAR") - print("Needs to be in the following format: rule_name:resource_name:RR-xxx or rule_name:resource_name:RAR-xxx where xxx is 1-10 digits") - sys.exit(99) - return "" - return None - - # add given error in given fileName - def add_error(self, error, moduleName, fileName, severity): - if self.passNumber == 2 and self.shouldLogErrors: - self.add_error_force(error, moduleName, fileName, severity) - - def add_error_force(self, error, moduleName, fileName, severity): - self.jsonOutput["errors"].append( self.getFailureMsg(severity, "", error, moduleName, fileName) ) - - def getFailureMsg(self, severity, waived, msg, moduleName, fileName): - message = {} - message["severity"] = severity - message["waived"] = waived - message["message"] = msg - message["moduleName"] = moduleName - message["fileName"] = fileName - return message - - def logMsg(self, typ, msg): - if self.passNumber == 2: - self.logMsgAlways(typ, msg) - - def logMsgAlways(self, typ, msg): - if typ == "error": - logging.error(msg) - elif typ == "warning": - logging.warning(msg) - elif typ == "info": - logging.info(msg) - elif typ == "debug": - logging.debug(msg) diff --git a/terrascan/terrascan.py b/terrascan/terrascan.py deleted file mode 100644 index 78c69802c..000000000 --- a/terrascan/terrascan.py +++ /dev/null @@ -1,1125 +0,0 @@ -# -*- coding: utf-8 -*- -""" - terrascan: A collection of security and best practice tests for static code analysis of terraform templates using terraform_validate. - - Copyright (C) 2020 Accurics, Inc. 
- - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see . -""" - -import argparse -import unittest -import os -import re -import sys -import subprocess -import json -import time -from terrascan.embedded import terraform_validate -import logging - -jsonOutput = { - "dateTimeStamp": "", - "terrascan-version": "", - "failures": [], - "errors": [], - "files": [], - "rules": [] -} - - -############################################################################################################################################################################### -# Rules: these are the rules used to verify that the terraform files are set up correctly. -# -# available methods for resources: -# property(property_name): returns a list of the requested property_name; if self.v.error_if_property_missing() is called before the rule, will fail if property is missing. -# with_property(property_name, regex_value): returns a list of the requested property_name with the requested regex_value. -# should_not_exist(): fails if the resource doesn't exist. -# should_have_properties(properties_list): fails if any of the properties in the given properties_list doesn't exist. -# should_not_have_properties(properties_list): fails if any of the properties in the given properties_list exists. -# find_property(property_name_regex): returns a list of the requested property_name_regex. -# available methods for properties: -# property(property_name): returns a list of the requested property_name; if self.v.error_if_property_missing() is called before the rule, will fail if property is missing. -# should_equal(expected_value): fails if property value doesn't equal given expected_value. -# should_equal_case_insensitive(expected_value): fails if property value doesn't equal given expected_value ignoring case. -# should_not_equal(expected_value): fails if property value equals given expected_value. -# should_not_equal_case_insensitive(expected_value): fails if property value equals given expected_value ignoring case. -# list_should_contain_any(values_list): fails if the value of the property doesn't contain any of the values in values_list. -# list_should_contain(values_list): fails if the value of the property doesn't contain all of the values in values_list. -# list_should_not_contain(values_list): fails if the value of the property contains any of the values in values_list. -# should_have_properties(properties_list): fails if a property doesn't contain any of the properties in properties_list. -# should_not_have_properties(properties_list): fails if a property contains any of the properties in properties_list. -# find_property(property_name_regex): returns a list of the requested property_name_regex. -# should_match_regex(property_value_regex): fails if the value of the property doesn't match the given property_value_regex. -# should_contain_valid_json(): fails if the value of the property doesn't contain valid json. 
-############################################################################################################################################################################### -class Rules(unittest.TestCase): - - rules = [] - - def setUp(self): - self.v = terraform_validate.Validator() - self.v.preprocessor = self.preprocessor - self.v.overrides = self.overrides - - ################################################################################################# - # examples of good and bad (marked with ***error***) are given before each rule - ################################################################################################# - - ################################################################################################# - # This resource block creates an S3 bucket with encryption. - # resource "aws_s3_bucket" "encryptedBucket" { - # bucket = "good-bucket-name" - # server_side_encryption_configuration { - # rule { - # apply_server_side_encryption_by_default { - # kms_master_key_id = "${data.aws_kms_key.bucket.arn}" - # sse_algorithm = "aws:kms" - # } - # } - # } - # } - # - # This resource block creates an S3 bucket with no encryption. ***error*** - # resource "aws_s3_bucket" "noEncryption" { - # bucket = "bad-bucket-name" - # } - def test_aws_s3_bucket_server_side_encryption_configuration(self): - # get name of rule from function name - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # verify that server side encryption is turned on for s3 buckets - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - # to change severity, override it here (default is high) - validator.severity = "high" - validator.resources('aws_s3_bucket').should_have_properties(['server_side_encryption_configuration']) - - ################################################################################################# - # This resource block creates a dynamodb table with no encryption. ***error*** - # resource "aws_dynamodb_table" "noEncryption" { - # name = "${local.env}" - # } - # - # This resource block creates a dynamodb table with no encryption. ***error*** - # resource "aws_dynamodb_table" "encryptionEnabledFalse" { - # name = "${local.env}" - # server_side_encryption { - # enabled = false - # } - # } - # - # This resource block creates a dynamodb table with encryption. - # resource "aws_dynamodb_table" "encryptionEnabledTrue" { - # name = "${local.env}" - # server_side_encryption { - # enabled = true - # } - # } - def test_aws_dynamodb_table_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # verify that encryption is turned on for dynamodb tables - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_dynamodb_table').property('server_side_encryption').property('enabled').should_equal(True) - - ################################################################################################# - # This resource block creates an ebs volue with no encryption. ***error*** - # resource "aws_ebs_volume" "noEncryption" { - # availability_zone = "us-east-1a" - # size = 10 - # type = "gp2" - # tags { - # Name = "Encryption Test" - # } - # kms_key_id = "${data.aws_kms_key.volume.arn}" - # } - # - # This resource block creates an ebs volue with no encryption. 
***error*** - # resource "aws_ebs_volume" "encryptionEnabledFalse" { - # availability_zone = "us-east-1a" - # size = 10 - # type = "gp2" - # tags { - # Name = "Encryption Test" - # } - # encrypted = false - # kms_key_id = "${data.aws_kms_key.volume.arn}" - # } - # - # resource "aws_ebs_volume" "encryptionEnabledTrue" { - # availability_zone = "us-east-1a" - # size = 10 - # type = "gp2" - # tags { - # Name = "Encryption Test" - # } - # # The attributes below mark the volume for encryption. - # encrypted = true - # kms_key_id = "${data.aws_kms_key.volume.arn}" - # } - def test_aws_ebs_volume_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # verify that all resources of type 'aws_ebs_volume' are encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ebs_volume').property('encrypted').should_equal(True) - - ################################################################################################# - # This resource block creates a kms key without key rotation. ***error*** - # resource "aws_kms_key" "no_enable_key_rotation" { - # description = "KMS key 1" - # deletion_window_in_days = 10 - # } - # - # This resource block creates a kms key without key rotation. ***error*** - # resource "aws_kms_key" "enable_key_rotation_false" { - # description = "KMS key 1" - # deletion_window_in_days = 10 - # enable_key_rotation = false - # } - # - # This resource block creates a kms key with key rotation. - # resource "aws_kms_key" "enable_key_rotation_true" { - # description = "KMS key 1" - # deletion_window_in_days = 10 - # enable_key_rotation = true - # } - def test_aws_kms_key_rotation(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # verify that all aws_kms_key resources have key rotation enabled - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_kms_key').property('enable_key_rotation').should_equal(True) - - ################################################################################################# - # This resource block creates an iam user login profile. ***error*** - # resource "aws_iam_user_login_profile" "badExample" { - # user = "some_user_name" - # pgp_key = "keybase:some_person_that_exists" - # } - def test_aws_iam_user_login_profile(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # resource aws_iam_user_login_profile should not exist - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_iam_user_login_profile').should_not_exist() - - ################################################################################################# - # This resource block creates a security group rule. 
***error*** - # resource "aws_security_group_rule" "badExample" { - # type = "ingress" - # from_port = 0 - # to_port = 65535 - # protocol = "tcp" - # cidr_blocks = ["0.0.0.0/0"] - # self = true - # security_group_id = "${aws_security_group.emr-master.id}" - # } - def test_aws_security_group_rule_ingress_open(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # verify that security group rule ingress is not open to 0.0.0.0/0 - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_security_group_rule').with_property('type', 'ingress').property('cidr_blocks').list_should_not_contain('0.0.0.0/0') - - ################################################################################################# - # This resource block creates a security group. ***error*** - # resource "aws_security_group" "badExample" { - # name = "generic-emr-master" - # description = "Manage traffic for EMR masters" - # vpc_id = "${local.emr_vpc_id}" - # ingress { - # from_port = 443 - # to_port = 443 - # protocol = "tcp" - # cidr_blocks = ["0.0.0.0/0"] - # } - # } - def test_aws_security_group_ingress_open(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # verify that security group ingress is not open to 0.0.0.0/0 - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_security_group').property('ingress').property('cidr_blocks').list_should_not_contain('0.0.0.0/0') - - ################################################################################################# - # This resource block creates an aws_db_instance. - # resource "aws_db_instance" "default" { - # allocated_storage = 20 - # engine = "mysql" - # instance_class = "db.t2.micro" - # name = "mydb" - # storage_encrypted = true - # } - # - # This resource block creates an aws_db_instance. ***error*** - # resource "aws_db_instance" "default" { - # allocated_storage = 20 - # engine = "mysql" - # instance_class = "db.t2.micro" - # name = "mydb" - # storage_encrypted = false - # } - def test_aws_db_instance_encrypted(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that DB is encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_db_instance').property('storage_encrypted').should_equal(True) - - ################################################################################################# - # This resource block creates an aws_rds_cluster. - # resource "aws_rds_cluster" "default" { - # database_name = "mydb" - # master_username = "foo" - # master_password = "bar" - # storage_encrypted = true - # } - # - # This resource block creates an aws_rds_cluster. 
***error*** - # resource "aws_rds_cluster" "default" { - # database_name = "mydb" - # master_username = "foo" - # master_password = "bar" - # } - def test_aws_rds_cluster_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource is encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_rds_cluster').property('storage_encrypted').should_equal(True) - - ################################################################################################# - # public exposure - these were part of the original terrascan - ################################################################################################# - - def test_aws_alb_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources(['aws_lb', 'aws_alb']).property('internal').should_not_equal(False) - - def test_aws_db_instance_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_db_instance').property('publicly_accessible').should_not_equal(True) - - def test_aws_dms_replication_instance_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_dms_replication_instance').property('publicly_accessible').should_not_equal(True) - - def test_aws_elb_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elb').property('internal').should_not_equal(False) - - def test_aws_instance_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_instance').property('associate_public_ip_address').should_not_equal(True) - - def test_aws_launch_configuration_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_launch_configuration').property('associate_public_ip_address').should_not_equal(True) - - def test_aws_rds_cluster_instance_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_rds_cluster_instance').property('publicly_accessible').should_not_equal(True) - - def test_aws_redshift_cluster_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - self.v.error_if_property_missing() - validator_generator = 
self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_redshift_cluster').property('publicly_accessible').should_not_equal(True) - - def test_aws_s3_bucket_public(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_s3_bucket').property('acl').should_not_equal('public-read') - validator.resources('aws_s3_bucket').property('acl').should_not_equal('public-read-write') - validator.resources('aws_s3_bucket').property('acl').should_not_equal('authenticated-read') - validator.resources('aws_s3_bucket').should_not_have_properties(['website']) - - ################################################################################################# - # other terrascan original rules - prefix test with X to disable - ################################################################################################# - # encryption - ################################################################################################# - - def test_aws_alb_listener_port(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that listener port is 443 - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources(['aws_lb_listener', 'aws_alb_listener']).property('port').should_equal('443') - - def test_aws_alb_listener_protocol(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that protocol is not http - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources(['aws_lb_listener', 'aws_alb_listener']).property('protocol').should_not_equal_case_insensitive('http') - - def test_aws_alb_listener_ssl_policy(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that old ssl policies are not used - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources(['aws_lb_listener', 'aws_alb_listener']).property('ssl_policy').should_not_equal('ELBSecurityPolicy-2015-05') - validator.resources(['aws_lb_listener', 'aws_alb_listener']).property('ssl_policy').should_not_equal('ELBSecurityPolicy-TLS-1-0-2015-04') - - def test_aws_alb_listener_certificate(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that certificate_arn is set - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources(['aws_lb_listener', 'aws_alb_listener']).should_have_properties(['certificate_arn']) - - def test_aws_ami_ebs_block_device_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ami 'ebs_block_device' blocks are encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ami').property('ebs_block_device').property('encrypted').should_equal(True) - - def 
test_aws_ami_ebs_block_device_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ami 'ebs_block_device' blocks has KMS - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ami').property('ebs_block_device').should_have_properties(['kms_key_id']) - - def test_aws_ami_copy_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resources are encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ami_copy').property('encrypted').should_equal(True) - - def test_aws_ami_copy_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ami_copy').should_have_properties(['kms_key_id']) - - def test_aws_api_gateway_domain_name_certificate(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that certificate settings have been configured - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_api_gateway_domain_name').should_have_properties( - [ - 'certificate_name', - 'certificate_body', - 'certificate_chain', - 'certificate_private_key' - ]) - - def test_aws_instance_ebs_block_device_encrypted(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ec2 instance 'ebs_block_device' is encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_instance').property('ebs_block_device').property('encrypted').should_equal(True) - - def test_aws_cloudfront_distribution_origin_protocol_policy(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that origin receives https only traffic - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_cloudfront_distribution').property('origin').property('custom_origin_config').property('origin_protocol_policy').should_equal("https-only") - - def test_aws_cloudfront_distribution_def_cache_viewer_prot_policy(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that cache protocol doesn't allow all - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_cloudfront_distribution').property('default_cache_behavior').property('viewer_protocol_policy').should_not_equal("allow-all") - - def test_aws_cloudfront_distribution_cache_beh_viewer_proto_policy(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that cache protocol doesn't allow all - validator_generator = 
self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_cloudfront_distribution').property('cache_behavior').property('viewer_protocol_policy').should_not_equal("allow-all") - - def test_aws_cloudtrail_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_cloudtrail').should_have_properties(['kms_key_id']) - - def test_aws_codebuild_project_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_codebuild_project').should_have_properties(['encryption_key']) - - def test_aws_codepipeline_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_codepipeline').should_have_properties(['encryption_key']) - - def test_aws_db_instance_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_db_instance').should_have_properties(['kms_key_id']) - - def test_aws_dms_endpoint_ssl_mode(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that SSL is verified - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_dms_endpoint').property('ssl_mode').should_equal('verify-full') - - def test_aws_dms_endpoint_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_dms_endpoint').should_have_properties( - [ - 'kms_key_arn' - ]) - - def test_aws_dms_endpoint_certificate(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that SSL cert has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_dms_endpoint').should_have_properties( - [ - 'certificate_arn' - ]) - - def test_aws_dms_replication_instance_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - 
validator.resources('aws_dms_replication_instance').should_have_properties(['kms_key_arn']) - - def test_aws_ebs_volume_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ebs_volume').should_have_properties(['kms_key_id']) - - def test_aws_efs_file_system_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that all resources of type 'aws_efs_file_system' are encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_efs_file_system').property('encrypted').should_equal(True) - - def test_aws_efs_file_system_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_efs_file_system').should_have_properties(['kms_key_id']) - - def test_aws_elastictranscoder_pipeline_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elastictranscoder_pipeline').should_have_properties(['aws_kms_key_arn']) - - def test_aws_elb_listener_port_80(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ELB listener port is not 80 (http) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elb').property('listener').property('lb_port').should_not_equal(80) - - def test_aws_elb_listener_port_21(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ELB listener port is not 21 ftp - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elb').property('listener').property('lb_port').should_not_equal(21) - - def test_aws_elb_listener_port_23(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ELB listener port is not 23 telnet - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elb').property('listener').property('lb_port').should_not_equal(23) - - def test_aws_elb_listener_port_5900(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ELB listener port is not 5900 VNC - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elb').property('listener').property('lb_port').should_not_equal(5900) - - def test_aws_kinesis_firehose_delivery_stream_s3_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ELB listener port is not 80 (http) - validator_generator 
= self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_kinesis_firehose_delivery_stream').property('s3_configuration').should_have_properties(['kms_key_arn']) - - def test_aws_kinesis_firehose_delivery_stream_extended_s3_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert ELB listener port is not 80 (http) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_kinesis_firehose_delivery_stream').property('extended_s3_configuration').should_have_properties(['kms_key_arn']) - - def test_aws_lambda_function_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert that a KMS key has been provided - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_lambda_function').should_have_properties(['kms_key_arn']) - - def test_aws_opsworks_application_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource is encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_opsworks_application').property('enable_ssl').should_equal(True) - - def test_aws_rds_cluster_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource has a KMS with CMKs - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_rds_cluster').should_have_properties(['kms_key_id']) - - def test_aws_redshift_cluster_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource is encrypted - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_redshift_cluster').property('encrypted').should_equal(True) - - def test_aws_redshift_cluster_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource has a KMS with CMKs - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_redshift_cluster').should_have_properties(['kms_key_id']) - - def test_aws_s3_bucket_object_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource has a KMS with CMKs - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_s3_bucket_object').should_have_properties(['kms_key_id']) - - def test_aws_sqs_queue_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource has a KMS with CMK - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_sqs_queue').should_have_properties(['kms_master_key_id', 
'kms_data_key_reuse_period_seconds']) - - def test_aws_ssm_parameter_encryption(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource is encrypted with KMS - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ssm_parameter').property('type').should_equal("SecureString") - - def test_aws_ssm_parameter_kms(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # Assert resource has a KMS with CMK - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ssm_parameter').should_have_properties(['key_id']) - - ################################################################################################# - # logging and monitoring - ################################################################################################# - - def test_aws_alb_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources(['aws_lb', 'aws_alb']).should_have_properties(['access_logs']) - - def test_aws_cloudfront_distribution_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_cloudfront_distribution').should_have_properties(['logging_config']) - - def test_aws_cloudtrail_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_cloudtrail').property('enable_logging').should_not_equal(False) - - def test_aws_elb_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elb').should_have_properties(['access_logs']) - - def test_aws_emr_cluster_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_emr_cluster').should_have_properties(['log_uri']) - - def test_aws_kinesis_firehose_delivery_stream__s3_config_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_kinesis_firehose_delivery_stream').property('s3_configuration').should_have_properties(['cloudwatch_logging_options']) - validator.resources('aws_kinesis_firehose_delivery_stream').property('s3_configuration').property('cloudwatch_logging_options').property('enabled').should_equal(True) - - def test_aws_kinesis_firehose_delivery_stream_redshift_conf_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = 
self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_kinesis_firehose_delivery_stream').property('redshift_configuration').should_have_properties(['cloudwatch_logging_options']) - validator.resources('aws_kinesis_firehose_delivery_stream').property('redshift_configuration').property('cloudwatch_logging_options').property('enabled').should_equal(True) - - def test_aws_kinesis_firehose_delivery_stream__es_config_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_kinesis_firehose_delivery_stream').property('elasticsearch_configuration').should_have_properties(['cloudwatch_logging_options']) - validator.resources('aws_kinesis_firehose_delivery_stream').property('elasticsearch_configuration').property('cloudwatch_logging_options').property('enabled').should_equal(True) - - def test_aws_redshift_cluster_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - self.v.error_if_property_missing() - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_redshift_cluster').property('enable_logging').should_not_equal(False) - - def test_aws_s3_bucket_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_s3_bucket').should_have_properties(['logging']) - - def test_aws_ssm_maintenance_window_task_logging(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_ssm_maintenance_window_task').should_have_properties(['logging_info']) - - ################################################################################################# - # security group - ################################################################################################# - - def test_aws_db_security_group_used(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # This security group type exists outside of VPC (e.g. ec2 classic) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_db_security_group').should_not_exist() - - def test_aws_redshift_security_group_used(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # This security group type exists outside of VPC (e.g. ec2 classic) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_redshift_security_group').should_not_exist() - - def test_aws_elasticache_security_group_used(self): - ruleName = sys._getframe().f_code.co_name[5:] - self.rules.append(ruleName) - # This security group type exists outside of VPC (e.g. 
ec2 classic) - validator_generator = self.v.get_terraform_files(self.isRuleOverridden(ruleName)) - for validator in validator_generator: - validator.resources('aws_elasticache_security_group').should_not_exist() - - - def isRuleOverridden(self, ruleName): - for override in self.overrides: - if ruleName == override[0]: - return True - return False - - -################################################################################################# -# run the tests -################################################################################################# -def terrascan(args): - start = time.time() - - try: - result = subprocess.run(['pip', 'show', 'terrascan-sf'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) - stdout = result.stdout.decode("utf-8") - versionStr = "Version: " - startIndex = stdout.find(versionStr) - except: - startIndex = -1 - if startIndex == -1: - version = "?" - else: - startIndex += len(versionStr) - endIndex = stdout.find("\r", startIndex) - version = stdout[startIndex:endIndex] - - # process the arguments - if args.warranty or args.gpl: - print("terrascan Copyright (C) 2020 Accurics, Inc.\n") - if args.warranty: - print("This program is distributed in the hope that it will be useful,") - print("but WITHOUT ANY WARRANTY; without even the implied warranty of") - print("MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the") - print("GNU General Public License for more details.") - else: - print("This program is free software: you can redistribute it and/or modify") - print("it under the terms of the GNU General Public License as published by") - print("the Free Software Foundation, either version 3 of the License, or") - print("(at your option) any later version. see ") - - sys.exit(0) - - terraformLocation = args.location[0] - if not os.path.isabs(terraformLocation): - terraformLocation = os.path.join(os.sep, os.path.abspath("."), terraformLocation) - if args.vars: - variablesJsonFilename = [] - for fileName in args.vars: - if not os.path.isabs(fileName): - fileName = os.path.join(os.sep, os.path.abspath("."), fileName) - variablesJsonFilename.append(fileName) - else: - variablesJsonFilename = None - if args.overrides: - Rules.overrides = [] - overridesFileName = args.overrides[0] - if not os.path.isabs(overridesFileName): - overridesFileName = os.path.join(os.sep, os.path.abspath("."), overridesFileName) - try: - with open(overridesFileName, "r", encoding="utf-8") as fp: - overridesFileString = fp.read() - overrides = json.loads(overridesFileString) - overrides = overrides["overrides"] - # validate overrides - for override in overrides: - if len(override) < 2 or len(override) > 3 or len(override) == 3 and not re.match(r"RR-\d{1,10}$|RAR-\d{1,10}$", override[2]): - print("***Invalid entry in overrides file: " + override) - print("Needs to be in the following format: rule_name:resource_name or rule_name:resource_name:RR-xxx or rule_name:resource_name:RAR-xxx where xxx is 1-10 digits") - sys.exit(99) - Rules.overrides.append(override) - except Exception as e: - print("***Error loading overrides file " + overridesFileName) - print(e) - sys.exit(99) - else: - Rules.overrides = "" - if args.results: - outputJsonFileName = args.results[0] - if not os.path.isabs(outputJsonFileName): - outputJsonFileName = os.path.join(os.sep, os.path.abspath("."), outputJsonFileName) - else: - outputJsonFileName = None - if args.config: - config = args.config[0] - else: - config = None - - # set logging based on logging.config if present (default is error) - if config == 
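For reference, the overrides file consumed above is plain JSON with a top-level "overrides" key; each entry lists a rule name and a resource name, optionally followed by an RR- or RAR- reference of 1 to 10 digits, which is what the validation regex above enforces. A minimal illustrative file (the rule and resource names reuse examples from this test module; the ticket number is made up):

    {
        "overrides": [
            ["aws_s3_bucket_server_side_encryption_configuration", "noEncryption"],
            ["aws_ebs_volume_encryption", "encryptionEnabledFalse", "RR-1234"]
        ]
    }

Entries whose first element matches a rule name make isRuleOverridden return True for that rule; the full list is also handed to the validator through self.v.overrides in setUp.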
"none": - logging.basicConfig(level=logging.CRITICAL) - elif config == "warning": - logging.basicConfig(level=logging.WARNING) - elif config == "info": - logging.basicConfig(level=logging.INFO) - elif config == "debug": - logging.basicConfig(level=logging.DEBUG) - else: - config = "error" - logging.basicConfig(level=logging.ERROR) - - print("terrascan version {0}".format(version)) - print("Logging level set to {0}.".format(config)) - - Rules.preprocessor = terraform_validate.PreProcessor(jsonOutput) - Rules.preprocessor.process(terraformLocation, variablesJsonFilename) - - runner = unittest.TextTestRunner() - itersuite = unittest.TestLoader().loadTestsFromTestCase(Rules) - runner.run(itersuite) - - end = time.time() - elapsedTime = end - start - processedMessage = "Processed on " + time.strftime("%m/%d/%Y") + " at " + time.strftime("%H:%M") - - if outputJsonFileName: - jsonOutput["dateTimeStamp"] = processedMessage - jsonOutput["terrascan-version"] = version - for fileName in Rules.preprocessor.fileNames: - jsonOutput["files"].append(fileName) - for rule in Rules.rules: - jsonOutput["rules"].append(rule) - with open(outputJsonFileName, 'w') as jsonOutFile: - json.dump(jsonOutput, jsonOutFile) - - print("\nProcessed " + str(len(Rules.preprocessor.fileNames)) + " files in " + terraformLocation + "\n") - for fileName in Rules.preprocessor.fileNames: - logging.debug(" Processed " + fileName) - print("") - - print(processedMessage) - print("Results (took %.2f seconds):" % elapsedTime) - rc = 0 - print("\nFailures: (" + str(len(jsonOutput["failures"])) + ")") - for failure in jsonOutput["failures"]: - m, f = getMF(failure) - waived = "" - if len(failure["waived"]) > 0: - waived = failure["waived"] + " " - print("[" + failure["severity"] + "] " + waived + failure["message"] + m + f) - if failure["waived"] == "": - rc = 4 - print("\nErrors: (" + str(len(jsonOutput["errors"])) + ")") - for error in jsonOutput["errors"]: - m, f = getMF(error) - print("[" + error["severity"] + "] " + error["message"] + m + f) - rc = 4 - if args.displayRules: - print("\nRules used:") - for rule in Rules.rules: - print(rule) - - sys.exit(rc) - - -# Returns command line parser for terrascan -def create_parser(): - parser = argparse.ArgumentParser(description="A collection of security and best practice tests for static code analysis of terraform templates using terraform_validate.") - - # only required if optional parameters not present - req = '-w' not in sys.argv and '--warranty' not in sys.argv and '-g' not in sys.argv and '--gpl' not in sys.argv - - parser.add_argument( - '-l', - '--location', - help='location of terraform templates to scan', - nargs=1, - required=req - ) - parser.add_argument( - '-v', - '--vars', - help='variables json or .tf file name', - nargs='*', - ) - parser.add_argument( - '-o', - '--overrides', - help='override rules file name', - nargs=1 - ) - parser.add_argument( - '-r', - '--results', - help='output results file name', - nargs=1, ) - parser.add_argument( - '-d', - '--displayRules', - help='display the rules used', - nargs='?', - const=True, default=False - ) - parser.add_argument( - '-w', - '--warranty', - help='displays the warranty', - nargs='?', - const=True, default=False - ) - parser.add_argument( - '-g', - '--gpl', - help='displays license information', - nargs='?', - const=True, default=False - ) - parser.add_argument( - '-c', - '--config', - help='logging configuration: error, warning, info, debug, or none; default is error', - nargs=1, ) - parser.set_defaults(func=terrascan) - - 
return parser - - -def getMF(json): - if json["moduleName"] == "---": - m = "" - else: - m = " in module " + json["moduleName"] - if json["fileName"] == "---": - f = "" - else: - if m == "": - f = " in file " + json["fileName"] - else: - f = ", file " + json["fileName"] - return m, f - -def main(args=None): - """ - Terrascan console script. Parses user input to determine location of - terraform templates and which tests to execute - """ - parser = create_parser() - args = parser.parse_args(args) - - #tests = args.tests[0] - #location = path.abspath(args.location[0]) - - #if not path.exists(location): - #print("ERROR: The specified location doesn't exists") - #exit(1) - - #exit(run_test(location, tests)) - terrascan(args) diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index 81a1bc416..000000000 --- a/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Unit test package for terrascan.""" diff --git a/tests/infrastructure/fail/gcp_main.tf b/tests/infrastructure/fail/gcp_main.tf deleted file mode 100644 index b19b890c5..000000000 --- a/tests/infrastructure/fail/gcp_main.tf +++ /dev/null @@ -1,11 +0,0 @@ -resource "google_compute_disk" "default" { - name = "test-disk" -} -resource "google_compute_ssl_policy" "default-ssl" { - name = "test-ssl-policy" - min_tls_version = "1.1" -} -resource "google_compute_firewall" "default_firewall" { - name = "test-firewall" - destination_ranges = ["0.0.0.0/0"] -} diff --git a/tests/infrastructure/fail/main.tf b/tests/infrastructure/fail/main.tf deleted file mode 100644 index 1628fa40e..000000000 --- a/tests/infrastructure/fail/main.tf +++ /dev/null @@ -1,239 +0,0 @@ -/** - * Template to validate encryption test - */ - -variable "encryption" { - description = "Set to false to fail boolen based tests" - default = "false" -} - -resource "aws_alb_listener" "front_end" { - port = "80" - protocol = "http" - ssl_policy = "ELBSecurityPolicy-2015-05" - - # certificate_arn = "arn:aws:iam::187416307283:server-certificate/test_cert_rab3wuqwgja25ct3n4jdj2tzu4" -} - -resource "aws_ami" "example" { - ebs_block_device { - encrypted = "${var.encryption}" - - # Comment the line below to fail KMS test - # kms_key_id = "1234" - } -} - -resource "aws_ami_copy" "example" { - # Comment the line below to fail KMS test - # kms_key_id = "1234" - encrypted = "${var.encryption}" -} - -resource "aws_api_gateway_domain_name" "example" { - # Comment the lines below to fail certificate test # certificate_name = "example-api" # certificate_body = "${file("${path.module}/example.com/example.crt")}" # certificate_chain = "${file("${path.module}/example.com/ca.crt")}" # certificate_private_key = "${file("${path.module}/example.com/example.key")}" -} - -resource "aws_instance" "foo" { - associate_public_ip_address = true - - ebs_block_device { - encrypted = "${var.encryption}" - } -} - -resource "aws_cloudfront_distribution" "distribution" { - origin { - custom_origin_config { - origin_protocol_policy = "http-only" - } - } - - default_cache_behavior { - viewer_protocol_policy = "allow-all" - } - - cache_behavior { - viewer_protocol_policy = "allow-all" - } -} - -resource "aws_cloudtrail" "foo" { - # Comment the line below to fail KMS test # kms_key_id = "1234" - enable_logging = false -} - -resource "aws_codebuild_project" "foo" { - # Comment the line below to fail KMS test # encryption_key = "1234" -} - -resource "aws_codepipeline" "foo" { - # Comment the line below to fail KMS test # encryption_key = "1234" -} - -resource 
"aws_db_instance" "default" { - # Comment the line below to fail KMS test - # kms_key_id = "1234" - storage_encrypted = "${var.encryption}" -} - -resource "aws_dms_endpoint" "test" { - # certificate_arn = "arn:aws:acm:us-east-1:123456789012:certificate/12345678-1234-1234-1234-123456789012" - # kms_key_arn = "arn:aws:kms:us-east-1:123456789012:key/12345678-1234-1234-1234-123456789012" - ssl_mode = "none" -} - -resource "aws_dms_replication_instance" "test" { - # kms_key_arn = "arn:aws:kms:us-east-1:123456789012:key/12345 78-1234-1234-1234-123456789012" - publicly_accessible = true -} - -resource "aws_ebs_volume" "foo" { - # Comment the line below to fail KMS test - # kms_key_id = "1234" - encrypted = "${var.encryption}" -} - -resource "aws_efs_file_system" "foo" { - # Comment the line below to fail KMS test - # kms_key_id = "1234" - encrypted = "${var.encryption}" -} - -resource "aws_elastictranscoder_pipeline" "bar" { - # aws_kms_key_arn = "${var.kms_key_arn}" -} - -resource "aws_elb" "foo" { - internal = false - - listener { - lb_port = 80 - lb_protocol = "http" - } -} - -resource "aws_elb" "bar" { - listener { - lb_port = 21 - lb_protocol = "tcp" - } -} - -resource "aws_elb" "baz" { - listener { - lb_port = 23 - lb_protocol = "tcp" - } -} - -resource "aws_elb" "foobar" { - listener { - lb_port = 5900 - lb_protocol = "tcp" - } -} - -resource "aws_kinesis_firehose_delivery_stream" "foo" { - s3_configuration { - # kms_key_arn = "${var.kms_key_arn}" - } - - extended_s3_configuration { - # kms_key_arn = "${var.kms_key_arn}" - } - - redshift_configuration { - cloudwatch_logging_options { - enabled = false - } - } - - elasticsearch_configuration {} -} - -resource "aws_lambda_function" "foo" { - # kms_key_arn = "${var.kms_key_arn}" -} - -resource "aws_opsworks_application" "foo-app" { - enable_ssl = false -} - -resource "aws_rds_cluster" "default" { - storage_encrypted = false - - # kms_key_id = "${var.kms_key_arn}" -} - -resource "aws_redshift_cluster" "default" { - publicly_accessible = true - encrypted = "${var.encryption}" - - # kms_key_id = "${var.kms_key_arn}" -} - -resource "aws_sqs_queue" "terraform_queue" { - # kms_master_key_id = "alias/aws/sqs" # kms_data_key_reuse_period_seconds = 300 -} - -resource "aws_ssm_parameter" "secret" { - type = "String" - - # key_id = "${var.kms_key_arn}" -} - -resource "aws_elasticache_security_group" "bar" {} - -resource "aws_db_security_group" "default" {} - -resource "aws_redshift_security_group" "default" {} - -resource "aws_security_group_rule" "allow_all" { - type = "ingress" - from_port = 0 - to_port = 65535 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0"] - prefix_list_ids = ["pl-12c4e678"] - - security_group_id = "sg-123456" -} - -resource "aws_security_group" "allow_all" { - name = "allow_all" - description = "Allow all inbound traffic" - - ingress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - } - - egress { - from_port = 0 - to_port = 0 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0"] - prefix_list_ids = ["pl-12c4e678"] - } -} - -resource "aws_alb" "test" { - internal = false -} - -resource "aws_db_instance" "default" { - publicly_accessible = true -} - -resource "aws_launch_configuration" "as_conf" { - associate_public_ip_address = true -} - -resource "aws_rds_cluster_instance" "cluster_instances" { - publicly_accessible = true -} - -resource "aws_ssm_maintenance_window_task" "task" {} diff --git a/tests/infrastructure/fail/s3_related.tf b/tests/infrastructure/fail/s3_related.tf deleted file mode 100644 index 
9710ca9b6..000000000 --- a/tests/infrastructure/fail/s3_related.tf +++ /dev/null @@ -1,37 +0,0 @@ -resource "aws_s3_bucket_object" "examplebucket_object" { - server_side_encryption = "AES256" - - # kms_key_id = "${var.kms_key_arn}" -} - -resource "aws_s3_bucket" "public_read" { - acl = "public-read" - - website { - index_document = "index.html" - error_document = "error.html" - - routing_rules = < Date: Tue, 21 Jul 2020 16:40:43 +0530 Subject: [PATCH 002/188] add terrascan cli support --- cmd/terrascan/main.go | 26 ++++++++++++++++++++++++++ pkg/cli/run.go | 13 +++++++++++++ 2 files changed, 39 insertions(+) create mode 100644 cmd/terrascan/main.go create mode 100644 pkg/cli/run.go diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go new file mode 100644 index 000000000..7d99ef926 --- /dev/null +++ b/cmd/terrascan/main.go @@ -0,0 +1,26 @@ +package main + +import ( + "flag" + + "github.com/accurics/terrascan/pkg/cli" + httpServer "github.com/accurics/terrascan/pkg/http-server" +) + +func main() { + var ( + server = flag.Bool("server", false, "run terrascan in server mode") + iacType = flag.String("iac", "", "IaC provider (supported values: terraform)") + iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") + cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") + iacFilePath = flag.String("f", "", "IaC file path") + ) + flag.Parse() + + // if server mode set, run terrascan as a server, else run it as CLI + if *server { + httpServer.Start() + } else { + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath) + } +} diff --git a/pkg/cli/run.go b/pkg/cli/run.go new file mode 100644 index 000000000..83001e9ee --- /dev/null +++ b/pkg/cli/run.go @@ -0,0 +1,13 @@ +package cli + +import ( + "github.com/accurics/terrascan/pkg/runtime" +) + +// Run executes terrascan in CLI mode +func Run(iacType, iacVersion, cloudType, iacFilePath string) { + + // create a new runtime executor for processing IaC + executor := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath) + executor.Process() +} From 3f64c3f87e892bf8048ed404299736181fb8ede8 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 21 Jul 2020 16:41:14 +0530 Subject: [PATCH 003/188] add support for terrascan server mode --- pkg/http-server/start.go | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) create mode 100644 pkg/http-server/start.go diff --git a/pkg/http-server/start.go b/pkg/http-server/start.go new file mode 100644 index 000000000..deae0b481 --- /dev/null +++ b/pkg/http-server/start.go @@ -0,0 +1,16 @@ +package httpServer + +import ( + "log" + "net/http" +) + +func Start() { + + log.Printf("terrascan server listening at port 9010") + http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) + }) + + log.Fatal(http.ListenAndServe(":9010", nil)) +} From 13e79e6b6a0bf98adff40a5eb7d7f1420f7b820c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 21 Jul 2020 16:42:13 +0530 Subject: [PATCH 004/188] create cloud-providers layout with interfaces --- pkg/cloud-providers/aws/types.go | 12 +++++++++++ pkg/cloud-providers/cloud-provider.go | 30 +++++++++++++++++++++++++++ pkg/cloud-providers/interface.go | 7 +++++++ pkg/cloud-providers/supported.go | 27 ++++++++++++++++++++++++ 4 files changed, 76 insertions(+) create mode 100644 pkg/cloud-providers/aws/types.go create mode 100644 pkg/cloud-providers/cloud-provider.go create mode 100644 pkg/cloud-providers/interface.go create mode 100644 
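The server mode added in PATCH 003 only exposes a health endpoint on port 9010 so far. A quick way to verify a locally started `terrascan -server` process, sketched here as a standalone client rather than anything included in the patch:

    package main

    import (
        "fmt"
        "log"
        "net/http"
    )

    func main() {
        // Query the /health endpoint registered by httpServer.Start().
        resp, err := http.Get("http://localhost:9010/health")
        if err != nil {
            log.Fatalf("terrascan server not reachable: %v", err)
        }
        defer resp.Body.Close()

        // Start() writes http.StatusOK (200) when the server is up.
        fmt.Println("health status:", resp.StatusCode)
    }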
pkg/cloud-providers/supported.go diff --git a/pkg/cloud-providers/aws/types.go b/pkg/cloud-providers/aws/types.go new file mode 100644 index 000000000..3ba8ddc13 --- /dev/null +++ b/pkg/cloud-providers/aws/types.go @@ -0,0 +1,12 @@ +package awsProvider + +import ( + "log" +) + +type AWSProvider struct{} + +// CreateNormalizedJson creates a normalized json for the given input +func (a *AWSProvider) CreateNormalizedJson() { + log.Printf("creating normalized json for AWS resources") +} diff --git a/pkg/cloud-providers/cloud-provider.go b/pkg/cloud-providers/cloud-provider.go new file mode 100644 index 000000000..de0fa979d --- /dev/null +++ b/pkg/cloud-providers/cloud-provider.go @@ -0,0 +1,30 @@ +package cloudProvider + +import ( + "fmt" + "log" + "reflect" +) + +// NewCloudProvider returns a new CloudProvider +func NewCloudProvider(cloudType string) (cloudProvider CloudProvider, err error) { + + // get CloudProvider from supportedCloudProviders + cloudProviderObject, supported := supportedCloudProviders[supportedCloudType(cloudType)] + if !supported { + errMsg := fmt.Sprintf("cloud type '%s' not supported", cloudType) + log.Printf(errMsg) + return cloudProvider, fmt.Errorf("errMsg") + } + + return reflect.New(cloudProviderObject).Interface().(CloudProvider), nil +} + +// IsCloudSupported returns true/false depending on whether the cloud +// provider is supported in terrascan or not +func IsCloudSupported(cloudType string) bool { + if _, supported := supportedCloudProviders[supportedCloudType(cloudType)]; !supported { + return false + } + return true +} diff --git a/pkg/cloud-providers/interface.go b/pkg/cloud-providers/interface.go new file mode 100644 index 000000000..79ec9cae0 --- /dev/null +++ b/pkg/cloud-providers/interface.go @@ -0,0 +1,7 @@ +package cloudProvider + +// CloudProvider defines the interface which every cloud provider needs to implement +// to claim support in terrascan +type CloudProvider interface { + CreateNormalizedJson() +} diff --git a/pkg/cloud-providers/supported.go b/pkg/cloud-providers/supported.go new file mode 100644 index 000000000..d7f6122f4 --- /dev/null +++ b/pkg/cloud-providers/supported.go @@ -0,0 +1,27 @@ +package cloudProvider + +import ( + "reflect" + + awsProvider "github.com/accurics/terrascan/pkg/cloud-providers/aws" +) + +// SupportedCloudType data type for supported IaC provider +type supportedCloudType string + +// supported IaC providers +const ( + aws supportedCloudType = "aws" +) + +// map of supported IaC providers +var supportedCloudProviders map[supportedCloudType]reflect.Type + +// initializes a map of supported IaC providers +func init() { + + supportedCloudProviders = make(map[supportedCloudType]reflect.Type) + + // aws support + supportedCloudProviders[aws] = reflect.TypeOf(awsProvider.AWSProvider{}) +} From df9e63f05b7c623aae8945b36c3e3775d2dd60a3 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 21 Jul 2020 16:42:37 +0530 Subject: [PATCH 005/188] add terrascan runtime package --- pkg/runtime/executor.go | 95 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 95 insertions(+) create mode 100644 pkg/runtime/executor.go diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go new file mode 100644 index 000000000..ce56b6e51 --- /dev/null +++ b/pkg/runtime/executor.go @@ -0,0 +1,95 @@ +package runtime + +import ( + "fmt" + "log" + + CloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" + IacProvider "github.com/accurics/terrascan/pkg/iac-providers" +) + +// Executor object +type Executor struct { + filePath 
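Because NewCloudProvider instantiates providers through reflection from the supportedCloudProviders map, adding another cloud amounts to implementing the one-method CloudProvider interface and registering the new type in init(). A hypothetical sketch of that pattern (the azureProvider package and AzureProvider type are not part of this patch; they only illustrate the registration step):

    // pkg/cloud-providers/azure/types.go (hypothetical)
    package azureProvider

    import "log"

    // AzureProvider would mirror AWSProvider for Azure resources.
    type AzureProvider struct{}

    // CreateNormalizedJson satisfies the CloudProvider interface.
    func (a *AzureProvider) CreateNormalizedJson() {
        log.Printf("creating normalized json for Azure resources")
    }

    // pkg/cloud-providers/supported.go would then add, inside init():
    //   const azure supportedCloudType = "azure"
    //   supportedCloudProviders[azure] = reflect.TypeOf(azureProvider.AzureProvider{})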
string + cloudType string + iacType string + iacVersion string +} + +// NewExecutor creates a runtime object +func NewExecutor(iacType, iacVersion, cloudType, filePath string) *Executor { + return &Executor{ + filePath: filePath, + cloudType: cloudType, + iacType: iacType, + iacVersion: iacVersion, + } +} + +// ValidateInputs validates the inputs to the executor object +func (r *Executor) ValidateInputs() error { + + // terrascan can accept either a file or a directory, both inputs cannot + // be processed together + + // if file path, check if file exists + // if directory, check if directory exists + + // check if Iac type is supported + if !IacProvider.IsIacSupported(r.iacType, r.iacVersion) { + errMsg := fmt.Sprintf("iac type '%s', version '%s' not supported", r.iacType, r.iacVersion) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + + // check if cloud type is supported + if !CloudProvider.IsCloudSupported(r.cloudType) { + errMsg := fmt.Sprintf("cloud type '%s' not supported", r.cloudType) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + + // check if policy type is supported + + // successful + return nil +} + +// Process validates the inputs, processes the IaC, creates json output +func (r *Executor) Process() error { + + // validate inputs + if err := r.ValidateInputs(); err != nil { + return err + } + + // create new IacProvider + iacProvider, err := IacProvider.NewIacProvider(r.iacType, r.iacVersion) + if err != nil { + errMsg := fmt.Sprintf("failed to create a new IacProvider for iacType '%s'. error: '%s'", r.iacType, err) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + + // create config from IaC + _, err = iacProvider.LoadIacFile(r.filePath) + if err != nil { + errMsg := fmt.Sprintf("failed to load iac file '%s'. error: '%s'", err) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + + // create new CloudProvider + cloudProvider, err := CloudProvider.NewCloudProvider(r.cloudType) + if err != nil { + errMsg := fmt.Sprintf("failed to create a new CloudProvider for cloudType '%s'. 
error: '%s'", r.cloudType, err) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + cloudProvider.CreateNormalizedJson() + + // write output + + // successful + return nil +} From d4a96c92fb06239e870aa91c00af20f29cab64c5 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 21 Jul 2020 16:44:26 +0530 Subject: [PATCH 006/188] add iac-providers with interfaces --- pkg/iac-providers/iac-provider.go | 33 +++++++++++++++++ pkg/iac-providers/interface.go | 7 ++++ pkg/iac-providers/supported.go | 37 ++++++++++++++++++++ pkg/iac-providers/terraform/v12/load-file.go | 26 ++++++++++++++ pkg/iac-providers/terraform/v12/tfv12.go | 8 +++++ 5 files changed, 111 insertions(+) create mode 100644 pkg/iac-providers/iac-provider.go create mode 100644 pkg/iac-providers/interface.go create mode 100644 pkg/iac-providers/supported.go create mode 100644 pkg/iac-providers/terraform/v12/load-file.go create mode 100644 pkg/iac-providers/terraform/v12/tfv12.go diff --git a/pkg/iac-providers/iac-provider.go b/pkg/iac-providers/iac-provider.go new file mode 100644 index 000000000..d78205650 --- /dev/null +++ b/pkg/iac-providers/iac-provider.go @@ -0,0 +1,33 @@ +package iacProvider + +import ( + "fmt" + "log" + "reflect" +) + +// NewIacProvider returns a new IacProvider +func NewIacProvider(iacType, iacVersion string) (iacProvider IacProvider, err error) { + + // get IacProvider from supportedIacProviders + iacProviderObject, supported := supportedIacProviders[supportedIacType(iacType)][supportedIacVersion(iacVersion)] + if !supported { + errMsg := fmt.Sprintf("IaC type:'%s', version: '%s' not supported", iacType, iacVersion) + log.Printf(errMsg) + return iacProvider, fmt.Errorf("errMsg") + } + + return reflect.New(iacProviderObject).Interface().(IacProvider), nil +} + +// IsIacSupported returns true/false depending on whether the IaC +// provider is supported in terrascan or not +func IsIacSupported(iacType, iacVersion string) bool { + if _, supported := supportedIacProviders[supportedIacType(iacType)]; !supported { + return false + } + if _, supported := supportedIacProviders[supportedIacType(iacType)][supportedIacVersion(iacVersion)]; !supported { + return false + } + return true +} diff --git a/pkg/iac-providers/interface.go b/pkg/iac-providers/interface.go new file mode 100644 index 000000000..20583e477 --- /dev/null +++ b/pkg/iac-providers/interface.go @@ -0,0 +1,7 @@ +package iacProvider + +// IacProvider defines the interface which every IaC provider needs to implement +// to claim support in terrascan +type IacProvider interface { + LoadIacFile(string) (interface{}, error) +} diff --git a/pkg/iac-providers/supported.go b/pkg/iac-providers/supported.go new file mode 100644 index 000000000..359b7e5c1 --- /dev/null +++ b/pkg/iac-providers/supported.go @@ -0,0 +1,37 @@ +package iacProvider + +import ( + "reflect" + + tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" +) + +// SupportedIacType data type for supported IaC provider +type supportedIacType string + +// supported IaC providers +const ( + terraform supportedIacType = "terraform" +) + +// supportedIacVersion data type for supported Iac provider +type supportedIacVersion string + +// supported Iac versions +const ( + defaultVersion supportedIacVersion = "default" + terraformV12 supportedIacVersion = "v12" +) + +// map of supported IaC providers +var supportedIacProviders map[supportedIacType](map[supportedIacVersion]reflect.Type) + +// initializes a map of supported IaC providers +func init() { + supportedIacProviders = 
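As of this commit the CLI path is thin: cli.Run builds an Executor and calls Process, so the same flow can be driven programmatically. A minimal sketch using the signatures shown above (the ./main.tf path is only an example; the iac, iac-version and cloud values mirror the flags defined in cmd/terrascan/main.go):

    package main

    import (
        "log"

        "github.com/accurics/terrascan/pkg/runtime"
    )

    func main() {
        // Equivalent to: terrascan -iac terraform -iac-version v12 -cloud aws -f ./main.tf
        executor := runtime.NewExecutor("terraform", "v12", "aws", "./main.tf")
        if err := executor.Process(); err != nil {
            log.Fatalf("terrascan run failed: %v", err)
        }
    }

Note that the later commit below widens NewExecutor and cli.Run to also accept a directory path, so this four-argument form only matches the code at this point in the series.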
make(map[supportedIacType](map[supportedIacVersion]reflect.Type)) + + // terraform support + supportedTerraformVersions := make(map[supportedIacVersion]reflect.Type) + supportedTerraformVersions[terraformV12] = reflect.TypeOf(tfv12.TfV12{}) + supportedIacProviders[terraform] = supportedTerraformVersions +} diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go new file mode 100644 index 000000000..98a4e5a5b --- /dev/null +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -0,0 +1,26 @@ +package tfv12 + +import ( + "fmt" + "log" + + "github.com/hashicorp/terraform/configs" + "github.com/spf13/afero" +) + +// ParseFile parses the given terraform file from the given file path +func (*TfV12) LoadIacFile(filePath string) (config interface{}, err error) { + + // new terraform config parser + parser := configs.NewParser(afero.NewOsFs()) + + config, diags := parser.LoadConfigFile(filePath) + if diags != nil { + log.Printf("failed to load config file '%s'. error:\n%v\n", diags) + return config, fmt.Errorf("failed to load config file") + } + log.Printf("config:\n%+v\n", config) + + // successful + return config, nil +} diff --git a/pkg/iac-providers/terraform/v12/tfv12.go b/pkg/iac-providers/terraform/v12/tfv12.go new file mode 100644 index 000000000..ff2958dc5 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/tfv12.go @@ -0,0 +1,8 @@ +package tfv12 + +type TfV12 struct{} + +// NewTfV12 creates a new TfV12 object +func NewTfV12() *TfV12 { + return &TfV12{} +} From 1851c58c5014e612b5c3db110bf977a101056703 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 22 Jul 2020 15:51:20 +0530 Subject: [PATCH 007/188] add support parsing terraform config files from a module based directory structure --- cmd/terrascan/main.go | 3 +- pkg/cli/run.go | 5 +- pkg/iac-providers/interface.go | 7 +- pkg/iac-providers/output/types.go | 13 ++ pkg/iac-providers/terraform/v12/convert.go | 227 +++++++++++++++++++ pkg/iac-providers/terraform/v12/load-dir.go | 119 ++++++++++ pkg/iac-providers/terraform/v12/load-file.go | 41 +++- pkg/iac-providers/terraform/v12/resource.go | 43 ++++ pkg/iac-providers/terraform/v12/tfv12.go | 8 - pkg/iac-providers/terraform/v12/types.go | 4 + pkg/runtime/executor.go | 55 ++++- pkg/utils/path.go | 31 +++ pkg/utils/printer.go | 18 ++ 13 files changed, 546 insertions(+), 28 deletions(-) create mode 100644 pkg/iac-providers/output/types.go create mode 100644 pkg/iac-providers/terraform/v12/convert.go create mode 100644 pkg/iac-providers/terraform/v12/load-dir.go create mode 100644 pkg/iac-providers/terraform/v12/resource.go delete mode 100644 pkg/iac-providers/terraform/v12/tfv12.go create mode 100644 pkg/iac-providers/terraform/v12/types.go create mode 100644 pkg/utils/path.go create mode 100644 pkg/utils/printer.go diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 7d99ef926..6f1d2e8e2 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -14,6 +14,7 @@ func main() { iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") iacFilePath = flag.String("f", "", "IaC file path") + iacDirPath = flag.String("d", "", "IaC directory path") ) flag.Parse() @@ -21,6 +22,6 @@ func main() { if *server { httpServer.Start() } else { - cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath) + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath) } } diff --git a/pkg/cli/run.go 
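A small sketch (not part of the patch) of how the reflection-based registry above resolves providers; only the terraform/v12 pair is registered at this point, so any other combination is reported as unsupported.

package main

import (
	"fmt"

	iacProvider "github.com/accurics/terrascan/pkg/iac-providers"
)

func main() {
	fmt.Println(iacProvider.IsIacSupported("terraform", "v12")) // true
	fmt.Println(iacProvider.IsIacSupported("terraform", "v11")) // false, version not registered

	// NewIacProvider builds a fresh *tfv12.TfV12 behind the IacProvider
	// interface via reflect.New on the registered type
	provider, err := iacProvider.NewIacProvider("terraform", "v12")
	fmt.Printf("%T, err: %v\n", provider, err)
}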
b/pkg/cli/run.go index 83001e9ee..cf6c98fe2 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -5,9 +5,10 @@ import ( ) // Run executes terrascan in CLI mode -func Run(iacType, iacVersion, cloudType, iacFilePath string) { +func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath string) { // create a new runtime executor for processing IaC - executor := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath) + executor := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, + iacDirPath) executor.Process() } diff --git a/pkg/iac-providers/interface.go b/pkg/iac-providers/interface.go index 20583e477..ff3e5ea50 100644 --- a/pkg/iac-providers/interface.go +++ b/pkg/iac-providers/interface.go @@ -1,7 +1,12 @@ package iacProvider +import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + // IacProvider defines the interface which every IaC provider needs to implement // to claim support in terrascan type IacProvider interface { - LoadIacFile(string) (interface{}, error) + LoadIacFile(string) (output.AllResourceConfigs, error) + LoadIacDir(string) (output.AllResourceConfigs, error) } diff --git a/pkg/iac-providers/output/types.go b/pkg/iac-providers/output/types.go new file mode 100644 index 000000000..695122398 --- /dev/null +++ b/pkg/iac-providers/output/types.go @@ -0,0 +1,13 @@ +package output + +// ResourceConfig describes a resource present in IaC +type ResourceConfig struct { + ID string `json:"id"` + Name string `json:"name"` + Source string `json:"source"` + Type string `json:"type"` + Config interface{} `json:"config"` +} + +// AllResourceConfigs is a list/slice of resource configs present in IaC +type AllResourceConfigs []ResourceConfig diff --git a/pkg/iac-providers/terraform/v12/convert.go b/pkg/iac-providers/terraform/v12/convert.go new file mode 100644 index 000000000..8a469dcbc --- /dev/null +++ b/pkg/iac-providers/terraform/v12/convert.go @@ -0,0 +1,227 @@ +package tfv12 + +/* + Following code has been borrowed from: + /~https://github.com/tmccombs/hcl2json/blob/5c1402dc2b410e362afee45a3cf15dcb08bc1f2c/convert.go +*/ + +import ( + "fmt" + "strings" + + hcl "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/hclsyntax" + "github.com/zclconf/go-cty/cty" + ctyconvert "github.com/zclconf/go-cty/cty/convert" + ctyjson "github.com/zclconf/go-cty/cty/json" +) + +type jsonObj map[string]interface{} + +// Convert an hcl File to a json serializable object +// This assumes that the body is a hclsyntax.Body +func convertFile(file *hcl.File) (jsonObj, error) { + c := converter{bytes: file.Bytes} + body := file.Body.(*hclsyntax.Body) + return c.convertBody(body) +} + +type converter struct { + bytes []byte +} + +func (c *converter) rangeSource(r hcl.Range) string { + return string(c.bytes[r.Start.Byte:r.End.Byte]) +} + +func (c *converter) convertBody(body *hclsyntax.Body) (jsonObj, error) { + var err error + out := make(jsonObj) + for key, value := range body.Attributes { + out[key], err = c.convertExpression(value.Expr) + if err != nil { + return nil, err + } + } + + for _, block := range body.Blocks { + err = c.convertBlock(block, out) + if err != nil { + return nil, err + } + } + + return out, nil +} + +func (c *converter) convertBlock(block *hclsyntax.Block, out jsonObj) error { + var key string = block.Type + + value, err := c.convertBody(block.Body) + if err != nil { + return err + } + + for _, label := range block.Labels { + if inner, exists := out[key]; exists { + var ok bool + out, ok = inner.(jsonObj) + if !ok { + // TODO: 
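Stepping away from the borrowed converter for a moment: a quick sketch (not part of the patch) of the normalized record that output.ResourceConfig above is meant to carry for a single resource; every field value here is invented for illustration, and the ID field is not populated by this patch.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/accurics/terrascan/pkg/iac-providers/output"
)

func main() {
	rc := output.ResourceConfig{
		Name:   "logs",
		Type:   "aws_s3_bucket",
		Source: "main.tf",
		Config: map[string]interface{}{"bucket": "my-logs", "acl": "private"},
	}
	b, _ := json.MarshalIndent(rc, "", "  ")
	fmt.Println(string(b)) // id/name/source/type/config keys as defined in types.go
}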
better diagnostics + return fmt.Errorf("Unable to conver Block to JSON: %v.%v", block.Type, strings.Join(block.Labels, ".")) + } + } else { + obj := make(jsonObj) + out[key] = obj + out = obj + } + key = label + } + + if current, exists := out[key]; exists { + if list, ok := current.([]interface{}); ok { + out[key] = append(list, value) + } else { + out[key] = []interface{}{current, value} + } + } else { + out[key] = value + } + + return nil +} + +func (c *converter) convertExpression(expr hclsyntax.Expression) (interface{}, error) { + // assume it is hcl syntax (because, um, it is) + switch value := expr.(type) { + case *hclsyntax.LiteralValueExpr: + return ctyjson.SimpleJSONValue{Value: value.Val}, nil + case *hclsyntax.TemplateExpr: + return c.convertTemplate(value) + case *hclsyntax.TemplateWrapExpr: + return c.convertExpression(value.Wrapped) + case *hclsyntax.TupleConsExpr: + var list []interface{} + for _, ex := range value.Exprs { + elem, err := c.convertExpression(ex) + if err != nil { + return nil, err + } + list = append(list, elem) + } + return list, nil + case *hclsyntax.ObjectConsExpr: + m := make(jsonObj) + for _, item := range value.Items { + key, err := c.convertKey(item.KeyExpr) + if err != nil { + return nil, err + } + m[key], err = c.convertExpression(item.ValueExpr) + if err != nil { + return nil, err + } + } + return m, nil + default: + return c.wrapExpr(expr), nil + } +} + +func (c *converter) convertTemplate(t *hclsyntax.TemplateExpr) (string, error) { + if t.IsStringLiteral() { + // safe because the value is just the string + v, err := t.Value(nil) + if err != nil { + return "", err + } + return v.AsString(), nil + } + var builder strings.Builder + for _, part := range t.Parts { + s, err := c.convertStringPart(part) + if err != nil { + return "", err + } + builder.WriteString(s) + } + return builder.String(), nil +} + +func (c *converter) convertStringPart(expr hclsyntax.Expression) (string, error) { + switch v := expr.(type) { + case *hclsyntax.LiteralValueExpr: + s, err := ctyconvert.Convert(v.Val, cty.String) + if err != nil { + return "", err + } + return s.AsString(), nil + case *hclsyntax.TemplateExpr: + return c.convertTemplate(v) + case *hclsyntax.TemplateWrapExpr: + return c.convertStringPart(v.Wrapped) + case *hclsyntax.ConditionalExpr: + return c.convertTemplateConditional(v) + case *hclsyntax.TemplateJoinExpr: + return c.convertTemplateFor(v.Tuple.(*hclsyntax.ForExpr)) + default: + // treating as an embedded expression + return c.wrapExpr(expr), nil + } +} + +func (c *converter) convertKey(keyExpr hclsyntax.Expression) (string, error) { + // a key should never have dynamic input + if k, isKeyExpr := keyExpr.(*hclsyntax.ObjectConsKeyExpr); isKeyExpr { + keyExpr = k.Wrapped + if _, isTraversal := keyExpr.(*hclsyntax.ScopeTraversalExpr); isTraversal { + return c.rangeSource(keyExpr.Range()), nil + } + } + return c.convertStringPart(keyExpr) +} + +func (c *converter) convertTemplateConditional(expr *hclsyntax.ConditionalExpr) (string, error) { + var builder strings.Builder + builder.WriteString("%{if ") + builder.WriteString(c.rangeSource(expr.Condition.Range())) + builder.WriteString("}") + trueResult, err := c.convertStringPart(expr.TrueResult) + if err != nil { + return "", nil + } + builder.WriteString(trueResult) + falseResult, err := c.convertStringPart(expr.FalseResult) + if len(falseResult) > 0 { + builder.WriteString("%{else}") + builder.WriteString(falseResult) + } + builder.WriteString("%{endif}") + + return builder.String(), nil +} + +func (c 
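To make the conversion concrete, a standalone sketch (not part of the patch, and deliberately not touching the package-private converter type) of the JSON-style map the code above is intended to produce for a body containing bucket = "logs" and a single nested versioning { enabled = true } block; repeated blocks of the same type would instead become a list.

package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	// hand-written equivalent of convertBody's output for the body described above
	body := map[string]interface{}{
		"bucket":     "logs",
		"versioning": map[string]interface{}{"enabled": true},
	}
	b, _ := json.MarshalIndent(body, "", "  ")
	fmt.Println(string(b))
}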
*converter) convertTemplateFor(expr *hclsyntax.ForExpr) (string, error) { + var builder strings.Builder + builder.WriteString("%{for ") + if len(expr.KeyVar) > 0 { + builder.WriteString(expr.KeyVar) + builder.WriteString(", ") + } + builder.WriteString(expr.ValVar) + builder.WriteString(" in ") + builder.WriteString(c.rangeSource(expr.CollExpr.Range())) + builder.WriteString("}") + templ, err := c.convertStringPart(expr.ValExpr) + if err != nil { + return "", err + } + builder.WriteString(templ) + builder.WriteString("%{endfor}") + + return builder.String(), nil +} + +func (c *converter) wrapExpr(expr hclsyntax.Expression) string { + return "${" + c.rangeSource(expr.Range()) + "}" +} diff --git a/pkg/iac-providers/terraform/v12/load-dir.go new file mode 100644 index 000000000..075f6ce15 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -0,0 +1,119 @@ +package tfv12 + +import ( + "fmt" + "log" + "path/filepath" + "strings" + + version "github.com/hashicorp/go-version" + "github.com/hashicorp/hcl/v2" + hclConfigs "github.com/hashicorp/terraform/configs" + "github.com/spf13/afero" + + "github.com/accurics/terrascan/pkg/iac-providers/output" + "github.com/accurics/terrascan/pkg/utils" +) + +// LoadIacDir starts traversing from the given rootDir and traverses through +// all the descendant modules present to create an output list of all the +// resources present in rootDir and descendant modules +func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceConfigs, err error) { + + // get absolute path + absRootDir, err := utils.GetAbsPath(rootDir) + if err != nil { + return allResourcesConfig, err + } + + // create a new config parser + parser := hclConfigs.NewParser(afero.NewOsFs()) + + // check if the directory has any tf config files (.tf or .tf.json) + if !parser.IsConfigDir(absRootDir) { + errMsg := fmt.Sprintf("directory '%s' has no terraform config files", absRootDir) + log.Printf(errMsg) + return allResourcesConfig, fmt.Errorf(errMsg) + } + + // load root config directory + rootMod, diags := parser.LoadConfigDir(absRootDir) + if diags.HasErrors() { + log.Printf("failed to load terraform config dir '%s'. error:\n%+v\n", rootDir, diags) + return allResourcesConfig, fmt.Errorf("failed to load terraform config dir") + } + + // using the BuildConfig and ModuleWalkerFunc to traverse through all + // descendant modules from the root module and create a unified + // configuration of type *configs.Config + // Note: currently, only Local paths are supported for Module Sources + versionI := 0 + unified, diags := hclConfigs.BuildConfig(rootMod, hclConfigs.ModuleWalkerFunc( + func(req *hclConfigs.ModuleRequest) (*hclConfigs.Module, *version.Version, hcl.Diagnostics) { + + // Note: currently only local paths are supported for Module Sources + + // determine the absolute path from root module to the sub module + // using *configs.ModuleRequest.Path field + var ( + pathArr = strings.Split(req.Path.String(), ".") + pathToModule = absRootDir + ) + for _, subPath := range pathArr { + pathToModule = filepath.Join(pathToModule, subPath) + } + + // load sub module directory + subMod, diags := parser.LoadConfigDir(pathToModule) + version, _ := version.NewVersion(fmt.Sprintf("1.0.%d", versionI)) + versionI++ + return subMod, version, diags + }, + )) + if diags.HasErrors() { + log.Printf("failed to build unified config. 
errors:\n%+v\n", diags) + return allResourcesConfig, fmt.Errorf("failed to build terraform config") + } + + /* + The "unified" config created from BuildConfig in the previous step + represents a tree structure with rootDir module being at its root and + all the sub modules being its children, and these children can have + more children and so on... + + Now, using BFS we traverse through all the submodules using the classic + approach of using a queue data structure + */ + + // queue for BFS, add root module config to it + configsQ := []*hclConfigs.Config{unified.Root} + + // using BFS traverse through all modules in the unified config tree + for len(configsQ) > 0 { + + // pop first element from the queue + current := configsQ[0] + configsQ = configsQ[1:] + + // traverse through all current's resources + for _, managedResource := range current.Module.ManagedResources { + + // create output.ResourceConfig from hclConfigs.Resource + resourceConfig, err := CreateResourceConfig(managedResource) + if err != nil { + return allResourcesConfig, fmt.Errorf("failed to create ResourceConfig") + } + + // append resource config to list of all resources + allResourcesConfig = append(allResourcesConfig, resourceConfig) + } + + // add all current's children to the queue + for _, childModule := range current.Children { + configsQ = append(configsQ, childModule) + } + } + + // successful + return allResourcesConfig, nil +} diff --git a/pkg/iac-providers/terraform/v12/load-file.go index 98a4e5a5b..f9c9cf70f 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -4,23 +4,48 @@ import ( "fmt" "log" - "github.com/hashicorp/terraform/configs" + hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" + + "github.com/accurics/terrascan/pkg/iac-providers/output" + "github.com/accurics/terrascan/pkg/utils" ) -// ParseFile parses the given terraform file from the given file path -func (*TfV12) LoadIacFile(filePath string) (config interface{}, err error) { +// LoadIacFile parses the given terraform file from the given file path +func (*TfV12) LoadIacFile(filePath string) (allResourcesConfig output.AllResourceConfigs, err error) { + + // get absolute path + absFilePath, err := utils.GetAbsPath(filePath) + if err != nil { + return allResourcesConfig, err + } + + // new terraform config parser - parser := configs.NewParser(afero.NewOsFs()) + parser := hclConfigs.NewParser(afero.NewOsFs()) + - config, diags := parser.LoadConfigFile(filePath) + hclFile, diags := parser.LoadConfigFile(absFilePath) if diags != nil { log.Printf("failed to load config file '%s'. error:\n%v\n", diags) - return config, fmt.Errorf("failed to load config file") + return allResourcesConfig, fmt.Errorf("failed to load config file") + } + if hclFile == nil && diags.HasErrors() { + log.Printf("error occurred while loading config file. 
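The module walk in LoadIacDir above uses a plain slice as a FIFO queue; a minimal standalone sketch of that pattern (not part of the patch), with the print standing in for collecting ManagedResources from each module.

package main

import "fmt"

type module struct {
	name     string
	children []*module
}

func main() {
	root := &module{name: "root", children: []*module{
		{name: "network"},
		{name: "storage", children: []*module{{name: "bucket"}}},
	}}

	queue := []*module{root} // queue for BFS, seeded with the root module
	for len(queue) > 0 {
		current := queue[0] // pop the first element
		queue = queue[1:]
		fmt.Println(current.name) // visit: terrascan gathers resource configs here
		queue = append(queue, current.children...)
	}
}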
error:\n%v\n", diags) + return allResourcesConfig, fmt.Errorf("failed to load config file") + } + + // traverse through all the resources in the file + for _, managedResource := range hclFile.ManagedResources { + + // create output.ResourceConfig from hclConfigs.Resource + resourceConfig, err := CreateResourceConfig(managedResource) + if err != nil { + return allResourcesConfig, fmt.Errorf("failed to create ResourceConfig") + } + + // append resource config to list of all resources + allResourcesConfig = append(allResourcesConfig, resourceConfig) + } - log.Printf("config:\n%+v\n", config) // successful - return config, nil + return allResourcesConfig, nil } diff --git a/pkg/iac-providers/terraform/v12/resource.go new file mode 100644 index 000000000..cae7eaa13 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/resource.go @@ -0,0 +1,43 @@ +package tfv12 + +import ( + "fmt" + "io/ioutil" + "log" + + "github.com/hashicorp/hcl/v2/hclsyntax" + hclConfigs "github.com/hashicorp/terraform/configs" + + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + +// CreateResourceConfig creates output.ResourceConfig +func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig output.ResourceConfig, err error) { + + // read source file + fileBytes, err := ioutil.ReadFile(managedResource.DeclRange.Filename) + if err != nil { + log.Printf("failed to read terraform IaC file '%s'. error: '%v'", managedResource.DeclRange.Filename, err) + return resourceConfig, fmt.Errorf("failed to read terraform file") + } + + // convert resource config from hcl.Body to map[string]interface{} + c := converter{bytes: fileBytes} + hclBody := managedResource.Config.(*hclsyntax.Body) + goOut, err := c.convertBody(hclBody) + if err != nil { + log.Printf("failed to convert hcl.Body to go struct; resource '%s', file: '%s'. 
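// Editorial sketch, not part of the patch: the unchecked type assertion above
// assumes every resource body is an *hclsyntax.Body, which holds for .tf files
// but not necessarily for JSON-based .tf.json input; a comma-ok assertion
// would fail gracefully instead of panicking, e.g.:
//
//	hclBody, ok := managedResource.Config.(*hclsyntax.Body)
//	if !ok {
//		return resourceConfig, fmt.Errorf("unsupported config body for resource '%s'", managedResource.Name)
//	}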
error: '%v'", + managedResource.Name, managedResource.DeclRange.Filename, err) + return resourceConfig, fmt.Errorf("failed to convert hcl.Body to go struct") + } + + // create a resource config + resourceConfig = output.ResourceConfig{ + Name: managedResource.Name, + Type: managedResource.Type, + Source: managedResource.DeclRange.Filename, + Config: goOut, + } + + return resourceConfig, nil +} diff --git a/pkg/iac-providers/terraform/v12/tfv12.go b/pkg/iac-providers/terraform/v12/tfv12.go deleted file mode 100644 index ff2958dc5..000000000 --- a/pkg/iac-providers/terraform/v12/tfv12.go +++ /dev/null @@ -1,8 +0,0 @@ -package tfv12 - -type TfV12 struct{} - -// NewTfV12 creates a new TfV12 object -func NewTfV12() *TfV12 { - return &TfV12{} -} diff --git a/pkg/iac-providers/terraform/v12/types.go b/pkg/iac-providers/terraform/v12/types.go new file mode 100644 index 000000000..15df3a05c --- /dev/null +++ b/pkg/iac-providers/terraform/v12/types.go @@ -0,0 +1,4 @@ +package tfv12 + +// TfV12 struct implements the IacProvider interface +type TfV12 struct{} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index ce56b6e51..58d89b600 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -3,23 +3,29 @@ package runtime import ( "fmt" "log" + "os" + + "github.com/accurics/terrascan/pkg/utils" CloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" IacProvider "github.com/accurics/terrascan/pkg/iac-providers" + "github.com/accurics/terrascan/pkg/iac-providers/output" ) // Executor object type Executor struct { filePath string + dirPath string cloudType string iacType string iacVersion string } // NewExecutor creates a runtime object -func NewExecutor(iacType, iacVersion, cloudType, filePath string) *Executor { +func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath string) *Executor { return &Executor{ filePath: filePath, + dirPath: dirPath, cloudType: cloudType, iacType: iacType, iacVersion: iacVersion, @@ -31,9 +37,38 @@ func (r *Executor) ValidateInputs() error { // terrascan can accept either a file or a directory, both inputs cannot // be processed together + if r.filePath != "" && r.dirPath != "" { + errMsg := fmt.Sprintf("cannot accept both '-f %s' and '-d %s' options together", r.filePath, r.dirPath) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } - // if file path, check if file exists - // if directory, check if directory exists + if r.dirPath != "" { + // if directory, check if directory exists + absDirPath, err := utils.GetAbsPath(r.dirPath) + if err != nil { + return err + } + + if _, err := os.Stat(absDirPath); err != nil { + errMsg := fmt.Sprintf("directory '%s' does not exist", absDirPath) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + } else { + + // if file path, check if file exists + absFilePath, err := utils.GetAbsPath(r.filePath) + if err != nil { + return err + } + + if _, err := os.Stat(absFilePath); err != nil { + errMsg := fmt.Sprintf("file '%s' does not exist", absFilePath) + log.Printf(errMsg) + return fmt.Errorf(errMsg) + } + } // check if Iac type is supported if !IacProvider.IsIacSupported(r.iacType, r.iacVersion) { @@ -71,13 +106,17 @@ func (r *Executor) Process() error { return fmt.Errorf(errMsg) } - // create config from IaC - _, err = iacProvider.LoadIacFile(r.filePath) + var iacOut output.AllResourceConfigs + if r.dirPath != "" { + iacOut, err = iacProvider.LoadIacDir(r.dirPath) + } else { + // create config from IaC + iacOut, err = iacProvider.LoadIacFile(r.filePath) + } if err != nil { - errMsg := 
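// Editorial sketch, not part of the patch: os.Stat above only proves the path
// exists; if the -d/-f distinction should be enforced, the returned FileInfo
// can also be checked so that a regular file passed via -d is rejected early, e.g.:
//
//	if fi, err := os.Stat(absDirPath); err != nil || !fi.IsDir() {
//		return fmt.Errorf("directory '%s' does not exist", absDirPath)
//	}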
fmt.Sprintf("failed to load iac file '%s'. error: '%s'", err) - log.Printf(errMsg) - return fmt.Errorf(errMsg) + return err } + utils.PrintJSON(iacOut) // create new CloudProvider cloudProvider, err := CloudProvider.NewCloudProvider(r.cloudType) diff --git a/pkg/utils/path.go b/pkg/utils/path.go new file mode 100644 index 000000000..c66d97ee6 --- /dev/null +++ b/pkg/utils/path.go @@ -0,0 +1,31 @@ +package utils + +import ( + "fmt" + "log" + "os" + "path/filepath" + "strings" +) + +// GetAbsPath returns absolute path from passed file path resolving even ~ to user home dir and any other such symbols that are only +// shell expanded can also be handled here +func GetAbsPath(path string) (string, error) { + + // Only shell resolves `~` to home so handle it specially + if strings.HasPrefix(path, "~") { + homeDir := os.Getenv("HOME") + if len(path) > 1 { + path = filepath.Join(homeDir, path[1:]) + } + } + + // get absolute file path + path, err := filepath.Abs(path) + if err != nil { + errMsg := fmt.Sprintf("unable to resolve %s to absolute path. error: '%s'", path, err) + log.Println(errMsg) + return path, fmt.Errorf(errMsg) + } + return path, nil +} diff --git a/pkg/utils/printer.go b/pkg/utils/printer.go new file mode 100644 index 000000000..952fdefaf --- /dev/null +++ b/pkg/utils/printer.go @@ -0,0 +1,18 @@ +package utils + +import ( + "encoding/json" + "log" + "os" +) + +// PrintJSON prints data in JSON format +func PrintJSON(data interface{}) { + j, err := json.MarshalIndent(data, "", " ") + if err != nil { + log.Printf("failed to create JSON. error: '%v'", err) + return + } + os.Stdout.Write(j) + os.Stdout.Write([]byte{'\n'}) +} From feb58c80d6f55ab9f058c392959025cb69ceac9b Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 22 Jul 2020 15:58:32 +0530 Subject: [PATCH 008/188] update go mod files --- go.mod | 11 ++ go.sum | 401 +++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 412 insertions(+) create mode 100644 go.mod create mode 100644 go.sum diff --git a/go.mod b/go.mod new file mode 100644 index 000000000..ccfdf0eae --- /dev/null +++ b/go.mod @@ -0,0 +1,11 @@ +module github.com/accurics/terrascan + +go 1.14 + +require ( + github.com/hashicorp/go-version v1.2.0 + github.com/hashicorp/hcl/v2 v2.3.0 + github.com/hashicorp/terraform v0.12.28 + github.com/spf13/afero v1.3.2 + github.com/zclconf/go-cty v1.2.1 +) diff --git a/go.sum b/go.sum new file mode 100644 index 000000000..dc1c0ce14 --- /dev/null +++ b/go.sum @@ -0,0 +1,401 @@ +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU= +cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU= +cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY= +cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc= +cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o= +cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE= +github.com/Azure/azure-sdk-for-go v35.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/azure-sdk-for-go v36.2.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= +github.com/Azure/go-autorest/autorest v0.9.0/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= 
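Back in pkg/utils above: GetAbsPath expands '~' by reading the HOME environment variable; a small sketch (not part of the patch) of the same idea built on os.UserHomeDir, which also works where HOME is unset. The helper name expandHome is hypothetical.

package main

import (
	"fmt"
	"os"
	"path/filepath"
	"strings"
)

// expandHome resolves a leading '~' to the user's home directory and then
// returns the absolute form of the path.
func expandHome(path string) (string, error) {
	if path == "~" || strings.HasPrefix(path, "~/") {
		home, err := os.UserHomeDir()
		if err != nil {
			return path, err
		}
		path = filepath.Join(home, strings.TrimPrefix(path, "~"))
	}
	return filepath.Abs(path)
}

func main() {
	p, err := expandHome("~/iac/main.tf")
	fmt.Println(p, err)
}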
+github.com/Azure/go-autorest/autorest v0.9.2/go.mod h1:xyHB1BMZT0cuDHU7I0+g046+BFDTQ8rEZB0s4Yfa6bI= +github.com/Azure/go-autorest/autorest/adal v0.5.0/go.mod h1:8Z9fGy2MpX0PvDjB1pEgQTmVqjGhiHBW7RJJEciWzS0= +github.com/Azure/go-autorest/autorest/adal v0.8.1-0.20191028180845-3492b2aff503/go.mod h1:Z6vX6WXXuyieHAXwMj0S6HY6e6wcHn37qQMBQlvY3lc= +github.com/Azure/go-autorest/autorest/azure/cli v0.2.0/go.mod h1:WWTbGPvkAg3I4ms2j2s+Zr5xCGwGqTQh+6M2ZqOczkE= +github.com/Azure/go-autorest/autorest/date v0.1.0/go.mod h1:plvfp3oPSKwf2DNjlBjWF/7vwR+cUD/ELuzDCXwHUVA= +github.com/Azure/go-autorest/autorest/date v0.2.0/go.mod h1:vcORJHLJEh643/Ioh9+vPmf1Ij9AEBM5FuBIXLmIy0g= +github.com/Azure/go-autorest/autorest/mocks v0.1.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.2.0/go.mod h1:OTyCOPRA2IgIlWxVYxBee2F5Gr4kF2zd2J5cFRaIDN0= +github.com/Azure/go-autorest/autorest/mocks v0.3.0/go.mod h1:a8FDP3DYzQ4RYfVAxAN3SVSiiO77gL2j2ronKKP0syM= +github.com/Azure/go-autorest/autorest/to v0.3.0/go.mod h1:MgwOyqaIuKdG4TL/2ywSsIWKAfJfgHDo8ObuUk3t5sA= +github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRYq3HT1WyXAvT7LLY3tl70hw6tQIbjI= +github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= +github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= +github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= +github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4= +github.com/QcloudApi/qcloud_sign_golang v0.0.0-20141224014652-e4130a326409/go.mod h1:1pk82RBxDY/JZnPQrtqHlUFfCctgdorsd9M06fMynOM= +github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292/go.mod h1:KYCjqMOeHpNuTOiFQU6WEcTG7poCJrUs0YgyHNtn1no= +github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af/go.mod h1:5Jv4cbFiHJMsVxt52+i0Ha45fjshj6wxYr1r19tB9bw= +github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= +github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= +github.com/agl/ed25519 v0.0.0-20150830182803-278e1ec8e8a6/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0= +github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= +github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= +github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190329064014-6e358769c32a/go.mod h1:T9M45xf79ahXVelWoOBmH0y4aC1t5kXO5BxwyakgIGA= +github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190103054945-8205d1f41e70/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= +github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible/go.mod h1:LDQHRZylxvcg8H7wBIDfvO5g/cy4/sz1iucBlc2l3Jw= +github.com/antchfx/xpath v0.0.0-20190129040759-c8489ed3251e/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= +github.com/antchfx/xquery v0.0.0-20180515051857-ad5b8c7a47b0/go.mod h1:LzD22aAzDP8/dyiCKFp31He4m2GPjl0AFyzDtZzUu9M= +github.com/apparentlymart/go-cidr v1.0.1/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= 
+github.com/apparentlymart/go-dump v0.0.0-20180507223929-23540a00eaa3/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-dump v0.0.0-20190214190832-042adf3cf4a0/go.mod h1:oL81AME2rN47vu18xqj1S1jPIPuN7afo62yKTNn3XMM= +github.com/apparentlymart/go-textseg v1.0.0 h1:rRmlIsPEEhUTIKQb7T++Nz/A5Q6C9IuX2wFoYVvnCs0= +github.com/apparentlymart/go-textseg v1.0.0/go.mod h1:z96Txxhf3xSFMPmb5X/1W05FF/Nj9VFpLOpjS5yuumk= +github.com/apparentlymart/go-versions v0.0.2-0.20180815153302-64b99f7cb171/go.mod h1:JXY95WvQrPJQtudvNARshgWajS7jNNlM90altXIPNyI= +github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o= +github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= +github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= +github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= +github.com/aws/aws-sdk-go v1.30.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= +github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= +github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= +github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= +github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= +github.com/bmatcuk/doublestar v1.1.5/go.mod h1:wiQtGV+rzVYxB7WIlirSN++5HPtPlXEo9MEoZQC/PmE= +github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps= +github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU= +github.com/cheggaaa/pb v1.0.27/go.mod h1:pQciLPpbU0oxA0h+VJYYLxO+XeDQb5pZijXscXHm81s= +github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= +github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= +github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= +github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/coreos/bbolt v1.3.0/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk= +github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE= +github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= +github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= +github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= +github.com/dnaeon/go-vcr 
v0.0.0-20180920040454-5637cf3d8a31/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= +github.com/dylanmei/iso8601 v0.1.0/go.mod h1:w9KhXSgIyROl1DefbMYIE7UVSIvELTbMrCfx+QkYnoQ= +github.com/dylanmei/winrmtest v0.0.0-20190225150635-99b7fe2fddf1/go.mod h1:lcy9/2gH1jn/VCLouHA6tOEwLoNVd4GW6zhuKLmHC2Y= +github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= +github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= +github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= +github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= +github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= +github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= +github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= +github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= +github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg= +github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= +github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= +github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= 
+github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= +github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= +github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4= +github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01/go.mod h1:wjDF8z83zTeg5eMLml5EBSlAhbF7G8DobyI1YsMuyzw= +github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= +github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= +github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= +github.com/grpc-ecosystem/grpc-gateway v1.8.5/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY= +github.com/hashicorp/aws-sdk-go-base v0.4.0/go.mod h1:eRhlz3c4nhqxFZJAahJEFL7gh6Jyj5rQmQc7F9eHFyQ= +github.com/hashicorp/consul v0.0.0-20171026175957-610f3c86a089/go.mod h1:mFrjN1mfidgJfYP1xrJCF+AfRhr6Eaqhb2+sfyn/OOI= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= +github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= +github.com/hashicorp/go-azure-helpers v0.10.0/go.mod h1:YuAtHxm2v74s+IjQwUG88dHBJPd5jL+cXr5BGVzSKhE= +github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= +github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY= +github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= +github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-immutable-radix v0.0.0-20180129170900-7f3cd4390caa/go.mod h1:6ij3Z20p+OhOkCSrA0gImAWoHYQRGbnlcuk6XYTiaRw= +github.com/hashicorp/go-msgpack v0.5.4/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= +github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o= +github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= +github.com/hashicorp/go-plugin v1.3.0/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYtXdgmf1AVNs0= +github.com/hashicorp/go-retryablehttp v0.5.2/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= +github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= +github.com/hashicorp/go-slug v0.4.1/go.mod h1:I5tq5Lv0E2xcNXNkmx7BSfzi1PsJ2cNjs3cC3LwyhK8= +github.com/hashicorp/go-sockaddr v0.0.0-20180320115054-6d291a969b86/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU= +github.com/hashicorp/go-tfe v0.8.1/go.mod h1:XAV72S4O1iP8BDaqiaPLmL2B4EE6almocnOn8E8stHc= +github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= +github.com/hashicorp/go-version v1.1.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.2.0 
h1:3vNe/fWF5CBgRIguda1meWhsZHy3m8gCJ5wx+dIzX/E= +github.com/hashicorp/go-version v1.2.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f h1:UdxlrJz4JOnY8W+DbLISwf2B8WXEolNRA8BGCwI9jws= +github.com/hashicorp/hcl v0.0.0-20170504190234-a4b07c25de5f/go.mod h1:oZtUIOe8dh44I2q6ScRibXws4Ajl+d+nod3AaR9vL5w= +github.com/hashicorp/hcl/v2 v2.0.0/go.mod h1:oVVDG71tEinNGYCxinCYadcmKU9bglqW9pV3txagJ90= +github.com/hashicorp/hcl/v2 v2.3.0 h1:iRly8YaMwTBAKhn1Ybk7VSdzbnopghktCD031P8ggUE= +github.com/hashicorp/hcl/v2 v2.3.0/go.mod h1:d+FwDBbOLvpAM3Z6J7gPj/VoAGkNe/gm352ZhjJ/Zv8= +github.com/hashicorp/hil v0.0.0-20190212112733-ab17b08d6590/go.mod h1:n2TSygSNwsLJ76m8qFXTSc7beTb+auJxYdqrnoqwZWE= +github.com/hashicorp/memberlist v0.1.0/go.mod h1:ncdBp14cuox2iFOq3kDiquKU6fqsTBc3W6JvZwjxxsE= +github.com/hashicorp/serf v0.0.0-20160124182025-e4ec8cc423bb/go.mod h1:h/Ru6tmZazX7WO/GDmwdpS975F019L4t5ng5IgwbNrE= +github.com/hashicorp/terraform v0.12.28 h1:mBA+A9dvMXk1xDpflKEP5mL/KOD0sXap+M4F4Vlgnvc= +github.com/hashicorp/terraform v0.12.28/go.mod h1:CBxNAiTW0pLap44/3GU4j7cYE2bMhkKZNlHPcr4P55U= +github.com/hashicorp/terraform-config-inspect v0.0.0-20191212124732-c6ae6269b9d7/go.mod h1:p+ivJws3dpqbp1iP84+npOyAmTTOLMgCzrXd3GSdn/A= +github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 h1:hjyO2JsNZUKT1ym+FAdlBEkGPevazYsmVgIMw7dVELg= +github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg= +github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0= +github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= +github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= +github.com/jmespath/go-jmespath v0.3.0/go.mod h1:9QtRXoHjLGCJ5IBSaohpXITPlowMeeYCZ7fLUTSywik= +github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= +github.com/joyent/triton-go v0.0.0-20180313100802-d8f9c0314926/go.mod h1:U+RSyWxWd04xTqnuOQxnai7XGS2PrPY2cfGoDKtMHjA= +github.com/json-iterator/go v1.1.5/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= +github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU= +github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= +github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= +github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= +github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= +github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= 
+github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= +github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo= +github.com/likexian/gokit v0.0.0-20190309162924-0a377eecf7aa/go.mod h1:QdfYv6y6qPA9pbBA2qXtoT8BMKha6UyNbxWGWl/9Jfk= +github.com/likexian/gokit v0.0.0-20190418170008-ace88ad0983b/go.mod h1:KKqSnk/VVSW8kEyO2vVCXoanzEutKdlBAPohmGXkxCk= +github.com/likexian/gokit v0.0.0-20190501133040-e77ea8b19cdc/go.mod h1:3kvONayqCaj+UgrRZGpgfXzHdMYCAO0KAt4/8n0L57Y= +github.com/likexian/gokit v0.20.15/go.mod h1:kn+nTv3tqh6yhor9BC4Lfiu58SmH8NmQ2PmEl+uM6nU= +github.com/likexian/simplejson-go v0.0.0-20190409170913-40473a74d76d/go.mod h1:Typ1BfnATYtZ/+/shXfFYLrovhFyuKvzwrdOnIDHlmg= +github.com/likexian/simplejson-go v0.0.0-20190419151922-c1f9f0b4f084/go.mod h1:U4O1vIJvIKwbMZKUJ62lppfdvkCdVd2nfMimHK81eec= +github.com/likexian/simplejson-go v0.0.0-20190502021454-d8787b4bfa0b/go.mod h1:3BWwtmKP9cXWwYCr5bkoVDEfLywacOv0s06OBEDpyt8= +github.com/lusis/go-artifactory v0.0.0-20160115162124-7e4ce345df82/go.mod h1:y54tfGmO3NKssKveTEFFzH8C/akrSOy/iW9qEAUDV84= +github.com/masterzen/simplexml v0.0.0-20160608183007-4572e39b1ab9/go.mod h1:kCEbxUJlNDEBNbdQMkPSp6yaKcRXVI6f4ddk8Riv4bc= +github.com/masterzen/winrm v0.0.0-20190223112901-5e5c9a7fe54b/go.mod h1:wr1VqkwW0AB5JS0QLy5GpVMS9E3VtRoSYXUYyVk46KY= +github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU= +github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ= +github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= +github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-shellwords v1.0.4/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= +github.com/miekg/dns v1.0.8/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= +github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= +github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= +github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= +github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-linereader v0.0.0-20190213213312-1b945b3263eb/go.mod h1:OaY7UOoTkkrX3wRwjpYRKafIkkyeD0UtweSHAWWiqQM= +github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= +github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= 
+github.com/mitchellh/go-wordwrap v1.0.0 h1:6GlHJ/LTGMrIJbwgdqdl2eEH8o+Exx/0m8ir9Gns0u4= +github.com/mitchellh/go-wordwrap v1.0.0/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo= +github.com/mitchellh/hashstructure v1.0.0/go.mod h1:QjSHrPWS+BGUVBYkbTZWEnOh3G1DutKwClXU/ABz6AQ= +github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/mitchellh/panicwrap v1.0.0/go.mod h1:pKvZHwWrZowLUzftuFq7coarnxbBXU4aQh3N0BJOeeA= +github.com/mitchellh/prefixedio v0.0.0-20190213213902-5733675afd51/go.mod h1:kB1naBgV9ORnkiTVeyJOI1DavaJkG4oNIq0Af6ZVKUo= +github.com/mitchellh/reflectwalk v1.0.0/go.mod h1:mSTlrgnPZtwu0c4WaC2kGObEpuNDbx0jmZXqmk4esnw= +github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= +github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0= +github.com/mozillazg/go-httpheader v0.2.1/go.mod h1:jJ8xECTlalr6ValeXYdOF8fFUISeBAdw6E61aqQma60= +github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= +github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= +github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk= +github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= +github.com/posener/complete v1.2.1/go.mod h1:6gapUrK/U1TAN7ciCoNRIdVC5sbdBTUh1DKN0g6uH7E= +github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= +github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= +github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= +github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod 
h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= +github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= +github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= +github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= +github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= +github.com/spf13/afero v1.3.2 h1:GDarE4TJQI52kYSbSAmLiId1Elfj+xgSDqrUZxFhxlU= +github.com/spf13/afero v1.3.2/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= +github.com/svanharmelen/jsonapi v0.0.0-20180618144545-0c0828c3f16d/go.mod h1:BSTlc8jOjh0niykqEGVXOLXdi9o0r0kR8tCYiMvjFgw= +github.com/tencentcloud/tencentcloud-sdk-go v3.0.82+incompatible/go.mod h1:0PfYow01SHPMhKY31xa+EFz2RStxIqj6JFAJS+IkCi4= +github.com/tencentyun/cos-go-sdk-v5 v0.0.0-20190808065407-f07404cefc8c/go.mod h1:wk2XFUg6egk4tSDNZtXeKfe2G6690UVyt163PuUxBZk= +github.com/terraform-providers/terraform-provider-openstack v1.15.0/go.mod h1:2aQ6n/BtChAl1y2S60vebhyJyZXBsuAI5G4+lHrT1Ew= +github.com/tmc/grpc-websocket-proxy v0.0.0-20171017195756-830351dc03c6/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U= +github.com/ugorji/go v0.0.0-20180813092308-00b869d2f4a5/go.mod h1:hnLbHMwcvSihnDhEfx2/BzKp2xb0Y+ErdfYcrs9tkJQ= +github.com/ulikunitz/xz v0.5.5/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= +github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= +github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= +github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= +github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= +github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= +github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty v1.2.1 h1:vGMsygfmeCl4Xb6OA5U5XVAaQZ69FvoG7X2jUtQujb8= +github.com/zclconf/go-cty v1.2.1/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= +github.com/zclconf/go-cty-yaml v1.0.1/go.mod h1:IP3Ylp0wQpYm50IHK8OZWKMu6sPJIUgKa8XhiVHura0= +go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= +go.opencensus.io v0.22.0/go.mod 
h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= +golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190222235706-ffb98f73852f/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= +golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190125091013-d26f9f9a57f3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod 
h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI= +golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190221075227-b4e8571b14e0/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/time 
v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= +google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= +google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= +google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/genproto v0.0.0-20190819201941-24fa4b261c55/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc= +google.golang.org/grpc v1.8.0/go.mod h1:yo6s7OP7yaDglbqo1J04qKzAhqBH6lvTonzMVmEdcZw= +google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM= +google.golang.org/grpc v1.23.0/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +google.golang.org/grpc v1.27.1/go.mod 
h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk= +gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= +gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= +gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= From cdbd4443947b7016d4b7d9401ef42f0c82b4d531 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 12:42:03 +0530 Subject: [PATCH 009/188] Changes: - Add zap logger support - Logging changes in all files - Create normalized json in iac-provider --- cmd/terrascan/main.go | 10 ++++ go.mod | 1 + go.sum | 3 ++ pkg/cloud-providers/aws/normalized.go | 10 ++++ pkg/cloud-providers/aws/types.go | 9 ---- pkg/cloud-providers/cloud-provider.go | 8 +-- pkg/cloud-providers/interface.go | 6 ++- pkg/http-server/start.go | 7 +-- pkg/iac-providers/output/types.go | 2 +- .../{iac-provider.go => providers.go} | 8 +-- pkg/iac-providers/terraform/v12/load-dir.go | 24 ++++++--- pkg/iac-providers/terraform/v12/load-file.go | 18 +++++-- pkg/iac-providers/terraform/v12/resource.go | 8 +-- pkg/logger/logger.go | 54 +++++++++++++++++++ pkg/runtime/executor.go | 44 ++++++++------- pkg/utils/path.go | 8 +-- pkg/utils/printer.go | 5 +- 17 files changed, 163 insertions(+), 62 deletions(-) create mode 100644 pkg/cloud-providers/aws/normalized.go rename pkg/iac-providers/{iac-provider.go => providers.go} (84%) create mode 100644 pkg/logger/logger.go diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 6f1d2e8e2..7b939ccba 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -3,8 +3,11 @@ package main import ( "flag" + "go.uber.org/zap" + "github.com/accurics/terrascan/pkg/cli" httpServer "github.com/accurics/terrascan/pkg/http-server" + "github.com/accurics/terrascan/pkg/logger" ) func main() { @@ -15,13 +18,20 @@ func main() { cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") iacFilePath = flag.String("f", "", "IaC file path") iacDirPath = flag.String("d", "", "IaC directory path") + + // logging flags + logLevel = flag.String("log-level", "info", "logging level (debug, info, warn, error, panic, fatal)") + logType = flag.String("log-type", "console", "log type (json, console)") ) flag.Parse() // if server mode set, run terrascan as a server, else run it as CLI if *server { + logger.Init("json", *logLevel) httpServer.Start() } else { + logger.Init(*logType, *logLevel) + 
zap.S().Debug("running terrascan in cli mode") cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath) } } diff --git a/go.mod b/go.mod index ccfdf0eae..820a8b106 100644 --- a/go.mod +++ b/go.mod @@ -8,4 +8,5 @@ require ( github.com/hashicorp/terraform v0.12.28 github.com/spf13/afero v1.3.2 github.com/zclconf/go-cty v1.2.1 + go.uber.org/zap v1.9.1 ) diff --git a/go.sum b/go.sum index dc1c0ce14..5cd1e1b75 100644 --- a/go.sum +++ b/go.sum @@ -284,8 +284,11 @@ github.com/zclconf/go-cty v1.2.1/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q github.com/zclconf/go-cty-yaml v1.0.1/go.mod h1:IP3Ylp0wQpYm50IHK8OZWKMu6sPJIUgKa8XhiVHura0= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +go.uber.org/atomic v1.3.2 h1:2Oa65PReHzfn29GpvgsYwloV9AVFHPDk8tYxt2c2tr4= go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE= +go.uber.org/multierr v1.1.0 h1:HoEmRHQPVSqub6w2z2d2EOVs2fjyFRGyofhKuyDq0QI= go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0= +go.uber.org/zap v1.9.1 h1:XCJQEf3W6eZaVwhRBof6ImoYGJSITeKWsyeh3HFu/5o= go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q= golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= diff --git a/pkg/cloud-providers/aws/normalized.go b/pkg/cloud-providers/aws/normalized.go new file mode 100644 index 000000000..567a7a49d --- /dev/null +++ b/pkg/cloud-providers/aws/normalized.go @@ -0,0 +1,10 @@ +package awsProvider + +import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + +// CreateNormalizedJson creates a normalized json for the given input +func (a *AWSProvider) CreateNormalizedJson(allResourcesConfig output.AllResourceConfigs) (interface{}, error) { + return allResourcesConfig, nil +} diff --git a/pkg/cloud-providers/aws/types.go b/pkg/cloud-providers/aws/types.go index 3ba8ddc13..c02044252 100644 --- a/pkg/cloud-providers/aws/types.go +++ b/pkg/cloud-providers/aws/types.go @@ -1,12 +1,3 @@ package awsProvider -import ( - "log" -) - type AWSProvider struct{} - -// CreateNormalizedJson creates a normalized json for the given input -func (a *AWSProvider) CreateNormalizedJson() { - log.Printf("creating normalized json for AWS resources") -} diff --git a/pkg/cloud-providers/cloud-provider.go b/pkg/cloud-providers/cloud-provider.go index de0fa979d..64b5f4495 100644 --- a/pkg/cloud-providers/cloud-provider.go +++ b/pkg/cloud-providers/cloud-provider.go @@ -2,8 +2,9 @@ package cloudProvider import ( "fmt" - "log" "reflect" + + "go.uber.org/zap" ) // NewCloudProvider returns a new CloudProvider @@ -12,9 +13,8 @@ func NewCloudProvider(cloudType string) (cloudProvider CloudProvider, err error) // get CloudProvider from supportedCloudProviders cloudProviderObject, supported := supportedCloudProviders[supportedCloudType(cloudType)] if !supported { - errMsg := fmt.Sprintf("cloud type '%s' not supported", cloudType) - log.Printf(errMsg) - return cloudProvider, fmt.Errorf("errMsg") + zap.S().Errorf("cloud type '%s' not supported", cloudType) + return cloudProvider, fmt.Errorf("cloud type not supported") } return reflect.New(cloudProviderObject).Interface().(CloudProvider), nil diff --git a/pkg/cloud-providers/interface.go b/pkg/cloud-providers/interface.go index 79ec9cae0..8a6048ebd 100644 --- 
a/pkg/cloud-providers/interface.go +++ b/pkg/cloud-providers/interface.go @@ -1,7 +1,11 @@ package cloudProvider +import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + // CloudProvider defines the interface which every cloud provider needs to implement // to claim support in terrascan type CloudProvider interface { - CreateNormalizedJson() + CreateNormalizedJson(output.AllResourceConfigs) (interface{}, error) } diff --git a/pkg/http-server/start.go b/pkg/http-server/start.go index deae0b481..9cdcebe5e 100644 --- a/pkg/http-server/start.go +++ b/pkg/http-server/start.go @@ -1,16 +1,17 @@ package httpServer import ( - "log" "net/http" + + "go.uber.org/zap" ) func Start() { - log.Printf("terrascan server listening at port 9010") + zap.S().Info("terrascan server listening at port 9010") http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) }) - log.Fatal(http.ListenAndServe(":9010", nil)) + zap.S().Fatal(http.ListenAndServe(":9010", nil)) } diff --git a/pkg/iac-providers/output/types.go b/pkg/iac-providers/output/types.go index 695122398..438207d42 100644 --- a/pkg/iac-providers/output/types.go +++ b/pkg/iac-providers/output/types.go @@ -10,4 +10,4 @@ type ResourceConfig struct { } // AllResourceConfigs is a list/slice of resource configs present in IaC -type AllResourceConfigs []ResourceConfig +type AllResourceConfigs map[string][]ResourceConfig diff --git a/pkg/iac-providers/iac-provider.go b/pkg/iac-providers/providers.go similarity index 84% rename from pkg/iac-providers/iac-provider.go rename to pkg/iac-providers/providers.go index d78205650..aaa9be51a 100644 --- a/pkg/iac-providers/iac-provider.go +++ b/pkg/iac-providers/providers.go @@ -2,8 +2,9 @@ package iacProvider import ( "fmt" - "log" "reflect" + + "go.uber.org/zap" ) // NewIacProvider returns a new IacProvider @@ -12,9 +13,8 @@ func NewIacProvider(iacType, iacVersion string) (iacProvider IacProvider, err er // get IacProvider from supportedIacProviders iacProviderObject, supported := supportedIacProviders[supportedIacType(iacType)][supportedIacVersion(iacVersion)] if !supported { - errMsg := fmt.Sprintf("IaC type:'%s', version: '%s' not supported", iacType, iacVersion) - log.Printf(errMsg) - return iacProvider, fmt.Errorf("errMsg") + zap.S().Errorf("IaC type:'%s', version: '%s' not supported", iacType, iacVersion) + return iacProvider, fmt.Errorf("iac not supported") } return reflect.New(iacProviderObject).Interface().(IacProvider), nil diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index 075f6ce15..4f1271350 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -2,7 +2,6 @@ package tfv12 import ( "fmt" - "log" "path/filepath" "strings" @@ -10,6 +9,7 @@ import ( "github.com/hashicorp/hcl/v2" hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" + "go.uber.org/zap" "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/utils" @@ -31,15 +31,14 @@ func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceC // check if the directory has any tf config files (.tf or .tf.json) if !parser.IsConfigDir(absRootDir) { - errMsg := fmt.Sprintf("directory '%s' has no terraform config files") - log.Printf(errMsg) - return allResourcesConfig, fmt.Errorf(errMsg) + zap.S().Errorf("directory '%s' has no terraform config files", absRootDir) + return allResourcesConfig, fmt.Errorf("directory has 
no terraform files") } // load root config directory rootMod, diags := parser.LoadConfigDir(absRootDir) if diags.HasErrors() { - log.Printf("failed to load terraform config dir '%s'. error:\n%+v\n", rootDir, diags) + zap.S().Errorf("failed to load terraform config dir '%s'. error:\n%+v\n", rootDir, diags) return allResourcesConfig, fmt.Errorf("failed to load terraform allResourcesConfig dir") } @@ -71,7 +70,7 @@ func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceC }, )) if diags.HasErrors() { - log.Printf("failed to build unified config. errors:\n%+v\n", diags) + zap.S().Errorf("failed to build unified config. errors:\n%+v\n", diags) return allResourcesConfig, fmt.Errorf("failed to build terraform allResourcesConfig") } @@ -88,7 +87,11 @@ func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceC // queue of for BFS, add root module config to it configsQ := []*hclConfigs.Config{unified.Root} + // initialize normalized output + allResourcesConfig = make(map[string][]output.ResourceConfig) + // using BFS traverse through all modules in the unified config tree + zap.S().Debug("traversing through all modules in config tree") for len(configsQ) > 0 { // pop first element from the queue @@ -105,7 +108,14 @@ func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceC } // append resource config to list of all resources - allResourcesConfig = append(allResourcesConfig, resourceConfig) + // allResourcesConfig = append(allResourcesConfig, resourceConfig) + + // append to normalized output + if _, present := allResourcesConfig[resourceConfig.Type]; !present { + allResourcesConfig[resourceConfig.Type] = []output.ResourceConfig{resourceConfig} + } else { + allResourcesConfig[resourceConfig.Type] = append(allResourcesConfig[resourceConfig.Type], resourceConfig) + } } // add all current's children to the queue diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index f9c9cf70f..667c2b5f7 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -2,10 +2,10 @@ package tfv12 import ( "fmt" - "log" hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" + "go.uber.org/zap" "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/utils" @@ -25,14 +25,17 @@ func (*TfV12) LoadIacFile(filePath string) (allResourcesConfig output.AllResourc hclFile, diags := parser.LoadConfigFile(absFilePath) if diags != nil { - log.Printf("failed to load config file '%s'. error:\n%v\n", diags) + zap.S().Errorf("failed to load config file '%s'. error:\n%v\n", diags) return allResourcesConfig, fmt.Errorf("failed to load config file") } if hclFile == nil && diags.HasErrors() { - log.Printf("error occured while loading config file. error:\n%v\n", diags) + zap.S().Errorf("error occured while loading config file. 
error:\n%v\n", diags) return allResourcesConfig, fmt.Errorf("failed to load config file") } + // initialize normalized output + allResourcesConfig = make(map[string][]output.ResourceConfig) + // traverse through all current's resources for _, managedResource := range hclFile.ManagedResources { @@ -43,7 +46,14 @@ func (*TfV12) LoadIacFile(filePath string) (allResourcesConfig output.AllResourc } // append resource config to list of all resources - allResourcesConfig = append(allResourcesConfig, resourceConfig) + // allResourcesConfig = append(allResourcesConfig, resourceConfig) + + // append to normalized output + if _, present := allResourcesConfig[resourceConfig.Type]; !present { + allResourcesConfig[resourceConfig.Type] = []output.ResourceConfig{resourceConfig} + } else { + allResourcesConfig[resourceConfig.Type] = append(allResourcesConfig[resourceConfig.Type], resourceConfig) + } } // successful diff --git a/pkg/iac-providers/terraform/v12/resource.go b/pkg/iac-providers/terraform/v12/resource.go index cae7eaa13..ca4ca4d2a 100644 --- a/pkg/iac-providers/terraform/v12/resource.go +++ b/pkg/iac-providers/terraform/v12/resource.go @@ -3,10 +3,10 @@ package tfv12 import ( "fmt" "io/ioutil" - "log" "github.com/hashicorp/hcl/v2/hclsyntax" hclConfigs "github.com/hashicorp/terraform/configs" + "go.uber.org/zap" "github.com/accurics/terrascan/pkg/iac-providers/output" ) @@ -17,7 +17,7 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig // read source file fileBytes, err := ioutil.ReadFile(managedResource.DeclRange.Filename) if err != nil { - log.Printf("failed to read terrafrom IaC file '%s'. error: '%v'", managedResource.DeclRange.Filename, err) + zap.S().Errorf("failed to read terrafrom IaC file '%s'. error: '%v'", managedResource.DeclRange.Filename, err) return resourceConfig, fmt.Errorf("failed to read terraform file") } @@ -26,7 +26,7 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig hclBody := managedResource.Config.(*hclsyntax.Body) goOut, err := c.convertBody(hclBody) if err != nil { - log.Printf("failed to convert hcl.Body to go struct; resource '%s', file: '%s'. error: '%v'", + zap.S().Errorf("failed to convert hcl.Body to go struct; resource '%s', file: '%s'. error: '%v'", managedResource.Name, managedResource.DeclRange.Filename, err) return resourceConfig, fmt.Errorf("failed to convert hcl.Body to go struct") } @@ -39,5 +39,7 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig Config: goOut, } + // successful + zap.S().Debugf("successfully created resource config for resource '%s', file: '%s'", resourceConfig.Name, resourceConfig.Source) return resourceConfig, nil } diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go new file mode 100644 index 000000000..9fc6d7b6d --- /dev/null +++ b/pkg/logger/logger.go @@ -0,0 +1,54 @@ +package logger + +import ( + "go.uber.org/zap" + "go.uber.org/zap/zapcore" +) + +// levelMap maps human readable log level to zapcore.Level +var levelMap = map[string]zapcore.Level{ + "debug": zapcore.DebugLevel, + "info": zapcore.InfoLevel, + "warn": zapcore.WarnLevel, + "error": zapcore.ErrorLevel, + "dpanic": zapcore.DPanicLevel, + "panic": zapcore.PanicLevel, + "fatal": zapcore.FatalLevel, +} + +// getLoggerLevel takes human readable log level (debug, info, warn, error...) 
+// as input and return zapcore.Level +func getLoggerLevel(lvl string) zapcore.Level { + if level, ok := levelMap[lvl]; ok { + return level + } + return zapcore.InfoLevel +} + +func Init(encoding, level string) { + + encodingLevel := zapcore.LowercaseColorLevelEncoder + if encoding == "json" { + encodingLevel = zapcore.LowercaseLevelEncoder + } + + // build zap config + zapConfig := zap.Config{ + Encoding: encoding, + Level: zap.NewAtomicLevelAt(getLoggerLevel(level)), + OutputPaths: []string{"stdout"}, + EncoderConfig: zapcore.EncoderConfig{ + LevelKey: "level", + TimeKey: "time", + CallerKey: "file", + MessageKey: "msg", + EncodeLevel: encodingLevel, + EncodeTime: zapcore.ISO8601TimeEncoder, + EncodeCaller: zapcore.ShortCallerEncoder, + }, + } + logger, _ := zapConfig.Build() + + // initialize global logger + zap.ReplaceGlobals(logger) +} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 58d89b600..532564724 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -2,10 +2,10 @@ package runtime import ( "fmt" - "log" "os" "github.com/accurics/terrascan/pkg/utils" + "go.uber.org/zap" CloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" IacProvider "github.com/accurics/terrascan/pkg/iac-providers" @@ -35,11 +35,13 @@ func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath string) *Exec // ValidateInputs validates the inputs to the executor object func (r *Executor) ValidateInputs() error { + // error message + errMsg := "input validation failed" + // terrascan can accept either a file or a directory, both inputs cannot // be processed together if r.filePath != "" && r.dirPath != "" { - errMsg := fmt.Sprintf("cannot accept both '-f %s' and '-d %s' options together", r.filePath, r.dirPath) - log.Printf(errMsg) + zap.S().Errorf("cannot accept both '-f %s' and '-d %s' options together", r.filePath, r.dirPath) return fmt.Errorf(errMsg) } @@ -51,42 +53,43 @@ func (r *Executor) ValidateInputs() error { } if _, err := os.Stat(absDirPath); err != nil { - errMsg := fmt.Sprintf("directory '%s' does not exist", absDirPath) - log.Printf(errMsg) + zap.S().Errorf("directory '%s' does not exist", absDirPath) return fmt.Errorf(errMsg) } + zap.S().Debugf("directory '%s' exists", absDirPath) } else { // if file path, check if file exists absFilePath, err := utils.GetAbsPath(r.filePath) if err != nil { - return err + return fmt.Errorf(errMsg) } if _, err := os.Stat(absFilePath); err != nil { - errMsg := fmt.Sprintf("file '%s' does not exist", absFilePath) - log.Printf(errMsg) + zap.S().Errorf("file '%s' does not exist", absFilePath) return fmt.Errorf(errMsg) } + zap.S().Debugf("file '%s' exists", absFilePath) } // check if Iac type is supported if !IacProvider.IsIacSupported(r.iacType, r.iacVersion) { - errMsg := fmt.Sprintf("iac type '%s', version '%s' not supported", r.iacType, r.iacVersion) - log.Printf(errMsg) + zap.S().Errorf("iac type '%s', version '%s' not supported", r.iacType, r.iacVersion) return fmt.Errorf(errMsg) } + zap.S().Debugf("iac type '%s', version '%s' is supported", r.iacType, r.iacVersion) // check if cloud type is supported if !CloudProvider.IsCloudSupported(r.cloudType) { - errMsg := fmt.Sprintf("cloud type '%s' not supported", r.cloudType) - log.Printf(errMsg) + zap.S().Errorf("cloud type '%s' not supported", r.cloudType) return fmt.Errorf(errMsg) } + zap.S().Debugf("cloud type '%s' supported", r.cloudType) // check if policy type is supported // successful + zap.S().Debug("input validation successful") return nil } @@ -101,9 
+104,8 @@ func (r *Executor) Process() error { // create new IacProvider iacProvider, err := IacProvider.NewIacProvider(r.iacType, r.iacVersion) if err != nil { - errMsg := fmt.Sprintf("failed to create a new IacProvider for iacType '%s'. error: '%s'", r.iacType, err) - log.Printf(errMsg) - return fmt.Errorf(errMsg) + zap.S().Errorf("failed to create a new IacProvider for iacType '%s'. error: '%s'", r.iacType, err) + return err } var iacOut output.AllResourceConfigs @@ -116,16 +118,18 @@ func (r *Executor) Process() error { if err != nil { return err } - utils.PrintJSON(iacOut) // create new CloudProvider cloudProvider, err := CloudProvider.NewCloudProvider(r.cloudType) if err != nil { - errMsg := fmt.Sprintf("failed to create a new CloudProvider for cloudType '%s'. error: '%s'", r.cloudType, err) - log.Printf(errMsg) - return fmt.Errorf(errMsg) + zap.S().Errorf("failed to create a new CloudProvider for cloudType '%s'. error: '%s'", r.cloudType, err) + return err + } + normalized, err := cloudProvider.CreateNormalizedJson(iacOut) + if err != nil { + return err } - cloudProvider.CreateNormalizedJson() + utils.PrintJSON(normalized) // write output diff --git a/pkg/utils/path.go b/pkg/utils/path.go index c66d97ee6..b2cb0065c 100644 --- a/pkg/utils/path.go +++ b/pkg/utils/path.go @@ -2,10 +2,11 @@ package utils import ( "fmt" - "log" "os" "path/filepath" "strings" + + "go.uber.org/zap" ) // GetAbsPath returns absolute path from passed file path resolving even ~ to user home dir and any other such symbols that are only @@ -23,9 +24,8 @@ func GetAbsPath(path string) (string, error) { // get absolute file path path, err := filepath.Abs(path) if err != nil { - errMsg := fmt.Sprintf("unable to resolve %s to absolute path. error: '%s'", path, err) - log.Println(errMsg) - return path, fmt.Errorf(errMsg) + zap.S().Errorf("unable to resolve %s to absolute path. error: '%s'", path, err) + return path, fmt.Errorf("failed to resolve absolute path") } return path, nil } diff --git a/pkg/utils/printer.go b/pkg/utils/printer.go index 952fdefaf..35626e680 100644 --- a/pkg/utils/printer.go +++ b/pkg/utils/printer.go @@ -2,15 +2,16 @@ package utils import ( "encoding/json" - "log" "os" + + "go.uber.org/zap" ) // PrintJSON prints data in JSON format func PrintJSON(data interface{}) { j, err := json.MarshalIndent(data, "", " ") if err != nil { - log.Printf("failed to create JSON. error: '%v'", err) + zap.S().Errorf("failed to create JSON. 
error: '%v'", err) return } os.Stdout.Write(j) From 51800437612dc7c5f04b58a1ddf72fe643c20ee6 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 18:14:30 +0530 Subject: [PATCH 010/188] fixing normalized json format --- pkg/iac-providers/terraform/v12/convert.go | 12 ++++++++++-- pkg/iac-providers/terraform/v12/resource.go | 3 ++- 2 files changed, 12 insertions(+), 3 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/convert.go b/pkg/iac-providers/terraform/v12/convert.go index 8a469dcbc..585137501 100644 --- a/pkg/iac-providers/terraform/v12/convert.go +++ b/pkg/iac-providers/terraform/v12/convert.go @@ -1,7 +1,7 @@ package tfv12 /* - Following code has been borrowed from: + Following code has been borrowed and modifed from: /~https://github.com/tmccombs/hcl2json/blob/5c1402dc2b410e362afee45a3cf15dcb08bc1f2c/convert.go */ @@ -45,10 +45,18 @@ func (c *converter) convertBody(body *hclsyntax.Body) (jsonObj, error) { } for _, block := range body.Blocks { - err = c.convertBlock(block, out) + blockOut := make(jsonObj) + err = c.convertBlock(block, blockOut) if err != nil { return nil, err } + if _, present := out[block.Type]; !present { + out[block.Type] = []jsonObj{blockOut} + } else { + list := out[block.Type].([]jsonObj) + list = append(list, blockOut) + out[block.Type] = list + } } return out, nil diff --git a/pkg/iac-providers/terraform/v12/resource.go b/pkg/iac-providers/terraform/v12/resource.go index ca4ca4d2a..0ca109614 100644 --- a/pkg/iac-providers/terraform/v12/resource.go +++ b/pkg/iac-providers/terraform/v12/resource.go @@ -33,6 +33,7 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig // create a resource config resourceConfig = output.ResourceConfig{ + ID: fmt.Sprintf("%s.%s", managedResource.Type, managedResource.Name), Name: managedResource.Name, Type: managedResource.Type, Source: managedResource.DeclRange.Filename, @@ -40,6 +41,6 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig } // successful - zap.S().Debugf("successfully created resource config for resource '%s', file: '%s'", resourceConfig.Name, resourceConfig.Source) + zap.S().Debugf("created resource config for resource '%s', file: '%s'", resourceConfig.Name, resourceConfig.Source) return resourceConfig, nil } From 7992292951a8e74e485dc4366a0e3909bef70435 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 18:55:49 +0530 Subject: [PATCH 011/188] accepting log-type for server mode as well --- cmd/terrascan/main.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 7b939ccba..686ca90ee 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -27,7 +27,7 @@ func main() { // if server mode set, run terrascan as a server, else run it as CLI if *server { - logger.Init("json", *logLevel) + logger.Init(*logType, *logLevel) httpServer.Start() } else { logger.Init(*logType, *logLevel) From 61c3224dc05743196632b59b5959dad5c779655e Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 23:57:50 +0530 Subject: [PATCH 012/188] add gofmt, govet and golint validation scripts --- scripts/fix-gofmt.sh | 15 +++++++++++++++ scripts/validate-gofmt.sh | 21 +++++++++++++++++++++ scripts/validate-golint.sh | 19 +++++++++++++++++++ scripts/validate-govet.sh | 7 +++++++ 4 files changed, 62 insertions(+) create mode 100755 scripts/fix-gofmt.sh create mode 100755 scripts/validate-gofmt.sh create mode 100755 scripts/validate-golint.sh create mode 100755 
scripts/validate-govet.sh diff --git a/scripts/fix-gofmt.sh b/scripts/fix-gofmt.sh new file mode 100755 index 000000000..30bc0ff60 --- /dev/null +++ b/scripts/fix-gofmt.sh @@ -0,0 +1,15 @@ +#!/bin/sh + +set -o errexit +set -o nounset +set -o pipefail + +find_files() { + find . -not \( \ + \( \ + -wholename '*/vendor/*' \ + \) -prune \ + \) -name '*.go' +} + +find_files | xargs gofmt -w -s diff --git a/scripts/validate-gofmt.sh b/scripts/validate-gofmt.sh new file mode 100755 index 000000000..c85fb9ca7 --- /dev/null +++ b/scripts/validate-gofmt.sh @@ -0,0 +1,21 @@ +#!/bin/sh + +set -o errexit +set -o nounset +set -o pipefail + +find_files() { + find . -not \( \ + \( \ + -wholename '*/vendor/*' \ + \) -prune \ + \) -name '*.go' +} + +bad_files=$(find_files | xargs gofmt -d -s 2>&1) +if [[ -n "${bad_files}" ]]; then + echo "${bad_files}" >&2 + echo >&2 + echo "fix gofmt errors by running:\n./hack/fix-gofmt.sh" >&2 + exit 1 +fi diff --git a/scripts/validate-golint.sh b/scripts/validate-golint.sh new file mode 100755 index 000000000..f8ecb385e --- /dev/null +++ b/scripts/validate-golint.sh @@ -0,0 +1,19 @@ +#!/bin/sh + +set -o errexit +set -o nounset +set -o pipefail + +find_files() { + find . -not \( \ + \( \ + -wholename '*/vendor/*' \ + \) -prune \ + \) -name '*.go' +} + +bad_files=$(find_files | xargs -I@ bash -c "$GOPATH/bin/golint @") +if [[ -n "${bad_files}" ]]; then + echo "${bad_files}" + exit 1 +fi diff --git a/scripts/validate-govet.sh b/scripts/validate-govet.sh new file mode 100755 index 000000000..b5e2d5117 --- /dev/null +++ b/scripts/validate-govet.sh @@ -0,0 +1,7 @@ +#!/bin/sh + +set -x + +go vet ./pkg/... +go vet ./cmd/... +#go vet -tags=test ./test/... From b00157bfaf35d747b74cbfdc45d3ff8dd4f75b95 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 23:58:01 +0530 Subject: [PATCH 013/188] add Makefile --- Makefile | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 Makefile diff --git a/Makefile b/Makefile new file mode 100644 index 000000000..e5cdcdefe --- /dev/null +++ b/Makefile @@ -0,0 +1,41 @@ +GITCOMMIT := $(shell git rev-parse --short HEAD 2>/dev/null) +BUILD_FLAGS := -v -ldflags "-w -s" + +BUILD_DIR = build +BINARY_NAME = terrascan + + +# default +default: build + + +# build terrascan binary +build: clean + @mkdir -p $(BUILD_DIR) > /dev/null + go build ${BUILD_FLAGS} -o ${BUILD_DIR}/${BINARY_NAME} cmd/terrascan/main.go + @echo "terrascan binary created in ${BUILD_DIR} directory" + + +# clean build +clean: + @rm -rf $(BUILD_DIR) + + +# run all validation tests +validate: gofmt govet golint + + +# gofmt validation +gofmt: + ./scripts/validate-gofmt.sh + ./scripts/fix-gofmt.sh + + +# golint validation +golint: + ./scripts/validate-golint.sh + + +# govet validation +govet: + ./scripts/validate-govet.sh From d56e25e810c70d497400fa18ab540ede1c5dfa54 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 23:58:34 +0530 Subject: [PATCH 014/188] fixing golint errors --- pkg/cli/run.go | 2 +- pkg/cloud-providers/aws/normalized.go | 6 +++--- pkg/cloud-providers/aws/types.go | 3 ++- pkg/cloud-providers/cloud-provider.go | 2 +- pkg/cloud-providers/interface.go | 4 ++-- pkg/cloud-providers/supported.go | 2 +- pkg/http-server/start.go | 3 ++- pkg/iac-providers/interface.go | 2 +- pkg/iac-providers/providers.go | 2 +- pkg/iac-providers/supported.go | 2 +- pkg/logger/logger.go | 1 + pkg/runtime/executor.go | 6 +++--- 12 files changed, 19 insertions(+), 16 deletions(-) diff --git a/pkg/cli/run.go 
b/pkg/cli/run.go index cf6c98fe2..42a2bda6c 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -10,5 +10,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath string) { // create a new runtime executor for processing IaC executor := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, iacDirPath) - executor.Process() + executor.Execute() } diff --git a/pkg/cloud-providers/aws/normalized.go b/pkg/cloud-providers/aws/normalized.go index 567a7a49d..09ea8ee98 100644 --- a/pkg/cloud-providers/aws/normalized.go +++ b/pkg/cloud-providers/aws/normalized.go @@ -1,10 +1,10 @@ -package awsProvider +package awsprovider import ( "github.com/accurics/terrascan/pkg/iac-providers/output" ) -// CreateNormalizedJson creates a normalized json for the given input -func (a *AWSProvider) CreateNormalizedJson(allResourcesConfig output.AllResourceConfigs) (interface{}, error) { +// CreateNormalizedJSON creates a normalized json for the given input +func (a *AWSProvider) CreateNormalizedJSON(allResourcesConfig output.AllResourceConfigs) (interface{}, error) { return allResourcesConfig, nil } diff --git a/pkg/cloud-providers/aws/types.go b/pkg/cloud-providers/aws/types.go index c02044252..0263ad0a0 100644 --- a/pkg/cloud-providers/aws/types.go +++ b/pkg/cloud-providers/aws/types.go @@ -1,3 +1,4 @@ -package awsProvider +package awsprovider +// AWSProvider implements cloud provider interface type AWSProvider struct{} diff --git a/pkg/cloud-providers/cloud-provider.go b/pkg/cloud-providers/cloud-provider.go index 64b5f4495..1c6b516db 100644 --- a/pkg/cloud-providers/cloud-provider.go +++ b/pkg/cloud-providers/cloud-provider.go @@ -1,4 +1,4 @@ -package cloudProvider +package cloudprovider import ( "fmt" diff --git a/pkg/cloud-providers/interface.go b/pkg/cloud-providers/interface.go index 8a6048ebd..9a920f9da 100644 --- a/pkg/cloud-providers/interface.go +++ b/pkg/cloud-providers/interface.go @@ -1,4 +1,4 @@ -package cloudProvider +package cloudprovider import ( "github.com/accurics/terrascan/pkg/iac-providers/output" @@ -7,5 +7,5 @@ import ( // CloudProvider defines the interface which every cloud provider needs to implement // to claim support in terrascan type CloudProvider interface { - CreateNormalizedJson(output.AllResourceConfigs) (interface{}, error) + CreateNormalizedJSON(output.AllResourceConfigs) (interface{}, error) } diff --git a/pkg/cloud-providers/supported.go b/pkg/cloud-providers/supported.go index d7f6122f4..3b76ebfca 100644 --- a/pkg/cloud-providers/supported.go +++ b/pkg/cloud-providers/supported.go @@ -1,4 +1,4 @@ -package cloudProvider +package cloudprovider import ( "reflect" diff --git a/pkg/http-server/start.go b/pkg/http-server/start.go index 9cdcebe5e..919d2d12a 100644 --- a/pkg/http-server/start.go +++ b/pkg/http-server/start.go @@ -1,4 +1,4 @@ -package httpServer +package httpserver import ( "net/http" @@ -6,6 +6,7 @@ import ( "go.uber.org/zap" ) +// Start starts the terrascan http server func Start() { zap.S().Info("terrascan server listening at port 9010") diff --git a/pkg/iac-providers/interface.go b/pkg/iac-providers/interface.go index ff3e5ea50..00f7fa412 100644 --- a/pkg/iac-providers/interface.go +++ b/pkg/iac-providers/interface.go @@ -1,4 +1,4 @@ -package iacProvider +package iacprovider import ( "github.com/accurics/terrascan/pkg/iac-providers/output" diff --git a/pkg/iac-providers/providers.go b/pkg/iac-providers/providers.go index aaa9be51a..640322897 100644 --- a/pkg/iac-providers/providers.go +++ b/pkg/iac-providers/providers.go @@ -1,4 +1,4 @@ 
-package iacProvider +package iacprovider import ( "fmt" diff --git a/pkg/iac-providers/supported.go b/pkg/iac-providers/supported.go index 359b7e5c1..d7287faa3 100644 --- a/pkg/iac-providers/supported.go +++ b/pkg/iac-providers/supported.go @@ -1,4 +1,4 @@ -package iacProvider +package iacprovider import ( "reflect" diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index 9fc6d7b6d..13f538010 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -25,6 +25,7 @@ func getLoggerLevel(lvl string) zapcore.Level { return zapcore.InfoLevel } +// Init initializes global custom zap logger func Init(encoding, level string) { encodingLevel := zapcore.LowercaseColorLevelEncoder diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 532564724..b8339584a 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -93,8 +93,8 @@ func (r *Executor) ValidateInputs() error { return nil } -// Process validates the inputs, processes the IaC, creates json output -func (r *Executor) Process() error { +// Execute validates the inputs, processes the IaC, creates json output +func (r *Executor) Execute() error { // validate inputs if err := r.ValidateInputs(); err != nil { @@ -125,7 +125,7 @@ func (r *Executor) Process() error { zap.S().Errorf("failed to create a new CloudProvider for cloudType '%s'. error: '%s'", r.cloudType, err) return err } - normalized, err := cloudProvider.CreateNormalizedJson(iacOut) + normalized, err := cloudProvider.CreateNormalizedJSON(iacOut) if err != nil { return err } From 926b5534b381c2a63ba6def97eac95361a5b0115 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 23 Jul 2020 23:58:58 +0530 Subject: [PATCH 015/188] update go dependency files --- go.mod | 2 ++ go.sum | 20 ++++++++++++++++++++ 2 files changed, 22 insertions(+) diff --git a/go.mod b/go.mod index 820a8b106..8a419db61 100644 --- a/go.mod +++ b/go.mod @@ -9,4 +9,6 @@ require ( github.com/spf13/afero v1.3.2 github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 + golang.org/x/lint v0.0.0-20200302205851-738671d3881b // indirect + golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d // indirect ) diff --git a/go.sum b/go.sum index 5cd1e1b75..59f5081aa 100644 --- a/go.sum +++ b/go.sum @@ -276,6 +276,7 @@ github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6Ac github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= @@ -297,7 +298,9 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= 
golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= @@ -305,8 +308,13 @@ golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTk golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20190409202823-959b441ac422 h1:QzoH/1pFpZguR8NrRHLcO6jKqfv2zpuSqZLgdm7ZmjI= golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b h1:Wh+f8QHJXR411sJR8/vRBTZ7YapZaRvUcLFFJhusH0k= +golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -325,6 +333,8 @@ golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLL golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2 h1:CCH4IOTTfewWjGOlSp+zGcjutRKlBEZQ6wTn8ozI/nI= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200625001655-4c5254603344 h1:vGXIOMxbNfDTk/aXCmfdLgkrSV+Z2tcbze+pEc3v5W4= +golang.org/x/net v0.0.0-20200625001655-4c5254603344/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -333,6 +343,7 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys 
v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -349,6 +360,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= @@ -365,7 +377,15 @@ golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0 h1:Dh6fw+p6FyRl5x/FvNswO1ji0lIGzm3KP8Y9VkS9PTE= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d h1:7k9BKfwmdbykG6l5ztniTrH0TP25yel8O7l26/yovMU= +golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= From 6fa60dd6f66e1c033ad8876951693aa12e21b4cd Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 11:03:17 +0530 Subject: [PATCH 016/188] add go mod verification to Makefile --- Makefile | 9 +++++++-- 1 file changed, 7 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index e5cdcdefe..9cea72be3 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ GITCOMMIT := $(shell git rev-parse --short HEAD 2>/dev/null) BUILD_FLAGS := -v -ldflags "-w -s" -BUILD_DIR = build +BUILD_DIR = bin BINARY_NAME = terrascan @@ -22,7 +22,7 @@ clean: # run all validation tests -validate: gofmt govet golint +validate: gofmt govet golint gomodverify # gofmt validation @@ -39,3 +39,8 @@ golint: # govet validation govet: ./scripts/validate-govet.sh + + +# go mod validation 
+gomodverify: + go mod verify From 9704e2f01025dacabe48927d12cf9742ee7c81d5 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 11:03:44 +0530 Subject: [PATCH 017/188] add go build pipeline and remove python piplines --- .github/workflows/gobuild.yml | 28 ++++++++++++++++++++++++ .github/workflows/pythonpackage.yml | 34 ----------------------------- .github/workflows/pythonpublish.yml | 26 ---------------------- 3 files changed, 28 insertions(+), 60 deletions(-) create mode 100644 .github/workflows/gobuild.yml delete mode 100644 .github/workflows/pythonpackage.yml delete mode 100644 .github/workflows/pythonpublish.yml diff --git a/.github/workflows/gobuild.yml b/.github/workflows/gobuild.yml new file mode 100644 index 000000000..4fc31b458 --- /dev/null +++ b/.github/workflows/gobuild.yml @@ -0,0 +1,28 @@ +name: Go Terrascan build +on: + push: + pull_request: +jobs: + build: + runs-on: ubuntu-latest + env: + GO111MODULE: on + GOPATH: /home/runner/work/terrascan + GOBIN: /home/runner/work/terrascan/bin + steps: + - name: Checkout Terrascan + uses: actions/checkout@v1 + - name: Setup Go + uses: actions/setup-go@v1 + with: + go-version: 1.14 + - name: Install golint + run: | + go get -u golang.org/x/lint/golint + - name: Go validations + run: | + echo Workflow trigger - ${{ github.event_name }} + make validate + - name: Build Terrascan + run: | + make build diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml deleted file mode 100644 index 062518333..000000000 --- a/.github/workflows/pythonpackage.yml +++ /dev/null @@ -1,34 +0,0 @@ -name: Python package - -on: [push] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - max-parallel: 4 - matrix: - python-version: [3.6, 3.7, 3.8] - - steps: - - uses: actions/checkout@v1 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements_dev.txt - pip install -r requirements.txt - - name: Lint with flake8 - run: | - pip install flake8 - # stop the build if there are Python syntax errors or undefined names - flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - - name: Run Tests - run: | - pytest -v diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml deleted file mode 100644 index 48a76c453..000000000 --- a/.github/workflows/pythonpublish.yml +++ /dev/null @@ -1,26 +0,0 @@ -name: Upload Python Package - -on: - release: - types: [created] - -jobs: - deploy: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v1 - - name: Set up Python - uses: actions/setup-python@v1 - with: - python-version: '3.x' - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install setuptools wheel twine - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - python setup.py sdist bdist_wheel - twine upload --verbose dist/* From 3d8571faace10d3ba1bc7a1abcd078d3a7286bed Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 11:16:32 +0530 Subject: [PATCH 018/188] changing shell in validation scripts --- scripts/fix-gofmt.sh | 2 +- scripts/validate-gofmt.sh | 2 +- scripts/validate-golint.sh | 2 +- scripts/validate-govet.sh | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/scripts/fix-gofmt.sh b/scripts/fix-gofmt.sh index 30bc0ff60..86239ddb0 100755 --- a/scripts/fix-gofmt.sh +++ b/scripts/fix-gofmt.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash set -o errexit set -o nounset diff --git a/scripts/validate-gofmt.sh b/scripts/validate-gofmt.sh index c85fb9ca7..a5e389953 100755 --- a/scripts/validate-gofmt.sh +++ b/scripts/validate-gofmt.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash set -o errexit set -o nounset diff --git a/scripts/validate-golint.sh b/scripts/validate-golint.sh index f8ecb385e..96d864098 100755 --- a/scripts/validate-golint.sh +++ b/scripts/validate-golint.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash set -o errexit set -o nounset diff --git a/scripts/validate-govet.sh b/scripts/validate-govet.sh index b5e2d5117..d6f06ca8b 100755 --- a/scripts/validate-govet.sh +++ b/scripts/validate-govet.sh @@ -1,4 +1,4 @@ -#!/bin/sh +#!/bin/bash set -x From 1b177f7e4cb8aba1b3ffa32bbba0304f29f0f43c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 11:25:08 +0530 Subject: [PATCH 019/188] remove fix-gofmt from gofmt validation step --- Makefile | 1 - 1 file changed, 1 deletion(-) diff --git a/Makefile b/Makefile index 9cea72be3..93847438a 100644 --- a/Makefile +++ b/Makefile @@ -28,7 +28,6 @@ validate: gofmt govet golint gomodverify # gofmt validation gofmt: ./scripts/validate-gofmt.sh - ./scripts/fix-gofmt.sh # golint validation From 930ac6d462a08fa753cadb32764f15bcb2de07c6 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 23:52:27 +0530 Subject: [PATCH 020/188] add script to run unit tests and generate code coverage report --- scripts/generate-coverage.sh | 15 +++++++++++++++ 1 file changed, 15 insertions(+) create mode 100755 scripts/generate-coverage.sh diff --git a/scripts/generate-coverage.sh b/scripts/generate-coverage.sh new file mode 100755 index 000000000..dc603c7a5 --- /dev/null +++ b/scripts/generate-coverage.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# source: /~https://github.com/codecov/example-go +# go test can't generate code coverage for multiple packages in one command + + set -e +touch coverage.out +go test -i -race ./cmd/terrascan +for d in $(go list ./... 
| grep -v vendor | grep -v tests | grep -v integration_test); do + go test -race -coverprofile=profile.out -covermode=atomic $d + if [ -f profile.out ]; then + cat profile.out >> coverage.out + rm profile.out + fi +done From 41e04cfd272dc77c790de2d657a118d3060afd65 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 23:53:00 +0530 Subject: [PATCH 021/188] add unit test execution step in build pipeline --- .github/workflows/gobuild.yml | 3 +++ Makefile | 5 +++++ 2 files changed, 8 insertions(+) diff --git a/.github/workflows/gobuild.yml b/.github/workflows/gobuild.yml index 4fc31b458..a03929b87 100644 --- a/.github/workflows/gobuild.yml +++ b/.github/workflows/gobuild.yml @@ -26,3 +26,6 @@ jobs: - name: Build Terrascan run: | make build + - name: Run unit tests + run: | + make unit-tests diff --git a/Makefile b/Makefile index 93847438a..c0e897eb6 100644 --- a/Makefile +++ b/Makefile @@ -43,3 +43,8 @@ govet: # go mod validation gomodverify: go mod verify + + +# run unit tests +unit-tests: + ./scripts/generate-coverage.sh From 350016176c05208bd0024d030d75f83b6062c0fa Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 24 Jul 2020 23:53:55 +0530 Subject: [PATCH 022/188] add unit tests for pkg/utils package and refactor accordingly --- go.sum | 5 +++++ pkg/runtime/executor.go | 2 +- pkg/utils/path.go | 2 ++ pkg/utils/path_test.go | 47 +++++++++++++++++++++++++++++++++++++++ pkg/utils/printer.go | 8 +++---- pkg/utils/printer_test.go | 32 ++++++++++++++++++++++++++ 6 files changed, 91 insertions(+), 5 deletions(-) create mode 100644 pkg/utils/path_test.go create mode 100644 pkg/utils/printer_test.go diff --git a/go.sum b/go.sum index 59f5081aa..cd356d14d 100644 --- a/go.sum +++ b/go.sum @@ -72,6 +72,7 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQvIirEdv+8= @@ -232,8 +233,10 @@ github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FI github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.1/go.mod h1:6gapUrK/U1TAN7ciCoNRIdVC5sbdBTUh1DKN0g6uH7E= @@ -263,6 +266,7 @@ github.com/stretchr/objx 
v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+ github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/svanharmelen/jsonapi v0.0.0-20180618144545-0c0828c3f16d/go.mod h1:BSTlc8jOjh0niykqEGVXOLXdi9o0r0kR8tCYiMvjFgw= github.com/tencentcloud/tencentcloud-sdk-go v3.0.82+incompatible/go.mod h1:0PfYow01SHPMhKY31xa+EFz2RStxIqj6JFAJS+IkCi4= @@ -416,6 +420,7 @@ gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index b8339584a..a2edaf75e 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -129,7 +129,7 @@ func (r *Executor) Execute() error { if err != nil { return err } - utils.PrintJSON(normalized) + utils.PrintJSON(normalized, os.Stdout) // write output diff --git a/pkg/utils/path.go b/pkg/utils/path.go index b2cb0065c..34cc2e2f5 100644 --- a/pkg/utils/path.go +++ b/pkg/utils/path.go @@ -18,6 +18,8 @@ func GetAbsPath(path string) (string, error) { homeDir := os.Getenv("HOME") if len(path) > 1 { path = filepath.Join(homeDir, path[1:]) + } else { + return homeDir, nil } } diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go new file mode 100644 index 000000000..b8913675f --- /dev/null +++ b/pkg/utils/path_test.go @@ -0,0 +1,47 @@ +package utils + +import ( + "os" + "testing" +) + +func TestGetAbsPath(t *testing.T) { + + table := []struct { + name string + path string + want string + wantErr error + }{ + { + name: "test PWD", + path: ".", + want: os.Getenv("PWD"), + wantErr: nil, + }, + { + name: "user HOME dir", + path: "~", + want: os.Getenv("HOME"), + wantErr: nil, + }, + { + name: "testdata dir", + path: "./testdata", + want: os.Getenv("PWD") + "/testdata", + wantErr: nil, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got, err := GetAbsPath(tt.path) + if err != tt.wantErr { + t.Errorf("unexpected error; got: '%v', want: '%v'", err, tt.wantErr) + } + if got != tt.want { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} diff --git a/pkg/utils/printer.go b/pkg/utils/printer.go index 35626e680..264ccb08a 100644 --- a/pkg/utils/printer.go +++ b/pkg/utils/printer.go @@ -2,18 +2,18 @@ package utils import ( "encoding/json" - "os" + "io" "go.uber.org/zap" ) // PrintJSON prints data in JSON format -func PrintJSON(data interface{}) { +func PrintJSON(data interface{}, writer io.Writer) { j, err := json.MarshalIndent(data, "", " ") if err != nil { zap.S().Errorf("failed to create JSON. 
error: '%v'", err) return } - os.Stdout.Write(j) - os.Stdout.Write([]byte{'\n'}) + writer.Write(j) + writer.Write([]byte{'\n'}) } diff --git a/pkg/utils/printer_test.go b/pkg/utils/printer_test.go new file mode 100644 index 000000000..5ca4cc349 --- /dev/null +++ b/pkg/utils/printer_test.go @@ -0,0 +1,32 @@ +package utils + +import ( + "bytes" + "strings" + "testing" +) + +func TestPrintJSON(t *testing.T) { + + table := []struct { + name string + input interface{} + want string + }{ + { + name: "empty JSON", + input: make(map[string]interface{}), + want: "{}", + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got := &bytes.Buffer{} + PrintJSON(tt.input, got) + if strings.TrimSpace(got.String()) != tt.want { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} From b71f018220c910c33b737d68cb9a664ce5f6788b Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 25 Jul 2020 14:56:35 +0530 Subject: [PATCH 023/188] add unit tests for iac-provider package --- pkg/iac-providers/providers.go | 6 +- pkg/iac-providers/providers_test.go | 97 +++++++++++++++++++++++++++++ 2 files changed, 102 insertions(+), 1 deletion(-) create mode 100644 pkg/iac-providers/providers_test.go diff --git a/pkg/iac-providers/providers.go b/pkg/iac-providers/providers.go index 640322897..5e2d2ac8f 100644 --- a/pkg/iac-providers/providers.go +++ b/pkg/iac-providers/providers.go @@ -7,6 +7,10 @@ import ( "go.uber.org/zap" ) +var ( + errIacNotSupported = fmt.Errorf("iac not supported") +) + // NewIacProvider returns a new IacProvider func NewIacProvider(iacType, iacVersion string) (iacProvider IacProvider, err error) { @@ -14,7 +18,7 @@ func NewIacProvider(iacType, iacVersion string) (iacProvider IacProvider, err er iacProviderObject, supported := supportedIacProviders[supportedIacType(iacType)][supportedIacVersion(iacVersion)] if !supported { zap.S().Errorf("IaC type:'%s', version: '%s' not supported", iacType, iacVersion) - return iacProvider, fmt.Errorf("iac not supported") + return iacProvider, errIacNotSupported } return reflect.New(iacProviderObject).Interface().(IacProvider), nil diff --git a/pkg/iac-providers/providers_test.go b/pkg/iac-providers/providers_test.go new file mode 100644 index 000000000..b5353ef37 --- /dev/null +++ b/pkg/iac-providers/providers_test.go @@ -0,0 +1,97 @@ +package iacprovider + +import ( + "reflect" + "testing" + + tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" +) + +func TestNewIacProvider(t *testing.T) { + + table := []struct { + name string + iacType supportedIacType + iacVersion supportedIacVersion + want IacProvider + wantErr error + }{ + { + name: "terraform v12", + iacType: terraform, + iacVersion: terraformV12, + want: &tfv12.TfV12{}, + wantErr: nil, + }, + { + name: "not supported iac type", + iacType: "not-supported", + iacVersion: terraformV12, + want: nil, + wantErr: errIacNotSupported, + }, + { + name: "not supported iac version", + iacType: terraform, + iacVersion: "not-supported", + want: nil, + wantErr: errIacNotSupported, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got, gotErr := NewIacProvider(string(tt.iacType), string(tt.iacVersion)) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} + +func TestIsIacSupported(t *testing.T) { + + table := []struct { + name string + iacType 
supportedIacType + iacVersion supportedIacVersion + want bool + }{ + { + name: "terraform v12", + iacType: terraform, + iacVersion: terraformV12, + want: true, + }, + { + name: "not supported iac type", + iacType: "not-supported", + iacVersion: terraformV12, + want: false, + }, + { + name: "not supported iac version", + iacType: terraform, + iacVersion: "not-supported", + want: false, + }, + { + name: "not supported iac type and version", + iacType: "not-supported-type", + iacVersion: "not-supported-version", + want: false, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got := IsIacSupported(string(tt.iacType), string(tt.iacVersion)) + if got != tt.want { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} From 1fac82bc4742e2166aae607f7a8bf2c6df266e49 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 25 Jul 2020 22:39:32 +0530 Subject: [PATCH 024/188] add more unit tests for pkg/utils package --- pkg/utils/printer_test.go | 24 ++++++++++++++++++++++-- pkg/utils/testdata/valid.json | 4 ++++ 2 files changed, 26 insertions(+), 2 deletions(-) create mode 100644 pkg/utils/testdata/valid.json diff --git a/pkg/utils/printer_test.go b/pkg/utils/printer_test.go index 5ca4cc349..c58949080 100644 --- a/pkg/utils/printer_test.go +++ b/pkg/utils/printer_test.go @@ -2,10 +2,25 @@ package utils import ( "bytes" + "io/ioutil" "strings" "testing" ) +const ( + validJSONFile = "./testdata/valid.json" +) + +var ( + validJSON []byte + validJSONInput = map[string]int{"apple": 5, "lettuce": 7} +) + +func init() { + validJSON, _ = ioutil.ReadFile(validJSONFile) + +} + func TestPrintJSON(t *testing.T) { table := []struct { @@ -18,14 +33,19 @@ func TestPrintJSON(t *testing.T) { input: make(map[string]interface{}), want: "{}", }, + { + name: "valid JSON", + input: validJSONInput, + want: string(validJSON), + }, } for _, tt := range table { t.Run(tt.name, func(t *testing.T) { got := &bytes.Buffer{} PrintJSON(tt.input, got) - if strings.TrimSpace(got.String()) != tt.want { - t.Errorf("got: '%v', want: '%v'", got, tt.want) + if strings.TrimSpace(got.String()) != strings.TrimSpace(tt.want) { + t.Errorf("got:\n'%v'\n, want:\n'%v'\n", got, tt.want) } }) } diff --git a/pkg/utils/testdata/valid.json b/pkg/utils/testdata/valid.json new file mode 100644 index 000000000..71e9e745e --- /dev/null +++ b/pkg/utils/testdata/valid.json @@ -0,0 +1,4 @@ +{ + "apple": 5, + "lettuce": 7 +} From fb4e51a2fcb31c6c2015c349df3099bdf0dee493 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 07:49:48 +0530 Subject: [PATCH 025/188] add unit tests for runtime package and refactor accordingly --- pkg/runtime/executor.go | 128 +++++++++++--------------------- pkg/runtime/executor_test.go | 133 ++++++++++++++++++++++++++++++++++ pkg/runtime/testdata/testfile | 0 pkg/runtime/validate.go | 82 +++++++++++++++++++++ pkg/runtime/validate_test.go | 110 ++++++++++++++++++++++++++++ 5 files changed, 369 insertions(+), 84 deletions(-) create mode 100644 pkg/runtime/executor_test.go create mode 100644 pkg/runtime/testdata/testfile create mode 100644 pkg/runtime/validate.go create mode 100644 pkg/runtime/validate_test.go diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index a2edaf75e..cd8f74645 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -1,131 +1,91 @@ package runtime import ( - "fmt" "os" "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" - CloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" - IacProvider 
"github.com/accurics/terrascan/pkg/iac-providers" + cloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" + iacProvider "github.com/accurics/terrascan/pkg/iac-providers" "github.com/accurics/terrascan/pkg/iac-providers/output" ) // Executor object type Executor struct { - filePath string - dirPath string - cloudType string - iacType string - iacVersion string + filePath string + dirPath string + cloudType string + iacType string + iacVersion string + iacProvider iacProvider.IacProvider + cloudProvider cloudProvider.CloudProvider } // NewExecutor creates a runtime object -func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath string) *Executor { - return &Executor{ +func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath string) (e *Executor, err error) { + e = &Executor{ filePath: filePath, dirPath: dirPath, cloudType: cloudType, iacType: iacType, iacVersion: iacVersion, } -} - -// ValidateInputs validates the inputs to the executor object -func (r *Executor) ValidateInputs() error { - - // error message - errMsg := "input validation failed" - - // terrascan can accept either a file or a directory, both inputs cannot - // be processed together - if r.filePath != "" && r.dirPath != "" { - zap.S().Errorf("cannot accept both '-f %s' and '-d %s' options together", r.filePath, r.dirPath) - return fmt.Errorf(errMsg) - } - - if r.dirPath != "" { - // if directory, check if directory exists - absDirPath, err := utils.GetAbsPath(r.dirPath) - if err != nil { - return err - } - - if _, err := os.Stat(absDirPath); err != nil { - zap.S().Errorf("directory '%s' does not exist", absDirPath) - return fmt.Errorf(errMsg) - } - zap.S().Debugf("directory '%s' exists", absDirPath) - } else { - // if file path, check if file exists - absFilePath, err := utils.GetAbsPath(r.filePath) - if err != nil { - return fmt.Errorf(errMsg) - } - - if _, err := os.Stat(absFilePath); err != nil { - zap.S().Errorf("file '%s' does not exist", absFilePath) - return fmt.Errorf(errMsg) - } - zap.S().Debugf("file '%s' exists", absFilePath) + // initialized executor + if err = e.Init(); err != nil { + return e, err } - // check if Iac type is supported - if !IacProvider.IsIacSupported(r.iacType, r.iacVersion) { - zap.S().Errorf("iac type '%s', version '%s' not supported", r.iacType, r.iacVersion) - return fmt.Errorf(errMsg) - } - zap.S().Debugf("iac type '%s', version '%s' is supported", r.iacType, r.iacVersion) - - // check if cloud type is supported - if !CloudProvider.IsCloudSupported(r.cloudType) { - zap.S().Errorf("cloud type '%s' not supported", r.cloudType) - return fmt.Errorf(errMsg) - } - zap.S().Debugf("cloud type '%s' supported", r.cloudType) - - // check if policy type is supported - - // successful - zap.S().Debug("input validation successful") - return nil + return e, nil } -// Execute validates the inputs, processes the IaC, creates json output -func (r *Executor) Execute() error { +// Init validates input and initializes iac and cloud providers +func (e *Executor) Init() error { // validate inputs - if err := r.ValidateInputs(); err != nil { + err := e.ValidateInputs() + if err != nil { return err } // create new IacProvider - iacProvider, err := IacProvider.NewIacProvider(r.iacType, r.iacVersion) + e.iacProvider, err = iacProvider.NewIacProvider(e.iacType, e.iacVersion) if err != nil { - zap.S().Errorf("failed to create a new IacProvider for iacType '%s'. error: '%s'", r.iacType, err) + zap.S().Errorf("failed to create a new IacProvider for iacType '%s'. 
error: '%s'", e.iacType, err) return err } - var iacOut output.AllResourceConfigs - if r.dirPath != "" { - iacOut, err = iacProvider.LoadIacDir(r.dirPath) - } else { - // create config from IaC - iacOut, err = iacProvider.LoadIacFile(r.filePath) - } + // create new CloudProvider + e.cloudProvider, err = cloudProvider.NewCloudProvider(e.cloudType) if err != nil { + zap.S().Errorf("failed to create a new CloudProvider for cloudType '%s'. error: '%s'", e.cloudType, err) return err } - // create new CloudProvider - cloudProvider, err := CloudProvider.NewCloudProvider(r.cloudType) + return nil +} + +// Execute validates the inputs, processes the IaC, creates json output +func (e *Executor) Execute() error { + + // load iac config + var ( + iacOut output.AllResourceConfigs + err error + ) + if e.dirPath != "" { + iacOut, err = e.iacProvider.LoadIacDir(e.dirPath) + } else { + // create config from IaC + iacOut, err = e.iacProvider.LoadIacFile(e.filePath) + } if err != nil { - zap.S().Errorf("failed to create a new CloudProvider for cloudType '%s'. error: '%s'", r.cloudType, err) return err } - normalized, err := cloudProvider.CreateNormalizedJSON(iacOut) + + // create normalized json + normalized, err := e.cloudProvider.CreateNormalizedJSON(iacOut) if err != nil { return err } diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go new file mode 100644 index 000000000..e72f2f585 --- /dev/null +++ b/pkg/runtime/executor_test.go @@ -0,0 +1,133 @@ +package runtime + +import ( + "fmt" + "reflect" + "testing" + + cloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" + awsProvider "github.com/accurics/terrascan/pkg/cloud-providers/aws" + iacProvider "github.com/accurics/terrascan/pkg/iac-providers" + "github.com/accurics/terrascan/pkg/iac-providers/output" + tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" +) + +var ( + errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") + errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") + errMockCreateNormalizedJSON = fmt.Errorf("mock CreateNormalizedJSON") +) + +// MockIacProvider mocks IacProvider interface +type MockIacProvider struct { + output output.AllResourceConfigs + err error +} + +func (m MockIacProvider) LoadIacDir(dir string) (output.AllResourceConfigs, error) { + return m.output, m.err +} + +func (m MockIacProvider) LoadIacFile(file string) (output.AllResourceConfigs, error) { + return m.output, m.err +} + +// MockCloudProvider mocks CloudProvider interface +type MockCloudProvider struct { + err error +} + +func (m MockCloudProvider) CreateNormalizedJSON(data output.AllResourceConfigs) (mockInterface interface{}, err error) { + return data, m.err +} + +func TestExecute(t *testing.T) { + + table := []struct { + name string + executor Executor + wantErr error + }{ + { + name: "test LoadIacDir", + executor: Executor{ + dirPath: "./testdata/testdir", + iacProvider: MockIacProvider{err: errMockLoadIacDir}, + }, + wantErr: errMockLoadIacDir, + }, + { + name: "test LoadIacFile", + executor: Executor{ + filePath: "./testdata/testfile", + iacProvider: MockIacProvider{err: errMockLoadIacFile}, + }, + wantErr: errMockLoadIacFile, + }, + { + name: "test CreateNormalizedJSON error", + executor: Executor{ + filePath: "./testdata/testfile", + iacProvider: MockIacProvider{err: nil}, + cloudProvider: MockCloudProvider{err: errMockCreateNormalizedJSON}, + }, + wantErr: errMockCreateNormalizedJSON, + }, + { + name: "test CreateNormalizedJSON", + executor: Executor{ + filePath: "./testdata/testfile", + iacProvider: 
MockIacProvider{err: nil}, + cloudProvider: MockCloudProvider{err: nil}, + }, + wantErr: nil, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + gotErr := tt.executor.Execute() + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + }) + } +} + +func TestInit(t *testing.T) { + + table := []struct { + name string + executor Executor + wantErr error + wantIacProvider iacProvider.IacProvider + wantCloudProvider cloudProvider.CloudProvider + }{ + { + name: "valid filePath", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + }, + wantErr: nil, + wantIacProvider: &tfv12.TfV12{}, + wantCloudProvider: &awsProvider.AWSProvider{}, + }, + } + + for _, tt := range table { + gotErr := tt.executor.Init() + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + if !reflect.DeepEqual(tt.executor.iacProvider, tt.wantIacProvider) { + t.Errorf("got: '%v', want: '%v'", tt.executor.iacProvider, tt.wantIacProvider) + } + if !reflect.DeepEqual(tt.executor.cloudProvider, tt.wantCloudProvider) { + t.Errorf("got: '%v', want: '%v'", tt.executor.cloudProvider, tt.wantCloudProvider) + } + } +} diff --git a/pkg/runtime/testdata/testfile b/pkg/runtime/testdata/testfile new file mode 100644 index 000000000..e69de29bb diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go new file mode 100644 index 000000000..49a185b78 --- /dev/null +++ b/pkg/runtime/validate.go @@ -0,0 +1,82 @@ +package runtime + +import ( + "fmt" + "os" + + "github.com/accurics/terrascan/pkg/utils" + "go.uber.org/zap" + + CloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" + IacProvider "github.com/accurics/terrascan/pkg/iac-providers" +) + +var ( + errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") + errIncorrectIacPath = fmt.Errorf("cannot accept both '-f' and '-d' options together") + errDirNotExists = fmt.Errorf("directory does not exist") + errFileNotExists = fmt.Errorf("file does not exist") + errIacNotSupported = fmt.Errorf("iac type or version not supported") + errCloudNotSupported = fmt.Errorf("cloud type not supported") +) + +// ValidateInputs validates the inputs to the executor object +func (e *Executor) ValidateInputs() error { + + // terrascan can accept either a file or a directory + if e.filePath == "" && e.dirPath == "" { + zap.S().Errorf("no IaC path specified; use '-f' for file or '-d' for directory") + return errEmptyIacPath + } + if e.filePath != "" && e.dirPath != "" { + zap.S().Errorf("cannot accept both '-f %s' and '-d %s' options together", e.filePath, e.dirPath) + return errIncorrectIacPath + } + + if e.dirPath != "" { + // if directory, check if directory exists + absDirPath, err := utils.GetAbsPath(e.dirPath) + if err != nil { + return err + } + + if _, err := os.Stat(absDirPath); err != nil { + zap.S().Errorf("directory '%s' does not exist", absDirPath) + return errDirNotExists + } + zap.S().Debugf("directory '%s' exists", absDirPath) + } else { + + // if file path, check if file exists + absFilePath, err := utils.GetAbsPath(e.filePath) + if err != nil { + return err + } + + if _, err := os.Stat(absFilePath); err != nil { + zap.S().Errorf("file '%s' does not exist", absFilePath) + return errFileNotExists + } + zap.S().Debugf("file '%s' exists", absFilePath) + } + + // check if Iac type is supported + if 
!IacProvider.IsIacSupported(e.iacType, e.iacVersion) { + zap.S().Errorf("iac type '%s', version '%s' not supported", e.iacType, e.iacVersion) + return errIacNotSupported + } + zap.S().Debugf("iac type '%s', version '%s' is supported", e.iacType, e.iacVersion) + + // check if cloud type is supported + if !CloudProvider.IsCloudSupported(e.cloudType) { + zap.S().Errorf("cloud type '%s' not supported", e.cloudType) + return errCloudNotSupported + } + zap.S().Debugf("cloud type '%s' supported", e.cloudType) + + // check if policy type is supported + + // successful + zap.S().Debug("input validation successful") + return nil +} diff --git a/pkg/runtime/validate_test.go b/pkg/runtime/validate_test.go new file mode 100644 index 000000000..02091c651 --- /dev/null +++ b/pkg/runtime/validate_test.go @@ -0,0 +1,110 @@ +package runtime + +import ( + "reflect" + "testing" +) + +func TestValidateInputs(t *testing.T) { + + table := []struct { + name string + executor Executor + wantErr error + }{ + { + name: "valid filePath", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + }, + wantErr: nil, + }, + { + name: "valid dirPath", + executor: Executor{ + filePath: "", + dirPath: "./testdata/testdir", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + }, + wantErr: nil, + }, + { + name: "empty iac path", + executor: Executor{ + filePath: "", + dirPath: "", + }, + wantErr: errEmptyIacPath, + }, + { + name: "incorrect iac path", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "./testdata/testdir", + }, + wantErr: errIncorrectIacPath, + }, + { + name: "filepath does not exist", + executor: Executor{ + filePath: "./testdata/notthere", + }, + wantErr: errFileNotExists, + }, + { + name: "directory does not exist", + executor: Executor{ + dirPath: "./testdata/notthere", + }, + wantErr: errDirNotExists, + }, + { + name: "invalid cloud", + executor: Executor{ + filePath: "", + dirPath: "./testdata/testdir", + cloudType: "nothere", + iacType: "terraform", + iacVersion: "v12", + }, + wantErr: errCloudNotSupported, + }, + { + name: "invalid iac type", + executor: Executor{ + filePath: "", + dirPath: "./testdata/testdir", + cloudType: "aws", + iacType: "notthere", + iacVersion: "v12", + }, + wantErr: errIacNotSupported, + }, + { + name: "invalid iac version", + executor: Executor{ + filePath: "", + dirPath: "./testdata/testdir", + cloudType: "aws", + iacType: "terraform", + iacVersion: "notthere", + }, + wantErr: errIacNotSupported, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + gotErr := tt.executor.ValidateInputs() + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error, gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + }) + } +} From de76066d69f95c746381d680828d80922acd46c5 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 11:31:26 +0530 Subject: [PATCH 026/188] add unit test for logger package --- pkg/logger/logger.go | 18 ++++++++-- pkg/logger/logger_test.go | 73 +++++++++++++++++++++++++++++++++++++++ 2 files changed, 88 insertions(+), 3 deletions(-) create mode 100644 pkg/logger/logger_test.go diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index 13f538010..8f0f80048 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -28,15 +28,26 @@ func getLoggerLevel(lvl string) zapcore.Level { // Init initializes global custom zap logger func Init(encoding, level string) { + // select encoding level 
encodingLevel := zapcore.LowercaseColorLevelEncoder if encoding == "json" { encodingLevel = zapcore.LowercaseLevelEncoder } + // get logger + logger := GetLogger(level, encoding, encodingLevel) + + // initialize global logger + zap.ReplaceGlobals(logger) +} + +// GetLogger creates a customer zap logger +func GetLogger(logLevel, encoding string, encodingLevel func(zapcore.Level, zapcore.PrimitiveArrayEncoder)) *zap.Logger { + // build zap config zapConfig := zap.Config{ Encoding: encoding, - Level: zap.NewAtomicLevelAt(getLoggerLevel(level)), + Level: zap.NewAtomicLevelAt(getLoggerLevel(logLevel)), OutputPaths: []string{"stdout"}, EncoderConfig: zapcore.EncoderConfig{ LevelKey: "level", @@ -48,8 +59,9 @@ func Init(encoding, level string) { EncodeCaller: zapcore.ShortCallerEncoder, }, } + + // create zap logger logger, _ := zapConfig.Build() - // initialize global logger - zap.ReplaceGlobals(logger) + return logger } diff --git a/pkg/logger/logger_test.go b/pkg/logger/logger_test.go new file mode 100644 index 000000000..028d846a4 --- /dev/null +++ b/pkg/logger/logger_test.go @@ -0,0 +1,73 @@ +package logger + +import ( + "testing" + + "go.uber.org/zap/zapcore" +) + +func TestGetLoggerLevel(t *testing.T) { + table := []struct { + name string + input string + want zapcore.Level + }{ + { + name: "empty log level", + input: "", + want: zapcore.InfoLevel, + }, + { + name: "invalid log level", + input: "some log level", + want: zapcore.InfoLevel, + }, + { + name: "debug log level", + input: "debug", + want: zapcore.DebugLevel, + }, + { + name: "panic log level", + input: "panic", + want: zapcore.PanicLevel, + }, + } + + for _, tt := range table { + got := getLoggerLevel(tt.input) + if got != tt.want { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + } +} + +func TestGetLogger(t *testing.T) { + + table := []struct { + name string + logLevel string + encoding string + encodingLevel func(zapcore.Level, zapcore.PrimitiveArrayEncoder) + }{ + { + name: "check debug log level", + logLevel: "debug", + encoding: "json", + encodingLevel: zapcore.LowercaseLevelEncoder, + }, + { + name: "check log level", + logLevel: "panic", + encoding: "console", + encodingLevel: zapcore.LowercaseLevelEncoder, + }, + } + + for _, tt := range table { + got := GetLogger(tt.logLevel, tt.encoding, tt.encodingLevel) + if ce := got.Check(getLoggerLevel(tt.logLevel), "testing"); ce == nil { + t.Errorf("unexpected error") + } + } +} From b22944e51962603f1d78a58412441a0f30e1ef1d Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 11:54:30 +0530 Subject: [PATCH 027/188] refactoring cli package --- pkg/cli/run.go | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 42a2bda6c..ed25f5d01 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -8,7 +8,10 @@ import ( func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath string) { // create a new runtime executor for processing IaC - executor := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, + executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, iacDirPath) + if err != nil { + return + } executor.Execute() } From f5c9cbfa8aaadaf4635f5ee8ea457ad073004905 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 11:55:23 +0530 Subject: [PATCH 028/188] update code coverage script --- scripts/generate-coverage.sh | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/scripts/generate-coverage.sh 
b/scripts/generate-coverage.sh index dc603c7a5..90978c878 100755 --- a/scripts/generate-coverage.sh +++ b/scripts/generate-coverage.sh @@ -1,15 +1,8 @@ #!/bin/bash -# source: /~https://github.com/codecov/example-go -# go test can't generate code coverage for multiple packages in one command +set -o errexit +set -o nounset +set -o pipefail - set -e -touch coverage.out -go test -i -race ./cmd/terrascan -for d in $(go list ./... | grep -v vendor | grep -v tests | grep -v integration_test); do - go test -race -coverprofile=profile.out -covermode=atomic $d - if [ -f profile.out ]; then - cat profile.out >> coverage.out - rm profile.out - fi -done +go test -v -coverpkg=./... -coverprofile=coverage.out ./... +go tool cover -func coverage.out From 81b653453e6d4104688785232e40203de900ed1c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 11:56:07 +0530 Subject: [PATCH 029/188] fixing static code bugs --- pkg/iac-providers/terraform/v12/convert.go | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/convert.go b/pkg/iac-providers/terraform/v12/convert.go index 585137501..ea257f5de 100644 --- a/pkg/iac-providers/terraform/v12/convert.go +++ b/pkg/iac-providers/terraform/v12/convert.go @@ -18,14 +18,6 @@ import ( type jsonObj map[string]interface{} -// Convert an hcl File to a json serializable object -// This assumes that the body is a hclsyntax.Body -func convertFile(file *hcl.File) (jsonObj, error) { - c := converter{bytes: file.Bytes} - body := file.Body.(*hclsyntax.Body) - return c.convertBody(body) -} - type converter struct { bytes []byte } @@ -76,7 +68,7 @@ func (c *converter) convertBlock(block *hclsyntax.Block, out jsonObj) error { out, ok = inner.(jsonObj) if !ok { // TODO: better diagnostics - return fmt.Errorf("Unable to conver Block to JSON: %v.%v", block.Type, strings.Join(block.Labels, ".")) + return fmt.Errorf("unable to convert Block to JSON: %v.%v", block.Type, strings.Join(block.Labels, ".")) } } else { obj := make(jsonObj) @@ -199,7 +191,7 @@ func (c *converter) convertTemplateConditional(expr *hclsyntax.ConditionalExpr) return "", nil } builder.WriteString(trueResult) - falseResult, err := c.convertStringPart(expr.FalseResult) + falseResult, _ := c.convertStringPart(expr.FalseResult) if len(falseResult) > 0 { builder.WriteString("%{else}") builder.WriteString(falseResult) From 95201e45262ec5332c3d0cee34a12d0e1224447f Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 12:03:29 +0530 Subject: [PATCH 030/188] add testdir to testdata in runtime package --- pkg/runtime/testdata/testdir/testfile | 0 1 file changed, 0 insertions(+), 0 deletions(-) create mode 100644 pkg/runtime/testdata/testdir/testfile diff --git a/pkg/runtime/testdata/testdir/testfile b/pkg/runtime/testdata/testdir/testfile new file mode 100644 index 000000000..e69de29bb From 730ec19a18ed32415ef22b650da14342a9d21e80 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 26 Jul 2020 22:34:54 +0530 Subject: [PATCH 031/188] add unit tests for iac-provider package --- pkg/iac-providers/terraform/v12/load-dir.go | 17 +- .../terraform/v12/load-dir_test.go | 77 + pkg/iac-providers/terraform/v12/load-file.go | 17 +- .../terraform/v12/load-file_test.go | 76 + .../testdata/moduleconfigs/cloudfront/main.tf | 84 ++ .../cloudfront/sub-cloudfront/main.tf | 9 + .../testdata/moduleconfigs/cloudtrail/main.tf | 29 + .../v12/testdata/moduleconfigs/ecs/main.tf | 5 + .../testdata/moduleconfigs/ecs/service.json | 28 + 
.../v12/testdata/moduleconfigs/efs/main.tf | 31 + .../moduleconfigs/elasticcache/main.tf | 30 + .../v12/testdata/moduleconfigs/elb/main.tf | 23 + .../testdata/moduleconfigs/guardduty/main.tf | 3 + .../v12/testdata/moduleconfigs/iam/main.tf | 5 + .../testdata/moduleconfigs/kinesis/main.tf | 48 + .../v12/testdata/moduleconfigs/main.tf | 51 + .../v12/testdata/moduleconfigs/s3/main.tf | 85 ++ .../v12/testdata/moduleconfigs/sg/main.tf | 436 ++++++ .../v12/testdata/moduleconfigs/sqs/main.tf | 21 + .../terraform/v12/testdata/testfile | 0 .../v12/testdata/tfconfigs/config1.tf | 103 ++ .../v12/testdata/tfjson/config1.json | 188 +++ .../v12/testdata/tfjson/fullconfig.json | 188 +++ .../v12/testdata/tfjson/modulconfigs.json | 1314 +++++++++++++++++ 24 files changed, 2848 insertions(+), 20 deletions(-) create mode 100644 pkg/iac-providers/terraform/v12/load-dir_test.go create mode 100644 pkg/iac-providers/terraform/v12/load-file_test.go create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/service.json create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/guardduty/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/iam/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/testfile create mode 100644 pkg/iac-providers/terraform/v12/testdata/tfconfigs/config1.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json create mode 100644 pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json create mode 100644 pkg/iac-providers/terraform/v12/testdata/tfjson/modulconfigs.json diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index 4f1271350..5c85c449a 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -12,19 +12,16 @@ import ( "go.uber.org/zap" "github.com/accurics/terrascan/pkg/iac-providers/output" - "github.com/accurics/terrascan/pkg/utils" +) + +var ( + errDirEmptyTFConfig = fmt.Errorf("directory has no terraform files") ) // LoadIacDir starts traversing from the given rootDir and traverses through // all the descendant modules present to create an output list of all the // resources present in rootDir and descendant modules -func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceConfigs, err error) { - - // get absolute path - absRootDir, err := utils.GetAbsPath(rootDir) - if err != nil { - 
return allResourcesConfig, err - } +func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResourceConfigs, err error) { // create a new config parser parser := hclConfigs.NewParser(afero.NewOsFs()) @@ -32,13 +29,13 @@ func (*TfV12) LoadIacDir(rootDir string) (allResourcesConfig output.AllResourceC // check if the directory has any tf config files (.tf or .tf.json) if !parser.IsConfigDir(absRootDir) { zap.S().Errorf("directory '%s' has no terraform config files", absRootDir) - return allResourcesConfig, fmt.Errorf("directory has no terraform files") + return allResourcesConfig, errDirEmptyTFConfig } // load root config directory rootMod, diags := parser.LoadConfigDir(absRootDir) if diags.HasErrors() { - zap.S().Errorf("failed to load terraform config dir '%s'. error:\n%+v\n", rootDir, diags) + zap.S().Errorf("failed to load terraform config dir '%s'. error:\n%+v\n", absRootDir, diags) return allResourcesConfig, fmt.Errorf("failed to load terraform allResourcesConfig dir") } diff --git a/pkg/iac-providers/terraform/v12/load-dir_test.go b/pkg/iac-providers/terraform/v12/load-dir_test.go new file mode 100644 index 000000000..1fa880dc0 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/load-dir_test.go @@ -0,0 +1,77 @@ +package tfv12 + +import ( + "bytes" + "encoding/json" + "io/ioutil" + "reflect" + "testing" + + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + +func TestLoadIacDir(t *testing.T) { + + table := []struct { + name string + dirPath string + tfv12 TfV12 + want output.AllResourceConfigs + wantErr error + }{ + { + name: "invalid dirPath", + dirPath: "not-there", + tfv12: TfV12{}, + wantErr: errDirEmptyTFConfig, + }, + { + name: "empty config", + dirPath: "./testdata/testfile", + tfv12: TfV12{}, + wantErr: errDirEmptyTFConfig, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + _, gotErr := tt.tfv12.LoadIacDir(tt.dirPath) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + }) + } + + table2 := []struct { + name string + tfConfigDir string + tfJSONFile string + tfv12 TfV12 + wantErr error + }{ + { + name: "config1", + tfConfigDir: "./testdata/tfconfigs", + tfJSONFile: "testdata/tfjson/fullconfig.json", + tfv12: TfV12{}, + wantErr: nil, + }, + } + + for _, tt := range table2 { + t.Run(tt.name, func(t *testing.T) { + got, gotErr := tt.tfv12.LoadIacDir(tt.tfConfigDir) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + + gotBytes, _ := json.MarshalIndent(got, "", " ") + gotBytes = append(gotBytes, []byte{'\n'}...) 
+ wantBytes, _ := ioutil.ReadFile(tt.tfJSONFile) + + if !bytes.Equal(bytes.TrimSpace(gotBytes), bytes.TrimSpace(wantBytes)) { + t.Errorf("got '%v', want: '%v'", string(gotBytes), string(wantBytes)) + } + }) + } +} diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index 667c2b5f7..3b2206d4a 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -8,17 +8,14 @@ import ( "go.uber.org/zap" "github.com/accurics/terrascan/pkg/iac-providers/output" - "github.com/accurics/terrascan/pkg/utils" ) -// LoadIacFile parses the given terraform file from the given file path -func (*TfV12) LoadIacFile(filePath string) (allResourcesConfig output.AllResourceConfigs, err error) { +var ( + errFailedLoadConfigFile = fmt.Errorf("failed to load config file") +) - // get absolute path - absFilePath, err := utils.GetAbsPath(filePath) - if err != nil { - return allResourcesConfig, err - } +// LoadIacFile parses the given terraform file from the given file path +func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllResourceConfigs, err error) { // new terraform config parser parser := hclConfigs.NewParser(afero.NewOsFs()) @@ -26,11 +23,11 @@ func (*TfV12) LoadIacFile(filePath string) (allResourcesConfig output.AllResourc hclFile, diags := parser.LoadConfigFile(absFilePath) if diags != nil { zap.S().Errorf("failed to load config file '%s'. error:\n%v\n", diags) - return allResourcesConfig, fmt.Errorf("failed to load config file") + return allResourcesConfig, errFailedLoadConfigFile } if hclFile == nil && diags.HasErrors() { zap.S().Errorf("error occured while loading config file. error:\n%v\n", diags) - return allResourcesConfig, fmt.Errorf("failed to load config file") + return allResourcesConfig, errFailedLoadConfigFile } // initialize normalized output diff --git a/pkg/iac-providers/terraform/v12/load-file_test.go b/pkg/iac-providers/terraform/v12/load-file_test.go new file mode 100644 index 000000000..1b795482e --- /dev/null +++ b/pkg/iac-providers/terraform/v12/load-file_test.go @@ -0,0 +1,76 @@ +package tfv12 + +import ( + "bytes" + "encoding/json" + "io/ioutil" + "reflect" + "testing" + + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + +func TestLoadIacFile(t *testing.T) { + + table := []struct { + name string + filePath string + tfv12 TfV12 + want output.AllResourceConfigs + wantErr error + }{ + { + name: "invalid filepath", + filePath: "not-there", + tfv12: TfV12{}, + wantErr: errFailedLoadConfigFile, + }, + { + name: "empty config", + filePath: "./testdata/testfile", + tfv12: TfV12{}, + wantErr: nil, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + _, gotErr := tt.tfv12.LoadIacFile(tt.filePath) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + }) + } + + table2 := []struct { + name string + tfConfigFile string + tfJSONFile string + tfv12 TfV12 + wantErr error + }{ + { + name: "config1", + tfConfigFile: "./testdata/tfconfigs/config1.tf", + tfJSONFile: "./testdata/tfjson/config1.json", + tfv12: TfV12{}, + wantErr: nil, + }, + } + + for _, tt := range table2 { + t.Run(tt.name, func(t *testing.T) { + got, gotErr := tt.tfv12.LoadIacFile(tt.tfConfigFile) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + + gotBytes, _ := json.MarshalIndent(got, "", " ") + gotBytes = 
append(gotBytes, []byte{'\n'}...) + wantBytes, _ := ioutil.ReadFile(tt.tfJSONFile) + if !reflect.DeepEqual(bytes.TrimSpace(gotBytes), bytes.TrimSpace(wantBytes)) { + t.Errorf("unexpected error; got '%v', want: '%v'", string(gotBytes), string(wantBytes)) + } + }) + } +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/main.tf new file mode 100644 index 000000000..c047b6469 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/main.tf @@ -0,0 +1,84 @@ +module "sub-cloudfront" { + source = "./sub-cloudfront" +} + + +resource "aws_cloudfront_distribution" "s3-distribution-TLS-v1" { + origin { + domain_name = "aws_s3_bucket.b.bucket_regional_domain_name" + origin_id = "local.s3_origin_id" + + s3_origin_config { + origin_access_identity = "origin-access-identity/cloudfront/ABCDEFG1234567" + } + } + + enabled = true + + default_cache_behavior { + allowed_methods = ["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] + cached_methods = ["GET", "HEAD"] + target_origin_id = "local.s3_origin_id" + + forwarded_values { + query_string = false + + cookies { + forward = "none" + } + } + viewer_protocol_policy = "https-only" + } + + ordered_cache_behavior { + path_pattern = "/content/immutable/*" + allowed_methods = ["GET", "HEAD", "OPTIONS"] + cached_methods = ["GET", "HEAD", "OPTIONS"] + target_origin_id = "local.s3_origin_id" + + forwarded_values { + query_string = false + headers = ["Origin"] + + cookies { + forward = "none" + } + } + + compress = true + viewer_protocol_policy = "allow-all" + } + + ordered_cache_behavior { + path_pattern = "/content/*" + allowed_methods = ["GET", "HEAD", "OPTIONS"] + cached_methods = ["GET", "HEAD"] + target_origin_id = "local.s3_origin_id" + + forwarded_values { + query_string = false + + cookies { + forward = "none" + } + } + + viewer_protocol_policy = "allow-all" + } + + restrictions { + geo_restriction { + restriction_type = "whitelist" + locations = ["US", "CA", "GB", "DE"] + } + } + + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1" #expected version is TLSv1.1 or TLSv1.2 + } +} + +locals { + s3_origin_id = "myS3Origin" +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf new file mode 100644 index 000000000..58dc7aab1 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf @@ -0,0 +1,9 @@ +resource "aws_kms_key" "kmsKeyDisabled" { + description = "KMS key 2" + is_enabled = false + tags = { + Name = "kmsKeyDisabled" + Setup = "self-healing" + } +} + diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf new file mode 100644 index 000000000..4f3bd49f6 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf @@ -0,0 +1,29 @@ +resource "aws_cloudtrail" "missing-multi-region" { + name = "tf-trail-foobar" + s3_bucket_name = "some-s3-bucket" + s3_key_prefix = "prefix" + include_global_service_events = false +} + +resource "aws_cloudtrail" "false-multi-region" { + name = "tf-trail-foobar" + s3_bucket_name = "some-s3-bucket" + s3_key_prefix = "prefix" + include_global_service_events = false + is_multi_region_trail = false +} + +resource 
"aws_cloudtrail" "missing-kms" { + name = "missing-kms" + s3_bucket_name = "some-s3-bucket" + s3_key_prefix = "prefix" + include_global_service_events = false +} + +resource "aws_cloudtrail" "with-kms" { + name = "with-kms" + s3_bucket_name = "some-s3-bucket" + s3_key_prefix = "prefix" + include_global_service_events = false + kms_key_id = "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/main.tf new file mode 100644 index 000000000..0c3777f31 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/main.tf @@ -0,0 +1,5 @@ +resource "aws_ecs_task_definition" "instanceNotInVpc" { + family = "service" + network_mode = "bridge" + container_definitions = file("ecs/service.json") +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/service.json b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/service.json new file mode 100644 index 000000000..1a7071942 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/service.json @@ -0,0 +1,28 @@ +[ + { + "name": "first", + "image": "service-first", + "cpu": 10, + "memory": 512, + "essential": true, + "portMappings": [ + { + "containerPort": 80, + "hostPort": 80 + } + ] + }, + { + "name": "second", + "image": "service-second", + "cpu": 10, + "memory": 256, + "essential": true, + "portMappings": [ + { + "containerPort": 443, + "hostPort": 443 + } + ] + } + ] diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf new file mode 100644 index 000000000..3fc186b0c --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf @@ -0,0 +1,31 @@ +resource "aws_efs_file_system" "efsNotEncrypted" { + creation_token = "my-product" + + tags = { + Name = "not-encrypted" + } +} + + +resource "aws_efs_file_system" "efsEncryptedFalse" { + creation_token = "my-product" + + tags = { + Name = "encrypted" + } + + encrypted = false + +} + +resource "aws_efs_file_system" "efsEncryptedWithNoKms" { + creation_token = "my-product" + + tags = { + Name = "encrypted" + } + + encrypted = true + +} + diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf new file mode 100644 index 000000000..e573c7a3e --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf @@ -0,0 +1,30 @@ +resource "aws_elasticache_cluster" "noMemcachedInElastiCache" { + cluster_id = "cluster-example" + engine = "memcached" + node_type = "cache.m4.large" + num_cache_nodes = 2 + parameter_group_name = "default.memcached1.4" + port = 11211 +} + + +resource "aws_elasticache_cluster" "redis_version_compliant" { + cluster_id = "cluster-example" + engine = "redis" + node_type = "cache.m4.large" + num_cache_nodes = 1 + parameter_group_name = "default.redis3.2" + engine_version = "3.2.10" + port = 6379 +} + + +resource "aws_elasticache_cluster" "redis_version_non_compliant" { + cluster_id = "cluster-example" + engine = "redis" + node_type = "cache.m4.large" + num_cache_nodes = 1 + parameter_group_name = "default.redis3.2" + engine_version = "3.2.0" + port = 6379 +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf 
new file mode 100644 index 000000000..faf404768 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf @@ -0,0 +1,23 @@ +resource "aws_load_balancer_policy" "elbWeakCipher" { + load_balancer_name = "some-name" + policy_name = "wu-tang-ssl" + policy_type_name = "SSLNegotiationPolicyType" + + policy_attribute { + name = "ECDHE-RSA-RC4-SHA" + value = "true" + } +} + +resource "aws_load_balancer_policy" "elbSsLTsLProtocol" { + load_balancer_name = "some-name" + policy_name = "wu-tang-ssl" + policy_type_name = "SSLNegotiationPolicyType" + + policy_attribute { + name = "Protocol-SSLv3" + value = "true" + } +} + + diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/guardduty/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/guardduty/main.tf new file mode 100644 index 000000000..99ab53f2b --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/guardduty/main.tf @@ -0,0 +1,3 @@ +resource "aws_guardduty_detector" "gaurdDutyDisabled" { + enable = false +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/iam/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/iam/main.tf new file mode 100644 index 000000000..c993f2ff0 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/iam/main.tf @@ -0,0 +1,5 @@ +resource "aws_iam_access_key" "noAccessKeyForRootAccount" { + user = "root" + pgp_key = "keybase:some_person_that_exists" + status = "Inactive" +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf new file mode 100644 index 000000000..9106214de --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf @@ -0,0 +1,48 @@ +resource "aws_kinesis_stream" "kinesisEncryptedWithKms" { + name = "kinesisEncryptedWithKms" + shard_count = 1 + retention_period = 48 + + shard_level_metrics = [ + "IncomingBytes", + "OutgoingBytes", + ] + + encryption_type = "KMS" + kms_key_id = "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" + + tags = { + Environment = "kinesisEncryptedWithKms" + } +} + +resource "aws_kinesis_stream" "unencrypted_aws_kinesis_stream" { + name = "terraform-kinesis-test" + shard_count = 1 + retention_period = 48 + + shard_level_metrics = [ + "IncomingBytes", + "OutgoingBytes", + ] + + tags = { + Environment = "test" + } +} + + + +resource "aws_kinesis_stream" "kinesis_encrypted_but_no_kms_provided" { + name = "kinesisEncryptedWithKms" + shard_count = 1 + retention_period = 48 + + shard_level_metrics = [ + "IncomingBytes", + "OutgoingBytes", + ] + + encryption_type = "KMS" +} + diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/main.tf new file mode 100644 index 000000000..ede7da556 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/main.tf @@ -0,0 +1,51 @@ +provider "aws" { + region = "us-east-1" +} + +module "cloudfront" { + source = "./cloudfront" +} + +module "cloudtrail" { + source = "./cloudtrail" +} + +module "ecs" { + source = "./ecs" +} + +module "efs" { + source = "./efs" +} + +module "elb" { + source = "./elb" +} + +module "guardduty" { + source = "./guardduty" +} + +module "iam" { + source = "./iam" +} + +module "kinesis" { + source = "./kinesis" +} + +module "s3" { + source = "./s3" +} + +module "sg" { + source = "./sg" +} + +module "sqs" { + source = "./sqs" +} + +module "elasticcache" { + 
source = "./elasticcache" +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf new file mode 100644 index 000000000..1df1f2bd3 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf @@ -0,0 +1,85 @@ +resource "aws_s3_bucket" "noS3BucketSseRules" { + bucket = "mybucket" + acl = "private" + + tags = { + Name = "nos3BucketSseRules" + Environment = "Dev" + } +} + + +resource "aws_s3_bucket" "s3BucketSseRulesWithKmsNull" { + bucket = "mybucket" + acl = "private" + + tags = { + Name = "s3BucketSseRulesWithNoKms" + Environment = "Dev" + } + + server_side_encryption_configuration { + rule { + apply_server_side_encryption_by_default { + sse_algorithm = "aws:kms" + } + } + } +} + +resource "aws_s3_bucket" "s3BucketNoWebsiteIndexDoc" { + bucket = "website" + acl = "public-read" + + server_side_encryption_configuration { + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = "some-key-id" + sse_algorithm = "aws:kms" + } + } + } + + website { + index_document = "index.html" + error_document = "error.html" + } +} + +resource "aws_s3_bucket" "s3VersioningMfaFalse" { + bucket = "tf-test" + + server_side_encryption_configuration { + rule { + apply_server_side_encryption_by_default { + kms_master_key_id = "some-key-id" + sse_algorithm = "aws:kms" + } + } + } + + versioning { + enabled = true + mfa_delete = false + } +} + +resource "aws_s3_bucket" "allUsersReadAccess" { + bucket = "my-tf-test-bucket" + acl = "public-read" +} + +resource "aws_s3_bucket" "authUsersReadAccess" { + bucket = "my-tf-test-bucket" + acl = "authenticated-read" +} + +resource "aws_s3_bucket" "allUsersWriteAccess" { + bucket = "my-tf-test-bucket" + acl = "public-write" +} + +resource "aws_s3_bucket" "allUsersReadWriteAccess" { + bucket = "my-tf-test-bucket" + acl = "public-read-write" +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf new file mode 100644 index 000000000..5143018b9 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf @@ -0,0 +1,436 @@ +resource "aws_security_group" "acme_web" { + name = "acme_web" + description = "Used in the terraform" + vpc_id = "some_dummy_vpc" + + tags = { + Name = "acme_web" + } + + # SSH access from anywhere + ingress { + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "19.16.0.0/24"] + } + + # HTTP access from the VPC + ingress { + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + # HTTPS access from the VPC + ingress { + from_port = 443 + to_port = 443 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + from_port = 4505 + to_port = 4505 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 4506 + from_port = 4506 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 3020 + from_port = 3020 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 61621 + from_port = 61621 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 7001 + from_port = 7001 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 9000 + from_port = 9000 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + 
+ ingress { + to_port = 8000 + from_port = 8000 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 8080 + from_port = 8080 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 636 + from_port = 636 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 1434 + from_port = 1434 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 1434 + from_port = 1434 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 135 + from_port = 135 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 1433 + from_port = 1433 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 11214 + from_port = 11214 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 11214 + from_port = 11214 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 11215 + from_port = 11215 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 11215 + from_port = 11215 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 27018 + from_port = 27018 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 3306 + from_port = 3306 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 137 + from_port = 137 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 137 + from_port = 137 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 138 + from_port = 138 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 138 + from_port = 138 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 139 + from_port = 139 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 139 + from_port = 139 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 2484 + from_port = 2484 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 2484 + from_port = 2484 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 5432 + from_port = 5432 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 5432 + from_port = 5432 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 3000 + from_port = 3000 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 8140 + from_port = 8140 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 161 + from_port = 161 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 2382 + from_port = 2382 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 2383 + from_port = 2383 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 9090 + from_port = 9090 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 3389 + from_port = 3389 + 
protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 9042 + from_port = 9042 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 7000 + from_port = 7000 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 7199 + from_port = 7199 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 61620 + from_port = 61620 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 8888 + from_port = 8888 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 9160 + from_port = 9160 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 9200 + from_port = 9200 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 9300 + from_port = 9300 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 389 + from_port = 389 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 389 + from_port = 389 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 11211 + from_port = 11211 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 11211 + from_port = 11211 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 27017 + from_port = 27017 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 1521 + from_port = 1521 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 2483 + from_port = 2483 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 2483 + from_port = 2483 + protocol = "udp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 6379 + from_port = 6379 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 0 + from_port = 6379 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + ingress { + to_port = 0 + from_port = 4506 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } + + # outbound internet access + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] + } +} + +resource "aws_security_group" "defaultSGNotRestrictsAllTraffic" { + name = "default" + description = "Used in the terraform" + vpc_id = "some_dummy_vpc" + + tags = { + Name = "default" + } +} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf new file mode 100644 index 000000000..db96848ea --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf @@ -0,0 +1,21 @@ +resource "aws_sqs_queue" "sqsSseDisabled" { + name = "terraform-example-queue" +} + +resource "aws_sqs_queue" "sqsQueueExposed" { + name = "terraform-example-queue" + kms_master_key_id = "alias/aws/sqs" + kms_data_key_reuse_period_seconds = 300 + policy = < Date: Sun, 26 Jul 2020 23:36:29 +0530 Subject: [PATCH 032/188] add codecov integration to generate code coverage reports --- .github/workflows/gobuild.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/gobuild.yml b/.github/workflows/gobuild.yml index 
a03929b87..71f4e2f3f 100644 --- a/.github/workflows/gobuild.yml +++ b/.github/workflows/gobuild.yml @@ -29,3 +29,5 @@ jobs: - name: Run unit tests run: | make unit-tests + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v1 From efaf11b497555aebec9a8c374567ab13ecb24488 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 00:22:02 +0530 Subject: [PATCH 033/188] add codecov yaml --- .codecov.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 .codecov.yml diff --git a/.codecov.yml b/.codecov.yml new file mode 100644 index 000000000..7c64be9ba --- /dev/null +++ b/.codecov.yml @@ -0,0 +1,6 @@ +comment: + layout: "reach, diff, flags, files" + behavior: default + require_changes: false # if true: only post the comment if coverage changes + require_base: no # [yes :: must have a base report to post] + require_head: yes # [yes :: must have a head report to post] From f2fc35dd82ab70bfad641c216d56dc3b9a13cd77 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 10:00:41 +0530 Subject: [PATCH 034/188] Update log message to indicate path of the built terrascan binary --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index c0e897eb6..36c1c7d5d 100644 --- a/Makefile +++ b/Makefile @@ -1,7 +1,7 @@ GITCOMMIT := $(shell git rev-parse --short HEAD 2>/dev/null) BUILD_FLAGS := -v -ldflags "-w -s" -BUILD_DIR = bin +BUILD_DIR = ./bin BINARY_NAME = terrascan @@ -13,7 +13,7 @@ default: build build: clean @mkdir -p $(BUILD_DIR) > /dev/null go build ${BUILD_FLAGS} -o ${BUILD_DIR}/${BINARY_NAME} cmd/terrascan/main.go - @echo "terrascan binary created in ${BUILD_DIR} directory" + @echo "binary created at ${BUILD_DIR}/${BINARY_NAME}" # clean build From e7730ba2a87939048cd35337c03a2b23f6c2b2a5 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 10:15:39 +0530 Subject: [PATCH 035/188] add script for running static code analysis --- go.mod | 1 + go.sum | 13 +++++++++++++ scripts/staticcheck.sh | 10 ++++++++++ 3 files changed, 24 insertions(+) create mode 100755 scripts/staticcheck.sh diff --git a/go.mod b/go.mod index 8a419db61..ddb8cad02 100644 --- a/go.mod +++ b/go.mod @@ -11,4 +11,5 @@ require ( go.uber.org/zap v1.9.1 golang.org/x/lint v0.0.0-20200302205851-738671d3881b // indirect golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d // indirect + honnef.co/go/tools v0.0.1-2020.1.4 // indirect ) diff --git a/go.sum b/go.sum index cd356d14d..237db5eb5 100644 --- a/go.sum +++ b/go.sum @@ -23,6 +23,7 @@ github.com/Azure/go-autorest/autorest/validation v0.2.0/go.mod h1:3EEqHnBxQGHXRY github.com/Azure/go-autorest/logger v0.1.0/go.mod h1:oExouG+K6PryycPJfVSxi/koC6LSNgds39diKLz7Vrc= github.com/Azure/go-autorest/tracing v0.5.0/go.mod h1:r/s2XiOKccPW3HrqB+W0TQzfbtp2fGCgRFtBroKn4Dk= github.com/Azure/go-ntlmssp v0.0.0-20180810175552-4a21cbd618b4/go.mod h1:chxPXzSsl7ZWRAuOIE23GDNzjWuZquvFlgA8xmpunjU= +github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4= @@ -112,6 +113,7 @@ github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO github.com/google/martian v2.1.0+incompatible/go.mod 
h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -178,6 +180,7 @@ github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVY github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= @@ -249,6 +252,7 @@ github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8 github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= @@ -300,6 +304,7 @@ golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190222235706-ffb98f73852f/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= @@ -317,7 +322,9 @@ golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHl golang.org/x/lint v0.0.0-20200302205851-738671d3881b h1:Wh+f8QHJXR411sJR8/vRBTZ7YapZaRvUcLFFJhusH0k= golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod 
h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -384,11 +391,13 @@ golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgw golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0 h1:Dh6fw+p6FyRl5x/FvNswO1ji0lIGzm3KP8Y9VkS9PTE= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d h1:7k9BKfwmdbykG6l5ztniTrH0TP25yel8O7l26/yovMU= golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= @@ -416,6 +425,7 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= @@ -425,5 +435,8 @@ gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2020.1.4 
h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8= +honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/scripts/staticcheck.sh b/scripts/staticcheck.sh new file mode 100755 index 000000000..0571f1416 --- /dev/null +++ b/scripts/staticcheck.sh @@ -0,0 +1,10 @@ +#!/bin/bash + +set -o errexit +set -o nounset +set -o pipefail + + +export GO111MODULE=on +go get honnef.co/go/tools/cmd/staticcheck +staticcheck -f stylish ./... From ad2e4c11119d07ff175eb03020ecfd1eb8c47456 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 10:36:25 +0530 Subject: [PATCH 036/188] add help, staticcheck and test commands to Makefile --- Makefile | 34 +++++++++++++++++++++++++++++++--- 1 file changed, 31 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 36c1c7d5d..831f87d4a 100644 --- a/Makefile +++ b/Makefile @@ -6,8 +6,23 @@ BINARY_NAME = terrascan # default -default: build - +default: help + + +# help +help: + @echo "usage: make [command]\ncommands:" + @echo "build\n\tbuild terrascan binary" + @echo "cicd\n\tsimulate CI/CD pipeline locally" + @echo "clean\n\tclean up build" + @echo "gofmt\n\tvalidate gofmt" + @echo "golint\n\tvalidate golint" + @echo "gomodverify\n\tverify go modules" + @echo "govet\n\tvalidate govet" + @echo "staticcheck\n\trun static code analysis" + @echo "test\n\texecute unit and integration tests" + @echo "unit-tests\n\texecute unit tests" + @echo "validate\n\trun all validations" # build terrascan binary build: clean @@ -21,8 +36,16 @@ clean: @rm -rf $(BUILD_DIR) +# run all cicd steps +cicd: validate build test + + +# run all unit and integration tests +test: unit-tests + + # run all validation tests -validate: gofmt govet golint gomodverify +validate: gofmt govet golint gomodverify staticcheck # gofmt validation @@ -45,6 +68,11 @@ gomodverify: go mod verify +# static code analysis +staticcheck: + ./scripts/staticcheck.sh + + # run unit tests unit-tests: ./scripts/generate-coverage.sh From e899e91fb4dddd2ff591834736ce891a85bf84fc Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 10:40:37 +0530 Subject: [PATCH 037/188] removing staticcheck from pipeline for now --- Makefile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Makefile b/Makefile index 831f87d4a..6fdffeca7 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,7 @@ test: unit-tests # run all validation tests -validate: gofmt govet golint gomodverify staticcheck +validate: gofmt govet golint gomodverify # gofmt validation From b2a8a46ea5bb5242fe32cfe4aa47a2b795a3ef18 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 15:17:20 +0530 Subject: [PATCH 038/188] add staticcheck to build pipeline --- Makefile | 2 +- scripts/staticcheck.sh | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 6fdffeca7..831f87d4a 100644 --- a/Makefile +++ b/Makefile @@ -45,7 +45,7 @@ test: unit-tests # run all validation tests -validate: gofmt govet golint gomodverify +validate: gofmt govet golint gomodverify staticcheck # gofmt validation diff --git a/scripts/staticcheck.sh b/scripts/staticcheck.sh index 0571f1416..0a9d8930a 100755 --- a/scripts/staticcheck.sh +++ b/scripts/staticcheck.sh @@ -4,7 +4,7 @@ set -o errexit set -o nounset set -o pipefail - export GO111MODULE=on -go get honnef.co/go/tools/cmd/staticcheck +export PATH=$PATH:$(go env GOPATH)/bin +go get -u honnef.co/go/tools/cmd/staticcheck 
staticcheck -f stylish ./... From 60d243d4fc284dd33963817334c90b9ad78a25c7 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 17:35:48 +0530 Subject: [PATCH 039/188] adding more unit tests for terraform v12 --- .../terraform/v12/load-dir_test.go | 7 + .../testdata/moduleconfigs/cloudtrail/main.tf | 23 - .../v12/testdata/moduleconfigs/efs/main.tf | 24 - .../moduleconfigs/elasticcache/main.tf | 22 - .../v12/testdata/moduleconfigs/elb/main.tf | 13 - .../testdata/moduleconfigs/kinesis/main.tf | 31 - .../v12/testdata/moduleconfigs/s3/main.tf | 76 - .../v12/testdata/moduleconfigs/sg/main.tf | 411 ------ .../v12/testdata/moduleconfigs/sqs/main.tf | 4 - .../v12/testdata/tfjson/modulconfigs.json | 1314 ----------------- .../v12/testdata/tfjson/moduleconfigs.json | 369 +++++ 11 files changed, 376 insertions(+), 1918 deletions(-) delete mode 100644 pkg/iac-providers/terraform/v12/testdata/tfjson/modulconfigs.json create mode 100644 pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json diff --git a/pkg/iac-providers/terraform/v12/load-dir_test.go b/pkg/iac-providers/terraform/v12/load-dir_test.go index 1fa880dc0..f4b747a43 100644 --- a/pkg/iac-providers/terraform/v12/load-dir_test.go +++ b/pkg/iac-providers/terraform/v12/load-dir_test.go @@ -56,6 +56,13 @@ func TestLoadIacDir(t *testing.T) { tfv12: TfV12{}, wantErr: nil, }, + { + name: "module directory", + tfConfigDir: "./testdata/moduleconfigs", + tfJSONFile: "./testdata/tfjson/moduleconfigs.json", + tfv12: TfV12{}, + wantErr: nil, + }, } for _, tt := range table2 { diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf index 4f3bd49f6..651974226 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf @@ -4,26 +4,3 @@ resource "aws_cloudtrail" "missing-multi-region" { s3_key_prefix = "prefix" include_global_service_events = false } - -resource "aws_cloudtrail" "false-multi-region" { - name = "tf-trail-foobar" - s3_bucket_name = "some-s3-bucket" - s3_key_prefix = "prefix" - include_global_service_events = false - is_multi_region_trail = false -} - -resource "aws_cloudtrail" "missing-kms" { - name = "missing-kms" - s3_bucket_name = "some-s3-bucket" - s3_key_prefix = "prefix" - include_global_service_events = false -} - -resource "aws_cloudtrail" "with-kms" { - name = "with-kms" - s3_bucket_name = "some-s3-bucket" - s3_key_prefix = "prefix" - include_global_service_events = false - kms_key_id = "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab" -} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf index 3fc186b0c..94833205f 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf @@ -5,27 +5,3 @@ resource "aws_efs_file_system" "efsNotEncrypted" { Name = "not-encrypted" } } - - -resource "aws_efs_file_system" "efsEncryptedFalse" { - creation_token = "my-product" - - tags = { - Name = "encrypted" - } - - encrypted = false - -} - -resource "aws_efs_file_system" "efsEncryptedWithNoKms" { - creation_token = "my-product" - - tags = { - Name = "encrypted" - } - - encrypted = true - -} - diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf 
b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf index e573c7a3e..280c0a859 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf @@ -6,25 +6,3 @@ resource "aws_elasticache_cluster" "noMemcachedInElastiCache" { parameter_group_name = "default.memcached1.4" port = 11211 } - - -resource "aws_elasticache_cluster" "redis_version_compliant" { - cluster_id = "cluster-example" - engine = "redis" - node_type = "cache.m4.large" - num_cache_nodes = 1 - parameter_group_name = "default.redis3.2" - engine_version = "3.2.10" - port = 6379 -} - - -resource "aws_elasticache_cluster" "redis_version_non_compliant" { - cluster_id = "cluster-example" - engine = "redis" - node_type = "cache.m4.large" - num_cache_nodes = 1 - parameter_group_name = "default.redis3.2" - engine_version = "3.2.0" - port = 6379 -} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf index faf404768..f841a1ab3 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf @@ -8,16 +8,3 @@ resource "aws_load_balancer_policy" "elbWeakCipher" { value = "true" } } - -resource "aws_load_balancer_policy" "elbSsLTsLProtocol" { - load_balancer_name = "some-name" - policy_name = "wu-tang-ssl" - policy_type_name = "SSLNegotiationPolicyType" - - policy_attribute { - name = "Protocol-SSLv3" - value = "true" - } -} - - diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf index 9106214de..ed2f59135 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf @@ -15,34 +15,3 @@ resource "aws_kinesis_stream" "kinesisEncryptedWithKms" { Environment = "kinesisEncryptedWithKms" } } - -resource "aws_kinesis_stream" "unencrypted_aws_kinesis_stream" { - name = "terraform-kinesis-test" - shard_count = 1 - retention_period = 48 - - shard_level_metrics = [ - "IncomingBytes", - "OutgoingBytes", - ] - - tags = { - Environment = "test" - } -} - - - -resource "aws_kinesis_stream" "kinesis_encrypted_but_no_kms_provided" { - name = "kinesisEncryptedWithKms" - shard_count = 1 - retention_period = 48 - - shard_level_metrics = [ - "IncomingBytes", - "OutgoingBytes", - ] - - encryption_type = "KMS" -} - diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf index 1df1f2bd3..8ab3a4082 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf @@ -7,79 +7,3 @@ resource "aws_s3_bucket" "noS3BucketSseRules" { Environment = "Dev" } } - - -resource "aws_s3_bucket" "s3BucketSseRulesWithKmsNull" { - bucket = "mybucket" - acl = "private" - - tags = { - Name = "s3BucketSseRulesWithNoKms" - Environment = "Dev" - } - - server_side_encryption_configuration { - rule { - apply_server_side_encryption_by_default { - sse_algorithm = "aws:kms" - } - } - } -} - -resource "aws_s3_bucket" "s3BucketNoWebsiteIndexDoc" { - bucket = "website" - acl = "public-read" - - server_side_encryption_configuration { - rule { - apply_server_side_encryption_by_default { - kms_master_key_id = 
"some-key-id" - sse_algorithm = "aws:kms" - } - } - } - - website { - index_document = "index.html" - error_document = "error.html" - } -} - -resource "aws_s3_bucket" "s3VersioningMfaFalse" { - bucket = "tf-test" - - server_side_encryption_configuration { - rule { - apply_server_side_encryption_by_default { - kms_master_key_id = "some-key-id" - sse_algorithm = "aws:kms" - } - } - } - - versioning { - enabled = true - mfa_delete = false - } -} - -resource "aws_s3_bucket" "allUsersReadAccess" { - bucket = "my-tf-test-bucket" - acl = "public-read" -} - -resource "aws_s3_bucket" "authUsersReadAccess" { - bucket = "my-tf-test-bucket" - acl = "authenticated-read" -} - -resource "aws_s3_bucket" "allUsersWriteAccess" { - bucket = "my-tf-test-bucket" - acl = "public-write" -} - -resource "aws_s3_bucket" "allUsersReadWriteAccess" { - bucket = "my-tf-test-bucket" - acl = "public-read-write" -} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf index 5143018b9..c61772631 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf @@ -15,407 +15,6 @@ resource "aws_security_group" "acme_web" { cidr_blocks = ["0.0.0.0/0", "19.16.0.0/24"] } - # HTTP access from the VPC - ingress { - from_port = 80 - to_port = 80 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - # HTTPS access from the VPC - ingress { - from_port = 443 - to_port = 443 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - from_port = 4505 - to_port = 4505 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 4506 - from_port = 4506 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 3020 - from_port = 3020 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 61621 - from_port = 61621 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 7001 - from_port = 7001 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 9000 - from_port = 9000 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 8000 - from_port = 8000 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 8080 - from_port = 8080 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 636 - from_port = 636 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 1434 - from_port = 1434 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 1434 - from_port = 1434 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 135 - from_port = 135 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 1433 - from_port = 1433 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 11214 - from_port = 11214 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 11214 - from_port = 11214 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 11215 - from_port = 11215 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] 
- } - - ingress { - to_port = 11215 - from_port = 11215 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 27018 - from_port = 27018 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 3306 - from_port = 3306 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 137 - from_port = 137 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 137 - from_port = 137 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 138 - from_port = 138 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 138 - from_port = 138 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 139 - from_port = 139 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 139 - from_port = 139 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 2484 - from_port = 2484 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 2484 - from_port = 2484 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 5432 - from_port = 5432 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 5432 - from_port = 5432 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 3000 - from_port = 3000 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 8140 - from_port = 8140 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 161 - from_port = 161 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 2382 - from_port = 2382 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 2383 - from_port = 2383 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 9090 - from_port = 9090 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 3389 - from_port = 3389 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 9042 - from_port = 9042 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 7000 - from_port = 7000 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 7199 - from_port = 7199 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 61620 - from_port = 61620 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 8888 - from_port = 8888 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 9160 - from_port = 9160 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 9200 - from_port = 9200 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 9300 - from_port = 9300 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 389 - from_port = 389 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 389 - from_port = 389 - 
protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 11211 - from_port = 11211 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 11211 - from_port = 11211 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 27017 - from_port = 27017 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 1521 - from_port = 1521 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 2483 - from_port = 2483 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 2483 - from_port = 2483 - protocol = "udp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 6379 - from_port = 6379 - protocol = "tcp" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 0 - from_port = 6379 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - - ingress { - to_port = 0 - from_port = 4506 - protocol = "-1" - cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] - } - # outbound internet access egress { from_port = 0 @@ -424,13 +23,3 @@ resource "aws_security_group" "acme_web" { cidr_blocks = ["0.0.0.0/0", "192.164.0.0/24"] } } - -resource "aws_security_group" "defaultSGNotRestrictsAllTraffic" { - name = "default" - description = "Used in the terraform" - vpc_id = "some_dummy_vpc" - - tags = { - Name = "default" - } -} diff --git a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf index db96848ea..a6753377d 100644 --- a/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf +++ b/pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf @@ -1,7 +1,3 @@ -resource "aws_sqs_queue" "sqsSseDisabled" { - name = "terraform-example-queue" -} - resource "aws_sqs_queue" "sqsQueueExposed" { name = "terraform-example-queue" kms_master_key_id = "alias/aws/sqs" diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/modulconfigs.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/modulconfigs.json deleted file mode 100644 index 01c2721e3..000000000 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/modulconfigs.json +++ /dev/null @@ -1,1314 +0,0 @@ -{ - "aws_cloudfront_distribution": [ - { - "id": "aws_cloudfront_distribution.s3-distribution-TLS-v1", - "name": "s3-distribution-TLS-v1", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/main.tf", - "type": "aws_cloudfront_distribution", - "config": { - "default_cache_behavior": [ - { - "default_cache_behavior": { - "allowed_methods": [ - "DELETE", - "GET", - "HEAD", - "OPTIONS", - "PATCH", - "POST", - "PUT" - ], - "cached_methods": [ - "GET", - "HEAD" - ], - "forwarded_values": [ - { - "forwarded_values": { - "cookies": [ - { - "cookies": { - "forward": "none" - } - } - ], - "query_string": false - } - } - ], - "target_origin_id": "local.s3_origin_id", - "viewer_protocol_policy": "https-only" - } - } - ], - "enabled": true, - "ordered_cache_behavior": [ - { - "ordered_cache_behavior": { - "allowed_methods": [ - "GET", - "HEAD", - "OPTIONS" - ], - "cached_methods": [ - "GET", - "HEAD", - "OPTIONS" - ], - "compress": true, - "forwarded_values": [ - { - "forwarded_values": { - "cookies": [ - { - "cookies": { - "forward": "none" - } - } - ], - "headers": [ - "Origin" - ], - "query_string": false - } - } - ], - "path_pattern": 
"/content/immutable/*", - "target_origin_id": "local.s3_origin_id", - "viewer_protocol_policy": "allow-all" - } - }, - { - "ordered_cache_behavior": { - "allowed_methods": [ - "GET", - "HEAD", - "OPTIONS" - ], - "cached_methods": [ - "GET", - "HEAD" - ], - "forwarded_values": [ - { - "forwarded_values": { - "cookies": [ - { - "cookies": { - "forward": "none" - } - } - ], - "query_string": false - } - } - ], - "path_pattern": "/content/*", - "target_origin_id": "local.s3_origin_id", - "viewer_protocol_policy": "allow-all" - } - } - ], - "origin": [ - { - "origin": { - "domain_name": "aws_s3_bucket.b.bucket_regional_domain_name", - "origin_id": "local.s3_origin_id", - "s3_origin_config": [ - { - "s3_origin_config": { - "origin_access_identity": "origin-access-identity/cloudfront/ABCDEFG1234567" - } - } - ] - } - } - ], - "restrictions": [ - { - "restrictions": { - "geo_restriction": [ - { - "geo_restriction": { - "locations": [ - "US", - "CA", - "GB", - "DE" - ], - "restriction_type": "whitelist" - } - } - ] - } - } - ], - "viewer_certificate": [ - { - "viewer_certificate": { - "cloudfront_default_certificate": true, - "minimum_protocol_version": "TLSv1" - } - } - ] - } - } - ], - "aws_cloudtrail": [ - { - "id": "aws_cloudtrail.missing-multi-region", - "name": "missing-multi-region", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf", - "type": "aws_cloudtrail", - "config": { - "include_global_service_events": false, - "name": "tf-trail-foobar", - "s3_bucket_name": "some-s3-bucket", - "s3_key_prefix": "prefix" - } - }, - { - "id": "aws_cloudtrail.false-multi-region", - "name": "false-multi-region", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf", - "type": "aws_cloudtrail", - "config": { - "include_global_service_events": false, - "is_multi_region_trail": false, - "name": "tf-trail-foobar", - "s3_bucket_name": "some-s3-bucket", - "s3_key_prefix": "prefix" - } - }, - { - "id": "aws_cloudtrail.missing-kms", - "name": "missing-kms", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf", - "type": "aws_cloudtrail", - "config": { - "include_global_service_events": false, - "name": "missing-kms", - "s3_bucket_name": "some-s3-bucket", - "s3_key_prefix": "prefix" - } - }, - { - "id": "aws_cloudtrail.with-kms", - "name": "with-kms", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudtrail/main.tf", - "type": "aws_cloudtrail", - "config": { - "include_global_service_events": false, - "kms_key_id": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "name": "with-kms", - "s3_bucket_name": "some-s3-bucket", - "s3_key_prefix": "prefix" - } - } - ], - "aws_ecs_task_definition": [ - { - "id": "aws_ecs_task_definition.instanceNotInVpc", - "name": "instanceNotInVpc", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/ecs/main.tf", - "type": "aws_ecs_task_definition", - "config": { - "container_definitions": "${file(\"ecs/service.json\")}", - "family": "service", - "network_mode": "bridge" - } - } - ], - "aws_efs_file_system": [ - { - "id": "aws_efs_file_system.efsEncryptedWithNoKms", - "name": "efsEncryptedWithNoKms", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf", - "type": "aws_efs_file_system", - "config": { - "creation_token": "my-product", - "encrypted": true, - "tags": { - "Name": "encrypted" - } - } - }, - { - "id": "aws_efs_file_system.efsNotEncrypted", - "name": "efsNotEncrypted", - 
"source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf", - "type": "aws_efs_file_system", - "config": { - "creation_token": "my-product", - "tags": { - "Name": "not-encrypted" - } - } - }, - { - "id": "aws_efs_file_system.efsEncryptedFalse", - "name": "efsEncryptedFalse", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/efs/main.tf", - "type": "aws_efs_file_system", - "config": { - "creation_token": "my-product", - "encrypted": false, - "tags": { - "Name": "encrypted" - } - } - } - ], - "aws_elasticache_cluster": [ - { - "id": "aws_elasticache_cluster.noMemcachedInElastiCache", - "name": "noMemcachedInElastiCache", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf", - "type": "aws_elasticache_cluster", - "config": { - "cluster_id": "cluster-example", - "engine": "memcached", - "node_type": "cache.m4.large", - "num_cache_nodes": 2, - "parameter_group_name": "default.memcached1.4", - "port": 11211 - } - }, - { - "id": "aws_elasticache_cluster.redis_version_compliant", - "name": "redis_version_compliant", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf", - "type": "aws_elasticache_cluster", - "config": { - "cluster_id": "cluster-example", - "engine": "redis", - "engine_version": "3.2.10", - "node_type": "cache.m4.large", - "num_cache_nodes": 1, - "parameter_group_name": "default.redis3.2", - "port": 6379 - } - }, - { - "id": "aws_elasticache_cluster.redis_version_non_compliant", - "name": "redis_version_non_compliant", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elasticcache/main.tf", - "type": "aws_elasticache_cluster", - "config": { - "cluster_id": "cluster-example", - "engine": "redis", - "engine_version": "3.2.0", - "node_type": "cache.m4.large", - "num_cache_nodes": 1, - "parameter_group_name": "default.redis3.2", - "port": 6379 - } - } - ], - "aws_guardduty_detector": [ - { - "id": "aws_guardduty_detector.gaurdDutyDisabled", - "name": "gaurdDutyDisabled", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/guardduty/main.tf", - "type": "aws_guardduty_detector", - "config": { - "enable": false - } - } - ], - "aws_iam_access_key": [ - { - "id": "aws_iam_access_key.noAccessKeyForRootAccount", - "name": "noAccessKeyForRootAccount", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/iam/main.tf", - "type": "aws_iam_access_key", - "config": { - "pgp_key": "keybase:some_person_that_exists", - "status": "Inactive", - "user": "root" - } - } - ], - "aws_kinesis_stream": [ - { - "id": "aws_kinesis_stream.kinesis_encrypted_but_no_kms_provided", - "name": "kinesis_encrypted_but_no_kms_provided", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf", - "type": "aws_kinesis_stream", - "config": { - "encryption_type": "KMS", - "name": "kinesisEncryptedWithKms", - "retention_period": 48, - "shard_count": 1, - "shard_level_metrics": [ - "IncomingBytes", - "OutgoingBytes" - ] - } - }, - { - "id": "aws_kinesis_stream.kinesisEncryptedWithKms", - "name": "kinesisEncryptedWithKms", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf", - "type": "aws_kinesis_stream", - "config": { - "encryption_type": "KMS", - "kms_key_id": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", - "name": "kinesisEncryptedWithKms", - "retention_period": 48, - "shard_count": 1, - "shard_level_metrics": [ - "IncomingBytes", - "OutgoingBytes" - ], - "tags": { - 
"Environment": "kinesisEncryptedWithKms" - } - } - }, - { - "id": "aws_kinesis_stream.unencrypted_aws_kinesis_stream", - "name": "unencrypted_aws_kinesis_stream", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/kinesis/main.tf", - "type": "aws_kinesis_stream", - "config": { - "name": "terraform-kinesis-test", - "retention_period": 48, - "shard_count": 1, - "shard_level_metrics": [ - "IncomingBytes", - "OutgoingBytes" - ], - "tags": { - "Environment": "test" - } - } - } - ], - "aws_kms_key": [ - { - "id": "aws_kms_key.kmsKeyDisabled", - "name": "kmsKeyDisabled", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf", - "type": "aws_kms_key", - "config": { - "description": "KMS key 2", - "is_enabled": false, - "tags": { - "Name": "kmsKeyDisabled", - "Setup": "self-healing" - } - } - } - ], - "aws_load_balancer_policy": [ - { - "id": "aws_load_balancer_policy.elbWeakCipher", - "name": "elbWeakCipher", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf", - "type": "aws_load_balancer_policy", - "config": { - "load_balancer_name": "some-name", - "policy_attribute": [ - { - "policy_attribute": { - "name": "ECDHE-RSA-RC4-SHA", - "value": "true" - } - } - ], - "policy_name": "wu-tang-ssl", - "policy_type_name": "SSLNegotiationPolicyType" - } - }, - { - "id": "aws_load_balancer_policy.elbSsLTsLProtocol", - "name": "elbSsLTsLProtocol", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/elb/main.tf", - "type": "aws_load_balancer_policy", - "config": { - "load_balancer_name": "some-name", - "policy_attribute": [ - { - "policy_attribute": { - "name": "Protocol-SSLv3", - "value": "true" - } - } - ], - "policy_name": "wu-tang-ssl", - "policy_type_name": "SSLNegotiationPolicyType" - } - } - ], - "aws_s3_bucket": [ - { - "id": "aws_s3_bucket.s3VersioningMfaFalse", - "name": "s3VersioningMfaFalse", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "bucket": "tf-test", - "server_side_encryption_configuration": [ - { - "server_side_encryption_configuration": { - "rule": [ - { - "rule": { - "apply_server_side_encryption_by_default": [ - { - "apply_server_side_encryption_by_default": { - "kms_master_key_id": "some-key-id", - "sse_algorithm": "aws:kms" - } - } - ] - } - } - ] - } - } - ], - "versioning": [ - { - "versioning": { - "enabled": true, - "mfa_delete": false - } - } - ] - } - }, - { - "id": "aws_s3_bucket.allUsersReadAccess", - "name": "allUsersReadAccess", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "public-read", - "bucket": "my-tf-test-bucket" - } - }, - { - "id": "aws_s3_bucket.authUsersReadAccess", - "name": "authUsersReadAccess", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "authenticated-read", - "bucket": "my-tf-test-bucket" - } - }, - { - "id": "aws_s3_bucket.allUsersWriteAccess", - "name": "allUsersWriteAccess", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "public-write", - "bucket": "my-tf-test-bucket" - } - }, - { - "id": "aws_s3_bucket.allUsersReadWriteAccess", - "name": "allUsersReadWriteAccess", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "public-read-write", - 
"bucket": "my-tf-test-bucket" - } - }, - { - "id": "aws_s3_bucket.noS3BucketSseRules", - "name": "noS3BucketSseRules", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "private", - "bucket": "mybucket", - "tags": { - "Environment": "Dev", - "Name": "nos3BucketSseRules" - } - } - }, - { - "id": "aws_s3_bucket.s3BucketSseRulesWithKmsNull", - "name": "s3BucketSseRulesWithKmsNull", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "private", - "bucket": "mybucket", - "server_side_encryption_configuration": [ - { - "server_side_encryption_configuration": { - "rule": [ - { - "rule": { - "apply_server_side_encryption_by_default": [ - { - "apply_server_side_encryption_by_default": { - "sse_algorithm": "aws:kms" - } - } - ] - } - } - ] - } - } - ], - "tags": { - "Environment": "Dev", - "Name": "s3BucketSseRulesWithNoKms" - } - } - }, - { - "id": "aws_s3_bucket.s3BucketNoWebsiteIndexDoc", - "name": "s3BucketNoWebsiteIndexDoc", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/s3/main.tf", - "type": "aws_s3_bucket", - "config": { - "acl": "public-read", - "bucket": "website", - "server_side_encryption_configuration": [ - { - "server_side_encryption_configuration": { - "rule": [ - { - "rule": { - "apply_server_side_encryption_by_default": [ - { - "apply_server_side_encryption_by_default": { - "kms_master_key_id": "some-key-id", - "sse_algorithm": "aws:kms" - } - } - ] - } - } - ] - } - } - ], - "website": [ - { - "website": { - "error_document": "error.html", - "index_document": "index.html" - } - } - ] - } - } - ], - "aws_security_group": [ - { - "id": "aws_security_group.acme_web", - "name": "acme_web", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf", - "type": "aws_security_group", - "config": { - "description": "Used in the terraform", - "egress": [ - { - "egress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 0, - "protocol": "-1", - "to_port": 0 - } - } - ], - "ingress": [ - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "19.16.0.0/24" - ], - "from_port": 22, - "protocol": "tcp", - "to_port": 22 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 80, - "protocol": "tcp", - "to_port": 80 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 443, - "protocol": "tcp", - "to_port": 443 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 4505, - "protocol": "tcp", - "to_port": 4505 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 4506, - "protocol": "-1", - "to_port": 4506 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 3020, - "protocol": "tcp", - "to_port": 3020 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 61621, - "protocol": "-1", - "to_port": 61621 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 7001, - "protocol": "tcp", - "to_port": 7001 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 9000, - "protocol": "-1", - "to_port": 9000 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 8000, - "protocol": 
"tcp", - "to_port": 8000 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 8080, - "protocol": "-1", - "to_port": 8080 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 636, - "protocol": "tcp", - "to_port": 636 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 1434, - "protocol": "tcp", - "to_port": 1434 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 1434, - "protocol": "udp", - "to_port": 1434 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 135, - "protocol": "tcp", - "to_port": 135 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 1433, - "protocol": "tcp", - "to_port": 1433 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 11214, - "protocol": "tcp", - "to_port": 11214 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 11214, - "protocol": "udp", - "to_port": 11214 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 11215, - "protocol": "tcp", - "to_port": 11215 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 11215, - "protocol": "udp", - "to_port": 11215 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 27018, - "protocol": "tcp", - "to_port": 27018 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 3306, - "protocol": "tcp", - "to_port": 3306 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 137, - "protocol": "tcp", - "to_port": 137 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 137, - "protocol": "udp", - "to_port": 137 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 138, - "protocol": "tcp", - "to_port": 138 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 138, - "protocol": "udp", - "to_port": 138 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 139, - "protocol": "tcp", - "to_port": 139 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 139, - "protocol": "udp", - "to_port": 139 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 2484, - "protocol": "tcp", - "to_port": 2484 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 2484, - "protocol": "udp", - "to_port": 2484 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 5432, - "protocol": "tcp", - "to_port": 5432 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 5432, - "protocol": "udp", - "to_port": 5432 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 3000, - "protocol": "tcp", - "to_port": 3000 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 8140, - "protocol": "tcp", - "to_port": 8140 - } - }, - { - "ingress": { - 
"cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 161, - "protocol": "udp", - "to_port": 161 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 2382, - "protocol": "tcp", - "to_port": 2382 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 2383, - "protocol": "tcp", - "to_port": 2383 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 9090, - "protocol": "tcp", - "to_port": 9090 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 3389, - "protocol": "tcp", - "to_port": 3389 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 9042, - "protocol": "tcp", - "to_port": 9042 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 7000, - "protocol": "tcp", - "to_port": 7000 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 7199, - "protocol": "tcp", - "to_port": 7199 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 61620, - "protocol": "tcp", - "to_port": 61620 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 8888, - "protocol": "tcp", - "to_port": 8888 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 9160, - "protocol": "tcp", - "to_port": 9160 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 9200, - "protocol": "tcp", - "to_port": 9200 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 9300, - "protocol": "tcp", - "to_port": 9300 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 389, - "protocol": "tcp", - "to_port": 389 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 389, - "protocol": "udp", - "to_port": 389 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 11211, - "protocol": "tcp", - "to_port": 11211 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 11211, - "protocol": "udp", - "to_port": 11211 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 27017, - "protocol": "tcp", - "to_port": 27017 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 1521, - "protocol": "tcp", - "to_port": 1521 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 2483, - "protocol": "tcp", - "to_port": 2483 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 2483, - "protocol": "udp", - "to_port": 2483 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 6379, - "protocol": "tcp", - "to_port": 6379 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 6379, - "protocol": "-1", - "to_port": 0 - } - }, - { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 4506, - "protocol": "-1", - "to_port": 0 - } - } - ], - "name": "acme_web", - "tags": { - "Name": "acme_web" - }, - "vpc_id": 
"some_dummy_vpc" - } - }, - { - "id": "aws_security_group.defaultSGNotRestrictsAllTraffic", - "name": "defaultSGNotRestrictsAllTraffic", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sg/main.tf", - "type": "aws_security_group", - "config": { - "description": "Used in the terraform", - "name": "default", - "tags": { - "Name": "default" - }, - "vpc_id": "some_dummy_vpc" - } - } - ], - "aws_sqs_queue": [ - { - "id": "aws_sqs_queue.sqsSseDisabled", - "name": "sqsSseDisabled", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf", - "type": "aws_sqs_queue", - "config": { - "name": "terraform-example-queue" - } - }, - { - "id": "aws_sqs_queue.sqsQueueExposed", - "name": "sqsQueueExposed", - "source": "pkg/iac-providers/terraform/v12/testdata/moduleconfigs/sqs/main.tf", - "type": "aws_sqs_queue", - "config": { - "kms_data_key_reuse_period_seconds": 300, - "kms_master_key_id": "alias/aws/sqs", - "name": "terraform-example-queue", - "policy": "{\n \"Version\": \"2012-10-17\",\n \"Statement\": [{\n \"Sid\":\"Queue1_AnonymousAccess_AllActions_WhitelistIP\",\n \"Effect\": \"Allow\",\n \"Principal\": \"*\",\n \"Action\": \"sqs:*\",\n \"Resource\": \"arn:aws:sqs:*:111122223333:queue1\"\n }] \n}\n" - } - } - ] -} diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json new file mode 100644 index 000000000..074ba47fd --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json @@ -0,0 +1,369 @@ +{ + "aws_cloudfront_distribution": [ + { + "id": "aws_cloudfront_distribution.s3-distribution-TLS-v1", + "name": "s3-distribution-TLS-v1", + "source": "testdata/moduleconfigs/cloudfront/main.tf", + "type": "aws_cloudfront_distribution", + "config": { + "default_cache_behavior": [ + { + "default_cache_behavior": { + "allowed_methods": [ + "DELETE", + "GET", + "HEAD", + "OPTIONS", + "PATCH", + "POST", + "PUT" + ], + "cached_methods": [ + "GET", + "HEAD" + ], + "forwarded_values": [ + { + "forwarded_values": { + "cookies": [ + { + "cookies": { + "forward": "none" + } + } + ], + "query_string": false + } + } + ], + "target_origin_id": "local.s3_origin_id", + "viewer_protocol_policy": "https-only" + } + } + ], + "enabled": true, + "ordered_cache_behavior": [ + { + "ordered_cache_behavior": { + "allowed_methods": [ + "GET", + "HEAD", + "OPTIONS" + ], + "cached_methods": [ + "GET", + "HEAD", + "OPTIONS" + ], + "compress": true, + "forwarded_values": [ + { + "forwarded_values": { + "cookies": [ + { + "cookies": { + "forward": "none" + } + } + ], + "headers": [ + "Origin" + ], + "query_string": false + } + } + ], + "path_pattern": "/content/immutable/*", + "target_origin_id": "local.s3_origin_id", + "viewer_protocol_policy": "allow-all" + } + }, + { + "ordered_cache_behavior": { + "allowed_methods": [ + "GET", + "HEAD", + "OPTIONS" + ], + "cached_methods": [ + "GET", + "HEAD" + ], + "forwarded_values": [ + { + "forwarded_values": { + "cookies": [ + { + "cookies": { + "forward": "none" + } + } + ], + "query_string": false + } + } + ], + "path_pattern": "/content/*", + "target_origin_id": "local.s3_origin_id", + "viewer_protocol_policy": "allow-all" + } + } + ], + "origin": [ + { + "origin": { + "domain_name": "aws_s3_bucket.b.bucket_regional_domain_name", + "origin_id": "local.s3_origin_id", + "s3_origin_config": [ + { + "s3_origin_config": { + "origin_access_identity": "origin-access-identity/cloudfront/ABCDEFG1234567" + } + } + ] + } + } + ], + 
"restrictions": [ + { + "restrictions": { + "geo_restriction": [ + { + "geo_restriction": { + "locations": [ + "US", + "CA", + "GB", + "DE" + ], + "restriction_type": "whitelist" + } + } + ] + } + } + ], + "viewer_certificate": [ + { + "viewer_certificate": { + "cloudfront_default_certificate": true, + "minimum_protocol_version": "TLSv1" + } + } + ] + } + } + ], + "aws_cloudtrail": [ + { + "id": "aws_cloudtrail.missing-multi-region", + "name": "missing-multi-region", + "source": "testdata/moduleconfigs/cloudtrail/main.tf", + "type": "aws_cloudtrail", + "config": { + "include_global_service_events": false, + "name": "tf-trail-foobar", + "s3_bucket_name": "some-s3-bucket", + "s3_key_prefix": "prefix" + } + } + ], + "aws_ecs_task_definition": [ + { + "id": "aws_ecs_task_definition.instanceNotInVpc", + "name": "instanceNotInVpc", + "source": "testdata/moduleconfigs/ecs/main.tf", + "type": "aws_ecs_task_definition", + "config": { + "container_definitions": "${file(\"ecs/service.json\")}", + "family": "service", + "network_mode": "bridge" + } + } + ], + "aws_efs_file_system": [ + { + "id": "aws_efs_file_system.efsNotEncrypted", + "name": "efsNotEncrypted", + "source": "testdata/moduleconfigs/efs/main.tf", + "type": "aws_efs_file_system", + "config": { + "creation_token": "my-product", + "tags": { + "Name": "not-encrypted" + } + } + } + ], + "aws_elasticache_cluster": [ + { + "id": "aws_elasticache_cluster.noMemcachedInElastiCache", + "name": "noMemcachedInElastiCache", + "source": "testdata/moduleconfigs/elasticcache/main.tf", + "type": "aws_elasticache_cluster", + "config": { + "cluster_id": "cluster-example", + "engine": "memcached", + "node_type": "cache.m4.large", + "num_cache_nodes": 2, + "parameter_group_name": "default.memcached1.4", + "port": 11211 + } + } + ], + "aws_guardduty_detector": [ + { + "id": "aws_guardduty_detector.gaurdDutyDisabled", + "name": "gaurdDutyDisabled", + "source": "testdata/moduleconfigs/guardduty/main.tf", + "type": "aws_guardduty_detector", + "config": { + "enable": false + } + } + ], + "aws_iam_access_key": [ + { + "id": "aws_iam_access_key.noAccessKeyForRootAccount", + "name": "noAccessKeyForRootAccount", + "source": "testdata/moduleconfigs/iam/main.tf", + "type": "aws_iam_access_key", + "config": { + "pgp_key": "keybase:some_person_that_exists", + "status": "Inactive", + "user": "root" + } + } + ], + "aws_kinesis_stream": [ + { + "id": "aws_kinesis_stream.kinesisEncryptedWithKms", + "name": "kinesisEncryptedWithKms", + "source": "testdata/moduleconfigs/kinesis/main.tf", + "type": "aws_kinesis_stream", + "config": { + "encryption_type": "KMS", + "kms_key_id": "arn:aws:kms:us-west-2:111122223333:key/1234abcd-12ab-34cd-56ef-1234567890ab", + "name": "kinesisEncryptedWithKms", + "retention_period": 48, + "shard_count": 1, + "shard_level_metrics": [ + "IncomingBytes", + "OutgoingBytes" + ], + "tags": { + "Environment": "kinesisEncryptedWithKms" + } + } + } + ], + "aws_kms_key": [ + { + "id": "aws_kms_key.kmsKeyDisabled", + "name": "kmsKeyDisabled", + "source": "testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf", + "type": "aws_kms_key", + "config": { + "description": "KMS key 2", + "is_enabled": false, + "tags": { + "Name": "kmsKeyDisabled", + "Setup": "self-healing" + } + } + } + ], + "aws_load_balancer_policy": [ + { + "id": "aws_load_balancer_policy.elbWeakCipher", + "name": "elbWeakCipher", + "source": "testdata/moduleconfigs/elb/main.tf", + "type": "aws_load_balancer_policy", + "config": { + "load_balancer_name": "some-name", + "policy_attribute": [ + { 
+ "policy_attribute": { + "name": "ECDHE-RSA-RC4-SHA", + "value": "true" + } + } + ], + "policy_name": "wu-tang-ssl", + "policy_type_name": "SSLNegotiationPolicyType" + } + } + ], + "aws_s3_bucket": [ + { + "id": "aws_s3_bucket.noS3BucketSseRules", + "name": "noS3BucketSseRules", + "source": "testdata/moduleconfigs/s3/main.tf", + "type": "aws_s3_bucket", + "config": { + "acl": "private", + "bucket": "mybucket", + "tags": { + "Environment": "Dev", + "Name": "nos3BucketSseRules" + } + } + } + ], + "aws_security_group": [ + { + "id": "aws_security_group.acme_web", + "name": "acme_web", + "source": "testdata/moduleconfigs/sg/main.tf", + "type": "aws_security_group", + "config": { + "description": "Used in the terraform", + "egress": [ + { + "egress": { + "cidr_blocks": [ + "0.0.0.0/0", + "192.164.0.0/24" + ], + "from_port": 0, + "protocol": "-1", + "to_port": 0 + } + } + ], + "ingress": [ + { + "ingress": { + "cidr_blocks": [ + "0.0.0.0/0", + "19.16.0.0/24" + ], + "from_port": 22, + "protocol": "tcp", + "to_port": 22 + } + } + ], + "name": "acme_web", + "tags": { + "Name": "acme_web" + }, + "vpc_id": "some_dummy_vpc" + } + } + ], + "aws_sqs_queue": [ + { + "id": "aws_sqs_queue.sqsQueueExposed", + "name": "sqsQueueExposed", + "source": "testdata/moduleconfigs/sqs/main.tf", + "type": "aws_sqs_queue", + "config": { + "kms_data_key_reuse_period_seconds": 300, + "kms_master_key_id": "alias/aws/sqs", + "name": "terraform-example-queue", + "policy": "{\n \"Version\": \"2012-10-17\",\n \"Statement\": [{\n \"Sid\":\"Queue1_AnonymousAccess_AllActions_WhitelistIP\",\n \"Effect\": \"Allow\",\n \"Principal\": \"*\",\n \"Action\": \"sqs:*\",\n \"Resource\": \"arn:aws:sqs:*:111122223333:queue1\"\n }] \n}\n" + } + } + ] +} From a7053686bc0fd256f40c5556be5b66e47f13fe1d Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 19:12:06 +0530 Subject: [PATCH 040/188] add unit tests for cloud-provider package --- pkg/cloud-providers/cloud-provider.go | 6 ++- pkg/cloud-providers/providers_test.go | 72 +++++++++++++++++++++++++++ 2 files changed, 77 insertions(+), 1 deletion(-) create mode 100644 pkg/cloud-providers/providers_test.go diff --git a/pkg/cloud-providers/cloud-provider.go b/pkg/cloud-providers/cloud-provider.go index 1c6b516db..ee567ba9a 100644 --- a/pkg/cloud-providers/cloud-provider.go +++ b/pkg/cloud-providers/cloud-provider.go @@ -7,6 +7,10 @@ import ( "go.uber.org/zap" ) +var ( + errCloudNotSupported = fmt.Errorf("cloud type not supported") +) + // NewCloudProvider returns a new CloudProvider func NewCloudProvider(cloudType string) (cloudProvider CloudProvider, err error) { @@ -14,7 +18,7 @@ func NewCloudProvider(cloudType string) (cloudProvider CloudProvider, err error) cloudProviderObject, supported := supportedCloudProviders[supportedCloudType(cloudType)] if !supported { zap.S().Errorf("cloud type '%s' not supported", cloudType) - return cloudProvider, fmt.Errorf("cloud type not supported") + return cloudProvider, errCloudNotSupported } return reflect.New(cloudProviderObject).Interface().(CloudProvider), nil diff --git a/pkg/cloud-providers/providers_test.go b/pkg/cloud-providers/providers_test.go new file mode 100644 index 000000000..0a5479479 --- /dev/null +++ b/pkg/cloud-providers/providers_test.go @@ -0,0 +1,72 @@ +package cloudprovider + +import ( + "reflect" + "testing" + + awsProvider "github.com/accurics/terrascan/pkg/cloud-providers/aws" +) + +func TestNewCloudProvider(t *testing.T) { + + table := []struct { + name string + cloudType supportedCloudType + want CloudProvider + 
wantErr error + }{ + { + name: "aws provider", + cloudType: aws, + want: &awsProvider.AWSProvider{}, + wantErr: nil, + }, + { + name: "not supported cloud type", + cloudType: "not-supported", + want: nil, + wantErr: errCloudNotSupported, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got, gotErr := NewCloudProvider(string(tt.cloudType)) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} + +func TestIsCloudSupported(t *testing.T) { + + table := []struct { + name string + cloudType supportedCloudType + want bool + }{ + { + name: "aws provider", + cloudType: aws, + want: true, + }, + { + name: "not supported cloud type", + cloudType: "not-supported", + want: false, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got := IsCloudSupported(string(tt.cloudType)) + if got != tt.want { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} From af49fe727467e2b82719de562d23de4206c65dbe Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 27 Jul 2020 20:03:56 +0530 Subject: [PATCH 041/188] add Github actions badge for CI passing --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 5a870eee0..7309c20b3 100644 --- a/README.md +++ b/README.md @@ -1,2 +1,2 @@ # Terrascan - +![CI](/~https://github.com/accurics/terrascan/workflows/Go%20Terrascan%20build/badge.svg) From a0c19c5f34ed4c267600d7a5ea2d3ca304142de8 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 28 Jul 2020 18:11:40 +0530 Subject: [PATCH 042/188] update Copyrights info in all files --- cmd/terrascan/main.go | 16 ++++++++++++++++ go.mod | 2 +- go.sum | 2 ++ pkg/cli/run.go | 16 ++++++++++++++++ pkg/cloud-providers/aws/normalized.go | 16 ++++++++++++++++ pkg/cloud-providers/aws/types.go | 16 ++++++++++++++++ pkg/cloud-providers/cloud-provider.go | 16 ++++++++++++++++ pkg/cloud-providers/interface.go | 16 ++++++++++++++++ pkg/cloud-providers/providers_test.go | 16 ++++++++++++++++ pkg/cloud-providers/supported.go | 16 ++++++++++++++++ pkg/http-server/start.go | 16 ++++++++++++++++ pkg/iac-providers/interface.go | 16 ++++++++++++++++ pkg/iac-providers/output/types.go | 16 ++++++++++++++++ pkg/iac-providers/providers.go | 16 ++++++++++++++++ pkg/iac-providers/providers_test.go | 16 ++++++++++++++++ pkg/iac-providers/supported.go | 16 ++++++++++++++++ pkg/iac-providers/terraform/v12/convert.go | 16 ++++++++++++++++ pkg/iac-providers/terraform/v12/load-dir.go | 16 ++++++++++++++++ pkg/iac-providers/terraform/v12/load-dir_test.go | 16 ++++++++++++++++ pkg/iac-providers/terraform/v12/load-file.go | 16 ++++++++++++++++ .../terraform/v12/load-file_test.go | 16 ++++++++++++++++ pkg/iac-providers/terraform/v12/resource.go | 16 ++++++++++++++++ pkg/iac-providers/terraform/v12/types.go | 16 ++++++++++++++++ pkg/logger/logger.go | 16 ++++++++++++++++ pkg/logger/logger_test.go | 16 ++++++++++++++++ pkg/runtime/executor.go | 16 ++++++++++++++++ pkg/runtime/executor_test.go | 16 ++++++++++++++++ pkg/runtime/validate.go | 16 ++++++++++++++++ pkg/runtime/validate_test.go | 16 ++++++++++++++++ pkg/utils/path.go | 16 ++++++++++++++++ pkg/utils/path_test.go | 16 ++++++++++++++++ pkg/utils/printer.go | 16 ++++++++++++++++ pkg/utils/printer_test.go | 16 ++++++++++++++++ 33 files changed, 499 insertions(+), 1 deletion(-) diff --git 
a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 686ca90ee..32fa78711 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package main import ( diff --git a/go.mod b/go.mod index ddb8cad02..f4bc50066 100644 --- a/go.mod +++ b/go.mod @@ -10,6 +10,6 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/lint v0.0.0-20200302205851-738671d3881b // indirect - golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d // indirect + golang.org/x/tools v0.0.0-20200725200936-102e7d357031 // indirect honnef.co/go/tools v0.0.1-2020.1.4 // indirect ) diff --git a/go.sum b/go.sum index 237db5eb5..1f974b31b 100644 --- a/go.sum +++ b/go.sum @@ -395,6 +395,8 @@ golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d h1:7k9BKfwmdbykG6l5ztniTrH0TP25yel8O7l26/yovMU= golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200725200936-102e7d357031 h1:VtIxiVHWPhnny2ZTi4f9/2diZKqyLaq3FUTuud5+khA= +golang.org/x/tools v0.0.0-20200725200936-102e7d357031/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/pkg/cli/run.go b/pkg/cli/run.go index ed25f5d01..54d5a23ce 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package cli import ( diff --git a/pkg/cloud-providers/aws/normalized.go b/pkg/cloud-providers/aws/normalized.go index 09ea8ee98..d0b7545ea 100644 --- a/pkg/cloud-providers/aws/normalized.go +++ b/pkg/cloud-providers/aws/normalized.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package awsprovider import ( diff --git a/pkg/cloud-providers/aws/types.go b/pkg/cloud-providers/aws/types.go index 0263ad0a0..5a27a806f 100644 --- a/pkg/cloud-providers/aws/types.go +++ b/pkg/cloud-providers/aws/types.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package awsprovider // AWSProvider implements cloud provider interface diff --git a/pkg/cloud-providers/cloud-provider.go b/pkg/cloud-providers/cloud-provider.go index ee567ba9a..44c578bea 100644 --- a/pkg/cloud-providers/cloud-provider.go +++ b/pkg/cloud-providers/cloud-provider.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package cloudprovider import ( diff --git a/pkg/cloud-providers/interface.go b/pkg/cloud-providers/interface.go index 9a920f9da..4fa1f6005 100644 --- a/pkg/cloud-providers/interface.go +++ b/pkg/cloud-providers/interface.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package cloudprovider import ( diff --git a/pkg/cloud-providers/providers_test.go b/pkg/cloud-providers/providers_test.go index 0a5479479..62e6e3f2f 100644 --- a/pkg/cloud-providers/providers_test.go +++ b/pkg/cloud-providers/providers_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package cloudprovider import ( diff --git a/pkg/cloud-providers/supported.go b/pkg/cloud-providers/supported.go index 3b76ebfca..b1c27dc6f 100644 --- a/pkg/cloud-providers/supported.go +++ b/pkg/cloud-providers/supported.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package cloudprovider import ( diff --git a/pkg/http-server/start.go b/pkg/http-server/start.go index 919d2d12a..701777d8b 100644 --- a/pkg/http-server/start.go +++ b/pkg/http-server/start.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package httpserver import ( diff --git a/pkg/iac-providers/interface.go b/pkg/iac-providers/interface.go index 00f7fa412..8e5d50927 100644 --- a/pkg/iac-providers/interface.go +++ b/pkg/iac-providers/interface.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package iacprovider import ( diff --git a/pkg/iac-providers/output/types.go b/pkg/iac-providers/output/types.go index 438207d42..681f65f96 100644 --- a/pkg/iac-providers/output/types.go +++ b/pkg/iac-providers/output/types.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package output // ResourceConfig describes a resource present in IaC diff --git a/pkg/iac-providers/providers.go b/pkg/iac-providers/providers.go index 5e2d2ac8f..c7f63818d 100644 --- a/pkg/iac-providers/providers.go +++ b/pkg/iac-providers/providers.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package iacprovider import ( diff --git a/pkg/iac-providers/providers_test.go b/pkg/iac-providers/providers_test.go index b5353ef37..191c6d5eb 100644 --- a/pkg/iac-providers/providers_test.go +++ b/pkg/iac-providers/providers_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package iacprovider import ( diff --git a/pkg/iac-providers/supported.go b/pkg/iac-providers/supported.go index d7287faa3..ae3665eee 100644 --- a/pkg/iac-providers/supported.go +++ b/pkg/iac-providers/supported.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package iacprovider import ( diff --git a/pkg/iac-providers/terraform/v12/convert.go b/pkg/iac-providers/terraform/v12/convert.go index ea257f5de..025b6c814 100644 --- a/pkg/iac-providers/terraform/v12/convert.go +++ b/pkg/iac-providers/terraform/v12/convert.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 /* diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index 5c85c449a..6a5795be8 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 import ( diff --git a/pkg/iac-providers/terraform/v12/load-dir_test.go b/pkg/iac-providers/terraform/v12/load-dir_test.go index f4b747a43..e082b27cd 100644 --- a/pkg/iac-providers/terraform/v12/load-dir_test.go +++ b/pkg/iac-providers/terraform/v12/load-dir_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 import ( diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index 3b2206d4a..a5248b992 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 import ( diff --git a/pkg/iac-providers/terraform/v12/load-file_test.go b/pkg/iac-providers/terraform/v12/load-file_test.go index 1b795482e..f98ff3ecf 100644 --- a/pkg/iac-providers/terraform/v12/load-file_test.go +++ b/pkg/iac-providers/terraform/v12/load-file_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 import ( diff --git a/pkg/iac-providers/terraform/v12/resource.go b/pkg/iac-providers/terraform/v12/resource.go index 0ca109614..0b6fd6472 100644 --- a/pkg/iac-providers/terraform/v12/resource.go +++ b/pkg/iac-providers/terraform/v12/resource.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 import ( diff --git a/pkg/iac-providers/terraform/v12/types.go b/pkg/iac-providers/terraform/v12/types.go index 15df3a05c..939fed49d 100644 --- a/pkg/iac-providers/terraform/v12/types.go +++ b/pkg/iac-providers/terraform/v12/types.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package tfv12 // TfV12 struct implements the IacProvider interface diff --git a/pkg/logger/logger.go b/pkg/logger/logger.go index 8f0f80048..80ed17d64 100644 --- a/pkg/logger/logger.go +++ b/pkg/logger/logger.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package logger import ( diff --git a/pkg/logger/logger_test.go b/pkg/logger/logger_test.go index 028d846a4..0bfd1d732 100644 --- a/pkg/logger/logger_test.go +++ b/pkg/logger/logger_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package logger import ( diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index cd8f74645..0510fc18d 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package runtime import ( diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index e72f2f585..153a48822 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package runtime import ( diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index 49a185b78..fc850142c 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package runtime import ( diff --git a/pkg/runtime/validate_test.go b/pkg/runtime/validate_test.go index 02091c651..03c337f30 100644 --- a/pkg/runtime/validate_test.go +++ b/pkg/runtime/validate_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*/ + package runtime import ( diff --git a/pkg/utils/path.go b/pkg/utils/path.go index 34cc2e2f5..6025470e7 100644 --- a/pkg/utils/path.go +++ b/pkg/utils/path.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package utils import ( diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go index b8913675f..77774281a 100644 --- a/pkg/utils/path_test.go +++ b/pkg/utils/path_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package utils import ( diff --git a/pkg/utils/printer.go b/pkg/utils/printer.go index 264ccb08a..c3da417cc 100644 --- a/pkg/utils/printer.go +++ b/pkg/utils/printer.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package utils import ( diff --git a/pkg/utils/printer_test.go b/pkg/utils/printer_test.go index c58949080..2185a626c 100644 --- a/pkg/utils/printer_test.go +++ b/pkg/utils/printer_test.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + package utils import ( From 01c9bb772cab7d1f0407574fc86e964d0ff7785c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 28 Jul 2020 20:45:38 +0530 Subject: [PATCH 043/188] use GO111MODULE=on in the build command --- Makefile | 1 + 1 file changed, 1 insertion(+) diff --git a/Makefile b/Makefile index 831f87d4a..e962d83c8 100644 --- a/Makefile +++ b/Makefile @@ -27,6 +27,7 @@ help: # build terrascan binary build: clean @mkdir -p $(BUILD_DIR) > /dev/null + @export GO111MODULE=on go build ${BUILD_FLAGS} -o ${BUILD_DIR}/${BINARY_NAME} cmd/terrascan/main.go @echo "binary created at ${BUILD_DIR}/${BINARY_NAME}" From 0d0f1133eb876ccb5e3b8caa6f1f4596355ed693 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 28 Jul 2020 22:05:21 +0530 Subject: [PATCH 044/188] refactoring http server and logging packages --- cmd/terrascan/main.go | 6 +-- go.mod | 3 +- go.sum | 4 ++ pkg/http-server/constants.go | 25 ++++++++++ pkg/http-server/gateway.go | 26 ++++++++++ pkg/http-server/health.go | 26 ++++++++++ pkg/http-server/routes.go | 39 +++++++++++++++ pkg/http-server/start.go | 66 +++++++++++++++++++++++--- pkg/{logger => logging}/logger.go | 12 ++++- pkg/{logger => logging}/logger_test.go | 2 +- 10 files changed, 196 insertions(+), 13 deletions(-) create mode 100644 pkg/http-server/constants.go create mode 100644 pkg/http-server/gateway.go create mode 100644 pkg/http-server/health.go create mode 100644 pkg/http-server/routes.go rename pkg/{logger => logging}/logger.go (90%) rename pkg/{logger => logging}/logger_test.go (99%) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 32fa78711..54bf3b3fe 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -23,7 +23,7 @@ import ( "github.com/accurics/terrascan/pkg/cli" httpServer "github.com/accurics/terrascan/pkg/http-server" - "github.com/accurics/terrascan/pkg/logger" + "github.com/accurics/terrascan/pkg/logging" ) func main() { @@ -43,10 +43,10 @@ func main() { // if server mode set, run terrascan as a server, else run it as CLI if *server { - logger.Init(*logType, *logLevel) + logging.Init(*logType, *logLevel) httpServer.Start() } else { - logger.Init(*logType, *logLevel) + logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath) } diff --git a/go.mod b/go.mod index f4bc50066..321a14d32 100644 --- a/go.mod +++ b/go.mod @@ -3,6 +3,7 @@ module github.com/accurics/terrascan go 1.14 require ( + github.com/gorilla/mux v1.7.4 github.com/hashicorp/go-version v1.2.0 github.com/hashicorp/hcl/v2 v2.3.0 github.com/hashicorp/terraform v0.12.28 @@ -10,6 +11,6 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/lint v0.0.0-20200302205851-738671d3881b // indirect - golang.org/x/tools v0.0.0-20200725200936-102e7d357031 // indirect + golang.org/x/tools v0.0.0-20200728160517-2ad651e9e297 // indirect honnef.co/go/tools v0.0.1-2020.1.4 // indirect ) diff --git a/go.sum b/go.sum index 1f974b31b..442aee8e5 100644 --- a/go.sum +++ b/go.sum @@ -120,6 +120,8 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4= github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01/go.mod h1:wjDF8z83zTeg5eMLml5EBSlAhbF7G8DobyI1YsMuyzw= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/mux 
v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= +github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs= github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk= @@ -397,6 +399,8 @@ golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d h1:7k9BKfwmdbykG6l5ztniTrH golang.org/x/tools v0.0.0-20200723000907-a7c6fd066f6d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200725200936-102e7d357031 h1:VtIxiVHWPhnny2ZTi4f9/2diZKqyLaq3FUTuud5+khA= golang.org/x/tools v0.0.0-20200725200936-102e7d357031/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200728160517-2ad651e9e297 h1:6BwalBvLQpSmhwOWj2+nJv85VJXnDefRc3FmPb2NI0Y= +golang.org/x/tools v0.0.0-20200728160517-2ad651e9e297/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/pkg/http-server/constants.go b/pkg/http-server/constants.go new file mode 100644 index 000000000..2226cf49a --- /dev/null +++ b/pkg/http-server/constants.go @@ -0,0 +1,25 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package httpserver + +const ( + // GatewayDefaultPort - default port at which the http server listens + GatewayDefaultPort = "9010" + + // APIVersion - default api version for REST endpoints + APIVersion = "v1" +) diff --git a/pkg/http-server/gateway.go b/pkg/http-server/gateway.go new file mode 100644 index 000000000..c07633ac1 --- /dev/null +++ b/pkg/http-server/gateway.go @@ -0,0 +1,26 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package httpserver + +// APIGateway implements all the API endpoints, APIGateway should store all the +// metadata info which may be required by all the API handlers +type APIGateway struct{} + +// NewAPIGateway returns a new APIGateway{} +func NewAPIGateway() *APIGateway { + return &APIGateway{} +} diff --git a/pkg/http-server/health.go b/pkg/http-server/health.go new file mode 100644 index 000000000..70344c912 --- /dev/null +++ b/pkg/http-server/health.go @@ -0,0 +1,26 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package httpserver + +import ( + "net/http" +) + +// Health returns the health of the http server +func (g *APIGateway) Health(w http.ResponseWriter, r *http.Request) { + w.WriteHeader(http.StatusOK) +} diff --git a/pkg/http-server/routes.go b/pkg/http-server/routes.go new file mode 100644 index 000000000..7d1a636ed --- /dev/null +++ b/pkg/http-server/routes.go @@ -0,0 +1,39 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package httpserver + +import ( + "net/http" +) + +// Route is a specification and handler for a REST endpoint. 
+type Route struct { + verb string + path string + fn func(http.ResponseWriter, *http.Request) +} + +// Routes returns a slice of routes of API endpoints registered with http server +func (g *APIGateway) Routes() []*Route { + return []*Route{ + {verb: "GET", path: path("health", APIVersion), fn: g.Health}, + } +} + +func path(route, version string) string { + return "/" + version + "/" + route +} diff --git a/pkg/http-server/start.go b/pkg/http-server/start.go index 701777d8b..4e54d7cc8 100644 --- a/pkg/http-server/start.go +++ b/pkg/http-server/start.go @@ -17,18 +17,70 @@ package httpserver import ( + "context" "net/http" + "os" + "os/signal" + "time" - "go.uber.org/zap" + "github.com/accurics/terrascan/pkg/logging" + "github.com/gorilla/mux" ) -// Start starts the terrascan http server +// Start initializes api routes and starts http server func Start() { + // create a new API gateway + g := NewAPIGateway() - zap.S().Info("terrascan server listening at port 9010") - http.HandleFunc("/health", func(w http.ResponseWriter, r *http.Request) { - w.WriteHeader(http.StatusOK) - }) + // get all routes + routes := g.Routes() - zap.S().Fatal(http.ListenAndServe(":9010", nil)) + // register routes and start the http server + g.start(routes) +} + +// start http server +func (g *APIGateway) start(routes []*Route) { + + var ( + err error + logger = logging.GetDefaultLogger() // new logger + router = mux.NewRouter() // new router + ) + + logger.Info("registering routes...") + + // register all routes + for _, v := range routes { + logger.Info("Route ", v.verb, " - ", v.path) + router.Methods(v.verb).Path(v.path).HandlerFunc(v.fn) + } + + // start http server + server := &http.Server{ + Addr: ":" + GatewayDefaultPort, + Handler: router, + } + + go func() { + err = server.ListenAndServe() + if err != nil && err != http.ErrServerClosed { + logger.Fatal(err) + } + }() + logger.Infof("http server listening at port %v", GatewayDefaultPort) + + // Wait for interrupt signal to gracefully shutdown the server + quit := make(chan os.Signal, 1) + signal.Notify(quit, os.Interrupt) + <-quit + + // try to stop the server gracefully with default 5 second timeout + ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) + defer cancel() + err = server.Shutdown(ctx) + if err != nil { + logger.Fatalf("server failed to exit gracefully. error: '%v'", err) + } + logger.Info("server exiting gracefully") } diff --git a/pkg/logger/logger.go b/pkg/logging/logger.go similarity index 90% rename from pkg/logger/logger.go rename to pkg/logging/logger.go index 80ed17d64..19dadedc0 100644 --- a/pkg/logger/logger.go +++ b/pkg/logging/logger.go @@ -14,13 +14,15 @@ limitations under the License. 
*/ -package logger +package logging import ( "go.uber.org/zap" "go.uber.org/zap/zapcore" ) +var globalLogger *zap.SugaredLogger + // levelMap maps human readable log level to zapcore.Level var levelMap = map[string]zapcore.Level{ "debug": zapcore.DebugLevel, @@ -53,6 +55,9 @@ func Init(encoding, level string) { // get logger logger := GetLogger(level, encoding, encodingLevel) + // set global Logger as well + globalLogger = logger.Sugar() + // initialize global logger zap.ReplaceGlobals(logger) } @@ -81,3 +86,8 @@ func GetLogger(logLevel, encoding string, encodingLevel func(zapcore.Level, zapc return logger } + +// GetDefaultLogger returns the globalLogger +func GetDefaultLogger() *zap.SugaredLogger { + return globalLogger +} diff --git a/pkg/logger/logger_test.go b/pkg/logging/logger_test.go similarity index 99% rename from pkg/logger/logger_test.go rename to pkg/logging/logger_test.go index 0bfd1d732..cd5b6eb26 100644 --- a/pkg/logger/logger_test.go +++ b/pkg/logging/logger_test.go @@ -14,7 +14,7 @@ limitations under the License. */ -package logger +package logging import ( "testing" From d37a6278a28d096382b042f9722edc60d16b27d3 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 29 Jul 2020 15:02:11 +0530 Subject: [PATCH 045/188] remove cloud provider stage --- pkg/cloud-providers/aws/normalized.go | 26 -------- pkg/cloud-providers/aws/types.go | 20 ------ pkg/cloud-providers/cloud-provider.go | 50 --------------- pkg/cloud-providers/interface.go | 27 -------- pkg/cloud-providers/providers_test.go | 88 --------------------------- pkg/cloud-providers/supported.go | 43 ------------- pkg/runtime/executor.go | 35 +++-------- pkg/runtime/executor_test.go | 59 ++++++------------ pkg/runtime/validate.go | 19 ++---- pkg/runtime/validate_test.go | 11 ---- 10 files changed, 35 insertions(+), 343 deletions(-) delete mode 100644 pkg/cloud-providers/aws/normalized.go delete mode 100644 pkg/cloud-providers/aws/types.go delete mode 100644 pkg/cloud-providers/cloud-provider.go delete mode 100644 pkg/cloud-providers/interface.go delete mode 100644 pkg/cloud-providers/providers_test.go delete mode 100644 pkg/cloud-providers/supported.go diff --git a/pkg/cloud-providers/aws/normalized.go b/pkg/cloud-providers/aws/normalized.go deleted file mode 100644 index d0b7545ea..000000000 --- a/pkg/cloud-providers/aws/normalized.go +++ /dev/null @@ -1,26 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -package awsprovider - -import ( - "github.com/accurics/terrascan/pkg/iac-providers/output" -) - -// CreateNormalizedJSON creates a normalized json for the given input -func (a *AWSProvider) CreateNormalizedJSON(allResourcesConfig output.AllResourceConfigs) (interface{}, error) { - return allResourcesConfig, nil -} diff --git a/pkg/cloud-providers/aws/types.go b/pkg/cloud-providers/aws/types.go deleted file mode 100644 index 5a27a806f..000000000 --- a/pkg/cloud-providers/aws/types.go +++ /dev/null @@ -1,20 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. 
- - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -package awsprovider - -// AWSProvider implements cloud provider interface -type AWSProvider struct{} diff --git a/pkg/cloud-providers/cloud-provider.go b/pkg/cloud-providers/cloud-provider.go deleted file mode 100644 index 44c578bea..000000000 --- a/pkg/cloud-providers/cloud-provider.go +++ /dev/null @@ -1,50 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -package cloudprovider - -import ( - "fmt" - "reflect" - - "go.uber.org/zap" -) - -var ( - errCloudNotSupported = fmt.Errorf("cloud type not supported") -) - -// NewCloudProvider returns a new CloudProvider -func NewCloudProvider(cloudType string) (cloudProvider CloudProvider, err error) { - - // get CloudProvider from supportedCloudProviders - cloudProviderObject, supported := supportedCloudProviders[supportedCloudType(cloudType)] - if !supported { - zap.S().Errorf("cloud type '%s' not supported", cloudType) - return cloudProvider, errCloudNotSupported - } - - return reflect.New(cloudProviderObject).Interface().(CloudProvider), nil -} - -// IsCloudSupported returns true/false depending on whether the cloud -// provider is supported in terrascan or not -func IsCloudSupported(cloudType string) bool { - if _, supported := supportedCloudProviders[supportedCloudType(cloudType)]; !supported { - return false - } - return true -} diff --git a/pkg/cloud-providers/interface.go b/pkg/cloud-providers/interface.go deleted file mode 100644 index 4fa1f6005..000000000 --- a/pkg/cloud-providers/interface.go +++ /dev/null @@ -1,27 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ - -package cloudprovider - -import ( - "github.com/accurics/terrascan/pkg/iac-providers/output" -) - -// CloudProvider defines the interface which every cloud provider needs to implement -// to claim support in terrascan -type CloudProvider interface { - CreateNormalizedJSON(output.AllResourceConfigs) (interface{}, error) -} diff --git a/pkg/cloud-providers/providers_test.go b/pkg/cloud-providers/providers_test.go deleted file mode 100644 index 62e6e3f2f..000000000 --- a/pkg/cloud-providers/providers_test.go +++ /dev/null @@ -1,88 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. -*/ - -package cloudprovider - -import ( - "reflect" - "testing" - - awsProvider "github.com/accurics/terrascan/pkg/cloud-providers/aws" -) - -func TestNewCloudProvider(t *testing.T) { - - table := []struct { - name string - cloudType supportedCloudType - want CloudProvider - wantErr error - }{ - { - name: "aws provider", - cloudType: aws, - want: &awsProvider.AWSProvider{}, - wantErr: nil, - }, - { - name: "not supported cloud type", - cloudType: "not-supported", - want: nil, - wantErr: errCloudNotSupported, - }, - } - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - got, gotErr := NewCloudProvider(string(tt.cloudType)) - if !reflect.DeepEqual(gotErr, tt.wantErr) { - t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) - } - if !reflect.DeepEqual(got, tt.want) { - t.Errorf("got: '%v', want: '%v'", got, tt.want) - } - }) - } -} - -func TestIsCloudSupported(t *testing.T) { - - table := []struct { - name string - cloudType supportedCloudType - want bool - }{ - { - name: "aws provider", - cloudType: aws, - want: true, - }, - { - name: "not supported cloud type", - cloudType: "not-supported", - want: false, - }, - } - - for _, tt := range table { - t.Run(tt.name, func(t *testing.T) { - got := IsCloudSupported(string(tt.cloudType)) - if got != tt.want { - t.Errorf("got: '%v', want: '%v'", got, tt.want) - } - }) - } -} diff --git a/pkg/cloud-providers/supported.go b/pkg/cloud-providers/supported.go deleted file mode 100644 index b1c27dc6f..000000000 --- a/pkg/cloud-providers/supported.go +++ /dev/null @@ -1,43 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ - -package cloudprovider - -import ( - "reflect" - - awsProvider "github.com/accurics/terrascan/pkg/cloud-providers/aws" -) - -// SupportedCloudType data type for supported IaC provider -type supportedCloudType string - -// supported IaC providers -const ( - aws supportedCloudType = "aws" -) - -// map of supported IaC providers -var supportedCloudProviders map[supportedCloudType]reflect.Type - -// initializes a map of supported IaC providers -func init() { - - supportedCloudProviders = make(map[supportedCloudType]reflect.Type) - - // aws support - supportedCloudProviders[aws] = reflect.TypeOf(awsProvider.AWSProvider{}) -} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 0510fc18d..514f450f2 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -22,20 +22,18 @@ import ( "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" - cloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" iacProvider "github.com/accurics/terrascan/pkg/iac-providers" "github.com/accurics/terrascan/pkg/iac-providers/output" ) // Executor object type Executor struct { - filePath string - dirPath string - cloudType string - iacType string - iacVersion string - iacProvider iacProvider.IacProvider - cloudProvider cloudProvider.CloudProvider + filePath string + dirPath string + cloudType string + iacType string + iacVersion string + iacProvider iacProvider.IacProvider } // NewExecutor creates a runtime object @@ -72,13 +70,6 @@ func (e *Executor) Init() error { return err } - // create new CloudProvider - e.cloudProvider, err = cloudProvider.NewCloudProvider(e.cloudType) - if err != nil { - zap.S().Errorf("failed to create a new CloudProvider for cloudType '%s'. error: '%s'", e.cloudType, err) - return err - } - return nil } @@ -87,24 +78,18 @@ func (e *Executor) Execute() error { // load iac config var ( - iacOut output.AllResourceConfigs - err error + normalized output.AllResourceConfigs + err error ) if e.dirPath != "" { - iacOut, err = e.iacProvider.LoadIacDir(e.dirPath) + normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } else { // create config from IaC - iacOut, err = e.iacProvider.LoadIacFile(e.filePath) + normalized, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { return err } - - // create normalized json - normalized, err := e.cloudProvider.CreateNormalizedJSON(iacOut) - if err != nil { - return err - } utils.PrintJSON(normalized, os.Stdout) // write output diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 153a48822..86be316c9 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -21,17 +21,14 @@ import ( "reflect" "testing" - cloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" - awsProvider "github.com/accurics/terrascan/pkg/cloud-providers/aws" iacProvider "github.com/accurics/terrascan/pkg/iac-providers" "github.com/accurics/terrascan/pkg/iac-providers/output" tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" ) var ( - errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") - errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") - errMockCreateNormalizedJSON = fmt.Errorf("mock CreateNormalizedJSON") + errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") + errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") ) // MockIacProvider mocks IacProvider interface @@ -48,15 +45,6 @@ func (m MockIacProvider) LoadIacFile(file string) (output.AllResourceConfigs, er return m.output, m.err } -// MockCloudProvider mocks CloudProvider interface -type MockCloudProvider struct 
{ - err error -} - -func (m MockCloudProvider) CreateNormalizedJSON(data output.AllResourceConfigs) (mockInterface interface{}, err error) { - return data, m.err -} - func TestExecute(t *testing.T) { table := []struct { @@ -65,7 +53,7 @@ func TestExecute(t *testing.T) { wantErr error }{ { - name: "test LoadIacDir", + name: "test LoadIacDir error", executor: Executor{ dirPath: "./testdata/testdir", iacProvider: MockIacProvider{err: errMockLoadIacDir}, @@ -73,28 +61,26 @@ func TestExecute(t *testing.T) { wantErr: errMockLoadIacDir, }, { - name: "test LoadIacFile", + name: "test LoadIacDir no error", executor: Executor{ - filePath: "./testdata/testfile", - iacProvider: MockIacProvider{err: errMockLoadIacFile}, + dirPath: "./testdata/testdir", + iacProvider: MockIacProvider{err: nil}, }, - wantErr: errMockLoadIacFile, + wantErr: nil, }, { - name: "test CreateNormalizedJSON error", + name: "test LoadIacFile error", executor: Executor{ - filePath: "./testdata/testfile", - iacProvider: MockIacProvider{err: nil}, - cloudProvider: MockCloudProvider{err: errMockCreateNormalizedJSON}, + filePath: "./testdata/testfile", + iacProvider: MockIacProvider{err: errMockLoadIacFile}, }, - wantErr: errMockCreateNormalizedJSON, + wantErr: errMockLoadIacFile, }, { - name: "test CreateNormalizedJSON", + name: "test LoadIacFile no error", executor: Executor{ - filePath: "./testdata/testfile", - iacProvider: MockIacProvider{err: nil}, - cloudProvider: MockCloudProvider{err: nil}, + filePath: "./testdata/testfile", + iacProvider: MockIacProvider{err: nil}, }, wantErr: nil, }, @@ -113,11 +99,10 @@ func TestExecute(t *testing.T) { func TestInit(t *testing.T) { table := []struct { - name string - executor Executor - wantErr error - wantIacProvider iacProvider.IacProvider - wantCloudProvider cloudProvider.CloudProvider + name string + executor Executor + wantErr error + wantIacProvider iacProvider.IacProvider }{ { name: "valid filePath", @@ -128,9 +113,8 @@ func TestInit(t *testing.T) { iacType: "terraform", iacVersion: "v12", }, - wantErr: nil, - wantIacProvider: &tfv12.TfV12{}, - wantCloudProvider: &awsProvider.AWSProvider{}, + wantErr: nil, + wantIacProvider: &tfv12.TfV12{}, }, } @@ -142,8 +126,5 @@ func TestInit(t *testing.T) { if !reflect.DeepEqual(tt.executor.iacProvider, tt.wantIacProvider) { t.Errorf("got: '%v', want: '%v'", tt.executor.iacProvider, tt.wantIacProvider) } - if !reflect.DeepEqual(tt.executor.cloudProvider, tt.wantCloudProvider) { - t.Errorf("got: '%v', want: '%v'", tt.executor.cloudProvider, tt.wantCloudProvider) - } } } diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index fc850142c..715fb2a85 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -23,17 +23,15 @@ import ( "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" - CloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" IacProvider "github.com/accurics/terrascan/pkg/iac-providers" ) var ( - errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") - errIncorrectIacPath = fmt.Errorf("cannot accept both '-f' and '-d' options together") - errDirNotExists = fmt.Errorf("directory does not exist") - errFileNotExists = fmt.Errorf("file does not exist") - errIacNotSupported = fmt.Errorf("iac type or version not supported") - errCloudNotSupported = fmt.Errorf("cloud type not supported") + errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") + errIncorrectIacPath = fmt.Errorf("cannot accept both '-f' and '-d' options together") + errDirNotExists = 
fmt.Errorf("directory does not exist") + errFileNotExists = fmt.Errorf("file does not exist") + errIacNotSupported = fmt.Errorf("iac type or version not supported") ) // ValidateInputs validates the inputs to the executor object @@ -83,13 +81,6 @@ func (e *Executor) ValidateInputs() error { } zap.S().Debugf("iac type '%s', version '%s' is supported", e.iacType, e.iacVersion) - // check if cloud type is supported - if !CloudProvider.IsCloudSupported(e.cloudType) { - zap.S().Errorf("cloud type '%s' not supported", e.cloudType) - return errCloudNotSupported - } - zap.S().Debugf("cloud type '%s' supported", e.cloudType) - // check if policy type is supported // successful diff --git a/pkg/runtime/validate_test.go b/pkg/runtime/validate_test.go index 03c337f30..938672e40 100644 --- a/pkg/runtime/validate_test.go +++ b/pkg/runtime/validate_test.go @@ -80,17 +80,6 @@ func TestValidateInputs(t *testing.T) { }, wantErr: errDirNotExists, }, - { - name: "invalid cloud", - executor: Executor{ - filePath: "", - dirPath: "./testdata/testdir", - cloudType: "nothere", - iacType: "terraform", - iacVersion: "v12", - }, - wantErr: errCloudNotSupported, - }, { name: "invalid iac type", executor: Executor{ From cadae234d130779ed38ae630d5b24d5a239b4653 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 30 Jul 2020 22:31:56 +0530 Subject: [PATCH 046/188] add scan file endpoint to the API server --- pkg/cli/run.go | 9 ++- pkg/http-server/file-scan.go | 103 ++++++++++++++++++++++++++++++++ pkg/http-server/gateway_test.go | 18 ++++++ pkg/http-server/routes.go | 3 +- pkg/http-server/routes_test.go | 49 +++++++++++++++ pkg/runtime/executor.go | 16 +---- pkg/runtime/executor_test.go | 3 +- 7 files changed, 185 insertions(+), 16 deletions(-) create mode 100644 pkg/http-server/file-scan.go create mode 100644 pkg/http-server/gateway_test.go create mode 100644 pkg/http-server/routes_test.go diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 54d5a23ce..e4dbd3810 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -17,7 +17,10 @@ package cli import ( + "os" + "github.com/accurics/terrascan/pkg/runtime" + "github.com/accurics/terrascan/pkg/utils" ) // Run executes terrascan in CLI mode @@ -29,5 +32,9 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath string) { if err != nil { return } - executor.Execute() + normalized, err := executor.Execute() + if err != nil { + return + } + utils.PrintJSON(normalized, os.Stdout) } diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go new file mode 100644 index 000000000..c191ebfa3 --- /dev/null +++ b/pkg/http-server/file-scan.go @@ -0,0 +1,103 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package httpserver + +import ( + "encoding/json" + "fmt" + "io/ioutil" + "net/http" + "os" + + "github.com/accurics/terrascan/pkg/logging" + "github.com/accurics/terrascan/pkg/runtime" + "github.com/gorilla/mux" +) + +// scanFile accepts uploaded file and runs scan on it +func (g *APIGateway) scanFile(w http.ResponseWriter, r *http.Request) { + + // new logger + logger := logging.GetDefaultLogger() + + // get url params + params := mux.Vars(r) + var ( + iacType = params["iac"] + iacVersion = params["iacVersion"] + cloudType = params["cloud"] + ) + + logger.Infof("url params: '%+v'", params) + + // parse multipart form, 10 << 20 specifies maximum upload of 10 MB files + r.ParseMultipartForm(10 << 20) + + // FormFile returns the first file for the given key + // it also returns the FileHeader so we can get the Filename, + // the Header and the size of the file + file, handler, err := r.FormFile("file") + if err != nil { + logger.Errorf("failed to retreive uploaded file. error: '%v'", err) + return + } + defer file.Close() + + logger.Debugf("uploaded file: %+v", handler.Filename) + logger.Debugf("file size: %+v", handler.Size) + logger.Debugf("MIME header: %+v", handler.Header) + + // Create a temporary file within temp directory + tempFile, err := ioutil.TempFile("", "terrascan-*.tf") + if err != nil { + logger.Errorf("failed to create temp file. error: '%v'", err) + return + } + defer os.Remove(tempFile.Name()) + logger.Debugf("create temp config file at '%s'", tempFile.Name()) + + // read all of the contents of uploaded file + fileBytes, err := ioutil.ReadAll(file) + if err != nil { + logger.Errorf("failed to read uploaded file. error: '%v'", err) + return + } + + // write this byte array to our temporary file + tempFile.Write(fileBytes) + + // create a new runtime executor for scanning the uploaded file + executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, + tempFile.Name(), "") + if err != nil { + return + } + normalized, err := executor.Execute() + if err != nil { + logger.Errorf("failed to scan uploaded file. error: '%v'", err) + return + } + + j, err := json.MarshalIndent(normalized, "", " ") + if err != nil { + logger.Errorf("failed to create JSON. error: '%v'", err) + return + } + + // return that we have successfully uploaded our file! 
+ fmt.Fprint(w, string(j)) +} diff --git a/pkg/http-server/gateway_test.go b/pkg/http-server/gateway_test.go new file mode 100644 index 000000000..d03c6f483 --- /dev/null +++ b/pkg/http-server/gateway_test.go @@ -0,0 +1,18 @@ +package httpserver + +import ( + "reflect" + "testing" +) + +func TestNewAPIGateway(t *testing.T) { + t.Run("new API gateway", func(t *testing.T) { + var ( + want = APIGateway{} + got = NewAPIGateway() + ) + if !reflect.DeepEqual(*got, want) { + t.Errorf("got: '%v', want: '%v'", *got, want) + } + }) +} diff --git a/pkg/http-server/routes.go b/pkg/http-server/routes.go index 7d1a636ed..d60589300 100644 --- a/pkg/http-server/routes.go +++ b/pkg/http-server/routes.go @@ -30,7 +30,8 @@ type Route struct { // Routes returns a slice of routes of API endpoints registered with http server func (g *APIGateway) Routes() []*Route { return []*Route{ - {verb: "GET", path: path("health", APIVersion), fn: g.Health}, + {verb: "GET", path: "/health", fn: g.Health}, + {verb: "POST", path: path("{iac}/{iacVersion}/{cloud}/local/file/scan", APIVersion), fn: g.scanFile}, } } diff --git a/pkg/http-server/routes_test.go b/pkg/http-server/routes_test.go new file mode 100644 index 000000000..2df448d27 --- /dev/null +++ b/pkg/http-server/routes_test.go @@ -0,0 +1,49 @@ +package httpserver + +import ( + "testing" +) + +func TestRoutes(t *testing.T) { + t.Run("health route check", func(t *testing.T) { + var ( + g = NewAPIGateway() + got = g.Routes() + passed = false + ) + + for _, r := range got { + if r.path == "/health" && r.verb == "GET" { + passed = true + break + } + } + if !passed { + t.Errorf("failed to find /health in routes") + } + }) +} + +func TestPath(t *testing.T) { + + table := []struct { + name string + route string + version string + want string + }{ + { + name: "route someroute version v1", + route: "someroute", + version: "v1", + want: "/v1/someroute", + }, + } + + for _, tt := range table { + got := path(tt.route, tt.version) + if got != tt.want { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + } +} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 514f450f2..1f40cf967 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -17,13 +17,9 @@ package runtime import ( - "os" - - "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" iacProvider "github.com/accurics/terrascan/pkg/iac-providers" - "github.com/accurics/terrascan/pkg/iac-providers/output" ) // Executor object @@ -74,13 +70,8 @@ func (e *Executor) Init() error { } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() error { +func (e *Executor) Execute() (normalized interface{}, err error) { - // load iac config - var ( - normalized output.AllResourceConfigs - err error - ) if e.dirPath != "" { normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } else { @@ -88,12 +79,11 @@ func (e *Executor) Execute() error { normalized, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { - return err + return normalized, err } - utils.PrintJSON(normalized, os.Stdout) // write output // successful - return nil + return normalized, nil } diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 86be316c9..46b5ed607 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -47,6 +47,7 @@ func (m MockIacProvider) LoadIacFile(file string) (output.AllResourceConfigs, er func TestExecute(t *testing.T) { + // TODO: add tests to validate output of Execute() table := []struct { name string 
executor Executor @@ -88,7 +89,7 @@ func TestExecute(t *testing.T) { for _, tt := range table { t.Run(tt.name, func(t *testing.T) { - gotErr := tt.executor.Execute() + _, gotErr := tt.executor.Execute() if !reflect.DeepEqual(gotErr, tt.wantErr) { t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) } From 599a9aaca774c9f66666f9a305753547c2c00f32 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 00:12:27 +0530 Subject: [PATCH 047/188] add terrascan Dockerfile --- .gitignore | 3 +-- build/Dockerfile | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+), 2 deletions(-) create mode 100644 build/Dockerfile diff --git a/.gitignore b/.gitignore index 38ff92844..8639a3a91 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ __pycache__/ # Distribution / packaging .Python -build/ develop-eggs/ dist/ downloads/ @@ -104,4 +103,4 @@ venv.bak/ #vscode .vscode/ -/updatedFiles \ No newline at end of file +/updatedFiles diff --git a/build/Dockerfile b/build/Dockerfile new file mode 100644 index 000000000..7dae7cb2d --- /dev/null +++ b/build/Dockerfile @@ -0,0 +1,36 @@ +# -------- builder stage -------- # +FROM golang:alpine AS builder + +ARG GOOS_VAL=linux +ARG GOARCH=amd64 + +WORKDIR $GOPATH/src/terrascan + +# download go dependencies +COPY go.mod go.sum ./ +RUN go mod download + +# copy terrascan source +COPY . . + +# build binary +RUN GOOS=${GOOS_VAL} GOARCH=${GOARCH_VAL} go build -v -ldflags "-w -s" -o /go/bin/terrascan ./cmd/terrascan + + +# -------- prod stage -------- # +FROM alpine:3.12.0 + +# create non root user +RUN addgroup --gid 101 terrascan && \ + adduser -S --uid 101 --ingroup terrascan terrascan + +# run as non root user +USER terrascan + +# copy terrascan binary from build +COPY --from=builder /go/bin/terrascan /go/bin/terrascan + +EXPOSE 9010 + +ENTRYPOINT ["/go/bin/terrascan", "--server"] +CMD ["--log-type", "json"] From b724edc33f52a78ad8df0cebece016a8d65fead8 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 10:01:50 +0530 Subject: [PATCH 048/188] add script to build docker image, update Makefile to build docker image --- Makefile | 11 ++++++++--- scripts/docker-build.sh | 11 +++++++++++ 2 files changed, 19 insertions(+), 3 deletions(-) create mode 100755 scripts/docker-build.sh diff --git a/Makefile b/Makefile index e962d83c8..52cd10707 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,9 @@ -GITCOMMIT := $(shell git rev-parse --short HEAD 2>/dev/null) +GIT_COMMIT := $(shell git rev-parse --short HEAD 2>/dev/null) BUILD_FLAGS := -v -ldflags "-w -s" BUILD_DIR = ./bin BINARY_NAME = terrascan - # default default: help @@ -15,6 +14,7 @@ help: @echo "build\n\tbuild terrascan binary" @echo "cicd\n\tsimulate CI/CD pipeline locally" @echo "clean\n\tclean up build" + @echo "docker-build\n\tbuild terrascan docker image" @echo "gofmt\n\tvalidate gofmt" @echo "golint\n\tvalidate golint" @echo "gomodverify\n\tverify go modules" @@ -38,7 +38,7 @@ clean: # run all cicd steps -cicd: validate build test +cicd: validate build test docker-build # run all unit and integration tests @@ -77,3 +77,8 @@ staticcheck: # run unit tests unit-tests: ./scripts/generate-coverage.sh + + +# build terrascan docker image +docker-build: + ./scripts/docker-build.sh diff --git a/scripts/docker-build.sh b/scripts/docker-build.sh new file mode 100755 index 000000000..759ab3e0a --- /dev/null +++ b/scripts/docker-build.sh @@ -0,0 +1,11 @@ +#!/bin/bash + +set -o errexit +set -o nounset +set -o pipefail + +GIT_COMMIT=$(git rev-parse 
--short HEAD 2>/dev/null) +DOCKER_REPO="accurics/terrascan" +DOCKERFILE="./build/Dockerfile" + +docker build -t ${DOCKER_REPO}:${GIT_COMMIT} -f ${DOCKERFILE} . From 4d2f8379188e36efde1988504f8bb9e95500cfdc Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 10:16:31 +0530 Subject: [PATCH 049/188] add script to push docker image, update Makefile to push docker image --- Makefile | 7 +++++++ scripts/docker-push.sh | 12 ++++++++++++ 2 files changed, 19 insertions(+) create mode 100755 scripts/docker-push.sh diff --git a/Makefile b/Makefile index 52cd10707..6534e3a09 100644 --- a/Makefile +++ b/Makefile @@ -10,11 +10,13 @@ default: help # help help: + # please keep the commands in lexicographical order @echo "usage: make [command]\ncommands:" @echo "build\n\tbuild terrascan binary" @echo "cicd\n\tsimulate CI/CD pipeline locally" @echo "clean\n\tclean up build" @echo "docker-build\n\tbuild terrascan docker image" + @echo "docker-push\n\tpush terrascan docker image" @echo "gofmt\n\tvalidate gofmt" @echo "golint\n\tvalidate golint" @echo "gomodverify\n\tverify go modules" @@ -82,3 +84,8 @@ unit-tests: # build terrascan docker image docker-build: ./scripts/docker-build.sh + + +# push terrascan docker image +docker-push: + ./scripts/docker-push.sh diff --git a/scripts/docker-push.sh b/scripts/docker-push.sh new file mode 100755 index 000000000..fac4b5263 --- /dev/null +++ b/scripts/docker-push.sh @@ -0,0 +1,12 @@ +#!/bin/bash + +set -o errexit +set -o nounset +set -o pipefail + +GIT_COMMIT=$(git rev-parse --short HEAD 2>/dev/null) +DOCKER_REPO="accurics/terrascan" +DOCKERFILE="./build/Dockerfile" + +# PS: It is a prerequisite to execute 'docker login' before running this script +docker push ${DOCKER_REPO}:${GIT_COMMIT} From 96cb4dec09a506b6951cd1a2c5309b84c675eccf Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 10:36:10 +0530 Subject: [PATCH 050/188] add docker build and push to github actions --- .github/workflows/gobuild.yml | 42 ++++++++++++++++++++++++++--------- 1 file changed, 32 insertions(+), 10 deletions(-) diff --git a/.github/workflows/gobuild.yml b/.github/workflows/gobuild.yml index 71f4e2f3f..92adf2085 100644 --- a/.github/workflows/gobuild.yml +++ b/.github/workflows/gobuild.yml @@ -3,7 +3,7 @@ on: push: pull_request: jobs: - build: + validate: runs-on: ubuntu-latest env: GO111MODULE: on @@ -12,22 +12,44 @@ jobs: steps: - name: Checkout Terrascan uses: actions/checkout@v1 + - name: Setup Go uses: actions/setup-go@v1 with: go-version: 1.14 + - name: Install golint - run: | - go get -u golang.org/x/lint/golint + run: go get -u golang.org/x/lint/golint + - name: Go validations - run: | - echo Workflow trigger - ${{ github.event_name }} - make validate + run: make validate + - name: Build Terrascan - run: | - make build + run: make build + - name: Run unit tests - run: | - make unit-tests + run: make unit-tests + - name: Upload coverage to Codecov uses: codecov/codecov-action@v1 + + # push image to Docker Hub + push: + # Ensure "validate" job passes before pushing image. 
+ needs: validate + + runs-on: ubuntu-latest + if: github.event_name == 'push' + + steps: + - name: Checkout Terrascan + uses: actions/checkout@v1 + + - name: Build Terrascan docker image + run: make docker-build + + - name: Login to docker hub + run: echo "${{ secrets.DOCKER_HUB_TOKEN }}" | docker login -u accurics --password-stdin + + - name: Push Terrascan docker image + run: make docker-push From 02c0ef06d45e23bc32fc0ef70398e502247681f4 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 11:21:47 +0530 Subject: [PATCH 051/188] add docker-compose file for terrascan --- deploy/docker-compose.yml | 6 ++++++ 1 file changed, 6 insertions(+) create mode 100644 deploy/docker-compose.yml diff --git a/deploy/docker-compose.yml b/deploy/docker-compose.yml new file mode 100644 index 000000000..9210f5c88 --- /dev/null +++ b/deploy/docker-compose.yml @@ -0,0 +1,6 @@ +version: "3" +services: + terrascan: + image: accurics/terrascan:${TAG:-latest} + ports: + - 9010:9010 From 5f0ce1a7706ff7192e459070240db24f28158f00 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 11:26:01 +0530 Subject: [PATCH 052/188] update Makefile comments --- Makefile | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 6534e3a09..389dde9b8 100644 --- a/Makefile +++ b/Makefile @@ -8,9 +8,8 @@ BINARY_NAME = terrascan default: help -# help +# please keep the commands in lexicographical order help: - # please keep the commands in lexicographical order @echo "usage: make [command]\ncommands:" @echo "build\n\tbuild terrascan binary" @echo "cicd\n\tsimulate CI/CD pipeline locally" From 8e15ba2c8b4f30c54cae87903d02324c939460a4 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 20:06:55 +0530 Subject: [PATCH 053/188] simplify iac provider registration --- pkg/iac-providers/register.go | 40 ++++++++++ pkg/iac-providers/register_test.go | 77 ++++++++++++++++++++ pkg/iac-providers/terraform.go | 20 +++++ pkg/iac-providers/{supported.go => types.go} | 29 +------- 4 files changed, 139 insertions(+), 27 deletions(-) create mode 100644 pkg/iac-providers/register.go create mode 100644 pkg/iac-providers/register_test.go create mode 100644 pkg/iac-providers/terraform.go rename pkg/iac-providers/{supported.go => types.go} (50%) diff --git a/pkg/iac-providers/register.go b/pkg/iac-providers/register.go new file mode 100644 index 000000000..24fc97b56 --- /dev/null +++ b/pkg/iac-providers/register.go @@ -0,0 +1,40 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package iacprovider + +import ( + "reflect" +) + +// map of supported IaC providers +var supportedIacProviders = make(map[supportedIacType](map[supportedIacVersion]reflect.Type)) + +// RegisterIacProvider registers an IaC provider for terrascan +// if the Iac provider does not have a version, it can be kept empty +func RegisterIacProvider(iacType supportedIacType, iacVersion supportedIacVersion, iacProvider reflect.Type) { + + if iacVersion == "" { + iacVersion = defaultIacVersion + } + + // version support + supportedTerraformVersions := make(map[supportedIacVersion]reflect.Type) + supportedTerraformVersions[iacVersion] = iacProvider + + // type support + supportedIacProviders[iacType] = supportedTerraformVersions +} diff --git a/pkg/iac-providers/register_test.go b/pkg/iac-providers/register_test.go new file mode 100644 index 000000000..0c4c1d500 --- /dev/null +++ b/pkg/iac-providers/register_test.go @@ -0,0 +1,77 @@ +package iacprovider + +import ( + "reflect" + "testing" +) + +type MockIacProvider struct{} + +func TestRegisterIacProvider(t *testing.T) { + + /* + table := []struct { + name string + iacType supportedIacType + iacVersion supportedIacVersion + want reflect.Type + }{ + { + name: "mock iac type and version", + iacType: supportedIacType("mockIacType"), + iacVersion: supportedIacVersion("mockIacVersion"), + want: reflect.TypeOf(MockIacProvider{}), + }, + { + name: "mock iac type default version", + iacType: supportedIacType("mockIacType"), + iacVersion: supportedIacVersion(""), + want: reflect.TypeOf(MockIacProvider{}), + }, + } + */ + + t.Run("mock iac provider", func(t *testing.T) { + + var ( + iacType = supportedIacType("mockIacType") + iacVersion = supportedIacVersion("mockIacVersion") + want = reflect.TypeOf(MockIacProvider{}) + ) + + RegisterIacProvider(iacType, iacVersion, want) + + if _, present := supportedIacProviders[iacType]; !present { + t.Errorf("mockIacType not registered") + } + got, present := supportedIacProviders[iacType][iacVersion] + if !present { + t.Errorf("mockIacVersion not registered") + } + if !reflect.DeepEqual(got, want) { + t.Errorf("got: '%v', want: '%v'", got, want) + } + }) + + t.Run("mock iac default version", func(t *testing.T) { + + var ( + iacType = supportedIacType("mockIacType") + iacVersion = supportedIacVersion("") + want = reflect.TypeOf(MockIacProvider{}) + ) + + RegisterIacProvider(iacType, iacVersion, want) + + if _, present := supportedIacProviders[iacType]; !present { + t.Errorf("mockIacType not registered") + } + got, present := supportedIacProviders[iacType][defaultIacVersion] + if !present { + t.Errorf("defaultIacVersion not registered") + } + if !reflect.DeepEqual(got, want) { + t.Errorf("got: '%v', want: '%v'", got, want) + } + }) +} diff --git a/pkg/iac-providers/terraform.go b/pkg/iac-providers/terraform.go new file mode 100644 index 000000000..791bf374e --- /dev/null +++ b/pkg/iac-providers/terraform.go @@ -0,0 +1,20 @@ +package iacprovider + +import ( + "reflect" + + tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" +) + +// terraform specific constants +const ( + terraform supportedIacType = "terraform" + terraformV12 supportedIacVersion = "v12" +) + +// register terraform as an IaC provider with terrascan +func init() { + + // register iac provider + RegisterIacProvider(terraform, terraformV12, reflect.TypeOf(tfv12.TfV12{})) +} diff --git a/pkg/iac-providers/supported.go b/pkg/iac-providers/types.go similarity index 50% rename from pkg/iac-providers/supported.go rename to 
pkg/iac-providers/types.go index ae3665eee..d34ebaefe 100644 --- a/pkg/iac-providers/supported.go +++ b/pkg/iac-providers/types.go @@ -16,38 +16,13 @@ package iacprovider -import ( - "reflect" - - tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" -) - // SupportedIacType data type for supported IaC provider type supportedIacType string -// supported IaC providers -const ( - terraform supportedIacType = "terraform" -) - // supportedIacVersion data type for supported Iac provider type supportedIacVersion string -// supported Iac versions +// default Iac versions const ( - defaultVersion supportedIacVersion = "default" - terraformV12 supportedIacVersion = "v12" + defaultIacVersion supportedIacVersion = "default" ) - -// map of supported IaC providers -var supportedIacProviders map[supportedIacType](map[supportedIacVersion]reflect.Type) - -// initializes a map of supported IaC providers -func init() { - supportedIacProviders = make(map[supportedIacType](map[supportedIacVersion]reflect.Type)) - - // terraform support - supportedTerraformVersions := make(map[supportedIacVersion]reflect.Type) - supportedTerraformVersions[terraformV12] = reflect.TypeOf(tfv12.TfV12{}) - supportedIacProviders[terraform] = supportedTerraformVersions -} From 0564e55cf9c1eceacdec2cb1749a8bed3094c811 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 31 Jul 2020 21:25:30 +0530 Subject: [PATCH 054/188] improving unit tests for utils package --- pkg/utils/path.go | 9 +-------- pkg/utils/path_test.go | 6 ++++++ pkg/utils/printer.go | 8 +------- 3 files changed, 8 insertions(+), 15 deletions(-) diff --git a/pkg/utils/path.go b/pkg/utils/path.go index 6025470e7..4fcfe66ee 100644 --- a/pkg/utils/path.go +++ b/pkg/utils/path.go @@ -17,12 +17,9 @@ package utils import ( - "fmt" "os" "path/filepath" "strings" - - "go.uber.org/zap" ) // GetAbsPath returns absolute path from passed file path resolving even ~ to user home dir and any other such symbols that are only @@ -40,10 +37,6 @@ func GetAbsPath(path string) (string, error) { } // get absolute file path - path, err := filepath.Abs(path) - if err != nil { - zap.S().Errorf("unable to resolve %s to absolute path. error: '%s'", path, err) - return path, fmt.Errorf("failed to resolve absolute path") - } + path, _ = filepath.Abs(path) return path, nil } diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go index 77774281a..a2efc55b8 100644 --- a/pkg/utils/path_test.go +++ b/pkg/utils/path_test.go @@ -41,6 +41,12 @@ func TestGetAbsPath(t *testing.T) { want: os.Getenv("HOME"), wantErr: nil, }, + { + name: "dir in HOME dir", + path: "~/somedir", + want: os.Getenv("HOME") + "/somedir", + wantErr: nil, + }, { name: "testdata dir", path: "./testdata", diff --git a/pkg/utils/printer.go b/pkg/utils/printer.go index c3da417cc..4e1f6a356 100644 --- a/pkg/utils/printer.go +++ b/pkg/utils/printer.go @@ -19,17 +19,11 @@ package utils import ( "encoding/json" "io" - - "go.uber.org/zap" ) // PrintJSON prints data in JSON format func PrintJSON(data interface{}, writer io.Writer) { - j, err := json.MarshalIndent(data, "", " ") - if err != nil { - zap.S().Errorf("failed to create JSON. 
error: '%v'", err) - return - } + j, _ := json.MarshalIndent(data, "", " ") writer.Write(j) writer.Write([]byte{'\n'}) } From 492db060d5ce99aba8961a2e66d5e1e8f18a54ba Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 1 Aug 2020 06:56:18 +0530 Subject: [PATCH 055/188] improving unit tests for terraform/v12 package --- pkg/iac-providers/terraform/v12/load-dir.go | 10 ++- .../terraform/v12/load-dir_test.go | 16 +++- pkg/iac-providers/terraform/v12/load-file.go | 6 +- .../terraform/v12/load-file_test.go | 8 +- .../terraform/v12/testdata/empty.tf | 1 + .../invalid-moduleconfigs/cloudfront/main.tf | 84 +++++++++++++++++++ .../testdata/invalid-moduleconfigs/main.tf | 7 ++ 7 files changed, 122 insertions(+), 10 deletions(-) create mode 100644 pkg/iac-providers/terraform/v12/testdata/empty.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/cloudfront/main.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/main.tf diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index 6a5795be8..b0ae7bc80 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -31,7 +31,9 @@ import ( ) var ( - errDirEmptyTFConfig = fmt.Errorf("directory has no terraform files") + errEmptyTFConfigDir = fmt.Errorf("directory has no terraform files") + errLoadConfigDir = fmt.Errorf("failed to load terraform allResourcesConfig dir") + errBuildTFConfigDir = fmt.Errorf("failed to build terraform allResourcesConfig") ) // LoadIacDir starts traversing from the given rootDir and traverses through @@ -45,14 +47,14 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour // check if the directory has any tf config files (.tf or .tf.json) if !parser.IsConfigDir(absRootDir) { zap.S().Errorf("directory '%s' has no terraform config files", absRootDir) - return allResourcesConfig, errDirEmptyTFConfig + return allResourcesConfig, errEmptyTFConfigDir } // load root config directory rootMod, diags := parser.LoadConfigDir(absRootDir) if diags.HasErrors() { zap.S().Errorf("failed to load terraform config dir '%s'. error:\n%+v\n", absRootDir, diags) - return allResourcesConfig, fmt.Errorf("failed to load terraform allResourcesConfig dir") + return allResourcesConfig, errLoadConfigDir } // using the BuildConfig and ModuleWalkerFunc to traverse through all @@ -84,7 +86,7 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour )) if diags.HasErrors() { zap.S().Errorf("failed to build unified config. 
errors:\n%+v\n", diags) - return allResourcesConfig, fmt.Errorf("failed to build terraform allResourcesConfig") + return allResourcesConfig, errBuildTFConfigDir } /* diff --git a/pkg/iac-providers/terraform/v12/load-dir_test.go b/pkg/iac-providers/terraform/v12/load-dir_test.go index e082b27cd..943b83043 100644 --- a/pkg/iac-providers/terraform/v12/load-dir_test.go +++ b/pkg/iac-providers/terraform/v12/load-dir_test.go @@ -39,13 +39,25 @@ func TestLoadIacDir(t *testing.T) { name: "invalid dirPath", dirPath: "not-there", tfv12: TfV12{}, - wantErr: errDirEmptyTFConfig, + wantErr: errEmptyTFConfigDir, }, { name: "empty config", dirPath: "./testdata/testfile", tfv12: TfV12{}, - wantErr: errDirEmptyTFConfig, + wantErr: errEmptyTFConfigDir, + }, + { + name: "incorrect module structure", + dirPath: "./testdata/invalid-moduleconfigs", + tfv12: TfV12{}, + wantErr: errBuildTFConfigDir, + }, + { + name: "load invalid config dir", + dirPath: "./testdata", + tfv12: TfV12{}, + wantErr: errLoadConfigDir, }, } diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index a5248b992..9fdd8b694 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -27,7 +27,7 @@ import ( ) var ( - errFailedLoadConfigFile = fmt.Errorf("failed to load config file") + errLoadConfigFile = fmt.Errorf("failed to load config file") ) // LoadIacFile parses the given terraform file from the given file path @@ -39,11 +39,11 @@ func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllReso hclFile, diags := parser.LoadConfigFile(absFilePath) if diags != nil { zap.S().Errorf("failed to load config file '%s'. error:\n%v\n", diags) - return allResourcesConfig, errFailedLoadConfigFile + return allResourcesConfig, errLoadConfigFile } if hclFile == nil && diags.HasErrors() { zap.S().Errorf("error occured while loading config file. 
error:\n%v\n", diags) - return allResourcesConfig, errFailedLoadConfigFile + return allResourcesConfig, errLoadConfigFile } // initialize normalized output diff --git a/pkg/iac-providers/terraform/v12/load-file_test.go b/pkg/iac-providers/terraform/v12/load-file_test.go index f98ff3ecf..ab0270f27 100644 --- a/pkg/iac-providers/terraform/v12/load-file_test.go +++ b/pkg/iac-providers/terraform/v12/load-file_test.go @@ -39,7 +39,7 @@ func TestLoadIacFile(t *testing.T) { name: "invalid filepath", filePath: "not-there", tfv12: TfV12{}, - wantErr: errFailedLoadConfigFile, + wantErr: errLoadConfigFile, }, { name: "empty config", @@ -47,6 +47,12 @@ func TestLoadIacFile(t *testing.T) { tfv12: TfV12{}, wantErr: nil, }, + { + name: "invalid config", + filePath: "./testdata/empty.tf", + tfv12: TfV12{}, + wantErr: errLoadConfigFile, + }, } for _, tt := range table { diff --git a/pkg/iac-providers/terraform/v12/testdata/empty.tf b/pkg/iac-providers/terraform/v12/testdata/empty.tf new file mode 100644 index 000000000..f9ff3aaaf --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/empty.tf @@ -0,0 +1 @@ +some invalid tf file diff --git a/pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/cloudfront/main.tf b/pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/cloudfront/main.tf new file mode 100644 index 000000000..c047b6469 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/cloudfront/main.tf @@ -0,0 +1,84 @@ +module "sub-cloudfront" { + source = "./sub-cloudfront" +} + + +resource "aws_cloudfront_distribution" "s3-distribution-TLS-v1" { + origin { + domain_name = "aws_s3_bucket.b.bucket_regional_domain_name" + origin_id = "local.s3_origin_id" + + s3_origin_config { + origin_access_identity = "origin-access-identity/cloudfront/ABCDEFG1234567" + } + } + + enabled = true + + default_cache_behavior { + allowed_methods = ["DELETE", "GET", "HEAD", "OPTIONS", "PATCH", "POST", "PUT"] + cached_methods = ["GET", "HEAD"] + target_origin_id = "local.s3_origin_id" + + forwarded_values { + query_string = false + + cookies { + forward = "none" + } + } + viewer_protocol_policy = "https-only" + } + + ordered_cache_behavior { + path_pattern = "/content/immutable/*" + allowed_methods = ["GET", "HEAD", "OPTIONS"] + cached_methods = ["GET", "HEAD", "OPTIONS"] + target_origin_id = "local.s3_origin_id" + + forwarded_values { + query_string = false + headers = ["Origin"] + + cookies { + forward = "none" + } + } + + compress = true + viewer_protocol_policy = "allow-all" + } + + ordered_cache_behavior { + path_pattern = "/content/*" + allowed_methods = ["GET", "HEAD", "OPTIONS"] + cached_methods = ["GET", "HEAD"] + target_origin_id = "local.s3_origin_id" + + forwarded_values { + query_string = false + + cookies { + forward = "none" + } + } + + viewer_protocol_policy = "allow-all" + } + + restrictions { + geo_restriction { + restriction_type = "whitelist" + locations = ["US", "CA", "GB", "DE"] + } + } + + viewer_certificate { + cloudfront_default_certificate = true + minimum_protocol_version = "TLSv1" #expected version is TLSv1.1 or TLSv1.2 + } +} + +locals { + s3_origin_id = "myS3Origin" +} diff --git a/pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/main.tf b/pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/main.tf new file mode 100644 index 000000000..39dacd6cc --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/invalid-moduleconfigs/main.tf @@ -0,0 +1,7 @@ +provider "aws" { + region = "us-east-1" +} + +module "cloudfront" { + 
source = "./cloudfront" +} From 6606496f3ce6f79b4714133c9243b302ca9c1014 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 1 Aug 2020 07:26:20 +0530 Subject: [PATCH 056/188] improving tests for logging package --- pkg/logging/logger_test.go | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/pkg/logging/logger_test.go b/pkg/logging/logger_test.go index cd5b6eb26..59b755fae 100644 --- a/pkg/logging/logger_test.go +++ b/pkg/logging/logger_test.go @@ -17,6 +17,7 @@ package logging import ( + "reflect" "testing" "go.uber.org/zap/zapcore" @@ -87,3 +88,22 @@ func TestGetLogger(t *testing.T) { } } } + +func TestGetDefaultLogger(t *testing.T) { + t.Run("json encoding", func(t *testing.T) { + Init("json", "info") + got := GetDefaultLogger() + want := globalLogger + if !reflect.DeepEqual(got, want) { + t.Errorf("got: '%v', want: '%v'", got, want) + } + }) + t.Run("console encoding", func(t *testing.T) { + Init("console", "info") + got := GetDefaultLogger() + want := globalLogger + if !reflect.DeepEqual(got, want) { + t.Errorf("got: '%v', want: '%v'", got, want) + } + }) +} From b0ca89f6de353d52f9f09383b6f2f424d7fff79a Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sun, 2 Aug 2020 16:09:58 +0530 Subject: [PATCH 057/188] refactor and add unit tests to http-server package --- pkg/http-server/file-scan.go | 2 +- pkg/http-server/{gateway.go => handler.go} | 11 ++++---- .../{gateway_test.go => handler_test.go} | 6 ++--- pkg/http-server/health.go | 2 +- pkg/http-server/health_test.go | 26 +++++++++++++++++++ pkg/http-server/routes.go | 17 +++++++----- pkg/http-server/routes_test.go | 6 ++--- pkg/http-server/server.go | 25 ++++++++++++++++++ pkg/http-server/server_test.go | 18 +++++++++++++ pkg/http-server/start.go | 10 +++---- 10 files changed, 97 insertions(+), 26 deletions(-) rename pkg/http-server/{gateway.go => handler.go} (69%) rename pkg/http-server/{gateway_test.go => handler_test.go} (70%) create mode 100644 pkg/http-server/health_test.go create mode 100644 pkg/http-server/server.go create mode 100644 pkg/http-server/server_test.go diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go index c191ebfa3..852f0081f 100644 --- a/pkg/http-server/file-scan.go +++ b/pkg/http-server/file-scan.go @@ -29,7 +29,7 @@ import ( ) // scanFile accepts uploaded file and runs scan on it -func (g *APIGateway) scanFile(w http.ResponseWriter, r *http.Request) { +func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { // new logger logger := logging.GetDefaultLogger() diff --git a/pkg/http-server/gateway.go b/pkg/http-server/handler.go similarity index 69% rename from pkg/http-server/gateway.go rename to pkg/http-server/handler.go index c07633ac1..a72370030 100644 --- a/pkg/http-server/gateway.go +++ b/pkg/http-server/handler.go @@ -16,11 +16,10 @@ package httpserver -// APIGateway implements all the API endpoints, APIGateway should store all the -// metadata info which may be required by all the API handlers -type APIGateway struct{} +// APIHandler struct for http api server +type APIHandler struct{} -// NewAPIGateway returns a new APIGateway{} -func NewAPIGateway() *APIGateway { - return &APIGateway{} +// NewAPIHandler returns a new APIHandler{} +func NewAPIHandler() *APIHandler { + return &APIHandler{} } diff --git a/pkg/http-server/gateway_test.go b/pkg/http-server/handler_test.go similarity index 70% rename from pkg/http-server/gateway_test.go rename to pkg/http-server/handler_test.go index d03c6f483..15c926eb1 100644 --- 
a/pkg/http-server/gateway_test.go +++ b/pkg/http-server/handler_test.go @@ -5,11 +5,11 @@ import ( "testing" ) -func TestNewAPIGateway(t *testing.T) { +func TestNewAPIHandler(t *testing.T) { t.Run("new API gateway", func(t *testing.T) { var ( - want = APIGateway{} - got = NewAPIGateway() + want = APIHandler{} + got = NewAPIHandler() ) if !reflect.DeepEqual(*got, want) { t.Errorf("got: '%v', want: '%v'", *got, want) diff --git a/pkg/http-server/health.go b/pkg/http-server/health.go index 70344c912..ff5e54712 100644 --- a/pkg/http-server/health.go +++ b/pkg/http-server/health.go @@ -21,6 +21,6 @@ import ( ) // Health returns the health of the http server -func (g *APIGateway) Health(w http.ResponseWriter, r *http.Request) { +func (g *APIHandler) Health(w http.ResponseWriter, r *http.Request) { w.WriteHeader(http.StatusOK) } diff --git a/pkg/http-server/health_test.go b/pkg/http-server/health_test.go new file mode 100644 index 000000000..08b92f8a8 --- /dev/null +++ b/pkg/http-server/health_test.go @@ -0,0 +1,26 @@ +package httpserver + +import ( + "net/http" + "net/http/httptest" + "testing" +) + +func TestHealth(t *testing.T) { + + handler := NewAPIHandler() + + t.Run("test health api", func(t *testing.T) { + var ( + req, _ = http.NewRequest(http.MethodGet, "/players/Pepper", nil) + resp = httptest.NewRecorder() + want = http.StatusOK + ) + handler.Health(resp, req) + got := resp.Result().StatusCode + + if got != want { + t.Errorf("incorrect health status code, got: '%v', want: '%v'", got, want) + } + }) +} diff --git a/pkg/http-server/routes.go b/pkg/http-server/routes.go index d60589300..0b2fc8198 100644 --- a/pkg/http-server/routes.go +++ b/pkg/http-server/routes.go @@ -27,14 +27,17 @@ type Route struct { fn func(http.ResponseWriter, *http.Request) } -// Routes returns a slice of routes of API endpoints registered with http server -func (g *APIGateway) Routes() []*Route { - return []*Route{ - {verb: "GET", path: "/health", fn: g.Health}, - {verb: "POST", path: path("{iac}/{iacVersion}/{cloud}/local/file/scan", APIVersion), fn: g.scanFile}, +// Routes returns a slice of routes of API endpoints to be registered with +// http server +func (g *APIServer) Routes() []*Route { + h := NewAPIHandler() + routes := []*Route{ + {verb: "GET", path: "/health", fn: h.Health}, + {verb: "POST", path: versionedPath("{iac}/{iacVersion}/{cloud}/local/file/scan"), fn: h.scanFile}, } + return routes } -func path(route, version string) string { - return "/" + version + "/" + route +func versionedPath(route string) string { + return "/" + APIVersion + "/" + route } diff --git a/pkg/http-server/routes_test.go b/pkg/http-server/routes_test.go index 2df448d27..7425dfd46 100644 --- a/pkg/http-server/routes_test.go +++ b/pkg/http-server/routes_test.go @@ -7,8 +7,8 @@ import ( func TestRoutes(t *testing.T) { t.Run("health route check", func(t *testing.T) { var ( - g = NewAPIGateway() - got = g.Routes() + server = NewAPIServer() + got = server.Routes() passed = false ) @@ -41,7 +41,7 @@ func TestPath(t *testing.T) { } for _, tt := range table { - got := path(tt.route, tt.version) + got := versionedPath(tt.route) if got != tt.want { t.Errorf("got: '%v', want: '%v'", got, tt.want) } diff --git a/pkg/http-server/server.go b/pkg/http-server/server.go new file mode 100644 index 000000000..68529d84a --- /dev/null +++ b/pkg/http-server/server.go @@ -0,0 +1,25 @@ +/* + Copyright (C) 2020 Accurics, Inc. 
+ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package httpserver + +// APIServer struct for http api server +type APIServer struct{} + +// NewAPIServer returns a new APIServer{} +func NewAPIServer() *APIServer { + return &APIServer{} +} diff --git a/pkg/http-server/server_test.go b/pkg/http-server/server_test.go new file mode 100644 index 000000000..77f5352ea --- /dev/null +++ b/pkg/http-server/server_test.go @@ -0,0 +1,18 @@ +package httpserver + +import ( + "reflect" + "testing" +) + +func TestNewAPIServer(t *testing.T) { + t.Run("new API gateway", func(t *testing.T) { + var ( + want = APIServer{} + got = NewAPIServer() + ) + if !reflect.DeepEqual(*got, want) { + t.Errorf("got: '%v', want: '%v'", *got, want) + } + }) +} diff --git a/pkg/http-server/start.go b/pkg/http-server/start.go index 4e54d7cc8..807538960 100644 --- a/pkg/http-server/start.go +++ b/pkg/http-server/start.go @@ -29,18 +29,18 @@ import ( // Start initializes api routes and starts http server func Start() { - // create a new API gateway - g := NewAPIGateway() + // create a new API server + server := NewAPIServer() // get all routes - routes := g.Routes() + routes := server.Routes() // register routes and start the http server - g.start(routes) + server.start(routes) } // start http server -func (g *APIGateway) start(routes []*Route) { +func (g *APIServer) start(routes []*Route) { var ( err error From ae98831bcaa125a915a1bd8102d61235bc917ee6 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 3 Aug 2020 08:45:28 +0530 Subject: [PATCH 058/188] add more unit tests for scan file --- pkg/http-server/file-scan.go | 39 +++++---- pkg/http-server/file-scan_test.go | 113 +++++++++++++++++++++++++ pkg/http-server/helpers.go | 36 ++++++++ pkg/http-server/testdata/empty.tf | 0 pkg/http-server/testdata/invalid.tf | 1 + pkg/http-server/testdata/testconfig.tf | 103 ++++++++++++++++++++++ 6 files changed, 276 insertions(+), 16 deletions(-) create mode 100644 pkg/http-server/file-scan_test.go create mode 100644 pkg/http-server/helpers.go create mode 100644 pkg/http-server/testdata/empty.tf create mode 100644 pkg/http-server/testdata/invalid.tf create mode 100644 pkg/http-server/testdata/testconfig.tf diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go index 852f0081f..3a5564aaa 100644 --- a/pkg/http-server/file-scan.go +++ b/pkg/http-server/file-scan.go @@ -23,17 +23,14 @@ import ( "net/http" "os" - "github.com/accurics/terrascan/pkg/logging" "github.com/accurics/terrascan/pkg/runtime" "github.com/gorilla/mux" + "go.uber.org/zap" ) // scanFile accepts uploaded file and runs scan on it func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { - // new logger - logger := logging.GetDefaultLogger() - // get url params params := mux.Vars(r) var ( @@ -42,8 +39,6 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { cloudType = params["cloud"] ) - logger.Infof("url params: '%+v'", params) - // parse multipart form, 10 << 20 specifies maximum upload of 10 MB files r.ParseMultipartForm(10 << 
20) @@ -52,28 +47,34 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { // the Header and the size of the file file, handler, err := r.FormFile("file") if err != nil { - logger.Errorf("failed to retreive uploaded file. error: '%v'", err) + errMsg := fmt.Sprintf("failed to retreive uploaded file. error: '%v'", err) + zap.S().Error(errMsg) + apiErrorResponse(w, errMsg, http.StatusInternalServerError) return } defer file.Close() - logger.Debugf("uploaded file: %+v", handler.Filename) - logger.Debugf("file size: %+v", handler.Size) - logger.Debugf("MIME header: %+v", handler.Header) + zap.S().Debugf("uploaded file: %+v", handler.Filename) + zap.S().Debugf("file size: %+v", handler.Size) + zap.S().Debugf("MIME header: %+v", handler.Header) // Create a temporary file within temp directory tempFile, err := ioutil.TempFile("", "terrascan-*.tf") if err != nil { - logger.Errorf("failed to create temp file. error: '%v'", err) + errMsg := fmt.Sprintf("failed to create temp file. error: '%v'", err) + zap.S().Error(errMsg) + apiErrorResponse(w, errMsg, http.StatusInternalServerError) return } defer os.Remove(tempFile.Name()) - logger.Debugf("create temp config file at '%s'", tempFile.Name()) + zap.S().Debugf("create temp config file at '%s'", tempFile.Name()) // read all of the contents of uploaded file fileBytes, err := ioutil.ReadAll(file) if err != nil { - logger.Errorf("failed to read uploaded file. error: '%v'", err) + errMsg := fmt.Sprintf("failed to read uploaded file. error: '%v'", err) + zap.S().Error(errMsg) + apiErrorResponse(w, errMsg, http.StatusInternalServerError) return } @@ -84,20 +85,26 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, tempFile.Name(), "") if err != nil { + zap.S().Error(err) + apiErrorResponse(w, err.Error(), http.StatusBadRequest) return } normalized, err := executor.Execute() if err != nil { - logger.Errorf("failed to scan uploaded file. error: '%v'", err) + errMsg := fmt.Sprintf("failed to scan uploaded file. error: '%v'", err) + zap.S().Error(errMsg) + apiErrorResponse(w, errMsg, http.StatusInternalServerError) return } j, err := json.MarshalIndent(normalized, "", " ") if err != nil { - logger.Errorf("failed to create JSON. error: '%v'", err) + errMsg := fmt.Sprintf("failed to create JSON. error: '%v'", err) + zap.S().Error(errMsg) + apiErrorResponse(w, errMsg, http.StatusInternalServerError) return } // return that we have successfully uploaded our file! 
- fmt.Fprint(w, string(j)) + apiResponse(w, string(j), http.StatusOK) } diff --git a/pkg/http-server/file-scan_test.go b/pkg/http-server/file-scan_test.go new file mode 100644 index 000000000..2e4523b7c --- /dev/null +++ b/pkg/http-server/file-scan_test.go @@ -0,0 +1,113 @@ +package httpserver + +import ( + "bytes" + "fmt" + "io" + "mime/multipart" + "net/http" + "net/http/httptest" + "os" + "path/filepath" + "testing" + + "github.com/gorilla/mux" +) + +func TestUpload(t *testing.T) { + + table := []struct { + name string + path string + param string + iacType string + iacVersion string + cloudType string + wantStatus int + }{ + { + name: "valid file scan", + path: "./testdata/testconfig.tf", + param: "file", + iacType: "terraform", + iacVersion: "v12", + cloudType: "aws", + wantStatus: http.StatusOK, + }, + { + name: "invalid iacType", + path: "./testdata/testconfig.tf", + param: "file", + iacType: "notthere", + iacVersion: "v12", + cloudType: "aws", + wantStatus: http.StatusBadRequest, + }, + { + name: "invalid file param", + path: "./testdata/testconfig.tf", + param: "someparam", + wantStatus: http.StatusInternalServerError, + }, + { + name: "invalid file config", + path: "./testdata/invalid.tf", + param: "file", + iacType: "terraform", + iacVersion: "v12", + cloudType: "aws", + wantStatus: http.StatusInternalServerError, + }, + { + name: "empty file config", + path: "./testdata/empty.tf", + param: "file", + iacType: "terraform", + iacVersion: "v12", + cloudType: "aws", + wantStatus: http.StatusOK, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + + // test file to upload + path := tt.path + file, err := os.Open(path) + if err != nil { + t.Error(err) + } + defer file.Close() + + // use buffer to store response body + body := &bytes.Buffer{} + writer := multipart.NewWriter(body) + part, err := writer.CreateFormFile(tt.param, filepath.Base(path)) + if err != nil { + writer.Close() + t.Error(err) + } + io.Copy(part, file) + writer.Close() + + // http request of the type "/v1/{iacType}/{iacVersion}/{cloudType}/file/scan" + url := fmt.Sprintf("/v1/%s/%s/%s/local/file/scan", tt.iacType, tt.iacVersion, tt.cloudType) + req := httptest.NewRequest("POST", url, body) + req.Header.Set("Content-Type", writer.FormDataContentType()) + req = mux.SetURLVars(req, map[string]string{ + "iac": tt.iacType, + "iacVersion": tt.iacVersion, + "cloud": tt.cloudType, + }) + res := httptest.NewRecorder() + // new api handler + h := NewAPIHandler() + h.scanFile(res, req) + + if res.Code != tt.wantStatus { + t.Errorf("incorrect status code, got: '%v', want: '%v', error: '%v'", res.Code, http.StatusOK, res.Body) + } + }) + } +} diff --git a/pkg/http-server/helpers.go b/pkg/http-server/helpers.go new file mode 100644 index 000000000..089870377 --- /dev/null +++ b/pkg/http-server/helpers.go @@ -0,0 +1,36 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package httpserver + +import ( + "fmt" + "net/http" +) + +// apiResponse creates an API response +func apiResponse(w http.ResponseWriter, msg string, statusCode int) { + w.WriteHeader(statusCode) + w.Header().Set("Content-Type", "application/json") + fmt.Fprint(w, msg) +} + +// apiErrorResponse creates an API error response +func apiErrorResponse(w http.ResponseWriter, errMsg string, statusCode int) { + w.Header().Set("Content-Type", "application/json") + w.Header().Set("X-Content-Type-Options", "nosniff") + http.Error(w, errMsg, statusCode) +} diff --git a/pkg/http-server/testdata/empty.tf b/pkg/http-server/testdata/empty.tf new file mode 100644 index 000000000..e69de29bb diff --git a/pkg/http-server/testdata/invalid.tf b/pkg/http-server/testdata/invalid.tf new file mode 100644 index 000000000..f9ff3aaaf --- /dev/null +++ b/pkg/http-server/testdata/invalid.tf @@ -0,0 +1 @@ +some invalid tf file diff --git a/pkg/http-server/testdata/testconfig.tf b/pkg/http-server/testdata/testconfig.tf new file mode 100644 index 000000000..0e92e1ac9 --- /dev/null +++ b/pkg/http-server/testdata/testconfig.tf @@ -0,0 +1,103 @@ +provider "aws" { + region = var.aws_region +} + + +resource "aws_vpc" "vpc_playground" { + cidr_block = var.cidr_vpc + enable_dns_support = true + enable_dns_hostnames = true + tags = { + Environment = "${var.environment_tag}" + } +} + +resource "aws_internet_gateway" "igw_playground" { + vpc_id = aws_vpc.vpc_playground.id + tags = { + Environment = "${var.environment_tag}" + } +} + +resource "aws_subnet" "subnet_public_playground" { + vpc_id = aws_vpc.vpc_playground.id + cidr_block = var.cidr_subnet + map_public_ip_on_launch = "true" + tags = { + Environment = "${var.environment_tag}" + } +} + +resource "aws_route_table" "rtb_public_playground" { + vpc_id = aws_vpc.vpc_playground.id + route { + cidr_block = "0.0.0.0/0" + gateway_id = aws_internet_gateway.igw_playground.id + } + tags = { + Environment = "${var.environment_tag}" + } +} + +resource "aws_route_table_association" "rta_subnet_public_playground" { + subnet_id = aws_subnet.subnet_public_playground.id + route_table_id = aws_route_table.rtb_public_playground.id +} + +resource "aws_security_group" "sg_playground" { + name = "sg" + vpc_id = aws_vpc.vpc_playground.id + ingress { + from_port = 22 + to_port = 22 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + ingress { + from_port = 80 + to_port = 80 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + } + egress { + from_port = 0 + to_port = 0 + protocol = "-1" + cidr_blocks = ["0.0.0.0/0"] + } + tags = { + Environment = "${var.environment_tag}" + } +} + +resource "aws_key_pair" "ec2key_playground" { + key_name = "testKey" + public_key = file(var.public_key_path) +} + +resource "aws_instance" "instance_playground" { + ami = lookup(var.aws_amis, var.aws_region) + instance_type = var.instance_type + subnet_id = aws_subnet.subnet_public_playground.id + vpc_security_group_ids = [aws_security_group.sg_playground.id] + key_name = aws_key_pair.ec2key_playground.key_name + tags = { + Environment = "${var.environment_tag}" + } + provisioner "remote-exec" { + + inline = [ + "sudo apt-get -y update", + "sudo apt-get -y install nginx", + "sudo touch /var/www/html/index.html", + "echo HelloWorld | sudo tee -a /var/www/html/index.html", + "sudo service nginx start", + ] + connection { + host = self.public_ip + type = "ssh" + user = "ubuntu" + private_key = file(var.privateKey) + } + } +} From 887911bea4d0e590205da0ac44c91ed46c4c0456 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala 
Date: Mon, 3 Aug 2020 11:00:00 +0530 Subject: [PATCH 059/188] add more unit test for terraform v12 package --- .../terraform/v12/load-file_test.go | 7 ++ .../v12/testdata/dummyconfig/dummyconfig.tf | 55 +++++++++++++ .../v12/testdata/tfjson/dummyconfig.json | 77 +++++++++++++++++++ 3 files changed, 139 insertions(+) create mode 100644 pkg/iac-providers/terraform/v12/testdata/dummyconfig/dummyconfig.tf create mode 100644 pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json diff --git a/pkg/iac-providers/terraform/v12/load-file_test.go b/pkg/iac-providers/terraform/v12/load-file_test.go index ab0270f27..d1f289042 100644 --- a/pkg/iac-providers/terraform/v12/load-file_test.go +++ b/pkg/iac-providers/terraform/v12/load-file_test.go @@ -78,6 +78,13 @@ func TestLoadIacFile(t *testing.T) { tfv12: TfV12{}, wantErr: nil, }, + { + name: "config1", + tfConfigFile: "./testdata/dummyconfig/dummyconfig.tf", + tfJSONFile: "./testdata/tfjson/dummyconfig.json", + tfv12: TfV12{}, + wantErr: nil, + }, } for _, tt := range table2 { diff --git a/pkg/iac-providers/terraform/v12/testdata/dummyconfig/dummyconfig.tf b/pkg/iac-providers/terraform/v12/testdata/dummyconfig/dummyconfig.tf new file mode 100644 index 000000000..a2f973393 --- /dev/null +++ b/pkg/iac-providers/terraform/v12/testdata/dummyconfig/dummyconfig.tf @@ -0,0 +1,55 @@ +resource "type1" "resource1" { + test3 = 1 + 2 + test1 = "hello" + test2 = 5 + arr = [1, 2, 3, 4] + hyphen-test = 3 + temp = "${1 + 2} %{if local.test2 < 3}\"4\n\"%{endif}" + temp2 = "${"hi"} there" + quoted = "\"quoted\"" + squoted = "'quoted'" + } + +resource "type2" "resource2"{ + other = { + num = local.test2 + 5 + thing = [for x in local.arr: x * 2] + "${local.test3}" = 4 + 3 = 1 + "local.test1" = 89 + "a.b.c[\"hi\"][3].*" = 3 + loop = "This has a for loop: %{for x in local.arr}x,%{endfor}" + a.b.c = "True" + } +} + +resource "type3" "resource3" { + heredoc = <<-EOF + This is a heredoc template. + It references ${local.other.3} + EOF + simple = 4 - 2 + cond = test3 > 2 ? 
1: 0 + heredoc2 = < Date: Tue, 4 Aug 2020 15:32:44 +0530 Subject: [PATCH 060/188] print help if no flags are passed --- cmd/terrascan/main.go | 17 +++++++++++++++-- 1 file changed, 15 insertions(+), 2 deletions(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 54bf3b3fe..6b33c1f82 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -27,20 +27,33 @@ import ( ) func main() { + + // command line flags var ( - server = flag.Bool("server", false, "run terrascan in server mode") + // server mode + server = flag.Bool("server", false, "run terrascan in server mode") + + // IaC flags iacType = flag.String("iac", "", "IaC provider (supported values: terraform)") iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") - cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") iacFilePath = flag.String("f", "", "IaC file path") iacDirPath = flag.String("d", "", "IaC directory path") + // cloud flags + cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") + // logging flags logLevel = flag.String("log-level", "info", "logging level (debug, info, warn, error, panic, fatal)") logType = flag.String("log-type", "console", "log type (json, console)") ) flag.Parse() + // if no flags are passed, print usage + if flag.NFlag() < 1 { + flag.Usage() + return + } + // if server mode set, run terrascan as a server, else run it as CLI if *server { logging.Init(*logType, *logLevel) From 3aa34fa8cb4b5748f11260d9912bd504bf6952a1 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Wed, 5 Aug 2020 02:35:24 -0700 Subject: [PATCH 061/188] First cut of the policy engine - OPA can evaluate and read rego files in a given directory - Template regos are supported - Still some bugs to fix around mapping --- go.mod | 1 + go.sum | 53 +++++++ pkg/data/file/importer.go | 23 +++ pkg/policy/interface.go | 31 ++++ pkg/policy/opa/constants.go | 7 + pkg/policy/opa/opa_engine.go | 275 +++++++++++++++++++++++++++++++++++ pkg/runtime/executor.go | 17 ++- 7 files changed, 404 insertions(+), 3 deletions(-) create mode 100644 pkg/data/file/importer.go create mode 100644 pkg/policy/interface.go create mode 100644 pkg/policy/opa/constants.go create mode 100644 pkg/policy/opa/opa_engine.go diff --git a/go.mod b/go.mod index 321a14d32..54b7f2d36 100644 --- a/go.mod +++ b/go.mod @@ -7,6 +7,7 @@ require ( github.com/hashicorp/go-version v1.2.0 github.com/hashicorp/hcl/v2 v2.3.0 github.com/hashicorp/terraform v0.12.28 + github.com/open-policy-agent/opa v0.22.0 github.com/spf13/afero v1.3.2 github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 diff --git a/go.sum b/go.sum index 442aee8e5..b5037503d 100644 --- a/go.sum +++ b/go.sum @@ -27,6 +27,8 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4= +github.com/OneOfOne/xxhash v1.2.7 h1:fzrmmkskv067ZQbd9wERNGuxckWw67dyzoMG62p7LMo= +github.com/OneOfOne/xxhash v1.2.7/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= github.com/QcloudApi/qcloud_sign_golang v0.0.0-20141224014652-e4130a326409/go.mod h1:1pk82RBxDY/JZnPQrtqHlUFfCctgdorsd9M06fMynOM= github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292/go.mod 
h1:KYCjqMOeHpNuTOiFQU6WEcTG7poCJrUs0YgyHNtn1no= github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af/go.mod h1:5Jv4cbFiHJMsVxt52+i0Ha45fjshj6wxYr1r19tB9bw= @@ -55,6 +57,7 @@ github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3A github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.30.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLMYoU8P317H5OQ+Via4RmuPwCS0= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= @@ -72,6 +75,7 @@ github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -83,6 +87,9 @@ github.com/dylanmei/winrmtest v0.0.0-20190225150635-99b7fe2fddf1/go.mod h1:lcy9/ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/ghodss/yaml v0.0.0-20180820084758-c7ce16629ff4/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= @@ -90,17 +97,22 @@ github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/golang/glog 
v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/protobuf v0.0.0-20181025225059-d3de96c4c28e/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4 h1:87PNWwrRvUSnqS4dlcBU/ftvOIBep4sYuBLlh6rX2wk= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -120,6 +132,7 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4= github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01/go.mod h1:wjDF8z83zTeg5eMLml5EBSlAhbF7G8DobyI1YsMuyzw= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/mux v0.0.0-20181024020800-521ea7b17d02/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= @@ -170,6 +183,8 @@ github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 h1:hjy github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg= github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0= github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= @@ -182,8 +197,11 @@ github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVY github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod 
h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -207,8 +225,11 @@ github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcncea github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-runewidth v0.0.0-20181025052659-b20a3daf6a39/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-shellwords v1.0.4/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.0.8/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= @@ -233,9 +254,16 @@ github.com/mozillazg/go-httpheader v0.2.1/go.mod h1:jJ8xECTlalr6ValeXYdOF8fFUISe github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/olekukonko/tablewriter v0.0.1 h1:b3iUnf1v+ppJiOfNX4yxxqfWKMQPZR5yoh8urCTFX88= +github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/open-policy-agent/opa v0.22.0 h1:KZvn0uMQIorBIwYk8Vc89dp8No9FIEF8eFl0sc1r/1U= +github.com/open-policy-agent/opa v0.22.0/go.mod h1:rrwxoT/b011T0cyj+gg2VvxqTtn6N3gp/jzmr3fjW44= github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d h1:zapSxdmZYY6vJWXFKLQ+MkI+agc+HQyfrCGowDSHiKs= +github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/pkg/errors v0.0.0-20181023235946-059132a15dd0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.0/go.mod 
h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -245,27 +273,42 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.1/go.mod h1:6gapUrK/U1TAN7ciCoNRIdVC5sbdBTUh1DKN0g6uH7E= +github.com/prometheus/client_golang v0.0.0-20181025174421-f30f42803563/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829 h1:D+CiwcpGTW6pL6bv6KI3KbyEyCKyS+1JWS2h8PNDnGA= github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20181020173914-7e9e6cabbd39/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.2.0 h1:kUZDBDTdBVBYBj5Tmh2NZLlF60mfjA27rM34b+cVwNU= github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1 h1:/K3IL0Z1quvmJ7X0A1AwNEK7CRkVK3YwfOU/QAL4WGg= github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a h1:9ZKAASQSHhDYGoxY8uLVpewe1GDZ2vu2Tr/vTdVAkFQ= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= 
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.3.2 h1:GDarE4TJQI52kYSbSAmLiId1Elfj+xgSDqrUZxFhxlU= github.com/spf13/afero v1.3.2/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/cobra v0.0.0-20181021141114-fe5e611709b0 h1:BgSbPgT2Zu8hDen1jJDGLWO8voaSRVrwsk18Q/uSh5M= +github.com/spf13/cobra v0.0.0-20181021141114-fe5e611709b0/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v0.0.0-20181024212040-082b515c9490/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -286,6 +329,8 @@ github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6Ac github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= +github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b h1:vVRagRXf67ESqAb72hG2C/ZwI8NtJF2u2V76EsuOHGY= +github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b/go.mod h1:HptNXiXVDcJjXe9SqMd0v2FsL9f8dz4GnXgltU6q/co= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= @@ -315,6 +360,7 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/lint v0.0.0-20181023182221-1baf3a9d7d67/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -332,6 +378,7 @@ golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net 
v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -373,6 +420,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -381,6 +429,7 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -392,6 +441,7 @@ golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0 h1:Dh6fw+p6FyRl5x/FvNswO1ji0lIGzm3KP8Y9VkS9PTE= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -415,6 +465,7 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7 google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto 
v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -432,6 +483,8 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= diff --git a/pkg/data/file/importer.go b/pkg/data/file/importer.go new file mode 100644 index 000000000..1cc036441 --- /dev/null +++ b/pkg/data/file/importer.go @@ -0,0 +1,23 @@ +package file + +type FileInfo struct { + Path string + Hash string + HashType string + Attributes string +} + +// Group Group metadata +type Group struct { + Name string + IsReadOnly bool + VerifySignatures bool + Directories []*FileInfo + Files []*FileInfo +} + +// Metadata File metadata +type Metadata struct { + Version string + Groups []*Group +} diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go new file mode 100644 index 000000000..8c15e02a2 --- /dev/null +++ b/pkg/policy/interface.go @@ -0,0 +1,31 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package policy + +type Manager interface { + Import() error + Export() error + Validate() error +} + +type Engine interface { + Initialize(policyPath string) error + Configure() error + Evaluate(inputData *interface{}) error + GetResults() error + Release() error +} diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go new file mode 100644 index 000000000..7d66da466 --- /dev/null +++ b/pkg/policy/opa/constants.go @@ -0,0 +1,7 @@ +package policy + +const ( + RegoMetadataFile = "rule.json" + RegoFileSuffix = ".rego" + RuleQueryBase = "data.accurics" +) diff --git a/pkg/policy/opa/opa_engine.go b/pkg/policy/opa/opa_engine.go new file mode 100644 index 000000000..bd50ef368 --- /dev/null +++ b/pkg/policy/opa/opa_engine.go @@ -0,0 +1,275 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package policy + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strings" + "text/template" + + "github.com/open-policy-agent/opa/ast" + + "go.uber.org/zap" + + "github.com/open-policy-agent/opa/rego" +) + +type AccuricsRegoMetadata struct { + Name string `json:"ruleName"` + DisplayName string `json:"ruleDisplayName"` + Category string `json:"category"` + ImpactedRes []string `json:"impactedRes"` + PolicyRelevance string `json:"policyRelevance"` + Remediation string `json:"remediation"` + Row int `json:"row"` + Rule string `json:"rule"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` + RuleReferenceID string `json:"ruleReferenceId"` + Severity string `json:"severity"` + Vulnerability string `json:"vulnerability"` +} + +type RegoData struct { + Name string `json:"ruleName"` + DisplayName string `json:"ruleDisplayName"` + Category string `json:"category"` + Remediation string `json:"remediation"` + Rule string `json:"rule"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` + RuleReferenceID string `json:"ruleReferenceId"` + Severity string `json:"severity"` + Vulnerability string `json:"vulnerability"` + RawRego *[]byte + PreparedQuery *rego.PreparedEvalQuery +} + +type ResultData struct { +} + +type OpaEngine struct { + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData +} + +func filterFileListBySuffix(allFileList *[]string, filter string) *[]string { + fileList := make([]string, 0) + + for i := range *allFileList { + if strings.HasSuffix((*allFileList)[i], filter) { + fileList = append(fileList, (*allFileList)[i]) + } + } + return &fileList +} + +func (o *OpaEngine) LoadRegoFiles(policyPath string) error { + ruleCount := 0 + regoFileCount := 0 + metadataCount := 0 + + // Walk the file path and find all directories + dirList := make([]string, 0) + err := filepath.Walk(policyPath, func(filePath string, fileInfo os.FileInfo, err error) error { + if fileInfo != nil && fileInfo.IsDir() { + dirList = append(dirList, filePath) + } + return err + }) + + if len(dirList) == 0 { + return fmt.Errorf("no directories found for path %s", policyPath) + } + + o.RegoFileMap = make(map[string][]byte) + o.RegoDataMap = make(map[string]*RegoData) + + // Load rego data files from each dir + sort.Strings(dirList) + for i := range dirList { + metaFilename := filepath.Join(dirList[i], RegoMetadataFile) + var metadata []byte + metadata, err = ioutil.ReadFile(metaFilename) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + } + continue + } + + // Read metadata into struct + regoMetadata := make([]*RegoData, 0) + if err = json.Unmarshal(metadata, ®oMetadata); err != nil { + zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) + continue + } + + metadataCount++ + + // Find all .rego files within the directory + fileInfo, err := ioutil.ReadDir(dirList[i]) + if err != nil { + zap.S().Error("error while finding rego files", zap.String("dir", dirList[i])) + continue + } + + files := make([]string, 0) + for j := range fileInfo { + files = append(files, fileInfo[j].Name()) + } + + // Load rego data for all rego files + regoFileList := filterFileListBySuffix(&files, RegoFileSuffix) + regoFileCount += len(*regoFileList) + for j := range *regoFileList { + regoFilename := 
(*regoFileList)[j] + regoFullPath := filepath.Join(dirList[i], regoFilename) + var rawRegoData []byte + rawRegoData, err = ioutil.ReadFile(regoFullPath) + if err != nil { + zap.S().Warn("failed to load rego file", zap.String("file", regoFilename)) + continue + } + + _, ok := o.RegoFileMap[regoFullPath] + if ok { + // Already loaded this file, so continue + continue + } + + // Set raw rego data + o.RegoFileMap[regoFullPath] = rawRegoData + } + + for j := range regoMetadata { + //key := filepath.Join(dirList[i], regoMetadata[j].Rule) + //regoData := o.RegoFileMap[key] + metadataCount++ + // Apply templates if available + var buf bytes.Buffer + t := template.New("opa") + t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoMetadata[j].RuleTemplate+".rego")])) + t.Execute(&buf, regoMetadata[j].RuleTemplateArgs) + + templateData := buf.Bytes() + regoMetadata[j].RawRego = &templateData + o.RegoDataMap[regoMetadata[j].Name] = regoMetadata[j] + } + } + + ruleCount = len(o.RegoDataMap) + zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", ruleCount, regoFileCount, metadataCount) + + return err +} + +func (o *OpaEngine) CompileRegoFiles() error { + for k := range o.RegoDataMap { + compiler, err := ast.CompileModules(map[string]string{ + o.RegoDataMap[k].Rule: string(*(o.RegoDataMap[k].RawRego)), + }) + + r := rego.New( + rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Name), + rego.Compiler(compiler), + ) + + // Create a prepared query that can be evaluated. + query, err := r.PrepareForEval(o.Context) + if err != nil { + return err + } + + o.RegoDataMap[k].PreparedQuery = &query + } + + return nil +} + +// Initialize Initializes the Opa engine +// Handles loading all rules, filtering, compiling, and preparing for evaluation +func (o *OpaEngine) Initialize(policyPath string) error { + o.Context = context.Background() + + if err := o.LoadRegoFiles(policyPath); err != nil { + return err + } + + err := o.CompileRegoFiles() + if err != nil { + return err + } + + return nil +} + +func (o *OpaEngine) Configure() error { + return nil +} + +func (o *OpaEngine) GetResults() error { + return nil +} + +func (o *OpaEngine) Release() error { + return nil +} + +func (o *OpaEngine) Evaluate(inputData *interface{}) error { + + sortedKeys := make([]string, len(o.RegoDataMap)) + x := 0 + for k := range o.RegoDataMap { + sortedKeys[x] = k + x++ + } + sort.Strings(sortedKeys) + + for _, k := range sortedKeys { + // Execute the prepared query. 
+ rs, err := o.RegoDataMap[k].PreparedQuery.Eval(o.Context, rego.EvalInput(inputData)) + // rs, err := r.Eval(o.Context) + if err != nil { + zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData)) + continue + } + + if len(rs) > 0 { + results := rs[0].Expressions[0].Value.([]interface{}) + if len(results) > 0 { + r := o.RegoDataMap[k] + fmt.Printf("\n[%s] [%s] %s\n %s\n", r.Severity, r.RuleReferenceID, r.DisplayName, r.Vulnerability) + } + // fmt.Printf(" [%s] %v\n", k, results) + } else { + // fmt.Printf("No Result [%s] \n", k) + } + // Store results + } + + return nil +} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 0510fc18d..78d7fd8ef 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -17,9 +17,8 @@ package runtime import ( - "os" + policy "github.com/accurics/terrascan/pkg/policy/opa" - "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" cloudProvider "github.com/accurics/terrascan/pkg/cloud-providers" @@ -105,10 +104,22 @@ func (e *Executor) Execute() error { if err != nil { return err } - utils.PrintJSON(normalized, os.Stdout) + //utils.PrintJSON(normalized, os.Stdout) // write output + // Create a new policy engine based on IaC type + if e.iacType == "terraform" { + engine := policy.OpaEngine{} + + err := engine.Initialize("/Users/wsana/go/src/accurics/terrascan/pkg/policies/accurics/v1/opa") + if err != nil { + return err + } + + engine.Evaluate(&normalized) + } + // successful return nil } From 1223bdd8edb1e8cfc2190313c9525ba5e708bf61 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Wed, 5 Aug 2020 22:48:07 -0400 Subject: [PATCH 062/188] updates gitignore to remove python related extensions and add golang --- .gitignore | 105 +++++++---------------------------------------------- 1 file changed, 14 insertions(+), 91 deletions(-) diff --git a/.gitignore b/.gitignore index 8639a3a91..93a9fd879 100644 --- a/.gitignore +++ b/.gitignore @@ -1,106 +1,29 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[cod] -*$py.class - -# C extensions +# Binaries for programs and plugins +*.exe +*.exe~ +*.dll *.so +*.dylib -# Distribution / packaging -.Python -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -.hypothesis/ +# Test binary, built with `go test -c` +*.test -# Translations -*.mo -*.pot +# Output of the go coverage tool, specifically when used with LiteIDE +*.out -# Django stuff: -*.log -local_settings.py - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy +# Dependency directories (remove the comment below to include it) +# vendor/ # Sphinx documentation docs/_build/ -# PyBuilder -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# pyenv -.python-version - -# celery beat schedule file -celerybeat-schedule - -# SageMath parsed files -*.sage.py - -# Environments -.env -.venv -env/ -venv/ -ENV/ -env.bak/ -venv.bak/ - -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - # mkdocs documentation /site -# mypy -.mypy_cache/ - #vscode .vscode/ /updatedFiles + +# Go binar +/bin From a3c83040fe5a4d6dc88ba4570d0825697b7595df Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 6 Aug 2020 19:07:15 -0400 Subject: [PATCH 063/188] adds license --- LICENSE | 191 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 191 insertions(+) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 000000000..82894c5ac --- /dev/null +++ b/LICENSE @@ -0,0 +1,191 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. 
For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   Copyright 2020 Accurics, Inc.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.

From b255717193aa5efd6af3c24465bd4d75e7447c9d Mon Sep 17 00:00:00 2001
From: Cesar Rodriguez
Date: Thu, 6 Aug 2020 19:08:46 -0400
Subject: [PATCH 064/188] initial take at readme

---
 README.md | 60 +++++++++++++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 60 insertions(+)

diff --git a/README.md b/README.md
index 7309c20b3..d31e736e7 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,62 @@
 # Terrascan
 ![CI](/~https://github.com/accurics/terrascan/workflows/Go%20Terrascan%20build/badge.svg)
+[![codecov](https://codecov.io/gh/accurics/terrascan/branch/master/graph/badge.svg)](https://codecov.io/gh/accurics/terrascan)
+
+Terrascan is a static code analyzer and linter for security weaknesses in Infrastructure as Code (IaC).
+
+* GitHub Repo: /~https://github.com/accurics/terrascan
+* Documentation: https://docs.accurics.com
+* Discuss: https://community.accurics.com
+
+## Features
+* 500+ Policies for security best practices
+* Scanning of Terraform 12+ (HCL2)
+* Support for AWS, Azure, and GCP
+
+## Installing
+Terrascan's binary for your architecture can be found on the releases page. 
Here's an example of how to install it: + +``` +FIXME: Add example +``` + +### Homebrew +Terrascan can be installed using Homebrew on macOS: + +``` +brew install terrascan +``` + +### Chocolatey +Terrascan can be installed on Windows using Chocolatey: + +``` +choco install terrascan +``` + +### Docker +Terrascan is also available as a Docker image and can be used as follows: + + $ docker run accurics/terrascan + +# Using Terrascan + +To scan your code for security weaknesses, you can run the following: + +``` + $ terrascan --location tests/infrastructure/success --vars tests/infrastructure/vars.json +``` + +# Documentation + +To learn more about Terrascan check out the documentation https://docs.accurics.com where we include a getting started guide, Terrascan's architecture, a breakdown of its commands, and how to write your own policies. + +# Developing Terrascan +To learn more about developing and contributing to Terrascan refer to our [contributing guide](CONTRIBUTING.md). + + +To learn more about compiling Terrascan and contributing suggested changes, please refer to the contributing guide. + +# License + +Terrascan is licensed under the [Apache 2.0 License](LICENSE). From 4fa4d9ed8ec77d9be95ad45c138e641ad553ac70 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 6 Aug 2020 22:48:36 -0400 Subject: [PATCH 065/188] adds badges and help menu --- README.md | 43 +++++++++++++++++++++++++++++++++++++------ 1 file changed, 37 insertions(+), 6 deletions(-) diff --git a/README.md b/README.md index d31e736e7..2ca473b4a 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,9 @@ # Terrascan ![CI](/~https://github.com/accurics/terrascan/workflows/Go%20Terrascan%20build/badge.svg) [![codecov](https://codecov.io/gh/accurics/terrascan/branch/master/graph/badge.svg)](https://codecov.io/gh/accurics/terrascan) +[![community](https://img.shields.io/discourse/status?server=https%3A%2F%2Fcommunity.accurics.com)](https://community.accurics.com) +[![downloads](https://img.shields.io/github/downloads/accurics/terrascan/total)](/~https://github.com/accurics/terrascan/releases) + + Terrascan is a static code analyzer and linter for security weaknesses in Infrastructure as Code (IaC). @@ -17,7 +20,12 @@ Terrascan is a static code analyzer and linter for security weaknesses in Infrast Terrascan's binary for your architecture can be found on the releases page.
Here's an example of how to install it: ``` -FIXME: Add example +$ curl --location /~https://github.com/accurics/terrascan/releases/download/v1.0.0/terrascan_darwin_amd64.zip --output terrascan_darwin_amd64.zip +$ unzip terrascan_darwin_amd64.zip +Archive: terrascan_darwin_amd64.zip + inflating: terrascan +$ install terrascan /usr/local/bin +$ terrascan --help ``` ### Homebrew @@ -39,24 +47,47 @@ Terrascan is also available as a Docker image and can be used as follows: $ docker run accurics/terrascan -# Using Terrascan +## Getting started To scan your code for security weaknesses, you can run the following: ``` - $ terrascan --location tests/infrastructure/success --vars tests/infrastructure/vars.json +$ terrascan --iac terraform --iac-version v12 --cloud aws -d pkg/iac-providers/terraform/v12/testdata/moduleconfigs +``` + +The following flags are available: + +``` +$ terrascan --help +Usage of ./bin/terrascan: + -cloud string + cloud provider (supported values: aws) + -d string + IaC directory path + -f string + IaC file path + -iac string + IaC provider (supported values: terraform) + -iac-version string + IaC version (supported values: 'v12' for terraform) (default "default") + -log-level string + logging level (debug, info, warn, error, panic, fatal) (default "info") + -log-type string + log type (json, console) (default "console") + -server + run terrascan in server mode ``` -# Documentation +## Documentation To learn more about Terrascan check out the documentation https://docs.accurics.com where we include a getting started guide, Terrascan's architecture, a breakdown of its commands, and how to write your own policies. -# Developing Terrascan +## Developing Terrascan To learn more about developing and contributing to Terrascan refer to our [contributing guide](CONTRIBUTING.md). To learn more about compiling Terrascan and contributing suggested changes, please refer to the contributing guide. -# License +## License Terrascan is licensed under the [Apache 2.0 License](LICENSE).
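As a quick illustration of the flag reference above (the file path below is a placeholder, not a file that ships with the repository), the same flags can be combined for a single-file scan or for server mode, roughly like this:

```
# Scan a single Terraform v12 file against the AWS policies
$ terrascan --iac terraform --iac-version v12 --cloud aws -f main.tf

# Run Terrascan as a long-running service with JSON-formatted logs
$ terrascan --server --log-type json
```

Both invocations only combine flags listed in the help output above; exact behavior depends on the build being run.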
From 8d42259c9f38b8c2ac3c2a9e5b4ef1e42bfe9474 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 6 Aug 2020 23:06:43 -0400 Subject: [PATCH 066/188] adds docs badge --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 2ca473b4a..c5db3a148 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ ![CI](/~https://github.com/accurics/terrascan/workflows/Go%20Terrascan%20build/badge.svg) [![codecov](https://codecov.io/gh/accurics/terrascan/branch/master/graph/badge.svg)](https://codecov.io/gh/accurics/terrascan) [![community](https://img.shields.io/discourse/status?server=https%3A%2F%2Fcommunity.accurics.com)](https://community.accurics.com) +[![Documentation](https://readthedocs.org/projects/terrascan/badge/?version=latest)](https://terrascan.readthedocs.io/en/latest/?badge=latest) [![downloads](https://img.shields.io/github/downloads/accurics/terrascan/total)](/~https://github.com/accurics/terrascan/releases) From 56991e2ecd2bddcd56b994ef3f46b664a5165f1e Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 21:54:34 -0400 Subject: [PATCH 067/188] initial docs format --- docs/Makefile | 177 ++++++++++++++++++ docs/README.md | 55 ++++++ ...rrascan_By_Accurics_Logo_38B34A-333F48.svg | 1 + docs/img/Terrascan_Logo_F2F2F2_600px.png | Bin 0 -> 7364 bytes docs/img/Terrascan_STACK_Logo_F2F2F2.svg | 1 + docs/img/favicon.ico | Bin 0 -> 4286 bytes docs/index.md | 3 + mkdocs.yml | 12 ++ 8 files changed, 249 insertions(+) create mode 100644 docs/Makefile create mode 100644 docs/README.md create mode 100644 docs/img/Terrascan_By_Accurics_Logo_38B34A-333F48.svg create mode 100644 docs/img/Terrascan_Logo_F2F2F2_600px.png create mode 100644 docs/img/Terrascan_STACK_Logo_F2F2F2.svg create mode 100644 docs/img/favicon.ico create mode 100644 docs/index.md create mode 100644 mkdocs.yml diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 000000000..4f2efedde --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,177 @@ +# Makefile for Mkdocs documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . +# the i18n builder cannot share the environment and doctrees with the others +I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " singlehtml to make a single large HTML file" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " devhelp to make HTML files and a Devhelp project" + @echo " epub to make an epub" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " latexpdf to make LaTeX files and run them through pdflatex" + @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" + @echo " text to make text files" + @echo " man to make manual pages" + @echo " texinfo to make Texinfo files" + @echo " info to make Texinfo files and run them through makeinfo" + @echo " gettext to make PO message catalogs" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " xml to make Docutils-native XML files" + @echo " pseudoxml to make pseudoxml-XML files for display purposes" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +singlehtml: + $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml + @echo + @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." + +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/terrascan.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/terrascan.qhc" + +devhelp: + $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp + @echo + @echo "Build finished." + @echo "To view the help file:" + @echo "# mkdir -p $$HOME/.local/share/devhelp/terrascan" + @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/terrascan" + @echo "# devhelp" + +epub: + $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub + @echo + @echo "Build finished. The epub file is in $(BUILDDIR)/epub." + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make' in that directory to run these through (pdf)latex" \ + "(use \`make latexpdf' here to do that automatically)." 
+ +latexpdf: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through pdflatex..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +latexpdfja: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo "Running LaTeX files through platex and dvipdfmx..." + $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja + @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." + +text: + $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text + @echo + @echo "Build finished. The text files are in $(BUILDDIR)/text." + +man: + $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man + @echo + @echo "Build finished. The manual pages are in $(BUILDDIR)/man." + +texinfo: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo + @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." + @echo "Run \`make' in that directory to run these through makeinfo" \ + "(use \`make info' here to do that automatically)." + +info: + $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo + @echo "Running Texinfo files through makeinfo..." + make -C $(BUILDDIR)/texinfo info + @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." + +gettext: + $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale + @echo + @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." + +xml: + $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml + @echo + @echo "Build finished. The XML files are in $(BUILDDIR)/xml." + +pseudoxml: + $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml + @echo + @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 000000000..d96755dda --- /dev/null +++ b/docs/README.md @@ -0,0 +1,55 @@ +# Terrascan documentation +- About Terrascan +- Getting Started + - Installation + - Scanning + - Terrascan CLI + +- Architecture + - Runtime + - Infrastructure as Code Language Providers + - Cloud Providers + - Policy Engine +- Provider Reference + - IAC Language Providers + - Terraform (HCL2) + - CloudFormation (JSON) + - CloudFormation (YAML) + - Kubernetes (YAML) + - Cloud Providers + - AWS + - Azure + - GCP +- Policies + - AWS + - Azure + - GCP + - Kubernetes +- Learning + - pre-commit + - super-linter + + + Introduction: This section covers a general overview of what Envoy is, an architecture overview, how it is typically deployed, etc. + + Getting Started: Quickly get started with Envoy using Docker. + + Installation: How to build/install Envoy using Docker. + + Configuration: Detailed configuration instructions for Envoy. Where relevant, the configuration guide also contains information on statistics, runtime configuration, and APIs. 
+ + Operations: General information on how to operate Envoy including the command line interface, hot restart wrapper, administration interface, a general statistics overview, etc. + + Extending Envoy: Information on how to write custom filters for Envoy. + + API reference: Envoy API detailed reference. + + Envoy FAQ: Have questions? We have answers. Hopefully. + +## Using as pre-commit + +Terrascan can be used on pre-commit hooks to prevent accidental introduction of security weaknesses into your repository. +This requires having pre-commit_ installed. An example configuration is provided in the comments of the here_ file in this repository. + +.. _pre-commit: https://pre-commit.com/ +.. _here: .pre-commit-config.yaml diff --git a/docs/img/Terrascan_By_Accurics_Logo_38B34A-333F48.svg b/docs/img/Terrascan_By_Accurics_Logo_38B34A-333F48.svg new file mode 100644 index 000000000..65bf24e0e --- /dev/null +++ b/docs/img/Terrascan_By_Accurics_Logo_38B34A-333F48.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/img/Terrascan_Logo_F2F2F2_600px.png b/docs/img/Terrascan_Logo_F2F2F2_600px.png new file mode 100644 index 0000000000000000000000000000000000000000..e4171197e343c769561d16f4d2e71710d0d26146 GIT binary patch literal 7364 zcmeHM=Q|th{|&Wjw6toEwn~+tN@^9Ytpqh{1~qC#?1-4H*;4CK<3XrB60!Hz619RF ztso60wpNVVKhHn$ef4{BpZoLbKIc03_}tfZ-WwTcvoP{70ssIOT^$W$0D#)!vL0oi zyTrx*5cK7A)mz8X4*;O~{NJGReCK-#0PqFqYCJOy%-Nbh^O@8U==a&0IzvC=d<&+@ zO?Y#Mx}DAbZnwsp_m^cq8-|}MfCj2voy%}7gZgJhi?)Z*-h{-Nm6Qz8Fl__t{>P#) zV_Um&x+?tSV9WYHVx&&D8aR~eY}q)iYJ)f( zoBxp$rho>Yct?MTmiH3HjWm}sU>(;G`37V1Ir~hY2`5i z0IyenUP)J*`ZvrrVb2Bfvo$~!nQND{Cr1V7*17iB{+1?*dg|z1euXY~<6~ql@$iHJ z0C;$xp0hfxz&n5a(NOIYw}-(Litt$A4mF_KgZ^gqYXM9JgB1V({JubsDG;5D+TT)( zFiT^yBDY5~0J>{8VciqI_A5zzXq0%%&okKvLyFB8%8!l-g1b+;WRHOWK=OB6Iw07d zc#T;e;cQ83?r;ZBsZwrS(s%|fZelm2_rOPL)nzhV2Xqf_LAu4_98#Ts5Cpoi+WjM* z=`u7Wa3XWPZh8n@3ayo+(jzT5;zY*{Qi)Y3AB^{l$0g+coEPA92xhrtvLj3!0C-o& zNu@XRV@``M?Wf%S-qHQEBQx+Ip2=3El+2sBok9v4Hp*0c!QPaAi$ZyF0rRnA1$ZZO z2v!G0p-}>LAQigF)ro(bJVujSY?Qnt%{N3XU$AGS2wBBnx1<3Ax>c4~o&sDR&s8!$D{Gwr9 z=cC$U+Z(*RL|g=qB>hepen^zZ99!*P&VVa~ClZ?#^g#4R%QKaqe(ReEvKq zwo{snG@+DH)RGhkPh8KZF%tU{Y8|eBhQ(E^_d`F*6eVxjtXWaQJ9&diK3<*$szIU9 z4zG?KLV&(thX2w;^-e_?&_U3c3kh5RP1kSL5j?>=7G_o4hAkbza(0h@wuMiDCZRB* zu$9xlFy_iYGcrm3F4TlkMddFKM58PjQ& z?;i*`2B0cnPfWCDnsSMeioMpfKVJ04Bj(JNJvH*$OIaMHf>-j?YtTL(4IK?9@{2+4 z?O0wFxs_9owwEViho~8m?X4k8Jt^7av#1KCrzxa$19)u&&kd>alg@ts+}Q)z7hOxv z+U+Ym<7B^9@ya-6iy9u@n0H&&=+>~jzLt&pX;t28RL7O{Q8L(7qS=&6udto-ZO=TTJyh?Xm!y-TV?WpyLl|*j;v-jHaC$U~c!{y!)sX@aCi#>9RAvC`o0#rm(k@N?gb5wONgSKAoB zYVUx*-lhNR|L%%^Y_VzAE zq<&KUjzEKycA-@mfzADhe`A?UVXfND?{;cfj1Vdf3oD&6wV%_A4oWBI(R>& z5}w~Q(By}8DPt4ga16klZ*^AUZ3;B$F^a8!)z_`lrtoqsd42_{^Ydxv<#=XpH-(9= zlQO!sU~MbJc(c;N6Fj8wRQsvXpe*tXcePAu1>3)^+0e-```}Z!u6%7eavv|^W;*%8 z#t>4g!Ez9~7>Yuo94fk&4oKSbHo`l%;b7H+OgqKNa>dUJ6ccfj|GFQ3OAu|;Jl!%& zjxIM^J{T2q4czL>yPEW){CKzr=Pr35X1`X1VbQ;J9e#6BM#_SF;numV!#Ha_7R1xA z@fX-6F>~&+itthllXA-Ad%}NmlEO5WxKI|@7bKy*a_oUaF4DDaa(UiF#C3AcHhon~ zKOkhhhaPFt1HHaTtVvh-6d8RFmbIfg(~C%HRKzV?{}MW_WOZ!fgd0LR+=yD$YU?&> zdgMv4e#c%!T7wX1w*k@27<@->iEoCcgp$ZsZ88rkc-xB{$Z0gCg9B0?-weYWuZ|db z*(U2ND!yEjohm#MV45qfjz>u?@yFL_&xKT-*f<`eYN&(OT@3T&OKS z-~K_v8BX|hSVbjd{O^k6Q4*sZe-fA7CV@tcDFtesNP;aYHMK~vr48CB`s?`xP6~p8 zKy}_`LA;L&sU@aCM8rP&ObfG#C7PAn0piL&4X( z#2CAf?uqCzc<;IKQiV%%kZ8t8`OE9mtI1tY3%@Cb>?GY+Wr$d-30-)3-fE&Arw4&2MEktR73O6P!RSH-N8WB1ISBj+t>)T~57evo zmo@&pn-2v=vek2(v#|352Wc1SB}$8k*DcNRt1^>eDvFhlf%5))&7F@DTF)LCK2E28 zWirsp#6ZWX96`#?hw2`<@|NsQnyNoqZ1cSP^5FxCT8o3rFH@5dG_`cyH8DoBjamyy 
zJ>n)`i}pbW04oUwU`$!!-%;P5&38>2dgVjV;aAUo&yQ&-*~_+#e(GBLMlNEu%wMVA zcHLA6b_A`bPsL&&M|-z_2I4$pzmAwU<|I4&j)+zxZcQKXTT3Ek(}k1l;oygrydCSU zN&-2fCSwQn2@V})IUx|z@3D;0_ih>^#Ma3_%>3mFU<=!&*K{N?bnxeWZ z9TYxFT6wTyv3aB8M@ass#gf+;HP1{qs&6<&w&Q59H$yc$ZXa~BZHK`^Xr1xu)Hu4e z$)RHO^{IP^dF-iU`08C4lM}u5b@5uZj^}^Bd)$RsT2NhGp+Q>wg1Jas;A}IcHUm6Qq z6!11X9Mkg8aFG2L?Jbv8;rGDb+L#ZmAp28J>yA{w#XXeWkEPM}4T^w}cHbb}@WPrs z+M4g&;md~d%FeKa#+SK35tbN6nLAz@iNaH$x|zO@8CI_KXvd0NW=J6iiX`UHLXSvk zeZKi%#TU}lj#^p}u}KRziGEH{*>EBnZqwzH5WhRe->nRutXYrGwz87G=?g?#gJz{s z$MZVT0bU8j?^|s~yJ%mfxPrjlmjcb^HBK9p+ag6pd{l80OC^CHGCq~q@}0y+6~d*- z#KjeR36Oom)@p1PUA=^TG_wg#^6=;O?W$XWKwb(qwOui)6}pyVu4hLsFCDAC5keAw zQ`Nq#<#2x}<}@Mgm5Y3aJ*sQ*Hc|AQKwK8$^;yiK;|umY=8FCqg(>_Y zpmYpgNV!*%6*j3;&=+Iq^s%_#fB5>!$#!9}fv*?T2o1f~uVR4VxskjelIc1-sh~0? zyADe@y))VqcQm>!y;G4RW!5?QNg*)YJ#i?WZKi-Am%d-H!c{$79x!GB`ezQjtc~d0 zpv~<|5YUJ_3Ugfeqz^G?xdH^)_W_IMc)Vw0J_om>OmW97)g@YuSVd`sz*!5t^?V}Y z=Trtyf9*HaIsKCT!&o+qdd`K*tN9vZb;hb&^(#g)5@{`TI@nZN>vf@CE?6N41QE$m z_@u-SwU_B#iF9@@WNlyty>=syM7B)Xa8W-BEAU$!_V(G~+8Jt$_4AXZmrHZZ<5TN) zuN%-(OCaQ|{p|jqlIPVntqA;@Amo z9D7myA`v#2_Uz#`>X%F%A}0^MGXt=w&3u)AT4h>?y3kP%L<$Y=za}P4xq+t@#J*ol zZkN@mj2j+Z#L`s_WeE>B3=Uw(#_>|09@gy)G)qEK6@YUIRM+mi)f@lz26k zECft9=(cJWmO7X#QVb!?atBn77mMwL#w{u*cvIcA)tC1*I}abC%)tk+%>C)!AXP;X z>HUaSjxa%Ywq@qIkvwY+TXG{)w3N_>2mj3EQT$bGmCH*z@z(^riDXG78;(6GK)Rrq z4%66e(qZ4G_$4|2v~C~<2RiTz94gsr<6q~)WGbyVn(g9F`vNv~SH?vj4m+GRTktGf zX-fhj^GP*A#$S8O;%G{(wAMubu1fP&%Mz-H3Ip| zyL1%tuCd?Qhib@SqBPN2Kfiy3%Cu&IzJ~wg@i@bmf|)};D{8IO1xKw@$ahdUdr4z2 zc*~pOsvc>ySreRhw``-*Xi#SJ?SG1u5?U{&4|WXIC2z2lwmm!uPIt}|6S7i@u2S8T zcatO)Z{39~GNhZua(#RIkqL1h$@fz+qCix@B%zEid1C+*79)%}3!4N@2SrXVt%%oo^2$1>1W;MttDSt)0CUyP-=a;uW<<`4hw7ItVnIkLT=dw0k{fpQ8WsOKS{b7-0YZjLE(W;iq!vK69J+yNG|&R5gS{_RM5p z_jhB2{RzY-8)&G=!U;^>Xw|G*aYpo2S1!LFJB z{GEXm(%rVOJ@Im{@Uz*F&#SoN8th^H*fT%D_ba?iYO-e`2+BD90PPmr8L*{gIv1k) zJY=tK+-HjtcLLqI*lkB*o;;Ugg6D!CUTr~d9jowIvG6tuvp^Hu_K(lv1P_z;Zcd%g~yAxJJ++RGNdcI^9h4fv360m>eq z9Bq3IMuXlQR_9sRZCKC*hs`XdG`4+fQ3+_(nM~%7{)qUk_*Ox-Brh`(l%v*d%72nJ z4vy?wO)HL7SvG!sqc%5dLDuV$&p?pzh$e8dhZeJ$7b90B7F^=8suvX6iFV9&E_MSs zm8)rvd~raCOJmE^5@p_$Vn& zO98j}aY)fq-$Skh`4pWJF-Sj@;WX$g%lO`e?gD|#rB}9s%8c*tKa0)}_G*L5b!-__ zGS>ouUEcJ()wO?<8=}}SSR`BBPW#RI<*>Mpo0FpCuMGS>28AzW%dVD3`afHCF8H*p zLOINA(UrE*F{QsShD}?LemxgbEs-j4Rjs@c&UyF)FOl?KWdo_v#pYPLJWWS$IsEw< z{iQd_4cAj&wP{u5raw|#oL}zbwqT!{zuGxDp*7BuKAi`QoX!^5_y|H8UpX@#0!q7s zZphJpW3u8v1ta;iuMW(HH34U265JueqI{d4o(s;F2ZCpn_fQ>bJ1y8MgqEC{(!7!K zOk;M^_h7=AO*nM*JbVm1{WT}u`m=Xe=*dCC{9nVTKN{IZLsZB(Xob@pN3}{(YnuTW z-Z3hBpH#HWr5vcK>Se+&g%XF(zg1-_`xRQsqj5MZNlE-pC}AZ;orvu51?T8h?;Xrr z;p)l{##7DZYoFj|;{wx3`9oV{#b{Y=|E*g!AsiMP6?`LY`=j`;T88aAFTye!H2G=f;my71F&rUuyCe6W}JDAtIOCZv4NG_k~H9i@LE^rWa`78cdVCmTvV6&#{c90)B)pk b=Zh \ No newline at end of file diff --git a/docs/img/favicon.ico b/docs/img/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..b2454bf7af95f9e8f716f15ecad96853239394ef GIT binary patch literal 4286 zcmcJT&#O&Q6vwyTfFxehBZ-rwXLikGfHG1BH{L)=-aw+1ft1nf8p#APcu6Utq?9NV z(w&&_5H42d*2)9*2%x3aQt;I=ABz` z&Mk_F${iCyUw&k7RUQ!ic<)_~oH)2)ATkmei}-}Fhs_`dGsi}-mPIBaDbAE0pXD5m zn2q=rBIJ|8&((ub#5htqu^);|XTYs7f;CL>g?h20-s(5&wX+|@_#e47muMY?mwwN( z=E`SRI9l@{x>?McA5 zosF2t6I;aQj4eK49AJh#)id0qP``3t40O-I%+V_jlL) zSC;0jeJfu$vBM8OgYU1O+gjsM6SLpoW-sicv8{b=&28S+Px}D2-Lvm~`D6e3`2&CL z{GmqQQ!(`9--@TJ|8T#5^Hj#x`=39i{?h3KoiT8R!+J*g-{OZOHvjYv{o}irOaAZQ zzWAjh+gtOXXWq^~^|7^1y@xZcz0(!^J1b{OZuPfwuC}@+&VOe-V3GsAibE~??!U0! zT8r4Z(AZ^doq=Glj1hm%*u5J&V)s~`2mQqc+a6B!fqP{6>^pO$HQ;Wu`})qF+GX;? 
z-MD`h%dR?;)HH^TPJ5NOYh!Vjzf*;)@%>}Dd8e+Lo8Eb9N4?SMe%FkskF4eQj~pf^ z&&z>67H6poIp88+%~`o~?zoDp{{F3E8280Mjl=sR4 z;C`SVk`aYhMli>Vs q=Z^kzZqL-YEo;uLA3HaPjO4!$5ciiK^&3|>Yk~ Date: Fri, 7 Aug 2020 22:06:40 -0400 Subject: [PATCH 068/188] adding .DS_Store files --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index 93a9fd879..2e00cd3e7 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,5 @@ docs/_build/ # Go binar /bin + +.DS_Store From 96079c8b8fbe328e855a48c1490550f10a3d3611 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 22:07:05 -0400 Subject: [PATCH 069/188] removes favicon --- docs/img/favicon.ico | Bin 4286 -> 0 bytes 1 file changed, 0 insertions(+), 0 deletions(-) delete mode 100644 docs/img/favicon.ico diff --git a/docs/img/favicon.ico b/docs/img/favicon.ico deleted file mode 100644 index b2454bf7af95f9e8f716f15ecad96853239394ef..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 4286 zcmcJT&#O&Q6vwyTfFxehBZ-rwXLikGfHG1BH{L)=-aw+1ft1nf8p#APcu6Utq?9NV z(w&&_5H42d*2)9*2%x3aQt;I=ABz` z&Mk_F${iCyUw&k7RUQ!ic<)_~oH)2)ATkmei}-}Fhs_`dGsi}-mPIBaDbAE0pXD5m zn2q=rBIJ|8&((ub#5htqu^);|XTYs7f;CL>g?h20-s(5&wX+|@_#e47muMY?mwwN( z=E`SRI9l@{x>?McA5 zosF2t6I;aQj4eK49AJh#)id0qP``3t40O-I%+V_jlL) zSC;0jeJfu$vBM8OgYU1O+gjsM6SLpoW-sicv8{b=&28S+Px}D2-Lvm~`D6e3`2&CL z{GmqQQ!(`9--@TJ|8T#5^Hj#x`=39i{?h3KoiT8R!+J*g-{OZOHvjYv{o}irOaAZQ zzWAjh+gtOXXWq^~^|7^1y@xZcz0(!^J1b{OZuPfwuC}@+&VOe-V3GsAibE~??!U0! zT8r4Z(AZ^doq=Glj1hm%*u5J&V)s~`2mQqc+a6B!fqP{6>^pO$HQ;Wu`})qF+GX;? z-MD`h%dR?;)HH^TPJ5NOYh!Vjzf*;)@%>}Dd8e+Lo8Eb9N4?SMe%FkskF4eQj~pf^ z&&z>67H6poIp88+%~`o~?zoDp{{F3E8280Mjl=sR4 z;C`SVk`aYhMli>Vs q=Z^kzZqL-YEo;uLA3HaPjO4!$5ciiK^&3|>Yk~ Date: Fri, 7 Aug 2020 22:14:14 -0400 Subject: [PATCH 070/188] adding stubs for the different sections --- docs/about.md | 2 ++ docs/architecture.md | 7 +++++++ docs/getting-started.md | 7 +++++++ docs/learning.md | 5 +++++ docs/policies.md | 3 +++ docs/provider-reference.md | 3 +++ 6 files changed, 27 insertions(+) create mode 100644 docs/about.md create mode 100644 docs/architecture.md create mode 100644 docs/getting-started.md create mode 100644 docs/learning.md create mode 100644 docs/policies.md create mode 100644 docs/provider-reference.md diff --git a/docs/about.md b/docs/about.md new file mode 100644 index 000000000..420481f66 --- /dev/null +++ b/docs/about.md @@ -0,0 +1,2 @@ +# About Terrascan + diff --git a/docs/architecture.md b/docs/architecture.md new file mode 100644 index 000000000..886527ed5 --- /dev/null +++ b/docs/architecture.md @@ -0,0 +1,7 @@ +# Architecture + +## Runtime + +## IaC Language Providers + +## Policy Engine diff --git a/docs/getting-started.md b/docs/getting-started.md new file mode 100644 index 000000000..0dc486bf0 --- /dev/null +++ b/docs/getting-started.md @@ -0,0 +1,7 @@ +# Getting Started + +## Installation + +## Scanning + +## Terrascan CLI diff --git a/docs/learning.md b/docs/learning.md new file mode 100644 index 000000000..0d074b943 --- /dev/null +++ b/docs/learning.md @@ -0,0 +1,5 @@ +# Educational Resources + +## Using Terrascan as a git pre-commit + +## Using Terrascan as part of super-linter diff --git a/docs/policies.md b/docs/policies.md new file mode 100644 index 000000000..dfb5e9daa --- /dev/null +++ b/docs/policies.md @@ -0,0 +1,3 @@ +# Policies + +## AWS diff --git a/docs/provider-reference.md b/docs/provider-reference.md new file mode 100644 index 000000000..8f4fe3c58 --- /dev/null +++ b/docs/provider-reference.md @@ -0,0 +1,3 @@ +# 
Provider Reference + +## Terraform (HCL2) From 370b4e1ba7df8b1a7076c5c290a8b9967cc82cc7 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 22:17:32 -0400 Subject: [PATCH 071/188] adds pre-commit info --- docs/README.md | 55 ------------------------------------------------ docs/learning.md | 3 +++ 2 files changed, 3 insertions(+), 55 deletions(-) delete mode 100644 docs/README.md diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index d96755dda..000000000 --- a/docs/README.md +++ /dev/null @@ -1,55 +0,0 @@ -# Terrascan documentation -- About Terrascan -- Getting Started - - Installation - - Scanning - - Terrascan CLI - -- Architecture - - Runtime - - Infrastructure as Code Language Providers - - Cloud Providers - - Policy Engine -- Provider Reference - - IAC Language Providers - - Terraform (HCL2) - - CloudFormation (JSON) - - CloudFormation (YAML) - - Kubernetes (YAML) - - Cloud Providers - - AWS - - Azure - - GCP -- Policies - - AWS - - Azure - - GCP - - Kubernetes -- Learning - - pre-commit - - super-linter - - - Introduction: This section covers a general overview of what Envoy is, an architecture overview, how it is typically deployed, etc. - - Getting Started: Quickly get started with Envoy using Docker. - - Installation: How to build/install Envoy using Docker. - - Configuration: Detailed configuration instructions for Envoy. Where relevant, the configuration guide also contains information on statistics, runtime configuration, and APIs. - - Operations: General information on how to operate Envoy including the command line interface, hot restart wrapper, administration interface, a general statistics overview, etc. - - Extending Envoy: Information on how to write custom filters for Envoy. - - API reference: Envoy API detailed reference. - - Envoy FAQ: Have questions? We have answers. Hopefully. - -## Using as pre-commit - -Terrascan can be used on pre-commit hooks to prevent accidental introduction of security weaknesses into your repository. -This requires having pre-commit_ installed. An example configuration is provided in the comments of the here_ file in this repository. - -.. _pre-commit: https://pre-commit.com/ -.. _here: .pre-commit-config.yaml diff --git a/docs/learning.md b/docs/learning.md index 0d074b943..a0c1adde5 100644 --- a/docs/learning.md +++ b/docs/learning.md @@ -2,4 +2,7 @@ ## Using Terrascan as a git pre-commit +Terrascan can be used on pre-commit hooks to prevent accidental introduction of security weaknesses into your repository. +This requires having [pre-commit](https://pre-commit.com/) installed. An example configuration is provided in the comments of [.pre-commit-config.yaml](/~https://github.com/accurics/terrascan/blob/master/.pre-commit-config.yaml).yaml. 
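For illustration only, a minimal configuration along those lines could look like the sketch below; the revision tag and hook id are assumptions for the example, not values taken from that file:

```
# .pre-commit-config.yaml (illustrative sketch)
repos:
  - repo: /~https://github.com/accurics/terrascan  # hook repository (assumed)
    rev: v1.0.0                                   # assumed tag; pin to a real release
    hooks:
      - id: terrascan                             # hook id (assumed)
```

After running `pre-commit install`, the hook runs on every commit; `pre-commit run --all-files` exercises it manually.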
+ ## Using Terrascan as part of super-linter From c5ecc37deffcbc3ab521025abbbba5c68405547f Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 22:23:33 -0400 Subject: [PATCH 072/188] updates for golang --- .editorconfig | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/.editorconfig b/.editorconfig index d4a2c4405..b5d505bd1 100644 --- a/.editorconfig +++ b/.editorconfig @@ -4,12 +4,16 @@ root = true [*] indent_style = space -indent_size = 4 +indent_size = 2 trim_trailing_whitespace = true insert_final_newline = true charset = utf-8 end_of_line = lf +[*.go] +indent_style = tab +indent_size = 4 + [*.bat] indent_style = tab end_of_line = crlf From c6d158e108877b83316c0f926ca0c70ed6de2939 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 22:29:09 -0400 Subject: [PATCH 073/188] adds nav --- docs/{provider-reference.md => providers.md} | 0 mkdocs.yml | 9 +++++++-- 2 files changed, 7 insertions(+), 2 deletions(-) rename docs/{provider-reference.md => providers.md} (100%) diff --git a/docs/provider-reference.md b/docs/providers.md similarity index 100% rename from docs/provider-reference.md rename to docs/providers.md diff --git a/mkdocs.yml b/mkdocs.yml index 6ff553d24..31d8a919d 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,7 +1,12 @@ site_name: Terrascan documentation nav: - - Home: index.md - - About: about.md + - Home: index.md + - Getting Started: getting-started.md + - Architecture: architecture.md + - Provider Reference: providers.md + - Policies: policies.md + - Educational Resources: learning.md + - About: about.md theme: name: material favicon: img/Terrascan_STACK_Logo_F2F2F2.svg From b6284a63f7e00bcce8e3e59d27adcf7dacf5babd Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 22:29:57 -0400 Subject: [PATCH 074/188] removes old makefile --- docs/Makefile | 177 -------------------------------------------------- 1 file changed, 177 deletions(-) delete mode 100644 docs/Makefile diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index 4f2efedde..000000000 --- a/docs/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Mkdocs documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/terrascan.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/terrascan.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/terrascan" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/terrascan" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." 
- -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." - -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." From 853c03782405f059ca493df5d1c57a777501bb8c Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 23:03:36 -0400 Subject: [PATCH 075/188] adds changelog --- CHANGELOG.md | 23 +++++++++++++++++++++++ docs/changelog.md | 1 + 2 files changed, 24 insertions(+) create mode 100644 CHANGELOG.md create mode 100644 docs/changelog.md diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 000000000..07c6b59b8 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,23 @@ +# Changelog + +## 0.2.3 +* Introduces the '-f' flag for passing a list of ".tf" files for linting and the '--version' flag. + +## 0.2.2 +* Adds Docker image and pipeline to push to DockerHub + +## 0.2.1 +* Bugfix: The pyhcl hard dependency in the requirements.txt file caused issues if a higher version was installed. This was fixed by using the ">=" operator. + +## 0.2.0 (2020-01-11) +* Adds support for terraform 0.12+ + +## 0.1.2 (2020-01-05) +* Adds ability to setup terrascan as a pre-commit hook + +## 0.1.1 (2020-01-01) +* Updates dependent packages to latest versions +* Migrates CI to GitHub Actions from travis + +## 0.1.0 (2017-11-26) +* First release on PyPI. 
diff --git a/docs/changelog.md b/docs/changelog.md new file mode 100644 index 000000000..f4d16fd97 --- /dev/null +++ b/docs/changelog.md @@ -0,0 +1 @@ +{!CHANGELOG.md!} From 0b1d0cf6eeb8ddd963a3ea9556bef589f32d7897 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 23:03:47 -0400 Subject: [PATCH 076/188] organizes layout --- docs/overrides/partials/footer.html | 87 +++++++++++++++++++++++++++++ mkdocs.yml | 48 +++++++++++++--- 2 files changed, 127 insertions(+), 8 deletions(-) create mode 100644 docs/overrides/partials/footer.html diff --git a/docs/overrides/partials/footer.html b/docs/overrides/partials/footer.html new file mode 100644 index 000000000..7ad9293cf --- /dev/null +++ b/docs/overrides/partials/footer.html @@ -0,0 +1,87 @@ + + +{% import "partials/language.html" as lang with context %} + + + diff --git a/mkdocs.yml b/mkdocs.yml index 31d8a919d..79f31b7ab 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,4 +1,43 @@ +# Project Info site_name: Terrascan documentation +site_description: >- + Terrascan documentation. Learn more about how to use Terrascan to scan infrastructure as code. +copyright: >- + © 2020 Accurics, Inc. All rights reserved. Terrascan and Terrascan logo are US trademarks of Accurics Inc. All other registered trademarks are the properties of their respective owners. + +# Repository +repo_name: accurics/terrascan +repo_url: /~https://github.com/accurics/terrascan/ + +# Configuration +theme: + name: material + favicon: img/Terrascan_STACK_Logo_F2F2F2.svg + logo: img/Terrascan_STACK_Logo_F2F2F2.svg + custom_dir: docs/overrides + palette: + primary: green + features: + - tabs + language: en + +# Extensions +markdown_extensions: + - markdown_include.include + +# Social Icons +extra: + social: + - icon: fontawesome/brands/twitter + link: https://twitter.com/AccuricsSec + - icon: fontawesome/brands/linkedin + link: https://www.linkedin.com/company/accurics + - icon: fontawesome/brands/docker + link: https://hub.docker.com/orgs/accuricsorg + - icon: fontawesome/brands/github + link: /~https://github.com/accurics + +# Navigation nav: - Home: index.md - Getting Started: getting-started.md @@ -6,12 +45,5 @@ nav: - Provider Reference: providers.md - Policies: policies.md - Educational Resources: learning.md + - Changelog: changelog.md - About: about.md -theme: - name: material - favicon: img/Terrascan_STACK_Logo_F2F2F2.svg - logo: img/Terrascan_STACK_Logo_F2F2F2.svg - repo_url: /~https://github.com/accurics/terrascan/ - palette: - primary: green - From 622bfec5018e0cdc97e5f86a00cc696be7058ba3 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 23:05:19 -0400 Subject: [PATCH 077/188] removes tabs --- mkdocs.yml | 2 -- 1 file changed, 2 deletions(-) diff --git a/mkdocs.yml b/mkdocs.yml index 79f31b7ab..408aa1024 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -17,8 +17,6 @@ theme: custom_dir: docs/overrides palette: primary: green - features: - - tabs language: en # Extensions From 7aaabfc17c69737267ef92e7f8f6a75dc09335af Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Fri, 7 Aug 2020 23:05:51 -0400 Subject: [PATCH 078/188] updates nav order --- mkdocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs.yml b/mkdocs.yml index 408aa1024..803240eb9 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -42,6 +42,6 @@ nav: - Architecture: architecture.md - Provider Reference: providers.md - Policies: policies.md - - Educational Resources: learning.md - Changelog: changelog.md + - Educational Resources: learning.md - About: about.md 
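A note on previewing this documentation site: the mkdocs.yml above relies on the material theme and the markdown_include extension, which is what resolves the `{!CHANGELOG.md!}` include in docs/changelog.md. Assuming the usual PyPI packages for both, a local preview would look roughly like:

```
$ pip install mkdocs mkdocs-material markdown-include
$ mkdocs serve   # builds the site and serves it at http://127.0.0.1:8000 by default
```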
From 9fbd9576ae6ce12f3c7a5a81e2fe61625b7b4f5c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 8 Aug 2020 09:36:11 +0530 Subject: [PATCH 079/188] add fixes to normalized json format --- pkg/iac-providers/terraform/v12/convert.go | 5 +- pkg/iac-providers/terraform/v12/load-file.go | 2 +- .../terraform/v12/load-file_test.go | 2 +- .../v12/testdata/tfjson/config1.json | 84 +++--- .../v12/testdata/tfjson/fullconfig.json | 84 +++--- .../v12/testdata/tfjson/moduleconfigs.json | 248 ++++++++---------- 6 files changed, 184 insertions(+), 241 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/convert.go b/pkg/iac-providers/terraform/v12/convert.go index 025b6c814..1e2d92e0b 100644 --- a/pkg/iac-providers/terraform/v12/convert.go +++ b/pkg/iac-providers/terraform/v12/convert.go @@ -58,11 +58,12 @@ func (c *converter) convertBody(body *hclsyntax.Body) (jsonObj, error) { if err != nil { return nil, err } + blockConfig := blockOut[block.Type].(jsonObj) if _, present := out[block.Type]; !present { - out[block.Type] = []jsonObj{blockOut} + out[block.Type] = []jsonObj{blockConfig} } else { list := out[block.Type].([]jsonObj) - list = append(list, blockOut) + list = append(list, blockConfig) out[block.Type] = list } } diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index 9fdd8b694..7f4e3c2d2 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -38,7 +38,7 @@ func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllReso hclFile, diags := parser.LoadConfigFile(absFilePath) if diags != nil { - zap.S().Errorf("failed to load config file '%s'. error:\n%v\n", diags) + zap.S().Errorf("failed to load config file '%s'. error:\n%v\n", absFilePath, diags) return allResourcesConfig, errLoadConfigFile } if hclFile == nil && diags.HasErrors() { diff --git a/pkg/iac-providers/terraform/v12/load-file_test.go b/pkg/iac-providers/terraform/v12/load-file_test.go index d1f289042..b9db07070 100644 --- a/pkg/iac-providers/terraform/v12/load-file_test.go +++ b/pkg/iac-providers/terraform/v12/load-file_test.go @@ -79,7 +79,7 @@ func TestLoadIacFile(t *testing.T) { wantErr: nil, }, { - name: "config1", + name: "dummyconfig", tfConfigFile: "./testdata/dummyconfig/dummyconfig.tf", tfJSONFile: "./testdata/tfjson/dummyconfig.json", tfv12: TfV12{}, diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json index d70fecda6..e5bcee920 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json @@ -11,26 +11,22 @@ "key_name": "${aws_key_pair.ec2key_playground.key_name}", "provisioner": [ { - "provisioner": { - "remote-exec": { - "connection": [ - { - "connection": { - "host": "${self.public_ip}", - "private_key": "${file(var.privateKey)}", - "type": "ssh", - "user": "ubuntu" - } - } - ], - "inline": [ - "sudo apt-get -y update", - "sudo apt-get -y install nginx", - "sudo touch /var/www/html/index.html", - "echo HelloWorld | sudo tee -a /var/www/html/index.html", - "sudo service nginx start" - ] - } + "remote-exec": { + "connection": [ + { + "host": "${self.public_ip}", + "private_key": "${file(var.privateKey)}", + "type": "ssh", + "user": "ubuntu" + } + ], + "inline": [ + "sudo apt-get -y update", + "sudo apt-get -y install nginx", + "sudo touch /var/www/html/index.html", + "echo HelloWorld | sudo tee -a /var/www/html/index.html", + 
"sudo service nginx start" + ] } } ], @@ -79,10 +75,8 @@ "config": { "route": [ { - "route": { - "cidr_block": "0.0.0.0/0", - "gateway_id": "${aws_internet_gateway.igw_playground.id}" - } + "cidr_block": "0.0.0.0/0", + "gateway_id": "${aws_internet_gateway.igw_playground.id}" } ], "tags": { @@ -113,36 +107,30 @@ "config": { "egress": [ { - "egress": { - "cidr_blocks": [ - "0.0.0.0/0" - ], - "from_port": 0, - "protocol": "-1", - "to_port": 0 - } + "cidr_blocks": [ + "0.0.0.0/0" + ], + "from_port": 0, + "protocol": "-1", + "to_port": 0 } ], "ingress": [ { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0" - ], - "from_port": 22, - "protocol": "tcp", - "to_port": 22 - } + "cidr_blocks": [ + "0.0.0.0/0" + ], + "from_port": 22, + "protocol": "tcp", + "to_port": 22 }, { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0" - ], - "from_port": 80, - "protocol": "tcp", - "to_port": 80 - } + "cidr_blocks": [ + "0.0.0.0/0" + ], + "from_port": 80, + "protocol": "tcp", + "to_port": 80 } ], "name": "sg", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json index 78e6c85d6..1a36646ca 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json @@ -11,26 +11,22 @@ "key_name": "${aws_key_pair.ec2key_playground.key_name}", "provisioner": [ { - "provisioner": { - "remote-exec": { - "connection": [ - { - "connection": { - "host": "${self.public_ip}", - "private_key": "${file(var.privateKey)}", - "type": "ssh", - "user": "ubuntu" - } - } - ], - "inline": [ - "sudo apt-get -y update", - "sudo apt-get -y install nginx", - "sudo touch /var/www/html/index.html", - "echo HelloWorld | sudo tee -a /var/www/html/index.html", - "sudo service nginx start" - ] - } + "remote-exec": { + "connection": [ + { + "host": "${self.public_ip}", + "private_key": "${file(var.privateKey)}", + "type": "ssh", + "user": "ubuntu" + } + ], + "inline": [ + "sudo apt-get -y update", + "sudo apt-get -y install nginx", + "sudo touch /var/www/html/index.html", + "echo HelloWorld | sudo tee -a /var/www/html/index.html", + "sudo service nginx start" + ] } } ], @@ -79,10 +75,8 @@ "config": { "route": [ { - "route": { - "cidr_block": "0.0.0.0/0", - "gateway_id": "${aws_internet_gateway.igw_playground.id}" - } + "cidr_block": "0.0.0.0/0", + "gateway_id": "${aws_internet_gateway.igw_playground.id}" } ], "tags": { @@ -113,36 +107,30 @@ "config": { "egress": [ { - "egress": { - "cidr_blocks": [ - "0.0.0.0/0" - ], - "from_port": 0, - "protocol": "-1", - "to_port": 0 - } + "cidr_blocks": [ + "0.0.0.0/0" + ], + "from_port": 0, + "protocol": "-1", + "to_port": 0 } ], "ingress": [ { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0" - ], - "from_port": 22, - "protocol": "tcp", - "to_port": 22 - } + "cidr_blocks": [ + "0.0.0.0/0" + ], + "from_port": 22, + "protocol": "tcp", + "to_port": 22 }, { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0" - ], - "from_port": 80, - "protocol": "tcp", - "to_port": 80 - } + "cidr_blocks": [ + "0.0.0.0/0" + ], + "from_port": 80, + "protocol": "tcp", + "to_port": 80 } ], "name": "sg", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json index 074ba47fd..48a10dabf 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json @@ -8,147 +8,119 @@ "config": { "default_cache_behavior": 
[ { - "default_cache_behavior": { - "allowed_methods": [ - "DELETE", - "GET", - "HEAD", - "OPTIONS", - "PATCH", - "POST", - "PUT" - ], - "cached_methods": [ - "GET", - "HEAD" - ], - "forwarded_values": [ - { - "forwarded_values": { - "cookies": [ - { - "cookies": { - "forward": "none" - } - } - ], - "query_string": false + "allowed_methods": [ + "DELETE", + "GET", + "HEAD", + "OPTIONS", + "PATCH", + "POST", + "PUT" + ], + "cached_methods": [ + "GET", + "HEAD" + ], + "forwarded_values": [ + { + "cookies": [ + { + "forward": "none" } - } - ], - "target_origin_id": "local.s3_origin_id", - "viewer_protocol_policy": "https-only" - } + ], + "query_string": false + } + ], + "target_origin_id": "local.s3_origin_id", + "viewer_protocol_policy": "https-only" } ], "enabled": true, "ordered_cache_behavior": [ { - "ordered_cache_behavior": { - "allowed_methods": [ - "GET", - "HEAD", - "OPTIONS" - ], - "cached_methods": [ - "GET", - "HEAD", - "OPTIONS" - ], - "compress": true, - "forwarded_values": [ - { - "forwarded_values": { - "cookies": [ - { - "cookies": { - "forward": "none" - } - } - ], - "headers": [ - "Origin" - ], - "query_string": false + "allowed_methods": [ + "GET", + "HEAD", + "OPTIONS" + ], + "cached_methods": [ + "GET", + "HEAD", + "OPTIONS" + ], + "compress": true, + "forwarded_values": [ + { + "cookies": [ + { + "forward": "none" } - } - ], - "path_pattern": "/content/immutable/*", - "target_origin_id": "local.s3_origin_id", - "viewer_protocol_policy": "allow-all" - } + ], + "headers": [ + "Origin" + ], + "query_string": false + } + ], + "path_pattern": "/content/immutable/*", + "target_origin_id": "local.s3_origin_id", + "viewer_protocol_policy": "allow-all" }, { - "ordered_cache_behavior": { - "allowed_methods": [ - "GET", - "HEAD", - "OPTIONS" - ], - "cached_methods": [ - "GET", - "HEAD" - ], - "forwarded_values": [ - { - "forwarded_values": { - "cookies": [ - { - "cookies": { - "forward": "none" - } - } - ], - "query_string": false + "allowed_methods": [ + "GET", + "HEAD", + "OPTIONS" + ], + "cached_methods": [ + "GET", + "HEAD" + ], + "forwarded_values": [ + { + "cookies": [ + { + "forward": "none" } - } - ], - "path_pattern": "/content/*", - "target_origin_id": "local.s3_origin_id", - "viewer_protocol_policy": "allow-all" - } + ], + "query_string": false + } + ], + "path_pattern": "/content/*", + "target_origin_id": "local.s3_origin_id", + "viewer_protocol_policy": "allow-all" } ], "origin": [ { - "origin": { - "domain_name": "aws_s3_bucket.b.bucket_regional_domain_name", - "origin_id": "local.s3_origin_id", - "s3_origin_config": [ - { - "s3_origin_config": { - "origin_access_identity": "origin-access-identity/cloudfront/ABCDEFG1234567" - } - } - ] - } + "domain_name": "aws_s3_bucket.b.bucket_regional_domain_name", + "origin_id": "local.s3_origin_id", + "s3_origin_config": [ + { + "origin_access_identity": "origin-access-identity/cloudfront/ABCDEFG1234567" + } + ] } ], "restrictions": [ { - "restrictions": { - "geo_restriction": [ - { - "geo_restriction": { - "locations": [ - "US", - "CA", - "GB", - "DE" - ], - "restriction_type": "whitelist" - } - } - ] - } + "geo_restriction": [ + { + "locations": [ + "US", + "CA", + "GB", + "DE" + ], + "restriction_type": "whitelist" + } + ] } ], "viewer_certificate": [ { - "viewer_certificate": { - "cloudfront_default_certificate": true, - "minimum_protocol_version": "TLSv1" - } + "cloudfront_default_certificate": true, + "minimum_protocol_version": "TLSv1" } ] } @@ -283,10 +255,8 @@ "load_balancer_name": "some-name", "policy_attribute": [ { 
- "policy_attribute": { - "name": "ECDHE-RSA-RC4-SHA", - "value": "true" - } + "name": "ECDHE-RSA-RC4-SHA", + "value": "true" } ], "policy_name": "wu-tang-ssl", @@ -320,28 +290,24 @@ "description": "Used in the terraform", "egress": [ { - "egress": { - "cidr_blocks": [ - "0.0.0.0/0", - "192.164.0.0/24" - ], - "from_port": 0, - "protocol": "-1", - "to_port": 0 - } + "cidr_blocks": [ + "0.0.0.0/0", + "192.164.0.0/24" + ], + "from_port": 0, + "protocol": "-1", + "to_port": 0 } ], "ingress": [ { - "ingress": { - "cidr_blocks": [ - "0.0.0.0/0", - "19.16.0.0/24" - ], - "from_port": 22, - "protocol": "tcp", - "to_port": 22 - } + "cidr_blocks": [ + "0.0.0.0/0", + "19.16.0.0/24" + ], + "from_port": 22, + "protocol": "tcp", + "to_port": 22 } ], "name": "acme_web", From ad257115660230aa1ab03622c1a8fe1edef82ed4 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 11:37:19 -0700 Subject: [PATCH 080/188] Update Terrascan to use the latest policy format - Adds initial AWS and Azure policy set - Adds command line switch for policy path --- cmd/terrascan/main.go | 3 +- pkg/cli/run.go | 4 +- ....EncryptionandKeyManagement.High.0407.json | 13 ++ ....EncryptionandKeyManagement.High.0408.json | 13 ++ .../AWS.CloudFront.Logging.Medium.0567.json | 13 ++ .../cloudfrontNoHTTPSTraffic.rego | 10 + .../cloudfrontNoLogging.rego | 21 ++ .../cloudfrontNoSecureCiphers.rego | 19 ++ .../AWS.CloudTrail.Logging.High.0399.json | 13 ++ .../AWS.CloudTrail.Logging.Low.0559.json | 13 ++ .../AWS.CloudTrail.Logging.Medium.0460.json | 13 ++ .../cloudTrailLogNotEncrypted.rego | 9 + .../cloudTrailMultiRegionNotCreated.rego | 9 + .../aws/aws_cloudtrail/enableSNSTopic.rego | 6 + .../opa/rego/aws/aws_db_instance/.json | 13 ++ .../rdsPubliclyAccessible.rego | 9 + .../AWS.IamUser.IAM.High.0390.json | 13 ++ .../noAccessKeyForRootAccount.rego | 20 ++ .../AWS.Iam.IAM.Low.0540.json | 14 ++ .../AWS.Iam.IAM.Medium.0454.json | 15 ++ .../AWS.Iam.IAM.Medium.0455.json | 15 ++ .../AWS.Iam.IAM.Medium.0456.json | 15 ++ .../AWS.Iam.IAM.Medium.0457.json | 15 ++ .../AWS.Iam.IAM.Medium.0458.json | 16 ++ .../AWS.Iam.IAM.Medium.0495.json | 16 ++ .../passwordMinLength.rego | 12 + .../passwordPolicyRequirement.rego | 9 + .../passwordRotateEvery90Days.rego | 9 + .../AWS.IamPolicy.IAM.High.0392.json | 13 ++ .../iamGrpPolicyWithFullAdminCntrl.rego | 51 +++++ .../AWS.IamPolicy.IAM.High.0392.json | 13 ++ .../iamPolicyWithFullAdminControl.rego | 50 +++++ .../AWS.IamUser.IAM.High.0387.json | 13 ++ .../AWS.IamUser.IAM.High.0388.json | 13 ++ .../rootUserNotContainMfaTypeHardware.rego | 34 +++ .../rootUserNotContainMfaTypeVirtual.rego | 35 +++ ....Instance.NetworkSecurity.Medium.0506.json | 13 ++ .../aws/aws_instance/instanceWithNoVpc.rego | 9 + ....EncryptionandKeyManagement.High.0412.json | 13 ++ .../aws_kinesis_stream.rego | 8 + .../AWS.KMS.Logging.High.0400.json | 13 ++ .../aws_kms_key/kmsKeyRotationDisabled.rego | 9 + ...hConfiguration.DataSecurity.High.0102.json | 13 ++ .../hardCodedKey.rego | 23 ++ ....EncryptionandKeyManagement.High.0405.json | 13 ++ .../AWS.S3Bucket.IAM.High.0370.json | 13 ++ .../AWS.S3Bucket.IAM.High.0377.json | 15 ++ .../AWS.S3Bucket.IAM.High.0378.json | 15 ++ .../AWS.S3Bucket.IAM.High.0379.json | 15 ++ .../AWS.S3Bucket.IAM.High.0381.json | 15 ++ ...WS.S3Bucket.NetworkSecurity.High.0417.json | 13 ++ .../aws/aws_s3_bucket/noS3BucketSseRules.rego | 9 + .../rego/aws/aws_s3_bucket/s3AclGrants.rego | 8 + .../s3BucketNoWebsiteIndexDoc.rego | 8 + .../aws_s3_bucket/s3VersioningMfaFalse.rego | 10 + .../AWS.IamPolicy.IAM.High.0374.json | 15 
++ .../AWS.S3Bucket.IAM.High.0371.json | 13 ++ .../AWS.S3Bucket.IAM.High.0372.json | 15 ++ .../actionsFromAllPrincipals.rego | 59 +++++ .../allowActionsFromAllPrincipals.rego | 51 +++++ ...curityGroup.NetworkSecurity.High.0094.json | 13 ++ .../unrestrictedIngressAccess.rego | 24 ++ .../aws_vpc/AWS.VPC.Logging.Medium.0470.json | 13 ++ .../aws_vpc/AWS.VPC.Logging.Medium.0471.json | 13 ++ .../opa/rego/aws/aws_vpc/defaultVpcExist.rego | 6 + .../aws/aws_vpc/vpcFlowLogsNotEnabled.rego | 27 +++ .../accurics.azure.CAM.162.json | 13 ++ .../azurerm_cosmosdb_account/noTags.rego | 9 + .../accurics.azure.EKM.156.json | 13 ++ .../checkDataDisksEncrypted.rego | 11 + .../accurics.azure.NPS.171.json | 19 ++ .../accurics.azure.NPS.172.json | 19 ++ .../accurics.azure.NPS.35.json | 19 ++ .../accurics.azure.NPS.36.json | 19 ++ .../accurics.azure.NPS.37.json | 19 ++ .../networkPortExposedPublic.rego | 73 ++++++ .../accurics.azure.LOG.151.json | 13 ++ .../accurics.azure.LOG.152.json | 13 ++ .../accurics.azure.LOG.155.json | 13 ++ .../connectionThrottling.rego | 10 + .../logConnections.rego | 10 + .../logRetention.rego | 14 ++ .../accurics.azure.BDR.163.json | 13 ++ .../accurics.azure.EKM.1.json | 13 ++ .../geoRedundancyDisabled.rego | 17 ++ .../sslEnforceDisabled.rego | 9 + .../accurics.azure.EKM.23.json | 13 ++ .../accurics.azure.NS.30.json | 15 ++ .../accurics.azure.NS.31.json | 15 ++ .../azurerm_redis_cache/nonSslEnabled.rego | 9 + .../publiclyAccessible.rego | 29 +++ .../accurics.azure.MON.157.json | 13 ++ .../checkAuditEnabled.rego | 36 +++ .../accurics.azure.NS.21.json | 15 ++ .../accurics.azure.NS.5.json | 15 ++ .../checkPublicAccessNotAllow.rego | 17 ++ .../opa/rego/azure/azurerm_sql_server/.json | 13 ++ .../sqlServerADAdminConfigured.rego | 21 ++ pkg/policy/interface.go | 2 +- pkg/policy/opa/constants.go | 8 +- pkg/policy/opa/opa_engine.go | 210 ++++++++++-------- pkg/runtime/executor.go | 19 +- pkg/utils/path.go | 24 ++ 103 files changed, 1757 insertions(+), 112 deletions(-) create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json create mode 100755 
pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego create mode 100755 pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json create mode 100755 pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego create mode 100755 
pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego create mode 100755 pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json create mode 100755 pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json create mode 100755 
pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 54bf3b3fe..82ad99ab8 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -34,6 +34,7 @@ func main() { cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") iacFilePath = flag.String("f", "", "IaC file path") iacDirPath = flag.String("d", "", "IaC directory path") + policyPath = flag.String("p", "", "Policy directory path") // logging flags logLevel = flag.String("log-level", "info", "logging level (debug, info, warn, error, panic, fatal)") @@ -48,6 +49,6 @@ func main() { } else { logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") - cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath) + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *policyPath) } } diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 54d5a23ce..3138bae19 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -21,11 +21,11 @@ import ( ) // Run executes terrascan in CLI mode -func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath string) { +func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, policyPath string) { // create a new runtime executor for processing IaC executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, - iacDirPath) + iacDirPath, policyPath) if err != nil { return } diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json new file mode 100755 index 000000000..e2f3a8a84 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudfrontNoHTTPSTraffic", + "file": "cloudfrontNoHTTPSTraffic.rego", + "ruleTemplate": "cloudfrontNoHTTPSTraffic", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use encrypted connection between CloudFront and origin server", + "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json new file mode 100755 index 000000000..3ee435f99 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -0,0 +1,13 @@ +{ + "ruleName": 
"cloudfrontNoSecureCiphers", + "file": "cloudfrontNoSecureCiphers.rego", + "ruleTemplate": "cloudfrontNoSecureCiphers", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Secure ciphers are not used in CloudFront distribution", + "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json new file mode 100755 index 000000000..a20956e92 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudfrontNoLogging", + "file": "cloudfrontNoLogging.rego", + "ruleTemplate": "cloudfrontNoLogging", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", + "ruleReferenceId": "AWS.CloudFront.Logging.Medium.0567", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego new file mode 100755 index 000000000..6073a927c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoHTTPSTraffic[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + orderedcachebehaviour = cloudfront.config.ordered_cache_behavior[i] + orderedcachebehaviour.viewer_protocol_policy == "allow-all" + traverse := sprintf("ordered_cache_behavior[%d].viewer_protocol_policy", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ordered_cache_behavior.viewer_protocol_policy", "AttributeDataType": "string", "Expected": "redirect-to-https", "Actual": orderedcachebehaviour.viewer_protocol_policy } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego new file mode 100755 index 000000000..dfd52a3a1 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + not cloudfront.config.logging_config + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + cloudfront.config.logging_config == [] + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": 
cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego new file mode 100755 index 000000000..9159d825f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}cloudfrontNoSecureCiphers[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + certificate = cloudfront.config.viewer_certificate[i] + certificate.cloudfront_default_certificate = false + not minimumAllowedProtocolVersion(certificate.minimum_protocol_version) + traverse := sprintf("viewer_certificate[%d].minimum_protocol_version", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "viewer_certificate.minimum_protocol_version", "AttributeDataType": "string", "Expected": "TLSv1.2", "Actual": certificate.minimum_protocol_version } +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.1" +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.2" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json new file mode 100755 index 000000000..8c070cbad --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudTrailLogNotEncrypted", + "file": "cloudTrailLogNotEncrypted.rego", + "ruleTemplate": "cloudTrailLogNotEncrypted", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Cloud Trail Log Not Enabled", + "ruleReferenceId": "AWS.CloudTrail.Logging.High.0399", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json new file mode 100755 index 000000000..9fcf02b5f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_enableSNSTopic", + "file": "enableSNSTopic.rego", + "ruleTemplate": "enableSNSTopic", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure appropriate subscribers to each SNS topic", + "ruleReferenceId": "AWS.CloudTrail.Logging.Low.0559", + "category": "Logging", + "version": 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json new file mode 100755 index 000000000..ec9b6b080 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudTrailMultiRegionNotCreated", + "file": "cloudTrailMultiRegionNotCreated.rego", + "ruleTemplate": "cloudTrailMultiRegionNotCreated", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Cloud Trail Multi Region not enabled", + "ruleReferenceId": "AWS.CloudTrail.Logging.Medium.0460", + "category": "Logging", + 
"version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego new file mode 100755 index 000000000..ae019288e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}cloudTrailLogNotEncrypted[retVal]{ + cloud_trail = input.aws_cloudtrail[_] + cloud_trail.config.kms_key_id == null + + traverse = "kms_key_id" + retVal := { "Id": cloud_trail.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_key_id", "AttributeDataType": "string", "Expected": "", "Actual": cloud_trail.config.kms_key_id } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego new file mode 100755 index 000000000..e2a2c2afe --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}cloudTrailMultiRegionNotCreated[retVal]{ + cloud_trail = input.aws_cloudtrail[_] + cloud_trail.config.is_multi_region_trail == false + + traverse = "is_multi_region_trail" + retVal := { "Id": cloud_trail.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "is_multi_region_trail", "AttributeDataType": "bool", "Expected": true, "Actual": cloud_trail.config.is_multi_region_trail } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego b/pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego new file mode 100755 index 000000000..fdd4cdbc0 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}enableSNSTopic[sns.id] { + sns := input.aws_cloudtrail[_] + sns.config.sns_topic_name == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/.json b/pkg/policies/opa/rego/aws/aws_db_instance/.json new file mode 100755 index 000000000..06c786984 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/.json @@ -0,0 +1,13 @@ +{ + "ruleName": "rdsPubliclyAccessible", + "file": "rdsPubliclyAccessible.rego", + "ruleTemplate": "rdsPubliclyAccessible", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS Instance publicly_accessible flag is true", + "ruleReferenceId": "", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego new file mode 100755 index 000000000..601e8c85e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}rdsPubliclyAccessible[retVal] { + db := input.aws_db_instance[_] + db.config.publicly_accessible == true + traverse = "publicly_accessible" + retVal := { "Id": db.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": db.config.publicly_accessible } +} + diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json new file mode 100755 index 
000000000..2e0b4d321 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json @@ -0,0 +1,13 @@ +{ + "ruleName": "noAccessKeyForRootAccount", + "file": "noAccessKeyForRootAccount.rego", + "ruleTemplate": "noAccessKeyForRootAccount", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "The root account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the root account be removed. Removing access keys associated with the root account limits vectors by which the account can be compromised. Additionally, removing the root access keys encourages the creation and use of role based accounts that are least privileged.", + "ruleReferenceId": "AWS.IamUser.IAM.High.0390", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego b/pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego new file mode 100755 index 000000000..fe6d4cefa --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego @@ -0,0 +1,20 @@ +package accurics + +{{.prefix}}noAccessKeyForRootAccount[retVal] { + access := input.aws_iam_access_key[_] + access.type == "aws_iam_access_key" + status = getStatus(access.config) + status == "Active" + access.config.user == "root" + traverse = "status" + retVal := { "Id": access.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "status", "AttributeDataType": "string", "Expected": "Inactive", "Actual": access.config.status } +} + +getStatus(config) = "Active" { + # defaults to Active + not config.status +} + +getStatus(config) = "Active" { + config.status == "Active" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json new file mode 100755 index 000000000..40b2ad60c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json @@ -0,0 +1,14 @@ +{ + "ruleName": "passwordRotateEvery90Days", + "file": "passwordRotateEvery90Days.rego", + "ruleTemplate": "passwordRotateEvery90Days", + "ruleTemplateArgs": { + "name": "passwordRotateEvery90Days", + "prefix": "" + }, + "severity": "LOW", + "description": "Reducing the password lifetime increases account resiliency against brute force login attempts", + "ruleReferenceId": "AWS.Iam.IAM.Low.0540", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json new file mode 100755 index 000000000..0be97abb7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireLowerCase", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireLowerCase", + "ruleTemplateArgs": { + "name": "passwordRequireLowerCase", + "prefix": "", + "required_parameter": "require_lowercase_characters" + }, + "severity": "MEDIUM", + "description": "Lower case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0454", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json new file mode 100755 index 000000000..30595871b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireNumber", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireNumber", + "ruleTemplateArgs": { + "name": "passwordRequireNumber", + "prefix": "", + "required_parameter": "require_numbers" + }, + "severity": "MEDIUM", + "description": "Number not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0455", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json new file mode 100755 index 000000000..a4e46cc3b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireSymbol", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireSymbol", + "ruleTemplateArgs": { + "name": "passwordRequireSymbol", + "prefix": "", + "required_parameter": "require_symbols" + }, + "severity": "MEDIUM", + "description": "Special symbols not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0456", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json new file mode 100755 index 000000000..dae9c9fb6 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireUpperCase", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireUpperCase", + "ruleTemplateArgs": { + "name": "passwordRequireUpperCase", + "prefix": "", + "required_parameter": "require_uppercase_characters" + }, + "severity": "MEDIUM", + "description": "Upper case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0457", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json new file mode 100755 index 000000000..46c09c988 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json @@ -0,0 +1,16 @@ +{ + "ruleName": "passwordRequireMinLength14", + "file": "passwordMinLength.rego", + "ruleTemplate": "passwordRequireMinLength14", + "ruleTemplateArgs": { + "name": "passwordRequireMinLength14", + "parameter": "minimum_password_length", + "prefix": "", + "value": 14 + }, + "severity": "MEDIUM", + "description": "Setting a lengthy password increases account resiliency against brute force login attempts", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0458", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json new file mode 100755 index 000000000..b5aa69d69 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json @@ -0,0 +1,16 @@ +{ + "ruleName": "passwordRequireMinLength", + "file": "passwordMinLength.rego", + "ruleTemplate": "passwordRequireMinLength", + "ruleTemplateArgs": { + "name": "passwordRequireMinLength", + "parameter": "minimum_password_length", + "prefix": "", + "value": 7 + }, + "severity": "MEDIUM", + "description": "Setting a lengthy password increases account resiliency against brute force login attempts", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0495", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego new file mode 100755 index 000000000..de1f034d8 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego @@ -0,0 +1,12 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + check_validity(password_policy.config, {{.value}}) == true + traverse = "{{.parameter}}" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "{{.parameter}}", "AttributeDataType": "int", "Expected": {{.value}}, "Actual": password_policy.config.{{.parameter}} } +} + +check_validity(p, v) = true { + p.{{.parameter}} < v +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego new file mode 100755 index 000000000..091302c3f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + password_policy.config.{{.required_parameter}} == false + password_policy_id := password_policy.id + traverse = "{{.required_parameter}}" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": 
traverse, "Attribute": "{{.required_parameter}}", "AttributeDataType": "bool", "Expected": true, "Actual": password_policy.config.{{.required_parameter}} } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego new file mode 100755 index 000000000..5755a95e5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + password_policy.config.max_password_age > 90 + password_policy_id := password_policy.id + traverse = "max_password_age" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "max_password_age", "AttributeDataType": "int", "Expected": 90, "Actual": password_policy.config.max_password_age } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..ece948f53 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,13 @@ +{ + "ruleName": "iamGrpPolicyWithFullAdminCntrl", + "file": "iamGrpPolicyWithFullAdminCntrl.rego", + "ruleTemplate": "iamGrpPolicyWithFullAdminCntrl", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego b/pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego new file mode 100755 index 000000000..7baee45ce --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego @@ -0,0 +1,51 @@ +package accurics + +{{.prefix}}iamGrpPolicyWithFullAdminCntrl[retVal] { + iamUserMfa = input.aws_iam_group_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} diff --git a/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..d0f99213c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_iamPolicyWithFullAdminControl", + "file": "iamPolicyWithFullAdminControl.rego", + "ruleTemplate": "iamPolicyWithFullAdminControl", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego b/pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego new file mode 100755 index 000000000..1581177fb --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego @@ -0,0 +1,50 @@ +package accurics + +{{.prefix}}iamPolicyWithFullAdminControl[retVal] { + iamUserMfa = input.aws_iam_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json new file mode 100755 index 000000000..706366e9f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json @@ -0,0 +1,13 @@ +{ + "ruleName": "rootUserNotContainMfaTypeHardware", + "file": "rootUserNotContainMfaTypeHardware.rego", + "ruleTemplate": "rootUserNotContainMfaTypeHardware", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure Hardware MFA device is enabled for the \"root\" account", + "ruleReferenceId": "AWS.IamUser.IAM.High.0387", + "category": "Identity and Access Management", + "version": 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json new file mode 100755 index 000000000..31623a9b5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json @@ -0,0 +1,13 @@ +{ + "ruleName": "rootUserNotContainMfaTypeVirtual", + "file": "rootUserNotContainMfaTypeVirtual.rego", + "ruleTemplate": "rootUserNotContainMfaTypeVirtual", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure Virtual MFA device is enabled for the \"root\" account", + "ruleReferenceId": "AWS.IamUser.IAM.High.0388", + 
"category": "Identity and Access Management", + "version": 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego new file mode 100755 index 000000000..e52bbe032 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego @@ -0,0 +1,34 @@ +package accurics + +{{.prefix}}rootUserNotContainMfaTypeHardware[iamUserMfa.id] { + iamUserMfa = input.aws_iam_user_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + check_role(statement, "sts:AssumeRole", "Allow") == true + root_check(iamUserMfa.config.user, "root") == true + mfa_check(statement.Principal.AWS, "[a-zA-Z]+[0-9]+") == true +} + + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +root_check(s, v) = true { + re_match(s, v) +} + +mfa_check(s, v) = true { + not re_match(v, s) +} + +check_role(s, a, e) = true { + s.Action == a + s.Effect == e +} diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego new file mode 100755 index 000000000..9fe281db4 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego @@ -0,0 +1,35 @@ +package accurics + +{{.prefix}}rootUserNotContainMfaTypeVirtual[iamUserMfa_id] { + iamUserMfa = input.aws_iam_user_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + check_role(statement, "sts:AssumeRole", "Allow") == true + root_check(iamUserMfa.config.user, "root") == true + mfa_check(statement.Principal.AWS, ":mfa/") == true + iamUserMfa_id = iamUserMfa.id +} + + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +root_check(s, v) = true { + re_match(v, s) +} + +mfa_check(s, v) = true { + not re_match(v, s) +} + +check_role(s, a, e) = true { + s.Action == a + s.Effect == e +} diff --git a/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json new file mode 100755 index 000000000..9b05b3442 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json @@ -0,0 +1,13 @@ +{ + "ruleName": "instanceWithNoVpc", + "file": "instanceWithNoVpc.rego", + "ruleTemplate": "instanceWithNoVpc", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Instance should be configured in vpc. 
AWS VPCs provides the controls to facilitate a formal process for approving and testing all network connections and changes to the firewall and router configurations.", + "ruleReferenceId": "AWS.Instance.NetworkSecurity.Medium.0506", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego b/pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego new file mode 100755 index 000000000..f3c809b12 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}instanceWithNoVpc[retVal] { + instance := input.aws_instance[_] + not instance.config.vpc_security_group_ids + rc = "ewogICJhd3NfdnBjIjogewogICAgImFjY3VyaWNzX3ZwYyI6IHsKICAgICAgImNpZHJfYmxvY2siOiAiPGNpZHJfYmxvY2s+IiwKICAgICAgImVuYWJsZV9kbnNfc3VwcG9ydCI6ICI8ZW5hYmxlX2Ruc19zdXBwb3J0PiIsCiAgICAgICJlbmFibGVfZG5zX2hvc3RuYW1lcyI6ICI8ZW5hYmxlX2Ruc19ob3N0bmFtZXM+IgogICAgfQogIH0KfQ==" + traverse = "" + retVal := { "Id": instance.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json new file mode 100755 index 000000000..2d4242f06 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json @@ -0,0 +1,13 @@ +{ + "ruleName": "kinesisNotEncryptedWithKms", + "file": "aws_kinesis_stream.rego", + "ruleTemplate": "kinesisNotEncryptedWithKms", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Kinesis Streams and metadata are not protected", + "ruleReferenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0412", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego b/pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego new file mode 100755 index 000000000..d1415dbef --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}kinesisNotEncryptedWithKms[retVal] { + stream = input.aws_kinesis_stream[_] + stream.config.kms_key_id == null + traverse = "" + retVal := { "Id": stream.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_key_id", "AttributeDataType": "string", "Expected": "", "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json new file mode 100755 index 000000000..b4074468f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json @@ -0,0 +1,13 @@ +{ + "ruleName": "kmsKeyRotationDisabled", + "file": "kmsKeyRotationDisabled.rego", + "ruleTemplate": "kmsKeyRotationDisabled", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure rotation for customer created CMKs is enabled", + "ruleReferenceId": "AWS.KMS.Logging.High.0400", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego 
b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego new file mode 100755 index 000000000..ef4907b52 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}kmsKeyRotationDisabled[retVal] { + kms_key = input.aws_kms_key[_] + kms_key.config.is_enabled == true + kms_key.config.enable_key_rotation == false + traverse = "enable_key_rotation" + retVal := { "Id": kms_key.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "enable_key_rotation", "AttributeDataType": "bool", "Expected": true, "Actual": kms_key.config.enable_key_rotation } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json new file mode 100755 index 000000000..d5c4b6dd5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json @@ -0,0 +1,13 @@ +{ + "ruleName": "hardCodedKey", + "file": "hardCodedKey.rego", + "ruleTemplate": "hardCodedKey", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Avoid using base64 encoded private keys as part of config", + "ruleReferenceId": "AWS.LaunchConfiguration.DataSecurity.High.0102", + "category": "Data Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego new file mode 100755 index 000000000..8fc594611 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego @@ -0,0 +1,23 @@ +package accurics + +{{.prefix}}hardCodedKey[res.id] { + res = input.aws_launch_configuration[_] + value = base64NullCheck(res.config.user_data_base64) + contains(value, "LS0tLS1CR") +} + +{{.prefix}}hardCodeKey[res.id]{ + res = input.aws_launch_configuration[_] + value = base64NullCheck(res.config.user_data_base64) + contains(value, "LS0tLS1CR") +} + +base64NullCheck(s) = result { + s == null + result := base64.decode("e30=") +} + +base64NullCheck(s) = result { + s != null + result := base64.decode(s) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json new file mode 100755 index 000000000..a758e0b44 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json @@ -0,0 +1,13 @@ +{ + "ruleName": "noS3BucketSseRules", + "file": "noS3BucketSseRules.rego", + "ruleTemplate": "noS3BucketSseRules", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that S3 Buckets have server side encryption at rest enabled to protect sensitive data.", + "ruleReferenceId": "AWS.S3Bucket.EncryptionandKeyManagement.High.0405", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json new file mode 100755 index 000000000..1a38cf231 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json @@ -0,0 +1,13 @@ +{ + "ruleName": "s3VersioningMfaFalse", + 
"file": "s3VersioningMfaFalse.rego", + "ruleTemplate": "s3VersioningMfaFalse", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Enabling MFA delete for versioning is a good way to add extra protection to sensitive files stored in buckets.aws s3api put-bucket-versioning --bucket bucketname --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa your-mfa-serial-number mfa-code", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0370", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json new file mode 100755 index 000000000..4a008b21e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allUsersReadAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "allUsersReadAccess", + "ruleTemplateArgs": { + "access": "public-read", + "name": "allUsersReadAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0377", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json new file mode 100755 index 000000000..b9b8584ed --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json @@ -0,0 +1,15 @@ +{ + "ruleName": "authUsersReadAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "authUsersReadAccess", + "ruleTemplateArgs": { + "access": "authenticated-read", + "name": "authUsersReadAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0378", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json new file mode 100755 index 000000000..a8286931b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allUsersWriteAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "allUsersWriteAccess", + "ruleTemplateArgs": { + "access": "public-read-write", + "name": "allUsersWriteAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0379", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json new file mode 100755 index 000000000..e413dd20e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allUsersReadWriteAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "allUsersReadWriteAccess", + "ruleTemplateArgs": { + "access": "public-read-write", + "name": "allUsersReadWriteAccess", + "prefix": "" + }, + 
"severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0381", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json new file mode 100755 index 000000000..1bc2de912 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json @@ -0,0 +1,13 @@ +{ + "ruleName": "s3BucketNoWebsiteIndexDoc", + "file": "s3BucketNoWebsiteIndexDoc.rego", + "ruleTemplate": "s3BucketNoWebsiteIndexDoc", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that there are not any static websites being hosted on buckets you aren't aware of", + "ruleReferenceId": "AWS.S3Bucket.NetworkSecurity.High.0417", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego new file mode 100755 index 000000000..2661fa0a9 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}noS3BucketSseRules[retVal] { + bucket := input.aws_s3_bucket[_] + bucket.config.server_side_encryption_configuration == [] + rc = "ewogICJzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24iOiB7CiAgICAicnVsZSI6IHsKICAgICAgImFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCI6IHsKICAgICAgICAic3NlX2FsZ29yaXRobSI6ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0=" + traverse = "" + retVal := { "Id": bucket.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "server_side_encryption_configuration", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego new file mode 100755 index 000000000..fc83f4a0f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + bucket := input.aws_s3_bucket[_] + bucket.config.acl == "{{.access}}" + traverse = "acl" + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "acl", "AttributeDataType": "string", "Expected": "private", "Actual": bucket.config.acl } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego new file mode 100755 index 000000000..7ee714f1e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}s3BucketNoWebsiteIndexDoc[retVal] { + bucket := input.aws_s3_bucket[_] + count(bucket.config.website) > 0 + traverse = "website" + retVal := { "Id": bucket.id, "ReplaceType": "delete", "CodeType": "block", "Traverse": traverse, "Attribute": "website", "AttributeDataType": "block", "Expected": null, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego new file mode 100755 index 000000000..d2c28b5b5 --- /dev/null +++ 
b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}s3VersioningMfaFalse[retVal] { + bucket := input.aws_s3_bucket[_] + some i + mfa := bucket.config.versioning[i] + mfa.mfa_delete == false + traverse := sprintf("versioning[%d].mfa_delete", [i]) + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "versioning.mfa_delete", "AttributeDataType": "bool", "Expected": true, "Actual": mfa.mfa_delete } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json new file mode 100755 index 000000000..2f36ad689 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allowListActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "ruleTemplate": "allowListActionFromAllPrncpls", + "ruleTemplateArgs": { + "Action": "s3:List", + "name": "allowListActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.IamPolicy.IAM.High.0374", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json new file mode 100755 index 000000000..e6cf3edc3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json @@ -0,0 +1,13 @@ +{ + "ruleName": "allowActionsFromAllPrincipals", + "file": "allowActionsFromAllPrincipals.rego", + "ruleTemplate": "allowActionsFromAllPrincipals", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0371", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json new file mode 100755 index 000000000..bf5b87e94 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allowDeleteActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "ruleTemplate": "allowDeleteActionFromAllPrncpls", + "ruleTemplateArgs": { + "Action": "s3:Delete", + "name": "allowDeleteActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0372", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego new file mode 100755 index 000000000..2d77a955d --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego @@ -0,0 +1,59 @@ +package 
accurics + +{{.prefix}}{{.name}}[retVal] { + s3bucket = input.aws_s3_bucket_policy[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "{{.Action}}") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +{{.prefix}}{{.name}}[retVal] { + s3bucket = input.aws_s3_bucket[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "{{.Action}}") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "{{.Action}}") == true + value := object.union(statement, { "Principal": "##principal##" }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "{{.Action}}") + value := statement +} + +policyCheck(s, p, e ,a) = true { + action := is_array(s.Action) + s.Effect == e + s.Principal == p + re_match(a, s.Action[_]) +} + +policyCheck(s, p, e ,a) = true { + action := is_string(s.Action) + s.Effect == e + s.Principal == p + re_match(a, s.Action) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego new file mode 100755 index 000000000..7d6feb19b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego @@ -0,0 +1,51 @@ +package accurics + +{{.prefix}}allowActionsFromAllPrincipals[retVal] { + s3bucket = input.aws_s3_bucket_policy[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +{{.prefix}}allowActionsFromAllPrincipals[retVal] { + s3bucket = input.aws_s3_bucket[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + 
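+# Helper rules shared by both checks above: json_unmarshal tolerates a missing (null) policy document,
+# replace_if_needed rewrites only the offending statements flagged by policyCheck, and policyCheck matches
+# statements whose Action, Effect, and Principal are "*", "Allow", and "*" respectively.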
+json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + value := object.union(statement, { "Principal": "##principal##", "Action": "##s3:action##" }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +policyCheck(s, a, e ,p) = true { + s.Action == a + s.Effect == e + s.Principal == p +} diff --git a/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json new file mode 100755 index 000000000..e53f8c6d9 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json @@ -0,0 +1,13 @@ +{ + "ruleName": "unrestrictedIngressAccess", + "file": "unrestrictedIngressAccess.rego", + "ruleTemplate": "unrestrictedIngressAccess", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": " It is recommended that no security group allows unrestricted ingress access", + "ruleReferenceId": "AWS.SecurityGroup.NetworkSecurity.High.0094", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego b/pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego new file mode 100755 index 000000000..77c643087 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego @@ -0,0 +1,24 @@ +package accurics + +{{.prefix}}unrestrictedIngressAccess[retVal] { + security_group = input.aws_security_group[_] + some i + ingress = security_group.config.ingress[i] + ingress.cidr_blocks[j] == "0.0.0.0/0" + ingress.from_port == 0 + ingress.to_port == 0 + ingress.protocol == "-1" + expected := [ item | item := validate_cidr(ingress.cidr_blocks[_]) ] + traverse := sprintf("ingress[%d].cidr_blocks", [i]) + retVal := { "Id": security_group.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ingress.cidr_blocks", "AttributeDataType": "list", "Expected": expected, "Actual": ingress.cidr_blocks } +} + +validate_cidr(cidr) = value { + cidr == "0.0.0.0/0" + value := "" +} + +validate_cidr(cidr) = value { + cidr != "0.0.0.0/0" + value := cidr +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json new file mode 100755 index 000000000..d44940e99 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json @@ -0,0 +1,13 @@ +{ + "ruleName": "vpcFlowLogsNotEnabled", + "file": "vpcFlowLogsNotEnabled.rego", + "ruleTemplate": "vpcFlowLogsNotEnabled", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure VPC flow logging is enabled in all VPCs", + "ruleReferenceId": "AWS.VPC.Logging.Medium.0470", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json new file mode 100755 index 000000000..8c22bcc45 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json @@ -0,0 +1,13 @@ +{ + "ruleName": "defaultVpcExist", + 
"file": "defaultVpcExist.rego", + "ruleTemplate": "defaultVpcExist", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Avoid creating resources in default VPC", + "ruleReferenceId": "AWS.VPC.Logging.Medium.0471", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego b/pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego new file mode 100755 index 000000000..90cbc4855 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}defaultVpcExist[vpc.id] { + vpc = input.aws_vpc[_] + vpc.config.is_default == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego b/pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego new file mode 100755 index 000000000..64e497146 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}vpcFlowLogsNotEnabled[retVal] { + vpc := input.aws_vpc[_] + vpc_input := input + vpc.type == "aws_vpc" + + not flowLogExist(vpc, vpc_input) + + rc = "cmVzb3VyY2UgImF3c19mbG93X2xvZyIgIiMjcmVzb3VyY2VfbmFtZSMjIiB7CiAgdnBjX2lkICAgICAgICAgID0gIiR7YXdzX3ZwYy4jI3Jlc291cmNlX25hbWUjIy5pZH0iCiAgaWFtX3JvbGVfYXJuICAgID0gIiMjYXJuOmF3czppYW06OjExMTExMTExMTExMTpyb2xlL3NhbXBsZV9yb2xlIyMiCiAgbG9nX2Rlc3RpbmF0aW9uID0gIiR7YXdzX3MzX2J1Y2tldC4jI3Jlc291cmNlX25hbWUjIy5hcm59IgogIHRyYWZmaWNfdHlwZSAgICA9ICJBTEwiCgogIHRhZ3MgPSB7CiAgICBHZW5lcmF0ZWRCeSA9ICJBY2N1cmljcyIKICAgIFBhcmVudFJlc291cmNlSWQgPSAiYXdzX3ZwYy4jI3Jlc291cmNlX25hbWUjIyIKICB9Cn0KCnJlc291cmNlICJhd3NfczNfYnVja2V0IiAiIyNyZXNvdXJjZV9uYW1lIyMiIHsKICBidWNrZXQgPSAiIyNyZXNvdXJjZV9uYW1lIyNfZmxvd19sb2dfczNfYnVja2V0IgogIGFjbCAgICA9ICJwcml2YXRlIgogIGZvcmNlX2Rlc3Ryb3kgPSB0cnVlCgogIHZlcnNpb25pbmcgewogICAgZW5hYmxlZCA9IHRydWUKICAgIG1mYV9kZWxldGUgPSB0cnVlCiAgfQoKICBzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24gewogICAgcnVsZSB7CiAgICAgIGFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCB7CiAgICAgICAgc3NlX2FsZ29yaXRobSA9ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0KCnJlc291cmNlICJhd3NfczNfYnVja2V0X3BvbGljeSIgIiMjcmVzb3VyY2VfbmFtZSMjIiB7CiAgYnVja2V0ID0gIiR7YXdzX3MzX2J1Y2tldC4jI3Jlc291cmNlX25hbWUjIy5pZH0iCgogIHBvbGljeSA9IDw8UE9MSUNZCnsKICAiVmVyc2lvbiI6ICIyMDEyLTEwLTE3IiwKICAiU3RhdGVtZW50IjogWwogICAgewogICAgICAiU2lkIjogIiMjcmVzb3VyY2VfbmFtZSMjLXJlc3RyaWN0LWFjY2Vzcy10by11c2Vycy1vci1yb2xlcyIsCiAgICAgICJFZmZlY3QiOiAiQWxsb3ciLAogICAgICAiUHJpbmNpcGFsIjogWwogICAgICAgIHsKICAgICAgICAgICJBV1MiOiBbCiAgICAgICAgICAgICJhcm46YXdzOmlhbTo6IyNhY291bnRfaWQjIzpyb2xlLyMjcm9sZV9uYW1lIyMiLAogICAgICAgICAgICAiYXJuOmF3czppYW06OiMjYWNvdW50X2lkIyM6dXNlci8jI3VzZXJfbmFtZSMjIgogICAgICAgICAgXQogICAgICAgIH0KICAgICAgXSwKICAgICAgIkFjdGlvbiI6ICJzMzpHZXRPYmplY3QiLAogICAgICAiUmVzb3VyY2UiOiAiYXJuOmF3czpzMzo6OiR7YXdzX3MzX2J1Y2tldC4jI3Jlc291cmNlX25hbWUjIy5pZH0vKiIKICAgIH0KICBdCn0KUE9MSUNZCn0=" + decode_rc = base64.decode(rc) + replaced_vpc_id := replace(decode_rc, "##resource_name##", vpc.name) + + traverse = "" + retVal := { "Id": vpc.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": base64.encode(replaced_vpc_id), "Actual": null } +} + +flowLogExist(vpc, vpc_input) = exists { + flow_log_vpcs_set := { vpc_id | input.aws_flow_log[i].type == "aws_flow_log"; vpc_id := input.aws_flow_log[i].config.vpc_id } + flow_log_vpcs_set[vpc.id] + exists = true +} else = exists { + flow_log_tags_set := { resource_id | 
input.aws_flow_log[i].type == "aws_flow_log"; resource_id := input.aws_flow_log[i].config.tags.ParentResourceId } + vpc_name := sprintf("aws_vpc.%s", [vpc.name]) + flow_log_tags_set[vpc_name] + exists = true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json new file mode 100755 index 000000000..b5a48fb4e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_noTags", + "file": "noTags.rego", + "ruleTemplate": "noTags", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that Cosmos DB Account has an associated tag", + "ruleReferenceId": "accurics.azure.CAM.162", + "category": "Cloud Assets Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego new file mode 100755 index 000000000..ba1f113a9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}noTags[retVal] { + cosmos := input.azurerm_cosmosdb_account[_] + cosmos.config.tags == null + + rc := "ewogICJ0YWdzIjogewogICAgImFkZGVkX2J5IjogImFjY3VyaWNzIgogIH0KfQ==" + retVal := { "Id": cosmos.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": "", "Attribute": "", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json b/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json new file mode 100755 index 000000000..6ea0f84ce --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_checkDataDisksEncrypted", + "file": "checkDataDisksEncrypted.rego", + "ruleTemplate": "checkDataDisksEncrypted", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that 'OS disk' are encrypted", + "ruleReferenceId": "accurics.azure.EKM.156", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego b/pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego new file mode 100755 index 000000000..9b561d35f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego @@ -0,0 +1,11 @@ +package accurics + +{{.prefix}}checkDataDisksEncrypted[retVal] { + managed_disk := input.azurerm_managed_disk[_] + some i + encryption_settings = managed_disk.config.encryption_settings[i] + encryption_settings.enabled == false + + traverse := sprintf("encryption_settings[%d].enabled", [i]) + retVal := { "Id": managed_disk.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encryption_settings.enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": encryption_settings.enabled } +} diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json new file mode 100755 index 000000000..0b6e1be26 --- /dev/null +++ 
b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort3389ExposedPublicEntire", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 0, + "evalHosts": true, + "name": "networkPort3389ExposedPublicEntire", + "numberOfHosts": 1, + "portNumber": 3389, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "Remote Desktop (TCP:3389) is exposed to the entire public internet", + "ruleReferenceId": "accurics.azure.NPS.171", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json new file mode 100755 index 000000000..9a1ffd52d --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort22ExposedPublicEntire", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 0, + "evalHosts": true, + "name": "networkPort22ExposedPublicEntire", + "numberOfHosts": 1, + "portNumber": 22, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "SSH (TCP:22) is exposed to the entire public internet", + "ruleReferenceId": "accurics.azure.NPS.172", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json new file mode 100755 index 000000000..001a10bd5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort9090ExposedPublicWide", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 1, + "evalHosts": false, + "name": "networkPort9090ExposedPublicWide", + "numberOfHosts": 27, + "portNumber": 9090, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "CiscoSecure, WebSM (TCP:9090) is exposed to the wide public internet", + "ruleReferenceId": "accurics.azure.NPS.35", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json new file mode 100755 index 000000000..8a6b3c74f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort3389ExposedPublicWide", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 1, + "evalHosts": false, + "name": "networkPort3389ExposedPublicWide", + "numberOfHosts": 27, + "portNumber": 3389, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "Remote Desktop (TCP:3389) is exposed to the wide public internet", + "ruleReferenceId": "accurics.azure.NPS.36", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json new file mode 100755 index 000000000..4035cbcd7 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort22ExposedPublicWide", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 1, + "evalHosts": false, + "name": "networkPort22ExposedPublicWide", + "numberOfHosts": 27, + "portNumber": 22, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "SSH (TCP:22) is exposed to the wide public internet", + "ruleReferenceId": "accurics.azure.NPS.37", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego new file mode 100755 index 000000000..311289687 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego @@ -0,0 +1,73 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + sg = input.azurerm_network_security_rule[_] + sg.config.access == "Allow" + sg.config.direction == "Inbound" + checkScopeIsPublic(sg.config.source_address_prefix) + checkPort(sg.config, "{{.portNumber}}") + checkProtocol(sg.config.protocol) + + traverse := "source_address_prefix" + retVal := { "Id": sg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "source_address_prefix", "AttributeDataType": "string", "Expected": "", "Actual": sg.config.source_address_prefix } +} + +{{.prefix}}{{.name}}[retVal] { + nsg = input.azurerm_network_security_group[_] + some i + sg = nsg.config.security_rule[i] + sg.access == "Allow" + sg.direction == "Inbound" + checkScopeIsPublic(sg.source_address_prefix) + checkPort(sg, "{{.portNumber}}") + checkProtocol(sg.protocol) + + traverse := sprintf("security_rule[%d].source_address_prefix", [i]) + retVal := { "Id": nsg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "security_rule.source_address_prefix", "AttributeDataType": "string", "Expected": "", "Actual": sg.source_address_prefix } +} + +scopeIsPrivate(scope) { + private_ips = ["10.0.0.0/8", "192.168.0.0/16", "172.16.0.0/12"] + net.cidr_contains(private_ips[_], scope) +} + +checkScopeIsPublic(val) = true { + glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + not scopeIsPrivate(val) + hosts = split(val, "/") + to_number(hosts[1]) < {{.numberOfHosts}} + to_number(hosts[1]) >= {{.endLimit}} +} + +checkScopeIsPublic(val) = true { + glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + not scopeIsPrivate(val) + hosts = split(val, "/") + not hosts[1] + {{.evalHosts}} +} + +checkScopeIsPublic(val) = true { + not glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + val == "*" + {{.evalHosts}} +} + +checkScopeIsPublic(val) = true { + not glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + val == "Internet" + {{.evalHosts}} +} + +checkPort(obj, val) = true { + obj.destination_port_range == val +} + +checkPort(obj, val) = true { + obj.source_port_range == val +} + +checkProtocol(proto) { + protocols = ["{{.protocol}}", "*"] + upper(proto) == protocols[_] +} diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json 
b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json new file mode 100755 index 000000000..dacf226b5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_connectionThrottling", + "file": "connectionThrottling.rego", + "ruleTemplate": "connectionThrottling", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure server parameter 'connection_throttling' is set to 'ON' for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.LOG.151", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json new file mode 100755 index 000000000..54c058d46 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_logConnections", + "file": "logConnections.rego", + "ruleTemplate": "logConnections", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure server parameter 'log_connections' is set to 'ON' for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.LOG.152", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json new file mode 100755 index 000000000..052a928b9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_logRetention", + "file": "logRetention.rego", + "ruleTemplate": "logRetention", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure server parameter 'log_retention_days' is greater than 3 days for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.LOG.155", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego new file mode 100755 index 000000000..54beb31fd --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}connectionThrottling[retVal] { + psql_config := input.azurerm_postgresql_configuration[_] + psql_config.config.name == "connection_throttling" + psql_config.config.value != "on" + + traverse = "value" + retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "on", "Actual": psql_config.config.value } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego new file mode 100755 index 000000000..dfc2dc516 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}logConnections[retVal] { + psql_config := input.azurerm_postgresql_configuration[_] + 
psql_config.config.name == "log_connections" + psql_config.config.value != "on" + + traverse = "value" + retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "on", "Actual": psql_config.config.value } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego new file mode 100755 index 000000000..ce6644b5e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego @@ -0,0 +1,14 @@ +package accurics + +{{.prefix}}logRetention[retVal] { + psql_config := input.azurerm_postgresql_configuration[_] + psql_config.config.name == "log_retention_days" + not checkValid(psql_config.config.value) + + traverse = "value" + retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "4", "Actual": psql_config.config.value } +} + +checkValid(val) = true { + val == ["4", "5", "6", "7"][_] +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json new file mode 100755 index 000000000..f00c25c6f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_geoRedundancyDisabled", + "file": "geoRedundancyDisabled.rego", + "ruleTemplate": "geoRedundancyDisabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Geo Redundant Backups is enabled on PostgreSQL", + "ruleReferenceId": "accurics.azure.BDR.163", + "category": "Backup and Disaster Recovery", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json new file mode 100755 index 000000000..5b7d411b4 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_sslEnforceDisabled", + "file": "sslEnforceDisabled.rego", + "ruleTemplate": "sslEnforceDisabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure 'Enforce SSL connection' is set to 'ENABLED' for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.EKM.1", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego new file mode 100755 index 000000000..4e1bf8305 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}geoRedundancyDisabled[retVal] { + psql_server := input.azurerm_postgresql_server[_] + psql_server.config.geo_redundant_backup_enabled != true + + traverse = "geo_redundant_backup_enabled" + retVal := { "Id": psql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "geo_redundant_backup_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": 
psql_server.config.geo_redundant_backup_enabled } +} + +{{.prefix}}geoRedundancyDisabled[retVal] { + psql_server := input.azurerm_postgresql_server[_] + object.get(psql_server.config, "geo_redundant_backup_enabled", "undefined") == "undefined" + + traverse = "geo_redundant_backup_enabled" + retVal := { "Id": psql_server.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "geo_redundant_backup_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego new file mode 100755 index 000000000..b69f9771e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}sslEnforceDisabled[retVal] { + psql_server := input.azurerm_postgresql_server[_] + psql_server.config.ssl_enforcement_enabled == false + + traverse = "ssl_enforcement_enabled" + retVal := { "Id": psql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ssl_enforcement_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": psql_server.config.ssl_enforcement_enabled } +} diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json new file mode 100755 index 000000000..f25530c3e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_nonSslEnabled", + "file": "nonSslEnabled.rego", + "ruleTemplate": "nonSslEnabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that the Redis Cache accepts only SSL connections", + "ruleReferenceId": "accurics.azure.EKM.23", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json new file mode 100755 index 000000000..134fb0295 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json @@ -0,0 +1,15 @@ +{ + "ruleName": "reme_entirelyAccessible", + "file": "publiclyAccessible.rego", + "ruleTemplate": "publiclyAccessible", + "ruleTemplateArgs": { + "isEntire": true, + "name": "entirelyAccessible", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure there are no firewall rules allowing unrestricted access to Redis from the Internet", + "ruleReferenceId": "accurics.azure.NS.30", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json new file mode 100755 index 000000000..24ad82625 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json @@ -0,0 +1,15 @@ +{ + "ruleName": "reme_publiclyAccessible", + "file": "publiclyAccessible.rego", + "ruleTemplate": "publiclyAccessible", + "ruleTemplateArgs": { + "isEntire": false, + "name": "publiclyAccessible", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure there are no firewall rules allowing unrestricted access to Redis from other Azure sources", + 
"ruleReferenceId": "accurics.azure.NS.31", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego new file mode 100755 index 000000000..791e54abf --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}nonSslEnabled[retVal] { + redis := input.azurerm_redis_cache[_] + redis.config.enable_non_ssl_port == true + + traverse = "enable_non_ssl_port" + retVal := { "Id": redis.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "enable_non_ssl_port", "AttributeDataType": "boolean", "Expected": false, "Actual": redis.config.enable_non_ssl_port } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego b/pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego new file mode 100755 index 000000000..77a79181a --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego @@ -0,0 +1,29 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + redis := input.azurerm_redis_firewall_rule[_] + redis.config.start_ip == "0.0.0.0" + redis.config.end_ip == "0.0.0.0" + {{.isEntire}} + retVal := { "Id": redis.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} + +{{.prefix}}{{.name}}[retVal] { + redis := input.azurerm_redis_firewall_rule[_] + redis.config.start_ip != "0.0.0.0" + checkScopeIsPublic(redis.config.start_ip) + redis.config.end_ip != "0.0.0.0" + checkScopeIsPublic(redis.config.end_ip) + not {{.isEntire}} + retVal := { "Id": redis.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} + +scopeIsPrivate(scope) { + private_ips = ["10.0.0.0/8", "192.168.0.0/16", "172.16.0.0/12"] + net.cidr_contains(private_ips[_], scope) +} + +checkScopeIsPublic(val) = true { + glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + not scopeIsPrivate(val) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json b/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json new file mode 100755 index 000000000..9c3d806ad --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_checkAuditEnabled", + "file": "checkAuditEnabled.rego", + "ruleTemplate": "checkAuditEnabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that 'Threat Detection' is enabled for Azure SQL Database", + "ruleReferenceId": "accurics.azure.MON.157", + "category": "Monitoring", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego new file mode 100755 index 000000000..33ee467b6 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego @@ -0,0 +1,36 @@ +package accurics + +{{.prefix}}checkAuditEnabled[retVal] { + sql_db_resource := input.azurerm_sql_database[_] + some i + threat_detection_policy = sql_db_resource.config.threat_detection_policy[i] + threat_detection_policy.state == 
"Disabled" + + traverse := sprintf("threat_detection_policy[%d].state", [i]) + retVal := { "Id": sql_db_resource.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "threat_detection_policy.state", "AttributeDataType": "string", "Expected": "Enabled", "Actual": threat_detection_policy.state } +} + +{{.prefix}}checkAuditEnabled[retVal] { + sql_db_resource := input.azurerm_sql_database[_] + object.get(sql_db_resource.config, "threat_detection_policy", "undefined") == "undefined" + + rc := "ewogICJ0aHJlYXRfZGV0ZWN0aW9uX3BvbGljeSI6IHsKICAgICJzdGF0ZSI6ICJFbmFibGVkIiwKICAgICJzdG9yYWdlX2FjY291bnRfYWNjZXNzX2tleSI6ICIke2F6dXJlcm1fc3RvcmFnZV9hY2NvdW50LiMjcmVzb3VyY2VfbmFtZSMjLnByaW1hcnlfYWNjZXNzX2tleX0iLAogICAgInN0b3JhZ2VfZW5kcG9pbnQiOiAiJHthenVyZXJtX3N0b3JhZ2VfYWNjb3VudC4jI3Jlc291cmNlX25hbWUjIy5wcmltYXJ5X2Jsb2JfZW5kcG9pbnR9IiwKICAgICJ1c2Vfc2VydmVyX2RlZmF1bHQiOiAiRW5hYmxlZCIKICB9Cn0=" + decode_rc = base64.decode(rc) + storage_account := input.azurerm_storage_account[0] + replaced_resource_name := replace(decode_rc, "##resource_name##", storage_account.name) + + retVal := { "Id": sql_db_resource.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "", "AttributeDataType": "block", "Expected": base64.encode(replaced_resource_name), "Actual": null } +} + +# create storage_account TODO +# {{.prefix}}checkAuditEnabled[retVal] { +# sql_db_resource := input.azurerm_sql_database[_] +# object.get(sql_db_resource.config, "threat_detection_policy", "undefined") == "undefined" + +# rc := "ewogICJ0aHJlYXRfZGV0ZWN0aW9uX3BvbGljeSI6IHsKICAgICJzdGF0ZSI6ICJFbmFibGVkIiwKICAgICJzdG9yYWdlX2FjY291bnRfYWNjZXNzX2tleSI6ICIke2F6dXJlcm1fc3RvcmFnZV9hY2NvdW50LiMjcmVzb3VyY2VfbmFtZSMjLnByaW1hcnlfYWNjZXNzX2tleX0iLAogICAgInN0b3JhZ2VfZW5kcG9pbnQiOiAiJHthenVyZXJtX3N0b3JhZ2VfYWNjb3VudC4jI3Jlc291cmNlX25hbWUjIy5wcmltYXJ5X2Jsb2JfZW5kcG9pbnR9IiwKICAgICJ1c2Vfc2VydmVyX2RlZmF1bHQiOiAiRW5hYmxlZCIKICB9Cn0=" +# decode_rc = base64.decode(rc) +# object.get(input, "azurerm_storage_account", "undefined") == "undefined" +# replaced_resource_name := replace(decode_rc, "##resource_name##", "blah") + +# retVal := { "Id": sql_db_resource.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "", "AttributeDataType": "block", "Expected": base64.encode(replaced_resource_name), "Actual": null } +# } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json new file mode 100755 index 000000000..473c088a7 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json @@ -0,0 +1,15 @@ +{ + "ruleName": "reme_sqlIngressAccess", + "file": "checkPublicAccessNotAllow.rego", + "ruleTemplate": "checkPublicAccessNotAllow", + "ruleTemplateArgs": { + "isEntire": false, + "name": "sqlIngressAccess", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that no SQL Server allows ingress from 0.0.0.0/0 (ANY IP)", + "ruleReferenceId": "accurics.azure.NS.21", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json new file mode 100755 index 000000000..8af78d71b --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json @@ -0,0 +1,15 @@ +{ + "ruleName": 
"reme_sqlPublicAccess", + "file": "checkPublicAccessNotAllow.rego", + "ruleTemplate": "checkPublicAccessNotAllow", + "ruleTemplateArgs": { + "isEntire": true, + "name": "sqlPublicAccess", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure entire Azure infrastructure doesn't have access to Azure SQL ServerEnsure entire Azure infrastructure doesn't have access to Azure SQL Server", + "ruleReferenceId": "accurics.azure.NS.5", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego new file mode 100755 index 000000000..5534efd66 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + sql_rule := input.azurerm_sql_firewall_rule[_] + sql_rule.config.start_ip_address == "0.0.0.0" + sql_rule.config.end_ip_address == "0.0.0.0" + {{.isEntire}} + retVal := { "Id": sql_rule.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} + +{{.prefix}}{{.name}}[retVal] { + sql_rule := input.azurerm_sql_firewall_rule[_] + sql_rule.config.start_ip_address == "0.0.0.0" + sql_rule.config.end_ip_address == "255.255.255.255" + not {{.isEntire}} + retVal := { "Id": sql_rule.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/.json new file mode 100755 index 000000000..7b995d40d --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_sqlServerADAdminConfigured", + "file": "sqlServerADAdminConfigured.rego", + "ruleTemplate": "sqlServerADAdminConfigured", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Azure Active Directory Admin is configured for SQL Server", + "ruleReferenceId": "", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego new file mode 100755 index 000000000..ed63dd4ee --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}sqlServerADAdminConfigured[retVal] { + sql_server := input.azurerm_sql_server[_] + sql_server.type == "azurerm_sql_server" + key := concat("-", [sql_server.config.resource_group_name, sql_server.config.name]) + not adAdminExist(key) + rc = "ZGF0YSAiYXp1cmVybV9jbGllbnRfY29uZmlnIiAiY3VycmVudCIge30KCnJlc291cmNlICJhenVyZXJtX3NxbF9hY3RpdmVfZGlyZWN0b3J5X2FkbWluaXN0cmF0b3IiICIjI3Jlc291cmNlX25hbWUjIyIgewogIHNlcnZlcl9uYW1lICAgICAgICAgPSBhenVyZXJtX3NxbF9zZXJ2ZXIuIyNyZXNvdXJjZV9uYW1lIyMubmFtZQogIHJlc291cmNlX2dyb3VwX25hbWUgPSBhenVyZXJtX3Jlc291cmNlX2dyb3VwLiMjcmVzb3VyY2VfbmFtZSMjLm5hbWUKICBsb2dpbiAgICAgICAgICAgICAgID0gInNxbGFkbWluIgogIHRlbmFudF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50LnRlbmFudF9pZAogIG9iamVjdF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50Lm9iamVjdF9pZAp9" + 
decode_rc = base64.decode(rc) + replaced := replace(decode_rc, "##resource_name##", sql_server.name) + traverse = "" + retVal := { "Id": sql_server.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "resource", "Expected": base64.encode(replaced), "Actual": null } +} + +adAdminExist(rg_servername) = exists { + ad_admin_set := { ad_id | input.azurerm_sql_active_directory_administrator[i].type == "azurerm_sql_active_directory_administrator"; ad_id := concat("-", [input.azurerm_sql_active_directory_administrator[i].config.resource_group_name, input.azurerm_sql_active_directory_administrator[i].config.server_name]) } + ad_admin_set[rg_servername] + exists = true +} else = false { + true +} \ No newline at end of file diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 8c15e02a2..3939ee5ac 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -19,7 +19,7 @@ package policy type Manager interface { Import() error Export() error - Validate() error + CreateManager() error } type Engine interface { diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go index 7d66da466..cdbc959f4 100644 --- a/pkg/policy/opa/constants.go +++ b/pkg/policy/opa/constants.go @@ -1,7 +1,7 @@ -package policy +package opa const ( - RegoMetadataFile = "rule.json" - RegoFileSuffix = ".rego" - RuleQueryBase = "data.accurics" + RegoMetadataFileSuffix = ".json" + RegoFileSuffix = ".rego" + RuleQueryBase = "data.accurics" ) diff --git a/pkg/policy/opa/opa_engine.go b/pkg/policy/opa/opa_engine.go index bd50ef368..bea7c9725 100644 --- a/pkg/policy/opa/opa_engine.go +++ b/pkg/policy/opa/opa_engine.go @@ -14,7 +14,7 @@ limitations under the License. */ -package policy +package opa import ( "bytes" @@ -26,9 +26,10 @@ import ( "os" "path/filepath" "sort" - "strings" "text/template" + "github.com/accurics/terrascan/pkg/utils" + "github.com/open-policy-agent/opa/ast" "go.uber.org/zap" @@ -36,70 +37,99 @@ import ( "github.com/open-policy-agent/opa/rego" ) -type AccuricsRegoMetadata struct { - Name string `json:"ruleName"` - DisplayName string `json:"ruleDisplayName"` - Category string `json:"category"` - ImpactedRes []string `json:"impactedRes"` - PolicyRelevance string `json:"policyRelevance"` - Remediation string `json:"remediation"` - Row int `json:"row"` - Rule string `json:"rule"` +type Violation struct { + Name string + Description string + LineNumber int + Category string + Data interface{} + RuleData interface{} +} + +type ResultData struct { + EngineType string + Provider string + Violations []*Violation +} + +type RegoMetadata struct { + RuleName string `json:"ruleName"` + File string `json:"file"` RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` - RuleReferenceID string `json:"ruleReferenceId"` + RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` Severity string `json:"severity"` - Vulnerability string `json:"vulnerability"` + Description string `json:"description"` + RuleReferenceID string `json:"ruleReferenceId"` + Category string `json:"category"` + Version int `json:"version"` } type RegoData struct { - Name string `json:"ruleName"` - DisplayName string `json:"ruleDisplayName"` - Category string `json:"category"` - Remediation string `json:"remediation"` - Rule string `json:"rule"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` - RuleReferenceID string `json:"ruleReferenceId"` - 
Severity string `json:"severity"` - Vulnerability string `json:"vulnerability"` - RawRego *[]byte - PreparedQuery *rego.PreparedEvalQuery + Metadata RegoMetadata + RawRego []byte + PreparedQuery *rego.PreparedEvalQuery } -type ResultData struct { +type EngineStats struct { + ruleCount int + regoFileCount int + metadataFileCount int + metadataCount int } type OpaEngine struct { Context context.Context RegoFileMap map[string][]byte RegoDataMap map[string]*RegoData + stats EngineStats +} + +func (o *OpaEngine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { + // Load metadata file if it exists + metadata, err := ioutil.ReadFile(metaFilename) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + } + return nil, err + } + + // Read metadata into struct + regoMetadata := RegoMetadata{} + if err = json.Unmarshal(metadata, ®oMetadata); err != nil { + zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) + return nil, err + } + return ®oMetadata, err } -func filterFileListBySuffix(allFileList *[]string, filter string) *[]string { - fileList := make([]string, 0) +func (o *OpaEngine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { + for i := range regoDataList { + regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) + rawRegoData, err := ioutil.ReadFile(regoPath) + if err != nil { + zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) + continue + } - for i := range *allFileList { - if strings.HasSuffix((*allFileList)[i], filter) { - fileList = append(fileList, (*allFileList)[i]) + // Load the raw rego into the map + _, ok := (*regoFileMap)[regoPath] + if ok { + // Already loaded this file, so continue + continue } + + (*regoFileMap)[regoPath] = rawRegoData } - return &fileList + return nil } func (o *OpaEngine) LoadRegoFiles(policyPath string) error { - ruleCount := 0 - regoFileCount := 0 - metadataCount := 0 - // Walk the file path and find all directories - dirList := make([]string, 0) - err := filepath.Walk(policyPath, func(filePath string, fileInfo os.FileInfo, err error) error { - if fileInfo != nil && fileInfo.IsDir() { - dirList = append(dirList, filePath) - } + dirList, err := utils.FindAllDirectories(policyPath) + if err != nil { return err - }) + } if len(dirList) == 0 { return fmt.Errorf("no directories found for path %s", policyPath) @@ -109,80 +139,63 @@ func (o *OpaEngine) LoadRegoFiles(policyPath string) error { o.RegoDataMap = make(map[string]*RegoData) // Load rego data files from each dir + // First, we read the metadata file, which contains info about the associated rego rule. The .rego file data is + // stored in a map in its raw format. 
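+	// For example, accurics.azure.NS.5.json points at checkPublicAccessNotAllow.rego and supplies
+	// ruleTemplateArgs ({"isEntire": true, "name": "sqlPublicAccess", "prefix": "reme_"}); those arguments
+	// fill the {{.prefix}}, {{.name}} and {{.isEntire}} placeholders when the template is executed below.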
sort.Strings(dirList) for i := range dirList { - metaFilename := filepath.Join(dirList[i], RegoMetadataFile) - var metadata []byte - metadata, err = ioutil.ReadFile(metaFilename) + // Find all files in the current dir + fileInfo, err := ioutil.ReadDir(dirList[i]) if err != nil { if !errors.Is(err, os.ErrNotExist) { - zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) } continue } - // Read metadata into struct - regoMetadata := make([]*RegoData, 0) - if err = json.Unmarshal(metadata, ®oMetadata); err != nil { - zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) - continue - } - - metadataCount++ - - // Find all .rego files within the directory - fileInfo, err := ioutil.ReadDir(dirList[i]) - if err != nil { - zap.S().Error("error while finding rego files", zap.String("dir", dirList[i])) - continue + // Load the rego metadata first (*.json) + metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) + if metadataFiles == nil { + return fmt.Errorf("no metadata files were found") } - files := make([]string, 0) - for j := range fileInfo { - files = append(files, fileInfo[j].Name()) - } + var regoDataList []*RegoData + for j := range *metadataFiles { + filePath := filepath.Join(dirList[i], (*metadataFiles)[j]) - // Load rego data for all rego files - regoFileList := filterFileListBySuffix(&files, RegoFileSuffix) - regoFileCount += len(*regoFileList) - for j := range *regoFileList { - regoFilename := (*regoFileList)[j] - regoFullPath := filepath.Join(dirList[i], regoFilename) - var rawRegoData []byte - rawRegoData, err = ioutil.ReadFile(regoFullPath) + var regoMetadata *RegoMetadata + regoMetadata, err = o.LoadRegoMetadata(filePath) if err != nil { - zap.S().Warn("failed to load rego file", zap.String("file", regoFilename)) continue } - _, ok := o.RegoFileMap[regoFullPath] - if ok { - // Already loaded this file, so continue - continue + regoData := RegoData{ + Metadata: *regoMetadata, } - // Set raw rego data - o.RegoFileMap[regoFullPath] = rawRegoData + regoDataList = append(regoDataList, ®oData) + o.stats.metadataFileCount++ } - for j := range regoMetadata { - //key := filepath.Join(dirList[i], regoMetadata[j].Rule) - //regoData := o.RegoFileMap[key] - metadataCount++ + // Read in raw rego data from associated rego files + if err = o.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &o.RegoFileMap); err != nil { + continue + } + + for j := range regoDataList { + o.stats.metadataCount++ // Apply templates if available - var buf bytes.Buffer + var templateData bytes.Buffer t := template.New("opa") - t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoMetadata[j].RuleTemplate+".rego")])) - t.Execute(&buf, regoMetadata[j].RuleTemplateArgs) + t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) + t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs) - templateData := buf.Bytes() - regoMetadata[j].RawRego = &templateData - o.RegoDataMap[regoMetadata[j].Name] = regoMetadata[j] + regoDataList[j].RawRego = templateData.Bytes() + o.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] } } - ruleCount = len(o.RegoDataMap) - zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", ruleCount, regoFileCount, metadataCount) + o.stats.ruleCount = len(o.RegoDataMap) + zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", 
o.stats.ruleCount, o.stats.regoFileCount, o.stats.metadataCount) return err } @@ -190,11 +203,11 @@ func (o *OpaEngine) LoadRegoFiles(policyPath string) error { func (o *OpaEngine) CompileRegoFiles() error { for k := range o.RegoDataMap { compiler, err := ast.CompileModules(map[string]string{ - o.RegoDataMap[k].Rule: string(*(o.RegoDataMap[k].RawRego)), + o.RegoDataMap[k].Metadata.RuleName: string(o.RegoDataMap[k].RawRego), }) r := rego.New( - rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Name), + rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Metadata.RuleName), rego.Compiler(compiler), ) @@ -261,15 +274,20 @@ func (o *OpaEngine) Evaluate(inputData *interface{}) error { if len(rs) > 0 { results := rs[0].Expressions[0].Value.([]interface{}) if len(results) > 0 { - r := o.RegoDataMap[k] - fmt.Printf("\n[%s] [%s] %s\n %s\n", r.Severity, r.RuleReferenceID, r.DisplayName, r.Vulnerability) + r := o.RegoDataMap[k].Metadata + fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) + continue } // fmt.Printf(" [%s] %v\n", k, results) } else { // fmt.Printf("No Result [%s] \n", k) } + // Store results } + b, _ := json.MarshalIndent(inputData, "", " ") + //fmt.Printf("InputData:\n%v\n", string(b)) + return nil } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 78d7fd8ef..13b6dadc6 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -17,7 +17,8 @@ package runtime import ( - policy "github.com/accurics/terrascan/pkg/policy/opa" + "github.com/accurics/terrascan/pkg/policy" + opa "github.com/accurics/terrascan/pkg/policy/opa" "go.uber.org/zap" @@ -30,18 +31,22 @@ import ( type Executor struct { filePath string dirPath string + policyPath string cloudType string iacType string iacVersion string iacProvider iacProvider.IacProvider cloudProvider cloudProvider.CloudProvider + policyEngine []policy.Engine + // policyEngine } // NewExecutor creates a runtime object -func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath string) (e *Executor, err error) { +func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath, policyPath string) (e *Executor, err error) { e = &Executor{ filePath: filePath, dirPath: dirPath, + policyPath: policyPath, cloudType: cloudType, iacType: iacType, iacVersion: iacVersion, @@ -104,15 +109,13 @@ func (e *Executor) Execute() error { if err != nil { return err } - //utils.PrintJSON(normalized, os.Stdout) - // write output - - // Create a new policy engine based on IaC type + // create a new policy engine based on IaC type if e.iacType == "terraform" { - engine := policy.OpaEngine{} + var engine policy.Engine + engine = &opa.OpaEngine{} - err := engine.Initialize("/Users/wsana/go/src/accurics/terrascan/pkg/policies/accurics/v1/opa") + err = engine.Initialize(e.policyPath) if err != nil { return err } diff --git a/pkg/utils/path.go b/pkg/utils/path.go index 6025470e7..410ff171f 100644 --- a/pkg/utils/path.go +++ b/pkg/utils/path.go @@ -47,3 +47,27 @@ func GetAbsPath(path string) (string, error) { } return path, nil } + +// FindAllDirectories Walks the file path and returns a list of all directories within +func FindAllDirectories(basePath string) ([]string, error) { + dirList := make([]string, 0) + err := filepath.Walk(basePath, func(filePath string, fileInfo os.FileInfo, err error) error { + if fileInfo != nil && fileInfo.IsDir() { + dirList = append(dirList, filePath) + } + return err + }) + return dirList, err +} + +// FilterFileInfoBySuffix Given a list of files, 
returns a subset of files containing a suffix which matches the input filter +func FilterFileInfoBySuffix(allFileList *[]os.FileInfo, filter string) *[]string { + fileList := make([]string, 0) + + for i := range *allFileList { + if strings.HasSuffix((*allFileList)[i].Name(), filter) { + fileList = append(fileList, (*allFileList)[i].Name()) + } + } + return &fileList +} From 9d355ace6bb4e90323d78353b93d3c9bee20ad6b Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 14:40:12 -0400 Subject: [PATCH 081/188] adds permalink setting --- mkdocs.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mkdocs.yml b/mkdocs.yml index 803240eb9..403de14c1 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -45,3 +45,7 @@ nav: - Changelog: changelog.md - Educational Resources: learning.md - About: about.md + +markdown_extensions: + - toc: + permalink: true From cd8d2ec84da0c90e8c1a0f38f43bd519f16acf43 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 13:01:36 -0700 Subject: [PATCH 082/188] Change OPA engine filename --- pkg/policy/opa/engine.go | 303 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 303 insertions(+) create mode 100644 pkg/policy/opa/engine.go diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go new file mode 100644 index 000000000..c443029f0 --- /dev/null +++ b/pkg/policy/opa/engine.go @@ -0,0 +1,303 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package opa + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + "text/template" + + "github.com/accurics/terrascan/pkg/utils" + + "github.com/open-policy-agent/opa/ast" + + "go.uber.org/zap" + + "github.com/open-policy-agent/opa/rego" +) + +type Violation struct { + Name string + Description string + LineNumber int + Category string + Data interface{} + RuleData interface{} +} + +type ResultData struct { + EngineType string + Provider string + Violations []*Violation +} + +type RegoMetadata struct { + RuleName string `json:"ruleName"` + File string `json:"file"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` + Severity string `json:"severity"` + Description string `json:"description"` + RuleReferenceID string `json:"ruleReferenceId"` + Category string `json:"category"` + Version int `json:"version"` +} + +type RegoData struct { + Metadata RegoMetadata + RawRego []byte + PreparedQuery *rego.PreparedEvalQuery +} + +type EngineStats struct { + ruleCount int + regoFileCount int + metadataFileCount int + metadataCount int +} + +type Engine struct { + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData + stats EngineStats +} + +func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { + // Load metadata file if it exists + metadata, err := ioutil.ReadFile(metaFilename) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + } + return nil, err + } + + // Read metadata into struct + regoMetadata := RegoMetadata{} + if err = json.Unmarshal(metadata, ®oMetadata); err != nil { + zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) + return nil, err + } + return ®oMetadata, err +} + +func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { + for i := range regoDataList { + regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) + rawRegoData, err := ioutil.ReadFile(regoPath) + if err != nil { + zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) + continue + } + + // Load the raw rego into the map + _, ok := (*regoFileMap)[regoPath] + if ok { + // Already loaded this file, so continue + continue + } + + (*regoFileMap)[regoPath] = rawRegoData + } + return nil +} + +func (e *Engine) LoadRegoFiles(policyPath string) error { + // Walk the file path and find all directories + dirList, err := utils.FindAllDirectories(policyPath) + if err != nil { + return err + } + + if len(dirList) == 0 { + return fmt.Errorf("no directories found for path %s", policyPath) + } + + e.RegoFileMap = make(map[string][]byte) + e.RegoDataMap = make(map[string]*RegoData) + + // Load rego data files from each dir + // First, we read the metadata file, which contains info about the associated rego rule. The .rego file data is + // stored in a map in its raw format. 
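The metadata-plus-template convention described in the comment above is easier to see with a small standalone sketch. The rule name, file names, and argument values below are invented for illustration; only the encoding/json and text/template packages already used by this file are assumed.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"text/template"
)

// ruleMetadata mirrors the json-tagged fields of RegoMetadata that matter for templating.
type ruleMetadata struct {
	RuleName         string                 `json:"ruleName"`
	File             string                 `json:"file"`
	RuleTemplate     string                 `json:"ruleTemplate"`
	RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"`
	Severity         string                 `json:"severity"`
}

func main() {
	// Hypothetical contents of a rule's metadata file (e.g. bucketAclCheck.json).
	rawMetadata := []byte(`{
	  "ruleName": "bucketAclCheck",
	  "file": "bucketAclCheck.rego",
	  "ruleTemplate": "bucketAclCheck",
	  "ruleTemplateArgs": {"name": "bucketAclCheck"},
	  "severity": "HIGH"
	}`)

	var meta ruleMetadata
	if err := json.Unmarshal(rawMetadata, &meta); err != nil {
		panic(err)
	}

	// Hypothetical contents of the matching rego template; {{.name}} is filled
	// in from ruleTemplateArgs, mirroring the text/template step in the loop below.
	rawRego := `package accurics

{{.name}}[bucket.id] {
    bucket := input.aws_s3_bucket[_]
    bucket.config.acl == "public-read-write"
}
`

	var rendered bytes.Buffer
	t := template.Must(template.New("opa").Parse(rawRego))
	if err := t.Execute(&rendered, meta.RuleTemplateArgs); err != nil {
		panic(err)
	}
	fmt.Println(rendered.String())
}
```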
+ sort.Strings(dirList) + for i := range dirList { + // Find all files in the current dir + fileInfo, err := ioutil.ReadDir(dirList[i]) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) + } + continue + } + + // Load the rego metadata first (*.json) + metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) + if metadataFiles == nil { + return fmt.Errorf("no metadata files were found") + } + + var regoDataList []*RegoData + for j := range *metadataFiles { + filePath := filepath.Join(dirList[i], (*metadataFiles)[j]) + + var regoMetadata *RegoMetadata + regoMetadata, err = e.LoadRegoMetadata(filePath) + if err != nil { + continue + } + + regoData := RegoData{ + Metadata: *regoMetadata, + } + + regoDataList = append(regoDataList, ®oData) + e.stats.metadataFileCount++ + } + + // Read in raw rego data from associated rego files + if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.RegoFileMap); err != nil { + continue + } + + for j := range regoDataList { + e.stats.metadataCount++ + // Apply templates if available + var templateData bytes.Buffer + t := template.New("opa") + _, err = t.Parse(string(e.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) + if err != nil { + zap.S().Warn("unable to parse template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + continue + } + if err = t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs); err != nil { + zap.S().Warn("unable to execute template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + continue + } + + regoDataList[j].RawRego = templateData.Bytes() + e.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] + } + } + + e.stats.ruleCount = len(e.RegoDataMap) + zap.S().Infof("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataCount) + + return err +} + +func (e *Engine) CompileRegoFiles() error { + for k := range e.RegoDataMap { + compiler, err := ast.CompileModules(map[string]string{ + e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), + }) + + r := rego.New( + rego.Query(RuleQueryBase+"."+e.RegoDataMap[k].Metadata.RuleName), + rego.Compiler(compiler), + ) + + // Create a prepared query that can be evaluated. + query, err := r.PrepareForEval(e.Context) + if err != nil { + return err + } + + e.RegoDataMap[k].PreparedQuery = &query + } + + return nil +} + +// Initialize Initializes the Opa engine +// Handles loading all rules, filtering, compiling, and preparing for evaluation +func (e *Engine) Initialize(policyPath string) error { + e.Context = context.Background() + + if err := e.LoadRegoFiles(policyPath); err != nil { + return err + } + + err := e.CompileRegoFiles() + if err != nil { + return err + } + + return nil +} + +func (e *Engine) Configure() error { + return nil +} + +func (e *Engine) GetResults() error { + return nil +} + +func (e *Engine) Release() error { + return nil +} + +func (e *Engine) Evaluate(inputData *interface{}) error { + + sortedKeys := make([]string, len(e.RegoDataMap)) + x := 0 + for k := range e.RegoDataMap { + sortedKeys[x] = k + x++ + } + sort.Strings(sortedKeys) + + for _, k := range sortedKeys { + // Execute the prepared query. 
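For reference, the prepare-then-evaluate flow used in this loop can be exercised in isolation with a toy rule and input. The policy text and resource data below are made up for the example; only the upstream OPA API (rego.New, PrepareForEval, Eval) is assumed.

```go
package main

import (
	"context"
	"fmt"

	"github.com/open-policy-agent/opa/rego"
)

func main() {
	ctx := context.Background()

	// Toy rule in the same "data.accurics.<ruleName>" shape the engine queries.
	module := `package accurics

sampleRule[name] {
    bucket := input.aws_s3_bucket[_]
    bucket.acl == "public-read-write"
    name := bucket.name
}
`

	r := rego.New(
		rego.Query("data.accurics.sampleRule"),
		rego.Module("sample.rego", module),
	)

	query, err := r.PrepareForEval(ctx)
	if err != nil {
		panic(err)
	}

	// Made-up input document standing in for the normalized IaC output.
	input := map[string]interface{}{
		"aws_s3_bucket": []interface{}{
			map[string]interface{}{"name": "demo", "acl": "public-read-write"},
		},
	}

	rs, err := query.Eval(ctx, rego.EvalInput(input))
	if err != nil {
		panic(err)
	}
	// As in the loop below, a non-empty result set exposes the rule's output
	// under rs[0].Expressions[0].Value.
	if len(rs) > 0 {
		fmt.Printf("matched resources: %v\n", rs[0].Expressions[0].Value)
	}
}
```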
+ rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(inputData)) + // rs, err := r.Eval(o.Context) + if err != nil { + zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData)) + continue + } + + if len(rs) > 0 { + results := rs[0].Expressions[0].Value.([]interface{}) + if len(results) > 0 { + r := e.RegoDataMap[k].Metadata + fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) + continue + } + // fmt.Printf(" [%s] %v\n", k, results) + } else { + // fmt.Printf("No Result [%s] \n", k) + } + + // Store results + } + + _, err := json.MarshalIndent(inputData, "", " ") + if err != nil { + return err + } + //fmt.Printf("InputData:\n%v\n", string(b)) + + return nil +} From f06638f55abd75aacb2ed39f9f309918f4d77239 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 13:03:04 -0700 Subject: [PATCH 083/188] Rename OPA engine #2 --- pkg/policy/opa/opa_engine.go | 293 ----------------------------------- pkg/runtime/executor.go | 2 +- 2 files changed, 1 insertion(+), 294 deletions(-) delete mode 100644 pkg/policy/opa/opa_engine.go diff --git a/pkg/policy/opa/opa_engine.go b/pkg/policy/opa/opa_engine.go deleted file mode 100644 index bea7c9725..000000000 --- a/pkg/policy/opa/opa_engine.go +++ /dev/null @@ -1,293 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ - -package opa - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "sort" - "text/template" - - "github.com/accurics/terrascan/pkg/utils" - - "github.com/open-policy-agent/opa/ast" - - "go.uber.org/zap" - - "github.com/open-policy-agent/opa/rego" -) - -type Violation struct { - Name string - Description string - LineNumber int - Category string - Data interface{} - RuleData interface{} -} - -type ResultData struct { - EngineType string - Provider string - Violations []*Violation -} - -type RegoMetadata struct { - RuleName string `json:"ruleName"` - File string `json:"file"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` - Severity string `json:"severity"` - Description string `json:"description"` - RuleReferenceID string `json:"ruleReferenceId"` - Category string `json:"category"` - Version int `json:"version"` -} - -type RegoData struct { - Metadata RegoMetadata - RawRego []byte - PreparedQuery *rego.PreparedEvalQuery -} - -type EngineStats struct { - ruleCount int - regoFileCount int - metadataFileCount int - metadataCount int -} - -type OpaEngine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - stats EngineStats -} - -func (o *OpaEngine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { - // Load metadata file if it exists - metadata, err := ioutil.ReadFile(metaFilename) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) - } - return nil, err - } - - // Read metadata into struct - regoMetadata := RegoMetadata{} - if err = json.Unmarshal(metadata, ®oMetadata); err != nil { - zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) - return nil, err - } - return ®oMetadata, err -} - -func (o *OpaEngine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { - for i := range regoDataList { - regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) - rawRegoData, err := ioutil.ReadFile(regoPath) - if err != nil { - zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) - continue - } - - // Load the raw rego into the map - _, ok := (*regoFileMap)[regoPath] - if ok { - // Already loaded this file, so continue - continue - } - - (*regoFileMap)[regoPath] = rawRegoData - } - return nil -} - -func (o *OpaEngine) LoadRegoFiles(policyPath string) error { - // Walk the file path and find all directories - dirList, err := utils.FindAllDirectories(policyPath) - if err != nil { - return err - } - - if len(dirList) == 0 { - return fmt.Errorf("no directories found for path %s", policyPath) - } - - o.RegoFileMap = make(map[string][]byte) - o.RegoDataMap = make(map[string]*RegoData) - - // Load rego data files from each dir - // First, we read the metadata file, which contains info about the associated rego rule. The .rego file data is - // stored in a map in its raw format. 
- sort.Strings(dirList) - for i := range dirList { - // Find all files in the current dir - fileInfo, err := ioutil.ReadDir(dirList[i]) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) - } - continue - } - - // Load the rego metadata first (*.json) - metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) - if metadataFiles == nil { - return fmt.Errorf("no metadata files were found") - } - - var regoDataList []*RegoData - for j := range *metadataFiles { - filePath := filepath.Join(dirList[i], (*metadataFiles)[j]) - - var regoMetadata *RegoMetadata - regoMetadata, err = o.LoadRegoMetadata(filePath) - if err != nil { - continue - } - - regoData := RegoData{ - Metadata: *regoMetadata, - } - - regoDataList = append(regoDataList, ®oData) - o.stats.metadataFileCount++ - } - - // Read in raw rego data from associated rego files - if err = o.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &o.RegoFileMap); err != nil { - continue - } - - for j := range regoDataList { - o.stats.metadataCount++ - // Apply templates if available - var templateData bytes.Buffer - t := template.New("opa") - t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) - t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs) - - regoDataList[j].RawRego = templateData.Bytes() - o.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] - } - } - - o.stats.ruleCount = len(o.RegoDataMap) - zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", o.stats.ruleCount, o.stats.regoFileCount, o.stats.metadataCount) - - return err -} - -func (o *OpaEngine) CompileRegoFiles() error { - for k := range o.RegoDataMap { - compiler, err := ast.CompileModules(map[string]string{ - o.RegoDataMap[k].Metadata.RuleName: string(o.RegoDataMap[k].RawRego), - }) - - r := rego.New( - rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Metadata.RuleName), - rego.Compiler(compiler), - ) - - // Create a prepared query that can be evaluated. - query, err := r.PrepareForEval(o.Context) - if err != nil { - return err - } - - o.RegoDataMap[k].PreparedQuery = &query - } - - return nil -} - -// Initialize Initializes the Opa engine -// Handles loading all rules, filtering, compiling, and preparing for evaluation -func (o *OpaEngine) Initialize(policyPath string) error { - o.Context = context.Background() - - if err := o.LoadRegoFiles(policyPath); err != nil { - return err - } - - err := o.CompileRegoFiles() - if err != nil { - return err - } - - return nil -} - -func (o *OpaEngine) Configure() error { - return nil -} - -func (o *OpaEngine) GetResults() error { - return nil -} - -func (o *OpaEngine) Release() error { - return nil -} - -func (o *OpaEngine) Evaluate(inputData *interface{}) error { - - sortedKeys := make([]string, len(o.RegoDataMap)) - x := 0 - for k := range o.RegoDataMap { - sortedKeys[x] = k - x++ - } - sort.Strings(sortedKeys) - - for _, k := range sortedKeys { - // Execute the prepared query. 
- rs, err := o.RegoDataMap[k].PreparedQuery.Eval(o.Context, rego.EvalInput(inputData)) - // rs, err := r.Eval(o.Context) - if err != nil { - zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData)) - continue - } - - if len(rs) > 0 { - results := rs[0].Expressions[0].Value.([]interface{}) - if len(results) > 0 { - r := o.RegoDataMap[k].Metadata - fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) - continue - } - // fmt.Printf(" [%s] %v\n", k, results) - } else { - // fmt.Printf("No Result [%s] \n", k) - } - - // Store results - } - - b, _ := json.MarshalIndent(inputData, "", " ") - //fmt.Printf("InputData:\n%v\n", string(b)) - - return nil -} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 13b6dadc6..3561ae154 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -113,7 +113,7 @@ func (e *Executor) Execute() error { // create a new policy engine based on IaC type if e.iacType == "terraform" { var engine policy.Engine - engine = &opa.OpaEngine{} + engine = &opa.Engine{} err = engine.Initialize(e.policyPath) if err != nil { From f030748913cb30ca1793f7668b753c72322b6c2c Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 13:59:31 -0700 Subject: [PATCH 084/188] Fix misc linter errors --- pkg/policy/interface.go | 2 ++ pkg/policy/opa/constants.go | 5 +++-- pkg/policy/opa/engine.go | 17 ++++++++++++++++- 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 3939ee5ac..084f5599c 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,12 +16,14 @@ package policy +// Manager Policy Manager interface type Manager interface { Import() error Export() error CreateManager() error } +// Engine Policy Engine interface type Engine interface { Initialize(policyPath string) error Configure() error diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go index cdbc959f4..c00b15022 100644 --- a/pkg/policy/opa/constants.go +++ b/pkg/policy/opa/constants.go @@ -1,7 +1,8 @@ package opa const ( + // RegoMetadataFileSuffix Suffix for files containing rego metadata RegoMetadataFileSuffix = ".json" - RegoFileSuffix = ".rego" - RuleQueryBase = "data.accurics" + // RuleQueryBase Default package to query + RuleQueryBase = "data.accurics" ) diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index c443029f0..ec755f60d 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -37,6 +37,7 @@ import ( "github.com/open-policy-agent/opa/rego" ) +// Violation Contains data for each violation type Violation struct { Name string Description string @@ -46,12 +47,14 @@ type Violation struct { RuleData interface{} } +// ResultData Contains full report data type ResultData struct { EngineType string Provider string Violations []*Violation } +// RegoMetadata The rego metadata struct which is read and saved from disk type RegoMetadata struct { RuleName string `json:"ruleName"` File string `json:"file"` @@ -64,12 +67,14 @@ type RegoMetadata struct { Version int `json:"version"` } +// RegoData Stores all information needed to evaluate and report on a rego rule type RegoData struct { Metadata RegoMetadata RawRego []byte PreparedQuery *rego.PreparedEvalQuery } +// EngineStats Contains misc stats type EngineStats struct { ruleCount int regoFileCount int @@ -77,6 +82,7 @@ type EngineStats struct { metadataCount int } +// Engine Implements the policy 
engine interface type Engine struct { Context context.Context RegoFileMap map[string][]byte @@ -84,6 +90,7 @@ type Engine struct { stats EngineStats } +// LoadRegoMetadata Loads rego metadata from a given file func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { // Load metadata file if it exists metadata, err := ioutil.ReadFile(metaFilename) @@ -103,6 +110,7 @@ func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { return ®oMetadata, err } +// loadRawRegoFilesIntoMap imports raw rego files into a map func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { for i := range regoDataList { regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) @@ -124,6 +132,7 @@ func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*Rego return nil } +// LoadRegoFiles Loads all related rego files from the given policy path into memory func (e *Engine) LoadRegoFiles(policyPath string) error { // Walk the file path and find all directories dirList, err := utils.FindAllDirectories(policyPath) @@ -144,7 +153,8 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { sort.Strings(dirList) for i := range dirList { // Find all files in the current dir - fileInfo, err := ioutil.ReadDir(dirList[i]) + var fileInfo []os.FileInfo + fileInfo, err = ioutil.ReadDir(dirList[i]) if err != nil { if !errors.Is(err, os.ErrNotExist) { zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) @@ -207,6 +217,7 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { return err } +// CompileRegoFiles Compiles rego files for faster evaluation func (e *Engine) CompileRegoFiles() error { for k := range e.RegoDataMap { compiler, err := ast.CompileModules(map[string]string{ @@ -247,18 +258,22 @@ func (e *Engine) Initialize(policyPath string) error { return nil } +// Configure Configures the OPA engine func (e *Engine) Configure() error { return nil } +// GetResults Fetches results from OPA engine policy evaluation func (e *Engine) GetResults() error { return nil } +// Release Performs any tasks required to free resources func (e *Engine) Release() error { return nil } +// Evaluate Executes compiled OPA queries against the input JSON data func (e *Engine) Evaluate(inputData *interface{}) error { sortedKeys := make([]string, len(e.RegoDataMap)) From 38590bcc019f3f916ac06b061b887c6f1bcccb14 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 14:46:56 -0700 Subject: [PATCH 085/188] Fix FileInfo-related linter errors --- pkg/data/file/importer.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pkg/data/file/importer.go b/pkg/data/file/importer.go index 1cc036441..fc3b808b7 100644 --- a/pkg/data/file/importer.go +++ b/pkg/data/file/importer.go @@ -1,6 +1,7 @@ package file -type FileInfo struct { +// Info File info +type Info struct { Path string Hash string HashType string @@ -12,8 +13,8 @@ type Group struct { Name string IsReadOnly bool VerifySignatures bool - Directories []*FileInfo - Files []*FileInfo + Directories []*Info + Files []*Info } // Metadata File metadata From 9386cf127168a265e0d24a9f2f21153555cea0e0 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 14:58:30 -0700 Subject: [PATCH 086/188] Fixed static checker issues --- pkg/policy/opa/engine.go | 6 +++--- pkg/runtime/executor.go | 5 ++--- 2 files changed, 5 insertions(+), 6 deletions(-) diff --git a/pkg/policy/opa/engine.go 
b/pkg/policy/opa/engine.go index ec755f60d..c3717d2de 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -223,6 +223,9 @@ func (e *Engine) CompileRegoFiles() error { compiler, err := ast.CompileModules(map[string]string{ e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), }) + if err != nil { + return err + } r := rego.New( rego.Query(RuleQueryBase+"."+e.RegoDataMap[k].Metadata.RuleName), @@ -300,9 +303,6 @@ func (e *Engine) Evaluate(inputData *interface{}) error { fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) continue } - // fmt.Printf(" [%s] %v\n", k, results) - } else { - // fmt.Printf("No Result [%s] \n", k) } // Store results diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 3561ae154..b65648b0d 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -37,7 +37,7 @@ type Executor struct { iacVersion string iacProvider iacProvider.IacProvider cloudProvider cloudProvider.CloudProvider - policyEngine []policy.Engine + // policyEngine []policy.Engine // policyEngine } @@ -112,8 +112,7 @@ func (e *Executor) Execute() error { // create a new policy engine based on IaC type if e.iacType == "terraform" { - var engine policy.Engine - engine = &opa.Engine{} + var engine policy.Engine = &opa.Engine{} err = engine.Initialize(e.policyPath) if err != nil { From e77972482fdff40dbb0c52bf40cd9424ef7c3322 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 21:48:09 -0400 Subject: [PATCH 087/188] updates description --- README.md | 3 ++- mkdocs.yml | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index c5db3a148..e5c25ae17 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,8 @@ [![downloads](https://img.shields.io/github/downloads/accurics/terrascan/total)](/~https://github.com/accurics/terrascan/releases) -Terrascan is a static code analyzer and linter for security weanesses in Infrastructure as Code (IaC). +Detect compliance and security violations across Infrastructure as Code to mitigate risk before provisioning cloud native infrastructure. + * GitHub Repo: /~https://github.com/accurics/terrascan * Documentation: https://docs.accurics.com diff --git a/mkdocs.yml b/mkdocs.yml index 403de14c1..a71bc2c94 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -1,7 +1,7 @@ # Project Info site_name: Terrascan documentation site_description: >- - Terrascan documentation. Learn more about how to use Terrascan to scan infrastructure as code. + Detect compliance and security violations across Infrastructure as Code to mitigate risk before deploying cloud native infrastructure. copyright: >- © 2020 Accurics, Inc. All rights reserved. Terrascan and Terrascan logo are US trademarks of Accurics Inc. All other registered trademarks are the properties of their respective owners. 
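With the engine renamed and hidden behind the policy.Engine interface, the executor-side wiring reduces to a few calls. The sketch below uses a placeholder policy path and a made-up input document rather than anything from the patch series.

```go
package main

import (
	"log"

	"github.com/accurics/terrascan/pkg/policy"
	opa "github.com/accurics/terrascan/pkg/policy/opa"
)

func main() {
	// The executor only ever sees the interface; the OPA engine is one implementation.
	var engine policy.Engine = &opa.Engine{}

	// Placeholder policy path; in the executor this comes from the policyPath
	// argument passed into NewExecutor.
	if err := engine.Initialize("./policies/opa/rego"); err != nil {
		log.Fatal(err)
	}

	// Placeholder for the normalized IaC output produced by the IaC provider.
	var inputData interface{} = map[string]interface{}{
		"aws_s3_bucket": []interface{}{
			map[string]interface{}{"name": "demo", "acl": "public-read-write"},
		},
	}
	if err := engine.Evaluate(&inputData); err != nil {
		log.Fatal(err)
	}
}
```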
From 0276b9e0b7a7b31b504afc3ec4386325da468e97 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 21:55:46 -0400 Subject: [PATCH 088/188] updates nav order --- mkdocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs.yml b/mkdocs.yml index a71bc2c94..507667e89 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -42,8 +42,8 @@ nav: - Architecture: architecture.md - Provider Reference: providers.md - Policies: policies.md - - Changelog: changelog.md - Educational Resources: learning.md + - Changelog: changelog.md - About: about.md markdown_extensions: From 4d0942d52307ecff63c3256d75ebdf09a404dc2a Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 21:55:58 -0400 Subject: [PATCH 089/188] adds explanation of docs sections --- docs/index.md | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/index.md b/docs/index.md index 2ae8e92de..3efae1a5c 100644 --- a/docs/index.md +++ b/docs/index.md @@ -1,3 +1,12 @@ ![Terrascan_Logo](img/Terrascan_By_Accurics_Logo_38B34A-333F48.svg) # Terrascan Documentation + +Terrascan documentation is composed of the following major sections: + +* [Getting Started](getting-started.md): Tutorial on how to install and quickly get started with Terrascan. +* [Architecture](architecture.md): Explains the pluggable architecture powering Terrascan. +* [Provider Reference](providers.md): References the different IaC providers (e.g. Terraform HCL2, Kubernetes YAML, etc.). +* [Policies](policies.md): Explains policies, how to write them, and reference for all policies/rules included by default. +* [Educational Resources](learning.md): Additional resources and tutorials. + From ee3b675296676ce4c0f59a8add792ea0dc38bb78 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 22:09:17 -0400 Subject: [PATCH 090/188] fixes link to contributing guide --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index e5c25ae17..1857f5831 100644 --- a/README.md +++ b/README.md @@ -85,7 +85,7 @@ Usage of ./bin/terrascan: To learn more about Terrascan check out the documentation https://docs.accurics.com where we include a getting started guide, Terrascan's architecture, a break down of it's commands, and how to write your own policies. ## Developing Terrascan -To learn more about developing and contributing to Terrascan refer to our (contributing guide)[CONTRIBUTING.md]. +To learn more about developing and contributing to Terrascan refer to our [contributing guide](CONTRIBUTING.md). To learn more about compiling Terraform and contributing suggested changes, please refer to the contributing guide. From 4961867ab63cff789b87f20e7d743fb032865d4b Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 22:09:40 -0400 Subject: [PATCH 091/188] adds contributing guide --- CONTRIBUTING.md | 78 ++++++++++++++++++++++++++++++++++++++++++++++ docs/developing.md | 1 + 2 files changed, 79 insertions(+) create mode 100644 CONTRIBUTING.md create mode 100644 docs/developing.md diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 000000000..642faa57f --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,78 @@ +# Contributing + +Contributions are welcome, and they are greatly appreciated! + +You can contribute in many ways: + +## Types of Contributions + +### Report Bugs + +Report bugs at /~https://github.com/accurics/terrascan/issues. + +If you are reporting a bug, please include: + +* Your operating system name and version. 
+* Any details about your local setup that might be helpful in troubleshooting. +* Detailed steps to reproduce the bug. + +### Fix Bugs + +Look through the GitHub issues for bugs. Anything tagged with "bug" +and "help wanted" is open to whoever wants to implement it. + +### Implement Features + +Look through the GitHub issues for features. Anything tagged with "enhancement" +and "help wanted" is open to whoever wants to implement it. + +### Write Documentation + +Terrascan could always use more documentation, whether as part of the +official Terrascan docs, or even on the web in blog posts, +articles, videos, and such. + +### Submit Feedback + +The best way to send feedback is to file an issue at /~https://github.com/accurics/terrascan/issues. + +If you are proposing a feature: + +* Explain in detail how it would work. +* Keep the scope as narrow as possible, to make it easier to implement. +* Remember that this is a volunteer-driven project, and that contributions + are welcome :) + +## Get Started! + +Ready to contribute? Here's how to set up `terrascan` for local development. + +1. Fork the `terrascan` repo on GitHub. +2. Clone your fork locally:: +``` + $ git clone git@github.com:your_name_here/terrascan.git +``` +3. Create a branch for local development: +``` + $ git checkout -b name-of-your-bugfix-or-feature +``` + Now you can make your changes locally. +1. When you're done making changes, check that your changes pass linting and tests. The following commands will simulate locally all checks executed as part of Terrascan's CI pipeline: +``` + $ make cicd +``` +5. Commit your changes and push your branch to GitHub:: +``` + $ git add . + $ git commit -m "Your detailed description of your changes." + $ git push origin name-of-your-bugfix-or-feature +``` +6. Submit a pull request through the GitHub website. + +## Pull Request Guidelines + +Before you submit a pull request, check that it meets these guidelines: + +1. The pull request should include tests. +2. If the pull request adds functionality or policies, the docs should be updated. +3. Make sure all tests pass by running `make cicd`. 
diff --git a/docs/developing.md b/docs/developing.md new file mode 100644 index 000000000..66e8e2b46 --- /dev/null +++ b/docs/developing.md @@ -0,0 +1 @@ +{!CONTRIBUTING.md!} From 57fe3bbe442aa91f552da73395e71ed08afa44e9 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 22:10:13 -0400 Subject: [PATCH 092/188] adds contributing guide to nav --- mkdocs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/mkdocs.yml b/mkdocs.yml index 507667e89..07516c0e3 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -43,6 +43,7 @@ nav: - Provider Reference: providers.md - Policies: policies.md - Educational Resources: learning.md + - Developing: developing.md - Changelog: changelog.md - About: about.md From ad162a3d408b7480f1b88e4093e059eda0398584 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 22:14:33 -0400 Subject: [PATCH 093/188] renames to contributing --- docs/{developing.md => contributing.md} | 0 mkdocs.yml | 7 ++----- 2 files changed, 2 insertions(+), 5 deletions(-) rename docs/{developing.md => contributing.md} (100%) diff --git a/docs/developing.md b/docs/contributing.md similarity index 100% rename from docs/developing.md rename to docs/contributing.md diff --git a/mkdocs.yml b/mkdocs.yml index 07516c0e3..bed4e24b0 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -19,10 +19,6 @@ theme: primary: green language: en -# Extensions -markdown_extensions: - - markdown_include.include - # Social Icons extra: social: @@ -43,10 +39,11 @@ nav: - Provider Reference: providers.md - Policies: policies.md - Educational Resources: learning.md - - Developing: developing.md + - Contributing: Contributing.md - Changelog: changelog.md - About: about.md markdown_extensions: + - markdown_include.include - toc: permalink: true From 40776f56af58c22bd0859b0ed3a9c22fa8ced8f8 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 22:15:11 -0400 Subject: [PATCH 094/188] fixes typo --- mkdocs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mkdocs.yml b/mkdocs.yml index bed4e24b0..64bd6e2bf 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -39,7 +39,7 @@ nav: - Provider Reference: providers.md - Policies: policies.md - Educational Resources: learning.md - - Contributing: Contributing.md + - Contributing: contributing.md - Changelog: changelog.md - About: about.md From fd59e45aea3b94b4ffa78bf1230562bb1e91c70d Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 22:18:24 -0400 Subject: [PATCH 095/188] adds consistency to changelog --- CHANGELOG.md | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 07c6b59b8..56c4041e2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,12 +1,15 @@ # Changelog -## 0.2.3 +## 1.0.0 (UNRELEASED) +Major updates to Terrascan and the underlying architecture. + +## 0.2.3 (2020-07-23) * Introduces the '-f' flag for passing a list of ".tf" files for linting and the '--version' flag. -## 0.2.2 +## 0.2.2 (2020-07-21) * Adds Docker image and pipeline to push to DockerHub -## 0.2.1 +## 0.2.1 (2020-06-19) * Bugfix: The pyhcl hard dependency in the requirements.txt file caused issues if a higher version was installed. This was fixed by using the ">=" operator. 
## 0.2.0 (2020-01-11)

From 290c4516e31fba29f754250bece4885f8cf7a54e Mon Sep 17 00:00:00 2001
From: Cesar Rodriguez
Date: Sun, 9 Aug 2020 22:26:04 -0400
Subject: [PATCH 096/188] adds about page contents

---
 docs/about.md | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/docs/about.md b/docs/about.md
index 420481f66..912a2a95b 100644
--- a/docs/about.md
+++ b/docs/about.md
@@ -1,2 +1,7 @@
 # About Terrascan
 
+Terrascan is a static code analyzer for infrastructure as code. Originally developed in 2017, Terrascan has evolved from a Python application specifically focused on scanning of security issues for Terraform into a golang application with pluggable support for multiple IaC tools and technologies. The project is maintained by [Accurics](https://www.accurics.com)
+
+## About Accurics
+
+[Accurics](https://www.accurics.com) enables organizations to protect their cloud native infrastructure in hybrid and multi-cloud environments. It seamlessly scans infrastructure as code for misconfigurations, monitors provisioned cloud infrastructure for configuration changes that introduce posture drift, and enables reverting to a secure posture.

From 07e529259deb86c16bbbb8163260ed3e99d0f1a5 Mon Sep 17 00:00:00 2001
From: Cesar Rodriguez
Date: Sun, 9 Aug 2020 22:26:30 -0400
Subject: [PATCH 097/188] fix typo

---
 docs/about.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/about.md b/docs/about.md
index 912a2a95b..62e1316be 100644
--- a/docs/about.md
+++ b/docs/about.md
@@ -1,6 +1,6 @@
 # About Terrascan
 
-Terrascan is a static code analyzer for infrastructure as code. Originally developed in 2017, Terrascan has evolved from a Python application specifically focused on scanning of security issues for Terraform into a golang application with pluggable support for multiple IaC tools and technologies. The project is maintained by [Accurics](https://www.accurics.com)
+Terrascan is a static code analyzer for infrastructure as code. Originally developed in 2017, Terrascan has evolved from a Python application specifically focused on scanning of security issues for Terraform into a Golang application with pluggable support for multiple IaC tools and technologies. 
The project is maintained by [Accurics](https://www.accurics.com) ## About Accurics From ed7311822b7d94751a0646d094ba568a876b76dc Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Sun, 9 Aug 2020 23:17:01 -0400 Subject: [PATCH 098/188] adds docs requirements --- requirements.txt | 2 ++ 1 file changed, 2 insertions(+) create mode 100644 requirements.txt diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 000000000..d0f2375b0 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,2 @@ +mkdocs==1.1.2 +mkdocs-material==5.5.3 From 600a6e6eac35bcabe624c4f1d67c03ae13877051 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 10 Aug 2020 09:42:32 +0530 Subject: [PATCH 099/188] disable codecov commit status --- .codecov.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.codecov.yml b/.codecov.yml index 7c64be9ba..d630b79a8 100644 --- a/.codecov.yml +++ b/.codecov.yml @@ -4,3 +4,8 @@ comment: require_changes: false # if true: only post the comment if coverage changes require_base: no # [yes :: must have a base report to post] require_head: yes # [yes :: must have a head report to post] + +coverage: + status: + project: off + patch: off From 0ddf0a0b39576840e103d2672a61110e633806b7 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 4 Aug 2020 23:34:40 +0530 Subject: [PATCH 100/188] add support for sending webhook notifications --- pkg/notifications/interface.go | 8 ++++ pkg/notifications/notifiers.go | 57 +++++++++++++++++++++++++++ pkg/notifications/register.go | 29 ++++++++++++++ pkg/notifications/types.go | 20 ++++++++++ pkg/notifications/webhook.go | 35 +++++++++++++++++ pkg/notifications/webhook/types.go | 23 +++++++++++ pkg/notifications/webhook/webhook.go | 51 ++++++++++++++++++++++++ pkg/runtime/executor.go | 18 ++++++++- pkg/utils/http/request.go | 58 ++++++++++++++++++++++++++++ 9 files changed, 297 insertions(+), 2 deletions(-) create mode 100644 pkg/notifications/interface.go create mode 100644 pkg/notifications/notifiers.go create mode 100644 pkg/notifications/register.go create mode 100644 pkg/notifications/types.go create mode 100644 pkg/notifications/webhook.go create mode 100644 pkg/notifications/webhook/types.go create mode 100644 pkg/notifications/webhook/webhook.go create mode 100644 pkg/utils/http/request.go diff --git a/pkg/notifications/interface.go b/pkg/notifications/interface.go new file mode 100644 index 000000000..255c4c9eb --- /dev/null +++ b/pkg/notifications/interface.go @@ -0,0 +1,8 @@ +package notifications + +// Notifier defines the interface which every type of notification provider +// needs to implement to claim support in terrascan +type Notifier interface { + Init() error + SendNotification() error +} diff --git a/pkg/notifications/notifiers.go b/pkg/notifications/notifiers.go new file mode 100644 index 000000000..54713454f --- /dev/null +++ b/pkg/notifications/notifiers.go @@ -0,0 +1,57 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package notifications + +import ( + "fmt" + "reflect" + + "go.uber.org/zap" +) + +var ( + errNotifierNotSupported = fmt.Errorf("notifier not supported") +) + +// NewNotifier returns a new notifier +func NewNotifier(notifierType string) (notifier Notifier, err error) { + + // get notifier from supportedNotifierss + notifierObject, supported := supportedNotifiers[supportedNotifierType(notifierType)] + if !supported { + zap.S().Errorf("notifier type '%s' not supported", notifierType) + return notifier, errNotifierNotSupported + } + + // notifier + notifier = reflect.New(notifierObject).Interface().(Notifier) + + // initialize notifier + notifier.Init() + + // successful + return notifier, nil +} + +// IsNotifierSupported returns true/false depending on whether the notifier +// is supported in terrascan or not +func IsNotifierSupported(notifierType string) bool { + if _, supported := supportedNotifiers[supportedNotifierType(notifierType)]; !supported { + return false + } + return true +} diff --git a/pkg/notifications/register.go b/pkg/notifications/register.go new file mode 100644 index 000000000..c875a7fda --- /dev/null +++ b/pkg/notifications/register.go @@ -0,0 +1,29 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package notifications + +import ( + "reflect" +) + +// map of supported notifier types +var supportedNotifiers = make(map[supportedNotifierType]reflect.Type) + +// RegisterNotifier registers an notifier provider for terrascan +func RegisterNotifier(notifierType supportedNotifierType, notifierProvider reflect.Type) { + supportedNotifiers[notifierType] = notifierProvider +} diff --git a/pkg/notifications/types.go b/pkg/notifications/types.go new file mode 100644 index 000000000..ce13c45f5 --- /dev/null +++ b/pkg/notifications/types.go @@ -0,0 +1,20 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package notifications + +// SupportedNotifierType data type for supported IaC provider +type supportedNotifierType string diff --git a/pkg/notifications/webhook.go b/pkg/notifications/webhook.go new file mode 100644 index 000000000..695b4a1cd --- /dev/null +++ b/pkg/notifications/webhook.go @@ -0,0 +1,35 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package notifications + +import ( + "reflect" + + webhookNotifier "github.com/accurics/terrascan/pkg/notifications/webhook" +) + +// terraform specific constants +const ( + terraform supportedNotifierType = "webhook" +) + +// register terraform as an IaC provider with terrascan +func init() { + + // register iac provider + RegisterNotifier(terraform, reflect.TypeOf(webhookNotifier.Webhook{})) +} diff --git a/pkg/notifications/webhook/types.go b/pkg/notifications/webhook/types.go new file mode 100644 index 000000000..a1c131ad1 --- /dev/null +++ b/pkg/notifications/webhook/types.go @@ -0,0 +1,23 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package webhook + +// Webhook implements the Notifier interface +type Webhook struct { + url string + authToken string +} diff --git a/pkg/notifications/webhook/webhook.go b/pkg/notifications/webhook/webhook.go new file mode 100644 index 000000000..5eaf5e1e9 --- /dev/null +++ b/pkg/notifications/webhook/webhook.go @@ -0,0 +1,51 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package webhook + +import ( + "go.uber.org/zap" +) + +// Init initalizes the webhook notifier, reads config file and configures the +// necessary parameters for webhook notifications to work +func (w *Webhook) Init() error { + + // check if conf file exists + + // parse conf file + + // read webhook url and auth token + + // initalize Webhook struct with url and token + + // succesful + zap.S().Debug("initialized webhook notifier") + return nil +} + +// SendNotification sends webhook notification i.e sends a http POST request +// to the configured URL +func (w *Webhook) SendNotification() error { + + // make http POST request + + // validate http response + + // successful + zap.S().Debug("sent webhook notification") + return nil +} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 1f40cf967..d33855647 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -20,6 +20,7 @@ import ( "go.uber.org/zap" iacProvider "github.com/accurics/terrascan/pkg/iac-providers" + "github.com/accurics/terrascan/pkg/notifications" ) // Executor object @@ -29,7 +30,9 @@ type Executor struct { cloudType string iacType string iacVersion string + configFile string iacProvider iacProvider.IacProvider + notifiers notifications.Notifier } // NewExecutor creates a runtime object @@ -66,23 +69,34 @@ func (e *Executor) Init() error { return err } + // create new notifiers + e.notifiers, err = notifications.NewNotifier("webhook") + if err != nil { + zap.S().Errorf("failed to create notifier(s). error: '%s'", err) + return err + } + + zap.S().Debug("initialized executor") return nil } // Execute validates the inputs, processes the IaC, creates json output func (e *Executor) Execute() (normalized interface{}, err error) { + // create normalized output from Iac if e.dirPath != "" { normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } else { - // create config from IaC normalized, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { return normalized, err } - // write output + // evaluate policies + + // send notifications, if configured + e.notifiers.SendNotification() // successful return normalized, nil diff --git a/pkg/utils/http/request.go b/pkg/utils/http/request.go new file mode 100644 index 000000000..b0ce6d960 --- /dev/null +++ b/pkg/utils/http/request.go @@ -0,0 +1,58 @@ +package httputils + +import ( + "bytes" + "encoding/json" + "log" + "net/http" + + "github.com/hashicorp/go-retryablehttp" + "go.uber.org/zap" +) + +const ( + errNewRequest = fmt.Errorf("failed to create http request") + errDoRequest = fmt.Errorf("failed to make http request") +) + +// default global http client +var client *http.Client = &http.Client{} + +// init creates a http client which retries on errors like connection timeouts, +// server too slow respond etc. 
+func init() { + retryClient := retryablehttp.NewClient() + retryClient.RetryMax = 10 + client = retryClient.StandardClient() +} + +// SendRequest sends a http request on the given url +func SendRequest(method, url, token string, data []byte) (*http.Response, error) { + + var resp *http.Response + + // new http request + req, err := http.NewRequest("POST", url, bytes.NewBuffer(data)) + if err != nil { + zap.S().Errorf("failed to create http request; method: '%v', url: '%v'") + return resp, errNewRequest + } + req.Header.Set("Content-Type", "application/json") + if token != nil { + req.Header.Set("Authorization", fmt.Sprintf("Bearer: '%s'", token)) + } + + // make request + resp, err := client.Do(req) + if err != nil { + zap.S().Errorf("failed to make http request; method: '%v', url: '%v'") + return resp, errDoRequest + } + + return resp, err +} + +// SendPOSTRequest sends a http POST request +func SendPOSTRequest(url, token string) (*http.Response, error) { + return SendRequest("POST", url, token) +} From 96480b860156f1d912bb82a74ba9f37dbe662655 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 5 Aug 2020 18:53:55 +0530 Subject: [PATCH 101/188] add support for webhook notifier --- cmd/terrascan/main.go | 5 +- config/terrascan.toml | 6 +++ go.mod | 2 + go.sum | 7 +++ pkg/cli/run.go | 12 ++--- pkg/http-server/file-scan.go | 2 +- pkg/notifications/interface.go | 4 +- pkg/notifications/notifiers.go | 81 +++++++++++++++++++++++++--- pkg/notifications/webhook/types.go | 4 +- pkg/notifications/webhook/webhook.go | 38 ++++++++++--- pkg/runtime/executor.go | 9 ++-- pkg/runtime/notifications.go | 28 ++++++++++ pkg/utils/http/request.go | 15 +++--- 13 files changed, 176 insertions(+), 37 deletions(-) create mode 100644 config/terrascan.toml create mode 100644 pkg/runtime/notifications.go diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 6b33c1f82..7cbaaf0e7 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -45,6 +45,9 @@ func main() { // logging flags logLevel = flag.String("log-level", "info", "logging level (debug, info, warn, error, panic, fatal)") logType = flag.String("log-type", "console", "log type (json, console)") + + // config file + configFile = flag.String("config", "", "config file path") ) flag.Parse() @@ -61,6 +64,6 @@ func main() { } else { logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") - cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath) + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile) } } diff --git a/config/terrascan.toml b/config/terrascan.toml new file mode 100644 index 000000000..d1af36565 --- /dev/null +++ b/config/terrascan.toml @@ -0,0 +1,6 @@ +# terrascan configuration file + +# notifications configuration +[notifications] + [notifications.webhook] + url = "https://httpbin.org/post" diff --git a/go.mod b/go.mod index 321a14d32..04ee7a44a 100644 --- a/go.mod +++ b/go.mod @@ -4,9 +4,11 @@ go 1.14 require ( github.com/gorilla/mux v1.7.4 + github.com/hashicorp/go-retryablehttp v0.6.6 github.com/hashicorp/go-version v1.2.0 github.com/hashicorp/hcl/v2 v2.3.0 github.com/hashicorp/terraform v0.12.28 + github.com/pelletier/go-toml v1.8.0 github.com/spf13/afero v1.3.2 github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 diff --git a/go.sum b/go.sum index 442aee8e5..8448a0219 100644 --- a/go.sum +++ b/go.sum @@ -133,16 +133,20 @@ github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brv github.com/hashicorp/go-azure-helpers 
v0.10.0/go.mod h1:YuAtHxm2v74s+IjQwUG88dHBJPd5jL+cXr5BGVzSKhE= github.com/hashicorp/go-checkpoint v0.5.0/go.mod h1:7nfLNL10NsxqO4iWuW6tWW0HjZuDrwkBuEQsVcpCOgg= github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= +github.com/hashicorp/go-cleanhttp v0.5.1 h1:dH3aiDG9Jvb5r5+bYHsikaOUIpcM0xvgMXVoDkXMzJM= github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80= github.com/hashicorp/go-getter v1.4.2-0.20200106182914-9813cbd4eb02/go.mod h1:7qxyCd8rBfcShwsvxgIguu4KbS3l8bUCwg2Umn7RjeY= github.com/hashicorp/go-hclog v0.0.0-20180709165350-ff2cf002a8dd/go.mod h1:9bjs9uLqI8l75knNv3lV1kA55veR+WUPSiKIWcQHudI= github.com/hashicorp/go-hclog v0.0.0-20181001195459-61d530d6c27f/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= +github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= github.com/hashicorp/go-immutable-radix v0.0.0-20180129170900-7f3cd4390caa/go.mod h1:6ij3Z20p+OhOkCSrA0gImAWoHYQRGbnlcuk6XYTiaRw= github.com/hashicorp/go-msgpack v0.5.4/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM= github.com/hashicorp/go-multierror v1.0.0 h1:iVjPR7a6H0tWELX5NxNe7bYopibicUzc7uPribsnS6o= github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk= github.com/hashicorp/go-plugin v1.3.0/go.mod h1:F9eH4LrE/ZsRdbwhfjs9k9HoDUwAHnYtXdgmf1AVNs0= github.com/hashicorp/go-retryablehttp v0.5.2/go.mod h1:9B5zBasrRhHXnJnui7y6sL7es7NDiJgTc6Er0maI1Xs= +github.com/hashicorp/go-retryablehttp v0.6.6 h1:HJunrbHTDDbBb/ay4kxa1n+dLmttUlnP3V9oNE4hmsM= +github.com/hashicorp/go-retryablehttp v0.6.6/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= github.com/hashicorp/go-slug v0.4.1/go.mod h1:I5tq5Lv0E2xcNXNkmx7BSfzi1PsJ2cNjs3cC3LwyhK8= @@ -235,6 +239,8 @@ github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw= +github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -438,6 +444,7 @@ gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bl gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod 
h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= diff --git a/pkg/cli/run.go b/pkg/cli/run.go index e4dbd3810..e3e76d8f3 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -17,24 +17,24 @@ package cli import ( - "os" + // "os" "github.com/accurics/terrascan/pkg/runtime" - "github.com/accurics/terrascan/pkg/utils" + // "github.com/accurics/terrascan/pkg/utils" ) // Run executes terrascan in CLI mode -func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath string) { +func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile string) { // create a new runtime executor for processing IaC executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, - iacDirPath) + iacDirPath, configFile) if err != nil { return } - normalized, err := executor.Execute() + _, err = executor.Execute() if err != nil { return } - utils.PrintJSON(normalized, os.Stdout) + // utils.PrintJSON(normalized, os.Stdout) } diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go index 3a5564aaa..5a60498f3 100644 --- a/pkg/http-server/file-scan.go +++ b/pkg/http-server/file-scan.go @@ -83,7 +83,7 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { // create a new runtime executor for scanning the uploaded file executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, - tempFile.Name(), "") + tempFile.Name(), "", "") if err != nil { zap.S().Error(err) apiErrorResponse(w, err.Error(), http.StatusBadRequest) diff --git a/pkg/notifications/interface.go b/pkg/notifications/interface.go index 255c4c9eb..49d133b6c 100644 --- a/pkg/notifications/interface.go +++ b/pkg/notifications/interface.go @@ -3,6 +3,6 @@ package notifications // Notifier defines the interface which every type of notification provider // needs to implement to claim support in terrascan type Notifier interface { - Init() error - SendNotification() error + Init(interface{}) error + SendNotification(interface{}) error } diff --git a/pkg/notifications/notifiers.go b/pkg/notifications/notifiers.go index 54713454f..baf5a2db9 100644 --- a/pkg/notifications/notifiers.go +++ b/pkg/notifications/notifiers.go @@ -18,13 +18,22 @@ package notifications import ( "fmt" + "os" "reflect" + "github.com/pelletier/go-toml" "go.uber.org/zap" ) +const ( + notificationsConfigKey = "notifications" +) + var ( + errNotPresent = fmt.Errorf("config file not present") errNotifierNotSupported = fmt.Errorf("notifier not supported") + errTomlLoadConfig = fmt.Errorf("failed to load toml config") + errTomlKeyNotPresent = fmt.Errorf("key not present in toml config") ) // NewNotifier returns a new notifier @@ -37,14 +46,74 @@ func NewNotifier(notifierType string) (notifier Notifier, err error) { return notifier, errNotifierNotSupported } - // notifier - notifier = reflect.New(notifierObject).Interface().(Notifier) + // successful + return reflect.New(notifierObject).Interface().(Notifier), nil +} + +// NewNotifiers returns a list of notifiers configured in the config file +func NewNotifiers(configFile string) ([]Notifier, error) { - // initialize notifier - notifier.Init() + var notifiers []Notifier - // successful - return notifier, nil + // empty config file path + if configFile == "" { + zap.S().Infof("no config file specified") + return notifiers, nil + } + + // check if file exists + _, err := os.Stat(configFile) + if err != nil { + zap.S().Errorf("config file '%s' not present", configFile) + return notifiers, errNotPresent + } + + // parse toml config file + config, err := 
toml.LoadFile(configFile) + if err != nil { + zap.S().Errorf("failed to load toml config file '%s'. error: '%v'", err) + return notifiers, errTomlLoadConfig + } + + // get config for 'notifications' + keyConfig := config.Get(notificationsConfigKey) + if keyConfig == nil { + zap.S().Infof("key '%s' not present in toml config", notificationsConfigKey) + return notifiers, errTomlKeyNotPresent + } + + // get all the notifier types configured in TOML config + keyTomlConfig := keyConfig.(*toml.Tree) + notifierTypes := keyTomlConfig.Keys() + + // create notifiers + for _, nType := range notifierTypes { + + // check if toml config present for notifier type + nTypeConfig := keyTomlConfig.Get(nType) + if nTypeConfig == nil { + zap.S().Errorf("notifier '%v' config not present", nType) + return notifiers, errTomlKeyNotPresent + } + + // create a new notifier + n, err := NewNotifier(nType) + if err != nil { + continue + } + + // populate data + err = n.Init(nTypeConfig) + if err != nil { + continue + } + + // add to the list of notifiers + notifiers = append(notifiers, n) + } + + // return list of notifiers + return notifiers, nil } // IsNotifierSupported returns true/false depending on whether the notifier diff --git a/pkg/notifications/webhook/types.go b/pkg/notifications/webhook/types.go index a1c131ad1..0bfebcb60 100644 --- a/pkg/notifications/webhook/types.go +++ b/pkg/notifications/webhook/types.go @@ -18,6 +18,6 @@ package webhook // Webhook implements the Notifier interface type Webhook struct { - url string - authToken string + URL string + Token string } diff --git a/pkg/notifications/webhook/webhook.go b/pkg/notifications/webhook/webhook.go index 5eaf5e1e9..fa59598fc 100644 --- a/pkg/notifications/webhook/webhook.go +++ b/pkg/notifications/webhook/webhook.go @@ -17,20 +17,32 @@ package webhook import ( + "encoding/json" + "fmt" + "net/http" + + httputils "github.com/accurics/terrascan/pkg/utils/http" + "github.com/pelletier/go-toml" "go.uber.org/zap" ) +var ( + errInitFailed = fmt.Errorf("failed to initialize webhook notifier") +) + // Init initalizes the webhook notifier, reads config file and configures the // necessary parameters for webhook notifications to work -func (w *Webhook) Init() error { +func (w *Webhook) Init(config interface{}) error { - // check if conf file exists - - // parse conf file - - // read webhook url and auth token + // config to *toml.Tree + tomlConfig := config.(*toml.Tree) // initalize Webhook struct with url and token + err := tomlConfig.Unmarshal(w) + if err != nil { + zap.S().Error(errInitFailed.Error()) + return errInitFailed + } // succesful zap.S().Debug("initialized webhook notifier") @@ -39,11 +51,23 @@ func (w *Webhook) Init() error { // SendNotification sends webhook notification i.e sends a http POST request // to the configured URL -func (w *Webhook) SendNotification() error { +func (w *Webhook) SendNotification(data interface{}) error { + + // convert data to json + dataBytes, _ := json.Marshal(data) // make http POST request + resp, err := httputils.SendPOSTRequest(w.URL, w.Token, dataBytes) + if err != nil { + zap.S().Errorf("failed to send webhook notification. error: '%v'", err) + return err + } // validate http response + if resp.StatusCode != http.StatusOK && resp.StatusCode != http.StatusCreated && resp.StatusCode != http.StatusAccepted { + zap.S().Errorf("failed to webhook notification. 
Incorrect status code: '%v'", resp.StatusCode) + return fmt.Errorf("webhook notification failed") + } // successful zap.S().Debug("sent webhook notification") diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index d33855647..40e22e558 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -32,17 +32,18 @@ type Executor struct { iacVersion string configFile string iacProvider iacProvider.IacProvider - notifiers notifications.Notifier + notifiers []notifications.Notifier } // NewExecutor creates a runtime object -func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath string) (e *Executor, err error) { +func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath, configFile string) (e *Executor, err error) { e = &Executor{ filePath: filePath, dirPath: dirPath, cloudType: cloudType, iacType: iacType, iacVersion: iacVersion, + configFile: configFile, } // initialized executor @@ -70,7 +71,7 @@ func (e *Executor) Init() error { } // create new notifiers - e.notifiers, err = notifications.NewNotifier("webhook") + e.notifiers, err = notifications.NewNotifiers(e.configFile) if err != nil { zap.S().Errorf("failed to create notifier(s). error: '%s'", err) return err @@ -96,7 +97,7 @@ func (e *Executor) Execute() (normalized interface{}, err error) { // evaluate policies // send notifications, if configured - e.notifiers.SendNotification() + e.SendNotifications(normalized) // successful return normalized, nil diff --git a/pkg/runtime/notifications.go b/pkg/runtime/notifications.go new file mode 100644 index 000000000..004b0497b --- /dev/null +++ b/pkg/runtime/notifications.go @@ -0,0 +1,28 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package runtime + +// SendNotifications sends notifications via all the configured notifiers +func (e *Executor) SendNotifications(data interface{}) { + // send notifications using configured notifiers + for _, notifier := range e.notifiers { + err := notifier.SendNotification(data) + if err != nil { + continue + } + } +} diff --git a/pkg/utils/http/request.go b/pkg/utils/http/request.go index b0ce6d960..7127ea171 100644 --- a/pkg/utils/http/request.go +++ b/pkg/utils/http/request.go @@ -2,21 +2,20 @@ package httputils import ( "bytes" - "encoding/json" - "log" + "fmt" "net/http" "github.com/hashicorp/go-retryablehttp" "go.uber.org/zap" ) -const ( +var ( errNewRequest = fmt.Errorf("failed to create http request") errDoRequest = fmt.Errorf("failed to make http request") ) // default global http client -var client *http.Client = &http.Client{} +var client *http.Client // init creates a http client which retries on errors like connection timeouts, // server too slow respond etc. 
@@ -38,12 +37,12 @@ func SendRequest(method, url, token string, data []byte) (*http.Response, error) return resp, errNewRequest } req.Header.Set("Content-Type", "application/json") - if token != nil { + if token != "" { req.Header.Set("Authorization", fmt.Sprintf("Bearer: '%s'", token)) } // make request - resp, err := client.Do(req) + resp, err = client.Do(req) if err != nil { zap.S().Errorf("failed to make http request; method: '%v', url: '%v'") return resp, errDoRequest @@ -53,6 +52,6 @@ func SendRequest(method, url, token string, data []byte) (*http.Response, error) } // SendPOSTRequest sends a http POST request -func SendPOSTRequest(url, token string) (*http.Response, error) { - return SendRequest("POST", url, token) +func SendPOSTRequest(url, token string, data []byte) (*http.Response, error) { + return SendRequest("POST", url, token, data) } From ee73d7108607c520149195d80f7f0f8d8ddc6731 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 7 Aug 2020 23:33:14 +0530 Subject: [PATCH 102/188] add unit tests for utils.WrapError func --- pkg/utils/wrap_errors.go | 38 +++++++++++++++++++++++++ pkg/utils/wrap_errors_test.go | 53 +++++++++++++++++++++++++++++++++++ 2 files changed, 91 insertions(+) create mode 100644 pkg/utils/wrap_errors.go create mode 100644 pkg/utils/wrap_errors_test.go diff --git a/pkg/utils/wrap_errors.go b/pkg/utils/wrap_errors.go new file mode 100644 index 000000000..c1f85ac4b --- /dev/null +++ b/pkg/utils/wrap_errors.go @@ -0,0 +1,38 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package utils + +import ( + "github.com/pkg/errors" +) + +// WrapError wraps given err with allErrs and returns a unified error +func WrapError(err, allErrs error) error { + // if allErrs is empty, return err + if allErrs == nil { + return err + } + + // if err empty return allErrs + if err == nil { + return allErrs + } + + // wrap err with allErrs + allErrs = errors.Wrap(err, allErrs.Error()) + return allErrs +} diff --git a/pkg/utils/wrap_errors_test.go b/pkg/utils/wrap_errors_test.go new file mode 100644 index 000000000..f5235c08a --- /dev/null +++ b/pkg/utils/wrap_errors_test.go @@ -0,0 +1,53 @@ +package utils + +import ( + "fmt" + "reflect" + "testing" +) + +func TestWrapError(t *testing.T) { + + mockErr := fmt.Errorf("mock error") + + table := []struct { + name string + err error + allErr error + wantErr error + }{ + { + name: "empty allErrs", + allErr: nil, + err: mockErr, + wantErr: mockErr, + }, + { + name: "empty err", + err: nil, + allErr: mockErr, + wantErr: mockErr, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + gotErr := WrapError(tt.err, tt.allErr) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("incorrect error; got: '%+v', want: '%+v'", gotErr, tt.wantErr) + } + }) + } + + t.Run("wrapped error", func(t *testing.T) { + var ( + err = fmt.Errorf("mock err") + allErrs = fmt.Errorf("mock allErrs") + wantErr = fmt.Errorf("%s: %s", allErrs.Error(), err.Error()) + ) + gotErr := WrapError(err, allErrs) + if gotErr.Error() != wantErr.Error() { + t.Errorf("incorrect error: got: '%v', want: '%v'", gotErr, wantErr) + } + }) +} From c0680975a75d66b89f71a5b7a92f295c59c4491d Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 7 Aug 2020 23:34:07 +0530 Subject: [PATCH 103/188] add notifier unit tests to runtime package --- pkg/runtime/executor.go | 4 +- pkg/runtime/executor_test.go | 68 +++++++++++++++++++--- pkg/runtime/notifications.go | 9 ++- pkg/runtime/notifications_test.go | 59 +++++++++++++++++++ pkg/runtime/testdata/invalid-notifier.toml | 6 ++ pkg/runtime/testdata/webhook.toml | 6 ++ 6 files changed, 143 insertions(+), 9 deletions(-) create mode 100644 pkg/runtime/notifications_test.go create mode 100644 pkg/runtime/testdata/invalid-notifier.toml create mode 100644 pkg/runtime/testdata/webhook.toml diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 40e22e558..54482321e 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -97,7 +97,9 @@ func (e *Executor) Execute() (normalized interface{}, err error) { // evaluate policies // send notifications, if configured - e.SendNotifications(normalized) + if err = e.SendNotifications(normalized); err != nil { + return normalized, err + } // successful return normalized, nil diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 46b5ed607..3cb31d1ba 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -24,6 +24,8 @@ import ( iacProvider "github.com/accurics/terrascan/pkg/iac-providers" "github.com/accurics/terrascan/pkg/iac-providers/output" tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" + "github.com/accurics/terrascan/pkg/notifications" + "github.com/accurics/terrascan/pkg/notifications/webhook" ) var ( @@ -85,6 +87,22 @@ func TestExecute(t *testing.T) { }, wantErr: nil, }, + { + name: "test SendNofitications no error", + executor: Executor{ + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + }, + wantErr: nil, + }, + { + 
name: "test SendNofitications no error", + executor: Executor{ + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: mockNotifierErr}}, + }, + wantErr: mockNotifierErr, + }, } for _, tt := range table { @@ -104,6 +122,7 @@ func TestInit(t *testing.T) { executor Executor wantErr error wantIacProvider iacProvider.IacProvider + wantNotifiers []notifications.Notifier }{ { name: "valid filePath", @@ -116,16 +135,51 @@ func TestInit(t *testing.T) { }, wantErr: nil, wantIacProvider: &tfv12.TfV12{}, + wantNotifiers: []notifications.Notifier{}, + }, + { + name: "valid notifier", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + configFile: "./testdata/webhook.toml", + }, + wantErr: nil, + wantIacProvider: &tfv12.TfV12{}, + wantNotifiers: []notifications.Notifier{&webhook.Webhook{}}, + }, + { + name: "config not present", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + configFile: "./testdata/does-not-exist", + }, + wantErr: fmt.Errorf("config file not present"), + wantIacProvider: &tfv12.TfV12{}, }, } for _, tt := range table { - gotErr := tt.executor.Init() - if !reflect.DeepEqual(gotErr, tt.wantErr) { - t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) - } - if !reflect.DeepEqual(tt.executor.iacProvider, tt.wantIacProvider) { - t.Errorf("got: '%v', want: '%v'", tt.executor.iacProvider, tt.wantIacProvider) - } + t.Run(tt.name, func(t *testing.T) { + gotErr := tt.executor.Init() + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("unexpected error; gotErr: '%v', wantErr: '%v'", gotErr, tt.wantErr) + } + if !reflect.DeepEqual(tt.executor.iacProvider, tt.wantIacProvider) { + t.Errorf("got: '%v', want: '%v'", tt.executor.iacProvider, tt.wantIacProvider) + } + for i, notifier := range tt.executor.notifiers { + if !reflect.DeepEqual(reflect.TypeOf(notifier), reflect.TypeOf(tt.wantNotifiers[i])) { + t.Errorf("got: '%v', want: '%v'", reflect.TypeOf(notifier), reflect.TypeOf(tt.wantNotifiers[i])) + } + } + }) } } diff --git a/pkg/runtime/notifications.go b/pkg/runtime/notifications.go index 004b0497b..8cda3bf50 100644 --- a/pkg/runtime/notifications.go +++ b/pkg/runtime/notifications.go @@ -16,13 +16,20 @@ package runtime +import ( + "github.com/accurics/terrascan/pkg/utils" +) + // SendNotifications sends notifications via all the configured notifiers -func (e *Executor) SendNotifications(data interface{}) { +func (e *Executor) SendNotifications(data interface{}) error { + var allErrs error // send notifications using configured notifiers for _, notifier := range e.notifiers { err := notifier.SendNotification(data) if err != nil { + allErrs = utils.WrapError(err, allErrs) continue } } + return allErrs } diff --git a/pkg/runtime/notifications_test.go b/pkg/runtime/notifications_test.go new file mode 100644 index 000000000..def21bf65 --- /dev/null +++ b/pkg/runtime/notifications_test.go @@ -0,0 +1,59 @@ +package runtime + +import ( + "fmt" + "reflect" + "testing" + + "github.com/accurics/terrascan/pkg/notifications" +) + +// MockNotifier mocks notifications.Notifier interface +type MockNotifier struct { + err error +} + +var ( + mockNotifierErr = fmt.Errorf("mock notification error") +) + +func (m MockNotifier) Init(config interface{}) error { + return m.err +} + +func (m MockNotifier) SendNotification(config interface{}) error { + return m.err +} + 
+func TestSendNotifications(t *testing.T) { + + table := []struct { + name string + executor Executor + wantErr error + }{ + { + name: "no notifier error", + executor: Executor{ + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + }, + wantErr: nil, + }, + { + name: "no notifier error", + executor: Executor{ + notifiers: []notifications.Notifier{&MockNotifier{err: mockNotifierErr}}, + }, + wantErr: mockNotifierErr, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + gotErr := tt.executor.SendNotifications("some data") + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("incorrect error; got: '%v', want: '%v'", gotErr, tt.wantErr) + } + }) + } +} diff --git a/pkg/runtime/testdata/invalid-notifier.toml b/pkg/runtime/testdata/invalid-notifier.toml new file mode 100644 index 000000000..74c795e09 --- /dev/null +++ b/pkg/runtime/testdata/invalid-notifier.toml @@ -0,0 +1,6 @@ +# terrascan configuration file + +# notifications configuration +[notifications] + [notifications.invalid] + url = "https://httpbin.org/post" diff --git a/pkg/runtime/testdata/webhook.toml b/pkg/runtime/testdata/webhook.toml new file mode 100644 index 000000000..d1af36565 --- /dev/null +++ b/pkg/runtime/testdata/webhook.toml @@ -0,0 +1,6 @@ +# terrascan configuration file + +# notifications configuration +[notifications] + [notifications.webhook] + url = "https://httpbin.org/post" From 90071434bc68b3d351301444bbfcd1728415cd53 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 8 Aug 2020 01:34:24 +0530 Subject: [PATCH 104/188] add unit test for notifications package --- go.mod | 1 + pkg/notifications/notifiers.go | 17 +++- pkg/notifications/notifiers_test.go | 91 +++++++++++++++++++ .../testdata/empty-notifier-config.toml | 2 + .../testdata/invalid-notifier-config.toml | 4 + .../testdata/invalid-notifier-type.toml | 6 ++ pkg/notifications/testdata/invalid.toml | 1 + pkg/notifications/testdata/nokey.toml | 2 + pkg/runtime/executor_test.go | 14 +++ 9 files changed, 135 insertions(+), 3 deletions(-) create mode 100644 pkg/notifications/notifiers_test.go create mode 100644 pkg/notifications/testdata/empty-notifier-config.toml create mode 100644 pkg/notifications/testdata/invalid-notifier-config.toml create mode 100644 pkg/notifications/testdata/invalid-notifier-type.toml create mode 100644 pkg/notifications/testdata/invalid.toml create mode 100644 pkg/notifications/testdata/nokey.toml diff --git a/go.mod b/go.mod index 04ee7a44a..48eb15a73 100644 --- a/go.mod +++ b/go.mod @@ -9,6 +9,7 @@ require ( github.com/hashicorp/hcl/v2 v2.3.0 github.com/hashicorp/terraform v0.12.28 github.com/pelletier/go-toml v1.8.0 + github.com/pkg/errors v0.9.1 github.com/spf13/afero v1.3.2 github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 diff --git a/pkg/notifications/notifiers.go b/pkg/notifications/notifiers.go index baf5a2db9..add3b8e1f 100644 --- a/pkg/notifications/notifiers.go +++ b/pkg/notifications/notifiers.go @@ -21,6 +21,7 @@ import ( "os" "reflect" + "github.com/accurics/terrascan/pkg/utils" "github.com/pelletier/go-toml" "go.uber.org/zap" ) @@ -87,24 +88,34 @@ func NewNotifiers(configFile string) ([]Notifier, error) { notifierTypes := keyTomlConfig.Keys() // create notifiers + var allErrs error for _, nType := range notifierTypes { + if !IsNotifierSupported(nType) { + zap.S().Errorf("notifier type '%s' not supported", nType) + allErrs = utils.WrapError(errNotifierNotSupported, allErrs) + continue + } + // check if toml config present for notifier type nTypeConfig := 
keyTomlConfig.Get(nType) - if nTypeConfig == nil { + if nTypeConfig.(*toml.Tree).String() == "" { zap.S().Errorf("notifier '%v' config not present", nType) - return notifiers, errTomlKeyNotPresent + allErrs = utils.WrapError(errTomlKeyNotPresent, allErrs) + continue } // create a new notifier n, err := NewNotifier(nType) if err != nil { + allErrs = utils.WrapError(err, allErrs) continue } // populate data err = n.Init(nTypeConfig) if err != nil { + allErrs = utils.WrapError(err, allErrs) continue } @@ -113,7 +124,7 @@ func NewNotifiers(configFile string) ([]Notifier, error) { } // return list of notifiers - return notifiers, nil + return notifiers, allErrs } // IsNotifierSupported returns true/false depending on whether the notifier diff --git a/pkg/notifications/notifiers_test.go b/pkg/notifications/notifiers_test.go new file mode 100644 index 000000000..2f71af0c9 --- /dev/null +++ b/pkg/notifications/notifiers_test.go @@ -0,0 +1,91 @@ +package notifications + +import ( + "reflect" + "testing" + + "github.com/accurics/terrascan/pkg/notifications/webhook" +) + +func TestNewNotifier(t *testing.T) { + + table := []struct { + name string + nType string + wantType Notifier + wantErr error + }{ + { + name: "valid notifier", + nType: "webhook", + wantType: &webhook.Webhook{}, + wantErr: nil, + }, + { + name: "invalid notifier", + nType: "notthere", + wantErr: errNotifierNotSupported, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + gotType, gotErr := NewNotifier(tt.nType) + if !reflect.DeepEqual(gotType, tt.wantType) { + t.Errorf("got: '%v', want: '%v'", gotType, tt.wantType) + } + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("incorrect error; got: '%v', want: '%v'", gotErr, tt.wantErr) + } + }) + } +} + +func TestNewNotifiers(t *testing.T) { + + table := []struct { + name string + configFile string + wantErr error + }{ + { + name: "config not present", + configFile: "notthere", + wantErr: errNotPresent, + }, + { + name: "invalid toml", + configFile: "testdata/invalid.toml", + wantErr: errTomlLoadConfig, + }, + { + name: "key not present", + configFile: "testdata/nokey.toml", + wantErr: errTomlKeyNotPresent, + }, + { + name: "invalid notifier", + configFile: "testdata/invalid-notifier-type.toml", + wantErr: errNotifierNotSupported, + }, + { + name: "empty notifier config", + configFile: "testdata/empty-notifier-config.toml", + wantErr: errTomlKeyNotPresent, + }, + { + name: "invalid notifier config", + configFile: "testdata/invalid-notifier-config.toml", + wantErr: nil, + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + _, gotErr := NewNotifiers(tt.configFile) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("incorrect error; got: '%v', want: '%v'", gotErr, tt.wantErr) + } + }) + } +} diff --git a/pkg/notifications/testdata/empty-notifier-config.toml b/pkg/notifications/testdata/empty-notifier-config.toml new file mode 100644 index 000000000..299652e1f --- /dev/null +++ b/pkg/notifications/testdata/empty-notifier-config.toml @@ -0,0 +1,2 @@ +[notifications] + [notifications.webhook] diff --git a/pkg/notifications/testdata/invalid-notifier-config.toml b/pkg/notifications/testdata/invalid-notifier-config.toml new file mode 100644 index 000000000..4b2914425 --- /dev/null +++ b/pkg/notifications/testdata/invalid-notifier-config.toml @@ -0,0 +1,4 @@ +[notifications] + [notifications.webhook] + key1 = "val1" + key2 = "val2" diff --git a/pkg/notifications/testdata/invalid-notifier-type.toml 
b/pkg/notifications/testdata/invalid-notifier-type.toml new file mode 100644 index 000000000..74c795e09 --- /dev/null +++ b/pkg/notifications/testdata/invalid-notifier-type.toml @@ -0,0 +1,6 @@ +# terrascan configuration file + +# notifications configuration +[notifications] + [notifications.invalid] + url = "https://httpbin.org/post" diff --git a/pkg/notifications/testdata/invalid.toml b/pkg/notifications/testdata/invalid.toml new file mode 100644 index 000000000..030ffe2ec --- /dev/null +++ b/pkg/notifications/testdata/invalid.toml @@ -0,0 +1 @@ +I am an invalid toml diff --git a/pkg/notifications/testdata/nokey.toml b/pkg/notifications/testdata/nokey.toml new file mode 100644 index 000000000..b2529fbd5 --- /dev/null +++ b/pkg/notifications/testdata/nokey.toml @@ -0,0 +1,2 @@ +[somefield] +somekey = "somevalue" diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 3cb31d1ba..8105e022b 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -151,6 +151,20 @@ func TestInit(t *testing.T) { wantIacProvider: &tfv12.TfV12{}, wantNotifiers: []notifications.Notifier{&webhook.Webhook{}}, }, + { + name: "invalid notifier", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + configFile: "testdata/invalid-notifier.toml", + }, + wantErr: fmt.Errorf("notifier not supported"), + wantIacProvider: &tfv12.TfV12{}, + wantNotifiers: []notifications.Notifier{&webhook.Webhook{}}, + }, { name: "config not present", executor: Executor{ From 3a8a3145c2fb7d6aa82a2186083827e60bc5fb6d Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 8 Aug 2020 01:39:22 +0530 Subject: [PATCH 105/188] fixing lint error --- pkg/runtime/executor_test.go | 4 ++-- pkg/runtime/notifications_test.go | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 8105e022b..62d163754 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -99,9 +99,9 @@ func TestExecute(t *testing.T) { name: "test SendNofitications no error", executor: Executor{ iacProvider: MockIacProvider{err: nil}, - notifiers: []notifications.Notifier{&MockNotifier{err: mockNotifierErr}}, + notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}}, }, - wantErr: mockNotifierErr, + wantErr: errMockNotifier, }, } diff --git a/pkg/runtime/notifications_test.go b/pkg/runtime/notifications_test.go index def21bf65..584ed2c15 100644 --- a/pkg/runtime/notifications_test.go +++ b/pkg/runtime/notifications_test.go @@ -14,7 +14,7 @@ type MockNotifier struct { } var ( - mockNotifierErr = fmt.Errorf("mock notification error") + errMockNotifier = fmt.Errorf("mock notification error") ) func (m MockNotifier) Init(config interface{}) error { @@ -42,9 +42,9 @@ func TestSendNotifications(t *testing.T) { { name: "no notifier error", executor: Executor{ - notifiers: []notifications.Notifier{&MockNotifier{err: mockNotifierErr}}, + notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}}, }, - wantErr: mockNotifierErr, + wantErr: errMockNotifier, }, } From 3765fea2773c85f0ca6b97bb4deb13e2f02c6d5a Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Sat, 8 Aug 2020 23:45:52 +0530 Subject: [PATCH 106/188] add goreleaser support --- .github/workflows/release.yml | 29 +++++++++++++++++++++++++++++ .goreleaser.yml | 33 +++++++++++++++++++++++++++++++++ 2 files changed, 62 insertions(+) create mode 100644 
.github/workflows/release.yml create mode 100644 .goreleaser.yml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 000000000..61253871b --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,29 @@ +name: release + +on: + push: + tags: + - '*' + +jobs: + release: + runs-on: ubuntu-latest + steps: + - + name: Checkout + uses: actions/checkout@v2 + with: + fetch-depth: 0 + - + name: Set up Go + uses: actions/setup-go@v2 + with: + go-version: 1.14 + - + name: Run GoReleaser + uses: goreleaser/goreleaser-action@v2 + with: + version: latest + args: release --rm-dist + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.goreleaser.yml b/.goreleaser.yml new file mode 100644 index 000000000..714196f5c --- /dev/null +++ b/.goreleaser.yml @@ -0,0 +1,33 @@ +# This is an example goreleaser.yaml file with some sane defaults. +# Make sure to check the documentation at http://goreleaser.com +before: + hooks: + # You may remove this if you don't use go modules. + - go mod download + # you may remove this if you don't need go generate + - go generate ./... +builds: + - env: + - CGO_ENABLED=0 + goos: + - linux + - windows + - darwin + main: ./cmd/terrascan/main.go +archives: + - replacements: + darwin: Darwin + linux: Linux + windows: Windows + 386: i386 + amd64: x86_64 +checksum: + name_template: 'checksums.txt' +snapshot: + name_template: "{{ .Tag }}-next" +changelog: + sort: asc + filters: + exclude: + - '^docs:' + - '^test:' From ac8b93b4f6143e0bb115e91a9bfd9651bb0cfeba Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 10 Aug 2020 10:28:31 +0530 Subject: [PATCH 107/188] changing workflow name to 'build' --- .github/workflows/gobuild.yml | 2 +- README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/gobuild.yml b/.github/workflows/gobuild.yml index 92adf2085..26d8bedff 100644 --- a/.github/workflows/gobuild.yml +++ b/.github/workflows/gobuild.yml @@ -1,4 +1,4 @@ -name: Go Terrascan build +name: build on: push: pull_request: diff --git a/README.md b/README.md index c5db3a148..c3bdfdb72 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,5 @@ # Terrascan -![CI](/~https://github.com/accurics/terrascan/workflows/Go%20Terrascan%20build/badge.svg) +![CI](/~https://github.com/accurics/terrascan/workflows/build/badge.svg) [![codecov](https://codecov.io/gh/accurics/terrascan/branch/master/graph/badge.svg)](https://codecov.io/gh/accurics/terrascan) [![community](https://img.shields.io/discourse/status?server=https%3A%2F%2Fcommunity.accurics.com)](https://community.accurics.com) [![Documentation](https://readthedocs.org/projects/terrascan/badge/?version=latest)](https://terrascan.readthedocs.io/en/latest/?badge=latest) From 8fd495dd2288d754608d1dddde9235002eaeebf8 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Wed, 5 Aug 2020 02:35:24 -0700 Subject: [PATCH 108/188] First cut of the policy engine - OPA can evaluate and read rego files in a given directory - Template regos are supported - Still some bugs to fix around mapping --- go.mod | 1 + go.sum | 53 +++++++ pkg/data/file/importer.go | 23 +++ pkg/policy/interface.go | 31 ++++ pkg/policy/opa/constants.go | 7 + pkg/policy/opa/opa_engine.go | 275 +++++++++++++++++++++++++++++++++++ pkg/runtime/executor.go | 13 +- 7 files changed, 402 insertions(+), 1 deletion(-) create mode 100644 pkg/data/file/importer.go create mode 100644 pkg/policy/interface.go create mode 100644 pkg/policy/opa/constants.go create mode 100644 pkg/policy/opa/opa_engine.go diff 
--git a/go.mod b/go.mod index 48eb15a73..945e3a9f4 100644 --- a/go.mod +++ b/go.mod @@ -8,6 +8,7 @@ require ( github.com/hashicorp/go-version v1.2.0 github.com/hashicorp/hcl/v2 v2.3.0 github.com/hashicorp/terraform v0.12.28 + github.com/open-policy-agent/opa v0.22.0 github.com/pelletier/go-toml v1.8.0 github.com/pkg/errors v0.9.1 github.com/spf13/afero v1.3.2 diff --git a/go.sum b/go.sum index 8448a0219..142c98747 100644 --- a/go.sum +++ b/go.sum @@ -27,6 +27,8 @@ github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= github.com/ChrisTrenkamp/goxpath v0.0.0-20170922090931-c385f95c6022/go.mod h1:nuWgzSkT5PnyOd+272uUmV0dnAnAn42Mk7PiQC5VzN4= +github.com/OneOfOne/xxhash v1.2.7 h1:fzrmmkskv067ZQbd9wERNGuxckWw67dyzoMG62p7LMo= +github.com/OneOfOne/xxhash v1.2.7/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= github.com/QcloudApi/qcloud_sign_golang v0.0.0-20141224014652-e4130a326409/go.mod h1:1pk82RBxDY/JZnPQrtqHlUFfCctgdorsd9M06fMynOM= github.com/Unknwon/com v0.0.0-20151008135407-28b053d5a292/go.mod h1:KYCjqMOeHpNuTOiFQU6WEcTG7poCJrUs0YgyHNtn1no= github.com/abdullin/seq v0.0.0-20160510034733-d5467c17e7af/go.mod h1:5Jv4cbFiHJMsVxt52+i0Ha45fjshj6wxYr1r19tB9bw= @@ -55,6 +57,7 @@ github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3A github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.30.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f/go.mod h1:AuiFmCCPBSrqvVMvuqFuk0qogytodnVFVSN5CeJB8Gc= +github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973 h1:xJ4a3vCFaGF/jqvzLMYoU8P317H5OQ+Via4RmuPwCS0= github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs= @@ -72,6 +75,7 @@ github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -83,6 +87,9 @@ github.com/dylanmei/winrmtest v0.0.0-20190225150635-99b7fe2fddf1/go.mod h1:lcy9/ github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= +github.com/ghodss/yaml 
v0.0.0-20180820084758-c7ce16629ff4/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= @@ -90,17 +97,22 @@ github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LB github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY= github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-test/deep v1.0.3/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= +github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= +github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.0/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= github.com/golang/groupcache v0.0.0-20180513044358-24b0969c4cb7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y= +github.com/golang/protobuf v0.0.0-20181025225059-d3de96c4c28e/go.mod h1:Qd/q+1AKNOZr9uGQzbzCmRO6sUih6GTPZv6a1/R87v0= github.com/golang/protobuf v1.1.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.4 h1:87PNWwrRvUSnqS4dlcBU/ftvOIBep4sYuBLlh6rX2wk= github.com/golang/protobuf v1.3.4/go.mod h1:vzj43D7+SQXF/4pzW/hwtAqwc6iTitCiVSaWz5lYuqw= github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ= @@ -120,6 +132,7 @@ github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5m github.com/gophercloud/gophercloud v0.0.0-20190208042652-bc37892e1968/go.mod h1:3WdhXV3rUYy9p6AUW8d94kr+HS62Y4VL9mBnFxsD8q4= github.com/gophercloud/utils v0.0.0-20190128072930-fbb6ab446f01/go.mod h1:wjDF8z83zTeg5eMLml5EBSlAhbF7G8DobyI1YsMuyzw= github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY= +github.com/gorilla/mux v0.0.0-20181024020800-521ea7b17d02/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= github.com/gorilla/mux v1.7.4 h1:VuZ8uybHlWmqV03+zRzdwKL4tUnIp1MAQtp1mIFE1bc= github.com/gorilla/mux v1.7.4/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ= @@ -174,6 +187,8 @@ github.com/hashicorp/terraform-svchost v0.0.0-20191011084731-65d371908596 h1:hjy github.com/hashicorp/terraform-svchost 
v0.0.0-20191011084731-65d371908596/go.mod h1:kNDNcF7sN4DocDLBkQYz73HGKwN1ANB1blq4lIYLYvg= github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bAbosPMpP0= github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= +github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= @@ -186,8 +201,11 @@ github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVY github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= +github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= @@ -211,8 +229,11 @@ github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcncea github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4= github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-runewidth v0.0.0-20181025052659-b20a3daf6a39/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= +github.com/mattn/go-runewidth v0.0.4 h1:2BvfKmzob6Bmd4YsL0zygOqfdFnK7GR4QL06Do4/p7Y= github.com/mattn/go-runewidth v0.0.4/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU= github.com/mattn/go-shellwords v1.0.4/go.mod h1:3xCvwCdWdlDJUrvuMn7Wuy9eWs4pE8vqg+NOMyg4B2o= +github.com/matttproud/golang_protobuf_extensions v1.0.1 h1:4hp9jkHxhMHkqkrB3Ix0jegS5sx/RkqARlsWZ6pIwiU= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/miekg/dns v1.0.8/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg= github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc= @@ -237,11 +258,18 @@ github.com/mozillazg/go-httpheader v0.2.1/go.mod h1:jJ8xECTlalr6ValeXYdOF8fFUISe github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U= github.com/nu7hatch/gouuid v0.0.0-20131221200532-179d4d0c4d8d/go.mod h1:YUTz3bUH2ZwIWBy3CJBeOBEugqcmXREj14T+iG/4k4U= 
github.com/oklog/run v1.0.0/go.mod h1:dlhp/R75TPv97u0XWUtDeV/lRKWPKSdTuV0TZvrmrQA= +github.com/olekukonko/tablewriter v0.0.1 h1:b3iUnf1v+ppJiOfNX4yxxqfWKMQPZR5yoh8urCTFX88= +github.com/olekukonko/tablewriter v0.0.1/go.mod h1:vsDQFd/mU46D+Z4whnwzcISnGGzXWMclvtLoiIKAKIo= +github.com/open-policy-agent/opa v0.22.0 h1:KZvn0uMQIorBIwYk8Vc89dp8No9FIEF8eFl0sc1r/1U= +github.com/open-policy-agent/opa v0.22.0/go.mod h1:rrwxoT/b011T0cyj+gg2VvxqTtn6N3gp/jzmr3fjW44= github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw= github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs= +github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d h1:zapSxdmZYY6vJWXFKLQ+MkI+agc+HQyfrCGowDSHiKs= +github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= github.com/pkg/browser v0.0.0-20180916011732-0a3d74bf9ce4/go.mod h1:4OwLy04Bl9Ef3GJJCoec+30X3LQs/0/m4HFRt/2LUSA= +github.com/pkg/errors v0.0.0-20181023235946-059132a15dd0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= @@ -251,27 +279,42 @@ github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZb github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI= github.com/posener/complete v1.2.1/go.mod h1:6gapUrK/U1TAN7ciCoNRIdVC5sbdBTUh1DKN0g6uH7E= +github.com/prometheus/client_golang v0.0.0-20181025174421-f30f42803563/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw= +github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829 h1:D+CiwcpGTW6pL6bv6KI3KbyEyCKyS+1JWS2h8PNDnGA= github.com/prometheus/client_golang v0.9.3-0.20190127221311-3c4408c8b829/go.mod h1:p2iRAGwDERtqlqzRXnrOVns+ignqQo//hLXqYxZYVNs= github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= github.com/prometheus/client_model v0.0.0-20190115171406-56726106282f/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo= +github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4 h1:gQz4mCbXsO+nc9n1hCxHcGA3Zx3Eo+UHZoInFGUIXNM= github.com/prometheus/client_model v0.0.0-20190812154241-14fe0d1b01d4/go.mod h1:xMI15A0UPsDsEKsMN9yxemIoYk6Tm2C1GtYGdfGttqA= +github.com/prometheus/common v0.0.0-20181020173914-7e9e6cabbd39/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro= +github.com/prometheus/common v0.2.0 h1:kUZDBDTdBVBYBj5Tmh2NZLlF60mfjA27rM34b+cVwNU= github.com/prometheus/common v0.2.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y86RQel1bk4= github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1 h1:/K3IL0Z1quvmJ7X0A1AwNEK7CRkVK3YwfOU/QAL4WGg= github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod 
h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a h1:9ZKAASQSHhDYGoxY8uLVpewe1GDZ2vu2Tr/vTdVAkFQ= +github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= +github.com/sirupsen/logrus v1.4.1 h1:GL2rEmy6nsikmW0r8opw9JIRScdMF5hA8cOYLH7In1k= +github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc= github.com/smartystreets/goconvey v0.0.0-20180222194500-ef6db91d284a/go.mod h1:XDJAKZRPZ1CvBcN2aX5YOUTYGHki24fSF0Iv48Ibg0s= github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM= github.com/spf13/afero v1.2.1/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk= github.com/spf13/afero v1.3.2 h1:GDarE4TJQI52kYSbSAmLiId1Elfj+xgSDqrUZxFhxlU= github.com/spf13/afero v1.3.2/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= +github.com/spf13/cobra v0.0.0-20181021141114-fe5e611709b0 h1:BgSbPgT2Zu8hDen1jJDGLWO8voaSRVrwsk18Q/uSh5M= +github.com/spf13/cobra v0.0.0-20181021141114-fe5e611709b0/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v0.0.0-20181024212040-082b515c9490/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= @@ -292,6 +335,8 @@ github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6Ac github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= +github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b h1:vVRagRXf67ESqAb72hG2C/ZwI8NtJF2u2V76EsuOHGY= +github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b/go.mod h1:HptNXiXVDcJjXe9SqMd0v2FsL9f8dz4GnXgltU6q/co= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= @@ -321,6 +366,7 @@ golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPh golang.org/x/exp 
v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= +golang.org/x/lint v0.0.0-20181023182221-1baf3a9d7d67/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -338,6 +384,7 @@ golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73r golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -379,6 +426,7 @@ golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -387,6 +435,7 @@ golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= @@ -398,6 +447,7 @@ golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0 
h1:Dh6fw+p6FyRl5x/FvNswO1ji0lIGzm3KP8Y9VkS9PTE= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= @@ -421,6 +471,7 @@ google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7 google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0= google.golang.org/genproto v0.0.0-20170818010345-ee236bd376b0/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20180831171423-11092d34479b/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= @@ -438,6 +489,8 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= +gopkg.in/fsnotify.v1 v1.4.7 h1:xOHLXZwVvI9hhs+cLKq5+I5onOuwQLhQwiu63xxlHs4= +gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= diff --git a/pkg/data/file/importer.go b/pkg/data/file/importer.go new file mode 100644 index 000000000..1cc036441 --- /dev/null +++ b/pkg/data/file/importer.go @@ -0,0 +1,23 @@ +package file + +type FileInfo struct { + Path string + Hash string + HashType string + Attributes string +} + +// Group Group metadata +type Group struct { + Name string + IsReadOnly bool + VerifySignatures bool + Directories []*FileInfo + Files []*FileInfo +} + +// Metadata File metadata +type Metadata struct { + Version string + Groups []*Group +} diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go new file mode 100644 index 000000000..8c15e02a2 --- /dev/null +++ b/pkg/policy/interface.go @@ -0,0 +1,31 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ See the License for the specific language governing permissions and + limitations under the License. +*/ + +package policy + +type Manager interface { + Import() error + Export() error + Validate() error +} + +type Engine interface { + Initialize(policyPath string) error + Configure() error + Evaluate(inputData *interface{}) error + GetResults() error + Release() error +} diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go new file mode 100644 index 000000000..7d66da466 --- /dev/null +++ b/pkg/policy/opa/constants.go @@ -0,0 +1,7 @@ +package policy + +const ( + RegoMetadataFile = "rule.json" + RegoFileSuffix = ".rego" + RuleQueryBase = "data.accurics" +) diff --git a/pkg/policy/opa/opa_engine.go b/pkg/policy/opa/opa_engine.go new file mode 100644 index 000000000..bd50ef368 --- /dev/null +++ b/pkg/policy/opa/opa_engine.go @@ -0,0 +1,275 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package policy + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + "strings" + "text/template" + + "github.com/open-policy-agent/opa/ast" + + "go.uber.org/zap" + + "github.com/open-policy-agent/opa/rego" +) + +type AccuricsRegoMetadata struct { + Name string `json:"ruleName"` + DisplayName string `json:"ruleDisplayName"` + Category string `json:"category"` + ImpactedRes []string `json:"impactedRes"` + PolicyRelevance string `json:"policyRelevance"` + Remediation string `json:"remediation"` + Row int `json:"row"` + Rule string `json:"rule"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` + RuleReferenceID string `json:"ruleReferenceId"` + Severity string `json:"severity"` + Vulnerability string `json:"vulnerability"` +} + +type RegoData struct { + Name string `json:"ruleName"` + DisplayName string `json:"ruleDisplayName"` + Category string `json:"category"` + Remediation string `json:"remediation"` + Rule string `json:"rule"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` + RuleReferenceID string `json:"ruleReferenceId"` + Severity string `json:"severity"` + Vulnerability string `json:"vulnerability"` + RawRego *[]byte + PreparedQuery *rego.PreparedEvalQuery +} + +type ResultData struct { +} + +type OpaEngine struct { + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData +} + +func filterFileListBySuffix(allFileList *[]string, filter string) *[]string { + fileList := make([]string, 0) + + for i := range *allFileList { + if strings.HasSuffix((*allFileList)[i], filter) { + fileList = append(fileList, (*allFileList)[i]) + } + } + return &fileList +} + +func (o *OpaEngine) LoadRegoFiles(policyPath string) error { + ruleCount := 0 + regoFileCount := 0 + metadataCount := 0 + + // Walk the file path and find all directories + dirList := make([]string, 0) + err := filepath.Walk(policyPath, func(filePath string, fileInfo os.FileInfo, err 
error) error {
+        if fileInfo != nil && fileInfo.IsDir() {
+            dirList = append(dirList, filePath)
+        }
+        return err
+    })
+
+    if len(dirList) == 0 {
+        return fmt.Errorf("no directories found for path %s", policyPath)
+    }
+
+    o.RegoFileMap = make(map[string][]byte)
+    o.RegoDataMap = make(map[string]*RegoData)
+
+    // Load rego data files from each dir
+    sort.Strings(dirList)
+    for i := range dirList {
+        metaFilename := filepath.Join(dirList[i], RegoMetadataFile)
+        var metadata []byte
+        metadata, err = ioutil.ReadFile(metaFilename)
+        if err != nil {
+            if !errors.Is(err, os.ErrNotExist) {
+                zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename))
+            }
+            continue
+        }
+
+        // Read metadata into struct
+        regoMetadata := make([]*RegoData, 0)
+        if err = json.Unmarshal(metadata, &regoMetadata); err != nil {
+            zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename))
+            continue
+        }
+
+        metadataCount++
+
+        // Find all .rego files within the directory
+        fileInfo, err := ioutil.ReadDir(dirList[i])
+        if err != nil {
+            zap.S().Error("error while finding rego files", zap.String("dir", dirList[i]))
+            continue
+        }
+
+        files := make([]string, 0)
+        for j := range fileInfo {
+            files = append(files, fileInfo[j].Name())
+        }
+
+        // Load rego data for all rego files
+        regoFileList := filterFileListBySuffix(&files, RegoFileSuffix)
+        regoFileCount += len(*regoFileList)
+        for j := range *regoFileList {
+            regoFilename := (*regoFileList)[j]
+            regoFullPath := filepath.Join(dirList[i], regoFilename)
+            var rawRegoData []byte
+            rawRegoData, err = ioutil.ReadFile(regoFullPath)
+            if err != nil {
+                zap.S().Warn("failed to load rego file", zap.String("file", regoFilename))
+                continue
+            }
+
+            _, ok := o.RegoFileMap[regoFullPath]
+            if ok {
+                // Already loaded this file, so continue
+                continue
+            }
+
+            // Set raw rego data
+            o.RegoFileMap[regoFullPath] = rawRegoData
+        }
+
+        for j := range regoMetadata {
+            //key := filepath.Join(dirList[i], regoMetadata[j].Rule)
+            //regoData := o.RegoFileMap[key]
+            // Apply templates if available
+            var buf bytes.Buffer
+            t := template.New("opa")
+            t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoMetadata[j].RuleTemplate+".rego")]))
+            t.Execute(&buf, regoMetadata[j].RuleTemplateArgs)
+
+            templateData := buf.Bytes()
+            regoMetadata[j].RawRego = &templateData
+            o.RegoDataMap[regoMetadata[j].Name] = regoMetadata[j]
+        }
+    }
+
+    ruleCount = len(o.RegoDataMap)
+    zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", ruleCount, regoFileCount, metadataCount)
+
+    return nil
+}
+
+func (o *OpaEngine) CompileRegoFiles() error {
+    for k := range o.RegoDataMap {
+        compiler, err := ast.CompileModules(map[string]string{
+            o.RegoDataMap[k].Rule: string(*(o.RegoDataMap[k].RawRego)),
+        })
+        if err != nil {
+            return err
+        }
+
+        r := rego.New(
+            rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Name),
+            rego.Compiler(compiler),
+        )
+
+        // Create a prepared query that can be evaluated.
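+        // PrepareForEval plans and compiles the query once up front, so each later
+        // call to Eval in Evaluate only has to supply the input document for this rule.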
+        query, err := r.PrepareForEval(o.Context)
+        if err != nil {
+            return err
+        }
+
+        o.RegoDataMap[k].PreparedQuery = &query
+    }
+
+    return nil
+}
+
+// Initialize Initializes the Opa engine
+// Handles loading all rules, filtering, compiling, and preparing for evaluation
+func (o *OpaEngine) Initialize(policyPath string) error {
+    o.Context = context.Background()
+
+    if err := o.LoadRegoFiles(policyPath); err != nil {
+        return err
+    }
+
+    err := o.CompileRegoFiles()
+    if err != nil {
+        return err
+    }
+
+    return nil
+}
+
+func (o *OpaEngine) Configure() error {
+    return nil
+}
+
+func (o *OpaEngine) GetResults() error {
+    return nil
+}
+
+func (o *OpaEngine) Release() error {
+    return nil
+}
+
+func (o *OpaEngine) Evaluate(inputData *interface{}) error {
+
+    sortedKeys := make([]string, len(o.RegoDataMap))
+    x := 0
+    for k := range o.RegoDataMap {
+        sortedKeys[x] = k
+        x++
+    }
+    sort.Strings(sortedKeys)
+
+    for _, k := range sortedKeys {
+        // Execute the prepared query.
+        rs, err := o.RegoDataMap[k].PreparedQuery.Eval(o.Context, rego.EvalInput(inputData))
+        // rs, err := r.Eval(o.Context)
+        if err != nil {
+            zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData))
+            continue
+        }
+
+        if len(rs) > 0 {
+            results := rs[0].Expressions[0].Value.([]interface{})
+            if len(results) > 0 {
+                r := o.RegoDataMap[k]
+                fmt.Printf("\n[%s] [%s] %s\n %s\n", r.Severity, r.RuleReferenceID, r.DisplayName, r.Vulnerability)
+            }
+            // fmt.Printf(" [%s] %v\n", k, results)
+        } else {
+            // fmt.Printf("No Result [%s] \n", k)
+        }
+        // Store results
+    }
+
+    return nil
+}
diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go
index 54482321e..f98a5c136 100644
--- a/pkg/runtime/executor.go
+++ b/pkg/runtime/executor.go
@@ -21,6 +21,7 @@ import (

 	iacProvider "github.com/accurics/terrascan/pkg/iac-providers"
 	"github.com/accurics/terrascan/pkg/notifications"
+	policy "github.com/accurics/terrascan/pkg/policy/opa"
 )

 // Executor object
@@ -94,7 +95,17 @@ func (e *Executor) Execute() (normalized interface{}, err error) {
 		return normalized, err
 	}

-	// evaluate policies
+	// Create a new policy engine based on IaC type
+	if e.iacType == "terraform" {
+		engine := policy.OpaEngine{}
+
+		err := engine.Initialize("/Users/wsana/go/src/accurics/terrascan/pkg/policies/accurics/v1/opa")
+		if err != nil {
+			return normalized, err
+		}
+
+		engine.Evaluate(&normalized)
+	}

 	// send notifications, if configured
 	if err = e.SendNotifications(normalized); err != nil {

From 79cb369da1fa22bddd0349e0ae7820ae8eba8655 Mon Sep 17 00:00:00 2001
From: Willie Sana
Date: Sun, 9 Aug 2020 11:37:19 -0700
Subject: [PATCH 109/188] Update Terrascan to use the latest policy format

- Adds initial AWS and Azure policy set
- Adds command line switch for policy path
---
 cmd/terrascan/main.go | 3 +-
 pkg/cli/run.go | 4 +-
 ....EncryptionandKeyManagement.High.0407.json | 13 ++
 ....EncryptionandKeyManagement.High.0408.json | 13 ++
 .../AWS.CloudFront.Logging.Medium.0567.json | 13 ++
 .../cloudfrontNoHTTPSTraffic.rego | 10 +
 .../cloudfrontNoLogging.rego | 21 ++
 .../cloudfrontNoSecureCiphers.rego | 19 ++
 .../AWS.CloudTrail.Logging.High.0399.json | 13 ++
 .../AWS.CloudTrail.Logging.Low.0559.json | 13 ++
 .../AWS.CloudTrail.Logging.Medium.0460.json | 13 ++
 .../cloudTrailLogNotEncrypted.rego | 9 +
 .../cloudTrailMultiRegionNotCreated.rego | 9 +
 .../aws/aws_cloudtrail/enableSNSTopic.rego | 6 +
 .../opa/rego/aws/aws_db_instance/.json | 13 ++
 .../rdsPubliclyAccessible.rego | 9 +
 .../AWS.IamUser.IAM.High.0390.json | 13 ++
.../noAccessKeyForRootAccount.rego | 20 ++ .../AWS.Iam.IAM.Low.0540.json | 14 ++ .../AWS.Iam.IAM.Medium.0454.json | 15 ++ .../AWS.Iam.IAM.Medium.0455.json | 15 ++ .../AWS.Iam.IAM.Medium.0456.json | 15 ++ .../AWS.Iam.IAM.Medium.0457.json | 15 ++ .../AWS.Iam.IAM.Medium.0458.json | 16 ++ .../AWS.Iam.IAM.Medium.0495.json | 16 ++ .../passwordMinLength.rego | 12 + .../passwordPolicyRequirement.rego | 9 + .../passwordRotateEvery90Days.rego | 9 + .../AWS.IamPolicy.IAM.High.0392.json | 13 ++ .../iamGrpPolicyWithFullAdminCntrl.rego | 51 +++++ .../AWS.IamPolicy.IAM.High.0392.json | 13 ++ .../iamPolicyWithFullAdminControl.rego | 50 +++++ .../AWS.IamUser.IAM.High.0387.json | 13 ++ .../AWS.IamUser.IAM.High.0388.json | 13 ++ .../rootUserNotContainMfaTypeHardware.rego | 34 +++ .../rootUserNotContainMfaTypeVirtual.rego | 35 +++ ....Instance.NetworkSecurity.Medium.0506.json | 13 ++ .../aws/aws_instance/instanceWithNoVpc.rego | 9 + ....EncryptionandKeyManagement.High.0412.json | 13 ++ .../aws_kinesis_stream.rego | 8 + .../AWS.KMS.Logging.High.0400.json | 13 ++ .../aws_kms_key/kmsKeyRotationDisabled.rego | 9 + ...hConfiguration.DataSecurity.High.0102.json | 13 ++ .../hardCodedKey.rego | 23 ++ ....EncryptionandKeyManagement.High.0405.json | 13 ++ .../AWS.S3Bucket.IAM.High.0370.json | 13 ++ .../AWS.S3Bucket.IAM.High.0377.json | 15 ++ .../AWS.S3Bucket.IAM.High.0378.json | 15 ++ .../AWS.S3Bucket.IAM.High.0379.json | 15 ++ .../AWS.S3Bucket.IAM.High.0381.json | 15 ++ ...WS.S3Bucket.NetworkSecurity.High.0417.json | 13 ++ .../aws/aws_s3_bucket/noS3BucketSseRules.rego | 9 + .../rego/aws/aws_s3_bucket/s3AclGrants.rego | 8 + .../s3BucketNoWebsiteIndexDoc.rego | 8 + .../aws_s3_bucket/s3VersioningMfaFalse.rego | 10 + .../AWS.IamPolicy.IAM.High.0374.json | 15 ++ .../AWS.S3Bucket.IAM.High.0371.json | 13 ++ .../AWS.S3Bucket.IAM.High.0372.json | 15 ++ .../actionsFromAllPrincipals.rego | 59 +++++ .../allowActionsFromAllPrincipals.rego | 51 +++++ ...curityGroup.NetworkSecurity.High.0094.json | 13 ++ .../unrestrictedIngressAccess.rego | 24 ++ .../aws_vpc/AWS.VPC.Logging.Medium.0470.json | 13 ++ .../aws_vpc/AWS.VPC.Logging.Medium.0471.json | 13 ++ .../opa/rego/aws/aws_vpc/defaultVpcExist.rego | 6 + .../aws/aws_vpc/vpcFlowLogsNotEnabled.rego | 27 +++ .../accurics.azure.CAM.162.json | 13 ++ .../azurerm_cosmosdb_account/noTags.rego | 9 + .../accurics.azure.EKM.156.json | 13 ++ .../checkDataDisksEncrypted.rego | 11 + .../accurics.azure.NPS.171.json | 19 ++ .../accurics.azure.NPS.172.json | 19 ++ .../accurics.azure.NPS.35.json | 19 ++ .../accurics.azure.NPS.36.json | 19 ++ .../accurics.azure.NPS.37.json | 19 ++ .../networkPortExposedPublic.rego | 73 ++++++ .../accurics.azure.LOG.151.json | 13 ++ .../accurics.azure.LOG.152.json | 13 ++ .../accurics.azure.LOG.155.json | 13 ++ .../connectionThrottling.rego | 10 + .../logConnections.rego | 10 + .../logRetention.rego | 14 ++ .../accurics.azure.BDR.163.json | 13 ++ .../accurics.azure.EKM.1.json | 13 ++ .../geoRedundancyDisabled.rego | 17 ++ .../sslEnforceDisabled.rego | 9 + .../accurics.azure.EKM.23.json | 13 ++ .../accurics.azure.NS.30.json | 15 ++ .../accurics.azure.NS.31.json | 15 ++ .../azurerm_redis_cache/nonSslEnabled.rego | 9 + .../publiclyAccessible.rego | 29 +++ .../accurics.azure.MON.157.json | 13 ++ .../checkAuditEnabled.rego | 36 +++ .../accurics.azure.NS.21.json | 15 ++ .../accurics.azure.NS.5.json | 15 ++ .../checkPublicAccessNotAllow.rego | 17 ++ .../opa/rego/azure/azurerm_sql_server/.json | 13 ++ .../sqlServerADAdminConfigured.rego | 21 ++ pkg/policy/interface.go | 2 +- 
pkg/policy/opa/constants.go | 8 +- pkg/policy/opa/opa_engine.go | 210 ++++++++++-------- pkg/runtime/executor.go | 31 +-- pkg/utils/path.go | 24 ++ 103 files changed, 1764 insertions(+), 117 deletions(-) create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego create mode 100755 
pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego create mode 100755 pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json create mode 100755 pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego create mode 100755 pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json create mode 100755 pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego create mode 100755 pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json create mode 100755 
pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 7cbaaf0e7..453c93961 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -38,6 +38,7 @@ func main() { iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") iacFilePath = flag.String("f", "", "IaC file path") iacDirPath = flag.String("d", "", "IaC directory path") + policyPath = flag.String("p", "", "Policy directory path") // cloud flags cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") @@ -64,6 +65,6 @@ func main() { } else { logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") - cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile) + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile, *policyPath) } } diff --git a/pkg/cli/run.go b/pkg/cli/run.go index e3e76d8f3..6d22beae1 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -24,11 +24,11 @@ import ( ) // 
Run executes terrascan in CLI mode -func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile string) { +func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, policyPath string) { // create a new runtime executor for processing IaC executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, - iacDirPath, configFile) + iacDirPath, configFile, policyPath) if err != nil { return } diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json new file mode 100755 index 000000000..e2f3a8a84 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudfrontNoHTTPSTraffic", + "file": "cloudfrontNoHTTPSTraffic.rego", + "ruleTemplate": "cloudfrontNoHTTPSTraffic", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use encrypted connection between CloudFront and origin server", + "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json new file mode 100755 index 000000000..3ee435f99 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudfrontNoSecureCiphers", + "file": "cloudfrontNoSecureCiphers.rego", + "ruleTemplate": "cloudfrontNoSecureCiphers", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Secure ciphers are not used in CloudFront distribution", + "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json new file mode 100755 index 000000000..a20956e92 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudfrontNoLogging", + "file": "cloudfrontNoLogging.rego", + "ruleTemplate": "cloudfrontNoLogging", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", + "ruleReferenceId": "AWS.CloudFront.Logging.Medium.0567", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego new file mode 100755 index 000000000..6073a927c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoHTTPSTraffic[retVal]{ + cloudfront = 
input.aws_cloudfront_distribution[_] + some i + orderedcachebehaviour = cloudfront.config.ordered_cache_behavior[i] + orderedcachebehaviour.viewer_protocol_policy == "allow-all" + traverse := sprintf("ordered_cache_behavior[%d].viewer_protocol_policy", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ordered_cache_behavior.viewer_protocol_policy", "AttributeDataType": "string", "Expected": "redirect-to-https", "Actual": orderedcachebehaviour.viewer_protocol_policy } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego new file mode 100755 index 000000000..dfd52a3a1 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoLogging.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + not cloudfront.config.logging_config + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + cloudfront.config.logging_config == [] + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego new file mode 100755 index 000000000..9159d825f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}cloudfrontNoSecureCiphers[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + certificate = cloudfront.config.viewer_certificate[i] + certificate.cloudfront_default_certificate = false + not minimumAllowedProtocolVersion(certificate.minimum_protocol_version) + traverse := sprintf("viewer_certificate[%d].minimum_protocol_version", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "viewer_certificate.minimum_protocol_version", "AttributeDataType": "string", "Expected": "TLSv1.2", "Actual": certificate.minimum_protocol_version } +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.1" +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.2" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json new file mode 100755 index 000000000..8c070cbad --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json @@ -0,0 +1,13 @@ +{ + "ruleName": 
"cloudTrailLogNotEncrypted", + "file": "cloudTrailLogNotEncrypted.rego", + "ruleTemplate": "cloudTrailLogNotEncrypted", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Cloud Trail Log Not Enabled", + "ruleReferenceId": "AWS.CloudTrail.Logging.High.0399", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json new file mode 100755 index 000000000..9fcf02b5f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_enableSNSTopic", + "file": "enableSNSTopic.rego", + "ruleTemplate": "enableSNSTopic", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure appropriate subscribers to each SNS topic", + "ruleReferenceId": "AWS.CloudTrail.Logging.Low.0559", + "category": "Logging", + "version": 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json new file mode 100755 index 000000000..ec9b6b080 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json @@ -0,0 +1,13 @@ +{ + "ruleName": "cloudTrailMultiRegionNotCreated", + "file": "cloudTrailMultiRegionNotCreated.rego", + "ruleTemplate": "cloudTrailMultiRegionNotCreated", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Cloud Trail Multi Region not enabled", + "ruleReferenceId": "AWS.CloudTrail.Logging.Medium.0460", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego new file mode 100755 index 000000000..ae019288e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailLogNotEncrypted.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}cloudTrailLogNotEncrypted[retVal]{ + cloud_trail = input.aws_cloudtrail[_] + cloud_trail.config.kms_key_id == null + + traverse = "kms_key_id" + retVal := { "Id": cloud_trail.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_key_id", "AttributeDataType": "string", "Expected": "", "Actual": cloud_trail.config.kms_key_id } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego new file mode 100755 index 000000000..e2a2c2afe --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/cloudTrailMultiRegionNotCreated.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}cloudTrailMultiRegionNotCreated[retVal]{ + cloud_trail = input.aws_cloudtrail[_] + cloud_trail.config.is_multi_region_trail == false + + traverse = "is_multi_region_trail" + retVal := { "Id": cloud_trail.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "is_multi_region_trail", "AttributeDataType": "bool", "Expected": true, "Actual": cloud_trail.config.is_multi_region_trail } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego b/pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego new file mode 100755 index 
000000000..fdd4cdbc0 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/enableSNSTopic.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}enableSNSTopic[sns.id] { + sns := input.aws_cloudtrail[_] + sns.config.sns_topic_name == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/.json b/pkg/policies/opa/rego/aws/aws_db_instance/.json new file mode 100755 index 000000000..06c786984 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/.json @@ -0,0 +1,13 @@ +{ + "ruleName": "rdsPubliclyAccessible", + "file": "rdsPubliclyAccessible.rego", + "ruleTemplate": "rdsPubliclyAccessible", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS Instance publicly_accessible flag is true", + "ruleReferenceId": "", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego new file mode 100755 index 000000000..601e8c85e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}rdsPubliclyAccessible[retVal] { + db := input.aws_db_instance[_] + db.config.publicly_accessible == true + traverse = "publicly_accessible" + retVal := { "Id": db.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": db.config.publicly_accessible } +} + diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json new file mode 100755 index 000000000..2e0b4d321 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json @@ -0,0 +1,13 @@ +{ + "ruleName": "noAccessKeyForRootAccount", + "file": "noAccessKeyForRootAccount.rego", + "ruleTemplate": "noAccessKeyForRootAccount", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "The root account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the root account be removed. Removing access keys associated with the root account limits vectors by which the account can be compromised. 
Additionally, removing the root access keys encourages the creation and use of role based accounts that are least privileged.", + "ruleReferenceId": "AWS.IamUser.IAM.High.0390", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego b/pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego new file mode 100755 index 000000000..fe6d4cefa --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/noAccessKeyForRootAccount.rego @@ -0,0 +1,20 @@ +package accurics + +{{.prefix}}noAccessKeyForRootAccount[retVal] { + access := input.aws_iam_access_key[_] + access.type == "aws_iam_access_key" + status = getStatus(access.config) + status == "Active" + access.config.user == "root" + traverse = "status" + retVal := { "Id": access.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "status", "AttributeDataType": "string", "Expected": "Inactive", "Actual": access.config.status } +} + +getStatus(config) = "Active" { + # defaults to Active + not config.status +} + +getStatus(config) = "Active" { + config.status == "Active" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json new file mode 100755 index 000000000..40b2ad60c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json @@ -0,0 +1,14 @@ +{ + "ruleName": "passwordRotateEvery90Days", + "file": "passwordRotateEvery90Days.rego", + "ruleTemplate": "passwordRotateEvery90Days", + "ruleTemplateArgs": { + "name": "passwordRotateEvery90Days", + "prefix": "" + }, + "severity": "LOW", + "description": "Reducing the password lifetime increases account resiliency against brute force login attempts", + "ruleReferenceId": "AWS.Iam.IAM.Low.0540", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json new file mode 100755 index 000000000..0be97abb7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireLowerCase", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireLowerCase", + "ruleTemplateArgs": { + "name": "passwordRequireLowerCase", + "prefix": "", + "required_parameter": "require_lowercase_characters" + }, + "severity": "MEDIUM", + "description": "Lower case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0454", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json new file mode 100755 index 000000000..30595871b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireNumber", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireNumber", + "ruleTemplateArgs": { + "name": "passwordRequireNumber", + "prefix": "", + "required_parameter": "require_numbers" + }, + "severity": "MEDIUM", + "description": "Number not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0455", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json new file mode 100755 index 000000000..a4e46cc3b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireSymbol", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireSymbol", + "ruleTemplateArgs": { + "name": "passwordRequireSymbol", + "prefix": "", + "required_parameter": "require_symbols" + }, + "severity": "MEDIUM", + "description": "Special symbols not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0456", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json new file mode 100755 index 000000000..dae9c9fb6 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json @@ -0,0 +1,15 @@ +{ + "ruleName": "passwordRequireUpperCase", + "file": "passwordPolicyRequirement.rego", + "ruleTemplate": "passwordRequireUpperCase", + "ruleTemplateArgs": { + "name": "passwordRequireUpperCase", + "prefix": "", + "required_parameter": "require_uppercase_characters" + }, + "severity": "MEDIUM", + "description": "Upper case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0457", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json new file mode 100755 index 000000000..46c09c988 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json @@ -0,0 +1,16 @@ +{ + "ruleName": "passwordRequireMinLength14", + "file": "passwordMinLength.rego", + "ruleTemplate": "passwordRequireMinLength14", + "ruleTemplateArgs": { + "name": "passwordRequireMinLength14", + "parameter": "minimum_password_length", + "prefix": "", + "value": 14 + }, + "severity": "MEDIUM", + "description": "Setting a lengthy password increases account resiliency against brute force login attempts", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0458", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json new file mode 100755 index 000000000..b5aa69d69 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json @@ -0,0 +1,16 @@ +{ + "ruleName": "passwordRequireMinLength", + "file": "passwordMinLength.rego", + "ruleTemplate": "passwordRequireMinLength", + "ruleTemplateArgs": { + "name": "passwordRequireMinLength", + "parameter": "minimum_password_length", + "prefix": "", + "value": 7 + }, + "severity": "MEDIUM", + "description": "Setting a lengthy password increases account resiliency against brute force login attempts", + "ruleReferenceId": "AWS.Iam.IAM.Medium.0495", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego new file mode 100755 index 000000000..de1f034d8 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordMinLength.rego @@ -0,0 +1,12 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + check_validity(password_policy.config, {{.value}}) == true + traverse = "{{.parameter}}" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "{{.parameter}}", "AttributeDataType": "int", "Expected": {{.value}}, "Actual": password_policy.config.{{.parameter}} } +} + +check_validity(p, v) = true { + p.{{.parameter}} < v +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego new file mode 100755 index 000000000..091302c3f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordPolicyRequirement.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + password_policy.config.{{.required_parameter}} == false + password_policy_id := password_policy.id + traverse = "{{.required_parameter}}" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": 
traverse, "Attribute": "{{.required_parameter}}", "AttributeDataType": "bool", "Expected": true, "Actual": password_policy.config.{{.required_parameter}} } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego new file mode 100755 index 000000000..5755a95e5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordRotateEvery90Days.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + password_policy.config.max_password_age > 90 + password_policy_id := password_policy.id + traverse = "max_password_age" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "max_password_age", "AttributeDataType": "int", "Expected": 90, "Actual": password_policy.config.max_password_age } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..ece948f53 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,13 @@ +{ + "ruleName": "iamGrpPolicyWithFullAdminCntrl", + "file": "iamGrpPolicyWithFullAdminCntrl.rego", + "ruleTemplate": "iamGrpPolicyWithFullAdminCntrl", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego b/pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego new file mode 100755 index 000000000..7baee45ce --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_group_policy/iamGrpPolicyWithFullAdminCntrl.rego @@ -0,0 +1,51 @@ +package accurics + +{{.prefix}}iamGrpPolicyWithFullAdminCntrl[retVal] { + iamUserMfa = input.aws_iam_group_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} diff --git a/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..d0f99213c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_iamPolicyWithFullAdminControl", + "file": "iamPolicyWithFullAdminControl.rego", + "ruleTemplate": "iamPolicyWithFullAdminControl", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego b/pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego new file mode 100755 index 000000000..1581177fb --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_policy/iamPolicyWithFullAdminControl.rego @@ -0,0 +1,50 @@ +package accurics + +{{.prefix}}iamPolicyWithFullAdminControl[retVal] { + iamUserMfa = input.aws_iam_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json new file mode 100755 index 000000000..706366e9f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json @@ -0,0 +1,13 @@ +{ + "ruleName": "rootUserNotContainMfaTypeHardware", + "file": "rootUserNotContainMfaTypeHardware.rego", + "ruleTemplate": "rootUserNotContainMfaTypeHardware", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure Hardware MFA device is enabled for the \"root\" account", + "ruleReferenceId": "AWS.IamUser.IAM.High.0387", + "category": "Identity and Access Management", + "version": 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json new file mode 100755 index 000000000..31623a9b5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json @@ -0,0 +1,13 @@ +{ + "ruleName": "rootUserNotContainMfaTypeVirtual", + "file": "rootUserNotContainMfaTypeVirtual.rego", + "ruleTemplate": "rootUserNotContainMfaTypeVirtual", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure Virtual MFA device is enabled for the \"root\" account", + "ruleReferenceId": "AWS.IamUser.IAM.High.0388", + 
"category": "Identity and Access Management", + "version": 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego new file mode 100755 index 000000000..e52bbe032 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeHardware.rego @@ -0,0 +1,34 @@ +package accurics + +{{.prefix}}rootUserNotContainMfaTypeHardware[iamUserMfa.id] { + iamUserMfa = input.aws_iam_user_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + check_role(statement, "sts:AssumeRole", "Allow") == true + root_check(iamUserMfa.config.user, "root") == true + mfa_check(statement.Principal.AWS, "[a-zA-Z]+[0-9]+") == true +} + + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +root_check(s, v) = true { + re_match(s, v) +} + +mfa_check(s, v) = true { + not re_match(v, s) +} + +check_role(s, a, e) = true { + s.Action == a + s.Effect == e +} diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego new file mode 100755 index 000000000..9fe281db4 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/rootUserNotContainMfaTypeVirtual.rego @@ -0,0 +1,35 @@ +package accurics + +{{.prefix}}rootUserNotContainMfaTypeVirtual[iamUserMfa_id] { + iamUserMfa = input.aws_iam_user_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + check_role(statement, "sts:AssumeRole", "Allow") == true + root_check(iamUserMfa.config.user, "root") == true + mfa_check(statement.Principal.AWS, ":mfa/") == true + iamUserMfa_id = iamUserMfa.id +} + + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +root_check(s, v) = true { + re_match(v, s) +} + +mfa_check(s, v) = true { + not re_match(v, s) +} + +check_role(s, a, e) = true { + s.Action == a + s.Effect == e +} diff --git a/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json new file mode 100755 index 000000000..9b05b3442 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json @@ -0,0 +1,13 @@ +{ + "ruleName": "instanceWithNoVpc", + "file": "instanceWithNoVpc.rego", + "ruleTemplate": "instanceWithNoVpc", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Instance should be configured in vpc. 
AWS VPCs provides the controls to facilitate a formal process for approving and testing all network connections and changes to the firewall and router configurations.", + "ruleReferenceId": "AWS.Instance.NetworkSecurity.Medium.0506", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego b/pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego new file mode 100755 index 000000000..f3c809b12 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_instance/instanceWithNoVpc.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}instanceWithNoVpc[retVal] { + instance := input.aws_instance[_] + not instance.config.vpc_security_group_ids + rc = "ewogICJhd3NfdnBjIjogewogICAgImFjY3VyaWNzX3ZwYyI6IHsKICAgICAgImNpZHJfYmxvY2siOiAiPGNpZHJfYmxvY2s+IiwKICAgICAgImVuYWJsZV9kbnNfc3VwcG9ydCI6ICI8ZW5hYmxlX2Ruc19zdXBwb3J0PiIsCiAgICAgICJlbmFibGVfZG5zX2hvc3RuYW1lcyI6ICI8ZW5hYmxlX2Ruc19ob3N0bmFtZXM+IgogICAgfQogIH0KfQ==" + traverse = "" + retVal := { "Id": instance.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json new file mode 100755 index 000000000..2d4242f06 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json @@ -0,0 +1,13 @@ +{ + "ruleName": "kinesisNotEncryptedWithKms", + "file": "aws_kinesis_stream.rego", + "ruleTemplate": "kinesisNotEncryptedWithKms", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Kinesis Streams and metadata are not protected", + "ruleReferenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0412", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego b/pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego new file mode 100755 index 000000000..d1415dbef --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_stream/aws_kinesis_stream.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}kinesisNotEncryptedWithKms[retVal] { + stream = input.aws_kinesis_stream[_] + stream.config.kms_key_id == null + traverse = "" + retVal := { "Id": stream.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_key_id", "AttributeDataType": "string", "Expected": "", "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json new file mode 100755 index 000000000..b4074468f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json @@ -0,0 +1,13 @@ +{ + "ruleName": "kmsKeyRotationDisabled", + "file": "kmsKeyRotationDisabled.rego", + "ruleTemplate": "kmsKeyRotationDisabled", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure rotation for customer created CMKs is enabled", + "ruleReferenceId": "AWS.KMS.Logging.High.0400", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego 
b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego new file mode 100755 index 000000000..ef4907b52 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyRotationDisabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}kmsKeyRotationDisabled[retVal] { + kms_key = input.aws_kms_key[_] + kms_key.config.is_enabled == true + kms_key.config.enable_key_rotation == false + traverse = "enable_key_rotation" + retVal := { "Id": kms_key.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "enable_key_rotation", "AttributeDataType": "bool", "Expected": true, "Actual": kms_key.config.enable_key_rotation } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json new file mode 100755 index 000000000..d5c4b6dd5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json @@ -0,0 +1,13 @@ +{ + "ruleName": "hardCodedKey", + "file": "hardCodedKey.rego", + "ruleTemplate": "hardCodedKey", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Avoid using base64 encoded private keys as part of config", + "ruleReferenceId": "AWS.LaunchConfiguration.DataSecurity.High.0102", + "category": "Data Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego new file mode 100755 index 000000000..8fc594611 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedKey.rego @@ -0,0 +1,23 @@ +package accurics + +{{.prefix}}hardCodedKey[res.id] { + res = input.aws_launch_configuration[_] + value = base64NullCheck(res.config.user_data_base64) + contains(value, "LS0tLS1CR") +} + +{{.prefix}}hardCodeKey[res.id]{ + res = input.aws_launch_configuration[_] + value = base64NullCheck(res.config.user_data_base64) + contains(value, "LS0tLS1CR") +} + +base64NullCheck(s) = result { + s == null + result := base64.decode("e30=") +} + +base64NullCheck(s) = result { + s != null + result := base64.decode(s) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json new file mode 100755 index 000000000..a758e0b44 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json @@ -0,0 +1,13 @@ +{ + "ruleName": "noS3BucketSseRules", + "file": "noS3BucketSseRules.rego", + "ruleTemplate": "noS3BucketSseRules", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that S3 Buckets have server side encryption at rest enabled to protect sensitive data.", + "ruleReferenceId": "AWS.S3Bucket.EncryptionandKeyManagement.High.0405", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json new file mode 100755 index 000000000..1a38cf231 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json @@ -0,0 +1,13 @@ +{ + "ruleName": "s3VersioningMfaFalse", + 
"file": "s3VersioningMfaFalse.rego", + "ruleTemplate": "s3VersioningMfaFalse", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Enabling MFA delete for versioning is a good way to add extra protection to sensitive files stored in buckets.aws s3api put-bucket-versioning --bucket bucketname --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa your-mfa-serial-number mfa-code", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0370", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json new file mode 100755 index 000000000..4a008b21e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allUsersReadAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "allUsersReadAccess", + "ruleTemplateArgs": { + "access": "public-read", + "name": "allUsersReadAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0377", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json new file mode 100755 index 000000000..b9b8584ed --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json @@ -0,0 +1,15 @@ +{ + "ruleName": "authUsersReadAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "authUsersReadAccess", + "ruleTemplateArgs": { + "access": "authenticated-read", + "name": "authUsersReadAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0378", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json new file mode 100755 index 000000000..a8286931b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allUsersWriteAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "allUsersWriteAccess", + "ruleTemplateArgs": { + "access": "public-read-write", + "name": "allUsersWriteAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0379", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json new file mode 100755 index 000000000..e413dd20e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allUsersReadWriteAccess", + "file": "s3AclGrants.rego", + "ruleTemplate": "allUsersReadWriteAccess", + "ruleTemplateArgs": { + "access": "public-read-write", + "name": "allUsersReadWriteAccess", + "prefix": "" + }, + 
"severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0381", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json new file mode 100755 index 000000000..1bc2de912 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json @@ -0,0 +1,13 @@ +{ + "ruleName": "s3BucketNoWebsiteIndexDoc", + "file": "s3BucketNoWebsiteIndexDoc.rego", + "ruleTemplate": "s3BucketNoWebsiteIndexDoc", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that there are not any static websites being hosted on buckets you aren't aware of", + "ruleReferenceId": "AWS.S3Bucket.NetworkSecurity.High.0417", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego new file mode 100755 index 000000000..2661fa0a9 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}noS3BucketSseRules[retVal] { + bucket := input.aws_s3_bucket[_] + bucket.config.server_side_encryption_configuration == [] + rc = "ewogICJzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24iOiB7CiAgICAicnVsZSI6IHsKICAgICAgImFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCI6IHsKICAgICAgICAic3NlX2FsZ29yaXRobSI6ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0=" + traverse = "" + retVal := { "Id": bucket.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "server_side_encryption_configuration", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego new file mode 100755 index 000000000..fc83f4a0f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + bucket := input.aws_s3_bucket[_] + bucket.config.acl == "{{.access}}" + traverse = "acl" + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "acl", "AttributeDataType": "string", "Expected": "private", "Actual": bucket.config.acl } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego new file mode 100755 index 000000000..7ee714f1e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}s3BucketNoWebsiteIndexDoc[retVal] { + bucket := input.aws_s3_bucket[_] + count(bucket.config.website) > 0 + traverse = "website" + retVal := { "Id": bucket.id, "ReplaceType": "delete", "CodeType": "block", "Traverse": traverse, "Attribute": "website", "AttributeDataType": "block", "Expected": null, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego new file mode 100755 index 000000000..d2c28b5b5 --- /dev/null +++ 
b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}s3VersioningMfaFalse[retVal] { + bucket := input.aws_s3_bucket[_] + some i + mfa := bucket.config.versioning[i] + mfa.mfa_delete == false + traverse := sprintf("versioning[%d].mfa_delete", [i]) + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "versioning.mfa_delete", "AttributeDataType": "bool", "Expected": true, "Actual": mfa.mfa_delete } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json new file mode 100755 index 000000000..2f36ad689 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allowListActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "ruleTemplate": "allowListActionFromAllPrncpls", + "ruleTemplateArgs": { + "Action": "s3:List", + "name": "allowListActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.IamPolicy.IAM.High.0374", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json new file mode 100755 index 000000000..e6cf3edc3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json @@ -0,0 +1,13 @@ +{ + "ruleName": "allowActionsFromAllPrincipals", + "file": "allowActionsFromAllPrincipals.rego", + "ruleTemplate": "allowActionsFromAllPrincipals", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0371", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json new file mode 100755 index 000000000..bf5b87e94 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json @@ -0,0 +1,15 @@ +{ + "ruleName": "allowDeleteActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "ruleTemplate": "allowDeleteActionFromAllPrncpls", + "ruleTemplateArgs": { + "Action": "s3:Delete", + "name": "allowDeleteActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "ruleReferenceId": "AWS.S3Bucket.IAM.High.0372", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego new file mode 100755 index 000000000..2d77a955d --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/actionsFromAllPrincipals.rego @@ -0,0 +1,59 @@ +package 
accurics + +{{.prefix}}{{.name}}[retVal] { + s3bucket = input.aws_s3_bucket_policy[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "{{.Action}}") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +{{.prefix}}{{.name}}[retVal] { + s3bucket = input.aws_s3_bucket[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "{{.Action}}") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "{{.Action}}") == true + value := object.union(statement, { "Principal": "##principal##" }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "{{.Action}}") + value := statement +} + +policyCheck(s, p, e ,a) = true { + action := is_array(s.Action) + s.Effect == e + s.Principal == p + re_match(a, s.Action[_]) +} + +policyCheck(s, p, e ,a) = true { + action := is_string(s.Action) + s.Effect == e + s.Principal == p + re_match(a, s.Action) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego new file mode 100755 index 000000000..7d6feb19b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/allowActionsFromAllPrincipals.rego @@ -0,0 +1,51 @@ +package accurics + +{{.prefix}}allowActionsFromAllPrincipals[retVal] { + s3bucket = input.aws_s3_bucket_policy[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +{{.prefix}}allowActionsFromAllPrincipals[retVal] { + s3bucket = input.aws_s3_bucket[_] + policy := json_unmarshal(s3bucket.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": s3bucket.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + 
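+# Helper rules for the two checks above: json_unmarshal tolerates a null bucket policy by
+# falling back to "{}", replace_if_needed rewrites only the offending statements (masking
+# Principal and Action with the "##principal##" / "##s3:action##" placeholders in the suggested
+# policy), and policyCheck flags statements whose Action, Effect and Principal are "*", "Allow", "*".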
+json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + value := object.union(statement, { "Principal": "##principal##", "Action": "##s3:action##" }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +policyCheck(s, a, e ,p) = true { + s.Action == a + s.Effect == e + s.Principal == p +} diff --git a/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json new file mode 100755 index 000000000..e53f8c6d9 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json @@ -0,0 +1,13 @@ +{ + "ruleName": "unrestrictedIngressAccess", + "file": "unrestrictedIngressAccess.rego", + "ruleTemplate": "unrestrictedIngressAccess", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": " It is recommended that no security group allows unrestricted ingress access", + "ruleReferenceId": "AWS.SecurityGroup.NetworkSecurity.High.0094", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego b/pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego new file mode 100755 index 000000000..77c643087 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_security_group/unrestrictedIngressAccess.rego @@ -0,0 +1,24 @@ +package accurics + +{{.prefix}}unrestrictedIngressAccess[retVal] { + security_group = input.aws_security_group[_] + some i + ingress = security_group.config.ingress[i] + ingress.cidr_blocks[j] == "0.0.0.0/0" + ingress.from_port == 0 + ingress.to_port == 0 + ingress.protocol == "-1" + expected := [ item | item := validate_cidr(ingress.cidr_blocks[_]) ] + traverse := sprintf("ingress[%d].cidr_blocks", [i]) + retVal := { "Id": security_group.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ingress.cidr_blocks", "AttributeDataType": "list", "Expected": expected, "Actual": ingress.cidr_blocks } +} + +validate_cidr(cidr) = value { + cidr == "0.0.0.0/0" + value := "" +} + +validate_cidr(cidr) = value { + cidr != "0.0.0.0/0" + value := cidr +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json new file mode 100755 index 000000000..d44940e99 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json @@ -0,0 +1,13 @@ +{ + "ruleName": "vpcFlowLogsNotEnabled", + "file": "vpcFlowLogsNotEnabled.rego", + "ruleTemplate": "vpcFlowLogsNotEnabled", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure VPC flow logging is enabled in all VPCs", + "ruleReferenceId": "AWS.VPC.Logging.Medium.0470", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json new file mode 100755 index 000000000..8c22bcc45 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json @@ -0,0 +1,13 @@ +{ + "ruleName": "defaultVpcExist", + 
"file": "defaultVpcExist.rego", + "ruleTemplate": "defaultVpcExist", + "ruleTemplateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Avoid creating resources in default VPC", + "ruleReferenceId": "AWS.VPC.Logging.Medium.0471", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego b/pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego new file mode 100755 index 000000000..90cbc4855 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/defaultVpcExist.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}defaultVpcExist[vpc.id] { + vpc = input.aws_vpc[_] + vpc.config.is_default == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego b/pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego new file mode 100755 index 000000000..64e497146 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_vpc/vpcFlowLogsNotEnabled.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}vpcFlowLogsNotEnabled[retVal] { + vpc := input.aws_vpc[_] + vpc_input := input + vpc.type == "aws_vpc" + + not flowLogExist(vpc, vpc_input) + + rc = "cmVzb3VyY2UgImF3c19mbG93X2xvZyIgIiMjcmVzb3VyY2VfbmFtZSMjIiB7CiAgdnBjX2lkICAgICAgICAgID0gIiR7YXdzX3ZwYy4jI3Jlc291cmNlX25hbWUjIy5pZH0iCiAgaWFtX3JvbGVfYXJuICAgID0gIiMjYXJuOmF3czppYW06OjExMTExMTExMTExMTpyb2xlL3NhbXBsZV9yb2xlIyMiCiAgbG9nX2Rlc3RpbmF0aW9uID0gIiR7YXdzX3MzX2J1Y2tldC4jI3Jlc291cmNlX25hbWUjIy5hcm59IgogIHRyYWZmaWNfdHlwZSAgICA9ICJBTEwiCgogIHRhZ3MgPSB7CiAgICBHZW5lcmF0ZWRCeSA9ICJBY2N1cmljcyIKICAgIFBhcmVudFJlc291cmNlSWQgPSAiYXdzX3ZwYy4jI3Jlc291cmNlX25hbWUjIyIKICB9Cn0KCnJlc291cmNlICJhd3NfczNfYnVja2V0IiAiIyNyZXNvdXJjZV9uYW1lIyMiIHsKICBidWNrZXQgPSAiIyNyZXNvdXJjZV9uYW1lIyNfZmxvd19sb2dfczNfYnVja2V0IgogIGFjbCAgICA9ICJwcml2YXRlIgogIGZvcmNlX2Rlc3Ryb3kgPSB0cnVlCgogIHZlcnNpb25pbmcgewogICAgZW5hYmxlZCA9IHRydWUKICAgIG1mYV9kZWxldGUgPSB0cnVlCiAgfQoKICBzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24gewogICAgcnVsZSB7CiAgICAgIGFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCB7CiAgICAgICAgc3NlX2FsZ29yaXRobSA9ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0KCnJlc291cmNlICJhd3NfczNfYnVja2V0X3BvbGljeSIgIiMjcmVzb3VyY2VfbmFtZSMjIiB7CiAgYnVja2V0ID0gIiR7YXdzX3MzX2J1Y2tldC4jI3Jlc291cmNlX25hbWUjIy5pZH0iCgogIHBvbGljeSA9IDw8UE9MSUNZCnsKICAiVmVyc2lvbiI6ICIyMDEyLTEwLTE3IiwKICAiU3RhdGVtZW50IjogWwogICAgewogICAgICAiU2lkIjogIiMjcmVzb3VyY2VfbmFtZSMjLXJlc3RyaWN0LWFjY2Vzcy10by11c2Vycy1vci1yb2xlcyIsCiAgICAgICJFZmZlY3QiOiAiQWxsb3ciLAogICAgICAiUHJpbmNpcGFsIjogWwogICAgICAgIHsKICAgICAgICAgICJBV1MiOiBbCiAgICAgICAgICAgICJhcm46YXdzOmlhbTo6IyNhY291bnRfaWQjIzpyb2xlLyMjcm9sZV9uYW1lIyMiLAogICAgICAgICAgICAiYXJuOmF3czppYW06OiMjYWNvdW50X2lkIyM6dXNlci8jI3VzZXJfbmFtZSMjIgogICAgICAgICAgXQogICAgICAgIH0KICAgICAgXSwKICAgICAgIkFjdGlvbiI6ICJzMzpHZXRPYmplY3QiLAogICAgICAiUmVzb3VyY2UiOiAiYXJuOmF3czpzMzo6OiR7YXdzX3MzX2J1Y2tldC4jI3Jlc291cmNlX25hbWUjIy5pZH0vKiIKICAgIH0KICBdCn0KUE9MSUNZCn0=" + decode_rc = base64.decode(rc) + replaced_vpc_id := replace(decode_rc, "##resource_name##", vpc.name) + + traverse = "" + retVal := { "Id": vpc.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": base64.encode(replaced_vpc_id), "Actual": null } +} + +flowLogExist(vpc, vpc_input) = exists { + flow_log_vpcs_set := { vpc_id | input.aws_flow_log[i].type == "aws_flow_log"; vpc_id := input.aws_flow_log[i].config.vpc_id } + flow_log_vpcs_set[vpc.id] + exists = true +} else = exists { + flow_log_tags_set := { resource_id | 
input.aws_flow_log[i].type == "aws_flow_log"; resource_id := input.aws_flow_log[i].config.tags.ParentResourceId } + vpc_name := sprintf("aws_vpc.%s", [vpc.name]) + flow_log_tags_set[vpc_name] + exists = true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json new file mode 100755 index 000000000..b5a48fb4e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_noTags", + "file": "noTags.rego", + "ruleTemplate": "noTags", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that Cosmos DB Account has an associated tag", + "ruleReferenceId": "accurics.azure.CAM.162", + "category": "Cloud Assets Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego new file mode 100755 index 000000000..ba1f113a9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/noTags.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}noTags[retVal] { + cosmos := input.azurerm_cosmosdb_account[_] + cosmos.config.tags == null + + rc := "ewogICJ0YWdzIjogewogICAgImFkZGVkX2J5IjogImFjY3VyaWNzIgogIH0KfQ==" + retVal := { "Id": cosmos.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": "", "Attribute": "", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json b/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json new file mode 100755 index 000000000..6ea0f84ce --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_checkDataDisksEncrypted", + "file": "checkDataDisksEncrypted.rego", + "ruleTemplate": "checkDataDisksEncrypted", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that 'OS disk' are encrypted", + "ruleReferenceId": "accurics.azure.EKM.156", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego b/pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego new file mode 100755 index 000000000..9b561d35f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_managed_disk/checkDataDisksEncrypted.rego @@ -0,0 +1,11 @@ +package accurics + +{{.prefix}}checkDataDisksEncrypted[retVal] { + managed_disk := input.azurerm_managed_disk[_] + some i + encryption_settings = managed_disk.config.encryption_settings[i] + encryption_settings.enabled == false + + traverse := sprintf("encryption_settings[%d].enabled", [i]) + retVal := { "Id": managed_disk.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encryption_settings.enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": encryption_settings.enabled } +} diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json new file mode 100755 index 000000000..0b6e1be26 --- /dev/null +++ 
b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort3389ExposedPublicEntire", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 0, + "evalHosts": true, + "name": "networkPort3389ExposedPublicEntire", + "numberOfHosts": 1, + "portNumber": 3389, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "Remote Desktop (TCP:3389) is exposed to the entire public internet", + "ruleReferenceId": "accurics.azure.NPS.171", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json new file mode 100755 index 000000000..9a1ffd52d --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort22ExposedPublicEntire", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 0, + "evalHosts": true, + "name": "networkPort22ExposedPublicEntire", + "numberOfHosts": 1, + "portNumber": 22, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "SSH (TCP:22) is exposed to the entire public internet", + "ruleReferenceId": "accurics.azure.NPS.172", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json new file mode 100755 index 000000000..001a10bd5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort9090ExposedPublicWide", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 1, + "evalHosts": false, + "name": "networkPort9090ExposedPublicWide", + "numberOfHosts": 27, + "portNumber": 9090, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "CiscoSecure, WebSM (TCP:9090) is exposed to the wide public internet", + "ruleReferenceId": "accurics.azure.NPS.35", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json new file mode 100755 index 000000000..8a6b3c74f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort3389ExposedPublicWide", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 1, + "evalHosts": false, + "name": "networkPort3389ExposedPublicWide", + "numberOfHosts": 27, + "portNumber": 3389, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "Remote Desktop (TCP:3389) is exposed to the wide public internet", + "ruleReferenceId": "accurics.azure.NPS.36", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json new file mode 100755 index 000000000..4035cbcd7 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json @@ -0,0 +1,19 @@ +{ + "ruleName": "reme_networkPort22ExposedPublicWide", + "file": "networkPortExposedPublic.rego", + "ruleTemplate": "networkPortExposedPublic", + "ruleTemplateArgs": { + "endLimit": 1, + "evalHosts": false, + "name": "networkPort22ExposedPublicWide", + "numberOfHosts": 27, + "portNumber": 22, + "prefix": "reme_", + "protocol": "TCP" + }, + "severity": "HIGH", + "description": "SSH (TCP:22) is exposed to the wide public internet", + "ruleReferenceId": "accurics.azure.NPS.37", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego new file mode 100755 index 000000000..311289687 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/networkPortExposedPublic.rego @@ -0,0 +1,73 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + sg = input.azurerm_network_security_rule[_] + sg.config.access == "Allow" + sg.config.direction == "Inbound" + checkScopeIsPublic(sg.config.source_address_prefix) + checkPort(sg.config, "{{.portNumber}}") + checkProtocol(sg.config.protocol) + + traverse := "source_address_prefix" + retVal := { "Id": sg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "source_address_prefix", "AttributeDataType": "string", "Expected": "", "Actual": sg.config.source_address_prefix } +} + +{{.prefix}}{{.name}}[retVal] { + nsg = input.azurerm_network_security_group[_] + some i + sg = nsg.config.security_rule[i] + sg.access == "Allow" + sg.direction == "Inbound" + checkScopeIsPublic(sg.source_address_prefix) + checkPort(sg, "{{.portNumber}}") + checkProtocol(sg.protocol) + + traverse := sprintf("security_rule[%d].source_address_prefix", [i]) + retVal := { "Id": nsg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "security_rule.source_address_prefix", "AttributeDataType": "string", "Expected": "", "Actual": sg.source_address_prefix } +} + +scopeIsPrivate(scope) { + private_ips = ["10.0.0.0/8", "192.168.0.0/16", "172.16.0.0/12"] + net.cidr_contains(private_ips[_], scope) +} + +checkScopeIsPublic(val) = true { + glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + not scopeIsPrivate(val) + hosts = split(val, "/") + to_number(hosts[1]) < {{.numberOfHosts}} + to_number(hosts[1]) >= {{.endLimit}} +} + +checkScopeIsPublic(val) = true { + glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + not scopeIsPrivate(val) + hosts = split(val, "/") + not hosts[1] + {{.evalHosts}} +} + +checkScopeIsPublic(val) = true { + not glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + val == "*" + {{.evalHosts}} +} + +checkScopeIsPublic(val) = true { + not glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + val == "Internet" + {{.evalHosts}} +} + +checkPort(obj, val) = true { + obj.destination_port_range == val +} + +checkPort(obj, val) = true { + obj.source_port_range == val +} + +checkProtocol(proto) { + protocols = ["{{.protocol}}", "*"] + upper(proto) == protocols[_] +} diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json 
b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json new file mode 100755 index 000000000..dacf226b5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_connectionThrottling", + "file": "connectionThrottling.rego", + "ruleTemplate": "connectionThrottling", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure server parameter 'connection_throttling' is set to 'ON' for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.LOG.151", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json new file mode 100755 index 000000000..54c058d46 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_logConnections", + "file": "logConnections.rego", + "ruleTemplate": "logConnections", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure server parameter 'log_connections' is set to 'ON' for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.LOG.152", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json new file mode 100755 index 000000000..052a928b9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_logRetention", + "file": "logRetention.rego", + "ruleTemplate": "logRetention", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure server parameter 'log_retention_days' is greater than 3 days for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.LOG.155", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego new file mode 100755 index 000000000..54beb31fd --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}connectionThrottling[retVal] { + psql_config := input.azurerm_postgresql_configuration[_] + psql_config.config.name == "connection_throttling" + psql_config.config.value != "on" + + traverse = "value" + retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "on", "Actual": psql_config.config.value } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego new file mode 100755 index 000000000..dfc2dc516 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}logConnections[retVal] { + psql_config := input.azurerm_postgresql_configuration[_] + 
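+  # Match the "log_connections" server parameter and flag it when its value is anything other
+  # than "on"; retVal below carries the remediation (edit "value" to "on").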
psql_config.config.name == "log_connections" + psql_config.config.value != "on" + + traverse = "value" + retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "on", "Actual": psql_config.config.value } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego new file mode 100755 index 000000000..ce6644b5e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego @@ -0,0 +1,14 @@ +package accurics + +{{.prefix}}logRetention[retVal] { + psql_config := input.azurerm_postgresql_configuration[_] + psql_config.config.name == "log_retention_days" + not checkValid(psql_config.config.value) + + traverse = "value" + retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "4", "Actual": psql_config.config.value } +} + +checkValid(val) = true { + val == ["4", "5", "6", "7"][_] +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json new file mode 100755 index 000000000..f00c25c6f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_geoRedundancyDisabled", + "file": "geoRedundancyDisabled.rego", + "ruleTemplate": "geoRedundancyDisabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Geo Redundant Backups is enabled on PostgreSQL", + "ruleReferenceId": "accurics.azure.BDR.163", + "category": "Backup and Disaster Recovery", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json new file mode 100755 index 000000000..5b7d411b4 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_sslEnforceDisabled", + "file": "sslEnforceDisabled.rego", + "ruleTemplate": "sslEnforceDisabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure 'Enforce SSL connection' is set to 'ENABLED' for PostgreSQL Database Server", + "ruleReferenceId": "accurics.azure.EKM.1", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego new file mode 100755 index 000000000..4e1bf8305 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/geoRedundancyDisabled.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}geoRedundancyDisabled[retVal] { + psql_server := input.azurerm_postgresql_server[_] + psql_server.config.geo_redundant_backup_enabled != true + + traverse = "geo_redundant_backup_enabled" + retVal := { "Id": psql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "geo_redundant_backup_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": 
psql_server.config.geo_redundant_backup_enabled } +} + +{{.prefix}}geoRedundancyDisabled[retVal] { + psql_server := input.azurerm_postgresql_server[_] + object.get(psql_server.config, "geo_redundant_backup_enabled", "undefined") == "undefined" + + traverse = "geo_redundant_backup_enabled" + retVal := { "Id": psql_server.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "geo_redundant_backup_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego new file mode 100755 index 000000000..b69f9771e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/sslEnforceDisabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}sslEnforceDisabled[retVal] { + psql_server := input.azurerm_postgresql_server[_] + psql_server.config.ssl_enforcement_enabled == false + + traverse = "ssl_enforcement_enabled" + retVal := { "Id": psql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ssl_enforcement_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": psql_server.config.ssl_enforcement_enabled } +} diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json new file mode 100755 index 000000000..f25530c3e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_nonSslEnabled", + "file": "nonSslEnabled.rego", + "ruleTemplate": "nonSslEnabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that the Redis Cache accepts only SSL connections", + "ruleReferenceId": "accurics.azure.EKM.23", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json new file mode 100755 index 000000000..134fb0295 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json @@ -0,0 +1,15 @@ +{ + "ruleName": "reme_entirelyAccessible", + "file": "publiclyAccessible.rego", + "ruleTemplate": "publiclyAccessible", + "ruleTemplateArgs": { + "isEntire": true, + "name": "entirelyAccessible", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure there are no firewall rules allowing unrestricted access to Redis from the Internet", + "ruleReferenceId": "accurics.azure.NS.30", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json new file mode 100755 index 000000000..24ad82625 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json @@ -0,0 +1,15 @@ +{ + "ruleName": "reme_publiclyAccessible", + "file": "publiclyAccessible.rego", + "ruleTemplate": "publiclyAccessible", + "ruleTemplateArgs": { + "isEntire": false, + "name": "publiclyAccessible", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure there are no firewall rules allowing unrestricted access to Redis from other Azure sources", + 
"ruleReferenceId": "accurics.azure.NS.31", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego new file mode 100755 index 000000000..791e54abf --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/nonSslEnabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}nonSslEnabled[retVal] { + redis := input.azurerm_redis_cache[_] + redis.config.enable_non_ssl_port == true + + traverse = "enable_non_ssl_port" + retVal := { "Id": redis.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "enable_non_ssl_port", "AttributeDataType": "boolean", "Expected": false, "Actual": redis.config.enable_non_ssl_port } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego b/pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego new file mode 100755 index 000000000..77a79181a --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/publiclyAccessible.rego @@ -0,0 +1,29 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + redis := input.azurerm_redis_firewall_rule[_] + redis.config.start_ip == "0.0.0.0" + redis.config.end_ip == "0.0.0.0" + {{.isEntire}} + retVal := { "Id": redis.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} + +{{.prefix}}{{.name}}[retVal] { + redis := input.azurerm_redis_firewall_rule[_] + redis.config.start_ip != "0.0.0.0" + checkScopeIsPublic(redis.config.start_ip) + redis.config.end_ip != "0.0.0.0" + checkScopeIsPublic(redis.config.end_ip) + not {{.isEntire}} + retVal := { "Id": redis.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} + +scopeIsPrivate(scope) { + private_ips = ["10.0.0.0/8", "192.168.0.0/16", "172.16.0.0/12"] + net.cidr_contains(private_ips[_], scope) +} + +checkScopeIsPublic(val) = true { + glob.match("[0-9]*.[0-9]*.[0-9]*.*", [], val) + not scopeIsPrivate(val) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json b/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json new file mode 100755 index 000000000..9c3d806ad --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_checkAuditEnabled", + "file": "checkAuditEnabled.rego", + "ruleTemplate": "checkAuditEnabled", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that 'Threat Detection' is enabled for Azure SQL Database", + "ruleReferenceId": "accurics.azure.MON.157", + "category": "Monitoring", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego new file mode 100755 index 000000000..33ee467b6 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_database/checkAuditEnabled.rego @@ -0,0 +1,36 @@ +package accurics + +{{.prefix}}checkAuditEnabled[retVal] { + sql_db_resource := input.azurerm_sql_database[_] + some i + threat_detection_policy = sql_db_resource.config.threat_detection_policy[i] + threat_detection_policy.state == 
"Disabled" + + traverse := sprintf("threat_detection_policy[%d].state", [i]) + retVal := { "Id": sql_db_resource.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "threat_detection_policy.state", "AttributeDataType": "string", "Expected": "Enabled", "Actual": threat_detection_policy.state } +} + +{{.prefix}}checkAuditEnabled[retVal] { + sql_db_resource := input.azurerm_sql_database[_] + object.get(sql_db_resource.config, "threat_detection_policy", "undefined") == "undefined" + + rc := "ewogICJ0aHJlYXRfZGV0ZWN0aW9uX3BvbGljeSI6IHsKICAgICJzdGF0ZSI6ICJFbmFibGVkIiwKICAgICJzdG9yYWdlX2FjY291bnRfYWNjZXNzX2tleSI6ICIke2F6dXJlcm1fc3RvcmFnZV9hY2NvdW50LiMjcmVzb3VyY2VfbmFtZSMjLnByaW1hcnlfYWNjZXNzX2tleX0iLAogICAgInN0b3JhZ2VfZW5kcG9pbnQiOiAiJHthenVyZXJtX3N0b3JhZ2VfYWNjb3VudC4jI3Jlc291cmNlX25hbWUjIy5wcmltYXJ5X2Jsb2JfZW5kcG9pbnR9IiwKICAgICJ1c2Vfc2VydmVyX2RlZmF1bHQiOiAiRW5hYmxlZCIKICB9Cn0=" + decode_rc = base64.decode(rc) + storage_account := input.azurerm_storage_account[0] + replaced_resource_name := replace(decode_rc, "##resource_name##", storage_account.name) + + retVal := { "Id": sql_db_resource.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "", "AttributeDataType": "block", "Expected": base64.encode(replaced_resource_name), "Actual": null } +} + +# create storage_account TODO +# {{.prefix}}checkAuditEnabled[retVal] { +# sql_db_resource := input.azurerm_sql_database[_] +# object.get(sql_db_resource.config, "threat_detection_policy", "undefined") == "undefined" + +# rc := "ewogICJ0aHJlYXRfZGV0ZWN0aW9uX3BvbGljeSI6IHsKICAgICJzdGF0ZSI6ICJFbmFibGVkIiwKICAgICJzdG9yYWdlX2FjY291bnRfYWNjZXNzX2tleSI6ICIke2F6dXJlcm1fc3RvcmFnZV9hY2NvdW50LiMjcmVzb3VyY2VfbmFtZSMjLnByaW1hcnlfYWNjZXNzX2tleX0iLAogICAgInN0b3JhZ2VfZW5kcG9pbnQiOiAiJHthenVyZXJtX3N0b3JhZ2VfYWNjb3VudC4jI3Jlc291cmNlX25hbWUjIy5wcmltYXJ5X2Jsb2JfZW5kcG9pbnR9IiwKICAgICJ1c2Vfc2VydmVyX2RlZmF1bHQiOiAiRW5hYmxlZCIKICB9Cn0=" +# decode_rc = base64.decode(rc) +# object.get(input, "azurerm_storage_account", "undefined") == "undefined" +# replaced_resource_name := replace(decode_rc, "##resource_name##", "blah") + +# retVal := { "Id": sql_db_resource.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "", "AttributeDataType": "block", "Expected": base64.encode(replaced_resource_name), "Actual": null } +# } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json new file mode 100755 index 000000000..473c088a7 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json @@ -0,0 +1,15 @@ +{ + "ruleName": "reme_sqlIngressAccess", + "file": "checkPublicAccessNotAllow.rego", + "ruleTemplate": "checkPublicAccessNotAllow", + "ruleTemplateArgs": { + "isEntire": false, + "name": "sqlIngressAccess", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that no SQL Server allows ingress from 0.0.0.0/0 (ANY IP)", + "ruleReferenceId": "accurics.azure.NS.21", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json new file mode 100755 index 000000000..8af78d71b --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json @@ -0,0 +1,15 @@ +{ + "ruleName": 
"reme_sqlPublicAccess", + "file": "checkPublicAccessNotAllow.rego", + "ruleTemplate": "checkPublicAccessNotAllow", + "ruleTemplateArgs": { + "isEntire": true, + "name": "sqlPublicAccess", + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure entire Azure infrastructure doesn't have access to Azure SQL ServerEnsure entire Azure infrastructure doesn't have access to Azure SQL Server", + "ruleReferenceId": "accurics.azure.NS.5", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego new file mode 100755 index 000000000..5534efd66 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/checkPublicAccessNotAllow.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + sql_rule := input.azurerm_sql_firewall_rule[_] + sql_rule.config.start_ip_address == "0.0.0.0" + sql_rule.config.end_ip_address == "0.0.0.0" + {{.isEntire}} + retVal := { "Id": sql_rule.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} + +{{.prefix}}{{.name}}[retVal] { + sql_rule := input.azurerm_sql_firewall_rule[_] + sql_rule.config.start_ip_address == "0.0.0.0" + sql_rule.config.end_ip_address == "255.255.255.255" + not {{.isEntire}} + retVal := { "Id": sql_rule.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/.json new file mode 100755 index 000000000..7b995d40d --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/.json @@ -0,0 +1,13 @@ +{ + "ruleName": "reme_sqlServerADAdminConfigured", + "file": "sqlServerADAdminConfigured.rego", + "ruleTemplate": "sqlServerADAdminConfigured", + "ruleTemplateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Azure Active Directory Admin is configured for SQL Server", + "ruleReferenceId": "", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego new file mode 100755 index 000000000..ed63dd4ee --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}sqlServerADAdminConfigured[retVal] { + sql_server := input.azurerm_sql_server[_] + sql_server.type == "azurerm_sql_server" + key := concat("-", [sql_server.config.resource_group_name, sql_server.config.name]) + not adAdminExist(key) + rc = "ZGF0YSAiYXp1cmVybV9jbGllbnRfY29uZmlnIiAiY3VycmVudCIge30KCnJlc291cmNlICJhenVyZXJtX3NxbF9hY3RpdmVfZGlyZWN0b3J5X2FkbWluaXN0cmF0b3IiICIjI3Jlc291cmNlX25hbWUjIyIgewogIHNlcnZlcl9uYW1lICAgICAgICAgPSBhenVyZXJtX3NxbF9zZXJ2ZXIuIyNyZXNvdXJjZV9uYW1lIyMubmFtZQogIHJlc291cmNlX2dyb3VwX25hbWUgPSBhenVyZXJtX3Jlc291cmNlX2dyb3VwLiMjcmVzb3VyY2VfbmFtZSMjLm5hbWUKICBsb2dpbiAgICAgICAgICAgICAgID0gInNxbGFkbWluIgogIHRlbmFudF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50LnRlbmFudF9pZAogIG9iamVjdF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50Lm9iamVjdF9pZAp9" + 
decode_rc = base64.decode(rc) + replaced := replace(decode_rc, "##resource_name##", sql_server.name) + traverse = "" + retVal := { "Id": sql_server.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "resource", "Expected": base64.encode(replaced), "Actual": null } +} + +adAdminExist(rg_servername) = exists { + ad_admin_set := { ad_id | input.azurerm_sql_active_directory_administrator[i].type == "azurerm_sql_active_directory_administrator"; ad_id := concat("-", [input.azurerm_sql_active_directory_administrator[i].config.resource_group_name, input.azurerm_sql_active_directory_administrator[i].config.server_name]) } + ad_admin_set[rg_servername] + exists = true +} else = false { + true +} \ No newline at end of file diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 8c15e02a2..3939ee5ac 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -19,7 +19,7 @@ package policy type Manager interface { Import() error Export() error - Validate() error + CreateManager() error } type Engine interface { diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go index 7d66da466..cdbc959f4 100644 --- a/pkg/policy/opa/constants.go +++ b/pkg/policy/opa/constants.go @@ -1,7 +1,7 @@ -package policy +package opa const ( - RegoMetadataFile = "rule.json" - RegoFileSuffix = ".rego" - RuleQueryBase = "data.accurics" + RegoMetadataFileSuffix = ".json" + RegoFileSuffix = ".rego" + RuleQueryBase = "data.accurics" ) diff --git a/pkg/policy/opa/opa_engine.go b/pkg/policy/opa/opa_engine.go index bd50ef368..bea7c9725 100644 --- a/pkg/policy/opa/opa_engine.go +++ b/pkg/policy/opa/opa_engine.go @@ -14,7 +14,7 @@ limitations under the License. */ -package policy +package opa import ( "bytes" @@ -26,9 +26,10 @@ import ( "os" "path/filepath" "sort" - "strings" "text/template" + "github.com/accurics/terrascan/pkg/utils" + "github.com/open-policy-agent/opa/ast" "go.uber.org/zap" @@ -36,70 +37,99 @@ import ( "github.com/open-policy-agent/opa/rego" ) -type AccuricsRegoMetadata struct { - Name string `json:"ruleName"` - DisplayName string `json:"ruleDisplayName"` - Category string `json:"category"` - ImpactedRes []string `json:"impactedRes"` - PolicyRelevance string `json:"policyRelevance"` - Remediation string `json:"remediation"` - Row int `json:"row"` - Rule string `json:"rule"` +type Violation struct { + Name string + Description string + LineNumber int + Category string + Data interface{} + RuleData interface{} +} + +type ResultData struct { + EngineType string + Provider string + Violations []*Violation +} + +type RegoMetadata struct { + RuleName string `json:"ruleName"` + File string `json:"file"` RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` - RuleReferenceID string `json:"ruleReferenceId"` + RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` Severity string `json:"severity"` - Vulnerability string `json:"vulnerability"` + Description string `json:"description"` + RuleReferenceID string `json:"ruleReferenceId"` + Category string `json:"category"` + Version int `json:"version"` } type RegoData struct { - Name string `json:"ruleName"` - DisplayName string `json:"ruleDisplayName"` - Category string `json:"category"` - Remediation string `json:"remediation"` - Rule string `json:"rule"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleArgument"` - RuleReferenceID string `json:"ruleReferenceId"` - 
Severity string `json:"severity"` - Vulnerability string `json:"vulnerability"` - RawRego *[]byte - PreparedQuery *rego.PreparedEvalQuery + Metadata RegoMetadata + RawRego []byte + PreparedQuery *rego.PreparedEvalQuery } -type ResultData struct { +type EngineStats struct { + ruleCount int + regoFileCount int + metadataFileCount int + metadataCount int } type OpaEngine struct { Context context.Context RegoFileMap map[string][]byte RegoDataMap map[string]*RegoData + stats EngineStats +} + +func (o *OpaEngine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { + // Load metadata file if it exists + metadata, err := ioutil.ReadFile(metaFilename) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + } + return nil, err + } + + // Read metadata into struct + regoMetadata := RegoMetadata{} + if err = json.Unmarshal(metadata, ®oMetadata); err != nil { + zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) + return nil, err + } + return ®oMetadata, err } -func filterFileListBySuffix(allFileList *[]string, filter string) *[]string { - fileList := make([]string, 0) +func (o *OpaEngine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { + for i := range regoDataList { + regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) + rawRegoData, err := ioutil.ReadFile(regoPath) + if err != nil { + zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) + continue + } - for i := range *allFileList { - if strings.HasSuffix((*allFileList)[i], filter) { - fileList = append(fileList, (*allFileList)[i]) + // Load the raw rego into the map + _, ok := (*regoFileMap)[regoPath] + if ok { + // Already loaded this file, so continue + continue } + + (*regoFileMap)[regoPath] = rawRegoData } - return &fileList + return nil } func (o *OpaEngine) LoadRegoFiles(policyPath string) error { - ruleCount := 0 - regoFileCount := 0 - metadataCount := 0 - // Walk the file path and find all directories - dirList := make([]string, 0) - err := filepath.Walk(policyPath, func(filePath string, fileInfo os.FileInfo, err error) error { - if fileInfo != nil && fileInfo.IsDir() { - dirList = append(dirList, filePath) - } + dirList, err := utils.FindAllDirectories(policyPath) + if err != nil { return err - }) + } if len(dirList) == 0 { return fmt.Errorf("no directories found for path %s", policyPath) @@ -109,80 +139,63 @@ func (o *OpaEngine) LoadRegoFiles(policyPath string) error { o.RegoDataMap = make(map[string]*RegoData) // Load rego data files from each dir + // First, we read the metadata file, which contains info about the associated rego rule. The .rego file data is + // stored in a map in its raw format. 
sort.Strings(dirList) for i := range dirList { - metaFilename := filepath.Join(dirList[i], RegoMetadataFile) - var metadata []byte - metadata, err = ioutil.ReadFile(metaFilename) + // Find all files in the current dir + fileInfo, err := ioutil.ReadDir(dirList[i]) if err != nil { if !errors.Is(err, os.ErrNotExist) { - zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) } continue } - // Read metadata into struct - regoMetadata := make([]*RegoData, 0) - if err = json.Unmarshal(metadata, ®oMetadata); err != nil { - zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) - continue - } - - metadataCount++ - - // Find all .rego files within the directory - fileInfo, err := ioutil.ReadDir(dirList[i]) - if err != nil { - zap.S().Error("error while finding rego files", zap.String("dir", dirList[i])) - continue + // Load the rego metadata first (*.json) + metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) + if metadataFiles == nil { + return fmt.Errorf("no metadata files were found") } - files := make([]string, 0) - for j := range fileInfo { - files = append(files, fileInfo[j].Name()) - } + var regoDataList []*RegoData + for j := range *metadataFiles { + filePath := filepath.Join(dirList[i], (*metadataFiles)[j]) - // Load rego data for all rego files - regoFileList := filterFileListBySuffix(&files, RegoFileSuffix) - regoFileCount += len(*regoFileList) - for j := range *regoFileList { - regoFilename := (*regoFileList)[j] - regoFullPath := filepath.Join(dirList[i], regoFilename) - var rawRegoData []byte - rawRegoData, err = ioutil.ReadFile(regoFullPath) + var regoMetadata *RegoMetadata + regoMetadata, err = o.LoadRegoMetadata(filePath) if err != nil { - zap.S().Warn("failed to load rego file", zap.String("file", regoFilename)) continue } - _, ok := o.RegoFileMap[regoFullPath] - if ok { - // Already loaded this file, so continue - continue + regoData := RegoData{ + Metadata: *regoMetadata, } - // Set raw rego data - o.RegoFileMap[regoFullPath] = rawRegoData + regoDataList = append(regoDataList, ®oData) + o.stats.metadataFileCount++ } - for j := range regoMetadata { - //key := filepath.Join(dirList[i], regoMetadata[j].Rule) - //regoData := o.RegoFileMap[key] - metadataCount++ + // Read in raw rego data from associated rego files + if err = o.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &o.RegoFileMap); err != nil { + continue + } + + for j := range regoDataList { + o.stats.metadataCount++ // Apply templates if available - var buf bytes.Buffer + var templateData bytes.Buffer t := template.New("opa") - t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoMetadata[j].RuleTemplate+".rego")])) - t.Execute(&buf, regoMetadata[j].RuleTemplateArgs) + t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) + t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs) - templateData := buf.Bytes() - regoMetadata[j].RawRego = &templateData - o.RegoDataMap[regoMetadata[j].Name] = regoMetadata[j] + regoDataList[j].RawRego = templateData.Bytes() + o.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] } } - ruleCount = len(o.RegoDataMap) - zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", ruleCount, regoFileCount, metadataCount) + o.stats.ruleCount = len(o.RegoDataMap) + zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", 
o.stats.ruleCount, o.stats.regoFileCount, o.stats.metadataCount) return err } @@ -190,11 +203,11 @@ func (o *OpaEngine) LoadRegoFiles(policyPath string) error { func (o *OpaEngine) CompileRegoFiles() error { for k := range o.RegoDataMap { compiler, err := ast.CompileModules(map[string]string{ - o.RegoDataMap[k].Rule: string(*(o.RegoDataMap[k].RawRego)), + o.RegoDataMap[k].Metadata.RuleName: string(o.RegoDataMap[k].RawRego), }) r := rego.New( - rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Name), + rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Metadata.RuleName), rego.Compiler(compiler), ) @@ -261,15 +274,20 @@ func (o *OpaEngine) Evaluate(inputData *interface{}) error { if len(rs) > 0 { results := rs[0].Expressions[0].Value.([]interface{}) if len(results) > 0 { - r := o.RegoDataMap[k] - fmt.Printf("\n[%s] [%s] %s\n %s\n", r.Severity, r.RuleReferenceID, r.DisplayName, r.Vulnerability) + r := o.RegoDataMap[k].Metadata + fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) + continue } // fmt.Printf(" [%s] %v\n", k, results) } else { // fmt.Printf("No Result [%s] \n", k) } + // Store results } + b, _ := json.MarshalIndent(inputData, "", " ") + //fmt.Printf("InputData:\n%v\n", string(b)) + return nil } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index f98a5c136..253256782 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -21,26 +21,30 @@ import ( iacProvider "github.com/accurics/terrascan/pkg/iac-providers" "github.com/accurics/terrascan/pkg/notifications" - policy "github.com/accurics/terrascan/pkg/policy/opa" + "github.com/accurics/terrascan/pkg/policy" + opa "github.com/accurics/terrascan/pkg/policy/opa" ) // Executor object type Executor struct { - filePath string - dirPath string - cloudType string - iacType string - iacVersion string - configFile string - iacProvider iacProvider.IacProvider - notifiers []notifications.Notifier + filePath string + dirPath string + policyPath string + cloudType string + iacType string + iacVersion string + configFile string + iacProvider iacProvider.IacProvider + notifiers []notifications.Notifier + policyEngine []policy.Engine } // NewExecutor creates a runtime object -func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath, configFile string) (e *Executor, err error) { +func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath, configFile, policyPath string) (e *Executor, err error) { e = &Executor{ filePath: filePath, dirPath: dirPath, + policyPath: policyPath, cloudType: cloudType, iacType: iacType, iacVersion: iacVersion, @@ -95,11 +99,12 @@ func (e *Executor) Execute() (normalized interface{}, err error) { return normalized, err } - // Create a new policy engine based on IaC type + // create a new policy engine based on IaC type if e.iacType == "terraform" { - engine := policy.OpaEngine{} + var engine policy.Engine + engine = &opa.OpaEngine{} - err := engine.Initialize("/Users/wsana/go/src/accurics/terrascan/pkg/policies/accurics/v1/opa") + err = engine.Initialize(e.policyPath) if err != nil { return err } diff --git a/pkg/utils/path.go b/pkg/utils/path.go index 4fcfe66ee..6e38345b4 100644 --- a/pkg/utils/path.go +++ b/pkg/utils/path.go @@ -40,3 +40,27 @@ func GetAbsPath(path string) (string, error) { path, _ = filepath.Abs(path) return path, nil } + +// FindAllDirectories Walks the file path and returns a list of all directories within +func FindAllDirectories(basePath string) ([]string, error) { + dirList := make([]string, 
0) + err := filepath.Walk(basePath, func(filePath string, fileInfo os.FileInfo, err error) error { + if fileInfo != nil && fileInfo.IsDir() { + dirList = append(dirList, filePath) + } + return err + }) + return dirList, err +} + +// FilterFileInfoBySuffix Given a list of files, returns a subset of files containing a suffix which matches the input filter +func FilterFileInfoBySuffix(allFileList *[]os.FileInfo, filter string) *[]string { + fileList := make([]string, 0) + + for i := range *allFileList { + if strings.HasSuffix((*allFileList)[i].Name(), filter) { + fileList = append(fileList, (*allFileList)[i].Name()) + } + } + return &fileList +} From 45a711906c368abb7c9af90f26cf8b9baaf02b3c Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 13:01:36 -0700 Subject: [PATCH 110/188] Change OPA engine filename --- pkg/policy/opa/engine.go | 303 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 303 insertions(+) create mode 100644 pkg/policy/opa/engine.go diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go new file mode 100644 index 000000000..c443029f0 --- /dev/null +++ b/pkg/policy/opa/engine.go @@ -0,0 +1,303 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package opa + +import ( + "bytes" + "context" + "encoding/json" + "errors" + "fmt" + "io/ioutil" + "os" + "path/filepath" + "sort" + "text/template" + + "github.com/accurics/terrascan/pkg/utils" + + "github.com/open-policy-agent/opa/ast" + + "go.uber.org/zap" + + "github.com/open-policy-agent/opa/rego" +) + +type Violation struct { + Name string + Description string + LineNumber int + Category string + Data interface{} + RuleData interface{} +} + +type ResultData struct { + EngineType string + Provider string + Violations []*Violation +} + +type RegoMetadata struct { + RuleName string `json:"ruleName"` + File string `json:"file"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` + Severity string `json:"severity"` + Description string `json:"description"` + RuleReferenceID string `json:"ruleReferenceId"` + Category string `json:"category"` + Version int `json:"version"` +} + +type RegoData struct { + Metadata RegoMetadata + RawRego []byte + PreparedQuery *rego.PreparedEvalQuery +} + +type EngineStats struct { + ruleCount int + regoFileCount int + metadataFileCount int + metadataCount int +} + +type Engine struct { + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData + stats EngineStats +} + +func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { + // Load metadata file if it exists + metadata, err := ioutil.ReadFile(metaFilename) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + } + return nil, err + } + + // Read metadata into struct + regoMetadata := RegoMetadata{} + if err = json.Unmarshal(metadata, ®oMetadata); err != nil { + zap.S().Warn("failed to unmarshal 
rego metadata", zap.String("file", metaFilename)) + return nil, err + } + return ®oMetadata, err +} + +func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { + for i := range regoDataList { + regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) + rawRegoData, err := ioutil.ReadFile(regoPath) + if err != nil { + zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) + continue + } + + // Load the raw rego into the map + _, ok := (*regoFileMap)[regoPath] + if ok { + // Already loaded this file, so continue + continue + } + + (*regoFileMap)[regoPath] = rawRegoData + } + return nil +} + +func (e *Engine) LoadRegoFiles(policyPath string) error { + // Walk the file path and find all directories + dirList, err := utils.FindAllDirectories(policyPath) + if err != nil { + return err + } + + if len(dirList) == 0 { + return fmt.Errorf("no directories found for path %s", policyPath) + } + + e.RegoFileMap = make(map[string][]byte) + e.RegoDataMap = make(map[string]*RegoData) + + // Load rego data files from each dir + // First, we read the metadata file, which contains info about the associated rego rule. The .rego file data is + // stored in a map in its raw format. + sort.Strings(dirList) + for i := range dirList { + // Find all files in the current dir + fileInfo, err := ioutil.ReadDir(dirList[i]) + if err != nil { + if !errors.Is(err, os.ErrNotExist) { + zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) + } + continue + } + + // Load the rego metadata first (*.json) + metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) + if metadataFiles == nil { + return fmt.Errorf("no metadata files were found") + } + + var regoDataList []*RegoData + for j := range *metadataFiles { + filePath := filepath.Join(dirList[i], (*metadataFiles)[j]) + + var regoMetadata *RegoMetadata + regoMetadata, err = e.LoadRegoMetadata(filePath) + if err != nil { + continue + } + + regoData := RegoData{ + Metadata: *regoMetadata, + } + + regoDataList = append(regoDataList, ®oData) + e.stats.metadataFileCount++ + } + + // Read in raw rego data from associated rego files + if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.RegoFileMap); err != nil { + continue + } + + for j := range regoDataList { + e.stats.metadataCount++ + // Apply templates if available + var templateData bytes.Buffer + t := template.New("opa") + _, err = t.Parse(string(e.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) + if err != nil { + zap.S().Warn("unable to parse template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + continue + } + if err = t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs); err != nil { + zap.S().Warn("unable to execute template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + continue + } + + regoDataList[j].RawRego = templateData.Bytes() + e.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] + } + } + + e.stats.ruleCount = len(e.RegoDataMap) + zap.S().Infof("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataCount) + + return err +} + +func (e *Engine) CompileRegoFiles() error { + for k := range e.RegoDataMap { + compiler, err := ast.CompileModules(map[string]string{ + e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), + }) + + r := rego.New( + 
rego.Query(RuleQueryBase+"."+e.RegoDataMap[k].Metadata.RuleName), + rego.Compiler(compiler), + ) + + // Create a prepared query that can be evaluated. + query, err := r.PrepareForEval(e.Context) + if err != nil { + return err + } + + e.RegoDataMap[k].PreparedQuery = &query + } + + return nil +} + +// Initialize Initializes the Opa engine +// Handles loading all rules, filtering, compiling, and preparing for evaluation +func (e *Engine) Initialize(policyPath string) error { + e.Context = context.Background() + + if err := e.LoadRegoFiles(policyPath); err != nil { + return err + } + + err := e.CompileRegoFiles() + if err != nil { + return err + } + + return nil +} + +func (e *Engine) Configure() error { + return nil +} + +func (e *Engine) GetResults() error { + return nil +} + +func (e *Engine) Release() error { + return nil +} + +func (e *Engine) Evaluate(inputData *interface{}) error { + + sortedKeys := make([]string, len(e.RegoDataMap)) + x := 0 + for k := range e.RegoDataMap { + sortedKeys[x] = k + x++ + } + sort.Strings(sortedKeys) + + for _, k := range sortedKeys { + // Execute the prepared query. + rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(inputData)) + // rs, err := r.Eval(o.Context) + if err != nil { + zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData)) + continue + } + + if len(rs) > 0 { + results := rs[0].Expressions[0].Value.([]interface{}) + if len(results) > 0 { + r := e.RegoDataMap[k].Metadata + fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) + continue + } + // fmt.Printf(" [%s] %v\n", k, results) + } else { + // fmt.Printf("No Result [%s] \n", k) + } + + // Store results + } + + _, err := json.MarshalIndent(inputData, "", " ") + if err != nil { + return err + } + //fmt.Printf("InputData:\n%v\n", string(b)) + + return nil +} From 6bb3463dfb83056d12a3fe5502f8c0dae4e73036 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 13:03:04 -0700 Subject: [PATCH 111/188] Rename OPA engine #2 --- pkg/policy/opa/opa_engine.go | 293 ----------------------------------- pkg/runtime/executor.go | 2 +- 2 files changed, 1 insertion(+), 294 deletions(-) delete mode 100644 pkg/policy/opa/opa_engine.go diff --git a/pkg/policy/opa/opa_engine.go b/pkg/policy/opa/opa_engine.go deleted file mode 100644 index bea7c9725..000000000 --- a/pkg/policy/opa/opa_engine.go +++ /dev/null @@ -1,293 +0,0 @@ -/* - Copyright (C) 2020 Accurics, Inc. - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
-*/ - -package opa - -import ( - "bytes" - "context" - "encoding/json" - "errors" - "fmt" - "io/ioutil" - "os" - "path/filepath" - "sort" - "text/template" - - "github.com/accurics/terrascan/pkg/utils" - - "github.com/open-policy-agent/opa/ast" - - "go.uber.org/zap" - - "github.com/open-policy-agent/opa/rego" -) - -type Violation struct { - Name string - Description string - LineNumber int - Category string - Data interface{} - RuleData interface{} -} - -type ResultData struct { - EngineType string - Provider string - Violations []*Violation -} - -type RegoMetadata struct { - RuleName string `json:"ruleName"` - File string `json:"file"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` - Severity string `json:"severity"` - Description string `json:"description"` - RuleReferenceID string `json:"ruleReferenceId"` - Category string `json:"category"` - Version int `json:"version"` -} - -type RegoData struct { - Metadata RegoMetadata - RawRego []byte - PreparedQuery *rego.PreparedEvalQuery -} - -type EngineStats struct { - ruleCount int - regoFileCount int - metadataFileCount int - metadataCount int -} - -type OpaEngine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - stats EngineStats -} - -func (o *OpaEngine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { - // Load metadata file if it exists - metadata, err := ioutil.ReadFile(metaFilename) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) - } - return nil, err - } - - // Read metadata into struct - regoMetadata := RegoMetadata{} - if err = json.Unmarshal(metadata, ®oMetadata); err != nil { - zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) - return nil, err - } - return ®oMetadata, err -} - -func (o *OpaEngine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { - for i := range regoDataList { - regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) - rawRegoData, err := ioutil.ReadFile(regoPath) - if err != nil { - zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) - continue - } - - // Load the raw rego into the map - _, ok := (*regoFileMap)[regoPath] - if ok { - // Already loaded this file, so continue - continue - } - - (*regoFileMap)[regoPath] = rawRegoData - } - return nil -} - -func (o *OpaEngine) LoadRegoFiles(policyPath string) error { - // Walk the file path and find all directories - dirList, err := utils.FindAllDirectories(policyPath) - if err != nil { - return err - } - - if len(dirList) == 0 { - return fmt.Errorf("no directories found for path %s", policyPath) - } - - o.RegoFileMap = make(map[string][]byte) - o.RegoDataMap = make(map[string]*RegoData) - - // Load rego data files from each dir - // First, we read the metadata file, which contains info about the associated rego rule. The .rego file data is - // stored in a map in its raw format. 
- sort.Strings(dirList) - for i := range dirList { - // Find all files in the current dir - fileInfo, err := ioutil.ReadDir(dirList[i]) - if err != nil { - if !errors.Is(err, os.ErrNotExist) { - zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) - } - continue - } - - // Load the rego metadata first (*.json) - metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) - if metadataFiles == nil { - return fmt.Errorf("no metadata files were found") - } - - var regoDataList []*RegoData - for j := range *metadataFiles { - filePath := filepath.Join(dirList[i], (*metadataFiles)[j]) - - var regoMetadata *RegoMetadata - regoMetadata, err = o.LoadRegoMetadata(filePath) - if err != nil { - continue - } - - regoData := RegoData{ - Metadata: *regoMetadata, - } - - regoDataList = append(regoDataList, ®oData) - o.stats.metadataFileCount++ - } - - // Read in raw rego data from associated rego files - if err = o.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &o.RegoFileMap); err != nil { - continue - } - - for j := range regoDataList { - o.stats.metadataCount++ - // Apply templates if available - var templateData bytes.Buffer - t := template.New("opa") - t.Parse(string(o.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) - t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs) - - regoDataList[j].RawRego = templateData.Bytes() - o.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] - } - } - - o.stats.ruleCount = len(o.RegoDataMap) - zap.S().Infof("Loaded %d Rego rules from %d rego files (%d metadata files).", o.stats.ruleCount, o.stats.regoFileCount, o.stats.metadataCount) - - return err -} - -func (o *OpaEngine) CompileRegoFiles() error { - for k := range o.RegoDataMap { - compiler, err := ast.CompileModules(map[string]string{ - o.RegoDataMap[k].Metadata.RuleName: string(o.RegoDataMap[k].RawRego), - }) - - r := rego.New( - rego.Query(RuleQueryBase+"."+o.RegoDataMap[k].Metadata.RuleName), - rego.Compiler(compiler), - ) - - // Create a prepared query that can be evaluated. - query, err := r.PrepareForEval(o.Context) - if err != nil { - return err - } - - o.RegoDataMap[k].PreparedQuery = &query - } - - return nil -} - -// Initialize Initializes the Opa engine -// Handles loading all rules, filtering, compiling, and preparing for evaluation -func (o *OpaEngine) Initialize(policyPath string) error { - o.Context = context.Background() - - if err := o.LoadRegoFiles(policyPath); err != nil { - return err - } - - err := o.CompileRegoFiles() - if err != nil { - return err - } - - return nil -} - -func (o *OpaEngine) Configure() error { - return nil -} - -func (o *OpaEngine) GetResults() error { - return nil -} - -func (o *OpaEngine) Release() error { - return nil -} - -func (o *OpaEngine) Evaluate(inputData *interface{}) error { - - sortedKeys := make([]string, len(o.RegoDataMap)) - x := 0 - for k := range o.RegoDataMap { - sortedKeys[x] = k - x++ - } - sort.Strings(sortedKeys) - - for _, k := range sortedKeys { - // Execute the prepared query. 
- rs, err := o.RegoDataMap[k].PreparedQuery.Eval(o.Context, rego.EvalInput(inputData)) - // rs, err := r.Eval(o.Context) - if err != nil { - zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData)) - continue - } - - if len(rs) > 0 { - results := rs[0].Expressions[0].Value.([]interface{}) - if len(results) > 0 { - r := o.RegoDataMap[k].Metadata - fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) - continue - } - // fmt.Printf(" [%s] %v\n", k, results) - } else { - // fmt.Printf("No Result [%s] \n", k) - } - - // Store results - } - - b, _ := json.MarshalIndent(inputData, "", " ") - //fmt.Printf("InputData:\n%v\n", string(b)) - - return nil -} diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 253256782..7e3bc43ff 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -102,7 +102,7 @@ func (e *Executor) Execute() (normalized interface{}, err error) { // create a new policy engine based on IaC type if e.iacType == "terraform" { var engine policy.Engine - engine = &opa.OpaEngine{} + engine = &opa.Engine{} err = engine.Initialize(e.policyPath) if err != nil { From 647282db678cfa9146cacd670c0edd84a87c25e0 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 13:59:31 -0700 Subject: [PATCH 112/188] Fix misc linter errors --- pkg/policy/interface.go | 2 ++ pkg/policy/opa/constants.go | 5 +++-- pkg/policy/opa/engine.go | 17 ++++++++++++++++- 3 files changed, 21 insertions(+), 3 deletions(-) diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 3939ee5ac..084f5599c 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,12 +16,14 @@ package policy +// Manager Policy Manager interface type Manager interface { Import() error Export() error CreateManager() error } +// Engine Policy Engine interface type Engine interface { Initialize(policyPath string) error Configure() error diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go index cdbc959f4..c00b15022 100644 --- a/pkg/policy/opa/constants.go +++ b/pkg/policy/opa/constants.go @@ -1,7 +1,8 @@ package opa const ( + // RegoMetadataFileSuffix Suffix for files containing rego metadata RegoMetadataFileSuffix = ".json" - RegoFileSuffix = ".rego" - RuleQueryBase = "data.accurics" + // RuleQueryBase Default package to query + RuleQueryBase = "data.accurics" ) diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index c443029f0..ec755f60d 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -37,6 +37,7 @@ import ( "github.com/open-policy-agent/opa/rego" ) +// Violation Contains data for each violation type Violation struct { Name string Description string @@ -46,12 +47,14 @@ type Violation struct { RuleData interface{} } +// ResultData Contains full report data type ResultData struct { EngineType string Provider string Violations []*Violation } +// RegoMetadata The rego metadata struct which is read and saved from disk type RegoMetadata struct { RuleName string `json:"ruleName"` File string `json:"file"` @@ -64,12 +67,14 @@ type RegoMetadata struct { Version int `json:"version"` } +// RegoData Stores all information needed to evaluate and report on a rego rule type RegoData struct { Metadata RegoMetadata RawRego []byte PreparedQuery *rego.PreparedEvalQuery } +// EngineStats Contains misc stats type EngineStats struct { ruleCount int regoFileCount int @@ -77,6 +82,7 @@ type EngineStats struct { metadataCount int } +// 
Engine Implements the policy engine interface type Engine struct { Context context.Context RegoFileMap map[string][]byte @@ -84,6 +90,7 @@ type Engine struct { stats EngineStats } +// LoadRegoMetadata Loads rego metadata from a given file func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { // Load metadata file if it exists metadata, err := ioutil.ReadFile(metaFilename) @@ -103,6 +110,7 @@ func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { return ®oMetadata, err } +// loadRawRegoFilesIntoMap imports raw rego files into a map func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*RegoData, regoFileMap *map[string][]byte) error { for i := range regoDataList { regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) @@ -124,6 +132,7 @@ func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*Rego return nil } +// LoadRegoFiles Loads all related rego files from the given policy path into memory func (e *Engine) LoadRegoFiles(policyPath string) error { // Walk the file path and find all directories dirList, err := utils.FindAllDirectories(policyPath) @@ -144,7 +153,8 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { sort.Strings(dirList) for i := range dirList { // Find all files in the current dir - fileInfo, err := ioutil.ReadDir(dirList[i]) + var fileInfo []os.FileInfo + fileInfo, err = ioutil.ReadDir(dirList[i]) if err != nil { if !errors.Is(err, os.ErrNotExist) { zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) @@ -207,6 +217,7 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { return err } +// CompileRegoFiles Compiles rego files for faster evaluation func (e *Engine) CompileRegoFiles() error { for k := range e.RegoDataMap { compiler, err := ast.CompileModules(map[string]string{ @@ -247,18 +258,22 @@ func (e *Engine) Initialize(policyPath string) error { return nil } +// Configure Configures the OPA engine func (e *Engine) Configure() error { return nil } +// GetResults Fetches results from OPA engine policy evaluation func (e *Engine) GetResults() error { return nil } +// Release Performs any tasks required to free resources func (e *Engine) Release() error { return nil } +// Evaluate Executes compiled OPA queries against the input JSON data func (e *Engine) Evaluate(inputData *interface{}) error { sortedKeys := make([]string, len(e.RegoDataMap)) From c6a567492d8d60c97074d229fcebfa677b0178b5 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 14:46:56 -0700 Subject: [PATCH 113/188] Fix FileInfo-related linter errors --- pkg/data/file/importer.go | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pkg/data/file/importer.go b/pkg/data/file/importer.go index 1cc036441..fc3b808b7 100644 --- a/pkg/data/file/importer.go +++ b/pkg/data/file/importer.go @@ -1,6 +1,7 @@ package file -type FileInfo struct { +// Info File info +type Info struct { Path string Hash string HashType string @@ -12,8 +13,8 @@ type Group struct { Name string IsReadOnly bool VerifySignatures bool - Directories []*FileInfo - Files []*FileInfo + Directories []*Info + Files []*Info } // Metadata File metadata From ed55d99f2c6cb043466c496bfe79443a3480af74 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Sun, 9 Aug 2020 14:58:30 -0700 Subject: [PATCH 114/188] Fixed static checker issues --- pkg/policy/opa/engine.go | 6 +++--- pkg/runtime/executor.go | 22 ++++++++++------------ 2 files changed, 13 insertions(+), 15 deletions(-) diff 
--git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index ec755f60d..c3717d2de 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -223,6 +223,9 @@ func (e *Engine) CompileRegoFiles() error { compiler, err := ast.CompileModules(map[string]string{ e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), }) + if err != nil { + return err + } r := rego.New( rego.Query(RuleQueryBase+"."+e.RegoDataMap[k].Metadata.RuleName), @@ -300,9 +303,6 @@ func (e *Engine) Evaluate(inputData *interface{}) error { fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) continue } - // fmt.Printf(" [%s] %v\n", k, results) - } else { - // fmt.Printf("No Result [%s] \n", k) } // Store results diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 7e3bc43ff..c14d92ae8 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -27,16 +27,15 @@ import ( // Executor object type Executor struct { - filePath string - dirPath string - policyPath string - cloudType string - iacType string - iacVersion string - configFile string - iacProvider iacProvider.IacProvider - notifiers []notifications.Notifier - policyEngine []policy.Engine + filePath string + dirPath string + policyPath string + cloudType string + iacType string + iacVersion string + configFile string + iacProvider iacProvider.IacProvider + notifiers []notifications.Notifier } // NewExecutor creates a runtime object @@ -101,8 +100,7 @@ func (e *Executor) Execute() (normalized interface{}, err error) { // create a new policy engine based on IaC type if e.iacType == "terraform" { - var engine policy.Engine - engine = &opa.Engine{} + var engine policy.Engine = &opa.Engine{} err = engine.Initialize(e.policyPath) if err != nil { From 0a087d2ac09b19668af112a6e80e00d884dffc79 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Mon, 10 Aug 2020 12:30:20 +0530 Subject: [PATCH 115/188] rebasing changes --- pkg/cli/run.go | 10 ++++++---- pkg/http-server/file-scan.go | 2 +- pkg/notifications/notifiers.go | 2 +- pkg/runtime/executor.go | 2 +- 4 files changed, 9 insertions(+), 7 deletions(-) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 6d22beae1..280460d59 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -17,10 +17,10 @@ package cli import ( - // "os" + "os" "github.com/accurics/terrascan/pkg/runtime" - // "github.com/accurics/terrascan/pkg/utils" + "github.com/accurics/terrascan/pkg/utils" ) // Run executes terrascan in CLI mode @@ -32,9 +32,11 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - _, err = executor.Execute() + + // executor output + violations, err := executor.Execute() if err != nil { return } - // utils.PrintJSON(normalized, os.Stdout) + utils.PrintJSON(violations, os.Stdout) } diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go index 5a60498f3..abe660010 100644 --- a/pkg/http-server/file-scan.go +++ b/pkg/http-server/file-scan.go @@ -83,7 +83,7 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { // create a new runtime executor for scanning the uploaded file executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, - tempFile.Name(), "", "") + tempFile.Name(), "", "", "") if err != nil { zap.S().Error(err) apiErrorResponse(w, err.Error(), http.StatusBadRequest) diff --git a/pkg/notifications/notifiers.go b/pkg/notifications/notifiers.go index add3b8e1f..a4760c982 100644 --- a/pkg/notifications/notifiers.go 
+++ b/pkg/notifications/notifiers.go @@ -58,7 +58,7 @@ func NewNotifiers(configFile string) ([]Notifier, error) { // empty config file path if configFile == "" { - zap.S().Infof("no config file specified") + zap.S().Debug("no config file specified") return notifiers, nil } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index c14d92ae8..0db2a17f1 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -104,7 +104,7 @@ func (e *Executor) Execute() (normalized interface{}, err error) { err = engine.Initialize(e.policyPath) if err != nil { - return err + return normalized, err } engine.Evaluate(&normalized) From 900a5d689c95dfc2c9648ef9126822deeee11ad5 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Mon, 10 Aug 2020 03:49:53 -0700 Subject: [PATCH 116/188] fixes merge issues - moves some types into a separate file - addresses some linter/staticcheck issues - fixes bad counter --- go.mod | 3 ++ go.sum | 23 +++++++++++++++ pkg/policy/opa/engine.go | 56 ++---------------------------------- pkg/policy/opa/types.go | 62 ++++++++++++++++++++++++++++++++++++++++ pkg/results/interface.go | 4 +-- pkg/runtime/executor.go | 12 -------- 6 files changed, 92 insertions(+), 68 deletions(-) create mode 100644 pkg/policy/opa/types.go diff --git a/go.mod b/go.mod index bbbd08617..459e68777 100644 --- a/go.mod +++ b/go.mod @@ -15,4 +15,7 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect + golang.org/x/tools v0.0.0-20200809012840-6f4f008689da // indirect + golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + honnef.co/go/tools v0.0.1-2020.1.5 // indirect ) diff --git a/go.sum b/go.sum index add5200c2..d576b76a8 100644 --- a/go.sum +++ b/go.sum @@ -127,6 +127,7 @@ github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs= github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc= +github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI= github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg= github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk= @@ -296,6 +297,7 @@ github.com/prometheus/procfs v0.0.0-20190117184657-bf6a532e95b1/go.mod h1:c3At6R github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a h1:9ZKAASQSHhDYGoxY8uLVpewe1GDZ2vu2Tr/vTdVAkFQ= github.com/rcrowley/go-metrics v0.0.0-20181016184325-3113b8401b8a/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= +github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0= github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= @@ -334,6 +336,7 @@ github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q github.com/xlab/treeprint 
v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b h1:vVRagRXf67ESqAb72hG2C/ZwI8NtJF2u2V76EsuOHGY= github.com/yashtewari/glob-intersection v0.0.0-20180916065949-5c77d914dd0b/go.mod h1:HptNXiXVDcJjXe9SqMd0v2FsL9f8dz4GnXgltU6q/co= +github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/zclconf/go-cty v1.0.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.1.0/go.mod h1:xnAOWiHeOqg2nWS62VtQ7pbOu17FtxJNW8RLEih+O3s= github.com/zclconf/go-cty v1.2.0/go.mod h1:hOPWgoHbaTUnI5k4D2ld+GRpFJSCe6bCM7m1q/N4PQ8= @@ -353,8 +356,10 @@ golang.org/x/crypto v0.0.0-20190219172222-a4c6cb3142f2/go.mod h1:6SG95UA2DQfeDnf golang.org/x/crypto v0.0.0-20190222235706-ffb98f73852f/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= @@ -367,6 +372,9 @@ golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTk golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE= +golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc= +golang.org/x/mod v0.3.0 h1:RM4zey1++hCTbCVQfnWeKs9/IEsaBLA8vTkd0WVtmH4= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/net v0.0.0-20180530234432-1e491301e022/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180811021610-c39426892332/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -395,6 +403,7 @@ golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= 
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -430,8 +439,18 @@ golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72 h1:bw9doJza/SFBEweII/rHQh338oozWyiFsBRHtrflcws= golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200809012840-6f4f008689da h1:ml5G98G4/tdKT1XNq+ky5iSRdKKux0TANlLAzmXT/hg= +golang.org/x/tools v0.0.0-20200809012840-6f4f008689da/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE= google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M= google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg= @@ -460,6 +479,7 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8 gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/cheggaaa/pb.v1 v1.0.27/go.mod h1:V/YB90LKu/1FcN3WVnfiiE5oMCibMjukxqG/qStrOgw= +gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= @@ -471,5 +491,8 @@ gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs= honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod 
h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.1-2020.1.5 h1:nI5egYTGJakVyOryqLs1cQO5dO0ksin5XXs2pspk75k= +honnef.co/go/tools v0.0.1-2020.1.5/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k= rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8= diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index c3717d2de..3971c60ed 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -37,59 +37,6 @@ import ( "github.com/open-policy-agent/opa/rego" ) -// Violation Contains data for each violation -type Violation struct { - Name string - Description string - LineNumber int - Category string - Data interface{} - RuleData interface{} -} - -// ResultData Contains full report data -type ResultData struct { - EngineType string - Provider string - Violations []*Violation -} - -// RegoMetadata The rego metadata struct which is read and saved from disk -type RegoMetadata struct { - RuleName string `json:"ruleName"` - File string `json:"file"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` - Severity string `json:"severity"` - Description string `json:"description"` - RuleReferenceID string `json:"ruleReferenceId"` - Category string `json:"category"` - Version int `json:"version"` -} - -// RegoData Stores all information needed to evaluate and report on a rego rule -type RegoData struct { - Metadata RegoMetadata - RawRego []byte - PreparedQuery *rego.PreparedEvalQuery -} - -// EngineStats Contains misc stats -type EngineStats struct { - ruleCount int - regoFileCount int - metadataFileCount int - metadataCount int -} - -// Engine Implements the policy engine interface -type Engine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - stats EngineStats -} - // LoadRegoMetadata Loads rego metadata from a given file func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { // Load metadata file if it exists @@ -212,7 +159,8 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { } e.stats.ruleCount = len(e.RegoDataMap) - zap.S().Infof("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataCount) + e.stats.regoFileCount = len(e.RegoFileMap) + zap.S().Infof("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataFileCount) return err } diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go new file mode 100644 index 000000000..520972406 --- /dev/null +++ b/pkg/policy/opa/types.go @@ -0,0 +1,62 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package opa + +import ( + "context" + + "github.com/accurics/terrascan/pkg/results" + + "github.com/open-policy-agent/opa/rego" +) + +// RegoMetadata The rego metadata struct which is read and saved from disk +type RegoMetadata struct { + RuleName string `json:"ruleName"` + File string `json:"file"` + RuleTemplate string `json:"ruleTemplate"` + RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` + Severity string `json:"severity"` + Description string `json:"description"` + RuleReferenceID string `json:"ruleReferenceId"` + Category string `json:"category"` + Version int `json:"version"` +} + +// RegoData Stores all information needed to evaluate and report on a rego rule +type RegoData struct { + Metadata RegoMetadata + RawRego []byte + PreparedQuery *rego.PreparedEvalQuery +} + +// EngineStats Contains misc stats +type EngineStats struct { + ruleCount int + regoFileCount int + metadataFileCount int + metadataCount int +} + +// Engine Implements the policy engine interface +type Engine struct { + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData + ViolationStore *results.Store + stats EngineStats +} diff --git a/pkg/results/interface.go b/pkg/results/interface.go index ba4d66e62..66718591d 100644 --- a/pkg/results/interface.go +++ b/pkg/results/interface.go @@ -2,6 +2,6 @@ package results // Store manages the storage and export of results information type Store interface { - AddResult(result interface{}) - GetResults() interface{} + AddResult(violation *Violation) + GetResults() []*Violation } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 1e0e50d28..0db2a17f1 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -110,18 +110,6 @@ func (e *Executor) Execute() (normalized interface{}, err error) { engine.Evaluate(&normalized) } - // create a new policy engine based on IaC type - if e.iacType == "terraform" { - var engine policy.Engine = &opa.Engine{} - - err = engine.Initialize(e.policyPath) - if err != nil { - return normalized, err - } - - engine.Evaluate(&normalized) - } - // send notifications, if configured if err = e.SendNotifications(normalized); err != nil { return normalized, err From 3faeda93e0d269bd09c31a2aa44f0fabf1fcac10 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Mon, 10 Aug 2020 03:51:37 -0700 Subject: [PATCH 117/188] add missed store.go file --- pkg/results/store.go | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) create mode 100644 pkg/results/store.go diff --git a/pkg/results/store.go b/pkg/results/store.go new file mode 100644 index 000000000..799224e85 --- /dev/null +++ b/pkg/results/store.go @@ -0,0 +1,27 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package results + +// AddResult Adds individual violations into the violation store +func (s *ViolationStore) AddResult(violation *Violation) { + s.violations = append(s.violations, violation) +} + +// GetResults Retrieves all violations from the violation store +func (s *ViolationStore) GetResults() []*Violation { + return s.violations +} From dbdbe250d501a5e6153b4d0835c38d4175a4d27d Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Mon, 10 Aug 2020 05:17:39 -0700 Subject: [PATCH 118/188] fixed issue processing templates - removed input file output - updated violation output format - added more error logs - changed most log entries to debug --- pkg/cli/run.go | 7 ++----- pkg/policy/opa/engine.go | 41 +++++++++++++++++++++++----------------- 2 files changed, 26 insertions(+), 22 deletions(-) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 280460d59..d766e1b71 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -17,10 +17,7 @@ package cli import ( - "os" - "github.com/accurics/terrascan/pkg/runtime" - "github.com/accurics/terrascan/pkg/utils" ) // Run executes terrascan in CLI mode @@ -34,9 +31,9 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po } // executor output - violations, err := executor.Execute() + _, err = executor.Execute() if err != nil { return } - utils.PrintJSON(violations, os.Stdout) + // utils.PrintJSON(violations, os.Stdout) } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 3971c60ed..6d9aa3746 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -43,7 +43,7 @@ func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { metadata, err := ioutil.ReadFile(metaFilename) if err != nil { if !errors.Is(err, os.ErrNotExist) { - zap.S().Warn("failed to load rego metadata", zap.String("file", metaFilename)) + zap.S().Error("failed to load rego metadata", zap.String("file", metaFilename)) } return nil, err } @@ -51,7 +51,7 @@ func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { // Read metadata into struct regoMetadata := RegoMetadata{} if err = json.Unmarshal(metadata, ®oMetadata); err != nil { - zap.S().Warn("failed to unmarshal rego metadata", zap.String("file", metaFilename)) + zap.S().Error("failed to unmarshal rego metadata", zap.String("file", metaFilename)) return nil, err } return ®oMetadata, err @@ -63,7 +63,7 @@ func (e *Engine) loadRawRegoFilesIntoMap(currentDir string, regoDataList []*Rego regoPath := filepath.Join(currentDir, regoDataList[i].Metadata.File) rawRegoData, err := ioutil.ReadFile(regoPath) if err != nil { - zap.S().Warn("failed to load rego file", zap.String("file", regoPath)) + zap.S().Debug("failed to load rego file", zap.String("file", regoPath)) continue } @@ -104,7 +104,7 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { fileInfo, err = ioutil.ReadDir(dirList[i]) if err != nil { if !errors.Is(err, os.ErrNotExist) { - zap.S().Error("error while searching for files", zap.String("dir", dirList[i])) + zap.S().Debug("error while searching for files", zap.String("dir", dirList[i])) } continue } @@ -112,7 +112,8 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { // Load the rego metadata first (*.json) metadataFiles := utils.FilterFileInfoBySuffix(&fileInfo, RegoMetadataFileSuffix) if metadataFiles == nil { - return fmt.Errorf("no metadata files were found") + zap.S().Debug("no metadata files were found", zap.String("dir", dirList[i])) + continue } var regoDataList []*RegoData @@ -122,6 +123,7 @@ func (e 
*Engine) LoadRegoFiles(policyPath string) error { var regoMetadata *RegoMetadata regoMetadata, err = e.LoadRegoMetadata(filePath) if err != nil { + zap.S().Debug("error loading rego metadata", zap.String("file", filePath)) continue } @@ -135,21 +137,26 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { // Read in raw rego data from associated rego files if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.RegoFileMap); err != nil { + zap.S().Debug("error loading raw rego data", zap.String("dir", dirList[i])) continue } for j := range regoDataList { e.stats.metadataCount++ + + // Check if the template file exists + templateFile := filepath.Join(dirList[i], regoDataList[j].Metadata.File) + // Apply templates if available var templateData bytes.Buffer t := template.New("opa") - _, err = t.Parse(string(e.RegoFileMap[filepath.Join(dirList[i], regoDataList[j].Metadata.RuleTemplate+".rego")])) + _, err = t.Parse(string(e.RegoFileMap[templateFile])) if err != nil { - zap.S().Warn("unable to parse template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + zap.S().Debug("unable to parse template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) continue } if err = t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs); err != nil { - zap.S().Warn("unable to execute template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + zap.S().Debug("unable to execute template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) continue } @@ -160,7 +167,7 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { e.stats.ruleCount = len(e.RegoDataMap) e.stats.regoFileCount = len(e.RegoFileMap) - zap.S().Infof("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataFileCount) + zap.S().Debugf("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataFileCount) return err } @@ -172,6 +179,8 @@ func (e *Engine) CompileRegoFiles() error { e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), }) if err != nil { + zap.S().Error("error compiling rego files", zap.String("rule", e.RegoDataMap[k].Metadata.RuleName), + zap.String("raw rego", string(e.RegoDataMap[k].RawRego)), zap.Error(err)) return err } @@ -183,6 +192,8 @@ func (e *Engine) CompileRegoFiles() error { // Create a prepared query that can be evaluated. 
query, err := r.PrepareForEval(e.Context) if err != nil { + zap.S().Error("error creating prepared query", zap.String("rule", e.RegoDataMap[k].Metadata.RuleName), + zap.String("raw rego", string(e.RegoDataMap[k].RawRego)), zap.Error(err)) return err } @@ -198,11 +209,13 @@ func (e *Engine) Initialize(policyPath string) error { e.Context = context.Background() if err := e.LoadRegoFiles(policyPath); err != nil { + zap.S().Error("error loading rego files", zap.String("policy path", policyPath)) return err } err := e.CompileRegoFiles() if err != nil { + zap.S().Error("error compiling rego files", zap.String("policy path", policyPath)) return err } @@ -240,7 +253,7 @@ func (e *Engine) Evaluate(inputData *interface{}) error { rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(inputData)) // rs, err := r.Eval(o.Context) if err != nil { - zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'"), zap.Any("input", inputData)) + zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'")) continue } @@ -248,7 +261,7 @@ func (e *Engine) Evaluate(inputData *interface{}) error { results := rs[0].Expressions[0].Value.([]interface{}) if len(results) > 0 { r := e.RegoDataMap[k].Metadata - fmt.Printf("\nResource(s): %v\n[%s] [%s] %s\n %s\n", results, r.Severity, r.RuleReferenceID, r.RuleName, r.Description) + fmt.Printf("[%s] [%s] [%s] %s: %s\n", r.Severity, r.RuleReferenceID, r.Category, r.RuleName, r.Description) continue } } @@ -256,11 +269,5 @@ func (e *Engine) Evaluate(inputData *interface{}) error { // Store results } - _, err := json.MarshalIndent(inputData, "", " ") - if err != nil { - return err - } - //fmt.Printf("InputData:\n%v\n", string(b)) - return nil } From 462d8b862375f24f1988e60c3910ab19944feec0 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Mon, 10 Aug 2020 10:45:50 -0400 Subject: [PATCH 119/188] moves docs requirements file --- requirements.txt => docs/requirements.txt | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename requirements.txt => docs/requirements.txt (100%) diff --git a/requirements.txt b/docs/requirements.txt similarity index 100% rename from requirements.txt rename to docs/requirements.txt From 2c80eda9e28c765c156e7f6258abc43dfeee99eb Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Mon, 10 Aug 2020 10:46:18 -0400 Subject: [PATCH 120/188] adds markdown include --- docs/requirements.txt | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/requirements.txt b/docs/requirements.txt index d0f2375b0..76e0dd3fb 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,3 @@ mkdocs==1.1.2 mkdocs-material==5.5.3 +markdown-include==0.5.1 From 6e25de83d73ce7a3bcef7d8e6b0c207834fb73a7 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Tue, 11 Aug 2020 00:54:19 -0700 Subject: [PATCH 121/188] Add separate violation/results and reporter objects --- pkg/policy/interface.go | 4 ++++ pkg/policy/opa/constants.go | 16 ++++++++++++++++ pkg/policy/opa/engine.go | 32 ++++++++++++++++++++++---------- pkg/policy/opa/types.go | 2 +- pkg/runtime/executor.go | 21 +++++++++++++++------ 5 files changed, 58 insertions(+), 17 deletions(-) diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 084f5599c..3aa3a548b 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -31,3 +31,7 @@ type Engine interface { GetResults() error Release() error } + +// EngineFactory creates policy engine instances based on iac/cloud type +type EngineFactory struct { +} diff --git a/pkg/policy/opa/constants.go 
b/pkg/policy/opa/constants.go index c00b15022..9e51f1f56 100644 --- a/pkg/policy/opa/constants.go +++ b/pkg/policy/opa/constants.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + package opa const ( diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 6d9aa3746..4e7c756fb 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -28,13 +28,12 @@ import ( "sort" "text/template" - "github.com/accurics/terrascan/pkg/utils" + "github.com/accurics/terrascan/pkg/results" + "github.com/accurics/terrascan/pkg/utils" "github.com/open-policy-agent/opa/ast" - - "go.uber.org/zap" - "github.com/open-policy-agent/opa/rego" + "go.uber.org/zap" ) // LoadRegoMetadata Loads rego metadata from a given file @@ -258,15 +257,28 @@ func (e *Engine) Evaluate(inputData *interface{}) error { } if len(rs) > 0 { - results := rs[0].Expressions[0].Value.([]interface{}) - if len(results) > 0 { - r := e.RegoDataMap[k].Metadata - fmt.Printf("[%s] [%s] [%s] %s: %s\n", r.Severity, r.RuleReferenceID, r.Category, r.RuleName, r.Description) + res := rs[0].Expressions[0].Value.([]interface{}) + if len(res) > 0 { + // @TODO: Take line number + file info and add to violation + regoData := e.RegoDataMap[k] + // @TODO: Remove this print, should be done by whomever consumes the results below + fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, + regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) + violation := results.Violation{ + Name: regoData.Metadata.RuleName, + Description: regoData.Metadata.Description, + RuleID: regoData.Metadata.RuleReferenceID, + Category: regoData.Metadata.Category, + RuleData: regoData.RawRego, + InputFile: "", + InputData: res, + LineNumber: 0, + } + + e.ViolationStore.AddResult(&violation) continue } } - - // Store results } return nil diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go index 520972406..c60748083 100644 --- a/pkg/policy/opa/types.go +++ b/pkg/policy/opa/types.go @@ -57,6 +57,6 @@ type Engine struct { Context context.Context RegoFileMap map[string][]byte RegoDataMap map[string]*RegoData - ViolationStore *results.Store + ViolationStore *results.ViolationStore stats EngineStats } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 0db2a17f1..1ab8857d7 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -99,17 +99,26 @@ func (e *Executor) Execute() (normalized interface{}, err error) { } // create a new policy engine based on IaC type + var engine policy.Engine + if e.iacType == "terraform" { - var engine policy.Engine = &opa.Engine{} + engine = &opa.Engine{} + } - err = engine.Initialize(e.policyPath) - if err != nil { - return normalized, err - } + if err = engine.Initialize(e.policyPath); err != nil { + return normalized, err + } - engine.Evaluate(&normalized) + if err = engine.Evaluate(&normalized); err != nil { + return normalized, err } + // var reporter publish.Reporter = 
console.Reporter + /// if err = reporter.ImportData() + // if err = reporter.Publish() { + // + // } + // send notifications, if configured if err = e.SendNotifications(normalized); err != nil { return normalized, err From cd1e30ca0dc1bd6383fed0786c0e7e9481b4dfa3 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 11 Aug 2020 20:17:43 +0530 Subject: [PATCH 122/188] refactoring policy package --- pkg/cli/run.go | 7 +++-- pkg/policy/interface.go | 8 +++-- pkg/policy/opa/engine.go | 35 +++++++++++++++++---- pkg/results/store.go | 7 +++++ pkg/runtime/executor.go | 67 ++++++++++++++++++---------------------- 5 files changed, 77 insertions(+), 47 deletions(-) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index d766e1b71..280460d59 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -17,7 +17,10 @@ package cli import ( + "os" + "github.com/accurics/terrascan/pkg/runtime" + "github.com/accurics/terrascan/pkg/utils" ) // Run executes terrascan in CLI mode @@ -31,9 +34,9 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po } // executor output - _, err = executor.Execute() + violations, err := executor.Execute() if err != nil { return } - // utils.PrintJSON(violations, os.Stdout) + utils.PrintJSON(violations, os.Stdout) } diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 3aa3a548b..c686cd58a 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,6 +16,10 @@ package policy +import ( + "github.com/accurics/terrascan/pkg/results" +) + // Manager Policy Manager interface type Manager interface { Import() error @@ -25,9 +29,9 @@ type Manager interface { // Engine Policy Engine interface type Engine interface { - Initialize(policyPath string) error + Init(string) error Configure() error - Evaluate(inputData *interface{}) error + Evaluate(*interface{}) ([]*results.Violation, error) GetResults() error Release() error } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 4e7c756fb..5e0b86095 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -36,6 +36,26 @@ import ( "go.uber.org/zap" ) +var ( + errInitFailed = fmt.Errorf("failed to initialize OPA policy engine") +) + +// NewEngine returns a new OPA policy engine +func NewEngine(policyPath string) (*Engine, error) { + + // opa engine struct + engine := &Engine{} + + // initialize the engine + if err := engine.Init(policyPath); err != nil { + zap.S().Error("failed to initialize OPA policy engine") + return engine, errInitFailed + } + + // successful + return engine, nil +} + // LoadRegoMetadata Loads rego metadata from a given file func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { // Load metadata file if it exists @@ -202,9 +222,9 @@ func (e *Engine) CompileRegoFiles() error { return nil } -// Initialize Initializes the Opa engine +// Init initializes the Opa engine // Handles loading all rules, filtering, compiling, and preparing for evaluation -func (e *Engine) Initialize(policyPath string) error { +func (e *Engine) Init(policyPath string) error { e.Context = context.Background() if err := e.LoadRegoFiles(policyPath); err != nil { @@ -218,6 +238,9 @@ func (e *Engine) Initialize(policyPath string) error { return err } + // initialize ViolationStore + e.ViolationStore = results.NewViolationStore() + return nil } @@ -237,7 +260,7 @@ func (e *Engine) Release() error { } // Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(inputData *interface{}) error { +func (e *Engine) 
Evaluate(inputData *interface{}) ([]*results.Violation, error) { sortedKeys := make([]string, len(e.RegoDataMap)) x := 0 @@ -262,8 +285,8 @@ func (e *Engine) Evaluate(inputData *interface{}) error { // @TODO: Take line number + file info and add to violation regoData := e.RegoDataMap[k] // @TODO: Remove this print, should be done by whomever consumes the results below - fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, - regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) + // fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, + // regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) violation := results.Violation{ Name: regoData.Metadata.RuleName, Description: regoData.Metadata.Description, @@ -281,5 +304,5 @@ func (e *Engine) Evaluate(inputData *interface{}) error { } } - return nil + return e.ViolationStore.GetResults(), nil } diff --git a/pkg/results/store.go b/pkg/results/store.go index 799224e85..fec48c4e6 100644 --- a/pkg/results/store.go +++ b/pkg/results/store.go @@ -16,6 +16,13 @@ package results +// NewViolationStore returns a new violation store +func NewViolationStore() *ViolationStore { + return &ViolationStore{ + violations: []*Violation{}, + } +} + // AddResult Adds individual violations into the violation store func (s *ViolationStore) AddResult(violation *Violation) { s.violations = append(s.violations, violation) diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 1ab8857d7..c1301f9e4 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -27,15 +27,16 @@ import ( // Executor object type Executor struct { - filePath string - dirPath string - policyPath string - cloudType string - iacType string - iacVersion string - configFile string - iacProvider iacProvider.IacProvider - notifiers []notifications.Notifier + filePath string + dirPath string + policyPath string + cloudType string + iacType string + iacVersion string + configFile string + iacProvider iacProvider.IacProvider + policyEngine policy.Engine + notifiers []notifications.Notifier } // NewExecutor creates a runtime object @@ -50,7 +51,7 @@ func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath, configFile, configFile: configFile, } - // initialized executor + // initialize executor if err = e.Init(); err != nil { return e, err } @@ -81,49 +82,41 @@ func (e *Executor) Init() error { return err } + // create a new policy engine based on IaC type + e.policyEngine, err = opa.NewEngine(e.policyPath) + if err != nil { + zap.S().Errorf("failed to create policy engine. 
error: '%s'", err) + return err + } + zap.S().Debug("initialized executor") return nil } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() (normalized interface{}, err error) { +func (e *Executor) Execute() (results interface{}, err error) { - // create normalized output from Iac + // create results output from Iac if e.dirPath != "" { - normalized, err = e.iacProvider.LoadIacDir(e.dirPath) + results, err = e.iacProvider.LoadIacDir(e.dirPath) } else { - normalized, err = e.iacProvider.LoadIacFile(e.filePath) + results, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { - return normalized, err + return results, err } - // create a new policy engine based on IaC type - var engine policy.Engine - - if e.iacType == "terraform" { - engine = &opa.Engine{} - } - - if err = engine.Initialize(e.policyPath); err != nil { - return normalized, err - } - - if err = engine.Evaluate(&normalized); err != nil { - return normalized, err + // evaluate policies + results, err = e.policyEngine.Evaluate(&results) + if err != nil { + return results, err } - // var reporter publish.Reporter = console.Reporter - /// if err = reporter.ImportData() - // if err = reporter.Publish() { - // - // } - // send notifications, if configured - if err = e.SendNotifications(normalized); err != nil { - return normalized, err + if err = e.SendNotifications(results); err != nil { + return results, err } // successful - return normalized, nil + return results, nil } From c9011358d9c6f6ccc8f7a9d1b1bbf54878f7ac30 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 01:32:00 +0530 Subject: [PATCH 123/188] add support for writer --- pkg/cli/run.go | 4 ++-- pkg/results/types.go | 16 ++++++++-------- pkg/writer/json.go | 38 ++++++++++++++++++++++++++++++++++++++ pkg/writer/register.go | 30 ++++++++++++++++++++++++++++++ pkg/writer/writer.go | 40 ++++++++++++++++++++++++++++++++++++++++ pkg/writer/yaml.go | 39 +++++++++++++++++++++++++++++++++++++++ 6 files changed, 157 insertions(+), 10 deletions(-) create mode 100644 pkg/writer/json.go create mode 100644 pkg/writer/register.go create mode 100644 pkg/writer/writer.go create mode 100644 pkg/writer/yaml.go diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 280460d59..f24ae3fb6 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -20,7 +20,7 @@ import ( "os" "github.com/accurics/terrascan/pkg/runtime" - "github.com/accurics/terrascan/pkg/utils" + "github.com/accurics/terrascan/pkg/writer" ) // Run executes terrascan in CLI mode @@ -38,5 +38,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - utils.PrintJSON(violations, os.Stdout) + writer.Write("xml", violations, os.Stdout) } diff --git a/pkg/results/types.go b/pkg/results/types.go index 0dd6377f1..113f8260f 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -18,14 +18,14 @@ package results // Violation Contains data for each violation type Violation struct { - Name string - Description string - RuleID string - Category string - RuleData interface{} - InputFile string - InputData interface{} - LineNumber int + Name string `json:"name" yaml:"name" xml:"name,attr"` + Description string `json:"description" yaml:"description" xml:"description, attr"` + RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + Category string `json:"category" yaml:"category" xml:"category,attr"` + RuleData interface{} `json:"-" yaml:"-" xml:"-"` + InputFile string `json:"-", yaml:"-", xml:"-"` + InputData 
interface{} `json:"input_data" yaml:"input_data" xml:"input_data,attr"` + LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } // ViolationStore Storage area for violation data diff --git a/pkg/writer/json.go b/pkg/writer/json.go new file mode 100644 index 000000000..6fbae649f --- /dev/null +++ b/pkg/writer/json.go @@ -0,0 +1,38 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import ( + "encoding/json" + "io" +) + +const ( + jsonFormat supportedFormat = "json" +) + +func init() { + RegisterWriter(jsonFormat, JSONWriter) +} + +// JSONWriter prints data in JSON format +func JSONWriter(data interface{}, writer io.Writer) error { + j, _ := json.MarshalIndent(data, "", " ") + writer.Write(j) + writer.Write([]byte{'\n'}) + return nil +} diff --git a/pkg/writer/register.go b/pkg/writer/register.go new file mode 100644 index 000000000..3d7925de1 --- /dev/null +++ b/pkg/writer/register.go @@ -0,0 +1,30 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import "io" + +// supportedFormat data type for supported formats +type supportedFormat string + +// writerMap stores mapping of supported writer formats with respective functions +var writerMap = make(map[supportedFormat](func(interface{}, io.Writer) error)) + +// RegisterWriter registers a writer for terrascan +func RegisterWriter(format supportedFormat, writerFunc func(interface{}, io.Writer) error) { + writerMap[format] = writerFunc +} diff --git a/pkg/writer/writer.go b/pkg/writer/writer.go new file mode 100644 index 000000000..97bc799f8 --- /dev/null +++ b/pkg/writer/writer.go @@ -0,0 +1,40 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package writer + +import ( + "fmt" + "io" + + "go.uber.org/zap" +) + +var ( + errNotSupported = fmt.Errorf("output format not supported") +) + +// Write method writes in the given format using the respective writer func +func Write(format supportedFormat, data interface{}, writer io.Writer) error { + + writerFunc, present := writerMap[format] + if !present { + zap.S().Error("output format '%s' not supported", format) + return errNotSupported + } + + return writerFunc(data, writer) +} diff --git a/pkg/writer/yaml.go b/pkg/writer/yaml.go new file mode 100644 index 000000000..2d7f8d7ca --- /dev/null +++ b/pkg/writer/yaml.go @@ -0,0 +1,39 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import ( + "io" + + "gopkg.in/yaml.v2" +) + +const ( + yamlFormat supportedFormat = "yaml" +) + +func init() { + RegisterWriter(yamlFormat, YAMLWriter) +} + +// YAMLWriter prints data in YAML format +func YAMLWriter(data interface{}, writer io.Writer) error { + j, _ := yaml.Marshal(data) + writer.Write(j) + writer.Write([]byte{'\n'}) + return nil +} From 268760aca54e37cedf004f4037de1a09500d91dd Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Tue, 11 Aug 2020 22:58:34 -0400 Subject: [PATCH 124/188] updated to use built in snippets --- docs/changelog.md | 2 +- docs/contributing.md | 2 +- docs/requirements.txt | 1 - mkdocs.yml | 5 ++++- 4 files changed, 6 insertions(+), 4 deletions(-) diff --git a/docs/changelog.md b/docs/changelog.md index f4d16fd97..786b75d5a 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1 +1 @@ -{!CHANGELOG.md!} +--8<-- "CHANGELOG.md" diff --git a/docs/contributing.md b/docs/contributing.md index 66e8e2b46..ea38c9bff 100644 --- a/docs/contributing.md +++ b/docs/contributing.md @@ -1 +1 @@ -{!CONTRIBUTING.md!} +--8<-- "CONTRIBUTING.md" diff --git a/docs/requirements.txt b/docs/requirements.txt index 76e0dd3fb..d0f2375b0 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,3 +1,2 @@ mkdocs==1.1.2 mkdocs-material==5.5.3 -markdown-include==0.5.1 diff --git a/mkdocs.yml b/mkdocs.yml index 64bd6e2bf..bbcdb2d61 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -44,6 +44,9 @@ nav: - About: about.md markdown_extensions: - - markdown_include.include + - pymdownx.highlight + - pymdownx.inlinehilite + - pymdownx.superfences + - pymdownx.snippets - toc: permalink: true From d1243c4facaf5f1d39edbf127c21e30e14a03a6a Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Mon, 10 Aug 2020 09:06:17 -0400 Subject: [PATCH 125/188] initial getting started --- docs/getting-started.md | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/getting-started.md b/docs/getting-started.md index 0dc486bf0..25cdf28ba 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -1,6 +1,7 @@ # Getting Started ## Installation +Terrascan's binary can be found on the package for each [release](/~https://github.com/accurics/terrascan/releases). 
## Scanning From bbbf2a55c43305f35f6343293fe59a1cef7ff8f7 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Tue, 11 Aug 2020 23:08:15 -0400 Subject: [PATCH 126/188] adds policy rule description --- docs/policies.md | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/docs/policies.md b/docs/policies.md index dfb5e9daa..797750d82 100644 --- a/docs/policies.md +++ b/docs/policies.md @@ -1,3 +1,27 @@ # Policies +Terrascan policies are written using the [Rego policy language](https://www.openpolicyagent.org/docs/latest/policy-language/). With each rego policy a JSON "rule" file is included which defines metadata for the policy. + +## Rule JSON file + +The rule files follow this naming convention: `....json` + +Here's an example of the contents of a rule file: + +``` json linenums="1" +{ + "ruleName": "unrestrictedIngressAccess", + "rule": "unrestrictedIngressAccess.rego", + "ruleTemplate": "unrestrictedIngressAccess", + "ruleArgument": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure no security groups allow ingress from 0.0.0.0/0 to ALL ports and protocols", + "ruleReferenceId": "AWS.SecurityGroup.NetworkPortsSecurity.High.0094", + "category": "Network Ports Security", + "version" : "1" +} +``` + ## AWS From 9950bcd521f62932e06d2b00eecaaadbf7833ff4 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Tue, 11 Aug 2020 23:53:59 -0400 Subject: [PATCH 127/188] adds policy rule file description --- docs/policies.md | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/policies.md b/docs/policies.md index 797750d82..25c266e00 100644 --- a/docs/policies.md +++ b/docs/policies.md @@ -1,6 +1,6 @@ # Policies -Terrascan policies are written using the [Rego policy language](https://www.openpolicyagent.org/docs/latest/policy-language/). With each rego policy a JSON "rule" file is included which defines metadata for the policy. +Terrascan policies are written using the [Rego policy language](https://www.openpolicyagent.org/docs/latest/policy-language/). With each rego policy a JSON "rule" file is included which defines metadata for the policy. Policies included within Terrascan are stored in the [pkg/policies/opa/rego](/~https://github.com/accurics/terrascan/tree/master/pkg/policies/opa/rego) directory. 
## Rule JSON file @@ -24,4 +24,17 @@ Here's an example of the contents of a rule file: } ``` +| Key | Value | +| ------------------- | --------------------------------------------- | +| ruleName | Short name for the rule | +| rule | File name of the rego policy | +| ruleTemplate | FIXME | +| ruleArgument | FIXME | +| ruleArgument.prefix | FIXME | +| severity | Likelihood x impact of issue | +| description | Description of the issue found with this rule | +| ruleReferenceId | Unique ID of the rule in the format `...` | +| category | Descriptive category for this rule | +| version | Version number for the rule/rego | + ## AWS From 483920a15199194df181f6c4196226fca71879a1 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 10:38:36 +0530 Subject: [PATCH 128/188] changing input/ouput type from interface{} to data specific types --- go.mod | 1 + pkg/cli/run.go | 2 +- pkg/policy/interface.go | 3 ++- pkg/policy/opa/engine.go | 4 ++-- pkg/runtime/executor.go | 11 +++++++---- 5 files changed, 13 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index 459e68777..7cd2b8da7 100644 --- a/go.mod +++ b/go.mod @@ -17,5 +17,6 @@ require ( golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect golang.org/x/tools v0.0.0-20200809012840-6f4f008689da // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect ) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index f24ae3fb6..963a7c22e 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -38,5 +38,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - writer.Write("xml", violations, os.Stdout) + writer.Write("yaml", violations, os.Stdout) } diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index c686cd58a..62765b331 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -17,6 +17,7 @@ package policy import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/results" ) @@ -31,7 +32,7 @@ type Manager interface { type Engine interface { Init(string) error Configure() error - Evaluate(*interface{}) ([]*results.Violation, error) + Evaluate(output.AllResourceConfigs) ([]*results.Violation, error) GetResults() error Release() error } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 5e0b86095..e3ecc271f 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -28,8 +28,8 @@ import ( "sort" "text/template" + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/results" - "github.com/accurics/terrascan/pkg/utils" "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/rego" @@ -260,7 +260,7 @@ func (e *Engine) Release() error { } // Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(inputData *interface{}) ([]*results.Violation, error) { +func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Violation, error) { sortedKeys := make([]string, len(e.RegoDataMap)) x := 0 diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index c1301f9e4..983deae3d 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -20,9 +20,11 @@ import ( "go.uber.org/zap" iacProvider "github.com/accurics/terrascan/pkg/iac-providers" + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/notifications" 
"github.com/accurics/terrascan/pkg/policy" opa "github.com/accurics/terrascan/pkg/policy/opa" + "github.com/accurics/terrascan/pkg/results" ) // Executor object @@ -94,20 +96,21 @@ func (e *Executor) Init() error { } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() (results interface{}, err error) { +func (e *Executor) Execute() (results []*results.Violation, err error) { // create results output from Iac + var normalized output.AllResourceConfigs if e.dirPath != "" { - results, err = e.iacProvider.LoadIacDir(e.dirPath) + normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } else { - results, err = e.iacProvider.LoadIacFile(e.filePath) + normalized, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { return results, err } // evaluate policies - results, err = e.policyEngine.Evaluate(&results) + results, err = e.policyEngine.Evaluate(normalized) if err != nil { return results, err } From 0c3d58f2d7988266bf41cfb1f3066ccee7f442d8 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 12:03:20 +0530 Subject: [PATCH 129/188] add unit tests for FindAllDirectories func --- pkg/utils/path_test.go | 43 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go index a2efc55b8..5b47d4076 100644 --- a/pkg/utils/path_test.go +++ b/pkg/utils/path_test.go @@ -17,7 +17,9 @@ package utils import ( + "fmt" "os" + "reflect" "testing" ) @@ -67,3 +69,44 @@ func TestGetAbsPath(t *testing.T) { }) } } + +func TestFindAllDirectories(t *testing.T) { + + table := []struct { + name string + basePath string + want []string + wantErr error + }{ + { + name: "happy path", + basePath: "./testdata", + want: []string{"./testdata", "testdata/emptydir", "testdata/testdir1", "testdata/testdir2"}, + wantErr: nil, + }, + { + name: "empty dir", + basePath: "./testdata/emptydir", + want: []string{"./testdata/emptydir"}, + wantErr: nil, + }, + { + name: "invalid dir", + basePath: "./testdata/nothere", + want: []string{}, + wantErr: fmt.Errorf("lstat ./testdata/nothere: no such file or directory"), + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got, gotErr := FindAllDirectories(tt.basePath) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("gotErr: '%+v', wantErr: '%+v'", gotErr, tt.wantErr) + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} From 1892bac1643467ca444c5c0f6cdc08f3c7b29829 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Wed, 12 Aug 2020 02:06:51 -0700 Subject: [PATCH 130/188] wrap engine input and output objects to lock the engine interface added low/medium/high/total violation counts removed reporting placeholder code (reporting to be done by caller of executor) --- go.mod | 2 +- go.sum | 2 ++ pkg/policy/interface.go | 7 +------ pkg/policy/opa/engine.go | 30 +++++++++++++++++++++--------- pkg/policy/opa/types.go | 12 ++++++------ pkg/policy/types.go | 16 ++++++++++++++++ pkg/results/types.go | 14 ++++++++++++-- pkg/runtime/executor.go | 5 ++--- 8 files changed, 61 insertions(+), 27 deletions(-) create mode 100644 pkg/policy/types.go diff --git a/go.mod b/go.mod index 7cd2b8da7..dffe6982b 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect - golang.org/x/tools v0.0.0-20200809012840-6f4f008689da // indirect + golang.org/x/tools 
v0.0.0-20200811215021-48a8ffc5b207 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect diff --git a/go.sum b/go.sum index d576b76a8..8aee93936 100644 --- a/go.sum +++ b/go.sum @@ -445,6 +445,8 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200809012840-6f4f008689da h1:ml5G98G4/tdKT1XNq+ky5iSRdKKux0TANlLAzmXT/hg= golang.org/x/tools v0.0.0-20200809012840-6f4f008689da/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 h1:8Kg+JssU1jBZs8GIrL5pl4nVyaqyyhdmHAR4D1zGErg= +golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 62765b331..c74a79886 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,11 +16,6 @@ package policy -import ( - "github.com/accurics/terrascan/pkg/iac-providers/output" - "github.com/accurics/terrascan/pkg/results" -) - // Manager Policy Manager interface type Manager interface { Import() error @@ -32,7 +27,7 @@ type Manager interface { type Engine interface { Init(string) error Configure() error - Evaluate(output.AllResourceConfigs) ([]*results.Violation, error) + Evaluate(EngineInput) (EngineOutput, error) GetResults() error Release() error } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index e3ecc271f..90f4d56ef 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -26,9 +26,11 @@ import ( "os" "path/filepath" "sort" + "strings" "text/template" - "github.com/accurics/terrascan/pkg/iac-providers/output" + "github.com/accurics/terrascan/pkg/policy" + "github.com/accurics/terrascan/pkg/results" "github.com/accurics/terrascan/pkg/utils" "github.com/open-policy-agent/opa/ast" @@ -239,7 +241,7 @@ func (e *Engine) Init(policyPath string) error { } // initialize ViolationStore - e.ViolationStore = results.NewViolationStore() + e.Results.ViolationStore = results.NewViolationStore() return nil } @@ -260,7 +262,7 @@ func (e *Engine) Release() error { } // Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Violation, error) { +func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, error) { sortedKeys := make([]string, len(e.RegoDataMap)) x := 0 @@ -272,7 +274,7 @@ func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Viola for _, k := range sortedKeys { // Execute the prepared query. 
- rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(inputData)) + rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(engineInput.InputData)) // rs, err := r.Eval(o.Context) if err != nil { zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'")) @@ -284,13 +286,11 @@ func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Viola if len(res) > 0 { // @TODO: Take line number + file info and add to violation regoData := e.RegoDataMap[k] - // @TODO: Remove this print, should be done by whomever consumes the results below - // fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, - // regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) violation := results.Violation{ Name: regoData.Metadata.RuleName, Description: regoData.Metadata.Description, RuleID: regoData.Metadata.RuleReferenceID, + Severity: regoData.Metadata.Severity, Category: regoData.Metadata.Category, RuleData: regoData.RawRego, InputFile: "", @@ -298,11 +298,23 @@ func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Viola LineNumber: 0, } - e.ViolationStore.AddResult(&violation) + severity := regoData.Metadata.Severity + if strings.ToLower(severity) == "high" { + e.Results.ViolationStore.HighCount++ + } else if strings.ToLower(severity) == "medium" { + e.Results.ViolationStore.MediumCount++ + } else if strings.ToLower(severity) == "low" { + e.Results.ViolationStore.LowCount++ + } else { + zap.S().Warn("invalid severity found in rule definition", + zap.String("rule id", violation.RuleID), zap.String("severity", severity)) + } + e.Results.ViolationStore.TotalCount++ + e.Results.ViolationStore.AddResult(&violation) continue } } } - return e.ViolationStore.GetResults(), nil + return e.Results, nil } diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go index c60748083..980d3b53e 100644 --- a/pkg/policy/opa/types.go +++ b/pkg/policy/opa/types.go @@ -19,7 +19,7 @@ package opa import ( "context" - "github.com/accurics/terrascan/pkg/results" + "github.com/accurics/terrascan/pkg/policy" "github.com/open-policy-agent/opa/rego" ) @@ -54,9 +54,9 @@ type EngineStats struct { // Engine Implements the policy engine interface type Engine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - ViolationStore *results.ViolationStore - stats EngineStats + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData + Results policy.EngineOutput + stats EngineStats } diff --git a/pkg/policy/types.go b/pkg/policy/types.go new file mode 100644 index 000000000..e9020dfb9 --- /dev/null +++ b/pkg/policy/types.go @@ -0,0 +1,16 @@ +package policy + +import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" + "github.com/accurics/terrascan/pkg/results" +) + +// EngineInput Contains data used as input to the engine +type EngineInput struct { + InputData *output.AllResourceConfigs +} + +// EngineOutput Contains data output from the engine +type EngineOutput struct { + ViolationStore *results.ViolationStore +} diff --git a/pkg/results/types.go b/pkg/results/types.go index 113f8260f..5ad2f9501 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -19,16 +19,26 @@ package results // Violation Contains data for each violation type Violation struct { Name string `json:"name" yaml:"name" xml:"name,attr"` - Description string `json:"description" yaml:"description" xml:"description, 
attr"` + Description string `json:"description" yaml:"description" xml:"description,attr"` RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` - InputFile string `json:"-", yaml:"-", xml:"-"` + InputFile string `json:"-" yaml:"-" xml:"-"` InputData interface{} `json:"input_data" yaml:"input_data" xml:"input_data,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } +// ViolationStats Contains stats related to the violation data +type ViolationStats struct { + LowCount int + MediumCount int + HighCount int + TotalCount int +} + // ViolationStore Storage area for violation data type ViolationStore struct { violations []*Violation + ViolationStats } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 983deae3d..df36a6315 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -24,7 +24,6 @@ import ( "github.com/accurics/terrascan/pkg/notifications" "github.com/accurics/terrascan/pkg/policy" opa "github.com/accurics/terrascan/pkg/policy/opa" - "github.com/accurics/terrascan/pkg/results" ) // Executor object @@ -96,7 +95,7 @@ func (e *Executor) Init() error { } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() (results []*results.Violation, err error) { +func (e *Executor) Execute() (results policy.EngineOutput, err error) { // create results output from Iac var normalized output.AllResourceConfigs @@ -110,7 +109,7 @@ func (e *Executor) Execute() (results []*results.Violation, err error) { } // evaluate policies - results, err = e.policyEngine.Evaluate(normalized) + results, err = e.policyEngine.Evaluate(policy.EngineInput{InputData: &normalized}) if err != nil { return results, err } From 3f96d230f4700d8285570d0756d5de3ecdec7c49 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 15:40:00 +0530 Subject: [PATCH 131/188] fixing violations output --- pkg/policy/opa/engine.go | 8 ++++---- pkg/policy/types.go | 2 +- pkg/results/store.go | 6 +++--- pkg/results/types.go | 14 +++++++------- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 90f4d56ef..f490d1ad5 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -300,16 +300,16 @@ func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, severity := regoData.Metadata.Severity if strings.ToLower(severity) == "high" { - e.Results.ViolationStore.HighCount++ + e.Results.ViolationStore.Count.HighCount++ } else if strings.ToLower(severity) == "medium" { - e.Results.ViolationStore.MediumCount++ + e.Results.ViolationStore.Count.MediumCount++ } else if strings.ToLower(severity) == "low" { - e.Results.ViolationStore.LowCount++ + e.Results.ViolationStore.Count.LowCount++ } else { zap.S().Warn("invalid severity found in rule definition", zap.String("rule id", violation.RuleID), zap.String("severity", severity)) } - e.Results.ViolationStore.TotalCount++ + e.Results.ViolationStore.Count.TotalCount++ e.Results.ViolationStore.AddResult(&violation) continue } diff --git a/pkg/policy/types.go b/pkg/policy/types.go index e9020dfb9..f16ddae3c 100644 --- a/pkg/policy/types.go +++ b/pkg/policy/types.go @@ -12,5 +12,5 @@ type EngineInput struct { // EngineOutput Contains data output from the engine type EngineOutput struct { - ViolationStore 
*results.ViolationStore + *results.ViolationStore } diff --git a/pkg/results/store.go b/pkg/results/store.go index fec48c4e6..8e3252951 100644 --- a/pkg/results/store.go +++ b/pkg/results/store.go @@ -19,16 +19,16 @@ package results // NewViolationStore returns a new violation store func NewViolationStore() *ViolationStore { return &ViolationStore{ - violations: []*Violation{}, + Violations: []*Violation{}, } } // AddResult Adds individual violations into the violation store func (s *ViolationStore) AddResult(violation *Violation) { - s.violations = append(s.violations, violation) + s.Violations = append(s.Violations, violation) } // GetResults Retrieves all violations from the violation store func (s *ViolationStore) GetResults() []*Violation { - return s.violations + return s.Violations } diff --git a/pkg/results/types.go b/pkg/results/types.go index 5ad2f9501..3cac12d18 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -25,20 +25,20 @@ type Violation struct { Category string `json:"category" yaml:"category" xml:"category,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` InputFile string `json:"-" yaml:"-" xml:"-"` - InputData interface{} `json:"input_data" yaml:"input_data" xml:"input_data,attr"` + InputData interface{} `json:"-" yaml:"-" xml:"-"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } // ViolationStats Contains stats related to the violation data type ViolationStats struct { - LowCount int - MediumCount int - HighCount int - TotalCount int + LowCount int `json:"low"` + MediumCount int `json:"medium"` + HighCount int `json:"high"` + TotalCount int `json:"total"` } // ViolationStore Storage area for violation data type ViolationStore struct { - violations []*Violation - ViolationStats + Violations []*Violation `json:"violations"` + Count ViolationStats `json:"count"` } From 1df64e4f402900dcb0b818238546380b84927c7c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 18:01:01 +0530 Subject: [PATCH 132/188] add source line info to normalized resource config --- pkg/iac-providers/output/types.go | 1 + pkg/iac-providers/terraform/v12/resource.go | 1 + 2 files changed, 2 insertions(+) diff --git a/pkg/iac-providers/output/types.go b/pkg/iac-providers/output/types.go index 681f65f96..2b52a2403 100644 --- a/pkg/iac-providers/output/types.go +++ b/pkg/iac-providers/output/types.go @@ -21,6 +21,7 @@ type ResourceConfig struct { ID string `json:"id"` Name string `json:"name"` Source string `json:"source"` + Line int `json:"line"` Type string `json:"type"` Config interface{} `json:"config"` } diff --git a/pkg/iac-providers/terraform/v12/resource.go b/pkg/iac-providers/terraform/v12/resource.go index 0b6fd6472..1d3f96d77 100644 --- a/pkg/iac-providers/terraform/v12/resource.go +++ b/pkg/iac-providers/terraform/v12/resource.go @@ -53,6 +53,7 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig Name: managedResource.Name, Type: managedResource.Type, Source: managedResource.DeclRange.Filename, + Line: managedResource.DeclRange.Start.Line, Config: goOut, } From 201648c49ca2674ceecbcb0f523277c7259dcd81 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 18:02:02 +0530 Subject: [PATCH 133/188] add output format option to cli args --- cmd/terrascan/main.go | 5 ++++- pkg/cli/run.go | 5 +++-- pkg/writer/writer.go | 4 ++-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 453c93961..db116a4ff 100644 --- a/cmd/terrascan/main.go +++ 
b/cmd/terrascan/main.go @@ -49,6 +49,9 @@ func main() { // config file configFile = flag.String("config", "", "config file path") + + // output type + output = flag.String("output", "yaml", "output format (json, xml, yaml)") ) flag.Parse() @@ -65,6 +68,6 @@ func main() { } else { logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") - cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile, *policyPath) + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile, *policyPath, *output) } } diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 963a7c22e..3e66cdf68 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -24,7 +24,8 @@ import ( ) // Run executes terrascan in CLI mode -func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, policyPath string) { +func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, + policyPath, format string) { // create a new runtime executor for processing IaC executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, @@ -38,5 +39,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - writer.Write("yaml", violations, os.Stdout) + writer.Write(format, violations, os.Stdout) } diff --git a/pkg/writer/writer.go b/pkg/writer/writer.go index 97bc799f8..a4ef7f964 100644 --- a/pkg/writer/writer.go +++ b/pkg/writer/writer.go @@ -28,9 +28,9 @@ var ( ) // Write method writes in the given format using the respective writer func -func Write(format supportedFormat, data interface{}, writer io.Writer) error { +func Write(format string, data interface{}, writer io.Writer) error { - writerFunc, present := writerMap[format] + writerFunc, present := writerMap[supportedFormat(format)] if !present { zap.S().Error("output format '%s' not supported", format) return errNotSupported From 62d625bf72817a6b42f60c2d654ef431a6b5d8bd Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 21:51:08 +0530 Subject: [PATCH 134/188] add xml writer support --- pkg/writer/json.go | 4 +++- pkg/writer/register.go | 10 +++++++--- pkg/writer/writer.go | 3 ++- pkg/writer/xml.go | 40 ++++++++++++++++++++++++++++++++++++++++ pkg/writer/yaml.go | 3 ++- 5 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 pkg/writer/xml.go diff --git a/pkg/writer/json.go b/pkg/writer/json.go index 6fbae649f..4df22391b 100644 --- a/pkg/writer/json.go +++ b/pkg/writer/json.go @@ -19,6 +19,8 @@ package writer import ( "encoding/json" "io" + + "github.com/accurics/terrascan/pkg/policy" ) const ( @@ -30,7 +32,7 @@ func init() { } // JSONWriter prints data in JSON format -func JSONWriter(data interface{}, writer io.Writer) error { +func JSONWriter(data policy.EngineOutput, writer io.Writer) error { j, _ := json.MarshalIndent(data, "", " ") writer.Write(j) writer.Write([]byte{'\n'}) diff --git a/pkg/writer/register.go b/pkg/writer/register.go index 3d7925de1..55fe7b173 100644 --- a/pkg/writer/register.go +++ b/pkg/writer/register.go @@ -16,15 +16,19 @@ package writer -import "io" +import ( + "io" + + "github.com/accurics/terrascan/pkg/policy" +) // supportedFormat data type for supported formats type supportedFormat string // writerMap stores mapping of supported writer formats with respective functions -var writerMap = make(map[supportedFormat](func(interface{}, io.Writer) error)) +var writerMap = make(map[supportedFormat](func(policy.EngineOutput, io.Writer) error)) // RegisterWriter registers a writer 
for terrascan -func RegisterWriter(format supportedFormat, writerFunc func(interface{}, io.Writer) error) { +func RegisterWriter(format supportedFormat, writerFunc func(policy.EngineOutput, io.Writer) error) { writerMap[format] = writerFunc } diff --git a/pkg/writer/writer.go b/pkg/writer/writer.go index a4ef7f964..3773fee7f 100644 --- a/pkg/writer/writer.go +++ b/pkg/writer/writer.go @@ -20,6 +20,7 @@ import ( "fmt" "io" + "github.com/accurics/terrascan/pkg/policy" "go.uber.org/zap" ) @@ -28,7 +29,7 @@ var ( ) // Write method writes in the given format using the respective writer func -func Write(format string, data interface{}, writer io.Writer) error { +func Write(format string, data policy.EngineOutput, writer io.Writer) error { writerFunc, present := writerMap[supportedFormat(format)] if !present { diff --git a/pkg/writer/xml.go b/pkg/writer/xml.go new file mode 100644 index 000000000..2d868f5a9 --- /dev/null +++ b/pkg/writer/xml.go @@ -0,0 +1,40 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import ( + "encoding/xml" + "io" + + "github.com/accurics/terrascan/pkg/policy" +) + +const ( + xmlFormat supportedFormat = "xml" +) + +func init() { + RegisterWriter(xmlFormat, XMLWriter) +} + +// XMLWriter prints data in XML format +func XMLWriter(data policy.EngineOutput, writer io.Writer) error { + j, _ := xml.MarshalIndent(data, "", " ") + writer.Write(j) + writer.Write([]byte{'\n'}) + return nil +} diff --git a/pkg/writer/yaml.go b/pkg/writer/yaml.go index 2d7f8d7ca..77346109d 100644 --- a/pkg/writer/yaml.go +++ b/pkg/writer/yaml.go @@ -19,6 +19,7 @@ package writer import ( "io" + "github.com/accurics/terrascan/pkg/policy" "gopkg.in/yaml.v2" ) @@ -31,7 +32,7 @@ func init() { } // YAMLWriter prints data in YAML format -func YAMLWriter(data interface{}, writer io.Writer) error { +func YAMLWriter(data policy.EngineOutput, writer io.Writer) error { j, _ := yaml.Marshal(data) writer.Write(j) writer.Write([]byte{'\n'}) From 1c5bdc05d80cec9a6ec61932376626c72db3a17a Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Wed, 12 Aug 2020 16:29:23 -0400 Subject: [PATCH 135/188] adds favicons --- docs/img/terrascan_favicon_16px.ico | Bin 0 -> 1150 bytes docs/img/terrascan_favicon_32px-16px.ico | Bin 0 -> 5430 bytes docs/img/terrascan_favicon_32px.ico | Bin 0 -> 4286 bytes 3 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/img/terrascan_favicon_16px.ico create mode 100644 docs/img/terrascan_favicon_32px-16px.ico create mode 100644 docs/img/terrascan_favicon_32px.ico diff --git a/docs/img/terrascan_favicon_16px.ico b/docs/img/terrascan_favicon_16px.ico new file mode 100644 index 0000000000000000000000000000000000000000..95916d084a94317d7246f8355325e4965c306804 GIT binary patch literal 1150 zcmaiz&ubG=5XWCoP%qMxXD^CJwaLby$u^Dduhhndl(apBv4y_rQJ?l-CJ~YF$?m?H@4T7!W`yw3udPkc|6!5c zE5soo#4&=FXvJHnhY(~tc-ljLH~g6$enI$%Bg7CzD(?b>chm zn?MV^#y0k0Um1g+#D3xt@r!8HHGiwphkaR7-+O@GxBsg>N3nYYFr(6!HLO2MY&B_B 
z^3#b}LoU2#1z@=$m{$~-5vgGmoZy(W%vGYISHDVUv7 z!`d?MZK&`Avfl{eQp!Lksqfa$`Yu1LDas*1b*NIycO-r0-p^p9I0S7X zfQ)TmbK@H}*T2H<(PaF1RKa;81j`}4Eo-RXue5)heTV$Z95mODOrMU8wJ%s-`wTmw zK|6CC_ikrVoO}$^q5RUce?<0&%wG$iz*ue=%Zs1TJr;&B5`=wC$HjiyyVs9+Ou_uz z2e{Lt&OvIN@&ZcJ4thpAD35?7F&xIE^8~9a71-k@%m+cPm(QHF zJNIYYRnY$|iSp7sRw_$){kDM88yD5(1yn1G7|5q#6iM4H_vSliE%(3x<+9DaVK_k~ zole}Jx`on=i&wJ+JbW>TloNyDl9uacU)JpQpVz}oy+g#MmV11s ze|ZRMalH9aX+~74?JV6X`!oE`Yiw(opZXxyVl>aMO83Kfv&?7p*Zlt6C&cO=A#y&C Ndc1!<>wc_Z?H|ABt3?0+ literal 0 HcmV?d00001 diff --git a/docs/img/terrascan_favicon_32px-16px.ico b/docs/img/terrascan_favicon_32px-16px.ico new file mode 100644 index 0000000000000000000000000000000000000000..a8118df0860d8ebb7dbfba7fc99901a51ea90bfd GIT binary patch literal 5430 zcmcgw`*T&*6+VcL)y`D^gEF0|(;qE)19_9&b50UMLIUI|5D91vf@Gp3K;wgl67wLY zjFCq`FhC;^36BgqO~7ADKfMr=zrg*>ia=n>rzu+2QN#U*YVOd=N$#f6qci4YU zTC7D(B7RQP5>xLFip;+7nF}rzJk&9%3=^C*oTIBcL_yiAJ4sK3;(vhnG=evkbO9nW%oeS=ad2Cc;8euJNijU) zRa}g}ft0DRQ)1wh#>0Iv33g$u_uKbLGHMdwKA#9D!|;grxOCg8F<82A9xBUo;WTsq zu>_PA*(faxg!~tOZYPN|o4xH5q@>ekz@2&H* zYvZx#g+d%Z+KadPk78?A8#;Hi;>58&oajG->ZWfN@>{gx3FV)czsgOA`zzuR*hI7j(L{e&5;=xf zIesgDRRlpfUj3x9MpEh9OrELe1-)xcdn4+{FT|jX$ofs?`3PPuD^^kUJ9xKc4NSJI z#z`K0d1F0&I`N2CeP#-gMyMZ|e{4Ks^%;*K`v0K)ip5>T&xqN?8e$vK6NKKirajS! z*2FJb;>1>bem;LpWD#2k<;FGQJ0etn-n-VcCmPX;N4&RfN08?ZJ`;&<;$Or_edGNW zk9Z|RvL@uX=!koX8sbZ0JU!#>ONL}g=J(p5ed0di1>z<#p6rS4OO|9x_NX>!osf;9 z`!6w3*%PIcOv#px2pja9P>m{1KcwI44<0;!BwIS9%h;f25J`Og5a%6=hwjQJ*c;2) z<79_tgLFujuh);ea@D8#{%1(m*mR;{-*$VZ!`(RzPNC_sKD~6Aa|qg@S{=W}Br~1R z9hETwTRA7VjfrrN(1s1>Zuj*|mvqYaI|=m*e{b}sbTgiAJ>~79KT9HFIyO%(W3YW1 z+}EeWDU0BdPU%*^jGS{uvxPH~yN-!&h-K^a@A+q-L_xk`xU4U;9_0>)r3W576=dsu6pPX8{6=;xX_ z=I77E!QXac<1_WJv+4WGoR1DLh7ZNVPL9E>>}hyxaXC?jSy|J}oHV8#yr(srnKPJs zD-We}4Y6$`pJt}oWs7n3>Q(d|dY%4qZshs79h}c7OZr~hwGEdq58?9A5O#NUA}@=3 zQ85g~!JHlA=PY7ObtWFIbn6cFf19WDuYKxqT)TD+`(E7vyMp#B_RL|$`&U(^7#g~S z>o;!T265@)MKhz0b{1|rXD80S?piN*Ox}j{`}cR0{#7dVe^mU%tC{;U zeLVTj+n7CP2Ao|yzt@;Q>?3)ZiRjw;8(g5?3t#;W+h1-+?#zhXHo4n>^bK6Sa)oF4 zu$lYhgQ>xg|I)pN_%8emlz(&FRDAHqljeMDs>=~*U<}kpHYS>U%*#kbb!9QCD~gcI zck_MW<+X{!!jgQP`{Hx_<s=^ zLv_q^a4O;uaAFX!^<6`mPu`k=%~^K6(t(@g-MecC+{MA#<+o2*-&D=}^IBB@Q4Gq9 zTzqo&4E{MhjHY$JhEqg69l;)?d|)3`Z&zI^Hcnjv0?AWN?kYd!|B|X=*v+1PXKBRQ z`~7Fk{u@Z2iq&kY*Z~D=DZ~xxZr0f_xe-3BQe1MNWJdJao{}or5=hv@a$C=Zo82{zS zS;+V>HtHKDH*{}?`pn4tS2je>AG%*_d2Tvq_C9-cJc`QF@k~Q44j<^j$7epod7kh2 zb6?=okI&-x;a>LY2Fx$df?da2*-yU}_tnf>eJ6!>T$+{J;>aLUDPG0FKnV6n)UQ|6rEkjXW1cHk}28YDx==DPsm1{MdUmx znaWMsqQBYbn_A%gY}T|FsaLusL$b#7d!!Mhl?~&6e@R!gK0ofNvD^Os@@)y)AUpo= z-=Dq>dWP9?7vKGPO?Q^=F8}`4yVm@EE?UDIE^D-R3`_VGWjr&lcNzCp{n gd%&_L3yu8qfneR{Kc83$g7=3`mL=*9se>MK{uiaagE>LFPqh+xROBX1GvSN&nr7SKaCSr(@C{RHXjY3<5E)uI<6HO$?bKcBj=5?k_n-JTazBl*Y?>paizk8>K zF^a!0ykO|B(eSKcJZBijO9Y3E6hhxid_QsNE~i6_#$bV!$U4q|hE)I2ecc!wAuBIzFPymU#Ybh}CY zpEOR$My>mY7_RQ&u9Hsb7Kh6QeI`Z`iqqrt8)h>75Pbefw>ZRQ+n}c(iF`hed^=n) zJ>*>-4>Lb5*amTk%i(q6u38PqKI%Q-&(Q0&CMOowiSe)wC&7FrHrNJn*?I8Wp}iV; zk4a}LVNqAwFl?P11FI?l)~B>#TSBmmxWp;npCHsPhW15s;$}Rpa_Xz2KTBLO4KFL1 z494JK60DQsVdlAH#3^p|OZBYqI-K!za&IB`dHV7(`4_l!4J|u~dS8$C$!%f`9DP1c zajR#mZwAltJsQ)}$f0rhQA!>hA`jEklOUNP?U+LO z_bIO&kgo^vjd$S9r&nr zA2QQ97ZSrj9PHdNe&#~PR5|gj6}Qe%=d=A3|HgGI(c9aLV@D3bTR{62d)Bbx{c7<% zbab?%ufHGtM0;BsmKMzpn+q#77T!tGur~T?$FAD|zjJPx&(thpR%#ivEyZ@wP&+Z-Q9~eL*_d&)i`nQid zwEvk0vmh28Ga4Q*d6^&c4fc8ezLYPuAJ$-ivJG{IU@i9V-FGP$=$p>|3)_DbgZyj@ zKVEObpIu$pvE@ye+2pD5_aM~+`=ENe_N8KDu8oIhVw7FGs!#d9q&NrOoj!hZsVn#D zdEy?j|9VoRuwm^Abg&*UHOjjUxjH! 
zj1Oa@zG2sf&dord8GQcA2KW6#byCK>DbMVi-VJfc&P&DSiZYx#bpkhViGi*sk#t1B=kKONq++$+cFx8lBmb<3U^#O=C&=n}*^L3bke{Yy44qYX!>OZCMb z;HjkU;&@~iq@iHJG#j}Elj$#OU_I;ekbjRTZmSpr-T{K*9&-P3E`oj=bSKdrQFle3 z&B_^jj>WT6xv=-2VtS1FX^&!THjq=ksz1nY-8FR1I68b!>6C8!-sA8FKRKs3=#H#) zfqSTIP%LzZTFH2@2ifOgEx82sg=*TNyB_ac_8t$^n(UA+>6GqZ%8+->6S7gcaNkFz zQ?)5ucptIPa-HWqpY1Wtxp~Dc9nv+V--C_dTG=p~c=Y!#afR|Zan~Lj@%Jys7QYR$ zDZuWkR4u>b%7 literal 0 HcmV?d00001 From 0264e8db4f2ff1fd5dbaf3322b2328ce13a7a5e1 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 13 Aug 2020 00:29:52 -0400 Subject: [PATCH 136/188] updates font --- mkdocs.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mkdocs.yml b/mkdocs.yml index bbcdb2d61..7532dc7f8 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -18,6 +18,8 @@ theme: palette: primary: green language: en + font: + text: Montserrat # Social Icons extra: From acc72ca27239daf8b412c26221b8ab216a48515a Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 13 Aug 2020 00:40:31 -0400 Subject: [PATCH 137/188] adds missing descriptions --- docs/policies.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/policies.md b/docs/policies.md index 25c266e00..ee21d817e 100644 --- a/docs/policies.md +++ b/docs/policies.md @@ -28,9 +28,9 @@ Here's an example of the contents of a rule file: | ------------------- | --------------------------------------------- | | ruleName | Short name for the rule | | rule | File name of the rego policy | -| ruleTemplate | FIXME | -| ruleArgument | FIXME | -| ruleArgument.prefix | FIXME | +| ruleTemplate | Rego policy template Used for the rule | +| ruleArgument | Argument passed to the template | +| ruleArgument.prefix | Used for making rego policies unique | | severity | Likelihood x impact of issue | | description | Description of the issue found with this rule | | ruleReferenceId | Unique ID of the rule in the format `...` | From 23f4bfe5d036431869839bb3bcd23435c595a23a Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Wed, 12 Aug 2020 23:12:52 -0700 Subject: [PATCH 138/188] =?UTF-8?q?added=20line=20number=20and=20file=20na?= =?UTF-8?q?me=20output=20support=20policy=20changes=E2=80=94removed=20poli?= =?UTF-8?q?cies=20with=20errors=20(will=20need=20to=20fix).=20these=20erro?= =?UTF-8?q?rs=20include=20things=20like=20missing=20reference=20IDs=20and?= =?UTF-8?q?=20invalid=20severity=20strings=20record=20time=20duration=20of?= =?UTF-8?q?=20the=20scan=20fixed=20Engine.GetResults=20and=20updated=20the?= =?UTF-8?q?=20interface=20reduced=20the=20scope=20of=20the=20members=20of?= =?UTF-8?q?=20the=20Engine=20type=20misc=20code=20cleanup?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- go.mod | 2 +- go.sum | 2 + ....EncryptionandKeyManagement.High.0407.json | 7 +- ....EncryptionandKeyManagement.High.0408.json | 7 +- .../AWS.CloudFront.Logging.Medium.0567.json | 7 +- .../AWS.CloudTrail.Logging.High.0399.json | 7 +- .../AWS.CloudTrail.Logging.Low.0559.json | 9 +- .../AWS.CloudTrail.Logging.Medium.0460.json | 7 +- .../opa/rego/aws/aws_db_instance/.json | 13 -- .../rdsPubliclyAccessible.rego | 9 - .../AWS.IamUser.IAM.High.0390.json | 7 +- .../AWS.Iam.IAM.Low.0540.json | 7 +- .../AWS.Iam.IAM.Medium.0454.json | 7 +- .../AWS.Iam.IAM.Medium.0455.json | 7 +- .../AWS.Iam.IAM.Medium.0456.json | 7 +- .../AWS.Iam.IAM.Medium.0457.json | 7 +- .../AWS.Iam.IAM.Medium.0458.json | 7 +- .../AWS.Iam.IAM.Medium.0495.json | 7 +- .../AWS.IamPolicy.IAM.High.0392.json | 7 +- .../AWS.IamPolicy.IAM.High.0392.json | 7 +- 
.../AWS.IamUser.IAM.High.0387.json | 9 +- .../AWS.IamUser.IAM.High.0388.json | 9 +- ....Instance.NetworkSecurity.Medium.0506.json | 7 +- ....EncryptionandKeyManagement.High.0412.json | 7 +- .../AWS.KMS.Logging.High.0400.json | 7 +- ...hConfiguration.DataSecurity.High.0102.json | 7 +- ....EncryptionandKeyManagement.High.0405.json | 13 -- .../AWS.S3Bucket.IAM.High.0370.json | 13 -- .../AWS.S3Bucket.IAM.High.0377.json | 15 -- .../AWS.S3Bucket.IAM.High.0378.json | 15 -- .../AWS.S3Bucket.IAM.High.0379.json | 15 -- .../AWS.S3Bucket.IAM.High.0381.json | 15 -- ...WS.S3Bucket.NetworkSecurity.High.0417.json | 13 -- .../aws/aws_s3_bucket/noS3BucketSseRules.rego | 9 - .../rego/aws/aws_s3_bucket/s3AclGrants.rego | 8 - .../s3BucketNoWebsiteIndexDoc.rego | 8 - .../aws_s3_bucket/s3VersioningMfaFalse.rego | 10 - .../AWS.IamPolicy.IAM.High.0374.json | 7 +- .../AWS.S3Bucket.IAM.High.0371.json | 7 +- .../AWS.S3Bucket.IAM.High.0372.json | 7 +- ...curityGroup.NetworkSecurity.High.0094.json | 7 +- .../aws_vpc/AWS.VPC.Logging.Medium.0470.json | 7 +- .../aws_vpc/AWS.VPC.Logging.Medium.0471.json | 7 +- pkg/policy/interface.go | 13 +- pkg/policy/opa/engine.go | 173 +++++++++++------- pkg/policy/opa/types.go | 27 +-- pkg/results/types.go | 20 +- pkg/utils/resource.go | 35 ++++ 48 files changed, 262 insertions(+), 368 deletions(-) delete mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/.json delete mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego create mode 100644 pkg/utils/resource.go diff --git a/go.mod b/go.mod index dffe6982b..ea53117c6 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect - golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 // indirect + golang.org/x/tools v0.0.0-20200812231640-9176cd30088c // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect diff --git a/go.sum b/go.sum index 8aee93936..b2eeab546 100644 --- a/go.sum +++ b/go.sum @@ -447,6 +447,8 @@ golang.org/x/tools v0.0.0-20200809012840-6f4f008689da h1:ml5G98G4/tdKT1XNq+ky5iS golang.org/x/tools v0.0.0-20200809012840-6f4f008689da/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 h1:8Kg+JssU1jBZs8GIrL5pl4nVyaqyyhdmHAR4D1zGErg= golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= 
+golang.org/x/tools v0.0.0-20200812231640-9176cd30088c h1:ZSTOUQugXA1i88foZV5ck1FrcnEYhGmlpiPXgDWmhG0= +golang.org/x/tools v0.0.0-20200812231640-9176cd30088c/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json index e2f3a8a84..87a931b83 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudfrontNoHTTPSTraffic", + "name": "cloudfrontNoHTTPSTraffic", "file": "cloudfrontNoHTTPSTraffic.rego", - "ruleTemplate": "cloudfrontNoHTTPSTraffic", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Use encrypted connection between CloudFront and origin server", - "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json index 3ee435f99..417d50dcd 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudfrontNoSecureCiphers", + "name": "cloudfrontNoSecureCiphers", "file": "cloudfrontNoSecureCiphers.rego", - "ruleTemplate": "cloudfrontNoSecureCiphers", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Secure ciphers are not used in CloudFront distribution", - "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json index a20956e92..2d26be5a4 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudfrontNoLogging", + "name": "cloudfrontNoLogging", "file": "cloudfrontNoLogging.rego", - "ruleTemplate": "cloudfrontNoLogging", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content 
delivered through the Content Delivery Network (CDN).", - "ruleReferenceId": "AWS.CloudFront.Logging.Medium.0567", + "referenceId": "AWS.CloudFront.Logging.Medium.0567", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json index 8c070cbad..ba033e951 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudTrailLogNotEncrypted", + "name": "cloudTrailLogNotEncrypted", "file": "cloudTrailLogNotEncrypted.rego", - "ruleTemplate": "cloudTrailLogNotEncrypted", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Cloud Trail Log Not Enabled", - "ruleReferenceId": "AWS.CloudTrail.Logging.High.0399", + "referenceId": "AWS.CloudTrail.Logging.High.0399", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json index 9fcf02b5f..8ff0317d1 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_enableSNSTopic", + "name": "reme_enableSNSTopic", "file": "enableSNSTopic.rego", - "ruleTemplate": "enableSNSTopic", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure appropriate subscribers to each SNS topic", - "ruleReferenceId": "AWS.CloudTrail.Logging.Low.0559", + "referenceId": "AWS.CloudTrail.Logging.Low.0559", "category": "Logging", - "version": 0 + "version": 1 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json index ec9b6b080..52e43d7ca 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudTrailMultiRegionNotCreated", + "name": "cloudTrailMultiRegionNotCreated", "file": "cloudTrailMultiRegionNotCreated.rego", - "ruleTemplate": "cloudTrailMultiRegionNotCreated", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Cloud Trail Multi Region not enabled", - "ruleReferenceId": "AWS.CloudTrail.Logging.Medium.0460", + "referenceId": "AWS.CloudTrail.Logging.Medium.0460", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/.json b/pkg/policies/opa/rego/aws/aws_db_instance/.json deleted file mode 100755 index 06c786984..000000000 --- a/pkg/policies/opa/rego/aws/aws_db_instance/.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "rdsPubliclyAccessible", - "file": "rdsPubliclyAccessible.rego", - "ruleTemplate": "rdsPubliclyAccessible", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "RDS Instance publicly_accessible flag is true", - "ruleReferenceId": "", - "category": "Data Security", - "version": 2 -} \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego deleted file mode 100755 index 601e8c85e..000000000 --- a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego +++ /dev/null @@ -1,9 +0,0 @@ -package accurics - -{{.prefix}}rdsPubliclyAccessible[retVal] { - db := input.aws_db_instance[_] - db.config.publicly_accessible == true - traverse = "publicly_accessible" - retVal := { "Id": db.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": db.config.publicly_accessible } -} - diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json index 2e0b4d321..8846f55b0 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json @@ -1,13 +1,12 @@ { - "ruleName": "noAccessKeyForRootAccount", + "name": "noAccessKeyForRootAccount", "file": "noAccessKeyForRootAccount.rego", - "ruleTemplate": "noAccessKeyForRootAccount", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "The root account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the root account be removed. Removing access keys associated with the root account limits vectors by which the account can be compromised. Additionally, removing the root access keys encourages the creation and use of role based accounts that are least privileged.", - "ruleReferenceId": "AWS.IamUser.IAM.High.0390", + "referenceId": "AWS.IamUser.IAM.High.0390", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json index 40b2ad60c..b9df1d00a 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json @@ -1,14 +1,13 @@ { - "ruleName": "passwordRotateEvery90Days", + "name": "passwordRotateEvery90Days", "file": "passwordRotateEvery90Days.rego", - "ruleTemplate": "passwordRotateEvery90Days", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRotateEvery90Days", "prefix": "" }, "severity": "LOW", "description": "Reducing the password lifetime increases account resiliency against brute force login attempts", - "ruleReferenceId": "AWS.Iam.IAM.Low.0540", + "referenceId": "AWS.Iam.IAM.Low.0540", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json index 0be97abb7..3fc67b062 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireLowerCase", + "name": "passwordRequireLowerCase", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireLowerCase", - 
"ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireLowerCase", "prefix": "", "required_parameter": "require_lowercase_characters" }, "severity": "MEDIUM", "description": "Lower case alphabet not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0454", + "referenceId": "AWS.Iam.IAM.Medium.0454", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json index 30595871b..e35773b24 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireNumber", + "name": "passwordRequireNumber", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireNumber", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireNumber", "prefix": "", "required_parameter": "require_numbers" }, "severity": "MEDIUM", "description": "Number not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0455", + "referenceId": "AWS.Iam.IAM.Medium.0455", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json index a4e46cc3b..a3fa77f15 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireSymbol", + "name": "passwordRequireSymbol", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireSymbol", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireSymbol", "prefix": "", "required_parameter": "require_symbols" }, "severity": "MEDIUM", "description": "Special symbols not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0456", + "referenceId": "AWS.Iam.IAM.Medium.0456", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json index dae9c9fb6..a05fa6b48 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireUpperCase", + "name": "passwordRequireUpperCase", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireUpperCase", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireUpperCase", "prefix": "", "required_parameter": "require_uppercase_characters" }, "severity": "MEDIUM", "description": "Upper case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0457", + "referenceId": "AWS.Iam.IAM.Medium.0457", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json index 46c09c988..f7099f01a 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json @@ -1,8 +1,7 @@ { - "ruleName": "passwordRequireMinLength14", + "name": "passwordRequireMinLength14", "file": "passwordMinLength.rego", - "ruleTemplate": "passwordRequireMinLength14", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireMinLength14", "parameter": "minimum_password_length", "prefix": "", @@ -10,7 +9,7 @@ }, "severity": "MEDIUM", "description": "Setting a lengthy password increases account resiliency against brute force login attempts", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0458", + "referenceId": "AWS.Iam.IAM.Medium.0458", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json index b5aa69d69..6247989fd 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json @@ -1,8 +1,7 @@ { - "ruleName": "passwordRequireMinLength", + "name": "passwordRequireMinLength", "file": "passwordMinLength.rego", - "ruleTemplate": "passwordRequireMinLength", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireMinLength", "parameter": "minimum_password_length", "prefix": "", @@ -10,7 +9,7 @@ }, "severity": "MEDIUM", "description": "Setting a lengthy password increases account resiliency against brute force login attempts", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0495", + "referenceId": "AWS.Iam.IAM.Medium.0495", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json index ece948f53..04116e916 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json +++ b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json @@ -1,13 +1,12 @@ { - "ruleName": "iamGrpPolicyWithFullAdminCntrl", + "name": "iamGrpPolicyWithFullAdminCntrl", "file": "iamGrpPolicyWithFullAdminCntrl.rego", - "ruleTemplate": "iamGrpPolicyWithFullAdminCntrl", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", - "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "referenceId": "AWS.IamPolicy.IAM.High.0392", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json index d0f99213c..1680636e0 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json +++ b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_iamPolicyWithFullAdminControl", + "name": "reme_iamPolicyWithFullAdminControl", "file": "iamPolicyWithFullAdminControl.rego", - "ruleTemplate": "iamPolicyWithFullAdminControl", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "HIGH", "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", - "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "referenceId": "AWS.IamPolicy.IAM.High.0392", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json index 706366e9f..cdee7d311 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json @@ -1,13 +1,12 @@ { - "ruleName": "rootUserNotContainMfaTypeHardware", + "name": "rootUserNotContainMfaTypeHardware", "file": "rootUserNotContainMfaTypeHardware.rego", - "ruleTemplate": "rootUserNotContainMfaTypeHardware", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Ensure Hardware MFA device is enabled for the \"root\" account", - "ruleReferenceId": "AWS.IamUser.IAM.High.0387", + "referenceId": "AWS.IamUser.IAM.High.0387", "category": "Identity and Access Management", - "version": 0 + "version": 1 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json index 31623a9b5..4a6c74079 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json @@ -1,13 +1,12 @@ { - "ruleName": "rootUserNotContainMfaTypeVirtual", + "name": "rootUserNotContainMfaTypeVirtual", "file": "rootUserNotContainMfaTypeVirtual.rego", - "ruleTemplate": "rootUserNotContainMfaTypeVirtual", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Ensure Virtual MFA device is enabled for the \"root\" account", - "ruleReferenceId": "AWS.IamUser.IAM.High.0388", + "referenceId": "AWS.IamUser.IAM.High.0388", "category": "Identity and Access Management", - "version": 0 + "version": 1 } \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json index 9b05b3442..c38981ff7 100755 --- a/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json +++ b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json @@ -1,13 +1,12 @@ { - "ruleName": "instanceWithNoVpc", + "name": "instanceWithNoVpc", "file": "instanceWithNoVpc.rego", - "ruleTemplate": "instanceWithNoVpc", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Instance should be configured in vpc. AWS VPCs provides the controls to facilitate a formal process for approving and testing all network connections and changes to the firewall and router configurations.", - "ruleReferenceId": "AWS.Instance.NetworkSecurity.Medium.0506", + "referenceId": "AWS.Instance.NetworkSecurity.Medium.0506", "category": "Network Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json index 2d4242f06..05a0a7ba5 100755 --- a/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json +++ b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json @@ -1,13 +1,12 @@ { - "ruleName": "kinesisNotEncryptedWithKms", + "name": "kinesisNotEncryptedWithKms", "file": "aws_kinesis_stream.rego", - "ruleTemplate": "kinesisNotEncryptedWithKms", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Kinesis Streams and metadata are not protected", - "ruleReferenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0412", + "referenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0412", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json index b4074468f..8a7440114 100755 --- a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json +++ b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json @@ -1,13 +1,12 @@ { - "ruleName": "kmsKeyRotationDisabled", + "name": "kmsKeyRotationDisabled", "file": "kmsKeyRotationDisabled.rego", - "ruleTemplate": "kmsKeyRotationDisabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Ensure rotation for customer created CMKs is enabled", - "ruleReferenceId": "AWS.KMS.Logging.High.0400", + "referenceId": "AWS.KMS.Logging.High.0400", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json index d5c4b6dd5..c72437096 100755 --- a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json @@ -1,13 +1,12 @@ { - "ruleName": "hardCodedKey", + "name": "hardCodedKey", "file": "hardCodedKey.rego", - "ruleTemplate": "hardCodedKey", - "ruleTemplateArgs": { + 
"templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Avoid using base64 encoded private keys as part of config", - "ruleReferenceId": "AWS.LaunchConfiguration.DataSecurity.High.0102", + "referenceId": "AWS.LaunchConfiguration.DataSecurity.High.0102", "category": "Data Security", "version": 1 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json deleted file mode 100755 index a758e0b44..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "noS3BucketSseRules", - "file": "noS3BucketSseRules.rego", - "ruleTemplate": "noS3BucketSseRules", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "Ensure that S3 Buckets have server side encryption at rest enabled to protect sensitive data.", - "ruleReferenceId": "AWS.S3Bucket.EncryptionandKeyManagement.High.0405", - "category": "Encryption and Key Management", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json deleted file mode 100755 index 1a38cf231..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "s3VersioningMfaFalse", - "file": "s3VersioningMfaFalse.rego", - "ruleTemplate": "s3VersioningMfaFalse", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "Enabling MFA delete for versioning is a good way to add extra protection to sensitive files stored in buckets.aws s3api put-bucket-versioning --bucket bucketname --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa your-mfa-serial-number mfa-code", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0370", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json deleted file mode 100755 index 4a008b21e..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "allUsersReadAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "allUsersReadAccess", - "ruleTemplateArgs": { - "access": "public-read", - "name": "allUsersReadAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0377", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json deleted file mode 100755 index b9b8584ed..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "authUsersReadAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "authUsersReadAccess", - "ruleTemplateArgs": { - "access": "authenticated-read", - "name": "authUsersReadAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to 
the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0378", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json deleted file mode 100755 index a8286931b..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "allUsersWriteAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "allUsersWriteAccess", - "ruleTemplateArgs": { - "access": "public-read-write", - "name": "allUsersWriteAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0379", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json deleted file mode 100755 index e413dd20e..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "allUsersReadWriteAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "allUsersReadWriteAccess", - "ruleTemplateArgs": { - "access": "public-read-write", - "name": "allUsersReadWriteAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0381", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json deleted file mode 100755 index 1bc2de912..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "s3BucketNoWebsiteIndexDoc", - "file": "s3BucketNoWebsiteIndexDoc.rego", - "ruleTemplate": "s3BucketNoWebsiteIndexDoc", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "Ensure that there are not any static websites being hosted on buckets you aren't aware of", - "ruleReferenceId": "AWS.S3Bucket.NetworkSecurity.High.0417", - "category": "Network Security", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego deleted file mode 100755 index 2661fa0a9..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego +++ /dev/null @@ -1,9 +0,0 @@ -package accurics - -{{.prefix}}noS3BucketSseRules[retVal] { - bucket := input.aws_s3_bucket[_] - bucket.config.server_side_encryption_configuration == [] - rc = "ewogICJzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24iOiB7CiAgICAicnVsZSI6IHsKICAgICAgImFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCI6IHsKICAgICAgICAic3NlX2FsZ29yaXRobSI6ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0=" - traverse = "" - retVal := { "Id": bucket.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "server_side_encryption_configuration", "AttributeDataType": "base64", "Expected": 
rc, "Actual": null } -} diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego deleted file mode 100755 index fc83f4a0f..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego +++ /dev/null @@ -1,8 +0,0 @@ -package accurics - -{{.prefix}}{{.name}}[retVal] { - bucket := input.aws_s3_bucket[_] - bucket.config.acl == "{{.access}}" - traverse = "acl" - retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "acl", "AttributeDataType": "string", "Expected": "private", "Actual": bucket.config.acl } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego deleted file mode 100755 index 7ee714f1e..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego +++ /dev/null @@ -1,8 +0,0 @@ -package accurics - -{{.prefix}}s3BucketNoWebsiteIndexDoc[retVal] { - bucket := input.aws_s3_bucket[_] - count(bucket.config.website) > 0 - traverse = "website" - retVal := { "Id": bucket.id, "ReplaceType": "delete", "CodeType": "block", "Traverse": traverse, "Attribute": "website", "AttributeDataType": "block", "Expected": null, "Actual": null } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego deleted file mode 100755 index d2c28b5b5..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego +++ /dev/null @@ -1,10 +0,0 @@ -package accurics - -{{.prefix}}s3VersioningMfaFalse[retVal] { - bucket := input.aws_s3_bucket[_] - some i - mfa := bucket.config.versioning[i] - mfa.mfa_delete == false - traverse := sprintf("versioning[%d].mfa_delete", [i]) - retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "versioning.mfa_delete", "AttributeDataType": "bool", "Expected": true, "Actual": mfa.mfa_delete } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json index 2f36ad689..497176f1a 100755 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json @@ -1,15 +1,14 @@ { - "ruleName": "allowListActionFromAllPrncpls", + "name": "allowListActionFromAllPrncpls", "file": "actionsFromAllPrincipals.rego", - "ruleTemplate": "allowListActionFromAllPrncpls", - "ruleTemplateArgs": { + "templateArgs": { "Action": "s3:List", "name": "allowListActionFromAllPrncpls", "prefix": "" }, "severity": "HIGH", "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.IamPolicy.IAM.High.0374", + "referenceId": "AWS.IamPolicy.IAM.High.0374", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json index e6cf3edc3..23beecd9f 100755 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json +++ 
b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json @@ -1,13 +1,12 @@ { - "ruleName": "allowActionsFromAllPrincipals", + "name": "allowActionsFromAllPrincipals", "file": "allowActionsFromAllPrincipals.rego", - "ruleTemplate": "allowActionsFromAllPrincipals", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0371", + "referenceId": "AWS.S3Bucket.IAM.High.0371", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json index bf5b87e94..d56acd755 100755 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json @@ -1,15 +1,14 @@ { - "ruleName": "allowDeleteActionFromAllPrncpls", + "name": "allowDeleteActionFromAllPrncpls", "file": "actionsFromAllPrincipals.rego", - "ruleTemplate": "allowDeleteActionFromAllPrncpls", - "ruleTemplateArgs": { + "templateArgs": { "Action": "s3:Delete", "name": "allowDeleteActionFromAllPrncpls", "prefix": "" }, "severity": "HIGH", "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0372", + "referenceId": "AWS.S3Bucket.IAM.High.0372", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json index e53f8c6d9..bc83db676 100755 --- a/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json +++ b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json @@ -1,13 +1,12 @@ { - "ruleName": "unrestrictedIngressAccess", + "name": "unrestrictedIngressAccess", "file": "unrestrictedIngressAccess.rego", - "ruleTemplate": "unrestrictedIngressAccess", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": " It is recommended that no security group allows unrestricted ingress access", - "ruleReferenceId": "AWS.SecurityGroup.NetworkSecurity.High.0094", + "referenceId": "AWS.SecurityGroup.NetworkSecurity.High.0094", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json index d44940e99..cae0c8b4b 100755 --- a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json @@ -1,13 +1,12 @@ { - "ruleName": "vpcFlowLogsNotEnabled", + "name": "vpcFlowLogsNotEnabled", "file": "vpcFlowLogsNotEnabled.rego", - "ruleTemplate": "vpcFlowLogsNotEnabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Ensure VPC flow logging is enabled in all VPCs", - "ruleReferenceId": "AWS.VPC.Logging.Medium.0470", + "referenceId": "AWS.VPC.Logging.Medium.0470", "category": "Logging", 
"version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json index 8c22bcc45..d731e86b3 100755 --- a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json @@ -1,13 +1,12 @@ { - "ruleName": "defaultVpcExist", + "name": "defaultVpcExist", "file": "defaultVpcExist.rego", - "ruleTemplate": "defaultVpcExist", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Avoid creating resources in default VPC", - "ruleReferenceId": "AWS.VPC.Logging.Medium.0471", + "referenceId": "AWS.VPC.Logging.Medium.0471", "category": "Logging", "version": 1 } \ No newline at end of file diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index c74a79886..30162b724 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,22 +16,11 @@ package policy -// Manager Policy Manager interface -type Manager interface { - Import() error - Export() error - CreateManager() error -} - // Engine Policy Engine interface type Engine interface { Init(string) error Configure() error Evaluate(EngineInput) (EngineOutput, error) - GetResults() error + GetResults() EngineOutput Release() error } - -// EngineFactory creates policy engine instances based on iac/cloud type -type EngineFactory struct { -} diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index f490d1ad5..9d6e23304 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -28,6 +28,9 @@ import ( "sort" "strings" "text/template" + "time" + + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/policy" @@ -112,8 +115,8 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { return fmt.Errorf("no directories found for path %s", policyPath) } - e.RegoFileMap = make(map[string][]byte) - e.RegoDataMap = make(map[string]*RegoData) + e.regoFileMap = make(map[string][]byte) + e.regoDataMap = make(map[string]*RegoData) // Load rego data files from each dir // First, we read the metadata file, which contains info about the associated rego rule. 
The .rego file data is @@ -157,7 +160,7 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { } // Read in raw rego data from associated rego files - if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.RegoFileMap); err != nil { + if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.regoFileMap); err != nil { zap.S().Debug("error loading raw rego data", zap.String("dir", dirList[i])) continue } @@ -171,23 +174,23 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { // Apply templates if available var templateData bytes.Buffer t := template.New("opa") - _, err = t.Parse(string(e.RegoFileMap[templateFile])) + _, err = t.Parse(string(e.regoFileMap[templateFile])) if err != nil { - zap.S().Debug("unable to parse template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + zap.S().Debug("unable to parse template", zap.String("template", regoDataList[j].Metadata.File)) continue } - if err = t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs); err != nil { - zap.S().Debug("unable to execute template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + if err = t.Execute(&templateData, regoDataList[j].Metadata.TemplateArgs); err != nil { + zap.S().Debug("unable to execute template", zap.String("template", regoDataList[j].Metadata.File)) continue } regoDataList[j].RawRego = templateData.Bytes() - e.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] + e.regoDataMap[regoDataList[j].Metadata.Name] = regoDataList[j] } } - e.stats.ruleCount = len(e.RegoDataMap) - e.stats.regoFileCount = len(e.RegoFileMap) + e.stats.ruleCount = len(e.regoDataMap) + e.stats.regoFileCount = len(e.regoFileMap) zap.S().Debugf("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataFileCount) return err @@ -195,30 +198,30 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { // CompileRegoFiles Compiles rego files for faster evaluation func (e *Engine) CompileRegoFiles() error { - for k := range e.RegoDataMap { + for k := range e.regoDataMap { compiler, err := ast.CompileModules(map[string]string{ - e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), + e.regoDataMap[k].Metadata.Name: string(e.regoDataMap[k].RawRego), }) if err != nil { - zap.S().Error("error compiling rego files", zap.String("rule", e.RegoDataMap[k].Metadata.RuleName), - zap.String("raw rego", string(e.RegoDataMap[k].RawRego)), zap.Error(err)) + zap.S().Error("error compiling rego files", zap.String("rule", e.regoDataMap[k].Metadata.Name), + zap.String("raw rego", string(e.regoDataMap[k].RawRego)), zap.Error(err)) return err } r := rego.New( - rego.Query(RuleQueryBase+"."+e.RegoDataMap[k].Metadata.RuleName), + rego.Query(RuleQueryBase+"."+e.regoDataMap[k].Metadata.Name), rego.Compiler(compiler), ) // Create a prepared query that can be evaluated. 
- query, err := r.PrepareForEval(e.Context) + query, err := r.PrepareForEval(e.context) if err != nil { - zap.S().Error("error creating prepared query", zap.String("rule", e.RegoDataMap[k].Metadata.RuleName), - zap.String("raw rego", string(e.RegoDataMap[k].RawRego)), zap.Error(err)) + zap.S().Error("error creating prepared query", zap.String("rule", e.regoDataMap[k].Metadata.Name), + zap.String("raw rego", string(e.regoDataMap[k].RawRego)), zap.Error(err)) return err } - e.RegoDataMap[k].PreparedQuery = &query + e.regoDataMap[k].PreparedQuery = &query } return nil @@ -227,7 +230,7 @@ func (e *Engine) CompileRegoFiles() error { // Init initializes the Opa engine // Handles loading all rules, filtering, compiling, and preparing for evaluation func (e *Engine) Init(policyPath string) error { - e.Context = context.Background() + e.context = context.Background() if err := e.LoadRegoFiles(policyPath); err != nil { zap.S().Error("error loading rego files", zap.String("policy path", policyPath)) @@ -241,7 +244,7 @@ func (e *Engine) Init(policyPath string) error { } // initialize ViolationStore - e.Results.ViolationStore = results.NewViolationStore() + e.results.ViolationStore = results.NewViolationStore() return nil } @@ -252,8 +255,8 @@ func (e *Engine) Configure() error { } // GetResults Fetches results from OPA engine policy evaluation -func (e *Engine) GetResults() error { - return nil +func (e *Engine) GetResults() policy.EngineOutput { + return e.results } // Release Performs any tasks required to free resources @@ -261,60 +264,106 @@ func (e *Engine) Release() error { return nil } -// Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, error) { +// reportViolation Add a violation for a given resource +func (e *Engine) reportViolation(regoData *RegoData, resource *output.ResourceConfig) { + violation := results.Violation{ + RuleName: regoData.Metadata.Name, + Description: regoData.Metadata.Description, + RuleID: regoData.Metadata.ReferenceID, + Severity: regoData.Metadata.Severity, + Category: regoData.Metadata.Category, + RuleData: regoData.RawRego, + ResourceName: resource.Name, + ResourceType: resource.Type, + ResourceData: resource.Config, + File: resource.Source, + LineNumber: resource.Line, + } - sortedKeys := make([]string, len(e.RegoDataMap)) - x := 0 - for k := range e.RegoDataMap { - sortedKeys[x] = k - x++ + severity := regoData.Metadata.Severity + if strings.ToLower(severity) == "high" { + e.results.ViolationStore.Count.HighCount++ + } else if strings.ToLower(severity) == "medium" { + e.results.ViolationStore.Count.MediumCount++ + } else if strings.ToLower(severity) == "low" { + e.results.ViolationStore.Count.LowCount++ + } else { + zap.S().Warn("invalid severity found in rule definition", + zap.String("rule id", violation.RuleID), zap.String("severity", severity)) } - sort.Strings(sortedKeys) + e.results.ViolationStore.Count.TotalCount++ + + e.results.ViolationStore.AddResult(&violation) +} + +// Evaluate Executes compiled OPA queries against the input JSON data +func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, error) { + // Keep track of how long it takes to evaluate the policies + start := time.Now() - for _, k := range sortedKeys { + // Evaluate the policy against each resource type + for k := range e.regoDataMap { // Execute the prepared query. 
- rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(engineInput.InputData)) - // rs, err := r.Eval(o.Context) + rs, err := e.regoDataMap[k].PreparedQuery.Eval(e.context, rego.EvalInput(engineInput.InputData)) if err != nil { zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'")) continue } - if len(rs) > 0 { - res := rs[0].Expressions[0].Value.([]interface{}) - if len(res) > 0 { - // @TODO: Take line number + file info and add to violation - regoData := e.RegoDataMap[k] - violation := results.Violation{ - Name: regoData.Metadata.RuleName, - Description: regoData.Metadata.Description, - RuleID: regoData.Metadata.RuleReferenceID, - Severity: regoData.Metadata.Severity, - Category: regoData.Metadata.Category, - RuleData: regoData.RawRego, - InputFile: "", - InputData: res, - LineNumber: 0, + if len(rs) == 0 || len(rs[0].Expressions) == 0 { + continue + } + + resourceViolations := rs[0].Expressions[0].Value.([]interface{}) + if len(resourceViolations) == 0 { + continue + } + + // Report a violation for each resource returned by the policy evaluation + for i := range resourceViolations { + var resourceID string + + // The return values come in two categories--either a map[string]interface{} type, where the "Id" key + // contains the resource ID, or a string type which is the resource ID. This resource ID is where a + // violation was found + switch res := resourceViolations[i].(type) { + case map[string]interface{}: + _, ok := res["Id"] + if !ok { + zap.S().Warn("no Id key found in resource map", zap.Any("resource", res)) + continue } - severity := regoData.Metadata.Severity - if strings.ToLower(severity) == "high" { - e.Results.ViolationStore.Count.HighCount++ - } else if strings.ToLower(severity) == "medium" { - e.Results.ViolationStore.Count.MediumCount++ - } else if strings.ToLower(severity) == "low" { - e.Results.ViolationStore.Count.LowCount++ - } else { - zap.S().Warn("invalid severity found in rule definition", - zap.String("rule id", violation.RuleID), zap.String("severity", severity)) + _, ok = res["Id"].(string) + if !ok { + zap.S().Warn("id key was invalid", zap.Any("resource", res)) + continue } - e.Results.ViolationStore.Count.TotalCount++ - e.Results.ViolationStore.AddResult(&violation) + resourceID = res["Id"].(string) + case string: + resourceID = res + default: + zap.S().Warn("resource ID format was invalid", zap.Any("resource", res)) continue } + + // Locate the resource details within the input map + var resource *output.ResourceConfig + resource, err = utils.FindResourceByID(resourceID, engineInput.InputData) + if err != nil { + zap.S().Error(err) + continue + } + if resource == nil { + zap.S().Warn("resource was not found", zap.String("resource id", resourceID)) + continue + } + + // Report the violation + e.reportViolation(e.regoDataMap[k], resource) } } - return e.Results, nil + e.stats.runTime = time.Since(start) + return e.results, nil } diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go index 980d3b53e..d4aea3d6e 100644 --- a/pkg/policy/opa/types.go +++ b/pkg/policy/opa/types.go @@ -18,6 +18,7 @@ package opa import ( "context" + "time" "github.com/accurics/terrascan/pkg/policy" @@ -26,15 +27,14 @@ import ( // RegoMetadata The rego metadata struct which is read and saved from disk type RegoMetadata struct { - RuleName string `json:"ruleName"` - File string `json:"file"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` - Severity string 
`json:"severity"` - Description string `json:"description"` - RuleReferenceID string `json:"ruleReferenceId"` - Category string `json:"category"` - Version int `json:"version"` + Name string `json:"name"` + File string `json:"file"` + TemplateArgs map[string]interface{} `json:"templateArgs"` + Severity string `json:"severity"` + Description string `json:"description"` + ReferenceID string `json:"referenceId"` + Category string `json:"category"` + Version int `json:"version"` } // RegoData Stores all information needed to evaluate and report on a rego rule @@ -50,13 +50,14 @@ type EngineStats struct { regoFileCount int metadataFileCount int metadataCount int + runTime time.Duration } // Engine Implements the policy engine interface type Engine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - Results policy.EngineOutput + results policy.EngineOutput + context context.Context + regoFileMap map[string][]byte + regoDataMap map[string]*RegoData stats EngineStats } diff --git a/pkg/results/types.go b/pkg/results/types.go index 3cac12d18..a2600e23c 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -18,15 +18,17 @@ package results // Violation Contains data for each violation type Violation struct { - Name string `json:"name" yaml:"name" xml:"name,attr"` - Description string `json:"description" yaml:"description" xml:"description,attr"` - RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` - Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` - Category string `json:"category" yaml:"category" xml:"category,attr"` - RuleData interface{} `json:"-" yaml:"-" xml:"-"` - InputFile string `json:"-" yaml:"-" xml:"-"` - InputData interface{} `json:"-" yaml:"-" xml:"-"` - LineNumber int `json:"line" yaml:"line" xml:"line,attr"` + RuleName string `json:"ruleName" yaml:"ruleName" xml:"ruleName,attr"` + Description string `json:"description" yaml:"description" xml:"description,attr"` + RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` + Category string `json:"category" yaml:"category" xml:"category,attr"` + RuleData interface{} `json:"-" yaml:"-" xml:"-"` + ResourceName string `json:"resourceName" yaml:"resourceName" xml:"resourceName,attr"` + ResourceType string `json:"resourceType" yaml:"resourceType" xml:"resourceType,attr"` + ResourceData interface{} `json:"-" yaml:"-" xml:"-"` + File string `json:"-" yaml:"-" xml:"-"` + LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } // ViolationStats Contains stats related to the violation data diff --git a/pkg/utils/resource.go b/pkg/utils/resource.go new file mode 100644 index 000000000..2d271e493 --- /dev/null +++ b/pkg/utils/resource.go @@ -0,0 +1,35 @@ +package utils + +import ( + "fmt" + "strings" + + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + +// FindResourceByID Finds a given resource within the resource map and returns a reference to that resource +func FindResourceByID(resourceID string, normalizedResources *output.AllResourceConfigs) (*output.ResourceConfig, error) { + resTypeName := strings.Split(resourceID, ".") + if len(resTypeName) < 2 { + return nil, fmt.Errorf("resource ID has an invalid format %s", resourceID) + } + + resourceType := resTypeName[0] + + found := false + var resource output.ResourceConfig + resourceTypeList := (*normalizedResources)[resourceType] + for i := range resourceTypeList { + if resourceTypeList[i].ID == resourceID { + resource = 
resourceTypeList[i] + found = true + break + } + } + + if !found { + return nil, nil + } + + return &resource, nil +} From ef820ea837601a783f6219b325447f6d4c2a6458 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Thu, 13 Aug 2020 00:28:39 -0700 Subject: [PATCH 139/188] fix file name not showing in output also adds resource data in the violation --- pkg/policy/opa/engine.go | 1 + pkg/results/types.go | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 9d6e23304..52ee00f25 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -272,6 +272,7 @@ func (e *Engine) reportViolation(regoData *RegoData, resource *output.ResourceCo RuleID: regoData.Metadata.ReferenceID, Severity: regoData.Metadata.Severity, Category: regoData.Metadata.Category, + RuleFile: regoData.Metadata.File, RuleData: regoData.RawRego, ResourceName: resource.Name, ResourceType: resource.Type, diff --git a/pkg/results/types.go b/pkg/results/types.go index a2600e23c..4664b258b 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -23,11 +23,12 @@ type Violation struct { RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` + RuleFile string `json:"ruleFile" yaml:"ruleFile" xml:"ruleFile,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` ResourceName string `json:"resourceName" yaml:"resourceName" xml:"resourceName,attr"` ResourceType string `json:"resourceType" yaml:"resourceType" xml:"resourceType,attr"` - ResourceData interface{} `json:"-" yaml:"-" xml:"-"` - File string `json:"-" yaml:"-" xml:"-"` + ResourceData interface{} `json:"resourceData" yaml:"resourceData" xml:"resourceData,attr"` + File string `json:"file" yaml:"file" xml:"file,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } From 59203de7b31b6921ae4c87015f991b814eb1a34e Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 12:58:16 +0530 Subject: [PATCH 140/188] fix file paths for terraform config dir --- pkg/iac-providers/terraform/v12/load-dir.go | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index b0ae7bc80..1002f18df 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -17,17 +17,17 @@ package tfv12 import ( + "bytes" "fmt" "path/filepath" "strings" + "github.com/accurics/terrascan/pkg/iac-providers/output" version "github.com/hashicorp/go-version" "github.com/hashicorp/hcl/v2" hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" "go.uber.org/zap" - - "github.com/accurics/terrascan/pkg/iac-providers/output" ) var ( @@ -122,8 +122,8 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour return allResourcesConfig, fmt.Errorf("failed to create ResourceConfig") } - // append resource config to list of all resources - // allResourcesConfig = append(allResourcesConfig, resourceConfig) + // trimFilePath + resourceConfig.Source = trimFilePath(resourceConfig.Source, absRootDir) // append to normalized output if _, present := allResourcesConfig[resourceConfig.Type]; !present { @@ -142,3 +142,10 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour // successful return allResourcesConfig, nil } + +// trimFilePath returns relative file 
path wrt to the base path +func trimFilePath(fullPath, basePath string) string { + basePath = strings.TrimSuffix(basePath, "/") + splits := bytes.Split([]byte(fullPath), []byte(basePath)) + return strings.TrimPrefix(string(splits[1]), "/") +} From 81cc9ddeac1831f671d244cc25ae0075d6c5951f Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 13:28:38 +0530 Subject: [PATCH 141/188] fixing output json, yaml tags --- pkg/policy/types.go | 2 +- pkg/results/types.go | 12 ++++++------ pkg/writer/xml.go | 7 ++++++- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pkg/policy/types.go b/pkg/policy/types.go index f16ddae3c..97f01fd2e 100644 --- a/pkg/policy/types.go +++ b/pkg/policy/types.go @@ -12,5 +12,5 @@ type EngineInput struct { // EngineOutput Contains data output from the engine type EngineOutput struct { - *results.ViolationStore + *results.ViolationStore `json:"results" yaml:"results" xml:"results,attr"` } diff --git a/pkg/results/types.go b/pkg/results/types.go index 4664b258b..e9b99ed59 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -34,14 +34,14 @@ type Violation struct { // ViolationStats Contains stats related to the violation data type ViolationStats struct { - LowCount int `json:"low"` - MediumCount int `json:"medium"` - HighCount int `json:"high"` - TotalCount int `json:"total"` + LowCount int `json:"low" yaml:"low" xml:"low,attr"` + MediumCount int `json:"medium" yaml:"medium" xml:"medium,attr"` + HighCount int `json:"high" yaml:"high" xml:"high,attr"` + TotalCount int `json:"total" yaml:"total" xml:"total,attr"` } // ViolationStore Storage area for violation data type ViolationStore struct { - Violations []*Violation `json:"violations"` - Count ViolationStats `json:"count"` + Violations []*Violation `json:"violations" yaml:"violations" xml:"violations,attr"` + Count ViolationStats `json:"count" yaml:"count" xml:"count,attr"` } diff --git a/pkg/writer/xml.go b/pkg/writer/xml.go index 2d868f5a9..c86e451bb 100644 --- a/pkg/writer/xml.go +++ b/pkg/writer/xml.go @@ -21,6 +21,7 @@ import ( "io" "github.com/accurics/terrascan/pkg/policy" + "go.uber.org/zap" ) const ( @@ -33,7 +34,11 @@ func init() { // XMLWriter prints data in XML format func XMLWriter(data policy.EngineOutput, writer io.Writer) error { - j, _ := xml.MarshalIndent(data, "", " ") + j, err := xml.MarshalIndent(data, "", " ") + if err != nil { + zap.S().Errorf("failed to write XML output. 
error: '%v'", err) + return err + } writer.Write(j) writer.Write([]byte{'\n'}) return nil From 5ed6e6964840f12244026b12306c238f101a4d74 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 14:55:50 +0530 Subject: [PATCH 142/188] fix iac file path --- pkg/iac-providers/terraform/v12/load-file.go | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index 7f4e3c2d2..07af9a928 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -18,12 +18,12 @@ package tfv12 import ( "fmt" + "path/filepath" + "github.com/accurics/terrascan/pkg/iac-providers/output" hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" "go.uber.org/zap" - - "github.com/accurics/terrascan/pkg/iac-providers/output" ) var ( @@ -58,8 +58,8 @@ func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllReso return allResourcesConfig, fmt.Errorf("failed to create ResourceConfig") } - // append resource config to list of all resources - // allResourcesConfig = append(allResourcesConfig, resourceConfig) + // extract file name from path + resourceConfig.Source = getFileName(resourceConfig.Source) // append to normalized output if _, present := allResourcesConfig[resourceConfig.Type]; !present { @@ -72,3 +72,9 @@ func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllReso // successful return allResourcesConfig, nil } + +// getFileName return file name from the given file path +func getFileName(path string) string { + _, file := filepath.Split(path) + return file +} From 34fa4266d00518fd7e3e795d3e4c20229655c413 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 15:09:48 +0530 Subject: [PATCH 143/188] change json, yaml tags from camel case to underscore notation --- pkg/results/types.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/results/types.go b/pkg/results/types.go index e9b99ed59..5bfda9544 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -18,16 +18,16 @@ package results // Violation Contains data for each violation type Violation struct { - RuleName string `json:"ruleName" yaml:"ruleName" xml:"ruleName,attr"` + RuleName string `json:"rule_name" yaml:"rule_name" xml:"rule_name,attr"` Description string `json:"description" yaml:"description" xml:"description,attr"` RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` - RuleFile string `json:"ruleFile" yaml:"ruleFile" xml:"ruleFile,attr"` + RuleFile string `json:"rule_file" yaml:"rule_file" xml:"rule_file,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` - ResourceName string `json:"resourceName" yaml:"resourceName" xml:"resourceName,attr"` - ResourceType string `json:"resourceType" yaml:"resourceType" xml:"resourceType,attr"` - ResourceData interface{} `json:"resourceData" yaml:"resourceData" xml:"resourceData,attr"` + ResourceName string `json:"resource_name" yaml:"resource_name" xml:"resource_name,attr"` + ResourceType string `json:"resource_type" yaml:"resource_type" xml:"resource_type,attr"` + ResourceData interface{} `json:"resource_data" yaml:"resource_data" xml:"resource_data,attr"` File string `json:"file" yaml:"file" xml:"file,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } From 
ab2f8d70ee67661197fc5ec43b00d44188b643bb Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 15:17:42 +0530 Subject: [PATCH 144/188] fix TestFindAllDirectories unit tests --- pkg/utils/path_test.go | 15 ++++++++------- pkg/utils/testdata/emptydir/somefile.txt | 1 + pkg/utils/testdata/testdir1/somefile.txt | 1 + pkg/utils/testdata/testdir2/somefile.txt | 1 + 4 files changed, 11 insertions(+), 7 deletions(-) create mode 100644 pkg/utils/testdata/emptydir/somefile.txt create mode 100644 pkg/utils/testdata/testdir1/somefile.txt create mode 100644 pkg/utils/testdata/testdir2/somefile.txt diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go index 5b47d4076..2344c50e7 100644 --- a/pkg/utils/path_test.go +++ b/pkg/utils/path_test.go @@ -17,7 +17,6 @@ package utils import ( - "fmt" "os" "reflect" "testing" @@ -90,12 +89,6 @@ func TestFindAllDirectories(t *testing.T) { want: []string{"./testdata/emptydir"}, wantErr: nil, }, - { - name: "invalid dir", - basePath: "./testdata/nothere", - want: []string{}, - wantErr: fmt.Errorf("lstat ./testdata/nothere: no such file or directory"), - }, } for _, tt := range table { @@ -109,4 +102,12 @@ func TestFindAllDirectories(t *testing.T) { } }) } + + t.Run("invalid dir", func(t *testing.T) { + basePath := "./testdata/nothere" + _, gotErr := FindAllDirectories(basePath) + if gotErr == nil { + t.Errorf("got no error; error expected") + } + }) } diff --git a/pkg/utils/testdata/emptydir/somefile.txt b/pkg/utils/testdata/emptydir/somefile.txt new file mode 100644 index 000000000..ebf038b91 --- /dev/null +++ b/pkg/utils/testdata/emptydir/somefile.txt @@ -0,0 +1 @@ +somefile diff --git a/pkg/utils/testdata/testdir1/somefile.txt b/pkg/utils/testdata/testdir1/somefile.txt new file mode 100644 index 000000000..ebf038b91 --- /dev/null +++ b/pkg/utils/testdata/testdir1/somefile.txt @@ -0,0 +1 @@ +somefile diff --git a/pkg/utils/testdata/testdir2/somefile.txt b/pkg/utils/testdata/testdir2/somefile.txt new file mode 100644 index 000000000..ebf038b91 --- /dev/null +++ b/pkg/utils/testdata/testdir2/somefile.txt @@ -0,0 +1 @@ +somefile From dc1508139ecdf8984c93e387a828103b652571a0 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 15:59:17 +0530 Subject: [PATCH 145/188] fix terraform v12 unit tests --- pkg/iac-providers/terraform/v12/load-dir.go | 3 +- .../v12/testdata/tfjson/config1.json | 24 ++++++++---- .../v12/testdata/tfjson/dummyconfig.json | 12 ++++-- .../v12/testdata/tfjson/fullconfig.json | 24 ++++++++---- .../v12/testdata/tfjson/moduleconfigs.json | 39 ++++++++++++------- pkg/runtime/validate.go | 18 +++++---- 6 files changed, 78 insertions(+), 42 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index 1002f18df..4a983e8d6 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -145,7 +145,8 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour // trimFilePath returns relative file path wrt to the base path func trimFilePath(fullPath, basePath string) string { - basePath = strings.TrimSuffix(basePath, "/") + basePath = strings.Trim(basePath, ".") + basePath = strings.Trim(basePath, "/") splits := bytes.Split([]byte(fullPath), []byte(basePath)) return strings.TrimPrefix(string(splits[1]), "/") } diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json index e5bcee920..8c7f7cd8a 100644 --- 
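// ---------------------------------------------------------------------------
// Editor's note (illustrative sketch, not part of this patch): the trimFilePath
// helper changed above derives a path relative to the scanned root by trimming
// and splitting on the base path. A hedged standard-library alternative is
// path/filepath.Rel, shown below with hypothetical paths; its semantics differ
// slightly (it can return ".." segments when the file lies outside the base
// directory), so this is only a sketch, not a drop-in replacement.
// ---------------------------------------------------------------------------
package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	base := "/home/user/iac/testdata/tfconfigs" // hypothetical scan root
	full := "/home/user/iac/testdata/tfconfigs/config1.tf"

	rel, err := filepath.Rel(base, full)
	if err != nil {
		panic(err)
	}
	fmt.Println(rel) // config1.tf
}
// --------------------------- end editor's sketch ---------------------------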
a/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json @@ -3,7 +3,8 @@ { "id": "aws_instance.instance_playground", "name": "instance_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 78, "type": "aws_instance", "config": { "ami": "${lookup(var.aws_amis, var.aws_region)}", @@ -44,7 +45,8 @@ { "id": "aws_internet_gateway.igw_playground", "name": "igw_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 15, "type": "aws_internet_gateway", "config": { "tags": { @@ -58,7 +60,8 @@ { "id": "aws_key_pair.ec2key_playground", "name": "ec2key_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 73, "type": "aws_key_pair", "config": { "key_name": "testKey", @@ -70,7 +73,8 @@ { "id": "aws_route_table.rtb_public_playground", "name": "rtb_public_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 31, "type": "aws_route_table", "config": { "route": [ @@ -90,7 +94,8 @@ { "id": "aws_route_table_association.rta_subnet_public_playground", "name": "rta_subnet_public_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 42, "type": "aws_route_table_association", "config": { "route_table_id": "${aws_route_table.rtb_public_playground.id}", @@ -102,7 +107,8 @@ { "id": "aws_security_group.sg_playground", "name": "sg_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 47, "type": "aws_security_group", "config": { "egress": [ @@ -145,7 +151,8 @@ { "id": "aws_subnet.subnet_public_playground", "name": "subnet_public_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 22, "type": "aws_subnet", "config": { "cidr_block": "${var.cidr_subnet}", @@ -161,7 +168,8 @@ { "id": "aws_vpc.vpc_playground", "name": "vpc_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 6, "type": "aws_vpc", "config": { "cidr_block": "${var.cidr_vpc}", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json index 0c7d8044a..bbf33b988 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json @@ -3,7 +3,8 @@ { "id": "terraform_remote_state.remote", "name": "remote", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 41, "type": "terraform_remote_state", "config": { "backend": "s3", @@ -20,7 +21,8 @@ { "id": "type1.resource1", "name": "resource1", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 1, "type": "type1", "config": { "arr": [ @@ -44,7 +46,8 @@ { "id": "type2.resource2", "name": "resource2", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 13, "type": "type2", "config": { "other": { @@ -64,7 +67,8 @@ { "id": "type3.resource3", "name": "resource3", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 26, "type": "type3", "config": { "cond": "${test3 \u003e 2 ? 
1: 0}", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json index 1a36646ca..8c7f7cd8a 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json @@ -3,7 +3,8 @@ { "id": "aws_instance.instance_playground", "name": "instance_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 78, "type": "aws_instance", "config": { "ami": "${lookup(var.aws_amis, var.aws_region)}", @@ -44,7 +45,8 @@ { "id": "aws_internet_gateway.igw_playground", "name": "igw_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 15, "type": "aws_internet_gateway", "config": { "tags": { @@ -58,7 +60,8 @@ { "id": "aws_key_pair.ec2key_playground", "name": "ec2key_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 73, "type": "aws_key_pair", "config": { "key_name": "testKey", @@ -70,7 +73,8 @@ { "id": "aws_route_table.rtb_public_playground", "name": "rtb_public_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 31, "type": "aws_route_table", "config": { "route": [ @@ -90,7 +94,8 @@ { "id": "aws_route_table_association.rta_subnet_public_playground", "name": "rta_subnet_public_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 42, "type": "aws_route_table_association", "config": { "route_table_id": "${aws_route_table.rtb_public_playground.id}", @@ -102,7 +107,8 @@ { "id": "aws_security_group.sg_playground", "name": "sg_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 47, "type": "aws_security_group", "config": { "egress": [ @@ -145,7 +151,8 @@ { "id": "aws_subnet.subnet_public_playground", "name": "subnet_public_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 22, "type": "aws_subnet", "config": { "cidr_block": "${var.cidr_subnet}", @@ -161,7 +168,8 @@ { "id": "aws_vpc.vpc_playground", "name": "vpc_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 6, "type": "aws_vpc", "config": { "cidr_block": "${var.cidr_vpc}", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json index 48a10dabf..65764ec73 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json @@ -3,7 +3,8 @@ { "id": "aws_cloudfront_distribution.s3-distribution-TLS-v1", "name": "s3-distribution-TLS-v1", - "source": "testdata/moduleconfigs/cloudfront/main.tf", + "source": "cloudfront/main.tf", + "line": 6, "type": "aws_cloudfront_distribution", "config": { "default_cache_behavior": [ @@ -130,7 +131,8 @@ { "id": "aws_cloudtrail.missing-multi-region", "name": "missing-multi-region", - "source": "testdata/moduleconfigs/cloudtrail/main.tf", + "source": "cloudtrail/main.tf", + "line": 1, "type": "aws_cloudtrail", "config": { "include_global_service_events": false, @@ -144,7 +146,8 @@ { "id": "aws_ecs_task_definition.instanceNotInVpc", "name": "instanceNotInVpc", - "source": "testdata/moduleconfigs/ecs/main.tf", + "source": "ecs/main.tf", + "line": 1, "type": "aws_ecs_task_definition", "config": { "container_definitions": "${file(\"ecs/service.json\")}", @@ -157,7 +160,8 @@ { "id": 
"aws_efs_file_system.efsNotEncrypted", "name": "efsNotEncrypted", - "source": "testdata/moduleconfigs/efs/main.tf", + "source": "efs/main.tf", + "line": 1, "type": "aws_efs_file_system", "config": { "creation_token": "my-product", @@ -171,7 +175,8 @@ { "id": "aws_elasticache_cluster.noMemcachedInElastiCache", "name": "noMemcachedInElastiCache", - "source": "testdata/moduleconfigs/elasticcache/main.tf", + "source": "elasticcache/main.tf", + "line": 1, "type": "aws_elasticache_cluster", "config": { "cluster_id": "cluster-example", @@ -187,7 +192,8 @@ { "id": "aws_guardduty_detector.gaurdDutyDisabled", "name": "gaurdDutyDisabled", - "source": "testdata/moduleconfigs/guardduty/main.tf", + "source": "guardduty/main.tf", + "line": 1, "type": "aws_guardduty_detector", "config": { "enable": false @@ -198,7 +204,8 @@ { "id": "aws_iam_access_key.noAccessKeyForRootAccount", "name": "noAccessKeyForRootAccount", - "source": "testdata/moduleconfigs/iam/main.tf", + "source": "iam/main.tf", + "line": 1, "type": "aws_iam_access_key", "config": { "pgp_key": "keybase:some_person_that_exists", @@ -211,7 +218,8 @@ { "id": "aws_kinesis_stream.kinesisEncryptedWithKms", "name": "kinesisEncryptedWithKms", - "source": "testdata/moduleconfigs/kinesis/main.tf", + "source": "kinesis/main.tf", + "line": 1, "type": "aws_kinesis_stream", "config": { "encryption_type": "KMS", @@ -233,7 +241,8 @@ { "id": "aws_kms_key.kmsKeyDisabled", "name": "kmsKeyDisabled", - "source": "testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf", + "source": "cloudfront/sub-cloudfront/main.tf", + "line": 1, "type": "aws_kms_key", "config": { "description": "KMS key 2", @@ -249,7 +258,8 @@ { "id": "aws_load_balancer_policy.elbWeakCipher", "name": "elbWeakCipher", - "source": "testdata/moduleconfigs/elb/main.tf", + "source": "elb/main.tf", + "line": 1, "type": "aws_load_balancer_policy", "config": { "load_balancer_name": "some-name", @@ -268,7 +278,8 @@ { "id": "aws_s3_bucket.noS3BucketSseRules", "name": "noS3BucketSseRules", - "source": "testdata/moduleconfigs/s3/main.tf", + "source": "s3/main.tf", + "line": 1, "type": "aws_s3_bucket", "config": { "acl": "private", @@ -284,7 +295,8 @@ { "id": "aws_security_group.acme_web", "name": "acme_web", - "source": "testdata/moduleconfigs/sg/main.tf", + "source": "sg/main.tf", + "line": 1, "type": "aws_security_group", "config": { "description": "Used in the terraform", @@ -322,7 +334,8 @@ { "id": "aws_sqs_queue.sqsQueueExposed", "name": "sqsQueueExposed", - "source": "testdata/moduleconfigs/sqs/main.tf", + "source": "sqs/main.tf", + "line": 1, "type": "aws_sqs_queue", "config": { "kms_data_key_reuse_period_seconds": 300, diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index 715fb2a85..fa9f343aa 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -37,6 +37,8 @@ var ( // ValidateInputs validates the inputs to the executor object func (e *Executor) ValidateInputs() error { + var err error + // terrascan can accept either a file or a directory if e.filePath == "" && e.dirPath == "" { zap.S().Errorf("no IaC path specified; use '-f' for file or '-d' for directory") @@ -49,29 +51,29 @@ func (e *Executor) ValidateInputs() error { if e.dirPath != "" { // if directory, check if directory exists - absDirPath, err := utils.GetAbsPath(e.dirPath) + e.dirPath, err = utils.GetAbsPath(e.dirPath) if err != nil { return err } - if _, err := os.Stat(absDirPath); err != nil { - zap.S().Errorf("directory '%s' does not exist", absDirPath) + if _, err := os.Stat(e.dirPath); err != nil { + 
zap.S().Errorf("directory '%s' does not exist", e.dirPath) return errDirNotExists } - zap.S().Debugf("directory '%s' exists", absDirPath) + zap.S().Debugf("directory '%s' exists", e.dirPath) } else { // if file path, check if file exists - absFilePath, err := utils.GetAbsPath(e.filePath) + e.filePath, err = utils.GetAbsPath(e.filePath) if err != nil { return err } - if _, err := os.Stat(absFilePath); err != nil { - zap.S().Errorf("file '%s' does not exist", absFilePath) + if _, err := os.Stat(e.filePath); err != nil { + zap.S().Errorf("file '%s' does not exist", e.filePath) return errFileNotExists } - zap.S().Debugf("file '%s' exists", absFilePath) + zap.S().Debugf("file '%s' exists", e.filePath) } // check if Iac type is supported From 24ecda8b95a32ff8d3b8059aca95f5165bd93491 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:06:15 +0530 Subject: [PATCH 146/188] fix runtime unit tests --- pkg/runtime/executor_test.go | 88 ++++++++++++++++--- ....EncryptionandKeyManagement.High.0407.json | 12 +++ ....EncryptionandKeyManagement.High.0408.json | 12 +++ .../AWS.CloudFront.Logging.Medium.0567.json | 12 +++ .../cloudfrontNoHTTPSTraffic.rego | 10 +++ .../cloudfrontNoLogging.rego | 21 +++++ .../cloudfrontNoSecureCiphers.rego | 19 ++++ 7 files changed, 163 insertions(+), 11 deletions(-) create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 62d163754..8d82ee97e 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -26,11 +26,13 @@ import ( tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" "github.com/accurics/terrascan/pkg/notifications" "github.com/accurics/terrascan/pkg/notifications/webhook" + "github.com/accurics/terrascan/pkg/policy" ) var ( - errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") - errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") + errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") + errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") + errMockPolicyEngine = fmt.Errorf("mock PolicyEngine") ) // MockIacProvider mocks IacProvider interface @@ -47,6 +49,31 @@ func (m MockIacProvider) LoadIacFile(file string) (output.AllResourceConfigs, er return m.output, m.err } +// mock policy engine +type MockPolicyEngine struct { + err error +} + +func (m MockPolicyEngine) Init(input string) error { + return m.err +} + +func (m MockPolicyEngine) Configure() error { + return m.err +} + +func (m MockPolicyEngine) Evaluate(input policy.EngineInput) (out policy.EngineOutput, err error) { + return out, m.err +} + +func (m MockPolicyEngine) GetResults() (out policy.EngineOutput) { + return out +} + +func (m MockPolicyEngine) Release() error { + return m.err +} + func TestExecute(t *testing.T) { // TODO: add tests to validate output of Execute() @@ -66,8 +93,9 @@ func TestExecute(t *testing.T) { { 
name: "test LoadIacDir no error", executor: Executor{ - dirPath: "./testdata/testdir", - iacProvider: MockIacProvider{err: nil}, + dirPath: "./testdata/testdir", + iacProvider: MockIacProvider{err: nil}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: nil, }, @@ -82,27 +110,48 @@ func TestExecute(t *testing.T) { { name: "test LoadIacFile no error", executor: Executor{ - filePath: "./testdata/testfile", - iacProvider: MockIacProvider{err: nil}, + filePath: "./testdata/testfile", + iacProvider: MockIacProvider{err: nil}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: nil, }, { name: "test SendNofitications no error", executor: Executor{ - iacProvider: MockIacProvider{err: nil}, - notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: nil, }, { - name: "test SendNofitications no error", + name: "test SendNofitications mock error", executor: Executor{ - iacProvider: MockIacProvider{err: nil}, - notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}}, + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: errMockNotifier, }, + { + name: "test policy enginer no error", + executor: Executor{ + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + policyEngine: MockPolicyEngine{err: nil}, + }, + wantErr: nil, + }, + { + name: "test policy engine error", + executor: Executor{ + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + policyEngine: MockPolicyEngine{err: errMockPolicyEngine}, + }, + wantErr: errMockPolicyEngine, + }, } for _, tt := range table { @@ -132,6 +181,7 @@ func TestInit(t *testing.T) { cloudType: "aws", iacType: "terraform", iacVersion: "v12", + policyPath: "./testdata/testpolicies", }, wantErr: nil, wantIacProvider: &tfv12.TfV12{}, @@ -146,6 +196,7 @@ func TestInit(t *testing.T) { iacType: "terraform", iacVersion: "v12", configFile: "./testdata/webhook.toml", + policyPath: "./testdata/testpolicies", }, wantErr: nil, wantIacProvider: &tfv12.TfV12{}, @@ -178,6 +229,21 @@ func TestInit(t *testing.T) { wantErr: fmt.Errorf("config file not present"), wantIacProvider: &tfv12.TfV12{}, }, + { + name: "invalid policy path", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + configFile: "./testdata/webhook.toml", + policyPath: "./testdata/notthere", + }, + wantErr: fmt.Errorf("failed to initialize OPA policy engine"), + wantIacProvider: &tfv12.TfV12{}, + wantNotifiers: []notifications.Notifier{&webhook.Webhook{}}, + }, } for _, tt := range table { diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json new file mode 100755 index 000000000..87a931b83 --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoHTTPSTraffic", + "file": "cloudfrontNoHTTPSTraffic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use encrypted connection 
between CloudFront and origin server", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json new file mode 100755 index 000000000..417d50dcd --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoSecureCiphers", + "file": "cloudfrontNoSecureCiphers.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Secure ciphers are not used in CloudFront distribution", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json new file mode 100755 index 000000000..2d26be5a4 --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoLogging", + "file": "cloudfrontNoLogging.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", + "referenceId": "AWS.CloudFront.Logging.Medium.0567", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego new file mode 100755 index 000000000..6073a927c --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoHTTPSTraffic[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + orderedcachebehaviour = cloudfront.config.ordered_cache_behavior[i] + orderedcachebehaviour.viewer_protocol_policy == "allow-all" + traverse := sprintf("ordered_cache_behavior[%d].viewer_protocol_policy", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ordered_cache_behavior.viewer_protocol_policy", "AttributeDataType": "string", "Expected": "redirect-to-https", "Actual": orderedcachebehaviour.viewer_protocol_policy } +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego new file mode 100755 index 000000000..dfd52a3a1 --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + not cloudfront.config.logging_config + + rc = 
"ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + cloudfront.config.logging_config == [] + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego new file mode 100755 index 000000000..9159d825f --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}cloudfrontNoSecureCiphers[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + certificate = cloudfront.config.viewer_certificate[i] + certificate.cloudfront_default_certificate = false + not minimumAllowedProtocolVersion(certificate.minimum_protocol_version) + traverse := sprintf("viewer_certificate[%d].minimum_protocol_version", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "viewer_certificate.minimum_protocol_version", "AttributeDataType": "string", "Expected": "TLSv1.2", "Actual": certificate.minimum_protocol_version } +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.1" +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.2" +} \ No newline at end of file From d776638795f5dfeb20f0252f25c1a1ec0a16b36c Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:34:39 +0530 Subject: [PATCH 147/188] fix http-server unit tests --- pkg/http-server/file-scan.go | 10 +++++++-- pkg/http-server/file-scan_test.go | 2 +- pkg/http-server/handler.go | 4 +++- ....EncryptionandKeyManagement.High.0407.json | 12 +++++++++++ ....EncryptionandKeyManagement.High.0408.json | 12 +++++++++++ .../AWS.CloudFront.Logging.Medium.0567.json | 12 +++++++++++ .../cloudfrontNoHTTPSTraffic.rego | 10 +++++++++ .../cloudfrontNoLogging.rego | 21 +++++++++++++++++++ .../cloudfrontNoSecureCiphers.rego | 19 +++++++++++++++++ 9 files changed, 98 insertions(+), 4 deletions(-) create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego create mode 100755 
pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go index abe660010..b49ebfc29 100644 --- a/pkg/http-server/file-scan.go +++ b/pkg/http-server/file-scan.go @@ -82,8 +82,14 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { tempFile.Write(fileBytes) // create a new runtime executor for scanning the uploaded file - executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, - tempFile.Name(), "", "", "") + var executor *runtime.Executor + if g.test { + executor, err = runtime.NewExecutor(iacType, iacVersion, cloudType, + tempFile.Name(), "", "", "./testdata/testpolicies") + } else { + executor, err = runtime.NewExecutor(iacType, iacVersion, cloudType, + tempFile.Name(), "", "", "") + } if err != nil { zap.S().Error(err) apiErrorResponse(w, err.Error(), http.StatusBadRequest) diff --git a/pkg/http-server/file-scan_test.go b/pkg/http-server/file-scan_test.go index 2e4523b7c..a40016cf4 100644 --- a/pkg/http-server/file-scan_test.go +++ b/pkg/http-server/file-scan_test.go @@ -102,7 +102,7 @@ func TestUpload(t *testing.T) { }) res := httptest.NewRecorder() // new api handler - h := NewAPIHandler() + h := &APIHandler{test: true} h.scanFile(res, req) if res.Code != tt.wantStatus { diff --git a/pkg/http-server/handler.go b/pkg/http-server/handler.go index a72370030..eb86e5a90 100644 --- a/pkg/http-server/handler.go +++ b/pkg/http-server/handler.go @@ -17,7 +17,9 @@ package httpserver // APIHandler struct for http api server -type APIHandler struct{} +type APIHandler struct { + test bool +} // NewAPIHandler returns a new APIHandler{} func NewAPIHandler() *APIHandler { diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json new file mode 100755 index 000000000..87a931b83 --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoHTTPSTraffic", + "file": "cloudfrontNoHTTPSTraffic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use encrypted connection between CloudFront and origin server", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json new file mode 100755 index 000000000..417d50dcd --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoSecureCiphers", + "file": "cloudfrontNoSecureCiphers.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Secure ciphers are not used in CloudFront distribution", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git 
a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json new file mode 100755 index 000000000..2d26be5a4 --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoLogging", + "file": "cloudfrontNoLogging.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", + "referenceId": "AWS.CloudFront.Logging.Medium.0567", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego new file mode 100755 index 000000000..6073a927c --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoHTTPSTraffic[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + orderedcachebehaviour = cloudfront.config.ordered_cache_behavior[i] + orderedcachebehaviour.viewer_protocol_policy == "allow-all" + traverse := sprintf("ordered_cache_behavior[%d].viewer_protocol_policy", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ordered_cache_behavior.viewer_protocol_policy", "AttributeDataType": "string", "Expected": "redirect-to-https", "Actual": orderedcachebehaviour.viewer_protocol_policy } +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego new file mode 100755 index 000000000..dfd52a3a1 --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + not cloudfront.config.logging_config + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + cloudfront.config.logging_config == [] + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego 
b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego new file mode 100755 index 000000000..9159d825f --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}cloudfrontNoSecureCiphers[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + certificate = cloudfront.config.viewer_certificate[i] + certificate.cloudfront_default_certificate = false + not minimumAllowedProtocolVersion(certificate.minimum_protocol_version) + traverse := sprintf("viewer_certificate[%d].minimum_protocol_version", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "viewer_certificate.minimum_protocol_version", "AttributeDataType": "string", "Expected": "TLSv1.2", "Actual": certificate.minimum_protocol_version } +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.1" +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.2" +} \ No newline at end of file From 91ce740d5284d6e336e1ab6512d937bce332d8e1 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:46:34 +0530 Subject: [PATCH 148/188] fix remove unnecessary data from output --- pkg/results/types.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/results/types.go b/pkg/results/types.go index 5bfda9544..35e38129a 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -20,14 +20,14 @@ package results type Violation struct { RuleName string `json:"rule_name" yaml:"rule_name" xml:"rule_name,attr"` Description string `json:"description" yaml:"description" xml:"description,attr"` - RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + RuleID string `json:"rule_id" yaml:"rule_id" xml:"rule_id,attr"` Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` - RuleFile string `json:"rule_file" yaml:"rule_file" xml:"rule_file,attr"` + RuleFile string `json:"-" yaml:"-" xml:"-"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` ResourceName string `json:"resource_name" yaml:"resource_name" xml:"resource_name,attr"` ResourceType string `json:"resource_type" yaml:"resource_type" xml:"resource_type,attr"` - ResourceData interface{} `json:"resource_data" yaml:"resource_data" xml:"resource_data,attr"` + ResourceData interface{} `json:"-" yaml:"-" xml:"-"` File string `json:"file" yaml:"file" xml:"file,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } From cb8e514da86702db96b8cea5e504a19a5f4642d0 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:58:08 +0530 Subject: [PATCH 149/188] make current directory as default for scanning IaC --- cmd/terrascan/main.go | 2 +- pkg/runtime/executor.go | 6 +++--- pkg/runtime/validate.go | 31 +++++++++++++------------------ 3 files changed, 17 insertions(+), 22 deletions(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index db116a4ff..3e3b5ebe3 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -37,7 +37,7 @@ func main() { iacType = flag.String("iac", "", "IaC provider (supported values: terraform)") iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") iacFilePath = flag.String("f", "", "IaC file path") - iacDirPath = flag.String("d", "", "IaC directory path") + iacDirPath = flag.String("d", ".", 
"IaC directory path") policyPath = flag.String("p", "", "Policy directory path") // cloud flags diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index df36a6315..e0c3c7757 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -99,10 +99,10 @@ func (e *Executor) Execute() (results policy.EngineOutput, err error) { // create results output from Iac var normalized output.AllResourceConfigs - if e.dirPath != "" { - normalized, err = e.iacProvider.LoadIacDir(e.dirPath) - } else { + if e.filePath != "" { normalized, err = e.iacProvider.LoadIacFile(e.filePath) + } else { + normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } if err != nil { return results, err diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index fa9f343aa..2a9c1ec37 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -44,25 +44,8 @@ func (e *Executor) ValidateInputs() error { zap.S().Errorf("no IaC path specified; use '-f' for file or '-d' for directory") return errEmptyIacPath } - if e.filePath != "" && e.dirPath != "" { - zap.S().Errorf("cannot accept both '-f %s' and '-d %s' options together", e.filePath, e.dirPath) - return errIncorrectIacPath - } - - if e.dirPath != "" { - // if directory, check if directory exists - e.dirPath, err = utils.GetAbsPath(e.dirPath) - if err != nil { - return err - } - - if _, err := os.Stat(e.dirPath); err != nil { - zap.S().Errorf("directory '%s' does not exist", e.dirPath) - return errDirNotExists - } - zap.S().Debugf("directory '%s' exists", e.dirPath) - } else { + if e.filePath != "" { // if file path, check if file exists e.filePath, err = utils.GetAbsPath(e.filePath) if err != nil { @@ -74,6 +57,18 @@ func (e *Executor) ValidateInputs() error { return errFileNotExists } zap.S().Debugf("file '%s' exists", e.filePath) + } else { + // if directory, check if directory exists + e.dirPath, err = utils.GetAbsPath(e.dirPath) + if err != nil { + return err + } + + if _, err := os.Stat(e.dirPath); err != nil { + zap.S().Errorf("directory '%s' does not exist", e.dirPath) + return errDirNotExists + } + zap.S().Debugf("directory '%s' exists", e.dirPath) } // check if Iac type is supported From c66dbef5d94a683b6698929050ef525e701a1e09 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 19:54:45 +0530 Subject: [PATCH 150/188] fix unit tests for runtime package --- pkg/runtime/validate_test.go | 8 -------- 1 file changed, 8 deletions(-) diff --git a/pkg/runtime/validate_test.go b/pkg/runtime/validate_test.go index 938672e40..0dab33948 100644 --- a/pkg/runtime/validate_test.go +++ b/pkg/runtime/validate_test.go @@ -58,14 +58,6 @@ func TestValidateInputs(t *testing.T) { }, wantErr: errEmptyIacPath, }, - { - name: "incorrect iac path", - executor: Executor{ - filePath: "./testdata/testfile", - dirPath: "./testdata/testdir", - }, - wantErr: errIncorrectIacPath, - }, { name: "filepath does not exist", executor: Executor{ From c33b80592fcb6c6cdabcea143612d88db0711c5a Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 19:58:10 +0530 Subject: [PATCH 151/188] fix static check errors --- pkg/runtime/validate.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index 2a9c1ec37..63de4837e 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -27,11 +27,10 @@ import ( ) var ( - errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") - errIncorrectIacPath = fmt.Errorf("cannot accept both '-f' and '-d' 
options together") - errDirNotExists = fmt.Errorf("directory does not exist") - errFileNotExists = fmt.Errorf("file does not exist") - errIacNotSupported = fmt.Errorf("iac type or version not supported") + errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") + errDirNotExists = fmt.Errorf("directory does not exist") + errFileNotExists = fmt.Errorf("file does not exist") + errIacNotSupported = fmt.Errorf("iac type or version not supported") ) // ValidateInputs validates the inputs to the executor object From 7e152c95049aed2ca8356104bbf95c26d6c9a2b7 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 13 Aug 2020 21:44:06 -0400 Subject: [PATCH 152/188] updates favicon --- mkdocs.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mkdocs.yml b/mkdocs.yml index 7532dc7f8..b24c177a9 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -12,8 +12,8 @@ repo_url: /~https://github.com/accurics/terrascan/ # Configuration theme: name: material - favicon: img/Terrascan_STACK_Logo_F2F2F2.svg - logo: img/Terrascan_STACK_Logo_F2F2F2.svg + favicon: img/favicon.ico + logo: img/terrascan-icon-white.png custom_dir: docs/overrides palette: primary: green From 8563d3a08649c93da5091e41a0bb4829eacf37d4 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 13 Aug 2020 21:44:18 -0400 Subject: [PATCH 153/188] adds new favicon --- docs/img/favicon.ico | Bin 0 -> 4286 bytes docs/img/terrascan-icon-white.png | Bin 0 -> 1965 bytes 2 files changed, 0 insertions(+), 0 deletions(-) create mode 100644 docs/img/favicon.ico create mode 100644 docs/img/terrascan-icon-white.png diff --git a/docs/img/favicon.ico b/docs/img/favicon.ico new file mode 100644 index 0000000000000000000000000000000000000000..92f4824e714bc9639fb97ed5ec05eb6b23fa5824 GIT binary patch literal 4286 zcmcgwX>3$g7=3`mL=*9se>MK{uiaagE>LFPqh+xROBX1GvSN&nr7SKaCSr(@C{RHXjY3<5E)uI<6HO$?bKcBj=5?k_n-JTazBl*Y?>paizk8>K zF^a!0ykO|B(eSKcJZBijO9Y3E6hhxid_QsNE~i6_#$bV!$U4q|hE)I2ecc!wAuBIzFPymU#Ybh}CY zpEOR$My>mY7_RQ&u9Hsb7Kh6QeI`Z`iqqrt8)h>75Pbefw>ZRQ+n}c(iF`hed^=n) zJ>*>-4>Lb5*amTk%i(q6u38PqKI%Q-&(Q0&CMOowiSe)wC&7FrHrNJn*?I8Wp}iV; zk4a}LVNqAwFl?P11FI?l)~B>#TSBmmxWp;npCHsPhW15s;$}Rpa_Xz2KTBLO4KFL1 z494JK60DQsVdlAH#3^p|OZBYqI-K!za&IB`dHV7(`4_l!4J|u~dS8$C$!%f`9DP1c zajR#mZwAltJsQ)}$f0rhQA!>hA`jEklOUNP?U+LO z_bIO&kgo^vjd$S9r&nr zA2QQ97ZSrj9PHdNe&#~PR5|gj6}Qe%=d=A3|HgGI(c9aLV@D3bTR{62d)Bbx{c7<% zbab?%ufHGtM0;BsmKMzpn+q#77T!tGur~T?$FAD|zjJPx&(thpR%#ivEyZ@wP&+Z-Q9~eL*_d&)i`nQid zwEvk0vmh28Ga4Q*d6^&c4fc8ezLYPuAJ$-ivJG{IU@i9V-FGP$=$p>|3)_DbgZyj@ zKVEObpIu$pvE@ye+2pD5_aM~+`=ENe_N8KDu8oIhVw7FGs!#d9q&NrOoj!hZsVn#D zdEy?j|9VoRuwm^Abg&*UHOjjUxjH! 
zj1Oa@zG2sf&dord8GQcA2KW6#byCK>DbMVi-VJfc&P&DSiZYx#bpkhViGi*sk#t1B=kKONq++$+cFx8lBmb<3U^#O=C&=n}*^L3bke{Yy44qYX!>OZCMb z;HjkU;&@~iq@iHJG#j}Elj$#OU_I;ekbjRTZmSpr-T{K*9&-P3E`oj=bSKdrQFle3 z&B_^jj>WT6xv=-2VtS1FX^&!THjq=ksz1nY-8FR1I68b!>6C8!-sA8FKRKs3=#H#) zfqSTIP%LzZTFH2@2ifOgEx82sg=*TNyB_ac_8t$^n(UA+>6GqZ%8+->6S7gcaNkFz zQ?)5ucptIPa-HWqpY1Wtxp~Dc9nv+V--C_dTG=p~c=Y!#afR|Zan~Lj@%Jys7QYR$ zDZuWkR4u>b%7 literal 0 HcmV?d00001 diff --git a/docs/img/terrascan-icon-white.png b/docs/img/terrascan-icon-white.png new file mode 100644 index 0000000000000000000000000000000000000000..6aa1aaa9430964b363c0b27ebbf884d2d74453b5 GIT binary patch literal 1965 zcma)-dpHw}9>-OqO<}XJmD?_DVJ!EF#N3B%hS$AZ)~hUusD;;cv|?GgbXzi0nqg&U zujIb7DY~AL5c8T|oP(ttdZYJr^gQSMd!F;h_xJrh&-btIAK&M5flBt%(cZ1Cp`oD@ z;O|Y_bovhgwKnf5Rdv{=flm2Hoz~FUGWtVX)?@ExXlQId65#C_#^f!p0sn5I z#wRpxwK9+k&Ch6tSQ0vmo)~xw%}JX-JFY*DfMOVcObI(5adn{IN=xz=!Noz{U_=@ zJX6Q9wZAj`?A!v)BCEA3J5!l65`8Fmf0rltNUX!a=wOem;r+W#dEjC-&6T@)PnuUw zNVa36gi!q{R+4{B>wuk8?yam|~G^3!-fdIE>apnU-s*zGc zatM>C-!-7WviEa#gkyk^HfhycS-|*Kw7SMsKejgi#Ko^y7V@G~@(b+#vXB(4CUhcD*wvL@_CjDVa z2Fjk{XSvX;j{%X$(pHm<^n&(gp6xAxQo%VclJ?+SxV!%ZpDc*cgFE{_a-kcWYfZd! zpE)ps>V+q%Jy~rvVrfs&_7LXzW7M6FZuEeCHBci8HQvPO!O=`+T>CyAwl#nDpa8^K zsiahWsMA8&`v;%hdPx>WC(PgXBe;h6UYNPzg_Gwt;+vKBDdNa0;_6{MKWtHc2<6^M zck^|)u|^Od^Y2T{xNez-8u+tLTB~O=HLvhV3JU7^L)Wg$bUG9A@{_pi)*D1_Vk;zc z=>|hf*^$gQ+>=PW5AA#%jX#*Q13sj6QRG_t6x;44TZ~jFxT1|+k>#tieR~#A zJ7*bwn1NqWc`wcJaK;fsBhfGPSk?Bcn7^t}t@(h+)w8o*`2Dz&2d(TG8;_HQP0*ri zfh9M+dK)-qD`~%f9_;uS{7*O2Ybzr!9t8djxiRSq7kukUy2$ctLL*N+93=)Ufpra> zUOq=YDoztRZj=2DYCpR`&IGKxeLQU`lRo`80Wiy&2K$<#=<~Y=U4{!b8$MNj(5kdy z$q&gN<3aA+z_v%_QV_pOfZyx&lMqZYbP**4EtjXzAakdN7P=)){7X+s>bDs{TEie45gSk*v1ze^m%DOIQhczQF-ov6S4aeVGXPOe=jHsV7elI7}-8 zlhobvXqFYN-5(#flpJUH4bs_s0o&I-8?it`1!vx_D(6 z=_W5zdWW7XU8tU8)zR_5%E_;Q{ z*i6)KR5fS-!g{TzrIU?h3%Ks^0U|mDOv~*R74vie^=ZXw0Rf@8hDk?MSV^d`Ze1sU zptSk$Y9&dDMwo9fWD&}@qkMAwtCN+wQXpUTtElf`#x}npyH^|PQ_dOH*N|u-BeINJ z7cR??WWS!5#|hljnDuSS99Pf5TDyAmxmbpkP^_8VmNf;FBxWP%Q!%^mudP?WMs8L) z1{&z~V+t5m#!mO6r!HP!$m+UFe-_@}_=UR*u(qx{u1PS2Q7Fm+>x6_S0b^!fPPaI~ zl-HH-JSw3@%LA!oXfP?>{_z3|@DNnxgR>CPTE=8`PT?5$foffJ_bmiqfSjJ1^Vds6WD`ntKQ-&N+Hkg z>uLRIlYpG9iRNp Date: Thu, 13 Aug 2020 22:17:10 -0400 Subject: [PATCH 154/188] adds footer --- docs/overrides/partials/footer.html | 5 +++++ docs/stylesheets/extra.css | 8 ++++++++ mkdocs.yml | 4 ++++ 3 files changed, 17 insertions(+) create mode 100644 docs/stylesheets/extra.css diff --git a/docs/overrides/partials/footer.html b/docs/overrides/partials/footer.html index 7ad9293cf..505a0fa8d 100644 --- a/docs/overrides/partials/footer.html +++ b/docs/overrides/partials/footer.html @@ -80,6 +80,11 @@ + {% endif %} diff --git a/docs/stylesheets/extra.css b/docs/stylesheets/extra.css new file mode 100644 index 000000000..e7c1df970 --- /dev/null +++ b/docs/stylesheets/extra.css @@ -0,0 +1,8 @@ +.md-footer-copyright__highlight{ + text-align: center; +} + +.md-footer-copyright__policies{ + text-align: center; + margin-top: 1.75em; +} diff --git a/mkdocs.yml b/mkdocs.yml index b24c177a9..00d38dbab 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -21,6 +21,10 @@ theme: font: text: Montserrat +# Custom CSS +extra_css: + - stylesheets/extra.css + # Social Icons extra: social: From 25371fc2d7d13187eecd4ae936d0fb360f311c92 Mon Sep 17 00:00:00 2001 From: Cesar Rodriguez Date: Thu, 13 Aug 2020 23:00:21 -0400 Subject: [PATCH 155/188] updates getting started page --- README.md | 65 ++++++++++++++++------------ docs/getting-started.md | 95 ++++++++++++++++++++++++++++++++++++++++- 
mkdocs.yml | 2 - 3 files changed, 130 insertions(+), 32 deletions(-) diff --git a/README.md b/README.md index c3fe9c017..eb3bcd45a 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ Detect compliance and security violations across Infrastructure as Code to mitig * Support for AWS, Azure, and GCP ## Installing -Terrascan's binary for your architecture can be found on the releases page. Here's an example of how to install it: +Terrascan's binary for your architecture can be found on the [releases](/~https://github.com/accurics/terrascan/releases) page. Here's an example of how to install it: ``` $ curl --location /~https://github.com/accurics/terrascan/releases/download/v1.0.0/terrascan_darwin_amd64.zip --output terrascan_darwin_amd64.zip @@ -37,21 +37,16 @@ Terrascan can be installed using Homebrew on macOS: brew install terrascan ``` -### Chocolatey -Terrascan can be installed on Windows using Chocolatey: - -``` -choco install terrascan -``` - ### Docker Terrascan is also available as a Docker image and can be used as follows - $ docker run accurics/terrascan +``` +$ docker run accurics/terrascan +``` ## Getting started -To scan your code for security weaknesses you can run the following +To scan your code for security issues you can run the following ``` $ terrascan --iac terraform --iac-version v12 --cloud aws -d pkg/iac-providers/terraform/v12/testdata/moduleconfigs @@ -60,24 +55,38 @@ $ terrascan --iac terraform --iac-version v12 --cloud aws -d pkg/iac-providers/t The following flags are available: ``` -$ terrascan --help -Usage of ./bin/terrascan: - -cloud string - cloud provider (supported values: aws) - -d string - IaC directory path - -f string - IaC file path - -iac string - IaC provider (supported values: terraform) - -iac-version string - IaC version (supported values: 'v12' for terraform) (default "default") - -log-level string - logging level (debug, info, warn, error, panic, fatal) (default "info") - -log-type string - log type (json, console) (default "console") - -server - run terrascan in server mode +$ terrascan -h + +Terrascan + +Scan IaC files for security violations + +Usage + + terrascan -cloud [aws|azure|gcp] [options...] + +Options + +Cloud + -cloud Required. Cloud provider (supported values: aws, azure, gcp) + +IaC (Infrastructure as Code) + -d IaC directory path (default: current working directory) + -f IaC file path + -iac IaC provider (supported values: terraform, default: terraform) + -iac-version IaC version (supported values: 'v12' for Terraform, default: v12) + -p Policy directory path + +Mode + -server Run Terrascan in server mode + +Logging + -log-level Logging level (supported values: debug, info, warn, error, panic, fatal) + -log-type Logging type (supported values: json, yaml, console, default: console) + +Miscellaneous + -config Configuration file path + -version Print the Terrascan version ``` ## Documentation diff --git a/docs/getting-started.md b/docs/getting-started.md index 25cdf28ba..a94306683 100644 --- a/docs/getting-started.md +++ b/docs/getting-started.md @@ -1,8 +1,99 @@ # Getting Started +Terrascan is a static code analyzer for Infrastructure as Code tooling. It can be executed with the native binary/executable or by using the [`docker`](#using-docker) container. ## Installation -Terrascan's binary can be found on the package for each [release](/~https://github.com/accurics/terrascan/releases). +Terrascan's binary can be found on the package for each [release](/~https://github.com/accurics/terrascan/releases).
Here's an example of how to install it: + +``` Bash linenums="1" +$ curl --location /~https://github.com/accurics/terrascan/releases/download/v1.0.0/terrascan_darwin_amd64.zip --output terrascan_darwin_amd64.zip +$ unzip terrascan_darwin_amd64.zip +Archive: terrascan_darwin_amd64.zip + inflating: terrascan +$ install terrascan /usr/local/bin +$ terrascan --help +``` + +### Installing on macOS +For Mac users, Terrascan can be installed using Homebrew: + +``` Bash linenums="1" +brew install terrascan +``` + +### Using Docker +Terrascan is also available as a Docker image and can be used as follows: + +``` Bash linenums="1" +$ docker run accurics/terrascan +``` + +### Building Terrascan +Terrascan can be built locally. This is helpful if you want to be on the latest version or when developing Terrascan. + +``` Bash linenums="1" +$ git clone git@github.com:accurics/terrascan.git +$ cd terrascan +$ make build +$ ./bin/terrascan +``` ## Scanning -## Terrascan CLI +By typing `terrascan` without flags or other arguments, you can display the usage information. + +``` Bash linenums="1" +$ terrascan -h + +Terrascan + +Scan IaC files for security violations + +Usage + + terrascan -cloud [aws|azure|gcp] [options...] + +Options + +Cloud + -cloud Required. Cloud provider (supported values: aws, azure, gcp) + +IaC (Infrastructure as Code) + -d IaC directory path (default: current working directory) + -f IaC file path + -iac IaC provider (supported values: terraform, default: terraform) + -iac-version IaC version (supported values: 'v12' for Terraform, default: v12) + -p Policy directory path + +Mode + -server Run Terrascan in server mode + +Logging + -log-level Logging level (supported values: debug, info, warn, error, panic, fatal) + -log-type Logging type (supported values: json, yaml, console, default: console) + +Miscellaneous + -config Configuration file path + -version Print the Terrascan version +``` + + +``` Bash linenums="1" +$ terrascan -cloud aws -iac terraform -iac-version v12 -p $REGO_POLICIES -d . --output json +``` + +### Example scanning Terraform (HCL2) + +Here's an example of scanning Terraform HCL2 files containing AWS resources: + +``` Bash linenums="1" +terrascan -cloud aws -d ~/iac_folder +``` +In the example above, the `-cloud` flag is used to specify AWS as the cloud provider and the `-d` flag is used to specify the directory to scan.
+ +### Launch Terrascan in server mode + +To launch Terrascan in server mode you can execute the following: + +``` Bash linenums="1" +terrascan -server +``` diff --git a/mkdocs.yml b/mkdocs.yml index 00d38dbab..74e8a49a9 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -18,8 +18,6 @@ theme: palette: primary: green language: en - font: - text: Montserrat # Custom CSS extra_css: From 15161398e34b36c3e5435d03ef217b574464f212 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Tue, 11 Aug 2020 00:54:19 -0700 Subject: [PATCH 156/188] Add separate violation/results and reporter objects --- pkg/policy/interface.go | 4 ++++ pkg/policy/opa/constants.go | 16 ++++++++++++++++ pkg/policy/opa/engine.go | 32 ++++++++++++++++++++++---------- pkg/policy/opa/types.go | 2 +- pkg/runtime/executor.go | 21 +++++++++++++++------ 5 files changed, 58 insertions(+), 17 deletions(-) diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 084f5599c..3aa3a548b 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -31,3 +31,7 @@ type Engine interface { GetResults() error Release() error } + +// EngineFactory creates policy engine instances based on iac/cloud type +type EngineFactory struct { +} diff --git a/pkg/policy/opa/constants.go b/pkg/policy/opa/constants.go index c00b15022..9e51f1f56 100644 --- a/pkg/policy/opa/constants.go +++ b/pkg/policy/opa/constants.go @@ -1,3 +1,19 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + package opa const ( diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 6d9aa3746..4e7c756fb 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -28,13 +28,12 @@ import ( "sort" "text/template" - "github.com/accurics/terrascan/pkg/utils" + "github.com/accurics/terrascan/pkg/results" + "github.com/accurics/terrascan/pkg/utils" "github.com/open-policy-agent/opa/ast" - - "go.uber.org/zap" - "github.com/open-policy-agent/opa/rego" + "go.uber.org/zap" ) // LoadRegoMetadata Loads rego metadata from a given file @@ -258,15 +257,28 @@ func (e *Engine) Evaluate(inputData *interface{}) error { } if len(rs) > 0 { - results := rs[0].Expressions[0].Value.([]interface{}) - if len(results) > 0 { - r := e.RegoDataMap[k].Metadata - fmt.Printf("[%s] [%s] [%s] %s: %s\n", r.Severity, r.RuleReferenceID, r.Category, r.RuleName, r.Description) + res := rs[0].Expressions[0].Value.([]interface{}) + if len(res) > 0 { + // @TODO: Take line number + file info and add to violation + regoData := e.RegoDataMap[k] + // @TODO: Remove this print, should be done by whomever consumes the results below + fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, + regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) + violation := results.Violation{ + Name: regoData.Metadata.RuleName, + Description: regoData.Metadata.Description, + RuleID: regoData.Metadata.RuleReferenceID, + Category: regoData.Metadata.Category, + RuleData: regoData.RawRego, + InputFile: "", + InputData: res, + LineNumber: 0, + } + + e.ViolationStore.AddResult(&violation) continue } } - - // Store results } return nil diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go index 520972406..c60748083 100644 --- a/pkg/policy/opa/types.go +++ b/pkg/policy/opa/types.go @@ -57,6 +57,6 @@ type Engine struct { Context context.Context RegoFileMap map[string][]byte RegoDataMap map[string]*RegoData - ViolationStore *results.Store + ViolationStore *results.ViolationStore stats EngineStats } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 0db2a17f1..1ab8857d7 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -99,17 +99,26 @@ func (e *Executor) Execute() (normalized interface{}, err error) { } // create a new policy engine based on IaC type + var engine policy.Engine + if e.iacType == "terraform" { - var engine policy.Engine = &opa.Engine{} + engine = &opa.Engine{} + } - err = engine.Initialize(e.policyPath) - if err != nil { - return normalized, err - } + if err = engine.Initialize(e.policyPath); err != nil { + return normalized, err + } - engine.Evaluate(&normalized) + if err = engine.Evaluate(&normalized); err != nil { + return normalized, err } + // var reporter publish.Reporter = console.Reporter + /// if err = reporter.ImportData() + // if err = reporter.Publish() { + // + // } + // send notifications, if configured if err = e.SendNotifications(normalized); err != nil { return normalized, err From 9230d5d83f4835a3b96a8964de89651e837226eb Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Tue, 11 Aug 2020 20:17:43 +0530 Subject: [PATCH 157/188] refactoring policy package --- pkg/cli/run.go | 7 +++-- pkg/policy/interface.go | 8 +++-- pkg/policy/opa/engine.go | 35 +++++++++++++++++---- pkg/results/store.go | 7 +++++ pkg/runtime/executor.go | 67 ++++++++++++++++++---------------------- 5 files changed, 77 insertions(+), 47 deletions(-) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 
d766e1b71..280460d59 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -17,7 +17,10 @@ package cli import ( + "os" + "github.com/accurics/terrascan/pkg/runtime" + "github.com/accurics/terrascan/pkg/utils" ) // Run executes terrascan in CLI mode @@ -31,9 +34,9 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po } // executor output - _, err = executor.Execute() + violations, err := executor.Execute() if err != nil { return } - // utils.PrintJSON(violations, os.Stdout) + utils.PrintJSON(violations, os.Stdout) } diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 3aa3a548b..c686cd58a 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,6 +16,10 @@ package policy +import ( + "github.com/accurics/terrascan/pkg/results" +) + // Manager Policy Manager interface type Manager interface { Import() error @@ -25,9 +29,9 @@ type Manager interface { // Engine Policy Engine interface type Engine interface { - Initialize(policyPath string) error + Init(string) error Configure() error - Evaluate(inputData *interface{}) error + Evaluate(*interface{}) ([]*results.Violation, error) GetResults() error Release() error } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 4e7c756fb..5e0b86095 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -36,6 +36,26 @@ import ( "go.uber.org/zap" ) +var ( + errInitFailed = fmt.Errorf("failed to initialize OPA policy engine") +) + +// NewEngine returns a new OPA policy engine +func NewEngine(policyPath string) (*Engine, error) { + + // opa engine struct + engine := &Engine{} + + // initialize the engine + if err := engine.Init(policyPath); err != nil { + zap.S().Error("failed to initialize OPA policy engine") + return engine, errInitFailed + } + + // successful + return engine, nil +} + // LoadRegoMetadata Loads rego metadata from a given file func (e *Engine) LoadRegoMetadata(metaFilename string) (*RegoMetadata, error) { // Load metadata file if it exists @@ -202,9 +222,9 @@ func (e *Engine) CompileRegoFiles() error { return nil } -// Initialize Initializes the Opa engine +// Init initializes the Opa engine // Handles loading all rules, filtering, compiling, and preparing for evaluation -func (e *Engine) Initialize(policyPath string) error { +func (e *Engine) Init(policyPath string) error { e.Context = context.Background() if err := e.LoadRegoFiles(policyPath); err != nil { @@ -218,6 +238,9 @@ func (e *Engine) Initialize(policyPath string) error { return err } + // initialize ViolationStore + e.ViolationStore = results.NewViolationStore() + return nil } @@ -237,7 +260,7 @@ func (e *Engine) Release() error { } // Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(inputData *interface{}) error { +func (e *Engine) Evaluate(inputData *interface{}) ([]*results.Violation, error) { sortedKeys := make([]string, len(e.RegoDataMap)) x := 0 @@ -262,8 +285,8 @@ func (e *Engine) Evaluate(inputData *interface{}) error { // @TODO: Take line number + file info and add to violation regoData := e.RegoDataMap[k] // @TODO: Remove this print, should be done by whomever consumes the results below - fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, - regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) + // fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, + // regoData.Metadata.Category, 
regoData.Metadata.RuleName, regoData.Metadata.Description) violation := results.Violation{ Name: regoData.Metadata.RuleName, Description: regoData.Metadata.Description, @@ -281,5 +304,5 @@ func (e *Engine) Evaluate(inputData *interface{}) error { } } - return nil + return e.ViolationStore.GetResults(), nil } diff --git a/pkg/results/store.go b/pkg/results/store.go index 799224e85..fec48c4e6 100644 --- a/pkg/results/store.go +++ b/pkg/results/store.go @@ -16,6 +16,13 @@ package results +// NewViolationStore returns a new violation store +func NewViolationStore() *ViolationStore { + return &ViolationStore{ + violations: []*Violation{}, + } +} + // AddResult Adds individual violations into the violation store func (s *ViolationStore) AddResult(violation *Violation) { s.violations = append(s.violations, violation) diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 1ab8857d7..c1301f9e4 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -27,15 +27,16 @@ import ( // Executor object type Executor struct { - filePath string - dirPath string - policyPath string - cloudType string - iacType string - iacVersion string - configFile string - iacProvider iacProvider.IacProvider - notifiers []notifications.Notifier + filePath string + dirPath string + policyPath string + cloudType string + iacType string + iacVersion string + configFile string + iacProvider iacProvider.IacProvider + policyEngine policy.Engine + notifiers []notifications.Notifier } // NewExecutor creates a runtime object @@ -50,7 +51,7 @@ func NewExecutor(iacType, iacVersion, cloudType, filePath, dirPath, configFile, configFile: configFile, } - // initialized executor + // initialize executor if err = e.Init(); err != nil { return e, err } @@ -81,49 +82,41 @@ func (e *Executor) Init() error { return err } + // create a new policy engine based on IaC type + e.policyEngine, err = opa.NewEngine(e.policyPath) + if err != nil { + zap.S().Errorf("failed to create policy engine. 
error: '%s'", err) + return err + } + zap.S().Debug("initialized executor") return nil } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() (normalized interface{}, err error) { +func (e *Executor) Execute() (results interface{}, err error) { - // create normalized output from Iac + // create results output from Iac if e.dirPath != "" { - normalized, err = e.iacProvider.LoadIacDir(e.dirPath) + results, err = e.iacProvider.LoadIacDir(e.dirPath) } else { - normalized, err = e.iacProvider.LoadIacFile(e.filePath) + results, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { - return normalized, err + return results, err } - // create a new policy engine based on IaC type - var engine policy.Engine - - if e.iacType == "terraform" { - engine = &opa.Engine{} - } - - if err = engine.Initialize(e.policyPath); err != nil { - return normalized, err - } - - if err = engine.Evaluate(&normalized); err != nil { - return normalized, err + // evaluate policies + results, err = e.policyEngine.Evaluate(&results) + if err != nil { + return results, err } - // var reporter publish.Reporter = console.Reporter - /// if err = reporter.ImportData() - // if err = reporter.Publish() { - // - // } - // send notifications, if configured - if err = e.SendNotifications(normalized); err != nil { - return normalized, err + if err = e.SendNotifications(results); err != nil { + return results, err } // successful - return normalized, nil + return results, nil } From 4f0fb96d4cabaf33103473cebda086bf6214e907 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 01:32:00 +0530 Subject: [PATCH 158/188] add support for writer --- pkg/cli/run.go | 4 ++-- pkg/results/types.go | 16 ++++++++-------- pkg/writer/json.go | 38 ++++++++++++++++++++++++++++++++++++++ pkg/writer/register.go | 30 ++++++++++++++++++++++++++++++ pkg/writer/writer.go | 40 ++++++++++++++++++++++++++++++++++++++++ pkg/writer/yaml.go | 39 +++++++++++++++++++++++++++++++++++++++ 6 files changed, 157 insertions(+), 10 deletions(-) create mode 100644 pkg/writer/json.go create mode 100644 pkg/writer/register.go create mode 100644 pkg/writer/writer.go create mode 100644 pkg/writer/yaml.go diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 280460d59..f24ae3fb6 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -20,7 +20,7 @@ import ( "os" "github.com/accurics/terrascan/pkg/runtime" - "github.com/accurics/terrascan/pkg/utils" + "github.com/accurics/terrascan/pkg/writer" ) // Run executes terrascan in CLI mode @@ -38,5 +38,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - utils.PrintJSON(violations, os.Stdout) + writer.Write("xml", violations, os.Stdout) } diff --git a/pkg/results/types.go b/pkg/results/types.go index 0dd6377f1..113f8260f 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -18,14 +18,14 @@ package results // Violation Contains data for each violation type Violation struct { - Name string - Description string - RuleID string - Category string - RuleData interface{} - InputFile string - InputData interface{} - LineNumber int + Name string `json:"name" yaml:"name" xml:"name,attr"` + Description string `json:"description" yaml:"description" xml:"description, attr"` + RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + Category string `json:"category" yaml:"category" xml:"category,attr"` + RuleData interface{} `json:"-" yaml:"-" xml:"-"` + InputFile string `json:"-", yaml:"-", xml:"-"` + InputData 
interface{} `json:"input_data" yaml:"input_data" xml:"input_data,attr"` + LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } // ViolationStore Storage area for violation data diff --git a/pkg/writer/json.go b/pkg/writer/json.go new file mode 100644 index 000000000..6fbae649f --- /dev/null +++ b/pkg/writer/json.go @@ -0,0 +1,38 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import ( + "encoding/json" + "io" +) + +const ( + jsonFormat supportedFormat = "json" +) + +func init() { + RegisterWriter(jsonFormat, JSONWriter) +} + +// JSONWriter prints data in JSON format +func JSONWriter(data interface{}, writer io.Writer) error { + j, _ := json.MarshalIndent(data, "", " ") + writer.Write(j) + writer.Write([]byte{'\n'}) + return nil +} diff --git a/pkg/writer/register.go b/pkg/writer/register.go new file mode 100644 index 000000000..3d7925de1 --- /dev/null +++ b/pkg/writer/register.go @@ -0,0 +1,30 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import "io" + +// supportedFormat data type for supported formats +type supportedFormat string + +// writerMap stores mapping of supported writer formats with respective functions +var writerMap = make(map[supportedFormat](func(interface{}, io.Writer) error)) + +// RegisterWriter registers a writer for terrascan +func RegisterWriter(format supportedFormat, writerFunc func(interface{}, io.Writer) error) { + writerMap[format] = writerFunc +} diff --git a/pkg/writer/writer.go b/pkg/writer/writer.go new file mode 100644 index 000000000..97bc799f8 --- /dev/null +++ b/pkg/writer/writer.go @@ -0,0 +1,40 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package writer + +import ( + "fmt" + "io" + + "go.uber.org/zap" +) + +var ( + errNotSupported = fmt.Errorf("output format not supported") +) + +// Write method writes in the given format using the respective writer func +func Write(format supportedFormat, data interface{}, writer io.Writer) error { + + writerFunc, present := writerMap[format] + if !present { + zap.S().Error("output format '%s' not supported", format) + return errNotSupported + } + + return writerFunc(data, writer) +} diff --git a/pkg/writer/yaml.go b/pkg/writer/yaml.go new file mode 100644 index 000000000..2d7f8d7ca --- /dev/null +++ b/pkg/writer/yaml.go @@ -0,0 +1,39 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package writer + +import ( + "io" + + "gopkg.in/yaml.v2" +) + +const ( + yamlFormat supportedFormat = "yaml" +) + +func init() { + RegisterWriter(yamlFormat, YAMLWriter) +} + +// YAMLWriter prints data in YAML format +func YAMLWriter(data interface{}, writer io.Writer) error { + j, _ := yaml.Marshal(data) + writer.Write(j) + writer.Write([]byte{'\n'}) + return nil +} From 0e81499139600f08b1df914a20d8ffdf0855ea21 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 10:38:36 +0530 Subject: [PATCH 159/188] changing input/ouput type from interface{} to data specific types --- go.mod | 1 + pkg/cli/run.go | 2 +- pkg/policy/interface.go | 3 ++- pkg/policy/opa/engine.go | 4 ++-- pkg/runtime/executor.go | 11 +++++++---- 5 files changed, 13 insertions(+), 8 deletions(-) diff --git a/go.mod b/go.mod index 459e68777..7cd2b8da7 100644 --- a/go.mod +++ b/go.mod @@ -17,5 +17,6 @@ require ( golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect golang.org/x/tools v0.0.0-20200809012840-6f4f008689da // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect ) diff --git a/pkg/cli/run.go b/pkg/cli/run.go index f24ae3fb6..963a7c22e 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -38,5 +38,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - writer.Write("xml", violations, os.Stdout) + writer.Write("yaml", violations, os.Stdout) } diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index c686cd58a..62765b331 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -17,6 +17,7 @@ package policy import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/results" ) @@ -31,7 +32,7 @@ type Manager interface { type Engine interface { Init(string) error Configure() error - Evaluate(*interface{}) ([]*results.Violation, error) + Evaluate(output.AllResourceConfigs) ([]*results.Violation, error) GetResults() error Release() error } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 5e0b86095..e3ecc271f 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -28,8 +28,8 @@ import ( "sort" "text/template" + 
"github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/results" - "github.com/accurics/terrascan/pkg/utils" "github.com/open-policy-agent/opa/ast" "github.com/open-policy-agent/opa/rego" @@ -260,7 +260,7 @@ func (e *Engine) Release() error { } // Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(inputData *interface{}) ([]*results.Violation, error) { +func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Violation, error) { sortedKeys := make([]string, len(e.RegoDataMap)) x := 0 diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index c1301f9e4..983deae3d 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -20,9 +20,11 @@ import ( "go.uber.org/zap" iacProvider "github.com/accurics/terrascan/pkg/iac-providers" + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/notifications" "github.com/accurics/terrascan/pkg/policy" opa "github.com/accurics/terrascan/pkg/policy/opa" + "github.com/accurics/terrascan/pkg/results" ) // Executor object @@ -94,20 +96,21 @@ func (e *Executor) Init() error { } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() (results interface{}, err error) { +func (e *Executor) Execute() (results []*results.Violation, err error) { // create results output from Iac + var normalized output.AllResourceConfigs if e.dirPath != "" { - results, err = e.iacProvider.LoadIacDir(e.dirPath) + normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } else { - results, err = e.iacProvider.LoadIacFile(e.filePath) + normalized, err = e.iacProvider.LoadIacFile(e.filePath) } if err != nil { return results, err } // evaluate policies - results, err = e.policyEngine.Evaluate(&results) + results, err = e.policyEngine.Evaluate(normalized) if err != nil { return results, err } From 93018e379743f6f73205156981b291a20c752a1b Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 12:03:20 +0530 Subject: [PATCH 160/188] add unit tests for FindAllDirectories func --- pkg/utils/path_test.go | 43 ++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go index a2efc55b8..5b47d4076 100644 --- a/pkg/utils/path_test.go +++ b/pkg/utils/path_test.go @@ -17,7 +17,9 @@ package utils import ( + "fmt" "os" + "reflect" "testing" ) @@ -67,3 +69,44 @@ func TestGetAbsPath(t *testing.T) { }) } } + +func TestFindAllDirectories(t *testing.T) { + + table := []struct { + name string + basePath string + want []string + wantErr error + }{ + { + name: "happy path", + basePath: "./testdata", + want: []string{"./testdata", "testdata/emptydir", "testdata/testdir1", "testdata/testdir2"}, + wantErr: nil, + }, + { + name: "empty dir", + basePath: "./testdata/emptydir", + want: []string{"./testdata/emptydir"}, + wantErr: nil, + }, + { + name: "invalid dir", + basePath: "./testdata/nothere", + want: []string{}, + wantErr: fmt.Errorf("lstat ./testdata/nothere: no such file or directory"), + }, + } + + for _, tt := range table { + t.Run(tt.name, func(t *testing.T) { + got, gotErr := FindAllDirectories(tt.basePath) + if !reflect.DeepEqual(gotErr, tt.wantErr) { + t.Errorf("gotErr: '%+v', wantErr: '%+v'", gotErr, tt.wantErr) + } + if !reflect.DeepEqual(got, tt.want) { + t.Errorf("got: '%v', want: '%v'", got, tt.want) + } + }) + } +} From 204501a36a3cefaa326d737a3ed19b0f75b1cc03 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: 
Wed, 12 Aug 2020 02:06:51 -0700 Subject: [PATCH 161/188] wrap engine input and output objects to lock the engine interface added low/medium/high/total violation counts removed reporting placeholder code (reporting to be done by caller of executor) --- go.mod | 2 +- go.sum | 2 ++ pkg/policy/interface.go | 7 +------ pkg/policy/opa/engine.go | 30 +++++++++++++++++++++--------- pkg/policy/opa/types.go | 12 ++++++------ pkg/policy/types.go | 16 ++++++++++++++++ pkg/results/types.go | 14 ++++++++++++-- pkg/runtime/executor.go | 5 ++--- 8 files changed, 61 insertions(+), 27 deletions(-) create mode 100644 pkg/policy/types.go diff --git a/go.mod b/go.mod index 7cd2b8da7..dffe6982b 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect - golang.org/x/tools v0.0.0-20200809012840-6f4f008689da // indirect + golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect diff --git a/go.sum b/go.sum index d576b76a8..8aee93936 100644 --- a/go.sum +++ b/go.sum @@ -445,6 +445,8 @@ golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtn golang.org/x/tools v0.0.0-20191130070609-6e064ea0cf2d/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200809012840-6f4f008689da h1:ml5G98G4/tdKT1XNq+ky5iSRdKKux0TANlLAzmXT/hg= golang.org/x/tools v0.0.0-20200809012840-6f4f008689da/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 h1:8Kg+JssU1jBZs8GIrL5pl4nVyaqyyhdmHAR4D1zGErg= +golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index 62765b331..c74a79886 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,11 +16,6 @@ package policy -import ( - "github.com/accurics/terrascan/pkg/iac-providers/output" - "github.com/accurics/terrascan/pkg/results" -) - // Manager Policy Manager interface type Manager interface { Import() error @@ -32,7 +27,7 @@ type Manager interface { type Engine interface { Init(string) error Configure() error - Evaluate(output.AllResourceConfigs) ([]*results.Violation, error) + Evaluate(EngineInput) (EngineOutput, error) GetResults() error Release() error } diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index e3ecc271f..90f4d56ef 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -26,9 +26,11 @@ import ( "os" "path/filepath" "sort" + "strings" "text/template" - "github.com/accurics/terrascan/pkg/iac-providers/output" + "github.com/accurics/terrascan/pkg/policy" + "github.com/accurics/terrascan/pkg/results" "github.com/accurics/terrascan/pkg/utils" "github.com/open-policy-agent/opa/ast" @@ -239,7 +241,7 @@ func (e *Engine) Init(policyPath string) error { } // initialize ViolationStore - e.ViolationStore = results.NewViolationStore() + e.Results.ViolationStore = results.NewViolationStore() return nil } @@ -260,7 +262,7 @@ func (e 
*Engine) Release() error { } // Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Violation, error) { +func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, error) { sortedKeys := make([]string, len(e.RegoDataMap)) x := 0 @@ -272,7 +274,7 @@ func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Viola for _, k := range sortedKeys { // Execute the prepared query. - rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(inputData)) + rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(engineInput.InputData)) // rs, err := r.Eval(o.Context) if err != nil { zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'")) @@ -284,13 +286,11 @@ func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Viola if len(res) > 0 { // @TODO: Take line number + file info and add to violation regoData := e.RegoDataMap[k] - // @TODO: Remove this print, should be done by whomever consumes the results below - // fmt.Printf("[%s] [%s] [%s] %s: %s\n", regoData.Metadata.Severity, regoData.Metadata.RuleReferenceID, - // regoData.Metadata.Category, regoData.Metadata.RuleName, regoData.Metadata.Description) violation := results.Violation{ Name: regoData.Metadata.RuleName, Description: regoData.Metadata.Description, RuleID: regoData.Metadata.RuleReferenceID, + Severity: regoData.Metadata.Severity, Category: regoData.Metadata.Category, RuleData: regoData.RawRego, InputFile: "", @@ -298,11 +298,23 @@ func (e *Engine) Evaluate(inputData output.AllResourceConfigs) ([]*results.Viola LineNumber: 0, } - e.ViolationStore.AddResult(&violation) + severity := regoData.Metadata.Severity + if strings.ToLower(severity) == "high" { + e.Results.ViolationStore.HighCount++ + } else if strings.ToLower(severity) == "medium" { + e.Results.ViolationStore.MediumCount++ + } else if strings.ToLower(severity) == "low" { + e.Results.ViolationStore.LowCount++ + } else { + zap.S().Warn("invalid severity found in rule definition", + zap.String("rule id", violation.RuleID), zap.String("severity", severity)) + } + e.Results.ViolationStore.TotalCount++ + e.Results.ViolationStore.AddResult(&violation) continue } } } - return e.ViolationStore.GetResults(), nil + return e.Results, nil } diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go index c60748083..980d3b53e 100644 --- a/pkg/policy/opa/types.go +++ b/pkg/policy/opa/types.go @@ -19,7 +19,7 @@ package opa import ( "context" - "github.com/accurics/terrascan/pkg/results" + "github.com/accurics/terrascan/pkg/policy" "github.com/open-policy-agent/opa/rego" ) @@ -54,9 +54,9 @@ type EngineStats struct { // Engine Implements the policy engine interface type Engine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - ViolationStore *results.ViolationStore - stats EngineStats + Context context.Context + RegoFileMap map[string][]byte + RegoDataMap map[string]*RegoData + Results policy.EngineOutput + stats EngineStats } diff --git a/pkg/policy/types.go b/pkg/policy/types.go new file mode 100644 index 000000000..e9020dfb9 --- /dev/null +++ b/pkg/policy/types.go @@ -0,0 +1,16 @@ +package policy + +import ( + "github.com/accurics/terrascan/pkg/iac-providers/output" + "github.com/accurics/terrascan/pkg/results" +) + +// EngineInput Contains data used as input to the engine +type EngineInput struct { + InputData *output.AllResourceConfigs +} + +// 
EngineOutput Contains data output from the engine +type EngineOutput struct { + ViolationStore *results.ViolationStore +} diff --git a/pkg/results/types.go b/pkg/results/types.go index 113f8260f..5ad2f9501 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -19,16 +19,26 @@ package results // Violation Contains data for each violation type Violation struct { Name string `json:"name" yaml:"name" xml:"name,attr"` - Description string `json:"description" yaml:"description" xml:"description, attr"` + Description string `json:"description" yaml:"description" xml:"description,attr"` RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` - InputFile string `json:"-", yaml:"-", xml:"-"` + InputFile string `json:"-" yaml:"-" xml:"-"` InputData interface{} `json:"input_data" yaml:"input_data" xml:"input_data,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } +// ViolationStats Contains stats related to the violation data +type ViolationStats struct { + LowCount int + MediumCount int + HighCount int + TotalCount int +} + // ViolationStore Storage area for violation data type ViolationStore struct { violations []*Violation + ViolationStats } diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index 983deae3d..df36a6315 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -24,7 +24,6 @@ import ( "github.com/accurics/terrascan/pkg/notifications" "github.com/accurics/terrascan/pkg/policy" opa "github.com/accurics/terrascan/pkg/policy/opa" - "github.com/accurics/terrascan/pkg/results" ) // Executor object @@ -96,7 +95,7 @@ func (e *Executor) Init() error { } // Execute validates the inputs, processes the IaC, creates json output -func (e *Executor) Execute() (results []*results.Violation, err error) { +func (e *Executor) Execute() (results policy.EngineOutput, err error) { // create results output from Iac var normalized output.AllResourceConfigs @@ -110,7 +109,7 @@ func (e *Executor) Execute() (results []*results.Violation, err error) { } // evaluate policies - results, err = e.policyEngine.Evaluate(normalized) + results, err = e.policyEngine.Evaluate(policy.EngineInput{InputData: &normalized}) if err != nil { return results, err } From 1afe5b95c088be43bfa3eb912e1f5f5277e59e09 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 15:40:00 +0530 Subject: [PATCH 162/188] fixing violations output --- pkg/policy/opa/engine.go | 8 ++++---- pkg/policy/types.go | 2 +- pkg/results/store.go | 6 +++--- pkg/results/types.go | 14 +++++++------- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 90f4d56ef..f490d1ad5 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -300,16 +300,16 @@ func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, severity := regoData.Metadata.Severity if strings.ToLower(severity) == "high" { - e.Results.ViolationStore.HighCount++ + e.Results.ViolationStore.Count.HighCount++ } else if strings.ToLower(severity) == "medium" { - e.Results.ViolationStore.MediumCount++ + e.Results.ViolationStore.Count.MediumCount++ } else if strings.ToLower(severity) == "low" { - e.Results.ViolationStore.LowCount++ + e.Results.ViolationStore.Count.LowCount++ } else { zap.S().Warn("invalid severity found in rule definition", zap.String("rule 
id", violation.RuleID), zap.String("severity", severity)) } - e.Results.ViolationStore.TotalCount++ + e.Results.ViolationStore.Count.TotalCount++ e.Results.ViolationStore.AddResult(&violation) continue } diff --git a/pkg/policy/types.go b/pkg/policy/types.go index e9020dfb9..f16ddae3c 100644 --- a/pkg/policy/types.go +++ b/pkg/policy/types.go @@ -12,5 +12,5 @@ type EngineInput struct { // EngineOutput Contains data output from the engine type EngineOutput struct { - ViolationStore *results.ViolationStore + *results.ViolationStore } diff --git a/pkg/results/store.go b/pkg/results/store.go index fec48c4e6..8e3252951 100644 --- a/pkg/results/store.go +++ b/pkg/results/store.go @@ -19,16 +19,16 @@ package results // NewViolationStore returns a new violation store func NewViolationStore() *ViolationStore { return &ViolationStore{ - violations: []*Violation{}, + Violations: []*Violation{}, } } // AddResult Adds individual violations into the violation store func (s *ViolationStore) AddResult(violation *Violation) { - s.violations = append(s.violations, violation) + s.Violations = append(s.Violations, violation) } // GetResults Retrieves all violations from the violation store func (s *ViolationStore) GetResults() []*Violation { - return s.violations + return s.Violations } diff --git a/pkg/results/types.go b/pkg/results/types.go index 5ad2f9501..3cac12d18 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -25,20 +25,20 @@ type Violation struct { Category string `json:"category" yaml:"category" xml:"category,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` InputFile string `json:"-" yaml:"-" xml:"-"` - InputData interface{} `json:"input_data" yaml:"input_data" xml:"input_data,attr"` + InputData interface{} `json:"-" yaml:"-" xml:"-"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } // ViolationStats Contains stats related to the violation data type ViolationStats struct { - LowCount int - MediumCount int - HighCount int - TotalCount int + LowCount int `json:"low"` + MediumCount int `json:"medium"` + HighCount int `json:"high"` + TotalCount int `json:"total"` } // ViolationStore Storage area for violation data type ViolationStore struct { - violations []*Violation - ViolationStats + Violations []*Violation `json:"violations"` + Count ViolationStats `json:"count"` } From 759bf66a3602dc036775bd45b87304b6d365607b Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 18:01:01 +0530 Subject: [PATCH 163/188] add source line info to normalized resource config --- pkg/iac-providers/output/types.go | 1 + pkg/iac-providers/terraform/v12/resource.go | 1 + 2 files changed, 2 insertions(+) diff --git a/pkg/iac-providers/output/types.go b/pkg/iac-providers/output/types.go index 681f65f96..2b52a2403 100644 --- a/pkg/iac-providers/output/types.go +++ b/pkg/iac-providers/output/types.go @@ -21,6 +21,7 @@ type ResourceConfig struct { ID string `json:"id"` Name string `json:"name"` Source string `json:"source"` + Line int `json:"line"` Type string `json:"type"` Config interface{} `json:"config"` } diff --git a/pkg/iac-providers/terraform/v12/resource.go b/pkg/iac-providers/terraform/v12/resource.go index 0b6fd6472..1d3f96d77 100644 --- a/pkg/iac-providers/terraform/v12/resource.go +++ b/pkg/iac-providers/terraform/v12/resource.go @@ -53,6 +53,7 @@ func CreateResourceConfig(managedResource *hclConfigs.Resource) (resourceConfig Name: managedResource.Name, Type: managedResource.Type, Source: managedResource.DeclRange.Filename, + Line: managedResource.DeclRange.Start.Line, 
Config: goOut, } From c5ecf5b1d613d5a1fc52b175371c980e1dddda48 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 18:02:02 +0530 Subject: [PATCH 164/188] add output format option to cli args --- cmd/terrascan/main.go | 5 ++++- pkg/cli/run.go | 5 +++-- pkg/writer/writer.go | 4 ++-- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 453c93961..db116a4ff 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -49,6 +49,9 @@ func main() { // config file configFile = flag.String("config", "", "config file path") + + // output type + output = flag.String("output", "yaml", "output format (json, xml, yaml)") ) flag.Parse() @@ -65,6 +68,6 @@ func main() { } else { logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") - cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile, *policyPath) + cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile, *policyPath, *output) } } diff --git a/pkg/cli/run.go b/pkg/cli/run.go index 963a7c22e..3e66cdf68 100644 --- a/pkg/cli/run.go +++ b/pkg/cli/run.go @@ -24,7 +24,8 @@ import ( ) // Run executes terrascan in CLI mode -func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, policyPath string) { +func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, + policyPath, format string) { // create a new runtime executor for processing IaC executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, iacFilePath, @@ -38,5 +39,5 @@ func Run(iacType, iacVersion, cloudType, iacFilePath, iacDirPath, configFile, po if err != nil { return } - writer.Write("yaml", violations, os.Stdout) + writer.Write(format, violations, os.Stdout) } diff --git a/pkg/writer/writer.go b/pkg/writer/writer.go index 97bc799f8..a4ef7f964 100644 --- a/pkg/writer/writer.go +++ b/pkg/writer/writer.go @@ -28,9 +28,9 @@ var ( ) // Write method writes in the given format using the respective writer func -func Write(format supportedFormat, data interface{}, writer io.Writer) error { +func Write(format string, data interface{}, writer io.Writer) error { - writerFunc, present := writerMap[format] + writerFunc, present := writerMap[supportedFormat(format)] if !present { zap.S().Error("output format '%s' not supported", format) return errNotSupported From 1ff02e8764305cf4f59b1c2ce3795317ebd6c084 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Wed, 12 Aug 2020 21:51:08 +0530 Subject: [PATCH 165/188] add xml writer support --- pkg/writer/json.go | 4 +++- pkg/writer/register.go | 10 +++++++--- pkg/writer/writer.go | 3 ++- pkg/writer/xml.go | 40 ++++++++++++++++++++++++++++++++++++++++ pkg/writer/yaml.go | 3 ++- 5 files changed, 54 insertions(+), 6 deletions(-) create mode 100644 pkg/writer/xml.go diff --git a/pkg/writer/json.go b/pkg/writer/json.go index 6fbae649f..4df22391b 100644 --- a/pkg/writer/json.go +++ b/pkg/writer/json.go @@ -19,6 +19,8 @@ package writer import ( "encoding/json" "io" + + "github.com/accurics/terrascan/pkg/policy" ) const ( @@ -30,7 +32,7 @@ func init() { } // JSONWriter prints data in JSON format -func JSONWriter(data interface{}, writer io.Writer) error { +func JSONWriter(data policy.EngineOutput, writer io.Writer) error { j, _ := json.MarshalIndent(data, "", " ") writer.Write(j) writer.Write([]byte{'\n'}) diff --git a/pkg/writer/register.go b/pkg/writer/register.go index 3d7925de1..55fe7b173 100644 --- a/pkg/writer/register.go +++ b/pkg/writer/register.go @@ 
-16,15 +16,19 @@ package writer -import "io" +import ( + "io" + + "github.com/accurics/terrascan/pkg/policy" +) // supportedFormat data type for supported formats type supportedFormat string // writerMap stores mapping of supported writer formats with respective functions -var writerMap = make(map[supportedFormat](func(interface{}, io.Writer) error)) +var writerMap = make(map[supportedFormat](func(policy.EngineOutput, io.Writer) error)) // RegisterWriter registers a writer for terrascan -func RegisterWriter(format supportedFormat, writerFunc func(interface{}, io.Writer) error) { +func RegisterWriter(format supportedFormat, writerFunc func(policy.EngineOutput, io.Writer) error) { writerMap[format] = writerFunc } diff --git a/pkg/writer/writer.go b/pkg/writer/writer.go index a4ef7f964..3773fee7f 100644 --- a/pkg/writer/writer.go +++ b/pkg/writer/writer.go @@ -20,6 +20,7 @@ import ( "fmt" "io" + "github.com/accurics/terrascan/pkg/policy" "go.uber.org/zap" ) @@ -28,7 +29,7 @@ var ( ) // Write method writes in the given format using the respective writer func -func Write(format string, data interface{}, writer io.Writer) error { +func Write(format string, data policy.EngineOutput, writer io.Writer) error { writerFunc, present := writerMap[supportedFormat(format)] if !present { diff --git a/pkg/writer/xml.go b/pkg/writer/xml.go new file mode 100644 index 000000000..2d868f5a9 --- /dev/null +++ b/pkg/writer/xml.go @@ -0,0 +1,40 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package writer + +import ( + "encoding/xml" + "io" + + "github.com/accurics/terrascan/pkg/policy" +) + +const ( + xmlFormat supportedFormat = "xml" +) + +func init() { + RegisterWriter(xmlFormat, XMLWriter) +} + +// XMLWriter prints data in XML format +func XMLWriter(data policy.EngineOutput, writer io.Writer) error { + j, _ := xml.MarshalIndent(data, "", " ") + writer.Write(j) + writer.Write([]byte{'\n'}) + return nil +} diff --git a/pkg/writer/yaml.go b/pkg/writer/yaml.go index 2d7f8d7ca..77346109d 100644 --- a/pkg/writer/yaml.go +++ b/pkg/writer/yaml.go @@ -19,6 +19,7 @@ package writer import ( "io" + "github.com/accurics/terrascan/pkg/policy" "gopkg.in/yaml.v2" ) @@ -31,7 +32,7 @@ func init() { } // YAMLWriter prints data in YAML format -func YAMLWriter(data interface{}, writer io.Writer) error { +func YAMLWriter(data policy.EngineOutput, writer io.Writer) error { j, _ := yaml.Marshal(data) writer.Write(j) writer.Write([]byte{'\n'}) From 31332898066962703661b6cfe7c0aed65eb2b455 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Wed, 12 Aug 2020 23:12:52 -0700 Subject: [PATCH 166/188] =?UTF-8?q?added=20line=20number=20and=20file=20na?= =?UTF-8?q?me=20output=20support=20policy=20changes=E2=80=94removed=20poli?= =?UTF-8?q?cies=20with=20errors=20(will=20need=20to=20fix).=20these=20erro?= =?UTF-8?q?rs=20include=20things=20like=20missing=20reference=20IDs=20and?= =?UTF-8?q?=20invalid=20severity=20strings=20record=20time=20duration=20of?= =?UTF-8?q?=20the=20scan=20fixed=20Engine.GetResults=20and=20updated=20the?= =?UTF-8?q?=20interface=20reduced=20the=20scope=20of=20the=20members=20of?= =?UTF-8?q?=20the=20Engine=20type=20misc=20code=20cleanup?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- go.mod | 2 +- go.sum | 2 + ....EncryptionandKeyManagement.High.0407.json | 7 +- ....EncryptionandKeyManagement.High.0408.json | 7 +- .../AWS.CloudFront.Logging.Medium.0567.json | 7 +- .../AWS.CloudTrail.Logging.High.0399.json | 7 +- .../AWS.CloudTrail.Logging.Low.0559.json | 9 +- .../AWS.CloudTrail.Logging.Medium.0460.json | 7 +- .../opa/rego/aws/aws_db_instance/.json | 13 -- .../rdsPubliclyAccessible.rego | 9 - .../AWS.IamUser.IAM.High.0390.json | 7 +- .../AWS.Iam.IAM.Low.0540.json | 7 +- .../AWS.Iam.IAM.Medium.0454.json | 7 +- .../AWS.Iam.IAM.Medium.0455.json | 7 +- .../AWS.Iam.IAM.Medium.0456.json | 7 +- .../AWS.Iam.IAM.Medium.0457.json | 7 +- .../AWS.Iam.IAM.Medium.0458.json | 7 +- .../AWS.Iam.IAM.Medium.0495.json | 7 +- .../AWS.IamPolicy.IAM.High.0392.json | 7 +- .../AWS.IamPolicy.IAM.High.0392.json | 7 +- .../AWS.IamUser.IAM.High.0387.json | 9 +- .../AWS.IamUser.IAM.High.0388.json | 9 +- ....Instance.NetworkSecurity.Medium.0506.json | 7 +- ....EncryptionandKeyManagement.High.0412.json | 7 +- .../AWS.KMS.Logging.High.0400.json | 7 +- ...hConfiguration.DataSecurity.High.0102.json | 7 +- ....EncryptionandKeyManagement.High.0405.json | 13 -- .../AWS.S3Bucket.IAM.High.0370.json | 13 -- .../AWS.S3Bucket.IAM.High.0377.json | 15 -- .../AWS.S3Bucket.IAM.High.0378.json | 15 -- .../AWS.S3Bucket.IAM.High.0379.json | 15 -- .../AWS.S3Bucket.IAM.High.0381.json | 15 -- ...WS.S3Bucket.NetworkSecurity.High.0417.json | 13 -- .../aws/aws_s3_bucket/noS3BucketSseRules.rego | 9 - .../rego/aws/aws_s3_bucket/s3AclGrants.rego | 8 - .../s3BucketNoWebsiteIndexDoc.rego | 8 - .../aws_s3_bucket/s3VersioningMfaFalse.rego | 10 - .../AWS.IamPolicy.IAM.High.0374.json | 7 +- .../AWS.S3Bucket.IAM.High.0371.json | 7 +- .../AWS.S3Bucket.IAM.High.0372.json | 7 +- 
...curityGroup.NetworkSecurity.High.0094.json | 7 +- .../aws_vpc/AWS.VPC.Logging.Medium.0470.json | 7 +- .../aws_vpc/AWS.VPC.Logging.Medium.0471.json | 7 +- pkg/policy/interface.go | 13 +- pkg/policy/opa/engine.go | 173 +++++++++++------- pkg/policy/opa/types.go | 27 +-- pkg/results/types.go | 20 +- pkg/utils/resource.go | 35 ++++ 48 files changed, 262 insertions(+), 368 deletions(-) delete mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/.json delete mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego delete mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego create mode 100644 pkg/utils/resource.go diff --git a/go.mod b/go.mod index dffe6982b..ea53117c6 100644 --- a/go.mod +++ b/go.mod @@ -15,7 +15,7 @@ require ( github.com/zclconf/go-cty v1.2.1 go.uber.org/zap v1.9.1 golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect - golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 // indirect + golang.org/x/tools v0.0.0-20200812231640-9176cd30088c // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect diff --git a/go.sum b/go.sum index 8aee93936..b2eeab546 100644 --- a/go.sum +++ b/go.sum @@ -447,6 +447,8 @@ golang.org/x/tools v0.0.0-20200809012840-6f4f008689da h1:ml5G98G4/tdKT1XNq+ky5iS golang.org/x/tools v0.0.0-20200809012840-6f4f008689da/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207 h1:8Kg+JssU1jBZs8GIrL5pl4nVyaqyyhdmHAR4D1zGErg= golang.org/x/tools v0.0.0-20200811215021-48a8ffc5b207/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= +golang.org/x/tools v0.0.0-20200812231640-9176cd30088c h1:ZSTOUQugXA1i88foZV5ck1FrcnEYhGmlpiPXgDWmhG0= +golang.org/x/tools v0.0.0-20200812231640-9176cd30088c/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json index e2f3a8a84..87a931b83 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json +++ 
b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudfrontNoHTTPSTraffic", + "name": "cloudfrontNoHTTPSTraffic", "file": "cloudfrontNoHTTPSTraffic.rego", - "ruleTemplate": "cloudfrontNoHTTPSTraffic", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Use encrypted connection between CloudFront and origin server", - "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json index 3ee435f99..417d50dcd 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudfrontNoSecureCiphers", + "name": "cloudfrontNoSecureCiphers", "file": "cloudfrontNoSecureCiphers.rego", - "ruleTemplate": "cloudfrontNoSecureCiphers", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Secure ciphers are not used in CloudFront distribution", - "ruleReferenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json index a20956e92..2d26be5a4 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudfrontNoLogging", + "name": "cloudfrontNoLogging", "file": "cloudfrontNoLogging.rego", - "ruleTemplate": "cloudfrontNoLogging", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", - "ruleReferenceId": "AWS.CloudFront.Logging.Medium.0567", + "referenceId": "AWS.CloudFront.Logging.Medium.0567", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json index 8c070cbad..ba033e951 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.High.0399.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudTrailLogNotEncrypted", + "name": "cloudTrailLogNotEncrypted", "file": "cloudTrailLogNotEncrypted.rego", - "ruleTemplate": "cloudTrailLogNotEncrypted", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Cloud Trail Log Not Enabled", - 
"ruleReferenceId": "AWS.CloudTrail.Logging.High.0399", + "referenceId": "AWS.CloudTrail.Logging.High.0399", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json index 9fcf02b5f..8ff0317d1 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Low.0559.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_enableSNSTopic", + "name": "reme_enableSNSTopic", "file": "enableSNSTopic.rego", - "ruleTemplate": "enableSNSTopic", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure appropriate subscribers to each SNS topic", - "ruleReferenceId": "AWS.CloudTrail.Logging.Low.0559", + "referenceId": "AWS.CloudTrail.Logging.Low.0559", "category": "Logging", - "version": 0 + "version": 1 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json index ec9b6b080..52e43d7ca 100755 --- a/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json +++ b/pkg/policies/opa/rego/aws/aws_cloudtrail/AWS.CloudTrail.Logging.Medium.0460.json @@ -1,13 +1,12 @@ { - "ruleName": "cloudTrailMultiRegionNotCreated", + "name": "cloudTrailMultiRegionNotCreated", "file": "cloudTrailMultiRegionNotCreated.rego", - "ruleTemplate": "cloudTrailMultiRegionNotCreated", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Cloud Trail Multi Region not enabled", - "ruleReferenceId": "AWS.CloudTrail.Logging.Medium.0460", + "referenceId": "AWS.CloudTrail.Logging.Medium.0460", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/.json b/pkg/policies/opa/rego/aws/aws_db_instance/.json deleted file mode 100755 index 06c786984..000000000 --- a/pkg/policies/opa/rego/aws/aws_db_instance/.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "rdsPubliclyAccessible", - "file": "rdsPubliclyAccessible.rego", - "ruleTemplate": "rdsPubliclyAccessible", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "RDS Instance publicly_accessible flag is true", - "ruleReferenceId": "", - "category": "Data Security", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego deleted file mode 100755 index 601e8c85e..000000000 --- a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego +++ /dev/null @@ -1,9 +0,0 @@ -package accurics - -{{.prefix}}rdsPubliclyAccessible[retVal] { - db := input.aws_db_instance[_] - db.config.publicly_accessible == true - traverse = "publicly_accessible" - retVal := { "Id": db.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": db.config.publicly_accessible } -} - diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json index 2e0b4d321..8846f55b0 100755 --- 
a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0390.json @@ -1,13 +1,12 @@ { - "ruleName": "noAccessKeyForRootAccount", + "name": "noAccessKeyForRootAccount", "file": "noAccessKeyForRootAccount.rego", - "ruleTemplate": "noAccessKeyForRootAccount", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "The root account is the most privileged user in an AWS account. AWS Access Keys provide programmatic access to a given AWS account. It is recommended that all access keys associated with the root account be removed. Removing access keys associated with the root account limits vectors by which the account can be compromised. Additionally, removing the root access keys encourages the creation and use of role based accounts that are least privileged.", - "ruleReferenceId": "AWS.IamUser.IAM.High.0390", + "referenceId": "AWS.IamUser.IAM.High.0390", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json index 40b2ad60c..b9df1d00a 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0540.json @@ -1,14 +1,13 @@ { - "ruleName": "passwordRotateEvery90Days", + "name": "passwordRotateEvery90Days", "file": "passwordRotateEvery90Days.rego", - "ruleTemplate": "passwordRotateEvery90Days", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRotateEvery90Days", "prefix": "" }, "severity": "LOW", "description": "Reducing the password lifetime increases account resiliency against brute force login attempts", - "ruleReferenceId": "AWS.Iam.IAM.Low.0540", + "referenceId": "AWS.Iam.IAM.Low.0540", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json index 0be97abb7..3fc67b062 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0454.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireLowerCase", + "name": "passwordRequireLowerCase", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireLowerCase", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireLowerCase", "prefix": "", "required_parameter": "require_lowercase_characters" }, "severity": "MEDIUM", "description": "Lower case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0454", + "referenceId": "AWS.Iam.IAM.Medium.0454", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json index 30595871b..e35773b24 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0455.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireNumber", + "name": "passwordRequireNumber", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireNumber", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireNumber", "prefix": "", "required_parameter": "require_numbers" }, "severity": "MEDIUM", "description": "Number not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0455", + "referenceId": "AWS.Iam.IAM.Medium.0455", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json index a4e46cc3b..a3fa77f15 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0456.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireSymbol", + "name": "passwordRequireSymbol", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireSymbol", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireSymbol", "prefix": "", "required_parameter": "require_symbols" }, "severity": "MEDIUM", "description": "Special symbols not present in the Password, Password Complexity is not high. Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0456", + "referenceId": "AWS.Iam.IAM.Medium.0456", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json index dae9c9fb6..a05fa6b48 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0457.json @@ -1,15 +1,14 @@ { - "ruleName": "passwordRequireUpperCase", + "name": "passwordRequireUpperCase", "file": "passwordPolicyRequirement.rego", - "ruleTemplate": "passwordRequireUpperCase", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireUpperCase", "prefix": "", "required_parameter": "require_uppercase_characters" }, "severity": "MEDIUM", "description": "Upper case alphabet not present in the Password, Password Complexity is not high. 
Increased Password complexity increases resiliency against brute force attack", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0457", + "referenceId": "AWS.Iam.IAM.Medium.0457", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json index 46c09c988..f7099f01a 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0458.json @@ -1,8 +1,7 @@ { - "ruleName": "passwordRequireMinLength14", + "name": "passwordRequireMinLength14", "file": "passwordMinLength.rego", - "ruleTemplate": "passwordRequireMinLength14", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireMinLength14", "parameter": "minimum_password_length", "prefix": "", @@ -10,7 +9,7 @@ }, "severity": "MEDIUM", "description": "Setting a lengthy password increases account resiliency against brute force login attempts", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0458", + "referenceId": "AWS.Iam.IAM.Medium.0458", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json index b5aa69d69..6247989fd 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Medium.0495.json @@ -1,8 +1,7 @@ { - "ruleName": "passwordRequireMinLength", + "name": "passwordRequireMinLength", "file": "passwordMinLength.rego", - "ruleTemplate": "passwordRequireMinLength", - "ruleTemplateArgs": { + "templateArgs": { "name": "passwordRequireMinLength", "parameter": "minimum_password_length", "prefix": "", @@ -10,7 +9,7 @@ }, "severity": "MEDIUM", "description": "Setting a lengthy password increases account resiliency against brute force login attempts", - "ruleReferenceId": "AWS.Iam.IAM.Medium.0495", + "referenceId": "AWS.Iam.IAM.Medium.0495", "category": "IAM", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json index ece948f53..04116e916 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json +++ b/pkg/policies/opa/rego/aws/aws_iam_group_policy/AWS.IamPolicy.IAM.High.0392.json @@ -1,13 +1,12 @@ { - "ruleName": "iamGrpPolicyWithFullAdminCntrl", + "name": "iamGrpPolicyWithFullAdminCntrl", "file": "iamGrpPolicyWithFullAdminCntrl.rego", - "ruleTemplate": "iamGrpPolicyWithFullAdminCntrl", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", - "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "referenceId": "AWS.IamPolicy.IAM.High.0392", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json index d0f99213c..1680636e0 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json +++ b/pkg/policies/opa/rego/aws/aws_iam_policy/AWS.IamPolicy.IAM.High.0392.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_iamPolicyWithFullAdminControl", + "name": "reme_iamPolicyWithFullAdminControl", "file": "iamPolicyWithFullAdminControl.rego", - "ruleTemplate": "iamPolicyWithFullAdminControl", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "HIGH", "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", - "ruleReferenceId": "AWS.IamPolicy.IAM.High.0392", + "referenceId": "AWS.IamPolicy.IAM.High.0392", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json index 706366e9f..cdee7d311 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0387.json @@ -1,13 +1,12 @@ { - "ruleName": "rootUserNotContainMfaTypeHardware", + "name": "rootUserNotContainMfaTypeHardware", "file": "rootUserNotContainMfaTypeHardware.rego", - "ruleTemplate": "rootUserNotContainMfaTypeHardware", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Ensure Hardware MFA device is enabled for the \"root\" account", - "ruleReferenceId": "AWS.IamUser.IAM.High.0387", + "referenceId": "AWS.IamUser.IAM.High.0387", "category": "Identity and Access Management", - "version": 0 + "version": 1 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json index 31623a9b5..4a6c74079 100755 --- a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0388.json @@ -1,13 +1,12 @@ { - "ruleName": "rootUserNotContainMfaTypeVirtual", + "name": "rootUserNotContainMfaTypeVirtual", "file": "rootUserNotContainMfaTypeVirtual.rego", - "ruleTemplate": "rootUserNotContainMfaTypeVirtual", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Ensure Virtual MFA device is enabled for the \"root\" account", - "ruleReferenceId": "AWS.IamUser.IAM.High.0388", + "referenceId": "AWS.IamUser.IAM.High.0388", "category": "Identity and Access Management", - "version": 0 + "version": 1 } \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json index 9b05b3442..c38981ff7 100755 --- a/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json +++ b/pkg/policies/opa/rego/aws/aws_instance/AWS.Instance.NetworkSecurity.Medium.0506.json @@ -1,13 +1,12 @@ { - "ruleName": "instanceWithNoVpc", + "name": "instanceWithNoVpc", "file": "instanceWithNoVpc.rego", - "ruleTemplate": "instanceWithNoVpc", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Instance should be configured in vpc. AWS VPCs provides the controls to facilitate a formal process for approving and testing all network connections and changes to the firewall and router configurations.", - "ruleReferenceId": "AWS.Instance.NetworkSecurity.Medium.0506", + "referenceId": "AWS.Instance.NetworkSecurity.Medium.0506", "category": "Network Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json index 2d4242f06..05a0a7ba5 100755 --- a/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json +++ b/pkg/policies/opa/rego/aws/aws_kinesis_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0412.json @@ -1,13 +1,12 @@ { - "ruleName": "kinesisNotEncryptedWithKms", + "name": "kinesisNotEncryptedWithKms", "file": "aws_kinesis_stream.rego", - "ruleTemplate": "kinesisNotEncryptedWithKms", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Kinesis Streams and metadata are not protected", - "ruleReferenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0412", + "referenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0412", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json index b4074468f..8a7440114 100755 --- a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json +++ b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json @@ -1,13 +1,12 @@ { - "ruleName": "kmsKeyRotationDisabled", + "name": "kmsKeyRotationDisabled", "file": "kmsKeyRotationDisabled.rego", - "ruleTemplate": "kmsKeyRotationDisabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Ensure rotation for customer created CMKs is enabled", - "ruleReferenceId": "AWS.KMS.Logging.High.0400", + "referenceId": "AWS.KMS.Logging.High.0400", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json index d5c4b6dd5..c72437096 100755 --- a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0102.json @@ -1,13 +1,12 @@ { - "ruleName": "hardCodedKey", + "name": "hardCodedKey", "file": "hardCodedKey.rego", - "ruleTemplate": "hardCodedKey", - "ruleTemplateArgs": { + 
"templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Avoid using base64 encoded private keys as part of config", - "ruleReferenceId": "AWS.LaunchConfiguration.DataSecurity.High.0102", + "referenceId": "AWS.LaunchConfiguration.DataSecurity.High.0102", "category": "Data Security", "version": 1 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json deleted file mode 100755 index a758e0b44..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "noS3BucketSseRules", - "file": "noS3BucketSseRules.rego", - "ruleTemplate": "noS3BucketSseRules", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "Ensure that S3 Buckets have server side encryption at rest enabled to protect sensitive data.", - "ruleReferenceId": "AWS.S3Bucket.EncryptionandKeyManagement.High.0405", - "category": "Encryption and Key Management", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json deleted file mode 100755 index 1a38cf231..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "s3VersioningMfaFalse", - "file": "s3VersioningMfaFalse.rego", - "ruleTemplate": "s3VersioningMfaFalse", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "Enabling MFA delete for versioning is a good way to add extra protection to sensitive files stored in buckets.aws s3api put-bucket-versioning --bucket bucketname --versioning-configuration Status=Enabled,MFADelete=Enabled --mfa your-mfa-serial-number mfa-code", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0370", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json deleted file mode 100755 index 4a008b21e..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "allUsersReadAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "allUsersReadAccess", - "ruleTemplateArgs": { - "access": "public-read", - "name": "allUsersReadAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0377", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json deleted file mode 100755 index b9b8584ed..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "authUsersReadAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "authUsersReadAccess", - "ruleTemplateArgs": { - "access": "authenticated-read", - "name": "authUsersReadAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to 
the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0378", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json deleted file mode 100755 index a8286931b..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "allUsersWriteAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "allUsersWriteAccess", - "ruleTemplateArgs": { - "access": "public-read-write", - "name": "allUsersWriteAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0379", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json deleted file mode 100755 index e413dd20e..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json +++ /dev/null @@ -1,15 +0,0 @@ -{ - "ruleName": "allUsersReadWriteAccess", - "file": "s3AclGrants.rego", - "ruleTemplate": "allUsersReadWriteAccess", - "ruleTemplateArgs": { - "access": "public-read-write", - "name": "allUsersReadWriteAccess", - "prefix": "" - }, - "severity": "HIGH", - "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0381", - "category": "IAM", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json deleted file mode 100755 index 1bc2de912..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "s3BucketNoWebsiteIndexDoc", - "file": "s3BucketNoWebsiteIndexDoc.rego", - "ruleTemplate": "s3BucketNoWebsiteIndexDoc", - "ruleTemplateArgs": { - "prefix": "" - }, - "severity": "HIGH", - "description": "Ensure that there are not any static websites being hosted on buckets you aren't aware of", - "ruleReferenceId": "AWS.S3Bucket.NetworkSecurity.High.0417", - "category": "Network Security", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego deleted file mode 100755 index 2661fa0a9..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego +++ /dev/null @@ -1,9 +0,0 @@ -package accurics - -{{.prefix}}noS3BucketSseRules[retVal] { - bucket := input.aws_s3_bucket[_] - bucket.config.server_side_encryption_configuration == [] - rc = "ewogICJzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24iOiB7CiAgICAicnVsZSI6IHsKICAgICAgImFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCI6IHsKICAgICAgICAic3NlX2FsZ29yaXRobSI6ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0=" - traverse = "" - retVal := { "Id": bucket.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "server_side_encryption_configuration", "AttributeDataType": "base64", "Expected": 
rc, "Actual": null } -} diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego deleted file mode 100755 index fc83f4a0f..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego +++ /dev/null @@ -1,8 +0,0 @@ -package accurics - -{{.prefix}}{{.name}}[retVal] { - bucket := input.aws_s3_bucket[_] - bucket.config.acl == "{{.access}}" - traverse = "acl" - retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "acl", "AttributeDataType": "string", "Expected": "private", "Actual": bucket.config.acl } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego deleted file mode 100755 index 7ee714f1e..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego +++ /dev/null @@ -1,8 +0,0 @@ -package accurics - -{{.prefix}}s3BucketNoWebsiteIndexDoc[retVal] { - bucket := input.aws_s3_bucket[_] - count(bucket.config.website) > 0 - traverse = "website" - retVal := { "Id": bucket.id, "ReplaceType": "delete", "CodeType": "block", "Traverse": traverse, "Attribute": "website", "AttributeDataType": "block", "Expected": null, "Actual": null } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego deleted file mode 100755 index d2c28b5b5..000000000 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego +++ /dev/null @@ -1,10 +0,0 @@ -package accurics - -{{.prefix}}s3VersioningMfaFalse[retVal] { - bucket := input.aws_s3_bucket[_] - some i - mfa := bucket.config.versioning[i] - mfa.mfa_delete == false - traverse := sprintf("versioning[%d].mfa_delete", [i]) - retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "versioning.mfa_delete", "AttributeDataType": "bool", "Expected": true, "Actual": mfa.mfa_delete } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json index 2f36ad689..497176f1a 100755 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0374.json @@ -1,15 +1,14 @@ { - "ruleName": "allowListActionFromAllPrncpls", + "name": "allowListActionFromAllPrncpls", "file": "actionsFromAllPrincipals.rego", - "ruleTemplate": "allowListActionFromAllPrncpls", - "ruleTemplateArgs": { + "templateArgs": { "Action": "s3:List", "name": "allowListActionFromAllPrncpls", "prefix": "" }, "severity": "HIGH", "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.IamPolicy.IAM.High.0374", + "referenceId": "AWS.IamPolicy.IAM.High.0374", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json index e6cf3edc3..23beecd9f 100755 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json +++ 
b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0371.json @@ -1,13 +1,12 @@ { - "ruleName": "allowActionsFromAllPrincipals", + "name": "allowActionsFromAllPrincipals", "file": "allowActionsFromAllPrincipals.rego", - "ruleTemplate": "allowActionsFromAllPrincipals", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0371", + "referenceId": "AWS.S3Bucket.IAM.High.0371", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json index bf5b87e94..d56acd755 100755 --- a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.S3Bucket.IAM.High.0372.json @@ -1,15 +1,14 @@ { - "ruleName": "allowDeleteActionFromAllPrncpls", + "name": "allowDeleteActionFromAllPrncpls", "file": "actionsFromAllPrincipals.rego", - "ruleTemplate": "allowDeleteActionFromAllPrncpls", - "ruleTemplateArgs": { + "templateArgs": { "Action": "s3:Delete", "name": "allowDeleteActionFromAllPrncpls", "prefix": "" }, "severity": "HIGH", "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", - "ruleReferenceId": "AWS.S3Bucket.IAM.High.0372", + "referenceId": "AWS.S3Bucket.IAM.High.0372", "category": "Identity and Access Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json index e53f8c6d9..bc83db676 100755 --- a/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json +++ b/pkg/policies/opa/rego/aws/aws_security_group/AWS.SecurityGroup.NetworkSecurity.High.0094.json @@ -1,13 +1,12 @@ { - "ruleName": "unrestrictedIngressAccess", + "name": "unrestrictedIngressAccess", "file": "unrestrictedIngressAccess.rego", - "ruleTemplate": "unrestrictedIngressAccess", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "HIGH", "description": " It is recommended that no security group allows unrestricted ingress access", - "ruleReferenceId": "AWS.SecurityGroup.NetworkSecurity.High.0094", + "referenceId": "AWS.SecurityGroup.NetworkSecurity.High.0094", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json index d44940e99..cae0c8b4b 100755 --- a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0470.json @@ -1,13 +1,12 @@ { - "ruleName": "vpcFlowLogsNotEnabled", + "name": "vpcFlowLogsNotEnabled", "file": "vpcFlowLogsNotEnabled.rego", - "ruleTemplate": "vpcFlowLogsNotEnabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Ensure VPC flow logging is enabled in all VPCs", - "ruleReferenceId": "AWS.VPC.Logging.Medium.0470", + "referenceId": "AWS.VPC.Logging.Medium.0470", "category": "Logging", 
"version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json index 8c22bcc45..d731e86b3 100755 --- a/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json +++ b/pkg/policies/opa/rego/aws/aws_vpc/AWS.VPC.Logging.Medium.0471.json @@ -1,13 +1,12 @@ { - "ruleName": "defaultVpcExist", + "name": "defaultVpcExist", "file": "defaultVpcExist.rego", - "ruleTemplate": "defaultVpcExist", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "" }, "severity": "MEDIUM", "description": "Avoid creating resources in default VPC", - "ruleReferenceId": "AWS.VPC.Logging.Medium.0471", + "referenceId": "AWS.VPC.Logging.Medium.0471", "category": "Logging", "version": 1 } \ No newline at end of file diff --git a/pkg/policy/interface.go b/pkg/policy/interface.go index c74a79886..30162b724 100644 --- a/pkg/policy/interface.go +++ b/pkg/policy/interface.go @@ -16,22 +16,11 @@ package policy -// Manager Policy Manager interface -type Manager interface { - Import() error - Export() error - CreateManager() error -} - // Engine Policy Engine interface type Engine interface { Init(string) error Configure() error Evaluate(EngineInput) (EngineOutput, error) - GetResults() error + GetResults() EngineOutput Release() error } - -// EngineFactory creates policy engine instances based on iac/cloud type -type EngineFactory struct { -} diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index f490d1ad5..9d6e23304 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -28,6 +28,9 @@ import ( "sort" "strings" "text/template" + "time" + + "github.com/accurics/terrascan/pkg/iac-providers/output" "github.com/accurics/terrascan/pkg/policy" @@ -112,8 +115,8 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { return fmt.Errorf("no directories found for path %s", policyPath) } - e.RegoFileMap = make(map[string][]byte) - e.RegoDataMap = make(map[string]*RegoData) + e.regoFileMap = make(map[string][]byte) + e.regoDataMap = make(map[string]*RegoData) // Load rego data files from each dir // First, we read the metadata file, which contains info about the associated rego rule. 
The .rego file data is @@ -157,7 +160,7 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { } // Read in raw rego data from associated rego files - if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.RegoFileMap); err != nil { + if err = e.loadRawRegoFilesIntoMap(dirList[i], regoDataList, &e.regoFileMap); err != nil { zap.S().Debug("error loading raw rego data", zap.String("dir", dirList[i])) continue } @@ -171,23 +174,23 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { // Apply templates if available var templateData bytes.Buffer t := template.New("opa") - _, err = t.Parse(string(e.RegoFileMap[templateFile])) + _, err = t.Parse(string(e.regoFileMap[templateFile])) if err != nil { - zap.S().Debug("unable to parse template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + zap.S().Debug("unable to parse template", zap.String("template", regoDataList[j].Metadata.File)) continue } - if err = t.Execute(&templateData, regoDataList[j].Metadata.RuleTemplateArgs); err != nil { - zap.S().Debug("unable to execute template", zap.String("template", regoDataList[j].Metadata.RuleTemplate)) + if err = t.Execute(&templateData, regoDataList[j].Metadata.TemplateArgs); err != nil { + zap.S().Debug("unable to execute template", zap.String("template", regoDataList[j].Metadata.File)) continue } regoDataList[j].RawRego = templateData.Bytes() - e.RegoDataMap[regoDataList[j].Metadata.RuleName] = regoDataList[j] + e.regoDataMap[regoDataList[j].Metadata.Name] = regoDataList[j] } } - e.stats.ruleCount = len(e.RegoDataMap) - e.stats.regoFileCount = len(e.RegoFileMap) + e.stats.ruleCount = len(e.regoDataMap) + e.stats.regoFileCount = len(e.regoFileMap) zap.S().Debugf("loaded %d Rego rules from %d rego files (%d metadata files).", e.stats.ruleCount, e.stats.regoFileCount, e.stats.metadataFileCount) return err @@ -195,30 +198,30 @@ func (e *Engine) LoadRegoFiles(policyPath string) error { // CompileRegoFiles Compiles rego files for faster evaluation func (e *Engine) CompileRegoFiles() error { - for k := range e.RegoDataMap { + for k := range e.regoDataMap { compiler, err := ast.CompileModules(map[string]string{ - e.RegoDataMap[k].Metadata.RuleName: string(e.RegoDataMap[k].RawRego), + e.regoDataMap[k].Metadata.Name: string(e.regoDataMap[k].RawRego), }) if err != nil { - zap.S().Error("error compiling rego files", zap.String("rule", e.RegoDataMap[k].Metadata.RuleName), - zap.String("raw rego", string(e.RegoDataMap[k].RawRego)), zap.Error(err)) + zap.S().Error("error compiling rego files", zap.String("rule", e.regoDataMap[k].Metadata.Name), + zap.String("raw rego", string(e.regoDataMap[k].RawRego)), zap.Error(err)) return err } r := rego.New( - rego.Query(RuleQueryBase+"."+e.RegoDataMap[k].Metadata.RuleName), + rego.Query(RuleQueryBase+"."+e.regoDataMap[k].Metadata.Name), rego.Compiler(compiler), ) // Create a prepared query that can be evaluated. 
- query, err := r.PrepareForEval(e.Context) + query, err := r.PrepareForEval(e.context) if err != nil { - zap.S().Error("error creating prepared query", zap.String("rule", e.RegoDataMap[k].Metadata.RuleName), - zap.String("raw rego", string(e.RegoDataMap[k].RawRego)), zap.Error(err)) + zap.S().Error("error creating prepared query", zap.String("rule", e.regoDataMap[k].Metadata.Name), + zap.String("raw rego", string(e.regoDataMap[k].RawRego)), zap.Error(err)) return err } - e.RegoDataMap[k].PreparedQuery = &query + e.regoDataMap[k].PreparedQuery = &query } return nil @@ -227,7 +230,7 @@ func (e *Engine) CompileRegoFiles() error { // Init initializes the Opa engine // Handles loading all rules, filtering, compiling, and preparing for evaluation func (e *Engine) Init(policyPath string) error { - e.Context = context.Background() + e.context = context.Background() if err := e.LoadRegoFiles(policyPath); err != nil { zap.S().Error("error loading rego files", zap.String("policy path", policyPath)) @@ -241,7 +244,7 @@ func (e *Engine) Init(policyPath string) error { } // initialize ViolationStore - e.Results.ViolationStore = results.NewViolationStore() + e.results.ViolationStore = results.NewViolationStore() return nil } @@ -252,8 +255,8 @@ func (e *Engine) Configure() error { } // GetResults Fetches results from OPA engine policy evaluation -func (e *Engine) GetResults() error { - return nil +func (e *Engine) GetResults() policy.EngineOutput { + return e.results } // Release Performs any tasks required to free resources @@ -261,60 +264,106 @@ func (e *Engine) Release() error { return nil } -// Evaluate Executes compiled OPA queries against the input JSON data -func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, error) { +// reportViolation Add a violation for a given resource +func (e *Engine) reportViolation(regoData *RegoData, resource *output.ResourceConfig) { + violation := results.Violation{ + RuleName: regoData.Metadata.Name, + Description: regoData.Metadata.Description, + RuleID: regoData.Metadata.ReferenceID, + Severity: regoData.Metadata.Severity, + Category: regoData.Metadata.Category, + RuleData: regoData.RawRego, + ResourceName: resource.Name, + ResourceType: resource.Type, + ResourceData: resource.Config, + File: resource.Source, + LineNumber: resource.Line, + } - sortedKeys := make([]string, len(e.RegoDataMap)) - x := 0 - for k := range e.RegoDataMap { - sortedKeys[x] = k - x++ + severity := regoData.Metadata.Severity + if strings.ToLower(severity) == "high" { + e.results.ViolationStore.Count.HighCount++ + } else if strings.ToLower(severity) == "medium" { + e.results.ViolationStore.Count.MediumCount++ + } else if strings.ToLower(severity) == "low" { + e.results.ViolationStore.Count.LowCount++ + } else { + zap.S().Warn("invalid severity found in rule definition", + zap.String("rule id", violation.RuleID), zap.String("severity", severity)) } - sort.Strings(sortedKeys) + e.results.ViolationStore.Count.TotalCount++ + + e.results.ViolationStore.AddResult(&violation) +} + +// Evaluate Executes compiled OPA queries against the input JSON data +func (e *Engine) Evaluate(engineInput policy.EngineInput) (policy.EngineOutput, error) { + // Keep track of how long it takes to evaluate the policies + start := time.Now() - for _, k := range sortedKeys { + // Evaluate the policy against each resource type + for k := range e.regoDataMap { // Execute the prepared query. 
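A rough sketch of how the updated policy.Engine interface is driven end to end under these changes: Init loads and compiles the rego rules, Evaluate fills the violation store, and GetResults now hands back policy.EngineOutput rather than just an error. Constructing opa.Engine as a zero value and the policy path below are assumptions for illustration only.

package main

import (
	"log"

	"github.com/accurics/terrascan/pkg/iac-providers/output"
	"github.com/accurics/terrascan/pkg/policy"
	"github.com/accurics/terrascan/pkg/policy/opa"
)

func scan(normalized output.AllResourceConfigs) (policy.EngineOutput, error) {
	var engine opa.Engine // zero value; Init populates the unexported rego maps

	if err := engine.Init("/path/to/rego/policies"); err != nil { // hypothetical policy path
		return policy.EngineOutput{}, err
	}
	defer engine.Release()

	// InputData is the normalized IaC output produced by an iac-provider.
	if _, err := engine.Evaluate(policy.EngineInput{InputData: &normalized}); err != nil {
		return policy.EngineOutput{}, err
	}

	// GetResults returns the same violation store Evaluate just populated.
	return engine.GetResults(), nil
}

func main() {
	if _, err := scan(output.AllResourceConfigs{}); err != nil {
		log.Println("scan failed:", err)
	}
}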
- rs, err := e.RegoDataMap[k].PreparedQuery.Eval(e.Context, rego.EvalInput(engineInput.InputData)) - // rs, err := r.Eval(o.Context) + rs, err := e.regoDataMap[k].PreparedQuery.Eval(e.context, rego.EvalInput(engineInput.InputData)) if err != nil { zap.S().Warn("failed to run prepared query", zap.String("rule", "'"+k+"'")) continue } - if len(rs) > 0 { - res := rs[0].Expressions[0].Value.([]interface{}) - if len(res) > 0 { - // @TODO: Take line number + file info and add to violation - regoData := e.RegoDataMap[k] - violation := results.Violation{ - Name: regoData.Metadata.RuleName, - Description: regoData.Metadata.Description, - RuleID: regoData.Metadata.RuleReferenceID, - Severity: regoData.Metadata.Severity, - Category: regoData.Metadata.Category, - RuleData: regoData.RawRego, - InputFile: "", - InputData: res, - LineNumber: 0, + if len(rs) == 0 || len(rs[0].Expressions) == 0 { + continue + } + + resourceViolations := rs[0].Expressions[0].Value.([]interface{}) + if len(resourceViolations) == 0 { + continue + } + + // Report a violation for each resource returned by the policy evaluation + for i := range resourceViolations { + var resourceID string + + // The return values come in two categories--either a map[string]interface{} type, where the "Id" key + // contains the resource ID, or a string type which is the resource ID. This resource ID is where a + // violation was found + switch res := resourceViolations[i].(type) { + case map[string]interface{}: + _, ok := res["Id"] + if !ok { + zap.S().Warn("no Id key found in resource map", zap.Any("resource", res)) + continue } - severity := regoData.Metadata.Severity - if strings.ToLower(severity) == "high" { - e.Results.ViolationStore.Count.HighCount++ - } else if strings.ToLower(severity) == "medium" { - e.Results.ViolationStore.Count.MediumCount++ - } else if strings.ToLower(severity) == "low" { - e.Results.ViolationStore.Count.LowCount++ - } else { - zap.S().Warn("invalid severity found in rule definition", - zap.String("rule id", violation.RuleID), zap.String("severity", severity)) + _, ok = res["Id"].(string) + if !ok { + zap.S().Warn("id key was invalid", zap.Any("resource", res)) + continue } - e.Results.ViolationStore.Count.TotalCount++ - e.Results.ViolationStore.AddResult(&violation) + resourceID = res["Id"].(string) + case string: + resourceID = res + default: + zap.S().Warn("resource ID format was invalid", zap.Any("resource", res)) continue } + + // Locate the resource details within the input map + var resource *output.ResourceConfig + resource, err = utils.FindResourceByID(resourceID, engineInput.InputData) + if err != nil { + zap.S().Error(err) + continue + } + if resource == nil { + zap.S().Warn("resource was not found", zap.String("resource id", resourceID)) + continue + } + + // Report the violation + e.reportViolation(e.regoDataMap[k], resource) } } - return e.Results, nil + e.stats.runTime = time.Since(start) + return e.results, nil } diff --git a/pkg/policy/opa/types.go b/pkg/policy/opa/types.go index 980d3b53e..d4aea3d6e 100644 --- a/pkg/policy/opa/types.go +++ b/pkg/policy/opa/types.go @@ -18,6 +18,7 @@ package opa import ( "context" + "time" "github.com/accurics/terrascan/pkg/policy" @@ -26,15 +27,14 @@ import ( // RegoMetadata The rego metadata struct which is read and saved from disk type RegoMetadata struct { - RuleName string `json:"ruleName"` - File string `json:"file"` - RuleTemplate string `json:"ruleTemplate"` - RuleTemplateArgs map[string]interface{} `json:"ruleTemplateArgs"` - Severity string 
`json:"severity"` - Description string `json:"description"` - RuleReferenceID string `json:"ruleReferenceId"` - Category string `json:"category"` - Version int `json:"version"` + Name string `json:"name"` + File string `json:"file"` + TemplateArgs map[string]interface{} `json:"templateArgs"` + Severity string `json:"severity"` + Description string `json:"description"` + ReferenceID string `json:"referenceId"` + Category string `json:"category"` + Version int `json:"version"` } // RegoData Stores all information needed to evaluate and report on a rego rule @@ -50,13 +50,14 @@ type EngineStats struct { regoFileCount int metadataFileCount int metadataCount int + runTime time.Duration } // Engine Implements the policy engine interface type Engine struct { - Context context.Context - RegoFileMap map[string][]byte - RegoDataMap map[string]*RegoData - Results policy.EngineOutput + results policy.EngineOutput + context context.Context + regoFileMap map[string][]byte + regoDataMap map[string]*RegoData stats EngineStats } diff --git a/pkg/results/types.go b/pkg/results/types.go index 3cac12d18..a2600e23c 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -18,15 +18,17 @@ package results // Violation Contains data for each violation type Violation struct { - Name string `json:"name" yaml:"name" xml:"name,attr"` - Description string `json:"description" yaml:"description" xml:"description,attr"` - RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` - Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` - Category string `json:"category" yaml:"category" xml:"category,attr"` - RuleData interface{} `json:"-" yaml:"-" xml:"-"` - InputFile string `json:"-" yaml:"-" xml:"-"` - InputData interface{} `json:"-" yaml:"-" xml:"-"` - LineNumber int `json:"line" yaml:"line" xml:"line,attr"` + RuleName string `json:"ruleName" yaml:"ruleName" xml:"ruleName,attr"` + Description string `json:"description" yaml:"description" xml:"description,attr"` + RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` + Category string `json:"category" yaml:"category" xml:"category,attr"` + RuleData interface{} `json:"-" yaml:"-" xml:"-"` + ResourceName string `json:"resourceName" yaml:"resourceName" xml:"resourceName,attr"` + ResourceType string `json:"resourceType" yaml:"resourceType" xml:"resourceType,attr"` + ResourceData interface{} `json:"-" yaml:"-" xml:"-"` + File string `json:"-" yaml:"-" xml:"-"` + LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } // ViolationStats Contains stats related to the violation data diff --git a/pkg/utils/resource.go b/pkg/utils/resource.go new file mode 100644 index 000000000..2d271e493 --- /dev/null +++ b/pkg/utils/resource.go @@ -0,0 +1,35 @@ +package utils + +import ( + "fmt" + "strings" + + "github.com/accurics/terrascan/pkg/iac-providers/output" +) + +// FindResourceByID Finds a given resource within the resource map and returns a reference to that resource +func FindResourceByID(resourceID string, normalizedResources *output.AllResourceConfigs) (*output.ResourceConfig, error) { + resTypeName := strings.Split(resourceID, ".") + if len(resTypeName) < 2 { + return nil, fmt.Errorf("resource ID has an invalid format %s", resourceID) + } + + resourceType := resTypeName[0] + + found := false + var resource output.ResourceConfig + resourceTypeList := (*normalizedResources)[resourceType] + for i := range resourceTypeList { + if resourceTypeList[i].ID == resourceID { + resource = 
resourceTypeList[i] + found = true + break + } + } + + if !found { + return nil, nil + } + + return &resource, nil +} From 0ecec7749d4ee466b66a7dde5208db9b6396f093 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Thu, 13 Aug 2020 00:28:39 -0700 Subject: [PATCH 167/188] fix file name not showing in output also adds resource data in the violation --- pkg/policy/opa/engine.go | 1 + pkg/results/types.go | 5 +++-- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/pkg/policy/opa/engine.go b/pkg/policy/opa/engine.go index 9d6e23304..52ee00f25 100644 --- a/pkg/policy/opa/engine.go +++ b/pkg/policy/opa/engine.go @@ -272,6 +272,7 @@ func (e *Engine) reportViolation(regoData *RegoData, resource *output.ResourceCo RuleID: regoData.Metadata.ReferenceID, Severity: regoData.Metadata.Severity, Category: regoData.Metadata.Category, + RuleFile: regoData.Metadata.File, RuleData: regoData.RawRego, ResourceName: resource.Name, ResourceType: resource.Type, diff --git a/pkg/results/types.go b/pkg/results/types.go index a2600e23c..4664b258b 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -23,11 +23,12 @@ type Violation struct { RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` + RuleFile string `json:"ruleFile" yaml:"ruleFile" xml:"ruleFile,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` ResourceName string `json:"resourceName" yaml:"resourceName" xml:"resourceName,attr"` ResourceType string `json:"resourceType" yaml:"resourceType" xml:"resourceType,attr"` - ResourceData interface{} `json:"-" yaml:"-" xml:"-"` - File string `json:"-" yaml:"-" xml:"-"` + ResourceData interface{} `json:"resourceData" yaml:"resourceData" xml:"resourceData,attr"` + File string `json:"file" yaml:"file" xml:"file,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } From cb7c3eee451ff5eb879423d560bc2030bcad247a Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 12:58:16 +0530 Subject: [PATCH 168/188] fix file paths for terraform config dir --- pkg/iac-providers/terraform/v12/load-dir.go | 15 +++++++++++---- 1 file changed, 11 insertions(+), 4 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index b0ae7bc80..1002f18df 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -17,17 +17,17 @@ package tfv12 import ( + "bytes" "fmt" "path/filepath" "strings" + "github.com/accurics/terrascan/pkg/iac-providers/output" version "github.com/hashicorp/go-version" "github.com/hashicorp/hcl/v2" hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" "go.uber.org/zap" - - "github.com/accurics/terrascan/pkg/iac-providers/output" ) var ( @@ -122,8 +122,8 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour return allResourcesConfig, fmt.Errorf("failed to create ResourceConfig") } - // append resource config to list of all resources - // allResourcesConfig = append(allResourcesConfig, resourceConfig) + // trimFilePath + resourceConfig.Source = trimFilePath(resourceConfig.Source, absRootDir) // append to normalized output if _, present := allResourcesConfig[resourceConfig.Type]; !present { @@ -142,3 +142,10 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour // successful return allResourcesConfig, nil } + +// trimFilePath returns relative file 
path wrt to the base path +func trimFilePath(fullPath, basePath string) string { + basePath = strings.TrimSuffix(basePath, "/") + splits := bytes.Split([]byte(fullPath), []byte(basePath)) + return strings.TrimPrefix(string(splits[1]), "/") +} From 4bf1ab51fdebc506694f8d91bdb09ba51db04200 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 13:28:38 +0530 Subject: [PATCH 169/188] fixing output json, yaml tags --- pkg/policy/types.go | 2 +- pkg/results/types.go | 12 ++++++------ pkg/writer/xml.go | 7 ++++++- 3 files changed, 13 insertions(+), 8 deletions(-) diff --git a/pkg/policy/types.go b/pkg/policy/types.go index f16ddae3c..97f01fd2e 100644 --- a/pkg/policy/types.go +++ b/pkg/policy/types.go @@ -12,5 +12,5 @@ type EngineInput struct { // EngineOutput Contains data output from the engine type EngineOutput struct { - *results.ViolationStore + *results.ViolationStore `json:"results" yaml:"results" xml:"results,attr"` } diff --git a/pkg/results/types.go b/pkg/results/types.go index 4664b258b..e9b99ed59 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -34,14 +34,14 @@ type Violation struct { // ViolationStats Contains stats related to the violation data type ViolationStats struct { - LowCount int `json:"low"` - MediumCount int `json:"medium"` - HighCount int `json:"high"` - TotalCount int `json:"total"` + LowCount int `json:"low" yaml:"low" xml:"low,attr"` + MediumCount int `json:"medium" yaml:"medium" xml:"medium,attr"` + HighCount int `json:"high" yaml:"high" xml:"high,attr"` + TotalCount int `json:"total" yaml:"total" xml:"total,attr"` } // ViolationStore Storage area for violation data type ViolationStore struct { - Violations []*Violation `json:"violations"` - Count ViolationStats `json:"count"` + Violations []*Violation `json:"violations" yaml:"violations" xml:"violations,attr"` + Count ViolationStats `json:"count" yaml:"count" xml:"count,attr"` } diff --git a/pkg/writer/xml.go b/pkg/writer/xml.go index 2d868f5a9..c86e451bb 100644 --- a/pkg/writer/xml.go +++ b/pkg/writer/xml.go @@ -21,6 +21,7 @@ import ( "io" "github.com/accurics/terrascan/pkg/policy" + "go.uber.org/zap" ) const ( @@ -33,7 +34,11 @@ func init() { // XMLWriter prints data in XML format func XMLWriter(data policy.EngineOutput, writer io.Writer) error { - j, _ := xml.MarshalIndent(data, "", " ") + j, err := xml.MarshalIndent(data, "", " ") + if err != nil { + zap.S().Errorf("failed to write XML output. 
error: '%v'", err) + return err + } writer.Write(j) writer.Write([]byte{'\n'}) return nil From 9cef225ab5f4eb6eb1489ed7974fbd2da44d0eae Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 14:55:50 +0530 Subject: [PATCH 170/188] fix iac file path --- pkg/iac-providers/terraform/v12/load-file.go | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/load-file.go b/pkg/iac-providers/terraform/v12/load-file.go index 7f4e3c2d2..07af9a928 100644 --- a/pkg/iac-providers/terraform/v12/load-file.go +++ b/pkg/iac-providers/terraform/v12/load-file.go @@ -18,12 +18,12 @@ package tfv12 import ( "fmt" + "path/filepath" + "github.com/accurics/terrascan/pkg/iac-providers/output" hclConfigs "github.com/hashicorp/terraform/configs" "github.com/spf13/afero" "go.uber.org/zap" - - "github.com/accurics/terrascan/pkg/iac-providers/output" ) var ( @@ -58,8 +58,8 @@ func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllReso return allResourcesConfig, fmt.Errorf("failed to create ResourceConfig") } - // append resource config to list of all resources - // allResourcesConfig = append(allResourcesConfig, resourceConfig) + // extract file name from path + resourceConfig.Source = getFileName(resourceConfig.Source) // append to normalized output if _, present := allResourcesConfig[resourceConfig.Type]; !present { @@ -72,3 +72,9 @@ func (*TfV12) LoadIacFile(absFilePath string) (allResourcesConfig output.AllReso // successful return allResourcesConfig, nil } + +// getFileName return file name from the given file path +func getFileName(path string) string { + _, file := filepath.Split(path) + return file +} From 52bfc38ac2ce51824393ce852c2eac5a1a53f10d Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 15:09:48 +0530 Subject: [PATCH 171/188] change json, yaml tags from camel case to underscore notation --- pkg/results/types.go | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/pkg/results/types.go b/pkg/results/types.go index e9b99ed59..5bfda9544 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -18,16 +18,16 @@ package results // Violation Contains data for each violation type Violation struct { - RuleName string `json:"ruleName" yaml:"ruleName" xml:"ruleName,attr"` + RuleName string `json:"rule_name" yaml:"rule_name" xml:"rule_name,attr"` Description string `json:"description" yaml:"description" xml:"description,attr"` RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` - RuleFile string `json:"ruleFile" yaml:"ruleFile" xml:"ruleFile,attr"` + RuleFile string `json:"rule_file" yaml:"rule_file" xml:"rule_file,attr"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` - ResourceName string `json:"resourceName" yaml:"resourceName" xml:"resourceName,attr"` - ResourceType string `json:"resourceType" yaml:"resourceType" xml:"resourceType,attr"` - ResourceData interface{} `json:"resourceData" yaml:"resourceData" xml:"resourceData,attr"` + ResourceName string `json:"resource_name" yaml:"resource_name" xml:"resource_name,attr"` + ResourceType string `json:"resource_type" yaml:"resource_type" xml:"resource_type,attr"` + ResourceData interface{} `json:"resource_data" yaml:"resource_data" xml:"resource_data,attr"` File string `json:"file" yaml:"file" xml:"file,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } From 
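For quick reference, a minimal standalone sketch of how the two path helpers added in these commits behave. The sample paths are made up, and trimFilePath is restated here with strings.SplitN for brevity where the original uses bytes.Split; the observable result is the same for a base path that prefixes the full path.

    package main

    import (
        "fmt"
        "path/filepath"
        "strings"
    )

    // simplified restatement of trimFilePath from load-dir.go
    func trimFilePath(fullPath, basePath string) string {
        basePath = strings.TrimSuffix(basePath, "/")
        parts := strings.SplitN(fullPath, basePath, 2)
        return strings.TrimPrefix(parts[1], "/")
    }

    // simplified restatement of getFileName from load-file.go
    func getFileName(path string) string {
        _, file := filepath.Split(path)
        return file
    }

    func main() {
        fmt.Println(trimFilePath("/home/user/project/cloudfront/main.tf", "/home/user/project")) // cloudfront/main.tf
        fmt.Println(getFileName("/home/user/project/config1.tf"))                                // config1.tf
    }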
b50538af1c9a974f43a84a0c44d0cef485fcd0d8 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 15:17:42 +0530 Subject: [PATCH 172/188] fix TestFindAllDirectories unit tests --- pkg/utils/path_test.go | 15 ++++++++------- pkg/utils/testdata/emptydir/somefile.txt | 1 + pkg/utils/testdata/testdir1/somefile.txt | 1 + pkg/utils/testdata/testdir2/somefile.txt | 1 + 4 files changed, 11 insertions(+), 7 deletions(-) create mode 100644 pkg/utils/testdata/emptydir/somefile.txt create mode 100644 pkg/utils/testdata/testdir1/somefile.txt create mode 100644 pkg/utils/testdata/testdir2/somefile.txt diff --git a/pkg/utils/path_test.go b/pkg/utils/path_test.go index 5b47d4076..2344c50e7 100644 --- a/pkg/utils/path_test.go +++ b/pkg/utils/path_test.go @@ -17,7 +17,6 @@ package utils import ( - "fmt" "os" "reflect" "testing" @@ -90,12 +89,6 @@ func TestFindAllDirectories(t *testing.T) { want: []string{"./testdata/emptydir"}, wantErr: nil, }, - { - name: "invalid dir", - basePath: "./testdata/nothere", - want: []string{}, - wantErr: fmt.Errorf("lstat ./testdata/nothere: no such file or directory"), - }, } for _, tt := range table { @@ -109,4 +102,12 @@ func TestFindAllDirectories(t *testing.T) { } }) } + + t.Run("invalid dir", func(t *testing.T) { + basePath := "./testdata/nothere" + _, gotErr := FindAllDirectories(basePath) + if gotErr == nil { + t.Errorf("got no error; error expected") + } + }) } diff --git a/pkg/utils/testdata/emptydir/somefile.txt b/pkg/utils/testdata/emptydir/somefile.txt new file mode 100644 index 000000000..ebf038b91 --- /dev/null +++ b/pkg/utils/testdata/emptydir/somefile.txt @@ -0,0 +1 @@ +somefile diff --git a/pkg/utils/testdata/testdir1/somefile.txt b/pkg/utils/testdata/testdir1/somefile.txt new file mode 100644 index 000000000..ebf038b91 --- /dev/null +++ b/pkg/utils/testdata/testdir1/somefile.txt @@ -0,0 +1 @@ +somefile diff --git a/pkg/utils/testdata/testdir2/somefile.txt b/pkg/utils/testdata/testdir2/somefile.txt new file mode 100644 index 000000000..ebf038b91 --- /dev/null +++ b/pkg/utils/testdata/testdir2/somefile.txt @@ -0,0 +1 @@ +somefile From c6bf4ce9e39fa1b9928d33d886c026732598d9ca Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 15:59:17 +0530 Subject: [PATCH 173/188] fix terraform v12 unit tests --- pkg/iac-providers/terraform/v12/load-dir.go | 3 +- .../v12/testdata/tfjson/config1.json | 24 ++++++++---- .../v12/testdata/tfjson/dummyconfig.json | 12 ++++-- .../v12/testdata/tfjson/fullconfig.json | 24 ++++++++---- .../v12/testdata/tfjson/moduleconfigs.json | 39 ++++++++++++------- pkg/runtime/validate.go | 18 +++++---- 6 files changed, 78 insertions(+), 42 deletions(-) diff --git a/pkg/iac-providers/terraform/v12/load-dir.go b/pkg/iac-providers/terraform/v12/load-dir.go index 1002f18df..4a983e8d6 100644 --- a/pkg/iac-providers/terraform/v12/load-dir.go +++ b/pkg/iac-providers/terraform/v12/load-dir.go @@ -145,7 +145,8 @@ func (*TfV12) LoadIacDir(absRootDir string) (allResourcesConfig output.AllResour // trimFilePath returns relative file path wrt to the base path func trimFilePath(fullPath, basePath string) string { - basePath = strings.TrimSuffix(basePath, "/") + basePath = strings.Trim(basePath, ".") + basePath = strings.Trim(basePath, "/") splits := bytes.Split([]byte(fullPath), []byte(basePath)) return strings.TrimPrefix(string(splits[1]), "/") } diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json index e5bcee920..8c7f7cd8a 100644 --- 
a/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/config1.json @@ -3,7 +3,8 @@ { "id": "aws_instance.instance_playground", "name": "instance_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 78, "type": "aws_instance", "config": { "ami": "${lookup(var.aws_amis, var.aws_region)}", @@ -44,7 +45,8 @@ { "id": "aws_internet_gateway.igw_playground", "name": "igw_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 15, "type": "aws_internet_gateway", "config": { "tags": { @@ -58,7 +60,8 @@ { "id": "aws_key_pair.ec2key_playground", "name": "ec2key_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 73, "type": "aws_key_pair", "config": { "key_name": "testKey", @@ -70,7 +73,8 @@ { "id": "aws_route_table.rtb_public_playground", "name": "rtb_public_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 31, "type": "aws_route_table", "config": { "route": [ @@ -90,7 +94,8 @@ { "id": "aws_route_table_association.rta_subnet_public_playground", "name": "rta_subnet_public_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 42, "type": "aws_route_table_association", "config": { "route_table_id": "${aws_route_table.rtb_public_playground.id}", @@ -102,7 +107,8 @@ { "id": "aws_security_group.sg_playground", "name": "sg_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 47, "type": "aws_security_group", "config": { "egress": [ @@ -145,7 +151,8 @@ { "id": "aws_subnet.subnet_public_playground", "name": "subnet_public_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 22, "type": "aws_subnet", "config": { "cidr_block": "${var.cidr_subnet}", @@ -161,7 +168,8 @@ { "id": "aws_vpc.vpc_playground", "name": "vpc_playground", - "source": "./testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 6, "type": "aws_vpc", "config": { "cidr_block": "${var.cidr_vpc}", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json index 0c7d8044a..bbf33b988 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/dummyconfig.json @@ -3,7 +3,8 @@ { "id": "terraform_remote_state.remote", "name": "remote", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 41, "type": "terraform_remote_state", "config": { "backend": "s3", @@ -20,7 +21,8 @@ { "id": "type1.resource1", "name": "resource1", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 1, "type": "type1", "config": { "arr": [ @@ -44,7 +46,8 @@ { "id": "type2.resource2", "name": "resource2", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 13, "type": "type2", "config": { "other": { @@ -64,7 +67,8 @@ { "id": "type3.resource3", "name": "resource3", - "source": "./testdata/dummyconfig/dummyconfig.tf", + "source": "dummyconfig.tf", + "line": 26, "type": "type3", "config": { "cond": "${test3 \u003e 2 ? 
1: 0}", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json index 1a36646ca..8c7f7cd8a 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/fullconfig.json @@ -3,7 +3,8 @@ { "id": "aws_instance.instance_playground", "name": "instance_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 78, "type": "aws_instance", "config": { "ami": "${lookup(var.aws_amis, var.aws_region)}", @@ -44,7 +45,8 @@ { "id": "aws_internet_gateway.igw_playground", "name": "igw_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 15, "type": "aws_internet_gateway", "config": { "tags": { @@ -58,7 +60,8 @@ { "id": "aws_key_pair.ec2key_playground", "name": "ec2key_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 73, "type": "aws_key_pair", "config": { "key_name": "testKey", @@ -70,7 +73,8 @@ { "id": "aws_route_table.rtb_public_playground", "name": "rtb_public_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 31, "type": "aws_route_table", "config": { "route": [ @@ -90,7 +94,8 @@ { "id": "aws_route_table_association.rta_subnet_public_playground", "name": "rta_subnet_public_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 42, "type": "aws_route_table_association", "config": { "route_table_id": "${aws_route_table.rtb_public_playground.id}", @@ -102,7 +107,8 @@ { "id": "aws_security_group.sg_playground", "name": "sg_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 47, "type": "aws_security_group", "config": { "egress": [ @@ -145,7 +151,8 @@ { "id": "aws_subnet.subnet_public_playground", "name": "subnet_public_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 22, "type": "aws_subnet", "config": { "cidr_block": "${var.cidr_subnet}", @@ -161,7 +168,8 @@ { "id": "aws_vpc.vpc_playground", "name": "vpc_playground", - "source": "testdata/tfconfigs/config1.tf", + "source": "config1.tf", + "line": 6, "type": "aws_vpc", "config": { "cidr_block": "${var.cidr_vpc}", diff --git a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json index 48a10dabf..65764ec73 100644 --- a/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json +++ b/pkg/iac-providers/terraform/v12/testdata/tfjson/moduleconfigs.json @@ -3,7 +3,8 @@ { "id": "aws_cloudfront_distribution.s3-distribution-TLS-v1", "name": "s3-distribution-TLS-v1", - "source": "testdata/moduleconfigs/cloudfront/main.tf", + "source": "cloudfront/main.tf", + "line": 6, "type": "aws_cloudfront_distribution", "config": { "default_cache_behavior": [ @@ -130,7 +131,8 @@ { "id": "aws_cloudtrail.missing-multi-region", "name": "missing-multi-region", - "source": "testdata/moduleconfigs/cloudtrail/main.tf", + "source": "cloudtrail/main.tf", + "line": 1, "type": "aws_cloudtrail", "config": { "include_global_service_events": false, @@ -144,7 +146,8 @@ { "id": "aws_ecs_task_definition.instanceNotInVpc", "name": "instanceNotInVpc", - "source": "testdata/moduleconfigs/ecs/main.tf", + "source": "ecs/main.tf", + "line": 1, "type": "aws_ecs_task_definition", "config": { "container_definitions": "${file(\"ecs/service.json\")}", @@ -157,7 +160,8 @@ { "id": 
"aws_efs_file_system.efsNotEncrypted", "name": "efsNotEncrypted", - "source": "testdata/moduleconfigs/efs/main.tf", + "source": "efs/main.tf", + "line": 1, "type": "aws_efs_file_system", "config": { "creation_token": "my-product", @@ -171,7 +175,8 @@ { "id": "aws_elasticache_cluster.noMemcachedInElastiCache", "name": "noMemcachedInElastiCache", - "source": "testdata/moduleconfigs/elasticcache/main.tf", + "source": "elasticcache/main.tf", + "line": 1, "type": "aws_elasticache_cluster", "config": { "cluster_id": "cluster-example", @@ -187,7 +192,8 @@ { "id": "aws_guardduty_detector.gaurdDutyDisabled", "name": "gaurdDutyDisabled", - "source": "testdata/moduleconfigs/guardduty/main.tf", + "source": "guardduty/main.tf", + "line": 1, "type": "aws_guardduty_detector", "config": { "enable": false @@ -198,7 +204,8 @@ { "id": "aws_iam_access_key.noAccessKeyForRootAccount", "name": "noAccessKeyForRootAccount", - "source": "testdata/moduleconfigs/iam/main.tf", + "source": "iam/main.tf", + "line": 1, "type": "aws_iam_access_key", "config": { "pgp_key": "keybase:some_person_that_exists", @@ -211,7 +218,8 @@ { "id": "aws_kinesis_stream.kinesisEncryptedWithKms", "name": "kinesisEncryptedWithKms", - "source": "testdata/moduleconfigs/kinesis/main.tf", + "source": "kinesis/main.tf", + "line": 1, "type": "aws_kinesis_stream", "config": { "encryption_type": "KMS", @@ -233,7 +241,8 @@ { "id": "aws_kms_key.kmsKeyDisabled", "name": "kmsKeyDisabled", - "source": "testdata/moduleconfigs/cloudfront/sub-cloudfront/main.tf", + "source": "cloudfront/sub-cloudfront/main.tf", + "line": 1, "type": "aws_kms_key", "config": { "description": "KMS key 2", @@ -249,7 +258,8 @@ { "id": "aws_load_balancer_policy.elbWeakCipher", "name": "elbWeakCipher", - "source": "testdata/moduleconfigs/elb/main.tf", + "source": "elb/main.tf", + "line": 1, "type": "aws_load_balancer_policy", "config": { "load_balancer_name": "some-name", @@ -268,7 +278,8 @@ { "id": "aws_s3_bucket.noS3BucketSseRules", "name": "noS3BucketSseRules", - "source": "testdata/moduleconfigs/s3/main.tf", + "source": "s3/main.tf", + "line": 1, "type": "aws_s3_bucket", "config": { "acl": "private", @@ -284,7 +295,8 @@ { "id": "aws_security_group.acme_web", "name": "acme_web", - "source": "testdata/moduleconfigs/sg/main.tf", + "source": "sg/main.tf", + "line": 1, "type": "aws_security_group", "config": { "description": "Used in the terraform", @@ -322,7 +334,8 @@ { "id": "aws_sqs_queue.sqsQueueExposed", "name": "sqsQueueExposed", - "source": "testdata/moduleconfigs/sqs/main.tf", + "source": "sqs/main.tf", + "line": 1, "type": "aws_sqs_queue", "config": { "kms_data_key_reuse_period_seconds": 300, diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index 715fb2a85..fa9f343aa 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -37,6 +37,8 @@ var ( // ValidateInputs validates the inputs to the executor object func (e *Executor) ValidateInputs() error { + var err error + // terrascan can accept either a file or a directory if e.filePath == "" && e.dirPath == "" { zap.S().Errorf("no IaC path specified; use '-f' for file or '-d' for directory") @@ -49,29 +51,29 @@ func (e *Executor) ValidateInputs() error { if e.dirPath != "" { // if directory, check if directory exists - absDirPath, err := utils.GetAbsPath(e.dirPath) + e.dirPath, err = utils.GetAbsPath(e.dirPath) if err != nil { return err } - if _, err := os.Stat(absDirPath); err != nil { - zap.S().Errorf("directory '%s' does not exist", absDirPath) + if _, err := os.Stat(e.dirPath); err != nil { + 
zap.S().Errorf("directory '%s' does not exist", e.dirPath) return errDirNotExists } - zap.S().Debugf("directory '%s' exists", absDirPath) + zap.S().Debugf("directory '%s' exists", e.dirPath) } else { // if file path, check if file exists - absFilePath, err := utils.GetAbsPath(e.filePath) + e.filePath, err = utils.GetAbsPath(e.filePath) if err != nil { return err } - if _, err := os.Stat(absFilePath); err != nil { - zap.S().Errorf("file '%s' does not exist", absFilePath) + if _, err := os.Stat(e.filePath); err != nil { + zap.S().Errorf("file '%s' does not exist", e.filePath) return errFileNotExists } - zap.S().Debugf("file '%s' exists", absFilePath) + zap.S().Debugf("file '%s' exists", e.filePath) } // check if Iac type is supported From 71e285432b72ed55980bfd3fb7ae4672d65769e7 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:06:15 +0530 Subject: [PATCH 174/188] fix runtime unit tests --- pkg/runtime/executor_test.go | 88 ++++++++++++++++--- ....EncryptionandKeyManagement.High.0407.json | 12 +++ ....EncryptionandKeyManagement.High.0408.json | 12 +++ .../AWS.CloudFront.Logging.Medium.0567.json | 12 +++ .../cloudfrontNoHTTPSTraffic.rego | 10 +++ .../cloudfrontNoLogging.rego | 21 +++++ .../cloudfrontNoSecureCiphers.rego | 19 ++++ 7 files changed, 163 insertions(+), 11 deletions(-) create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego create mode 100755 pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego diff --git a/pkg/runtime/executor_test.go b/pkg/runtime/executor_test.go index 62d163754..8d82ee97e 100644 --- a/pkg/runtime/executor_test.go +++ b/pkg/runtime/executor_test.go @@ -26,11 +26,13 @@ import ( tfv12 "github.com/accurics/terrascan/pkg/iac-providers/terraform/v12" "github.com/accurics/terrascan/pkg/notifications" "github.com/accurics/terrascan/pkg/notifications/webhook" + "github.com/accurics/terrascan/pkg/policy" ) var ( - errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") - errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") + errMockLoadIacDir = fmt.Errorf("mock LoadIacDir") + errMockLoadIacFile = fmt.Errorf("mock LoadIacFile") + errMockPolicyEngine = fmt.Errorf("mock PolicyEngine") ) // MockIacProvider mocks IacProvider interface @@ -47,6 +49,31 @@ func (m MockIacProvider) LoadIacFile(file string) (output.AllResourceConfigs, er return m.output, m.err } +// mock policy engine +type MockPolicyEngine struct { + err error +} + +func (m MockPolicyEngine) Init(input string) error { + return m.err +} + +func (m MockPolicyEngine) Configure() error { + return m.err +} + +func (m MockPolicyEngine) Evaluate(input policy.EngineInput) (out policy.EngineOutput, err error) { + return out, m.err +} + +func (m MockPolicyEngine) GetResults() (out policy.EngineOutput) { + return out +} + +func (m MockPolicyEngine) Release() error { + return m.err +} + func TestExecute(t *testing.T) { // TODO: add tests to validate output of Execute() @@ -66,8 +93,9 @@ func TestExecute(t *testing.T) { { 
name: "test LoadIacDir no error", executor: Executor{ - dirPath: "./testdata/testdir", - iacProvider: MockIacProvider{err: nil}, + dirPath: "./testdata/testdir", + iacProvider: MockIacProvider{err: nil}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: nil, }, @@ -82,27 +110,48 @@ func TestExecute(t *testing.T) { { name: "test LoadIacFile no error", executor: Executor{ - filePath: "./testdata/testfile", - iacProvider: MockIacProvider{err: nil}, + filePath: "./testdata/testfile", + iacProvider: MockIacProvider{err: nil}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: nil, }, { name: "test SendNofitications no error", executor: Executor{ - iacProvider: MockIacProvider{err: nil}, - notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: nil, }, { - name: "test SendNofitications no error", + name: "test SendNofitications mock error", executor: Executor{ - iacProvider: MockIacProvider{err: nil}, - notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}}, + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: errMockNotifier}}, + policyEngine: MockPolicyEngine{err: nil}, }, wantErr: errMockNotifier, }, + { + name: "test policy enginer no error", + executor: Executor{ + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + policyEngine: MockPolicyEngine{err: nil}, + }, + wantErr: nil, + }, + { + name: "test policy engine error", + executor: Executor{ + iacProvider: MockIacProvider{err: nil}, + notifiers: []notifications.Notifier{&MockNotifier{err: nil}}, + policyEngine: MockPolicyEngine{err: errMockPolicyEngine}, + }, + wantErr: errMockPolicyEngine, + }, } for _, tt := range table { @@ -132,6 +181,7 @@ func TestInit(t *testing.T) { cloudType: "aws", iacType: "terraform", iacVersion: "v12", + policyPath: "./testdata/testpolicies", }, wantErr: nil, wantIacProvider: &tfv12.TfV12{}, @@ -146,6 +196,7 @@ func TestInit(t *testing.T) { iacType: "terraform", iacVersion: "v12", configFile: "./testdata/webhook.toml", + policyPath: "./testdata/testpolicies", }, wantErr: nil, wantIacProvider: &tfv12.TfV12{}, @@ -178,6 +229,21 @@ func TestInit(t *testing.T) { wantErr: fmt.Errorf("config file not present"), wantIacProvider: &tfv12.TfV12{}, }, + { + name: "invalid policy path", + executor: Executor{ + filePath: "./testdata/testfile", + dirPath: "", + cloudType: "aws", + iacType: "terraform", + iacVersion: "v12", + configFile: "./testdata/webhook.toml", + policyPath: "./testdata/notthere", + }, + wantErr: fmt.Errorf("failed to initialize OPA policy engine"), + wantIacProvider: &tfv12.TfV12{}, + wantNotifiers: []notifications.Notifier{&webhook.Webhook{}}, + }, } for _, tt := range table { diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json new file mode 100755 index 000000000..87a931b83 --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoHTTPSTraffic", + "file": "cloudfrontNoHTTPSTraffic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use encrypted connection 
between CloudFront and origin server", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json new file mode 100755 index 000000000..417d50dcd --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoSecureCiphers", + "file": "cloudfrontNoSecureCiphers.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Secure ciphers are not used in CloudFront distribution", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json new file mode 100755 index 000000000..2d26be5a4 --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoLogging", + "file": "cloudfrontNoLogging.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", + "referenceId": "AWS.CloudFront.Logging.Medium.0567", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego new file mode 100755 index 000000000..6073a927c --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoHTTPSTraffic[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + orderedcachebehaviour = cloudfront.config.ordered_cache_behavior[i] + orderedcachebehaviour.viewer_protocol_policy == "allow-all" + traverse := sprintf("ordered_cache_behavior[%d].viewer_protocol_policy", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ordered_cache_behavior.viewer_protocol_policy", "AttributeDataType": "string", "Expected": "redirect-to-https", "Actual": orderedcachebehaviour.viewer_protocol_policy } +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego new file mode 100755 index 000000000..dfd52a3a1 --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + not cloudfront.config.logging_config + + rc = 
"ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + cloudfront.config.logging_config == [] + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego new file mode 100755 index 000000000..9159d825f --- /dev/null +++ b/pkg/runtime/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}cloudfrontNoSecureCiphers[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + certificate = cloudfront.config.viewer_certificate[i] + certificate.cloudfront_default_certificate = false + not minimumAllowedProtocolVersion(certificate.minimum_protocol_version) + traverse := sprintf("viewer_certificate[%d].minimum_protocol_version", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "viewer_certificate.minimum_protocol_version", "AttributeDataType": "string", "Expected": "TLSv1.2", "Actual": certificate.minimum_protocol_version } +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.1" +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.2" +} \ No newline at end of file From 38cdd406a6b6bfe56692398c8739a748f14c77ff Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:34:39 +0530 Subject: [PATCH 175/188] fix http-server unit tests --- pkg/http-server/file-scan.go | 10 +++++++-- pkg/http-server/file-scan_test.go | 2 +- pkg/http-server/handler.go | 4 +++- ....EncryptionandKeyManagement.High.0407.json | 12 +++++++++++ ....EncryptionandKeyManagement.High.0408.json | 12 +++++++++++ .../AWS.CloudFront.Logging.Medium.0567.json | 12 +++++++++++ .../cloudfrontNoHTTPSTraffic.rego | 10 +++++++++ .../cloudfrontNoLogging.rego | 21 +++++++++++++++++++ .../cloudfrontNoSecureCiphers.rego | 19 +++++++++++++++++ 9 files changed, 98 insertions(+), 4 deletions(-) create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego create mode 100755 pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego create mode 100755 
pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego diff --git a/pkg/http-server/file-scan.go b/pkg/http-server/file-scan.go index abe660010..b49ebfc29 100644 --- a/pkg/http-server/file-scan.go +++ b/pkg/http-server/file-scan.go @@ -82,8 +82,14 @@ func (g *APIHandler) scanFile(w http.ResponseWriter, r *http.Request) { tempFile.Write(fileBytes) // create a new runtime executor for scanning the uploaded file - executor, err := runtime.NewExecutor(iacType, iacVersion, cloudType, - tempFile.Name(), "", "", "") + var executor *runtime.Executor + if g.test { + executor, err = runtime.NewExecutor(iacType, iacVersion, cloudType, + tempFile.Name(), "", "", "./testdata/testpolicies") + } else { + executor, err = runtime.NewExecutor(iacType, iacVersion, cloudType, + tempFile.Name(), "", "", "") + } if err != nil { zap.S().Error(err) apiErrorResponse(w, err.Error(), http.StatusBadRequest) diff --git a/pkg/http-server/file-scan_test.go b/pkg/http-server/file-scan_test.go index 2e4523b7c..a40016cf4 100644 --- a/pkg/http-server/file-scan_test.go +++ b/pkg/http-server/file-scan_test.go @@ -102,7 +102,7 @@ func TestUpload(t *testing.T) { }) res := httptest.NewRecorder() // new api handler - h := NewAPIHandler() + h := &APIHandler{test: true} h.scanFile(res, req) if res.Code != tt.wantStatus { diff --git a/pkg/http-server/handler.go b/pkg/http-server/handler.go index a72370030..eb86e5a90 100644 --- a/pkg/http-server/handler.go +++ b/pkg/http-server/handler.go @@ -17,7 +17,9 @@ package httpserver // APIHandler struct for http api server -type APIHandler struct{} +type APIHandler struct { + test bool +} // NewAPIHandler returns a new APIHandler{} func NewAPIHandler() *APIHandler { diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json new file mode 100755 index 000000000..87a931b83 --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0407.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoHTTPSTraffic", + "file": "cloudfrontNoHTTPSTraffic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use encrypted connection between CloudFront and origin server", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0407", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json new file mode 100755 index 000000000..417d50dcd --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.EncryptionandKeyManagement.High.0408.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoSecureCiphers", + "file": "cloudfrontNoSecureCiphers.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Secure ciphers are not used in CloudFront distribution", + "referenceId": "AWS.CloudFront.EncryptionandKeyManagement.High.0408", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git 
a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json new file mode 100755 index 000000000..2d26be5a4 --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/AWS.CloudFront.Logging.Medium.0567.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoLogging", + "file": "cloudfrontNoLogging.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).", + "referenceId": "AWS.CloudFront.Logging.Medium.0567", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego new file mode 100755 index 000000000..6073a927c --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoHTTPSTraffic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoHTTPSTraffic[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + orderedcachebehaviour = cloudfront.config.ordered_cache_behavior[i] + orderedcachebehaviour.viewer_protocol_policy == "allow-all" + traverse := sprintf("ordered_cache_behavior[%d].viewer_protocol_policy", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ordered_cache_behavior.viewer_protocol_policy", "AttributeDataType": "string", "Expected": "redirect-to-https", "Actual": orderedcachebehaviour.viewer_protocol_policy } +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego new file mode 100755 index 000000000..dfd52a3a1 --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoLogging.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + not cloudfront.config.logging_config + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}cloudfrontNoLogging[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + cloudfront.config.logging_config == [] + + rc = "ewogICJsb2dnaW5nX2NvbmZpZyI6IHsKICAgICJpbmNsdWRlX2Nvb2tpZXMiOiBmYWxzZSwKICAgICJidWNrZXQiOiAiPGJ1Y2tldD4iLAogICAgInByZWZpeCI6ICI8cHJlZml4PiIKICB9Cn0=" + + traverse = "" + retVal := { "Id": cloudfront.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "logging_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego 
b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego new file mode 100755 index 000000000..9159d825f --- /dev/null +++ b/pkg/http-server/testdata/testpolicies/aws_cloudfront_distribution/cloudfrontNoSecureCiphers.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}cloudfrontNoSecureCiphers[retVal]{ + cloudfront = input.aws_cloudfront_distribution[_] + some i + certificate = cloudfront.config.viewer_certificate[i] + certificate.cloudfront_default_certificate = false + not minimumAllowedProtocolVersion(certificate.minimum_protocol_version) + traverse := sprintf("viewer_certificate[%d].minimum_protocol_version", [i]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "viewer_certificate.minimum_protocol_version", "AttributeDataType": "string", "Expected": "TLSv1.2", "Actual": certificate.minimum_protocol_version } +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.1" +} + +minimumAllowedProtocolVersion(currentVersion) { + currentVersion == "TLSv1.2" +} \ No newline at end of file From 066e21cab2ff3f2eee2c93c95dde98cbe56d76eb Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:46:34 +0530 Subject: [PATCH 176/188] fix remove unnecessary data from output --- pkg/results/types.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pkg/results/types.go b/pkg/results/types.go index 5bfda9544..35e38129a 100644 --- a/pkg/results/types.go +++ b/pkg/results/types.go @@ -20,14 +20,14 @@ package results type Violation struct { RuleName string `json:"rule_name" yaml:"rule_name" xml:"rule_name,attr"` Description string `json:"description" yaml:"description" xml:"description,attr"` - RuleID string `json:"rule" yaml:"rule" xml:"rule,attr"` + RuleID string `json:"rule_id" yaml:"rule_id" xml:"rule_id,attr"` Severity string `json:"severity" yaml:"severity" xml:"severity,attr"` Category string `json:"category" yaml:"category" xml:"category,attr"` - RuleFile string `json:"rule_file" yaml:"rule_file" xml:"rule_file,attr"` + RuleFile string `json:"-" yaml:"-" xml:"-"` RuleData interface{} `json:"-" yaml:"-" xml:"-"` ResourceName string `json:"resource_name" yaml:"resource_name" xml:"resource_name,attr"` ResourceType string `json:"resource_type" yaml:"resource_type" xml:"resource_type,attr"` - ResourceData interface{} `json:"resource_data" yaml:"resource_data" xml:"resource_data,attr"` + ResourceData interface{} `json:"-" yaml:"-" xml:"-"` File string `json:"file" yaml:"file" xml:"file,attr"` LineNumber int `json:"line" yaml:"line" xml:"line,attr"` } From 9571b6796cb76bfa0c71315b860587ed0663bc4d Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 17:58:08 +0530 Subject: [PATCH 177/188] make current directory as default for scanning IaC --- cmd/terrascan/main.go | 2 +- pkg/runtime/executor.go | 6 +++--- pkg/runtime/validate.go | 31 +++++++++++++------------------ 3 files changed, 17 insertions(+), 22 deletions(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index db116a4ff..3e3b5ebe3 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -37,7 +37,7 @@ func main() { iacType = flag.String("iac", "", "IaC provider (supported values: terraform)") iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") iacFilePath = flag.String("f", "", "IaC file path") - iacDirPath = flag.String("d", "", "IaC directory path") + iacDirPath = flag.String("d", ".", 
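To make the net effect of the tag changes across these commits concrete, this is roughly what a single violation entry could serialize to as JSON once RuleFile, RuleData, and ResourceData are hidden again. The field values are illustrative, borrowed from the CloudFront test policy and module test data used elsewhere in this series; the actual file and line values depend on the scanned configuration.

    {
      "rule_name": "cloudfrontNoLogging",
      "description": "Ensure that your AWS Cloudfront distributions have the Logging feature enabled in order to track all viewer requests for the content delivered through the Content Delivery Network (CDN).",
      "rule_id": "AWS.CloudFront.Logging.Medium.0567",
      "severity": "MEDIUM",
      "category": "Logging",
      "resource_name": "s3-distribution-TLS-v1",
      "resource_type": "aws_cloudfront_distribution",
      "file": "cloudfront/main.tf",
      "line": 6
    }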
"IaC directory path") policyPath = flag.String("p", "", "Policy directory path") // cloud flags diff --git a/pkg/runtime/executor.go b/pkg/runtime/executor.go index df36a6315..e0c3c7757 100644 --- a/pkg/runtime/executor.go +++ b/pkg/runtime/executor.go @@ -99,10 +99,10 @@ func (e *Executor) Execute() (results policy.EngineOutput, err error) { // create results output from Iac var normalized output.AllResourceConfigs - if e.dirPath != "" { - normalized, err = e.iacProvider.LoadIacDir(e.dirPath) - } else { + if e.filePath != "" { normalized, err = e.iacProvider.LoadIacFile(e.filePath) + } else { + normalized, err = e.iacProvider.LoadIacDir(e.dirPath) } if err != nil { return results, err diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index fa9f343aa..2a9c1ec37 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -44,25 +44,8 @@ func (e *Executor) ValidateInputs() error { zap.S().Errorf("no IaC path specified; use '-f' for file or '-d' for directory") return errEmptyIacPath } - if e.filePath != "" && e.dirPath != "" { - zap.S().Errorf("cannot accept both '-f %s' and '-d %s' options together", e.filePath, e.dirPath) - return errIncorrectIacPath - } - - if e.dirPath != "" { - // if directory, check if directory exists - e.dirPath, err = utils.GetAbsPath(e.dirPath) - if err != nil { - return err - } - - if _, err := os.Stat(e.dirPath); err != nil { - zap.S().Errorf("directory '%s' does not exist", e.dirPath) - return errDirNotExists - } - zap.S().Debugf("directory '%s' exists", e.dirPath) - } else { + if e.filePath != "" { // if file path, check if file exists e.filePath, err = utils.GetAbsPath(e.filePath) if err != nil { @@ -74,6 +57,18 @@ func (e *Executor) ValidateInputs() error { return errFileNotExists } zap.S().Debugf("file '%s' exists", e.filePath) + } else { + // if directory, check if directory exists + e.dirPath, err = utils.GetAbsPath(e.dirPath) + if err != nil { + return err + } + + if _, err := os.Stat(e.dirPath); err != nil { + zap.S().Errorf("directory '%s' does not exist", e.dirPath) + return errDirNotExists + } + zap.S().Debugf("directory '%s' exists", e.dirPath) } // check if Iac type is supported From b98401e8ecf7942e899132cb090a528d6eaf6b41 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 19:54:45 +0530 Subject: [PATCH 178/188] fix unit tests for runtime package --- pkg/runtime/validate_test.go | 8 -------- 1 file changed, 8 deletions(-) diff --git a/pkg/runtime/validate_test.go b/pkg/runtime/validate_test.go index 938672e40..0dab33948 100644 --- a/pkg/runtime/validate_test.go +++ b/pkg/runtime/validate_test.go @@ -58,14 +58,6 @@ func TestValidateInputs(t *testing.T) { }, wantErr: errEmptyIacPath, }, - { - name: "incorrect iac path", - executor: Executor{ - filePath: "./testdata/testfile", - dirPath: "./testdata/testdir", - }, - wantErr: errIncorrectIacPath, - }, { name: "filepath does not exist", executor: Executor{ From e822fad7460d21662bf2f6467510b586a3b24797 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Thu, 13 Aug 2020 19:58:10 +0530 Subject: [PATCH 179/188] fix static check errors --- pkg/runtime/validate.go | 9 ++++----- 1 file changed, 4 insertions(+), 5 deletions(-) diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index 2a9c1ec37..63de4837e 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -27,11 +27,10 @@ import ( ) var ( - errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") - errIncorrectIacPath = fmt.Errorf("cannot accept both '-f' and '-d' 
options together") - errDirNotExists = fmt.Errorf("directory does not exist") - errFileNotExists = fmt.Errorf("file does not exist") - errIacNotSupported = fmt.Errorf("iac type or version not supported") + errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") + errDirNotExists = fmt.Errorf("directory does not exist") + errFileNotExists = fmt.Errorf("file does not exist") + errIacNotSupported = fmt.Errorf("iac type or version not supported") ) // ValidateInputs validates the inputs to the executor object From 930daf55d27acde91a97b1187667a001b4c68556 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Thu, 13 Aug 2020 20:45:48 -0700 Subject: [PATCH 180/188] update azure policies with the latest format --- .../accurics.azure.CAM.162.json | 7 +++---- .../accurics.azure.EKM.156.json | 7 +++---- .../accurics.azure.NPS.171.json | 7 +++---- .../accurics.azure.NPS.172.json | 7 +++---- .../accurics.azure.NPS.35.json | 7 +++---- .../accurics.azure.NPS.36.json | 7 +++---- .../accurics.azure.NPS.37.json | 7 +++---- .../accurics.azure.LOG.151.json | 7 +++---- .../accurics.azure.LOG.152.json | 7 +++---- .../accurics.azure.LOG.155.json | 7 +++---- .../accurics.azure.BDR.163.json | 7 +++---- .../accurics.azure.EKM.1.json | 7 +++---- .../accurics.azure.EKM.23.json | 7 +++---- .../accurics.azure.NS.30.json | 7 +++---- .../accurics.azure.NS.31.json | 7 +++---- .../accurics.azure.MON.157.json | 7 +++---- .../accurics.azure.NS.21.json | 7 +++---- .../accurics.azure.NS.5.json | 7 +++---- .../opa/rego/azure/azurerm_sql_server/.json | 13 ------------ .../sqlServerADAdminConfigured.rego | 21 ------------------- 20 files changed, 54 insertions(+), 106 deletions(-) delete mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/.json delete mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego diff --git a/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json index b5a48fb4e..5f34b68a4 100755 --- a/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json +++ b/pkg/policies/opa/rego/azure/azurerm_cosmosdb_account/accurics.azure.CAM.162.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_noTags", + "name": "reme_noTags", "file": "noTags.rego", - "ruleTemplate": "noTags", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure that Cosmos DB Account has an associated tag", - "ruleReferenceId": "accurics.azure.CAM.162", + "referenceId": "accurics.azure.CAM.162", "category": "Cloud Assets Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json b/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json index 6ea0f84ce..8164f7535 100755 --- a/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json +++ b/pkg/policies/opa/rego/azure/azurerm_managed_disk/accurics.azure.EKM.156.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_checkDataDisksEncrypted", + "name": "reme_checkDataDisksEncrypted", "file": "checkDataDisksEncrypted.rego", - "ruleTemplate": "checkDataDisksEncrypted", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure that 'OS disk' are encrypted", - "ruleReferenceId": "accurics.azure.EKM.156", + "referenceId": "accurics.azure.EKM.156", "category": "Encryption and Key Management", "version": 2 } \ No 
newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json index 0b6e1be26..d4631dbb0 100755 --- a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.171.json @@ -1,8 +1,7 @@ { - "ruleName": "reme_networkPort3389ExposedPublicEntire", + "name": "reme_networkPort3389ExposedPublicEntire", "file": "networkPortExposedPublic.rego", - "ruleTemplate": "networkPortExposedPublic", - "ruleTemplateArgs": { + "templateArgs": { "endLimit": 0, "evalHosts": true, "name": "networkPort3389ExposedPublicEntire", @@ -13,7 +12,7 @@ }, "severity": "HIGH", "description": "Remote Desktop (TCP:3389) is exposed to the entire public internet", - "ruleReferenceId": "accurics.azure.NPS.171", + "referenceId": "accurics.azure.NPS.171", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json index 9a1ffd52d..d64397813 100755 --- a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.172.json @@ -1,8 +1,7 @@ { - "ruleName": "reme_networkPort22ExposedPublicEntire", + "name": "reme_networkPort22ExposedPublicEntire", "file": "networkPortExposedPublic.rego", - "ruleTemplate": "networkPortExposedPublic", - "ruleTemplateArgs": { + "templateArgs": { "endLimit": 0, "evalHosts": true, "name": "networkPort22ExposedPublicEntire", @@ -13,7 +12,7 @@ }, "severity": "HIGH", "description": "SSH (TCP:22) is exposed to the entire public internet", - "ruleReferenceId": "accurics.azure.NPS.172", + "referenceId": "accurics.azure.NPS.172", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json index 001a10bd5..57414c54b 100755 --- a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.35.json @@ -1,8 +1,7 @@ { - "ruleName": "reme_networkPort9090ExposedPublicWide", + "name": "reme_networkPort9090ExposedPublicWide", "file": "networkPortExposedPublic.rego", - "ruleTemplate": "networkPortExposedPublic", - "ruleTemplateArgs": { + "templateArgs": { "endLimit": 1, "evalHosts": false, "name": "networkPort9090ExposedPublicWide", @@ -13,7 +12,7 @@ }, "severity": "HIGH", "description": "CiscoSecure, WebSM (TCP:9090) is exposed to the wide public internet", - "ruleReferenceId": "accurics.azure.NPS.35", + "referenceId": "accurics.azure.NPS.35", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json index 8a6b3c74f..62760d897 100755 --- a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.36.json @@ -1,8 +1,7 @@ { - "ruleName": 
"reme_networkPort3389ExposedPublicWide", + "name": "reme_networkPort3389ExposedPublicWide", "file": "networkPortExposedPublic.rego", - "ruleTemplate": "networkPortExposedPublic", - "ruleTemplateArgs": { + "templateArgs": { "endLimit": 1, "evalHosts": false, "name": "networkPort3389ExposedPublicWide", @@ -13,7 +12,7 @@ }, "severity": "HIGH", "description": "Remote Desktop (TCP:3389) is exposed to the wide public internet", - "ruleReferenceId": "accurics.azure.NPS.36", + "referenceId": "accurics.azure.NPS.36", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json index 4035cbcd7..40d552cef 100755 --- a/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json +++ b/pkg/policies/opa/rego/azure/azurerm_network_security_rule/accurics.azure.NPS.37.json @@ -1,8 +1,7 @@ { - "ruleName": "reme_networkPort22ExposedPublicWide", + "name": "reme_networkPort22ExposedPublicWide", "file": "networkPortExposedPublic.rego", - "ruleTemplate": "networkPortExposedPublic", - "ruleTemplateArgs": { + "templateArgs": { "endLimit": 1, "evalHosts": false, "name": "networkPort22ExposedPublicWide", @@ -13,7 +12,7 @@ }, "severity": "HIGH", "description": "SSH (TCP:22) is exposed to the wide public internet", - "ruleReferenceId": "accurics.azure.NPS.37", + "referenceId": "accurics.azure.NPS.37", "category": "Network Ports Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json index dacf226b5..3559154bd 100755 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_connectionThrottling", + "name": "reme_connectionThrottling", "file": "connectionThrottling.rego", - "ruleTemplate": "connectionThrottling", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure server parameter 'connection_throttling' is set to 'ON' for PostgreSQL Database Server", - "ruleReferenceId": "accurics.azure.LOG.151", + "referenceId": "accurics.azure.LOG.151", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json index 54c058d46..e11c479d8 100755 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_logConnections", + "name": "reme_logConnections", "file": "logConnections.rego", - "ruleTemplate": "logConnections", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure server parameter 'log_connections' is set to 'ON' for PostgreSQL Database Server", - "ruleReferenceId": "accurics.azure.LOG.152", + "referenceId": "accurics.azure.LOG.152", "category": "Logging", "version": 2 } \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json index 052a928b9..72c6c8852 100755 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_logRetention", + "name": "reme_logRetention", "file": "logRetention.rego", - "ruleTemplate": "logRetention", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure server parameter 'log_retention_days' is greater than 3 days for PostgreSQL Database Server", - "ruleReferenceId": "accurics.azure.LOG.155", + "referenceId": "accurics.azure.LOG.155", "category": "Logging", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json index f00c25c6f..3ed3be2b9 100755 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.BDR.163.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_geoRedundancyDisabled", + "name": "reme_geoRedundancyDisabled", "file": "geoRedundancyDisabled.rego", - "ruleTemplate": "geoRedundancyDisabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "HIGH", "description": "Ensure that Geo Redundant Backups is enabled on PostgreSQL", - "ruleReferenceId": "accurics.azure.BDR.163", + "referenceId": "accurics.azure.BDR.163", "category": "Backup and Disaster Recovery", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json index 5b7d411b4..e4057b64c 100755 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json +++ b/pkg/policies/opa/rego/azure/azurerm_postgresql_server/accurics.azure.EKM.1.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_sslEnforceDisabled", + "name": "reme_sslEnforceDisabled", "file": "sslEnforceDisabled.rego", - "ruleTemplate": "sslEnforceDisabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "HIGH", "description": "Ensure 'Enforce SSL connection' is set to 'ENABLED' for PostgreSQL Database Server", - "ruleReferenceId": "accurics.azure.EKM.1", + "referenceId": "accurics.azure.EKM.1", "category": "Encryption and Key Management", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json index f25530c3e..44997883e 100755 --- a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.EKM.23.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_nonSslEnabled", + "name": "reme_nonSslEnabled", "file": "nonSslEnabled.rego", - "ruleTemplate": "nonSslEnabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure that the Redis Cache accepts only SSL connections", - "ruleReferenceId": "accurics.azure.EKM.23", + "referenceId": "accurics.azure.EKM.23", "category": "Encryption and Key Management", 
"version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json index 134fb0295..9829654f7 100755 --- a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.30.json @@ -1,15 +1,14 @@ { - "ruleName": "reme_entirelyAccessible", + "name": "reme_entirelyAccessible", "file": "publiclyAccessible.rego", - "ruleTemplate": "publiclyAccessible", - "ruleTemplateArgs": { + "templateArgs": { "isEntire": true, "name": "entirelyAccessible", "prefix": "reme_" }, "severity": "HIGH", "description": "Ensure there are no firewall rules allowing unrestricted access to Redis from the Internet", - "ruleReferenceId": "accurics.azure.NS.30", + "referenceId": "accurics.azure.NS.30", "category": "Network Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json index 24ad82625..4a691d7a1 100755 --- a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.31.json @@ -1,15 +1,14 @@ { - "ruleName": "reme_publiclyAccessible", + "name": "reme_publiclyAccessible", "file": "publiclyAccessible.rego", - "ruleTemplate": "publiclyAccessible", - "ruleTemplateArgs": { + "templateArgs": { "isEntire": false, "name": "publiclyAccessible", "prefix": "reme_" }, "severity": "HIGH", "description": "Ensure there are no firewall rules allowing unrestricted access to Redis from other Azure sources", - "ruleReferenceId": "accurics.azure.NS.31", + "referenceId": "accurics.azure.NS.31", "category": "Network Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json b/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json index 9c3d806ad..4e6ac6145 100755 --- a/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json +++ b/pkg/policies/opa/rego/azure/azurerm_sql_database/accurics.azure.MON.157.json @@ -1,13 +1,12 @@ { - "ruleName": "reme_checkAuditEnabled", + "name": "reme_checkAuditEnabled", "file": "checkAuditEnabled.rego", - "ruleTemplate": "checkAuditEnabled", - "ruleTemplateArgs": { + "templateArgs": { "prefix": "reme_" }, "severity": "MEDIUM", "description": "Ensure that 'Threat Detection' is enabled for Azure SQL Database", - "ruleReferenceId": "accurics.azure.MON.157", + "referenceId": "accurics.azure.MON.157", "category": "Monitoring", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json index 473c088a7..9473e9851 100755 --- a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.21.json @@ -1,15 +1,14 @@ { - "ruleName": "reme_sqlIngressAccess", + "name": "reme_sqlIngressAccess", "file": "checkPublicAccessNotAllow.rego", - "ruleTemplate": "checkPublicAccessNotAllow", - "ruleTemplateArgs": { + "templateArgs": { "isEntire": false, "name": "sqlIngressAccess", "prefix": "reme_" }, "severity": "HIGH", "description": "Ensure that no SQL Server allows ingress from 0.0.0.0/0 
(ANY IP)", - "ruleReferenceId": "accurics.azure.NS.21", + "referenceId": "accurics.azure.NS.21", "category": "Network Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json index 8af78d71b..90343b405 100755 --- a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.5.json @@ -1,15 +1,14 @@ { - "ruleName": "reme_sqlPublicAccess", + "name": "reme_sqlPublicAccess", "file": "checkPublicAccessNotAllow.rego", - "ruleTemplate": "checkPublicAccessNotAllow", - "ruleTemplateArgs": { + "templateArgs": { "isEntire": true, "name": "sqlPublicAccess", "prefix": "reme_" }, "severity": "HIGH", "description": "Ensure entire Azure infrastructure doesn't have access to Azure SQL ServerEnsure entire Azure infrastructure doesn't have access to Azure SQL Server", - "ruleReferenceId": "accurics.azure.NS.5", + "referenceId": "accurics.azure.NS.5", "category": "Network Security", "version": 2 } \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/.json deleted file mode 100755 index 7b995d40d..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_sql_server/.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "ruleName": "reme_sqlServerADAdminConfigured", - "file": "sqlServerADAdminConfigured.rego", - "ruleTemplate": "sqlServerADAdminConfigured", - "ruleTemplateArgs": { - "prefix": "reme_" - }, - "severity": "HIGH", - "description": "Ensure that Azure Active Directory Admin is configured for SQL Server", - "ruleReferenceId": "", - "category": "Data Security", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego deleted file mode 100755 index ed63dd4ee..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego +++ /dev/null @@ -1,21 +0,0 @@ -package accurics - -{{.prefix}}sqlServerADAdminConfigured[retVal] { - sql_server := input.azurerm_sql_server[_] - sql_server.type == "azurerm_sql_server" - key := concat("-", [sql_server.config.resource_group_name, sql_server.config.name]) - not adAdminExist(key) - rc = "ZGF0YSAiYXp1cmVybV9jbGllbnRfY29uZmlnIiAiY3VycmVudCIge30KCnJlc291cmNlICJhenVyZXJtX3NxbF9hY3RpdmVfZGlyZWN0b3J5X2FkbWluaXN0cmF0b3IiICIjI3Jlc291cmNlX25hbWUjIyIgewogIHNlcnZlcl9uYW1lICAgICAgICAgPSBhenVyZXJtX3NxbF9zZXJ2ZXIuIyNyZXNvdXJjZV9uYW1lIyMubmFtZQogIHJlc291cmNlX2dyb3VwX25hbWUgPSBhenVyZXJtX3Jlc291cmNlX2dyb3VwLiMjcmVzb3VyY2VfbmFtZSMjLm5hbWUKICBsb2dpbiAgICAgICAgICAgICAgID0gInNxbGFkbWluIgogIHRlbmFudF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50LnRlbmFudF9pZAogIG9iamVjdF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50Lm9iamVjdF9pZAp9" - decode_rc = base64.decode(rc) - replaced := replace(decode_rc, "##resource_name##", sql_server.name) - traverse = "" - retVal := { "Id": sql_server.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "resource", "Expected": base64.encode(replaced), "Actual": null } -} - -adAdminExist(rg_servername) = exists { - ad_admin_set := { ad_id | input.azurerm_sql_active_directory_administrator[i].type == 
"azurerm_sql_active_directory_administrator"; ad_id := concat("-", [input.azurerm_sql_active_directory_administrator[i].config.resource_group_name, input.azurerm_sql_active_directory_administrator[i].config.server_name]) } - ad_admin_set[rg_servername] - exists = true -} else = false { - true -} \ No newline at end of file From 0638d9c10a15efad8bf1760f31f14763ec68daaa Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 14 Aug 2020 09:00:29 +0530 Subject: [PATCH 181/188] add terraform init support --- cmd/terrascan/main.go | 15 +++-- go.mod | 1 + go.sum | 33 +++++++++++ pkg/initialize/run.go | 119 ++++++++++++++++++++++++++++++++++++++++ pkg/runtime/validate.go | 22 +++++--- 5 files changed, 179 insertions(+), 11 deletions(-) create mode 100644 pkg/initialize/run.go diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 3e3b5ebe3..b7cf28d93 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -19,11 +19,11 @@ package main import ( "flag" - "go.uber.org/zap" - "github.com/accurics/terrascan/pkg/cli" httpServer "github.com/accurics/terrascan/pkg/http-server" + "github.com/accurics/terrascan/pkg/initialize" "github.com/accurics/terrascan/pkg/logging" + "go.uber.org/zap" ) func main() { @@ -61,12 +61,19 @@ func main() { return } + // initialize logger + logging.Init(*logType, *logLevel) + + // initialize terrascan + if err := initialize.Run(); err != nil { + zap.S().Error("failed to initialize terrascan") + return + } + // if server mode set, run terrascan as a server, else run it as CLI if *server { - logging.Init(*logType, *logLevel) httpServer.Start() } else { - logging.Init(*logType, *logLevel) zap.S().Debug("running terrascan in cli mode") cli.Run(*iacType, *iacVersion, *cloudType, *iacFilePath, *iacDirPath, *configFile, *policyPath, *output) } diff --git a/go.mod b/go.mod index ea53117c6..d16447ec9 100644 --- a/go.mod +++ b/go.mod @@ -17,6 +17,7 @@ require ( golang.org/x/net v0.0.0-20200625001655-4c5254603344 // indirect golang.org/x/tools v0.0.0-20200812231640-9176cd30088c // indirect golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect + gopkg.in/src-d/go-git.v4 v4.13.1 gopkg.in/yaml.v2 v2.3.0 honnef.co/go/tools v0.0.1-2020.1.5 // indirect ) diff --git a/go.sum b/go.sum index b2eeab546..681052ecc 100644 --- a/go.sum +++ b/go.sum @@ -36,11 +36,13 @@ github.com/agext/levenshtein v1.2.1/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki github.com/agext/levenshtein v1.2.2 h1:0S/Yg6LYmFJ5stwQeRp6EeOcCbj7xiqQSdNelsXvaqE= github.com/agext/levenshtein v1.2.2/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/agl/ed25519 v0.0.0-20150830182803-278e1ec8e8a6/go.mod h1:WPjqKcmVOxf0XSf3YxCJs6N6AOSrOx3obionmG7T0y0= +github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/ghQa61ZWa/C2Aw3RkjiTBOix7dkqa1VLIs= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0= github.com/aliyun/alibaba-cloud-sdk-go v0.0.0-20190329064014-6e358769c32a/go.mod h1:T9M45xf79ahXVelWoOBmH0y4aC1t5kXO5BxwyakgIGA= github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190103054945-8205d1f41e70/go.mod h1:T/Aws4fEfogEE9v+HPhhw+CntffsBHJ8nXQCwKr0/g8= github.com/aliyun/aliyun-tablestore-go-sdk v4.1.2+incompatible/go.mod h1:LDQHRZylxvcg8H7wBIDfvO5g/cy4/sz1iucBlc2l3Jw= +github.com/anmitsu/go-shlex v0.0.0-20161002113705-648efa622239/go.mod h1:2FmKhYUyUczH0OGQWaF5ceTx0UBShxjsH6f8oGKYe2c= 
github.com/antchfx/xpath v0.0.0-20190129040759-c8489ed3251e/go.mod h1:Yee4kTMuNiPYJ7nSNorELQMr1J33uOpXDMByNYhvtNk= github.com/antchfx/xquery v0.0.0-20180515051857-ad5b8c7a47b0/go.mod h1:LzD22aAzDP8/dyiCKFp31He4m2GPjl0AFyzDtZzUu9M= github.com/apparentlymart/go-cidr v1.0.1/go.mod h1:EBcsNrHc3zQeuaeCeCtQruQm+n9/YjEn/vI25Lg7Gwc= @@ -54,6 +56,7 @@ github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2/go.mod h1:3U/XgcO3hC github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY= github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8= +github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/aws/aws-sdk-go v1.15.78/go.mod h1:E3/ieXAlvM0XWO57iftYVDLLvQ824smPP3ATZkfNZeM= github.com/aws/aws-sdk-go v1.25.3/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo= github.com/aws/aws-sdk-go v1.30.12/go.mod h1:5zCpMtNQVjRREroY7sYe8lOMRSxkhG6MZveU8YkpAk0= @@ -77,6 +80,7 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-systemd v0.0.0-20181012123002-c6f51f82210d/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= @@ -85,13 +89,17 @@ github.com/dimchansky/utfbom v1.1.0/go.mod h1:rO41eb7gLfo8SF1jd9F8HplJm1Fewwi4mQ github.com/dnaeon/go-vcr v0.0.0-20180920040454-5637cf3d8a31/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E= github.com/dylanmei/iso8601 v0.1.0/go.mod h1:w9KhXSgIyROl1DefbMYIE7UVSIvELTbMrCfx+QkYnoQ= github.com/dylanmei/winrmtest v0.0.0-20190225150635-99b7fe2fddf1/go.mod h1:lcy9/2gH1jn/VCLouHA6tOEwLoNVd4GW6zhuKLmHC2Y= +github.com/emirpasic/gods v1.12.0 h1:QAUIPSaCu4G+POclxeqb3F+WPpdKqFGlw36+yOzGlrg= +github.com/emirpasic/gods v1.12.0/go.mod h1:YfzfFFoVP/catgzJb4IKIqXjX78Ha8FMSDh3ymbK86o= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4= +github.com/flynn/go-shlex v0.0.0-20150515145356-3f9db97f8568/go.mod h1:xEzjJPgXI435gkrCt3MPfRiAkVrwSbHsst4LCFVfpJc= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/ghodss/yaml v0.0.0-20180820084758-c7ce16629ff4/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk= github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04= +github.com/gliderlabs/ssh v0.2.2/go.mod h1:U7qILu1NlMHj9FlMhZLlkCdDnU1DBEAqr0aevW3Awn0= github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as= github.com/go-logfmt/logfmt v0.3.0/go.mod 
h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE= github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg= @@ -192,6 +200,9 @@ github.com/hashicorp/vault v0.10.4/go.mod h1:KfSyffbKxoVyspOdlaGVjIuwLobi07qD1bA github.com/hashicorp/yamux v0.0.0-20180604194846-3520598351bb/go.mod h1:+NfK9FKeTrX5uv1uIXGdwYDTeHna2qgaIlx54MXqjAM= github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NHg9XEKhtSvM= github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= +github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= github.com/jhump/protoreflect v1.6.0/go.mod h1:eaTn3RZAmMBcV0fifFvlm6VHNz3wSkYyXYWUh7ymB74= github.com/jmespath/go-jmespath v0.0.0-20160202185014-0b12d6b521d8/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k= @@ -203,6 +214,8 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1 github.com/jtolds/gls v4.2.1+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/kardianos/osext v0.0.0-20190222173326-2bc1f35cddc0/go.mod h1:1NbS8ALrpOvjt0rHPNLyCIeMtbizbir8U//inJ+zuB8= +github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd h1:Coekwdh0v2wtGp9Gmz1Ze3eVRAWJMLokvN3QjdzCHLY= +github.com/kevinburke/ssh_config v0.0.0-20190725054713-01f96b0aa0cd/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/keybase/go-crypto v0.0.0-20161004153544-93f5b35093ba/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M= github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= @@ -214,6 +227,7 @@ github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFB github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw= github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= @@ -246,6 +260,7 @@ github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceT github.com/mitchellh/colorstring v0.0.0-20190213212951-d06e56a500db/go.mod h1:l0dey0ia/Uv7NcFFVbCLtqEBQbrT4OCwCSKTEv6enCw= github.com/mitchellh/copystructure v1.0.0/go.mod h1:SNtv71yrdKgLRyLFxmLdkAbkKEFWgYaq1OVrnRcwhnw= github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= +github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-linereader v0.0.0-20190213213312-1b945b3263eb/go.mod 
h1:OaY7UOoTkkrX3wRwjpYRKafIkkyeD0UtweSHAWWiqQM= github.com/mitchellh/go-testing-interface v0.0.0-20171004221916-a61a99592b77/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI= @@ -270,6 +285,7 @@ github.com/open-policy-agent/opa v0.22.0 h1:KZvn0uMQIorBIwYk8Vc89dp8No9FIEF8eFl0 github.com/open-policy-agent/opa v0.22.0/go.mod h1:rrwxoT/b011T0cyj+gg2VvxqTtn6N3gp/jzmr3fjW44= github.com/packer-community/winrmcp v0.0.0-20180102160824-81144009af58/go.mod h1:f6Izs6JvFTdnRbziASagjZ2vmf55NSIkC/weStxCHqk= github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc= +github.com/pelletier/go-buffruneio v0.2.0/go.mod h1:JkE26KsDizTr40EUHkXVtNPvgGtbSNq5BcowyYOWdKo= github.com/pelletier/go-toml v1.8.0 h1:Keo9qb7iRJs2voHvunFtuuYFsbWeOBh8/P9v/kVMFtw= github.com/pelletier/go-toml v1.8.0/go.mod h1:D6yutnOGMveHEPV7VQOuvI/gXY61bv+9bAOTRnLElKs= github.com/peterh/liner v0.0.0-20170211195444-bf27d3ba8e1d/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc= @@ -315,8 +331,11 @@ github.com/spf13/cobra v0.0.0-20181021141114-fe5e611709b0/go.mod h1:1l0Ry5zgKvJa github.com/spf13/pflag v0.0.0-20181024212040-082b515c9490/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.2/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/src-d/gcfg v1.4.0 h1:xXbNR5AlLSA315x2UO+fTSSAXCDf+Ar38/6oyGbDKQ4= +github.com/src-d/gcfg v1.4.0/go.mod h1:p/UMsR43ujA89BJY9duynAwIpvqEujIH/jFlfL7jWoI= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= @@ -331,6 +350,7 @@ github.com/ugorji/go v0.0.0-20180813092308-00b869d2f4a5/go.mod h1:hnLbHMwcvSihnD github.com/ulikunitz/xz v0.5.5/go.mod h1:2bypXElzHzzJZwzH67Y6wb67pO62Rzfn7BSiF4ABRW8= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= github.com/vmihailenco/msgpack v4.0.1+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/xanzy/ssh-agent v0.2.1 h1:TCbipTQL2JiiCprBWx9frJ2eJlCYT00NmctrHxVAr70= github.com/xanzy/ssh-agent v0.2.1/go.mod h1:mLlQY/MoOhWBj+gOGMQkOeiEvkx+8pJSI+0Bx9h2kr4= github.com/xiang90/probing v0.0.0-20160813154853-07dd2e8dfe18/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU= github.com/xlab/treeprint v0.0.0-20161029104018-1d6e34225557/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg= @@ -358,9 +378,11 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20190701094942-4def268fd1a4/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= 
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200510223506-06a226fb4e37/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9 h1:psW17arqaxU48Z5kZ0CQnkZWQJsqcURM6tKiBApRjXI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -391,6 +413,7 @@ golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190603091049-60506f45cf65/go.mod h1:HSz+uSET+XFnRR8LxR5pz3Of3rY3CfYBVs4xY44aLks= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20191009170851-d66e71096ffb/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200202094626-16171245cfb2/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200625001655-4c5254603344 h1:vGXIOMxbNfDTk/aXCmfdLgkrSV+Z2tcbze+pEc3v5W4= @@ -419,7 +442,9 @@ golang.org/x/sys v0.0.0-20190502175342-a43fa875dd82/go.mod h1:h1NjWce9XRLGQEsW7w golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190804053845-51ab0e2deafa/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd h1:xhmwyvizuTgC2qz7ZlMluP20uW+C3Rm0FD/WLDX8884= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -439,6 +464,7 @@ golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBn golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20190729092621-ff9f1409240a/go.mod h1:jcCCGcm9btYwXyDqrUWc6MKQKKGJCWEQ3AfLSRIbEuI= golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72 h1:bw9doJza/SFBEweII/rHQh338oozWyiFsBRHtrflcws= golang.org/x/tools v0.0.0-20190920225731-5eefd052ad72/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= @@ -487,6 +513,13 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/fsnotify.v1 v1.4.7/go.mod 
h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/ini.v1 v1.42.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo= +gopkg.in/src-d/go-billy.v4 v4.3.2 h1:0SQA1pRztfTFx2miS8sA97XvooFeNOmvUenF4o0EcVg= +gopkg.in/src-d/go-billy.v4 v4.3.2/go.mod h1:nDjArDMp+XMs1aFAESLRjfGSgfvoYN0hDfzEk0GjC98= +gopkg.in/src-d/go-git-fixtures.v3 v3.5.0/go.mod h1:dLBcvytrw/TYZsNTWCnkNF2DSIlzWYqTe3rJR56Ac7g= +gopkg.in/src-d/go-git.v4 v4.13.1 h1:SRtFyV8Kxc0UP7aCHcijOMQGPxHSmMOPrzulQWolkYE= +gopkg.in/src-d/go-git.v4 v4.13.1/go.mod h1:nx5NYcxdKxq5fpltdHnPa2Exj4Sx0EclMWZQbYDu2z8= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/pkg/initialize/run.go b/pkg/initialize/run.go new file mode 100644 index 000000000..31d0e5966 --- /dev/null +++ b/pkg/initialize/run.go @@ -0,0 +1,119 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package initialize + +import ( + "fmt" + "os" + + "go.uber.org/zap" + "gopkg.in/src-d/go-git.v4" + "gopkg.in/src-d/go-git.v4/config" + "gopkg.in/src-d/go-git.v4/plumbing" +) + +const ( + repoURL = "/~https://github.com/accurics/terrascan.git" + branch = "terrascan-v1.0" +) + +var ( + basePath = os.Getenv("HOME") + "/.terrascan" + basePolicyPath = basePath + "/pkg/policies/opa/rego" + + // AWSDefaultPolicyPath default policy path for aws + AWSDefaultPolicyPath = basePolicyPath + "/aws" + + // AzureDefaultPolicyPath default policy path for azure + AzureDefaultPolicyPath = basePolicyPath + "/azure" +) + +var policyPathMap = make(map[string]string) + +func init() { + policyPathMap["aws"] = AWSDefaultPolicyPath + policyPathMap["azure"] = AzureDefaultPolicyPath +} + +// IsCloudSupported checks if cloud provider is present in policyPathMap +func IsCloudSupported(cloud string) bool { + _, supported := policyPathMap[cloud] + return supported +} + +// GetPolicyPath returns the policy path for a cloud provider +func GetPolicyPath(cloud string) string { + return policyPathMap[cloud] +} + +// Run initializes terrascan if not done already +func Run() error { + + // check if policy paths exist + if path, err := os.Stat(basePolicyPath); err == nil && path.IsDir() { + return nil + } + + // download policies + os.RemoveAll(basePath) + if err := DownloadPolicies(); err != nil { + return err + } + + return nil +} + +// DownloadPolicies clones the policies to a local folder +func DownloadPolicies() error { + + // clone the repo + r, err := git.PlainClone(basePath, false, &git.CloneOptions{ + URL: repoURL, + }) + if err != nil { + zap.S().Errorf("failed to download policies. 
error: '%v'", err) + return err + } + + // create working tree + w, err := r.Worktree() + if err != nil { + zap.S().Errorf("failed to create working tree. error: '%v'", err) + return err + } + + // fetch references + err = r.Fetch(&git.FetchOptions{ + RefSpecs: []config.RefSpec{"refs/*:refs/*", "HEAD:refs/heads/HEAD"}, + }) + if err != nil { + zap.S().Errorf("failed to fetch references from repo. error: '%v'", err) + return err + } + + // checkout policies branch + err = w.Checkout(&git.CheckoutOptions{ + Branch: plumbing.ReferenceName(fmt.Sprintf("refs/heads/%s", branch)), + Force: true, + }) + if err != nil { + zap.S().Errorf("failed to checkout branch '%v'. error: '%v'", branch, err) + return err + } + + return nil +} diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index 63de4837e..f94e884a9 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -20,17 +20,18 @@ import ( "fmt" "os" + IacProvider "github.com/accurics/terrascan/pkg/iac-providers" + "github.com/accurics/terrascan/pkg/initialize" "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" - - IacProvider "github.com/accurics/terrascan/pkg/iac-providers" ) var ( - errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") - errDirNotExists = fmt.Errorf("directory does not exist") - errFileNotExists = fmt.Errorf("file does not exist") - errIacNotSupported = fmt.Errorf("iac type or version not supported") + errEmptyIacPath = fmt.Errorf("empty iac path, either use '-f' or '-d' option") + errDirNotExists = fmt.Errorf("directory does not exist") + errFileNotExists = fmt.Errorf("file does not exist") + errIacNotSupported = fmt.Errorf("iac type or version not supported") + errCloudNotSupported = fmt.Errorf("cloud type not supported") ) // ValidateInputs validates the inputs to the executor object @@ -77,7 +78,14 @@ func (e *Executor) ValidateInputs() error { } zap.S().Debugf("iac type '%s', version '%s' is supported", e.iacType, e.iacVersion) - // check if policy type is supported + // check if cloud type is supported + if !initialize.IsCloudSupported(e.cloudType) { + zap.S().Errorf("cloud type '%s' not supported", e.cloudType) + return errCloudNotSupported + } + if e.policyPath == "" { + e.policyPath = initialize.GetPolicyPath(e.cloudType) + } // successful zap.S().Debug("input validation successful") From 0f84d9e5d67de66cad611a33f69927511648dd3b Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 14 Aug 2020 12:23:21 +0530 Subject: [PATCH 182/188] refactor terrascan init code path --- pkg/config/global.go | 60 +++++++++++++++++++++++++++++++++++ pkg/config/types.go | 36 +++++++++++++++++++++ pkg/initialize/run.go | 40 ++++------------------- pkg/policy/aws.go | 26 +++++++++++++++ pkg/policy/azure.go | 26 +++++++++++++++ pkg/policy/cloud-providers.go | 48 ++++++++++++++++++++++++++++ pkg/runtime/validate.go | 6 ++-- 7 files changed, 206 insertions(+), 36 deletions(-) create mode 100644 pkg/config/global.go create mode 100644 pkg/config/types.go create mode 100644 pkg/policy/aws.go create mode 100644 pkg/policy/azure.go create mode 100644 pkg/policy/cloud-providers.go diff --git a/pkg/config/global.go b/pkg/config/global.go new file mode 100644 index 000000000..1fcb9a1ed --- /dev/null +++ b/pkg/config/global.go @@ -0,0 +1,60 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. 
+ You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package config + +import ( + "os" +) + +const ( + policyRepoURL = "/~https://github.com/accurics/terrascan.git" + policyBranch = "terrascan-v1.0" +) + +var ( + policyRepoPath = os.Getenv("HOME") + "/.terrascan" + policyBasePath = policyRepoPath + "/pkg/policies/opa/rego" +) + +func init() { + Global.Policy = PolicyConfig{ + BasePath: policyBasePath, + RepoPath: policyRepoPath, + RepoURL: policyRepoURL, + Branch: policyBranch, + } +} + +// GetPolicyBasePath returns policy base path as set in global config +func GetPolicyBasePath() string { + return Global.Policy.BasePath +} + +// GetPolicyRepoPath return path to the policies repo locally downloaded +func GetPolicyRepoPath() string { + return Global.Policy.RepoPath +} + +// GetPolicyRepoURL returns policy repo url +func GetPolicyRepoURL() string { + return Global.Policy.RepoURL +} + +// GetPolicyBranch returns policy repo url +func GetPolicyBranch() string { + return Global.Policy.Branch +} diff --git a/pkg/config/types.go b/pkg/config/types.go new file mode 100644 index 000000000..3e40ff1ac --- /dev/null +++ b/pkg/config/types.go @@ -0,0 +1,36 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package config + +// Global initalizes GlobalConfig struct +var Global *GlobalConfig = &GlobalConfig{} + +// GlobalConfig struct defines global variables/configurations across terrascan +type GlobalConfig struct { + Policy PolicyConfig +} + +// PolicyConfig struct define policy specific configurations +type PolicyConfig struct { + // policy local path + BasePath string + RepoPath string + + // policy git url and branch + RepoURL string + Branch string +} diff --git a/pkg/initialize/run.go b/pkg/initialize/run.go index 31d0e5966..08027fa34 100644 --- a/pkg/initialize/run.go +++ b/pkg/initialize/run.go @@ -20,46 +20,20 @@ import ( "fmt" "os" + "github.com/accurics/terrascan/pkg/config" "go.uber.org/zap" "gopkg.in/src-d/go-git.v4" - "gopkg.in/src-d/go-git.v4/config" + gitConfig "gopkg.in/src-d/go-git.v4/config" "gopkg.in/src-d/go-git.v4/plumbing" ) -const ( - repoURL = "/~https://github.com/accurics/terrascan.git" - branch = "terrascan-v1.0" -) - var ( - basePath = os.Getenv("HOME") + "/.terrascan" - basePolicyPath = basePath + "/pkg/policies/opa/rego" - - // AWSDefaultPolicyPath default policy path for aws - AWSDefaultPolicyPath = basePolicyPath + "/aws" - - // AzureDefaultPolicyPath default policy path for azure - AzureDefaultPolicyPath = basePolicyPath + "/azure" + basePath = config.GetPolicyRepoPath() + basePolicyPath = config.GetPolicyBasePath() + repoURL = config.GetPolicyRepoURL() + branch = config.GetPolicyBranch() ) -var policyPathMap = make(map[string]string) - -func init() { - policyPathMap["aws"] = AWSDefaultPolicyPath - policyPathMap["azure"] = AzureDefaultPolicyPath -} - -// IsCloudSupported checks if cloud provider is present in policyPathMap -func IsCloudSupported(cloud string) bool { - _, supported := policyPathMap[cloud] - return supported -} - -// GetPolicyPath returns the policy path for a cloud provider -func GetPolicyPath(cloud string) string { - return policyPathMap[cloud] -} - // Run initializes terrascan if not done already func Run() error { @@ -98,7 +72,7 @@ func DownloadPolicies() error { // fetch references err = r.Fetch(&git.FetchOptions{ - RefSpecs: []config.RefSpec{"refs/*:refs/*", "HEAD:refs/heads/HEAD"}, + RefSpecs: []gitConfig.RefSpec{"refs/*:refs/*", "HEAD:refs/heads/HEAD"}, }) if err != nil { zap.S().Errorf("failed to fetch references from repo. error: '%v'", err) diff --git a/pkg/policy/aws.go b/pkg/policy/aws.go new file mode 100644 index 000000000..7c52dff75 --- /dev/null +++ b/pkg/policy/aws.go @@ -0,0 +1,26 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package policy + +const ( + aws supportedCloudType = "aws" +) + +func init() { + // Register aws as a cloud provider with terrascan + RegisterCloudProvider(aws) +} diff --git a/pkg/policy/azure.go b/pkg/policy/azure.go new file mode 100644 index 000000000..4d0f0fb64 --- /dev/null +++ b/pkg/policy/azure.go @@ -0,0 +1,26 @@ +/* + Copyright (C) 2020 Accurics, Inc. 
+ + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package policy + +const ( + azure supportedCloudType = "azure" +) + +func init() { + // Register azure as a cloud provider with terrascan + RegisterCloudProvider(azure) +} diff --git a/pkg/policy/cloud-providers.go b/pkg/policy/cloud-providers.go new file mode 100644 index 000000000..2d6f370ea --- /dev/null +++ b/pkg/policy/cloud-providers.go @@ -0,0 +1,48 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +*/ + +package policy + +import ( + "github.com/accurics/terrascan/pkg/config" +) + +// supportedCloudType data type for supported cloud types in terrascan +type supportedCloudType string + +// supportedCloudProvider map of supported cloud provider and its default policy path +var supportedCloudProvider = make(map[supportedCloudType]string) + +var ( + basePolicyPath = config.GetPolicyBasePath() +) + +// RegisterCloudProvider registers a cloud provider with terrascan +func RegisterCloudProvider(cloudType supportedCloudType) { + policyPath := basePolicyPath + "/" + string(cloudType) + supportedCloudProvider[cloudType] = policyPath +} + +// IsCloudProviderSupported returns whether a cloud provider is supported in terrascan +func IsCloudProviderSupported(cloudType string) bool { + _, supported := supportedCloudProvider[supportedCloudType(cloudType)] + return supported +} + +// GetDefaultPolicyPath returns the path to default policies for a given cloud provider +func GetDefaultPolicyPath(cloudType string) string { + return supportedCloudProvider[supportedCloudType(cloudType)] +} diff --git a/pkg/runtime/validate.go b/pkg/runtime/validate.go index f94e884a9..eb99b115c 100644 --- a/pkg/runtime/validate.go +++ b/pkg/runtime/validate.go @@ -21,7 +21,7 @@ import ( "os" IacProvider "github.com/accurics/terrascan/pkg/iac-providers" - "github.com/accurics/terrascan/pkg/initialize" + "github.com/accurics/terrascan/pkg/policy" "github.com/accurics/terrascan/pkg/utils" "go.uber.org/zap" ) @@ -79,12 +79,12 @@ func (e *Executor) ValidateInputs() error { zap.S().Debugf("iac type '%s', version '%s' is supported", e.iacType, e.iacVersion) // check if cloud type is supported - if !initialize.IsCloudSupported(e.cloudType) { + if !policy.IsCloudProviderSupported(e.cloudType) { zap.S().Errorf("cloud type '%s' not supported", e.cloudType) return errCloudNotSupported } if e.policyPath == "" { - e.policyPath = initialize.GetPolicyPath(e.cloudType) + e.policyPath = policy.GetDefaultPolicyPath(e.cloudType) } // successful From 
43c3ccd22a8edf497b5ba13b6df4a3bd2a8f19d5 Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 14 Aug 2020 15:32:38 +0530 Subject: [PATCH 183/188] update usage message --- cmd/terrascan/main.go | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index b7cf28d93..8bd97c2ed 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -35,13 +35,13 @@ func main() { // IaC flags iacType = flag.String("iac", "", "IaC provider (supported values: terraform)") - iacVersion = flag.String("iac-version", "default", "IaC version (supported values: 'v12' for terraform)") + iacVersion = flag.String("iac-version", "v12", "IaC version (supported values: 'v12' for terraform)") iacFilePath = flag.String("f", "", "IaC file path") iacDirPath = flag.String("d", ".", "IaC directory path") policyPath = flag.String("p", "", "Policy directory path") // cloud flags - cloudType = flag.String("cloud", "", "cloud provider (supported values: aws)") + cloudType = flag.String("cloud", "", "cloud provider (supported values: aws, azure)") // logging flags logLevel = flag.String("log-level", "info", "logging level (debug, info, warn, error, panic, fatal)") @@ -51,7 +51,7 @@ func main() { configFile = flag.String("config", "", "config file path") // output type - output = flag.String("output", "yaml", "output format (json, xml, yaml)") + output = flag.String("output", "yaml", "output format (json, yaml)") ) flag.Parse() From 92e5cd25c83ed24a4d0948608a4c321176f5403f Mon Sep 17 00:00:00 2001 From: Yusuf Kanchwala Date: Fri, 14 Aug 2020 15:48:26 +0530 Subject: [PATCH 184/188] add version information --- cmd/terrascan/main.go | 11 +++++++++++ pkg/version/version.go | 25 +++++++++++++++++++++++++ 2 files changed, 36 insertions(+) create mode 100644 pkg/version/version.go diff --git a/cmd/terrascan/main.go b/cmd/terrascan/main.go index 8bd97c2ed..09cbeb556 100644 --- a/cmd/terrascan/main.go +++ b/cmd/terrascan/main.go @@ -18,11 +18,13 @@ package main import ( "flag" + "fmt" "github.com/accurics/terrascan/pkg/cli" httpServer "github.com/accurics/terrascan/pkg/http-server" "github.com/accurics/terrascan/pkg/initialize" "github.com/accurics/terrascan/pkg/logging" + "github.com/accurics/terrascan/pkg/version" "go.uber.org/zap" ) @@ -52,6 +54,9 @@ func main() { // output type output = flag.String("output", "yaml", "output format (json, yaml)") + + //version + ver = flag.Bool("version", false, "terrascan version") ) flag.Parse() @@ -61,6 +66,12 @@ func main() { return } + // print version + if *ver { + fmt.Println(version.Get()) + return + } + // initialize logger logging.Init(*logType, *logLevel) diff --git a/pkg/version/version.go b/pkg/version/version.go new file mode 100644 index 000000000..8096b2540 --- /dev/null +++ b/pkg/version/version.go @@ -0,0 +1,25 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package version + +// Terrascan The Terrascan version +const Terrascan = "v1.0.0" + +// Get returns the terrascan version +func Get() string { + return Terrascan +} From f4beb3aa98540680d918a9483d262ef83e38b18c Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Fri, 14 Aug 2020 07:01:31 -0700 Subject: [PATCH 185/188] updated to the latest aws and azure policy set --- .../AWS.AMI.NS.Medium.1040.json | 12 +++ .../amiSharedToMultipleAccounts.rego | 15 ++++ .../AWS.API Gateway.Logging.Medium.0569.json | 10 +++ .../apiGatewaySettingMetrics.rego | 13 +++ .../AWS.APIGateway.Medium.0568.json | 10 +++ ...IGateway.Network Security.Medium.0570.json | 10 +++ .../apiGatewayContentEncoding.rego | 6 ++ .../apiGatewayEndpointConfig.rego | 8 ++ .../AWS.API Gateway.Logging.Medium.0567.json | 10 +++ .../AWS.API Gateway.Logging.Medium.0571.json | 10 +++ .../AWS.API Gateway.Logging.Medium.0572.json | 12 +++ ... Gateway.Network Security.Medium.0565.json | 10 +++ .../apiGatewayLogging.rego | 9 ++ .../aws_api_gateway_stage/apiGatewayName.rego | 12 +++ .../apiGatewaySslCertificate.rego | 6 ++ .../apiGatewayTracing.rego | 10 +++ .../aws_athena/AWS.Athena.Medium.0592.json | 10 +++ .../aws/aws_athena/athenaQueryEncryption.rego | 11 +++ .../AWS.CloudFormation.Medium.0599.json | 13 +++ .../AWS.CloudFormation.Medium.0601.json | 10 +++ .../AWS.CloudFormation.Medium.0603.json | 13 +++ .../AWS.CloudFormation.Medium.0604.json | 13 +++ .../AWS.CloudFormation.Medium.0605.json | 10 +++ .../awsCloudFormationInUse.rego | 8 ++ .../cloudFormationStackDrift.rego | 8 ++ .../cloudFormationStackNotifs.rego | 7 ++ .../cloudFormationStackPolicy.rego | 7 ++ .../cloudFormationTerminationProtection.rego | 7 ++ ....CloudFront.Network Security.Low.0568.json | 12 +++ .../cloudfrontNoGeoRestriction.rego | 10 +++ .../AWS.CloudWatch.Logging.Medium.0631.json | 10 +++ .../awsCloudWatchRetentionPreiod.rego | 10 +++ ....Encryption&KeyManagement.Medium.0660.json | 10 +++ .../aws/aws_config/awsConfigEncryptedVol.rego | 8 ++ .../AWS.Config.Logging.HIGH.0590.json | 12 +++ .../configEnabledForAllRegions.rego | 21 +++++ .../AWS.AWS RDS.NS.High.0101.json | 12 +++ .../aws_db_instance/AWS.RDS.DS.High.1041.json | 12 +++ .../aws_db_instance/AWS.RDS.DS.High.1042.json | 12 +++ .../AWS.RDS.DataSecurity.High.0414.json | 10 +++ .../AWS.RDS.DataSecurity.High.0577.json | 12 +++ .../rdsAutoMinorVersionUpgradeEnabled.rego | 9 ++ .../aws/aws_db_instance/rdsCAExpired.rego | 8 ++ .../rdsHasStorageEncrypted.rego | 21 +++++ .../aws_db_instance/rdsIamAuthEnabled.rego | 15 ++++ .../rdsPubliclyAccessible.rego | 9 ++ .../AWS.RDS.NetworkSecurity.High.0101.json | 12 +++ .../AWS.RDS.NetworkSecurity.High.0102.json | 12 +++ .../AWS.RDS.NetworkSecurity.High.0103.json | 12 +++ .../rdsHostsHigherThan256.rego | 12 +++ .../aws_db_security_group/rdsIsPublic.rego | 10 +++ .../rdsScopeIsPublic.rego | 19 +++++ .../AWS.EBS.DataSecurity.High.0580.json | 12 +++ .../ebsDefaultEncryption.rego | 9 ++ ....EncryptionandKeyManagement.High.0413.json | 13 +++ .../aws/aws_ebs_volume/ebsEncryption.rego | 22 +++++ ....Encryption&KeyManagement.Medium.0688.json | 10 +++ .../opa/rego/aws/aws_ec2/awsAmiEncrypted.rego | 8 ++ .../AWS.ECR.DataSecurity.High.0578.json | 12 +++ .../scanOnPushDisabled.rego | 16 ++++ .../AWS.ECR.DataSecurity.High.0579.json | 12 +++ .../ecrRepoIsPublic.rego | 40 +++++++++ .../aws_ecs_service/AWS.ECS.High.0436.json | 10 +++ .../aws/aws_ecs_service/ecsServiceAdmin.rego | 6 ++ ....EcsCluster.NetworkSecurity.High.0104.json | 12 +++ ...hConfiguration.DataSecurity.High.0101.json | 14 
++++ ...tainerDefinitionContainsSensitiveInfo.rego | 19 +++++ .../instanceNotInVpc.rego | 8 ++ ....EncryptionandKeyManagement.High.0409.json | 13 +++ ....EncryptionandKeyManagement.High.0410.json | 13 +++ .../efsEncryptedFalse.rego | 8 ++ .../efsEncryptedWithNoKms.rego | 9 ++ .../aws_efs_file_system/efsNotEncrypted.rego | 8 ++ ...WS.ElastiCache.DataSecurity.High.0424.json | 14 ++++ ...WS.ElastiCache.DataSecurity.High.0425.json | 19 +++++ ...stiCache.HighAvailability.Medium.0757.json | 10 +++ .../elastiCacheMultiAZ.rego | 7 ++ .../noMemcachedInElastiCache.rego | 8 ++ .../redisVersionCompliance.rego | 18 ++++ ....Encryption&KeyManagement.Medium.0768.json | 10 +++ ....Encryption&KeyManagement.Medium.0778.json | 10 +++ ...AWS.Elasticsearch.Logging.Medium.0573.json | 12 +++ .../elastiSearchEncryptAtRest.rego | 22 +++++ .../elastiSearchNoKms.rego | 15 ++++ .../esloggingdisabled.rego | 36 ++++++++ ...AWS.ELB.NetworkPortsSecurity.Low.0563.json | 12 +++ .../elbInstanceProtocolNotSecured.rego | 10 +++ .../aws/aws_elb/elbLbProtocolNotSecured.rego | 10 +++ ...uardDuty Enabled.Security.Medium.0575.json | 12 +++ .../gaurdDutyDisabled.rego | 8 ++ .../AWS.IamUser.IAM.High.0391.json | 12 +++ .../programmaticAccessCreation.rego | 21 +++++ .../AWS.Iam.IAM.Low.0539.json | 15 ++++ .../passwordResuseNotAllowed.rego | 13 +++ .../AWS.IamPolicy.IAM.High.0392.json | 12 +++ .../iamRoleWithFullAdminCntrl.rego | 51 ++++++++++++ .../AWS.IamPolicy.IAM.High.0392.json | 12 +++ .../iamRolePolicyWithFullAdminCntrl.rego | 50 +++++++++++ .../AWS.Iam.IAM.High.0391.json | 12 +++ .../noPasswordPolicyEnabled.rego | 43 ++++++++++ .../AWS.IamPolicy.IAM.High.0392.json | 12 +++ .../AWS.IamUser.IAM.High.0389.json | 12 +++ .../iamPolicyWithFullAdminControl.rego | 51 ++++++++++++ .../aws/aws_iam_user_policy/passAndMFA.rego | 19 +++++ ....EncryptionandKeyManagement.High.0411.json | 12 +++ .../kinesis_sse_disabled.rego | 9 ++ .../kinesis_sse_not_configured.rego | 9 ++ .../AWS.KMS.Logging.High.0400.json | 4 +- .../AWS.KMS.NetworkSecurity.High.0566.json | 12 +++ .../rego/aws/aws_kms_key/kmsKeyDisabled.rego | 8 ++ .../aws/aws_kms_key/kmsKeyExposedPolicy.rego | 50 +++++++++++ .../aws_kms_key/kmsKeyNoDeletionWindow.rego | 19 +++++ .../AWS.ElasticSearch.IAM.Medium.0878.json | 10 +++ .../rego/aws/aws_lambda/awsLambdaRole.rego | 2 + .../AWS.VPC.Logging.Medium.0470.json | 12 +++ .../lambdaXRayTracingDisabled.rego | 19 +++++ ....EncryptionandKeyManagement.High.0413.json | 12 +++ ...hConfiguration.DataSecurity.High.0101.json | 12 +++ .../hardCodedShellScript.rego | 23 +++++ .../hardCodedUrl.rego | 23 +++++ ...aunchConfigurationEBSBlockUnEncrypted.rego | 24 ++++++ ...unchConfigurationRootBlockUnEncrypted.rego | 24 ++++++ ....EncryptionandKeyManagement.High.0401.json | 16 ++++ ....EncryptionandKeyManagement.High.0403.json | 83 +++++++++++++++++++ .../elbSsLTsLProtocol.rego | 16 ++++ .../elbWeakCipher.rego | 15 ++++ .../AWS.Macie.Security.Medium.0576.json | 12 +++ .../macieIsNotAssociated.rego | 16 ++++ ...AWS.ElasticSearch.Logging.Medium.0885.json | 10 +++ ...ticSearch.NetworkSecurity.Medium.0887.json | 10 +++ .../rego/aws/aws_mq/awsMqLoggingEnabled.rego | 21 +++++ .../aws/aws_mq/awsMqPubliclyAccessible.rego | 7 ++ .../AWS.Organizations.IAM.MEDIUM.0590.json | 12 +++ .../scpFullAccess.rego | 67 +++++++++++++++ ....EncryptionandKeyManagement.High.0414.json | 12 +++ .../aws_rds_cluster/storageNotEncrypted.rego | 25 ++++++ ....EncryptionandKeyManagement.High.0415.json | 12 +++ .../AWS.Redshift.Logging.Medium.0565.json | 12 +++ 
...WS.Redshift.NetworkSecurity.HIGH.0564.json | 12 +++ .../redshiftAuditLogs.rego | 17 ++++ .../redshiftEncryptedFalse.rego | 15 ++++ .../redshiftEncryptedWithNoKms.rego | 9 ++ .../redshiftPublicAccess.rego | 15 ++++ ...oute53 query logs.Logging.Medium.0574.json | 12 +++ .../route53LoggingDisabled.rego | 12 +++ ...e53HostedZone.DNSManagement.High.0422.json | 12 +++ .../noRoute53RecordSet.rego | 17 ++++ .../AWS.S3Bucket.DS.High.1043.json | 12 +++ ....EncryptionandKeyManagement.High.0405.json | 12 +++ .../AWS.S3Bucket.IAM.High.0370.json | 12 +++ .../AWS.S3Bucket.IAM.High.0377.json | 14 ++++ .../AWS.S3Bucket.IAM.High.0378.json | 14 ++++ .../AWS.S3Bucket.IAM.High.0379.json | 14 ++++ .../AWS.S3Bucket.IAM.High.0381.json | 14 ++++ ...WS.S3Bucket.NetworkSecurity.High.0417.json | 12 +++ .../aws/aws_s3_bucket/noS3BucketSseRules.rego | 9 ++ .../rego/aws/aws_s3_bucket/s3AclGrants.rego | 8 ++ .../s3BucketNoWebsiteIndexDoc.rego | 8 ++ .../s3BucketSseRulesWithKmsNull.rego | 30 +++++++ .../aws/aws_s3_bucket/s3EnforceUserACL.rego | 16 ++++ .../rego/aws/aws_s3_bucket/s3Versioning.rego | 17 ++++ .../aws_s3_bucket/s3VersioningMfaFalse.rego | 10 +++ .../AWS.IamPolicy.IAM.High.0373.json | 14 ++++ .../AWS.IamPolicy.IAM.High.0375.json | 14 ++++ .../AWS.IamPolicy.IAM.High.0376.json | 14 ++++ .../aws_sns_topic/AWS.SNS.NS.Medium.1044.json | 12 +++ .../aws/aws_sns_topic/snsPublicAccess.rego | 38 +++++++++ .../AWS.SQS.NetworkSecurity.High.0569.json | 12 +++ .../AWS.SQS.NetworkSecurity.High.0570.json | 12 +++ .../aws/aws_sqs_queue/sqsQueueExposed.rego | 37 +++++++++ .../aws/aws_sqs_queue/sqsSseDisabled.rego | 15 ++++ .../accurics.azure.NS.147.json | 12 +++ .../appGatewayWAFEnabled.rego | 30 +++++++ .../accurics.azure.AKS.3.json | 12 +++ .../accurics.azure.EKM.164.json | 12 +++ .../containerRegistryAdminEnabled.rego | 8 ++ .../containerRegistryResourceLock.rego | 27 ++++++ .../accurics.azure.EKM.164.json | 12 +++ .../accurics.azure.EKM.20.json | 12 +++ .../keyVaultAuditLoggingEnabled.rego | 23 +++++ .../keyVaultSoftDeleteEnabled.rego | 9 ++ .../accurics.azure.EKM.25.json | 12 +++ .../checkKeyExpirationIsSet.rego | 27 ++++++ .../accurics.azure.EKM.26.json | 12 +++ .../checkSecretExpirationIsSet.rego | 27 ++++++ .../KubeDashboardDisabled.rego | 8 ++ .../accurics.azure.NS.382.json | 10 +++ .../accurics.azure.NS.383.json | 10 +++ .../networkPolicyEnabled.rego | 8 ++ .../accurics.azure.NS.387.json | 12 +++ .../networkWatcherExist.rego | 15 ++++ .../accurics.azure.NS.11.json | 12 +++ .../accurics.azure.NS.342.json | 12 +++ .../networkWatcherCheck.rego | 12 +++ .../accurics.azure.LOG.151.json | 12 --- .../accurics.azure.LOG.152.json | 12 --- .../accurics.azure.LOG.155.json | 12 --- .../connectionThrottling.rego | 10 --- .../logConnections.rego | 10 --- .../logRetention.rego | 14 ---- .../accurics.azure.NS.13.json | 12 +++ .../accurics.azure.NS.166.json | 12 +++ .../azurerm_redis_cache/allowLessHosts.rego | 17 ++++ .../redisCacheNoUpdatePatchSchedule.rego | 17 ++++ .../accurics.azure.NS.272.json | 12 +++ .../resourceGroupLock.rego | 27 ++++++ .../accurics.azure.IAM.388.json | 12 +++ .../checkGuestUser.rego | 6 ++ .../accurics.azure.OPS.349.json | 12 +++ .../securityCenterPrincingTier.rego | 6 ++ .../accurics.azure.IAM.137.json | 12 +++ .../sqlServerADPredictableAccount.rego | 12 +++ .../accurics.azure.NS.169.json | 12 +++ .../moreHostsAllowed.rego | 17 ++++ .../accurics.azure.IAM.10.json | 12 +++ .../accurics.azure.IAM.138.json | 12 +++ .../accurics.azure.LOG.356.json | 10 +++ .../accurics.azure.MON.354.json | 10 +++ 
.../sqlAuditingRetention.rego | 7 ++ .../sqlServerADAdminConfigured.rego | 21 +++++ .../sqlServerAuditingEnabled.rego | 6 ++ .../sqlServerPredictableAccount.rego | 47 +++++++++++ .../accurics.azure.EKM.7.json | 12 +++ .../accurics.azure.NS.2.json | 12 +++ .../accurics.azure.NS.4.json | 12 +++ ...storageAccountCheckNetworkDefaultRule.rego | 10 +++ .../storageAccountEnableHttps.rego | 8 ++ .../storageAccountOpenToPublic.rego | 27 ++++++ ...ccountTrustedMicrosoftServicesEnabled.rego | 17 ++++ .../accurics.azure.IAM.368.json | 12 +++ .../checkStorageContainerAccess.rego | 6 ++ .../accurics.azure.NS.18.json | 12 +++ .../vmAttachedToNetwork.rego | 7 ++ .../accurics.azure.NS.161.json | 12 +++ .../noSecurityGroupAssociated.rego | 30 +++++++ 235 files changed, 3353 insertions(+), 72 deletions(-) create mode 100755 pkg/policies/opa/rego/aws/aws_ami_launch_permission/AWS.AMI.NS.Medium.1040.json create mode 100755 pkg/policies/opa/rego/aws/aws_ami_launch_permission/amiSharedToMultipleAccounts.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/AWS.API Gateway.Logging.Medium.0569.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/apiGatewaySettingMetrics.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Medium.0568.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Network Security.Medium.0570.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayContentEncoding.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayEndpointConfig.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0567.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0571.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0572.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Network Security.Medium.0565.json create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayLogging.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayName.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewaySslCertificate.rego create mode 100755 pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayTracing.rego create mode 100755 pkg/policies/opa/rego/aws/aws_athena/AWS.Athena.Medium.0592.json create mode 100755 pkg/policies/opa/rego/aws/aws_athena/athenaQueryEncryption.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0599.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0601.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0603.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0604.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0605.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/awsCloudFormationInUse.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackDrift.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackNotifs.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackPolicy.rego 
create mode 100755 pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationTerminationProtection.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Network Security.Low.0568.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoGeoRestriction.rego create mode 100755 pkg/policies/opa/rego/aws/aws_cloudwatch/AWS.CloudWatch.Logging.Medium.0631.json create mode 100755 pkg/policies/opa/rego/aws/aws_cloudwatch/awsCloudWatchRetentionPreiod.rego create mode 100755 pkg/policies/opa/rego/aws/aws_config/AWS.Config.Encryption&KeyManagement.Medium.0660.json create mode 100755 pkg/policies/opa/rego/aws/aws_config/awsConfigEncryptedVol.rego create mode 100755 pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/AWS.Config.Logging.HIGH.0590.json create mode 100755 pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/configEnabledForAllRegions.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/AWS.AWS RDS.NS.High.0101.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1041.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1042.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0414.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0577.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsAutoMinorVersionUpgradeEnabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsCAExpired.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsHasStorageEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsIamAuthEnabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0101.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0102.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0103.json create mode 100755 pkg/policies/opa/rego/aws/aws_db_security_group/rdsHostsHigherThan256.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_security_group/rdsIsPublic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_db_security_group/rdsScopeIsPublic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/AWS.EBS.DataSecurity.High.0580.json create mode 100755 pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/ebsDefaultEncryption.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ebs_volume/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json create mode 100755 pkg/policies/opa/rego/aws/aws_ebs_volume/ebsEncryption.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ec2/AWS.EC2.Encryption&KeyManagement.Medium.0688.json create mode 100755 pkg/policies/opa/rego/aws/aws_ec2/awsAmiEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ecr_repository/AWS.ECR.DataSecurity.High.0578.json create mode 100755 pkg/policies/opa/rego/aws/aws_ecr_repository/scanOnPushDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ecr_repository_policy/AWS.ECR.DataSecurity.High.0579.json create mode 100755 pkg/policies/opa/rego/aws/aws_ecr_repository_policy/ecrRepoIsPublic.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ecs_service/AWS.ECS.High.0436.json create mode 100755 
pkg/policies/opa/rego/aws/aws_ecs_service/ecsServiceAdmin.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.EcsCluster.NetworkSecurity.High.0104.json create mode 100755 pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.LaunchConfiguration.DataSecurity.High.0101.json create mode 100755 pkg/policies/opa/rego/aws/aws_ecs_task_definition/containerDefinitionContainsSensitiveInfo.rego create mode 100755 pkg/policies/opa/rego/aws/aws_ecs_task_definition/instanceNotInVpc.rego create mode 100755 pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0409.json create mode 100755 pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0410.json create mode 100755 pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedFalse.rego create mode 100755 pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedWithNoKms.rego create mode 100755 pkg/policies/opa/rego/aws/aws_efs_file_system/efsNotEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0424.json create mode 100755 pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0425.json create mode 100755 pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.HighAvailability.Medium.0757.json create mode 100755 pkg/policies/opa/rego/aws/aws_elasticache_cluster/elastiCacheMultiAZ.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elasticache_cluster/noMemcachedInElastiCache.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elasticache_cluster/redisVersionCompliance.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0768.json create mode 100755 pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0778.json create mode 100755 pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.Elasticsearch.Logging.Medium.0573.json create mode 100755 pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchEncryptAtRest.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchNoKms.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elasticsearch_domain/esloggingdisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elb/AWS.ELB.NetworkPortsSecurity.Low.0563.json create mode 100755 pkg/policies/opa/rego/aws/aws_elb/elbInstanceProtocolNotSecured.rego create mode 100755 pkg/policies/opa/rego/aws/aws_elb/elbLbProtocolNotSecured.rego create mode 100755 pkg/policies/opa/rego/aws/aws_guardduty_detector/AWS.GuardDuty Enabled.Security.Medium.0575.json create mode 100755 pkg/policies/opa/rego/aws/aws_guardduty_detector/gaurdDutyDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0391.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_access_key/programmaticAccessCreation.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0539.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordResuseNotAllowed.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_role/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_role/iamRoleWithFullAdminCntrl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_role_policy/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_role_policy/iamRolePolicyWithFullAdminCntrl.rego 
create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_login_profile/AWS.Iam.IAM.High.0391.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_login_profile/noPasswordPolicyEnabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamPolicy.IAM.High.0392.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0389.json create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/iamPolicyWithFullAdminControl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_iam_user_policy/passAndMFA.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0411.json create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_disabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_not_configured.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.NetworkSecurity.High.0566.json create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyExposedPolicy.rego create mode 100755 pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyNoDeletionWindow.rego create mode 100755 pkg/policies/opa/rego/aws/aws_lambda/AWS.ElasticSearch.IAM.Medium.0878.json create mode 100755 pkg/policies/opa/rego/aws/aws_lambda/awsLambdaRole.rego create mode 100755 pkg/policies/opa/rego/aws/aws_lambda_function/AWS.VPC.Logging.Medium.0470.json create mode 100755 pkg/policies/opa/rego/aws/aws_lambda_function/lambdaXRayTracingDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0101.json create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedShellScript.rego create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedUrl.rego create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationEBSBlockUnEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationRootBlockUnEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0401.json create mode 100755 pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0403.json create mode 100755 pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbSsLTsLProtocol.rego create mode 100755 pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbWeakCipher.rego create mode 100755 pkg/policies/opa/rego/aws/aws_macie_member_account_association/AWS.Macie.Security.Medium.0576.json create mode 100755 pkg/policies/opa/rego/aws/aws_macie_member_account_association/macieIsNotAssociated.rego create mode 100755 pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.Logging.Medium.0885.json create mode 100755 pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.NetworkSecurity.Medium.0887.json create mode 100755 pkg/policies/opa/rego/aws/aws_mq/awsMqLoggingEnabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_mq/awsMqPubliclyAccessible.rego create mode 100755 pkg/policies/opa/rego/aws/aws_organizations_policy/AWS.Organizations.IAM.MEDIUM.0590.json create mode 100755 pkg/policies/opa/rego/aws/aws_organizations_policy/scpFullAccess.rego create mode 100755 
pkg/policies/opa/rego/aws/aws_rds_cluster/AWS.RDS.EncryptionandKeyManagement.High.0414.json create mode 100755 pkg/policies/opa/rego/aws/aws_rds_cluster/storageNotEncrypted.rego create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.EncryptionandKeyManagement.High.0415.json create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.Logging.Medium.0565.json create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.NetworkSecurity.HIGH.0564.json create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftAuditLogs.rego create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedFalse.rego create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedWithNoKms.rego create mode 100755 pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftPublicAccess.rego create mode 100755 pkg/policies/opa/rego/aws/aws_route53_query_log/AWS.Route53 query logs.Logging.Medium.0574.json create mode 100755 pkg/policies/opa/rego/aws/aws_route53_query_log/route53LoggingDisabled.rego create mode 100755 pkg/policies/opa/rego/aws/aws_route53_record/AWS.Route53HostedZone.DNSManagement.High.0422.json create mode 100755 pkg/policies/opa/rego/aws/aws_route53_record/noRoute53RecordSet.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.DS.High.1043.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketSseRulesWithKmsNull.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3EnforceUserACL.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3Versioning.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0373.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0375.json create mode 100755 pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0376.json create mode 100755 pkg/policies/opa/rego/aws/aws_sns_topic/AWS.SNS.NS.Medium.1044.json create mode 100755 pkg/policies/opa/rego/aws/aws_sns_topic/snsPublicAccess.rego create mode 100755 pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0569.json create mode 100755 pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0570.json create mode 100755 pkg/policies/opa/rego/aws/aws_sqs_queue/sqsQueueExposed.rego create mode 100755 pkg/policies/opa/rego/aws/aws_sqs_queue/sqsSseDisabled.rego create mode 100755 
pkg/policies/opa/rego/azure/azurerm_application_gateway/accurics.azure.NS.147.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_application_gateway/appGatewayWAFEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.AKS.3.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.EKM.164.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryAdminEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryResourceLock.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.164.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.20.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultAuditLoggingEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultSoftDeleteEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault_key/accurics.azure.EKM.25.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault_key/checkKeyExpirationIsSet.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault_secret/accurics.azure.EKM.26.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_key_vault_secret/checkSecretExpirationIsSet.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/KubeDashboardDisabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.382.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.383.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/networkPolicyEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_watcher/accurics.azure.NS.387.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_watcher/networkWatcherExist.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.11.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.342.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/networkWatcherCheck.rego delete mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json delete mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json delete mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json delete mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego delete mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego delete mode 100755 pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.13.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.166.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/allowLessHosts.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_redis_cache/redisCacheNoUpdatePatchSchedule.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_resource_group/accurics.azure.NS.272.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_resource_group/resourceGroupLock.rego create mode 100755 
pkg/policies/opa/rego/azure/azurerm_role_assignment/accurics.azure.IAM.388.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_role_assignment/checkGuestUser.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/accurics.azure.OPS.349.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/securityCenterPrincingTier.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/accurics.azure.IAM.137.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/sqlServerADPredictableAccount.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.169.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/moreHostsAllowed.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.10.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.138.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.LOG.356.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.MON.354.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlAuditingRetention.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerAuditingEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerPredictableAccount.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.EKM.7.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.2.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.4.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountCheckNetworkDefaultRule.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountEnableHttps.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountOpenToPublic.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountTrustedMicrosoftServicesEnabled.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_container/accurics.azure.IAM.368.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_storage_container/checkStorageContainerAccess.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_virtual_machine/accurics.azure.NS.18.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_virtual_machine/vmAttachedToNetwork.rego create mode 100755 pkg/policies/opa/rego/azure/azurerm_virtual_network/accurics.azure.NS.161.json create mode 100755 pkg/policies/opa/rego/azure/azurerm_virtual_network/noSecurityGroupAssociated.rego diff --git a/pkg/policies/opa/rego/aws/aws_ami_launch_permission/AWS.AMI.NS.Medium.1040.json b/pkg/policies/opa/rego/aws/aws_ami_launch_permission/AWS.AMI.NS.Medium.1040.json new file mode 100755 index 000000000..277464b38 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ami_launch_permission/AWS.AMI.NS.Medium.1040.json @@ -0,0 +1,12 @@ +{ + "name": "amiSharedToMultipleAccounts", + "file": "amiSharedToMultipleAccounts.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Limit access to AWS AMIs", + "referenceId": "AWS.AMI.NS.Medium.1040", + "category": "Network 
Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ami_launch_permission/amiSharedToMultipleAccounts.rego b/pkg/policies/opa/rego/aws/aws_ami_launch_permission/amiSharedToMultipleAccounts.rego new file mode 100755 index 000000000..c217766d2 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ami_launch_permission/amiSharedToMultipleAccounts.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}amiSharedToMultipleAccounts[retVal] { + all_permissions := input.aws_ami_launch_permission + launch_permission := input.aws_ami_launch_permission[_] + + image_id = launch_permission.config.image_id + account_id = launch_permission.config.account_id + + accounts := [ account | all_permissions[i].config.image_id == image_id; account := all_permissions[i].config.account_id ] + count(accounts) > 1 + account_id != accounts[0] + + retVal := { "Id": launch_permission.id, "ReplaceType": "delete", "CodeType": "resource", "Traverse": "", "Attribute": "", "AttributeDataType": "resource", "Expected": null, "Actual": null } +} diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/AWS.API Gateway.Logging.Medium.0569.json b/pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/AWS.API Gateway.Logging.Medium.0569.json new file mode 100755 index 000000000..6b65587b0 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/AWS.API Gateway.Logging.Medium.0569.json @@ -0,0 +1,10 @@ +{ + "name": "apiGatewaySettingMetrics", + "file": "apiGatewaySettingMetrics.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable Detailed CloudWatch Metrics for APIs", + "referenceId": "AWS.API Gateway.Logging.Medium.0569", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/apiGatewaySettingMetrics.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/apiGatewaySettingMetrics.rego new file mode 100755 index 000000000..001f00274 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_method_settings/apiGatewaySettingMetrics.rego @@ -0,0 +1,13 @@ +package accurics + +apiGatewaySettingMetrics[retVal] +{ + api := input.aws_api_gateway_method_settings[_] + some i + data:=api.config.settings[i] + data.metrics_enabled == false + + traverse = sprintf("settings[%d].metrics_enabled",[i]) + retVal := { "Id": api.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "settings.metrics_enabled", "AttributeDataType": "bool", "Expected": true, "Actual": data.metrics_enabled } + +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Medium.0568.json b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Medium.0568.json new file mode 100755 index 000000000..2c84fcb04 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Medium.0568.json @@ -0,0 +1,10 @@ +{ + "name": "apiGatewayContentEncoding", + "file": "apiGatewayContentEncoding.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable Content Encoding", + "referenceId": "AWS.APIGateway.Medium.0568", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Network Security.Medium.0570.json b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Network Security.Medium.0570.json new file mode 100755 index 000000000..a4212322f --- 
/dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/AWS.APIGateway.Network Security.Medium.0570.json @@ -0,0 +1,10 @@ +{ + "name": "apiGatewayEndpointConfig", + "file": "apiGatewayEndpointConfig.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "API Gateway Private Endpoints", + "referenceId": "AWS.APIGateway.Network Security.Medium.0570", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayContentEncoding.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayContentEncoding.rego new file mode 100755 index 000000000..162533594 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayContentEncoding.rego @@ -0,0 +1,6 @@ +package accurics + +apiGatewayContentEncoding[api.id]{ + api := input.aws_api_gateway_rest_api[_] + api.config.minimum_compression_size < 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayEndpointConfig.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayEndpointConfig.rego new file mode 100755 index 000000000..d7d6483d4 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_rest_api/apiGatewayEndpointConfig.rego @@ -0,0 +1,8 @@ +package accurics + +apiGatewayEndpointConfig[api.id]{ + api := input.aws_api_gateway_rest_api[_] + data := api.config.endpoint_configuration[_] + var := data.types[_] + not var == "PRIVATE" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0567.json b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0567.json new file mode 100755 index 000000000..9a2b6b912 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0567.json @@ -0,0 +1,10 @@ +{ + "name": "apiGatewayName", + "file": "apiGatewayName.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable AWS CloudWatch Logs for APIs", + "referenceId": "AWS.API Gateway.Logging.Medium.0567", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0571.json b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0571.json new file mode 100755 index 000000000..30c5192a9 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0571.json @@ -0,0 +1,10 @@ +{ + "name": "apiGatewayTracing", + "file": "apiGatewayTracing.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable Active Tracing", + "referenceId": "AWS.API Gateway.Logging.Medium.0571", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0572.json b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0572.json new file mode 100755 index 000000000..a7bbdcbf4 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Logging.Medium.0572.json @@ -0,0 +1,12 @@ +{ + "name": "apiGatewayLogging", + "file": "apiGatewayLogging.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that AWS CloudWatch logs are enabled for all your APIs created with Amazon API Gateway service in order to track and analyze execution behavior at the API stage level.", + 
"referenceId": "AWS.API Gateway.Logging.Medium.0572", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Network Security.Medium.0565.json b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Network Security.Medium.0565.json new file mode 100755 index 000000000..12d685baf --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/AWS.API Gateway.Network Security.Medium.0565.json @@ -0,0 +1,10 @@ +{ + "name": "apiGatewaySslCertificate", + "file": "apiGatewaySslCertificate.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable SSL Client Certificate", + "referenceId": "AWS.API Gateway.Network Security.Medium.0565", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayLogging.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayLogging.rego new file mode 100755 index 000000000..c3bb3e29a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayLogging.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}apiGatewayLogging[retVal] { + api := input.aws_api_gateway_stage[_] + count(api.config.access_log_settings) == 0 + rc = "ewogICJhY2Nlc3NfbG9nX3NldHRpbmdzIjogewogICAgImRlc3RpbmF0aW9uX2FybiI6ICI8ZGVzdGluYXRpb25fYXJuPiIsCiAgICAiZm9ybWF0IjogIjxmb3JtYXQ+PiIKICB9Cn0=" + traverse = "" + retVal := { "Id": api.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "access_log_settings", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayName.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayName.rego new file mode 100755 index 000000000..954a9eef1 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayName.rego @@ -0,0 +1,12 @@ +package accurics + +apiGatewayName[api.id] { + api := input.aws_api_gateway_stage[_] + not checkExists(api.config.stage_name) +} + +checkExists(val) = true +{ + cloud := input.aws_cloudwatch_log_group[_] + val == cloud.name +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewaySslCertificate.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewaySslCertificate.rego new file mode 100755 index 000000000..632b0a8bb --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewaySslCertificate.rego @@ -0,0 +1,6 @@ +package accurics + +apiGatewaySslCertificate[api.id] { + api := input.aws_api_gateway_stage[_] + api.config.client_certificate_id == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayTracing.rego b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayTracing.rego new file mode 100755 index 000000000..991e80a5a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_api_gateway_stage/apiGatewayTracing.rego @@ -0,0 +1,10 @@ +package accurics + +apiGatewayTracing[retVal] { + api := input.aws_api_gateway_stage[_] + api.config.xray_tracing_enabled == false + + traverse = "xray_tracing_enabled" + retVal := { "Id": api.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "xray_tracing_enabled", "AttributeDataType": "bool", "Expected": true, "Actual": api.config.xray_tracing_enabled } + +} \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/aws/aws_athena/AWS.Athena.Medium.0592.json b/pkg/policies/opa/rego/aws/aws_athena/AWS.Athena.Medium.0592.json new file mode 100755 index 000000000..a3d234c90 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_athena/AWS.Athena.Medium.0592.json @@ -0,0 +1,10 @@ +{ + "name": "athenaQueryEncryption", + "file": "athenaQueryEncryption.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable Encryption for AWS Athena Query Results", + "referenceId": "AWS.Athena.Medium.0592", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_athena/athenaQueryEncryption.rego b/pkg/policies/opa/rego/aws/aws_athena/athenaQueryEncryption.rego new file mode 100755 index 000000000..d988e4582 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_athena/athenaQueryEncryption.rego @@ -0,0 +1,11 @@ +package accurics + +athenaQueryEncryption[api.id]{ + api := input.aws_athena[_] + data := api.config.configuration[_] + resConfig := data.result_configuration[_] + encOpt := resConfig.encryption_configuration[_] + not encOpt.encryption_option == "SSE_KMS" + not encOpt.encryption_option == "CSE_KMS" + not encOpt.encryption_option == "SSE_S3" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0599.json b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0599.json new file mode 100755 index 000000000..dde62a447 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0599.json @@ -0,0 +1,13 @@ +{ + "name": "awsCloudFormationInUse", + "file": "awsCloudFormationInUse.rego", + "templateArgs": { + "name": "awsCloudFormationInUse", + "property": "template_url" + }, + "severity": "MEDIUM", + "description": "AWS CloudFormation Not In Use", + "referenceId": "AWS.CloudFormation.Medium.0599", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0601.json b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0601.json new file mode 100755 index 000000000..409c977b2 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0601.json @@ -0,0 +1,10 @@ +{ + "name": "cloudFormationStackDrift", + "file": "cloudFormationStackDrift.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "AWS CloudFormation Has Been Drifted.", + "referenceId": "AWS.CloudFormation.Medium.0601", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0603.json b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0603.json new file mode 100755 index 000000000..1d8ff70f7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0603.json @@ -0,0 +1,13 @@ +{ + "name": "cloudFormationStackNotifs", + "file": "cloudFormationStackNotifs.rego", + "templateArgs": { + "name": "cloudFormationStackNotifs", + "property": "notification_arns" + }, + "severity": "MEDIUM", + "description": "Enable AWS CloudFormation Stack Notifications", + "referenceId": "AWS.CloudFormation.Medium.0603", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0604.json 
b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0604.json new file mode 100755 index 000000000..304c39752 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0604.json @@ -0,0 +1,13 @@ +{ + "name": "cloudFormationStackPolicy", + "file": "cloudFormationStackPolicy.rego", + "templateArgs": { + "name": "cloudFormationStackPolicy", + "property": "policy_url" + }, + "severity": "MEDIUM", + "description": "AWS CloudFormation Stack Policy", + "referenceId": "AWS.CloudFormation.Medium.0604", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0605.json b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0605.json new file mode 100755 index 000000000..9cc88e624 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/AWS.CloudFormation.Medium.0605.json @@ -0,0 +1,10 @@ +{ + "name": "cloudFormationTerminationProtection", + "file": "cloudFormationTerminationProtection.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable AWS CloudFormation Stack Termination Protection", + "referenceId": "AWS.CloudFormation.Medium.0605", + "category": " ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/awsCloudFormationInUse.rego b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/awsCloudFormationInUse.rego new file mode 100755 index 000000000..797e2ac2c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/awsCloudFormationInUse.rego @@ -0,0 +1,8 @@ +package accurics + +{{.name}}[api.id] +{ + api := input.aws_cloudformation_stack[_] + api.config.{{.property}} == null +} + diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackDrift.rego b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackDrift.rego new file mode 100755 index 000000000..56a9a0fd3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackDrift.rego @@ -0,0 +1,8 @@ +package accurics + +cloudFormationStackDrift[api.id] +{ + api := input.aws_config_config_rule[_] + data := api.config.source[_] + data.source_identifier == "CLOUDFORMATION_STACK_DRIFT_DETECTION_CHECK" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackNotifs.rego b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackNotifs.rego new file mode 100755 index 000000000..2935d817c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackNotifs.rego @@ -0,0 +1,7 @@ +package accurics + +{{.name}}[api.id] +{ + api := input.aws_cloudformation_stack[_] + api.config.{{.property}} == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackPolicy.rego b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackPolicy.rego new file mode 100755 index 000000000..2935d817c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationStackPolicy.rego @@ -0,0 +1,7 @@ +package accurics + +{{.name}}[api.id] +{ + api := input.aws_cloudformation_stack[_] + api.config.{{.property}} == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationTerminationProtection.rego 
b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationTerminationProtection.rego new file mode 100755 index 000000000..f38dd66e3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudformation_stack/cloudFormationTerminationProtection.rego @@ -0,0 +1,7 @@ +package accurics + +cloudFormationTerminationProtection[api.id] +{ + api := input.aws_cloudformation_stack_set_instance[_] + api.config.retain_stack == false +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Network Security.Low.0568.json b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Network Security.Low.0568.json new file mode 100755 index 000000000..efffc8f0d --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/AWS.CloudFront.Network Security.Low.0568.json @@ -0,0 +1,12 @@ +{ + "name": "cloudfrontNoGeoRestriction", + "file": "cloudfrontNoGeoRestriction.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "LOW", + "description": "Ensure that geo restriction is enabled for your Amazon CloudFront CDN distribution to whitelist or blacklist a country in order to allow or restrict users in specific locations from accessing web application content.", + "referenceId": "AWS.CloudFront.Network Security.Low.0568", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoGeoRestriction.rego b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoGeoRestriction.rego new file mode 100755 index 000000000..45f3091dc --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudfront_distribution/cloudfrontNoGeoRestriction.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}cloudfrontNoGeoRestriction[retVal] { + cloudfront = input.aws_cloudfront_distribution[_] + some i, j + restrict = cloudfront.config.restrictions[i] + restrict.geo_restriction[j].restriction_type == "none" + traverse := sprintf("restrictions[%d].geo_restriction[%d].restriction_type", [i, j]) + retVal := { "Id": cloudfront.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "restrictions.geo_restriction.restriction_type", "AttributeDataType": "string", "Expected": "whitelist", "Actual": restrict.geo_restriction[j].restriction_type } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudwatch/AWS.CloudWatch.Logging.Medium.0631.json b/pkg/policies/opa/rego/aws/aws_cloudwatch/AWS.CloudWatch.Logging.Medium.0631.json new file mode 100755 index 000000000..c352c35dc --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudwatch/AWS.CloudWatch.Logging.Medium.0631.json @@ -0,0 +1,10 @@ +{ + "name": "awsCloudWatchRetentionPreiod", + "file": "awsCloudWatchRetentionPreiod.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "App-Tier CloudWatch Log Group Retention Period", + "referenceId": "AWS.CloudWatch.Logging.Medium.0631", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_cloudwatch/awsCloudWatchRetentionPreiod.rego b/pkg/policies/opa/rego/aws/aws_cloudwatch/awsCloudWatchRetentionPreiod.rego new file mode 100755 index 000000000..26bcadf61 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_cloudwatch/awsCloudWatchRetentionPreiod.rego @@ -0,0 +1,10 @@ +package accurics + +awsCloudWatchRetentionPreiod[retVal] +{ + api := input.aws_cloudwatch_log_group[_] + api.config.retention_in_days == 0 + + traverse = "retention_in_days" +
retVal := { "Id": api.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "retention_in_days", "AttributeDataType": "integer", "Expected": "", "Actual": api.config.retention_in_days } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_config/AWS.Config.Encryption&KeyManagement.Medium.0660.json b/pkg/policies/opa/rego/aws/aws_config/AWS.Config.Encryption&KeyManagement.Medium.0660.json new file mode 100755 index 000000000..3766b2974 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_config/AWS.Config.Encryption&KeyManagement.Medium.0660.json @@ -0,0 +1,10 @@ +{ + "name": "awsConfigEncryptedVol", + "file": "awsConfigEncryptedVol.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "AWS Config Rule for Web-Tier Encrypted Volumes", + "referenceId": "AWS.Config.Encryption\u0026KeyManagement.Medium.0660", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_config/awsConfigEncryptedVol.rego b/pkg/policies/opa/rego/aws/aws_config/awsConfigEncryptedVol.rego new file mode 100755 index 000000000..12a45e73f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_config/awsConfigEncryptedVol.rego @@ -0,0 +1,8 @@ +package accurics + +awsConfigEncryptedVol[api.id] +{ + api := input.aws_config_config_rule[_] + data := api.config.source[_] + not data.source_identifier == "ENCRYPTED_VOLUMES" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/AWS.Config.Logging.HIGH.0590.json b/pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/AWS.Config.Logging.HIGH.0590.json new file mode 100755 index 000000000..5d5e24a38 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/AWS.Config.Logging.HIGH.0590.json @@ -0,0 +1,12 @@ +{ + "name": "configEnabledForAllRegions", + "file": "configEnabledForAllRegions.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure AWS Config is enabled in all regions", + "referenceId": "AWS.Config.Logging.HIGH.0590", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/configEnabledForAllRegions.rego b/pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/configEnabledForAllRegions.rego new file mode 100755 index 000000000..54be9bf46 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_config_configuration_aggregator/configEnabledForAllRegions.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}configEnabledForAllRegions[retVal]{ + con = input.aws_config_configuration_aggregator[_] + some i + ag_source = con.config.account_aggregation_source[i] + # need some logic to guess ReplaceType as add / edit, we get this value in both cases + ag_source.all_regions == false + traverse = sprintf("account_aggregation_source[%d].all_regions", [i]) + retVal := { "Id": con.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "account_aggregation_source.all_regions", "AttributeDataType": "boolean", "Expected": true, "Actual": ag_source.all_regions } +} + +{{.prefix}}configEnabledForAllRegions[retVal]{ + con = input.aws_config_configuration_aggregator[_] + some i + ag_source = con.config.organization_aggregation_source[i] + # need some logic to guess ReplaceType as add / edit, we get this value in both cases + ag_source.all_regions == false + traverse = 
sprintf("organization_aggregation_source[%d].all_regions", [i]) + retVal := { "Id": con.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "organization_aggregation_source.all_regions", "AttributeDataType": "boolean", "Expected": true, "Actual": ag_source.all_regions } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/AWS.AWS RDS.NS.High.0101.json b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.AWS RDS.NS.High.0101.json new file mode 100755 index 000000000..1f2eb29ea --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.AWS RDS.NS.High.0101.json @@ -0,0 +1,12 @@ +{ + "name": "rdsPubliclyAccessible", + "file": "rdsPubliclyAccessible.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS Instance publicly_accessible flag is true", + "referenceId": "AWS.AWS RDS.NS.High.0101", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1041.json b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1041.json new file mode 100755 index 000000000..686c80a30 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1041.json @@ -0,0 +1,12 @@ +{ + "name": "rdsAutoMinorVersionUpgradeEnabled", + "file": "rdsAutoMinorVersionUpgradeEnabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS Instance Auto Minor Version Upgrade flag disabled", + "referenceId": "AWS.RDS.DS.High.1041", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1042.json b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1042.json new file mode 100755 index 000000000..c96c1d5cd --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DS.High.1042.json @@ -0,0 +1,12 @@ +{ + "name": "rdsCAExpired", + "file": "rdsCAExpired.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure Certificate used in RDS instance is updated", + "referenceId": "AWS.RDS.DS.High.1042", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0414.json b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0414.json new file mode 100755 index 000000000..a00bcec75 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0414.json @@ -0,0 +1,10 @@ +{ + "name": "rdsHasStorageEncrypted", + "file": "rdsHasStorageEncrypted.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that your RDS database instances have automated backups enabled for point-in-time recovery. 
To back up your database instances, AWS RDS take automatically a full daily snapshot of your data (with transactions logs) during the specified backup window and keeps the backups for a limited period of time (known as retention period) defined by the instance owner.", + "referenceId": "AWS.RDS.DataSecurity.High.0414", + "category": "Data Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0577.json b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0577.json new file mode 100755 index 000000000..041946158 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/AWS.RDS.DataSecurity.High.0577.json @@ -0,0 +1,12 @@ +{ + "name": "rdsIamAuthEnabled", + "file": "rdsIamAuthEnabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that your RDS database has IAM Authentication enabled.", + "referenceId": "AWS.RDS.DataSecurity.High.0577", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsAutoMinorVersionUpgradeEnabled.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsAutoMinorVersionUpgradeEnabled.rego new file mode 100755 index 000000000..9183bcb46 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsAutoMinorVersionUpgradeEnabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}rdsAutoMinorVersionUpgradeEnabled[retVal] { + db := input.aws_db_instance[_] + db.config.auto_minor_version_upgrade == false + traverse = "auto_minor_version_upgrade" + retVal := { "Id": db.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "auto_minor_version_upgrade", "AttributeDataType": "bool", "Expected": true, "Actual": db.config.auto_minor_version_upgrade } +} + diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsCAExpired.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsCAExpired.rego new file mode 100755 index 000000000..8a33ede52 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsCAExpired.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}rdsCAExpired[retVal] { + rds = input.aws_db_instance[_] + rds.config.ca_cert_identifier != "rds-ca-2019" + traverse = "ca_cert_identifier" + retVal := { "Id": rds.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ca_cert_identifier", "AttributeDataType": "string", "Expected": "rds-ca-2019", "Actual": rds.config.ca_cert_identifier} +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsHasStorageEncrypted.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsHasStorageEncrypted.rego new file mode 100755 index 000000000..19c0b5051 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsHasStorageEncrypted.rego @@ -0,0 +1,21 @@ +package accurics + +rdsHasStorageEncrypted[data.id]{ + data := input.aws_db_instance[_] + data.config.storage_encrypted == null +} + +rdsHasStorageEncrypted[data.id]{ + data := input.aws_db_instance[_] + data.config.storage_encrypted == false +} + +rdsHasStorageEncrypted[data.id]{ + data := input.aws_db_instance[_] + not data.config.kms_key_id +} + +rdsHasStorageEncrypted[data.id]{ + data := input.aws_db_instance[_] + data.config.kms_key_id == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsIamAuthEnabled.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsIamAuthEnabled.rego new file mode 100755 index 
000000000..c5de90fb3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsIamAuthEnabled.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}rdsIamAuthEnabled[retVal] { + rds := input.aws_db_instance[_] + not rds.config.iam_database_authentication_enabled + traverse = "iam_database_authentication_enabled" + retVal := { "Id": rds.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "iam_database_authentication_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": null } +} + +{{.prefix}}rdsIamAuthEnabled[retVal] { + rds := input.aws_db_instance[_] + rds.config.iam_database_authentication_enabled == false + traverse = "iam_database_authentication_enabled" + retVal := { "Id": rds.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "iam_database_authentication_enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": rds.config.iam_database_authentication_enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego new file mode 100755 index 000000000..601e8c85e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_instance/rdsPubliclyAccessible.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}rdsPubliclyAccessible[retVal] { + db := input.aws_db_instance[_] + db.config.publicly_accessible == true + traverse = "publicly_accessible" + retVal := { "Id": db.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": db.config.publicly_accessible } +} + diff --git a/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0101.json b/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0101.json new file mode 100755 index 000000000..0185a8531 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0101.json @@ -0,0 +1,12 @@ +{ + "name": "rdsIsPublic", + "file": "rdsIsPublic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS should not be defined with public interface. Firewall and router configurations should be used to restrict connections between untrusted networks and any system components in the cloud environment.", + "referenceId": "AWS.RDS.NetworkSecurity.High.0101", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0102.json b/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0102.json new file mode 100755 index 000000000..b4e39d29f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0102.json @@ -0,0 +1,12 @@ +{ + "name": "rdsScopeIsPublic", + "file": "rdsScopeIsPublic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS should not be open to a public scope. 
Firewall and router configurations should be used to restrict connections between untrusted networks and any system components in the cloud environment.", + "referenceId": "AWS.RDS.NetworkSecurity.High.0102", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0103.json b/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0103.json new file mode 100755 index 000000000..53ed03b4c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_security_group/AWS.RDS.NetworkSecurity.High.0103.json @@ -0,0 +1,12 @@ +{ + "name": "rdsHostsHigherThan256", + "file": "rdsHostsHigherThan256.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "RDS should not be open to a large scope. Firewall and router configurations should be used to restrict connections between untrusted networks and any system components in the cloud environment.", + "referenceId": "AWS.RDS.NetworkSecurity.High.0103", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_security_group/rdsHostsHigherThan256.rego b/pkg/policies/opa/rego/aws/aws_db_security_group/rdsHostsHigherThan256.rego new file mode 100755 index 000000000..92e05d7e2 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_security_group/rdsHostsHigherThan256.rego @@ -0,0 +1,12 @@ +package accurics + +{{.prefix}}rdsHostsHigherThan256[retVal] { + sg = input.aws_db_security_group[_] + some i + ingress = sg.config.ingress[i] + hosts = split(ingress.cidr, "/") + to_number(hosts[1]) <= 24 + expected := sprintf("%s/%d", [hosts[0], 23]) + traverse := sprintf("ingress[%d].cidr", [i]) + retVal := { "Id": sg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ingress.cidr", "AttributeDataType": "list", "Expected": expected, "Actual": ingress.cidr } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_security_group/rdsIsPublic.rego b/pkg/policies/opa/rego/aws/aws_db_security_group/rdsIsPublic.rego new file mode 100755 index 000000000..5fc047ea7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_security_group/rdsIsPublic.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}rdsIsPublic[retVal] { + sg = input.aws_db_security_group[_] + some i + ingress = sg.config.ingress[i] + ingress.cidr == "0.0.0.0/0" + traverse := sprintf("ingress[%d].cidr", [i]) + retVal := { "Id": sg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ingress.cidr", "AttributeDataType": "list", "Expected": "", "Actual": ingress.cidr } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_db_security_group/rdsScopeIsPublic.rego b/pkg/policies/opa/rego/aws/aws_db_security_group/rdsScopeIsPublic.rego new file mode 100755 index 000000000..fc4f89498 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_db_security_group/rdsScopeIsPublic.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}rdsScopeIsPublic[retVal] { + sg = input.aws_db_security_group[_] + some i + ingress = sg.config.ingress[i] + checkScopeIsPublic(ingress.cidr) + traverse := sprintf("ingress[%d].cidr", [i]) + retVal := { "Id": sg.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ingress.cidr", "AttributeDataType": "list", "Expected": "", "Actual": ingress.cidr } +} + +scopeIsPrivate(scope) { + private_ips = ["10.0.0.0/8", "192.168.0.0/16", "172.16.0.0/12"] + 
net.cidr_contains(private_ips[_], scope) +} + +checkScopeIsPublic(val) { + not scopeIsPrivate(val) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/AWS.EBS.DataSecurity.High.0580.json b/pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/AWS.EBS.DataSecurity.High.0580.json new file mode 100755 index 000000000..fa17f9bfc --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/AWS.EBS.DataSecurity.High.0580.json @@ -0,0 +1,12 @@ +{ + "name": "ebsDefaultEncryption", + "file": "ebsDefaultEncryption.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that AWS EBS volumes that hold sensitive and critical data are encrypted by default to fulfill compliance requirements for data-at-rest encryption.", + "referenceId": "AWS.EBS.DataSecurity.High.0580", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/ebsDefaultEncryption.rego b/pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/ebsDefaultEncryption.rego new file mode 100755 index 000000000..be3e2912c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ebs_encryption_by_default/ebsDefaultEncryption.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}ebsDefaultEncryption[retVal] { + ebsEncrypt := input.aws_ebs_encryption_by_default[_] + ebsEncrypt.config.enabled == false + + traverse = "enabled" + retVal := { "Id": ebsEncrypt.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "enabled", "AttributeDataType": "bool", "Expected": true, "Actual": ebsEncrypt.config.enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ebs_volume/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json b/pkg/policies/opa/rego/aws/aws_ebs_volume/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json new file mode 100755 index 000000000..33ac2d40e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ebs_volume/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json @@ -0,0 +1,13 @@ +{ + "name": "ebsVolumeEncryptedWithNoKms", + "file": "ebsEncryption.rego", + "templateArgs": { + "name": "ebsVolumeEncryptedWithNoKms", + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that AWS EBS volumes are encrypted.
Data encryption at rest prevents unauthorized users from accessing sensitive data on your AWS EBS volumes.", + "referenceId": "AWS.EcsCluster.EncryptionandKeyManagement.High.0413", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ebs_volume/ebsEncryption.rego b/pkg/policies/opa/rego/aws/aws_ebs_volume/ebsEncryption.rego new file mode 100755 index 000000000..4e584b36c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ebs_volume/ebsEncryption.rego @@ -0,0 +1,22 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + block := input.aws_ebs_volume[_] + checkEncryption(block.config) == true + + traverse = "encrypted" + retVal := { "Id": block.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": false } +} + +checkEncryption(c) = true { + not c.encrypted +} + +checkEncryption(c) = true { + c.encrypted == false +} + +checkEncryption(c) = true { + c.encrypted == true + not c.kms_key_id +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ec2/AWS.EC2.Encryption&KeyManagement.Medium.0688.json b/pkg/policies/opa/rego/aws/aws_ec2/AWS.EC2.Encryption&KeyManagement.Medium.0688.json new file mode 100755 index 000000000..d628d7067 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ec2/AWS.EC2.Encryption&KeyManagement.Medium.0688.json @@ -0,0 +1,10 @@ +{ + "name": "awsAmiEncrypted", + "file": "awsAmiEncrypted.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable AWS AMI Encryption", + "referenceId": "AWS.EC2.Encryption\u0026KeyManagement.Medium.0688", + "category": "Encryption \u0026 KeyManagement", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ec2/awsAmiEncrypted.rego b/pkg/policies/opa/rego/aws/aws_ec2/awsAmiEncrypted.rego new file mode 100755 index 000000000..8340c0530 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ec2/awsAmiEncrypted.rego @@ -0,0 +1,8 @@ +package accurics + +awsAmiEncrypted[api.id] +{ + api := input.aws_ami[_] + data := api.config.ebs_block_device[_] + not data.encrypted == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecr_repository/AWS.ECR.DataSecurity.High.0578.json b/pkg/policies/opa/rego/aws/aws_ecr_repository/AWS.ECR.DataSecurity.High.0578.json new file mode 100755 index 000000000..4a769c4ab --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecr_repository/AWS.ECR.DataSecurity.High.0578.json @@ -0,0 +1,12 @@ +{ + "name": "scanOnPushDisabled", + "file": "scanOnPushDisabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Unscanned images may contain vulnerabilities", + "referenceId": "AWS.ECR.DataSecurity.High.0578", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecr_repository/scanOnPushDisabled.rego b/pkg/policies/opa/rego/aws/aws_ecr_repository/scanOnPushDisabled.rego new file mode 100755 index 000000000..a2cb8802f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecr_repository/scanOnPushDisabled.rego @@ -0,0 +1,16 @@ +package accurics + +{{.prefix}}scanOnPushDisabled[retVal] { + imageScan := input.aws_ecr_repository[_] + imageScan.config.image_scanning_configuration == [] + traverse = "image_scanning_configuration[0].scan_on_push" + retVal := { "Id": imageScan.id, "ReplaceType": "add", "CodeType": "attribute",
"Traverse": traverse, "Attribute": "image_scanning_configuration.scan_on_push", "AttributeDataType": "bool", "Expected": true, "Actual": imageScan.config.image_scanning_configuration[_].scan_on_push } +} + +{{.prefix}}scanOnPushDisabled[retVal] { + imageScan := input.aws_ecr_repository[_] + some i + imageScan.config.image_scanning_configuration[i].scan_on_push == false + traverse := sprintf("image_scanning_configuration[%d].scan_on_push", [i]) + retVal := { "Id": imageScan.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "image_scanning_configuration.scan_on_push", "AttributeDataType": "bool", "Expected": true, "Actual": imageScan.config.image_scanning_configuration[_].scan_on_push } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecr_repository_policy/AWS.ECR.DataSecurity.High.0579.json b/pkg/policies/opa/rego/aws/aws_ecr_repository_policy/AWS.ECR.DataSecurity.High.0579.json new file mode 100755 index 000000000..b26bd0725 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecr_repository_policy/AWS.ECR.DataSecurity.High.0579.json @@ -0,0 +1,12 @@ +{ + "name": "ecrRepoIsPublic", + "file": "ecrRepoIsPublic.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Identify any exposed Amazon ECR image repositories available within your AWS account and update their permissions in order to protect against unauthorized access. Amazon Elastic Container Registry (ECR) is a managed Docker registry service that makes it easy for DevOps teams to store, manage and deploy Docker container images. An ECR repository is a collection of Docker images available on AWS cloud.", + "referenceId": "AWS.ECR.DataSecurity.High.0579", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecr_repository_policy/ecrRepoIsPublic.rego b/pkg/policies/opa/rego/aws/aws_ecr_repository_policy/ecrRepoIsPublic.rego new file mode 100755 index 000000000..7b39e0565 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecr_repository_policy/ecrRepoIsPublic.rego @@ -0,0 +1,40 @@ +package accurics + + +{{.prefix}}ecrRepoIsPublic[retVal] { + repo = input.aws_ecr_repository_policy[_] + policy := json_unmarshal(repo.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "Allow", "*") == true + + statements := [ item | item := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": repo.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "Allow", "*") == true + # value := object.union(statement, { "Principal": { "AWS": "arn:aws:iam::##account_number##:root"} }) + value := statement +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "Allow", "*") + value := statement +} + +policyCheck(s, e ,r) = true { + s.Effect == e + s.Principal == r +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecs_service/AWS.ECS.High.0436.json b/pkg/policies/opa/rego/aws/aws_ecs_service/AWS.ECS.High.0436.json new file mode 100755 index 000000000..69d361249 --- /dev/null +++ 
b/pkg/policies/opa/rego/aws/aws_ecs_service/AWS.ECS.High.0436.json @@ -0,0 +1,10 @@ +{ + "name": "ecsServiceAdmin", + "file": "ecsServiceAdmin.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that ECS services are not assigned IAM roles with administrative privileges; grant only the permissions the service needs to perform its task.", + "referenceId": "AWS.ECS.High.0436", + "category": "Data Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecs_service/ecsServiceAdmin.rego b/pkg/policies/opa/rego/aws/aws_ecs_service/ecsServiceAdmin.rego new file mode 100755 index 000000000..7df48ef2b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecs_service/ecsServiceAdmin.rego @@ -0,0 +1,6 @@ +package accurics + +ecsServiceAdmin[data.id] { + data := input.aws_ecs_service[_] + contains(data.config.iam_role, "admin") +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.EcsCluster.NetworkSecurity.High.0104.json b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.EcsCluster.NetworkSecurity.High.0104.json new file mode 100755 index 000000000..fa8cfc308 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.EcsCluster.NetworkSecurity.High.0104.json @@ -0,0 +1,12 @@ +{ + "name": "instanceNotInVpc", + "file": "instanceNotInVpc.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Like any other EC2 instance, it is recommended to place ECS instances within a VPC.
AWS VPCs provide the controls to facilitate a formal process for approving and testing all network connections and changes to the firewall and router configurations.", + "referenceId": "AWS.EcsCluster.NetworkSecurity.High.0104", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.LaunchConfiguration.DataSecurity.High.0101.json b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.LaunchConfiguration.DataSecurity.High.0101.json new file mode 100755 index 000000000..f9777c044 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/AWS.LaunchConfiguration.DataSecurity.High.0101.json @@ -0,0 +1,14 @@ +{ + "name": "containerDefinitionContainsPASSWORD", + "file": "containerDefinitionContainsSensitiveInfo.rego", + "templateArgs": { + "keyword": "PASSWORD", + "name": "containerDefinitionContainsPASSWORD", + "prefix": "" + }, + "severity": "HIGH", + "description": "Sensitive Information Disclosure", + "referenceId": "AWS.LaunchConfiguration.DataSecurity.High.0101", + "category": "Data Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecs_task_definition/containerDefinitionContainsSensitiveInfo.rego b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/containerDefinitionContainsSensitiveInfo.rego new file mode 100755 index 000000000..52c2c54eb --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/containerDefinitionContainsSensitiveInfo.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}{{.name}}[instance.id]{ + instance := input.aws_ecs_task_definition[_] + taskDef := instance.config.container_definitions + taskDefJson := json_unmarshal(taskDef) + envEntry := taskDefJson[_].environment[_] + contains(upper(envEntry.name), upper("{{.keyword}}")) +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_ecs_task_definition/instanceNotInVpc.rego b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/instanceNotInVpc.rego new file mode 100755 index 000000000..63e255820 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_ecs_task_definition/instanceNotInVpc.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}instanceNotInVpc[retVal] { + instance := input.aws_ecs_task_definition[_] + instance.config.network_mode != "awsvpc" + traverse = "network_mode" + retVal := { "Id": instance.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "network_mode", "AttributeDataType": "string", "Expected": "awsvpc", "Actual": instance.config.network_mode} +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0409.json b/pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0409.json new file mode 100755 index 000000000..30b816476 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0409.json @@ -0,0 +1,13 @@ +{ + "name": "efsEncryptedFalse", + "file": "efsEncryptedFalse.rego", + "templateArgs": { + "name": "efsEncryptedFalse", + "prefix": "" + }, + "severity": "HIGH", + "description": "Enable encryption of your EFS file systems in order to protect your data and metadata from breaches or unauthorized access and fulfill compliance requirements for data-at-rest encryption within your
organization.", + "referenceId": "AWS.EFS.EncryptionandKeyManagement.High.0409", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0410.json b/pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0410.json new file mode 100755 index 000000000..3f8e4e2c7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_efs_file_system/AWS.EFS.EncryptionandKeyManagement.High.0410.json @@ -0,0 +1,13 @@ +{ + "name": "efsEncryptedWithNoKms", + "file": "efsEncryptedWithNoKms.rego", + "templateArgs": { + "name": "efsEncryptedWithNoKms", + "prefix": "" + }, + "severity": "HIGH", + "description": "Enable encryption of your EFS file systems in order to protect your data and metadata from breaches or unauthorized access and fulfill compliance requirements for data-at-rest encryption within your organization.", + "referenceId": "AWS.EFS.EncryptionandKeyManagement.High.0410", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedFalse.rego b/pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedFalse.rego new file mode 100755 index 000000000..4e5f97f8e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedFalse.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}{{.name}}[retVal]{ + efs_file = input.aws_efs_file_system[_] + efs_file.config.encrypted == false + traverse = "encrypted" + retVal := { "Id": efs_file.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": efs_file.config.encrypted } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedWithNoKms.rego b/pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedWithNoKms.rego new file mode 100755 index 000000000..ef0b3f034 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_efs_file_system/efsEncryptedWithNoKms.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}{{.name}}[retVal]{ + efs_file = input.aws_efs_file_system[_] + efs_file.config.encrypted == true + not efs_file.config.kms_key_id + traverse = "kms_key_id" + retVal := { "Id": efs_file.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_key_id", "AttributeDataType": "string", "Expected": "", "Actual": null } +} diff --git a/pkg/policies/opa/rego/aws/aws_efs_file_system/efsNotEncrypted.rego b/pkg/policies/opa/rego/aws/aws_efs_file_system/efsNotEncrypted.rego new file mode 100755 index 000000000..5ad2195ff --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_efs_file_system/efsNotEncrypted.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + efs_file = input.aws_efs_file_system[_] + not efs_file.config.encrypted + traverse = "encrypted" + retVal := { "Id": efs_file.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0424.json b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0424.json new file mode 100755 index 000000000..b629a4e66 --- /dev/null +++ 
b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0424.json @@ -0,0 +1,14 @@ +{ + "name": "noMemcachedInElastiCache", + "file": "noMemcachedInElastiCache.rego", + "templateArgs": { + "elasticache_engine": "memcached", + "name": "noMemcachedInElastiCache", + "prefix": "" + }, + "severity": "HIGH", + "description": "ElastiCache for Memcached is not in use in AWS PCI DSS environments", + "referenceId": "AWS.ElastiCache.DataSecurity.High.0424", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0425.json b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0425.json new file mode 100755 index 000000000..eddc3d33a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.DataSecurity.High.0425.json @@ -0,0 +1,19 @@ +{ + "name": "redisVersionCompliance", + "file": "redisVersionCompliance.rego", + "templateArgs": { + "allowed_versions": [ + "3.2.6", + "3.2.10", + "4.0.10", + "5.0.0" + ], + "engine": "redis", + "prefix": "" + }, + "severity": "HIGH", + "description": "ElastiCache for Redis version is not compliant with AWS PCI DSS requirements", + "referenceId": "AWS.ElastiCache.DataSecurity.High.0425", + "category": "Data Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.HighAvailability.Medium.0757.json b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.HighAvailability.Medium.0757.json new file mode 100755 index 000000000..4d012a410 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/AWS.ElastiCache.HighAvailability.Medium.0757.json @@ -0,0 +1,10 @@ +{ + "name": "elastiCacheMultiAZ", + "file": "elastiCacheMultiAZ.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "AWS ElastiCache Multi-AZ", + "referenceId": "AWS.ElastiCache.HighAvailability.Medium.0757", + "category": "High Availability", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticache_cluster/elastiCacheMultiAZ.rego b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/elastiCacheMultiAZ.rego new file mode 100755 index 000000000..f39dfddac --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/elastiCacheMultiAZ.rego @@ -0,0 +1,7 @@ +package accurics + +elastiCacheMultiAZ[api.id] +{ + api := input.aws_elasticache_cluster[_] + not api.config.az_mode == "cross-az" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticache_cluster/noMemcachedInElastiCache.rego b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/noMemcachedInElastiCache.rego new file mode 100755 index 000000000..1b31861ef --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/noMemcachedInElastiCache.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}noMemcachedInElastiCache[retVal] { + elasticache = input.aws_elasticache_cluster[_] + elasticache.config.engine != "redis" + traverse = "engine" + retVal := { "Id": elasticache.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "engine", "AttributeDataType": "string", "Expected": "redis", "Actual": elasticache.config.engine } +} diff --git a/pkg/policies/opa/rego/aws/aws_elasticache_cluster/redisVersionCompliance.rego b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/redisVersionCompliance.rego new file mode 100755 index 000000000..ad14ce4c8
--- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticache_cluster/redisVersionCompliance.rego @@ -0,0 +1,18 @@ +package accurics + +{{.prefix}}redisVersionCompliance[retVal] { + elasticache = input.aws_elasticache_cluster[_] + elasticache.config.engine == "redis" + engine_version = elasticache.config.engine_version + min_version_string = "4.0.10" + min_version = eval_version_number(min_version_string) + actual_version = eval_version_number(engine_version) + actual_version < min_version + traverse = "engine_version" + retVal := { "Id": elasticache.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "engine_version", "AttributeDataType": "string", "Expected": min_version_string, "Actual": engine_version } +} + +eval_version_number(engine_version) = numeric_version { + version = split(engine_version, ".") + numeric_version = to_number(version[0]) * 100 + to_number(version[1]) * 10 + to_number(version[2]) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0768.json b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0768.json new file mode 100755 index 000000000..a08e462e8 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0768.json @@ -0,0 +1,10 @@ +{ + "name": "elastiSearchNoKms", + "file": "elastiSearchNoKms.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "ElasticSearch Domain Encrypted with KMS CMKs", + "referenceId": "AWS.ElasticSearch.Encryption\u0026KeyManagement.Medium.0768", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0778.json b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0778.json new file mode 100755 index 000000000..49a76f02c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.ElasticSearch.Encryption&KeyManagement.Medium.0778.json @@ -0,0 +1,10 @@ +{ + "name": "elastiSearchEncryptAtRest", + "file": "elastiSearchEncryptAtRest.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable AWS ElasticSearch Encryption At Rest", + "referenceId": "AWS.ElasticSearch.Encryption\u0026KeyManagement.Medium.0778", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.Elasticsearch.Logging.Medium.0573.json b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.Elasticsearch.Logging.Medium.0573.json new file mode 100755 index 000000000..d1f38ea45 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/AWS.Elasticsearch.Logging.Medium.0573.json @@ -0,0 +1,12 @@ +{ + "name": "esloggingdisabled", + "file": "esloggingdisabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that your AWS Elasticsearch clusters have enabled the support for publishing slow logs to AWS CloudWatch Logs. 
This feature enables you to publish slow logs from the indexing and search operations performed on your ES clusters and gain full insight into the performance of these operations.", + "referenceId": "AWS.Elasticsearch.Logging.Medium.0573", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchEncryptAtRest.rego b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchEncryptAtRest.rego new file mode 100755 index 000000000..e166d0945 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchEncryptAtRest.rego @@ -0,0 +1,22 @@ +package accurics + +elastiSearchEncryptAtRest[api.id] +{ + api := input.aws_elasticsearch_domain[_] + not api.config.encrypt_at_rest +} + +elastiSearchEncryptAtRest[api.id] +{ + api := input.aws_elasticsearch_domain[_] + data := api.config.encrypt_at_rest[_] + not data.enabled +} + +elastiSearchEncryptAtRest[api.id] +{ + api := input.aws_elasticsearch_domain[_] + data := api.config.encrypt_at_rest[_] + data.enabled == false +} + diff --git a/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchNoKms.rego b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchNoKms.rego new file mode 100755 index 000000000..cb948a9e3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/elastiSearchNoKms.rego @@ -0,0 +1,15 @@ +package accurics + +elastiSearchNoKms[api.id] +{ + api := input.aws_elasticsearch_domain[_] + data := api.config.encrypt_at_rest[_] + not data.kms_key_id +} + +elastiSearchNoKms[api.id] +{ + api := input.aws_elasticsearch_domain[_] + data := api.config.encrypt_at_rest[_] + data.kms_key_id == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/esloggingdisabled.rego b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/esloggingdisabled.rego new file mode 100755 index 000000000..418e4581a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elasticsearch_domain/esloggingdisabled.rego @@ -0,0 +1,36 @@ +package accurics + +{{.prefix}}esloggingdisabled[retVal] { + esin := input.aws_elasticsearch_domain[_] + esin.config.log_publishing_options == [] + esin.type == "aws_elasticsearch_domain" + rc = "ewogICJsb2dfcHVibGlzaGluZ19vcHRpb25zIjogewogICAgImNsb3Vkd2F0Y2hfbG9nX2dyb3VwX2FybiI6ICI8Y2xvdWR3YXRjaF9sb2dfZ3JvdXBfYXJuPiIsCiAgICAibG9nX3R5cGUiOiAiPGxvZ190eXBlPiIsCiAgICAiZW5hYmxlZCI6IHRydWUKICB9Cn0=" + traverse = "" + retVal := { "Id": esin.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "", "AttributeDataType": "block", "Expected": rc, "Actual": null } +} + +{{.prefix}}esloggingdisabled[retVal] { + esin := input.aws_elasticsearch_domain[_] + esin.type == "aws_elasticsearch_domain" + not esin.config.log_publishing_options + rc = "ewogICJsb2dfcHVibGlzaGluZ19vcHRpb25zIjogewogICAgImNsb3Vkd2F0Y2hfbG9nX2dyb3VwX2FybiI6ICI8Y2xvdWR3YXRjaF9sb2dfZ3JvdXBfYXJuPiIsCiAgICAibG9nX3R5cGUiOiAiPGxvZ190eXBlPiIsCiAgICAiZW5hYmxlZCI6IHRydWUKICB9Cn0=" + traverse = "" + retVal := { "Id": esin.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "", "AttributeDataType": "block", "Expected": rc, "Actual": null } +} + +{{.prefix}}esloggingdisabled[retVal] { + esin := input.aws_elasticsearch_domain[_] + esin.type == "aws_elasticsearch_domain" + some i + esin.config.log_publishing_options[i].log_type != "INDEX_SLOW_LOGS" + traverse := sprintf("log_publishing_options[%d].log_type", [i]) + retVal := { "Id": esin.id, "ReplaceType":
"edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "log_publishing_options.log_type", "AttributeDataType": "string", "Expected": "INDEX_SLOW_LOGS", "Actual": esin.config.log_publishing_options[_].log_type } +} + +{{.prefix}}esloggingdisabled[retVal] { + esin := input.aws_elasticsearch_domain[_] + some i + esin.config.log_publishing_options[i].enabled == false + traverse := sprintf("log_publishing_options[%d].enabled", [i]) + retVal := { "Id": esin.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "log_publishing_options.enabled", "AttributeDataType": "bool", "Expected": true, "Actual": esin.config.log_publishing_options[_].enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elb/AWS.ELB.NetworkPortsSecurity.Low.0563.json b/pkg/policies/opa/rego/aws/aws_elb/AWS.ELB.NetworkPortsSecurity.Low.0563.json new file mode 100755 index 000000000..3eac45d3e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elb/AWS.ELB.NetworkPortsSecurity.Low.0563.json @@ -0,0 +1,12 @@ +{ + "name": "elbLbProtocolNotSecured", + "file": "elbLbProtocolNotSecured.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "LOW", + "description": "AWS ELB incoming traffic not encrypted", + "referenceId": "AWS.ELB.NetworkPortsSecurity.Low.0563", + "category": "Network Ports Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elb/elbInstanceProtocolNotSecured.rego b/pkg/policies/opa/rego/aws/aws_elb/elbInstanceProtocolNotSecured.rego new file mode 100755 index 000000000..a53ee33e1 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elb/elbInstanceProtocolNotSecured.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}elbInstanceProtocolNotSecured[retVal] { + elb = input.aws_elb[_] + some i + listener = elb.config.listener[i] + listener.instance_protocol != "https" + traverse := sprintf("listener[%d].instance_protocol", [i]) + retVal := { "Id": elb.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "listener.instance_protocol", "AttributeDataType": "string", "Expected": "https", "Actual": listener.instance_protocol } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_elb/elbLbProtocolNotSecured.rego b/pkg/policies/opa/rego/aws/aws_elb/elbLbProtocolNotSecured.rego new file mode 100755 index 000000000..a53376158 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_elb/elbLbProtocolNotSecured.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}elbLbProtocolNotSecured[retVal] { + elb = input.aws_elb[_] + some i + listener = elb.config.listener[i] + listener.lb_protocol != "https" + traverse := sprintf("listener[%d].lb_protocol", [i]) + retVal := { "Id": elb.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "listener.lb_protocol", "AttributeDataType": "string", "Expected": "https", "Actual": listener.lb_protocol } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_guardduty_detector/AWS.GuardDuty Enabled.Security.Medium.0575.json b/pkg/policies/opa/rego/aws/aws_guardduty_detector/AWS.GuardDuty Enabled.Security.Medium.0575.json new file mode 100755 index 000000000..b9ee8fe50 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_guardduty_detector/AWS.GuardDuty Enabled.Security.Medium.0575.json @@ -0,0 +1,12 @@ +{ + "name": "gaurdDutyDisabled", + "file": "gaurdDutyDisabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that Amazon GuardDuty 
service is currently enabled in all regions in order to protect your AWS environment and infrastructure (AWS accounts and resources, IAM credentials, guest operating systems, applications, etc) against security threats. AWS GuardDuty is a managed threat detection service that continuously monitors your VPC flow logs, AWS CloudTrail event logs and DNS logs for malicious or unauthorized behavior. The service monitors for activity such as unusual API calls, potentially compromised EC2 instances or potentially unauthorized deployments that indicate a possible AWS account compromise. AWS GuardDuty operates entirely on Amazon Web Services infrastructure and does not affect the performance or reliability of your applications. The service does not require any software agents, sensors or network appliances.", + "referenceId": "AWS.GuardDuty Enabled.Security.Medium.0575", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_guardduty_detector/gaurdDutyDisabled.rego b/pkg/policies/opa/rego/aws/aws_guardduty_detector/gaurdDutyDisabled.rego new file mode 100755 index 000000000..d92b54292 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_guardduty_detector/gaurdDutyDisabled.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}gaurdDutyDisabled[retVal] { + duty := input.aws_guardduty_detector[_] + duty.config.enable == false + traverse = "enable" + retVal := { "Id": duty.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "enable", "AttributeDataType": "bool", "Expected": true, "Actual": duty.config.enable } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0391.json b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0391.json new file mode 100755 index 000000000..e1bc3ed5a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/AWS.IamUser.IAM.High.0391.json @@ -0,0 +1,12 @@ +{ + "name": "programmaticAccessCreation", + "file": "programmaticAccessCreation.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that there are no exposed Amazon IAM access keys in order to protect your AWS resources against unapproved access", + "referenceId": "AWS.IamUser.IAM.High.0391", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_access_key/programmaticAccessCreation.rego b/pkg/policies/opa/rego/aws/aws_iam_access_key/programmaticAccessCreation.rego new file mode 100755 index 000000000..b31e5f25a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_access_key/programmaticAccessCreation.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}programmaticAccessCreation[retVal] { + access := input.aws_iam_access_key[_] + access.type == "aws_iam_access_key" + status = getStatus(access.config) + status == "Active" + access.config.user != "root" + traverse = "status" + retVal := { "Id": access.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "status", "AttributeDataType": "string", "Expected": "Inactive", "Actual": status } + +} + +getStatus(config) = "Active" { + # defaults to Active + not config.status +} + +getStatus(config) = "Active" { + config.status == "Active" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0539.json b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0539.json 
new file mode 100755 index 000000000..96fee0698 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/AWS.Iam.IAM.Low.0539.json @@ -0,0 +1,15 @@ +{ + "name": "passwordResuseNotAllowed", + "file": "passwordResuseNotAllowed.rego", + "templateArgs": { + "name": "passwordResuseNotAllowed", + "parameter": "password_reuse_prevention", + "prefix": "", + "value": 0 + }, + "severity": "LOW", + "description": "It is recommended that the password policy prevent the reuse of passwords. Preventing password reuse increases account resiliency against brute force login attempts.", + "referenceId": "AWS.Iam.IAM.Low.0539", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordResuseNotAllowed.rego b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordResuseNotAllowed.rego new file mode 100755 index 000000000..d1a749b13 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_account_password_policy/passwordResuseNotAllowed.rego @@ -0,0 +1,13 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + password_policy := input.aws_iam_account_password_policy[_] + check_validity(password_policy.config, {{.value}}) == true + password_policy_id := password_policy.id + traverse = "{{.parameter}}" + retVal := { "Id": password_policy.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "{{.parameter}}", "AttributeDataType": "int", "Expected": {{.value}}, "Actual": password_policy.config.{{.parameter}} } +} + +check_validity(p, v) = true { + p.{{.parameter}} > v +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_role/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_role/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..1c8849129 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_role/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,12 @@ +{ + "name": "iamRoleWithFullAdminCntrl", + "file": "iamRoleWithFullAdminCntrl.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles.
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "referenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_role/iamRoleWithFullAdminCntrl.rego b/pkg/policies/opa/rego/aws/aws_iam_role/iamRoleWithFullAdminCntrl.rego new file mode 100755 index 000000000..9c5f503ff --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_role/iamRoleWithFullAdminCntrl.rego @@ -0,0 +1,51 @@ +package accurics + +{{.prefix}}iamRoleWithFullAdminCntrl[retVal] { + iamUserMfa = input.aws_iam_role[_] + policy := json_unmarshal(iamUserMfa.config.assume_role_policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} diff --git a/pkg/policies/opa/rego/aws/aws_iam_role_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_role_policy/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..5d4810eb8 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_role_policy/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,12 @@ +{ + "name": "iamRolePolicyWithFullAdminCntrl", + "file": "iamRolePolicyWithFullAdminCntrl.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "referenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_role_policy/iamRolePolicyWithFullAdminCntrl.rego b/pkg/policies/opa/rego/aws/aws_iam_role_policy/iamRolePolicyWithFullAdminCntrl.rego new file mode 100755 index 000000000..1be604314 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_role_policy/iamRolePolicyWithFullAdminCntrl.rego @@ -0,0 +1,50 @@ +package accurics + +{{.prefix}}iamRolePolicyWithFullAdminCntrl[retVal] { + iamUserMfa = input.aws_iam_role_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_login_profile/AWS.Iam.IAM.High.0391.json b/pkg/policies/opa/rego/aws/aws_iam_user_login_profile/AWS.Iam.IAM.High.0391.json new file mode 100755 index 000000000..ee7ae0938 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_login_profile/AWS.Iam.IAM.High.0391.json @@ -0,0 +1,12 @@ +{ + "name": "noPasswordPolicyEnabled", + "file": "noPasswordPolicyEnabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Password policies are, in part, used to enforce password complexity requirements. 
IAM password policies can be used to ensure password are comprised of different character sets, have minimal length, rotation and history restrictions", + "referenceId": "AWS.Iam.IAM.High.0391", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_login_profile/noPasswordPolicyEnabled.rego b/pkg/policies/opa/rego/aws/aws_iam_user_login_profile/noPasswordPolicyEnabled.rego new file mode 100755 index 000000000..3fe7fb85f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_login_profile/noPasswordPolicyEnabled.rego @@ -0,0 +1,43 @@ +package accurics + +{{.prefix}}noPasswordPolicyEnabled[result.retVal] { + policy := input.aws_iam_user_login_profile[_] + result := checkPassword(policy.id, policy.config) + result != null +} + +checkPassword(id, c) = { "retVal": retVal } { + c.password_length < 14 + c.password_reset_required == false + traverse = "" + rc := "ewogICJwYXNzd29yZF9sZW5ndGgiOiAxNiwKICAicGFzc3dvcmRfcmVzZXRfcmVxdWlyZWQiOiB0cnVlCn0=" + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "block", "Traverse": traverse, "Attribute": "", "AttributeDataType": "block", "Expected": rc, "Actual": null } +} + +checkPassword(id, c) = { "retVal": retVal } { + c.password_length >= 14 + c.password_reset_required == false + traverse = "password_reset_required" + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "password_reset_required", "AttributeDataType": "boolean", "Expected": true, "Actual": c.password_reset_required } +} + +checkPassword(id, c) = { "retVal": retVal } { + not c.password_length + c.password_reset_required == false + traverse = "password_reset_required" + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "password_reset_required", "AttributeDataType": "boolean", "Expected": true, "Actual": c.password_reset_required } +} + +checkPassword(id, c) = { "retVal": retVal } { + c.password_length < 14 + c.password_reset_required == true + traverse = "password_length" + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "password_length", "AttributeDataType": "int", "Expected": 14, "Actual": c.password_length } +} + +checkPassword(id, c) = { "retVal": retVal } { + c.password_length < 14 + not c.password_reset_required + traverse = "password_length" + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "password_length", "AttributeDataType": "int", "Expected": 14, "Actual": c.password_length } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamPolicy.IAM.High.0392.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamPolicy.IAM.High.0392.json new file mode 100755 index 000000000..8a2472c69 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamPolicy.IAM.High.0392.json @@ -0,0 +1,12 @@ +{ + "name": "iamPolicyWithFullAdminControl", + "file": "iamPolicyWithFullAdminControl.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "It is recommended and considered a standard security advice to grant least privileges that is, granting only the permissions required to perform a task. IAM policies are the means by which privileges are granted to users, groups, or roles. 
Determine what users need to do and then craft policies for them that let the users perform only those tasks, instead of granting full administrative privileges.", + "referenceId": "AWS.IamPolicy.IAM.High.0392", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0389.json b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0389.json new file mode 100755 index 000000000..da8ab9656 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/AWS.IamUser.IAM.High.0389.json @@ -0,0 +1,12 @@ +{ + "name": "userWithPassNotContainMfaActive", + "file": "passAndMFA.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "It is recommended that MFA be enabled for all accounts that have a console password. Enabling MFA provides increased security for console access as it requires the authenticating principal to possess a device that emits a time-sensitive key and have knowledge of a credential", + "referenceId": "AWS.IamUser.IAM.High.0389", + "category": "Identity and Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/iamPolicyWithFullAdminControl.rego b/pkg/policies/opa/rego/aws/aws_iam_user_policy/iamPolicyWithFullAdminControl.rego new file mode 100755 index 000000000..6e8ea098f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/iamPolicyWithFullAdminControl.rego @@ -0,0 +1,51 @@ +package accurics + +{{.prefix}}iamPolicyWithFullAdminControl[retVal] { + iamUserMfa = input.aws_iam_user_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement = policy.Statement[_] + policyCheck(statement, "*", "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + + traverse = "policy" + retVal := { "Id": iamUserMfa.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Action": actions }) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "*" + value := "##resource:action##" +} + +replace_action_if_needed(action) = value { + action != "*" + value := action +} + +policyCheck(s, a, e ,r) = true { + s.Action[_] = a + s.Effect == e + s.Resource == r +} diff --git a/pkg/policies/opa/rego/aws/aws_iam_user_policy/passAndMFA.rego b/pkg/policies/opa/rego/aws/aws_iam_user_policy/passAndMFA.rego new file mode 100755 index 000000000..20ca7e6b7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_iam_user_policy/passAndMFA.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}userWithPassNotContainMfaActive[iamUserMfa_id] { + iamUserMfa = input.aws_iam_user_policy[_] + policy := json_unmarshal(iamUserMfa.config.policy) + statement := policy.Statement[_] + statement.Condition.Bool[_] = false + iamUserMfa_id = iamUserMfa.id +} + 
+json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0411.json b/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0411.json new file mode 100755 index 000000000..b614a7bef --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/AWS.Kinesis.EncryptionandKeyManagement.High.0411.json @@ -0,0 +1,12 @@ +{ + "name": "kinesisSseNotConfigured", + "file": "kinesis_sse_not_configured.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "AWS Kinesis Server data at rest has server side encryption (SSE)", + "referenceId": "AWS.Kinesis.EncryptionandKeyManagement.High.0411", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_disabled.rego b/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_disabled.rego new file mode 100755 index 000000000..fee403e83 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_disabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}kinesisSseDisabled[retVal] { + stream = input.aws_kinesis_firehose_delivery_stream[_] + some i + stream.config.server_side_encryption[i].enabled == false + traverse := sprintf("server_side_encryption[%d].enabled", [i]) + retVal := { "Id": stream.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "server_side_encryption.enabled", "AttributeDataType": "bool", "Expected": true, "Actual": stream.config.server_side_encryption[i].enabled } +} diff --git a/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_not_configured.rego b/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_not_configured.rego new file mode 100755 index 000000000..bc3c63e92 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kinesis_firehose_delivery_stream/kinesis_sse_not_configured.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}kinesisSseNotConfigured[retVal] { + stream := input.aws_kinesis_firehose_delivery_stream[_] + count(stream.config.server_side_encryption) == 0 + rc = "ewogICJzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uIjogewogICAgImVuYWJsZWQiOiB0cnVlCiAgfQp9" + traverse = "" + retVal := { "Id": stream.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "server_side_encryption", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json index 8a7440114..adc28d47f 100755 --- a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json +++ b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.Logging.High.0400.json @@ -1,6 +1,6 @@ { - "name": "kmsKeyRotationDisabled", - "file": "kmsKeyRotationDisabled.rego", + "name": "kmsKeyNoDeletionWindow", + "file": "kmsKeyNoDeletionWindow.rego", "templateArgs": { "prefix": "" }, diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.NetworkSecurity.High.0566.json b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.NetworkSecurity.High.0566.json new file mode 100755 index 000000000..48513be47 --- /dev/null +++ 
b/pkg/policies/opa/rego/aws/aws_kms_key/AWS.KMS.NetworkSecurity.High.0566.json @@ -0,0 +1,12 @@ +{ + "name": "kmsKeyExposedPolicy", + "file": "kmsKeyExposedPolicy.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Identify any publicly accessible AWS Key Management Service master keys and update their access policy in order to stop any unsigned requests made to these resources.", + "referenceId": "AWS.KMS.NetworkSecurity.High.0566", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyDisabled.rego b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyDisabled.rego new file mode 100755 index 000000000..205eea98d --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyDisabled.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}kmsKeyDisabled[retVal] { + kms_key = input.aws_kms_key[_] + kms_key.config.is_enabled == false + traverse = "is_enabled" + retVal := { "Id": kms_key.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "is_enabled", "AttributeDataType": "bool", "Expected": true, "Actual": kms_key.config.is_enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyExposedPolicy.rego b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyExposedPolicy.rego new file mode 100755 index 000000000..9ac864cc0 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyExposedPolicy.rego @@ -0,0 +1,50 @@ +package accurics + +{{.prefix}}kmsKeyExposedPolicy[retVal] { + kms = input.aws_kms_key[_] + policy := json_unmarshal(kms.config.policy) + statement = policy.Statement[_] + check_role(statement, "kms:*", "*", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": kms.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + check_role(statement, "kms:*", "*", "*") == true + actions := [ action | action := replace_action_if_needed( statement.Action[_] ) ] + value := object.union(statement, { "Principal": "##principal##", "Action": actions }) +} + +replace_if_needed(statement) = value { + not check_role(statement, "kms:*", "*", "*") + value := statement +} + +replace_action_if_needed(action) = value { + action == "kms:*" + value := "kms:##kms_action##" +} + +replace_action_if_needed(action) = value { + action != "kms:*" + value := action +} + +check_role(s, a, p, r) = true { + s.Action[_] = a + s.Principal.AWS == p + s.Resource == r +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyNoDeletionWindow.rego b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyNoDeletionWindow.rego new file mode 100755 index 000000000..0af0099a3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_kms_key/kmsKeyNoDeletionWindow.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}kmsKeyNoDeletionWindow[retVal] { + kms_key = input.aws_kms_key[_] + kms_key.config.is_enabled == true + kms_key.config.enable_key_rotation == true + invalid_window_in_days(kms_key.config.deletion_window_in_days) == true + traverse = "deletion_window_in_days" + 
retVal := { "Id": kms_key.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "deletion_window_in_days", "AttributeDataType": "int", "Expected": 90, "Actual": kms_key.config.deletion_window_in_days } +} + +invalid_window_in_days(days) = true { + days == null +} + +invalid_window_in_days(days) = true { + days != null + days > 90 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_lambda/AWS.ElasticSearch.IAM.Medium.0878.json b/pkg/policies/opa/rego/aws/aws_lambda/AWS.ElasticSearch.IAM.Medium.0878.json new file mode 100755 index 000000000..7fc5e90cb --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_lambda/AWS.ElasticSearch.IAM.Medium.0878.json @@ -0,0 +1,10 @@ +{ + "name": "awsLambdaRole", + "file": "awsLambdaRole.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Lambda Functions with Admin Privileges", + "referenceId": "AWS.ElasticSearch.IAM.Medium.0878", + "category": "Identity and Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_lambda/awsLambdaRole.rego b/pkg/policies/opa/rego/aws/aws_lambda/awsLambdaRole.rego new file mode 100755 index 000000000..ad73f356c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_lambda/awsLambdaRole.rego @@ -0,0 +1,2 @@ +package accurics + diff --git a/pkg/policies/opa/rego/aws/aws_lambda_function/AWS.VPC.Logging.Medium.0470.json b/pkg/policies/opa/rego/aws/aws_lambda_function/AWS.VPC.Logging.Medium.0470.json new file mode 100755 index 000000000..fdd7fb5b5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_lambda_function/AWS.VPC.Logging.Medium.0470.json @@ -0,0 +1,12 @@ +{ + "name": "lambdaXRayTracingDisabled", + "file": "lambdaXRayTracingDisabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "LOW", + "description": "Lambda tracing is not enabled.", + "referenceId": "AWS.VPC.Logging.Medium.0470", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_lambda_function/lambdaXRayTracingDisabled.rego b/pkg/policies/opa/rego/aws/aws_lambda_function/lambdaXRayTracingDisabled.rego new file mode 100755 index 000000000..7897422da --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_lambda_function/lambdaXRayTracingDisabled.rego @@ -0,0 +1,19 @@ +package accurics + +{{.prefix}}lambdaXRayTracingDisabled[retVal] { + lambda = input.aws_lambda_function[_] + lambda.type == "aws_lambda_function" + not lambda.config.tracing_config + rc = "ewogICJ0cmFjaW5nX2NvbmZpZyI6IHsKICAgICJtb2RlIjogIkFjdGl2ZSIKICB9Cn0=" + retVal := { "Id": lambda.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "tracing_config", "AttributeDataType": "base64", "Expected": rc, "Actual": null} +} + +{{.prefix}}lambdaXRayTracingDisabled[retVal] { + lambda = input.aws_lambda_function[_] + lambda.type == "aws_lambda_function" + some i + tracing = lambda.config.tracing_config[i] + tracing.mode != "Active" + traverse = sprintf("tracing_config[%d].mode", [i]) + retVal := { "Id": lambda.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "tracing_config.mode", "AttributeDataType": "string", "Expected": "Active", "Actual": tracing.mode } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json new file mode 100755 index 000000000..f7db277f3 --- 
/dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.EcsCluster.EncryptionandKeyManagement.High.0413.json @@ -0,0 +1,12 @@ +{ + "name": "launchConfigurationEBSBlockUnEncrypted", + "file": "launchConfigurationEBSBlockUnEncrypted.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that AWS ECS clusters are encrypted. Data encryption at rest, prevents unauthorized users from accessing sensitive data on your AWS ECS clusters and associated cache storage systems.", + "referenceId": "AWS.EcsCluster.EncryptionandKeyManagement.High.0413", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0101.json b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0101.json new file mode 100755 index 000000000..bd85ed5f7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/AWS.LaunchConfiguration.DataSecurity.High.0101.json @@ -0,0 +1,12 @@ +{ + "name": "hardCodedShellScript", + "file": "hardCodedShellScript.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Avoid using base64 encoded shell script as part of config", + "referenceId": "AWS.LaunchConfiguration.DataSecurity.High.0101", + "category": "Data Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedShellScript.rego b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedShellScript.rego new file mode 100755 index 000000000..c40c555f3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedShellScript.rego @@ -0,0 +1,23 @@ +package accurics + +{{.prefix}}hardCodedShellScript[res.id]{ + res = input.aws_instance[_] + value = base64NullCheck(res.config.user_data_base64) + startswith(value, "#!/") +} + +{{.prefix}}hardCodedShellScript[res.id]{ + res = input.aws_launch_configuration[_] + value = base64NullCheck(res.config.user_data_base64) + startswith(value, "#!/") +} + +base64NullCheck(s) = result { + s == null + result := base64.decode("e30=") +} + +base64NullCheck(s) = result { + s != null + result := base64.decode(s) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedUrl.rego b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedUrl.rego new file mode 100755 index 000000000..8cf2a8df8 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/hardCodedUrl.rego @@ -0,0 +1,23 @@ +package accurics + +{{.prefix}}hardCodedUrl[res.id]{ + res = input.aws_instance[_] + value = base64NullCheck(res.config.user_data_base64) + contains(value, "https://") +} + +{{.prefix}}hardCodedUrl[res.id]{ + res = input.aws_launch_configuration[_] + value = base64NullCheck(res.config.user_data_base64) + contains(value, "http://") +} + +base64NullCheck(s) = result { + s == null + result := base64.decode("e30=") +} + +base64NullCheck(s) = result { + s != null + result := base64.decode(s) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationEBSBlockUnEncrypted.rego b/pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationEBSBlockUnEncrypted.rego new file mode 100755 index 000000000..3ac65ddfb --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationEBSBlockUnEncrypted.rego @@ -0,0 +1,24 @@ +package 
accurics + +{{.prefix}}launchConfigurationEBSBlockUnEncrypted[result.retVal] { + block := input.aws_launch_configuration[_] + result := checkEncryption(block.id, block.config) + result != null +} + +checkEncryption(id, c) = { "retVal": retVal } { + some i + ebsBlock := c.ebs_block_device[i] + ebsBlock.encrypted != null + ebsBlock.encrypted == false + traverse := sprintf("ebs_block_device[%d].encrypted", [i]) + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ebs_block_device.encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": ebsBlock.encrypted } +} + +checkEncryption(id, c) = { "retVal": retVal } { + some i + ebsBlock := c.ebs_block_device[i] + ebsBlock.encrypted == null + traverse := sprintf("ebs_block_device[%d].encrypted", [i]) + retVal := { "Id": id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "ebs_block_device.encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationRootBlockUnEncrypted.rego b/pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationRootBlockUnEncrypted.rego new file mode 100755 index 000000000..5d0944a89 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_launch_configuration/launchConfigurationRootBlockUnEncrypted.rego @@ -0,0 +1,24 @@ +package accurics + +{{.prefix}}launchConfigurationRootBlockUnEncrypted[result.retVal] { + block := input.aws_launch_configuration[_] + result := checkEncryption(block.id, block.config) + result != null +} + +checkEncryption(id, c) = { "retVal": retVal } { + some i + rootBlock := c.root_block_device[i] + rootBlock.encrypted != null + rootBlock.encrypted == false + traverse := sprintf("root_block_device[%d].encrypted", [i]) + retVal := { "Id": id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "root_block_device.encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": rootBlock.encrypted } +} + +checkEncryption(id, c) = { "retVal": retVal } { + some i + rootBlock := c.root_block_device[i] + rootBlock.encrypted == null + traverse := sprintf("root_block_device[%d].encrypted", [i]) + retVal := { "Id": id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "root_block_device.encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0401.json b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0401.json new file mode 100755 index 000000000..06d89a9bd --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0401.json @@ -0,0 +1,16 @@ +{ + "name": "elbSsLTsLProtocol", + "file": "elbSsLTsLProtocol.rego", + "templateArgs": { + "prefix": "", + "security_protocols": [ + "Protocol-SSLv3", + "Protocol-TLSv1" + ] + }, + "severity": "HIGH", + "description": "Using insecure ciphers for your ELB Predefined or Custom Security Policy, could make the SSL connection between the client and the load balancer vulnerable to exploits. 
TLS 1.0 was recommended to be disabled by PCI Council after June 30, 2016", + "referenceId": "AWS.ELB.EncryptionandKeyManagement.High.0401", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0403.json b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0403.json new file mode 100755 index 000000000..799fd1062 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/AWS.ELB.EncryptionandKeyManagement.High.0403.json @@ -0,0 +1,83 @@ +{ + "name": "elbWeakCipher", + "file": "elbWeakCipher.rego", + "templateArgs": { + "prefix": "", + "weak_ciphers": [ + "DHE-DSS-AES128-SHA", + "CAMELLIA128-SHA", + "EDH-RSA-DES-CBC3-SHA", + "DES-CBC3-SHA", + "ECDHE-RSA-RC4-SHA", + "RC4-SHA", + "ECDHE-ECDSA-RC4-SHA", + "DHE-DSS-AES256-GCM-SHA384", + "DHE-RSA-AES256-GCM-SHA384", + "DHE-RSA-AES256-SHA256", + "DHE-DSS-AES256-SHA256", + "DHE-RSA-AES256-SHA", + "DHE-DSS-AES256-SHA", + "DHE-RSA-CAMELLIA256-SHA", + "DHE-DSS-CAMELLIA256-SHA", + "CAMELLIA256-SHA", + "EDH-DSS-DES-CBC3-SHA", + "DHE-DSS-AES128-GCM-SHA256", + "DHE-RSA-AES128-GCM-SHA256", + "DHE-RSA-AES128-SHA256", + "DHE-DSS-AES128-SHA256", + "DHE-RSA-CAMELLIA128-SHA", + "DHE-DSS-CAMELLIA128-SHA", + "ADH-AES128-GCM-SHA256", + "ADH-AES128-SHA", + "ADH-AES128-SHA256", + "ADH-AES256-GCM-SHA384", + "ADH-AES256-SHA", + "ADH-AES256-SHA256", + "ADH-CAMELLIA128-SHA", + "ADH-CAMELLIA256-SHA", + "ADH-DES-CBC3-SHA", + "ADH-DES-CBC-SHA", + "ADH-RC4-MD5", + "ADH-SEED-SHA", + "DES-CBC-SHA", + "DHE-DSS-SEED-SHA", + "DHE-RSA-SEED-SHA", + "EDH-DSS-DES-CBC-SHA", + "EDH-RSA-DES-CBC-SHA", + "IDEA-CBC-SHA", + "RC4-MD5", + "SEED-SHA", + "DES-CBC3-MD5", + "DES-CBC-MD5", + "RC2-CBC-MD5", + "PSK-AES256-CBC-SHA", + "PSK-3DES-EDE-CBC-SHA", + "KRB5-DES-CBC3-SHA", + "KRB5-DES-CBC3-MD5", + "PSK-AES128-CBC-SHA", + "PSK-RC4-SHA", + "KRB5-RC4-SHA", + "KRB5-RC4-MD5", + "KRB5-DES-CBC-SHA", + "KRB5-DES-CBC-MD5", + "EXP-EDH-RSA-DES-CBC-SHA", + "EXP-EDH-DSS-DES-CBC-SHA", + "EXP-ADH-DES-CBC-SHA", + "EXP-DES-CBC-SHA", + "EXP-RC2-CBC-MD5", + "EXP-KRB5-RC2-CBC-SHA", + "EXP-KRB5-DES-CBC-SHA", + "EXP-KRB5-RC2-CBC-MD5", + "EXP-KRB5-DES-CBC-MD5", + "EXP-ADH-RC4-MD5", + "EXP-RC4-MD5", + "EXP-KRB5-RC4-SHA", + "EXP-KRB5-RC4-MD5" + ] + }, + "severity": "HIGH", + "description": "Remove insecure ciphers for your ELB Predefined or Custom Security Policy, to reduce the risk of the SSL connection between the client and the load balancer being exploited.", + "referenceId": "AWS.ELB.EncryptionandKeyManagement.High.0403", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbSsLTsLProtocol.rego b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbSsLTsLProtocol.rego new file mode 100755 index 000000000..a0f9718ac --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbSsLTsLProtocol.rego @@ -0,0 +1,16 @@ +package accurics + +{{.prefix}}elbSsLTsLProtocol[retVal] { + lb = input.aws_load_balancer_policy[_] + some i + policy := lb.config.policy_attribute[i] + name := policy.name + contains([{{range .security_protocols}}{{- printf "%q" . 
}},{{end}}], name) + traverse := sprintf("policy_attribute[%d].name", [i]) + retVal := { "Id": lb.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "policy_attribute.name", "AttributeDataType": "string", "Expected": "Protocol-TLSv1.2", "Actual": name } + +} + +contains(security_protocol, nam) { + security_protocol[_] = nam +} diff --git a/pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbWeakCipher.rego b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbWeakCipher.rego new file mode 100755 index 000000000..2f18855e7 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_load_balancer_policy/elbWeakCipher.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}elbWeakCipher[retVal] { + lb = input.aws_load_balancer_policy[_] + some i + policy := lb.config.policy_attribute[i] + name := policy.name + contains([{{range .weak_ciphers}}{{- printf "%q" . }},{{end}}], name) + traverse := sprintf("policy_attribute[%d].name", [i]) + retVal := { "Id": lb.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "policy_attribute.name", "AttributeDataType": "string", "Expected": "AES256-SHA256", "Actual": name } +} + +contains(weak_cipher, nam) { + weak_cipher[_] = nam +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_macie_member_account_association/AWS.Macie.Security.Medium.0576.json b/pkg/policies/opa/rego/aws/aws_macie_member_account_association/AWS.Macie.Security.Medium.0576.json new file mode 100755 index 000000000..b7fe3091a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_macie_member_account_association/AWS.Macie.Security.Medium.0576.json @@ -0,0 +1,12 @@ +{ + "name": "macieIsNotAssociated", + "file": "macieIsNotAssociated.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that Amazon Macie service is currently in use in order to classify and protect sensitive information such as credit cards, financial records or Personally Identifiable Information (PII), available in your AWS account. AWS Macie is a data security service that utilizes machine learning to automatically discover, classify and protect critical data within AWS cloud. Once enabled and configured, Macie will scan your S3 buckets to identify sensitive information, bring this data to your attention and analyze access patterns and user behavior to prevent any data leakage. Macie can also help you with governance, compliance and audit standards. 
For example, the service can enable you to comply with General Data Protection Regulation (GDPR) regulations around encryption and pseudonymization of data as it recognizes Personally Identifiable Information (PII).", + "referenceId": "AWS.Macie.Security.Medium.0576", + "category": "Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_macie_member_account_association/macieIsNotAssociated.rego b/pkg/policies/opa/rego/aws/aws_macie_member_account_association/macieIsNotAssociated.rego new file mode 100755 index 000000000..e4223f7b3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_macie_member_account_association/macieIsNotAssociated.rego @@ -0,0 +1,16 @@ +package accurics + +{{.prefix}}macieIsNotAssociated[retVal] { + check_empty(input) + rc := "ZGF0YSAiYXdzX2NhbGxlcl9pZGVudGl0eSIgImN1cnJlbnQiIHt9CgpyZXNvdXJjZSAiYXdzX21hY2llX21lbWJlcl9hY2NvdW50X2Fzc29jaWF0aW9uIiAibWFjaWVfbWVtYmVyX2Fzc29jaWF0aW9uX25hbWUiIHsKICAgICJtZW1iZXJfYWNjb3VudF9pZCI6ICIke2RhdGEuYXdzX2NhbGxlcl9pZGVudGl0eS5jdXJyZW50LmFjY291bnRfaWR9Igp9" + traverse = "" + retVal := { "Id": "no_macie_member_account_association", "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": rc, "Actual": null } +} + +check_empty(macie_input) = true { + not macie_input.aws_macie_member_account_association +} + +check_empty(macie_input) = true { + count(macie_input.aws_macie_member_account_association) <= 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.Logging.Medium.0885.json b/pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.Logging.Medium.0885.json new file mode 100755 index 000000000..0664b7ccf --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.Logging.Medium.0885.json @@ -0,0 +1,10 @@ +{ + "name": "awsMqLoggingEnabled", + "file": "awsMqLoggingEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Enable AWS MQ Log Exports", + "referenceId": "AWS.ElasticSearch.Logging.Medium.0885", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.NetworkSecurity.Medium.0887.json b/pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.NetworkSecurity.Medium.0887.json new file mode 100755 index 000000000..9d4e0fc75 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_mq/AWS.ElasticSearch.NetworkSecurity.Medium.0887.json @@ -0,0 +1,10 @@ +{ + "name": "awsMqPubliclyAccessible", + "file": "awsMqPubliclyAccessible.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Publicly Accessible MQ Brokers", + "referenceId": "AWS.ElasticSearch.NetworkSecurity.Medium.0887", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_mq/awsMqLoggingEnabled.rego b/pkg/policies/opa/rego/aws/aws_mq/awsMqLoggingEnabled.rego new file mode 100755 index 000000000..fa785e8fd --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_mq/awsMqLoggingEnabled.rego @@ -0,0 +1,21 @@ +package accurics + +awsMqLoggingEnabled[api.id] +{ + api := input.aws_mq_broker[_] + not api.config.logs +} + +awsMqLoggingEnabled[api.id] +{ + api := input.aws_mq_broker[_] + var := api.config.logs[_] + var.audit == false +} + +awsMqLoggingEnabled[api.id] +{ + api := input.aws_mq_broker[_] + var := api.config.logs[_] + var.general == false +} diff --git a/pkg/policies/opa/rego/aws/aws_mq/awsMqPubliclyAccessible.rego 
b/pkg/policies/opa/rego/aws/aws_mq/awsMqPubliclyAccessible.rego new file mode 100755 index 000000000..0ab107e6f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_mq/awsMqPubliclyAccessible.rego @@ -0,0 +1,7 @@ +package accurics + +awsMqPubliclyAccessible[api.id] +{ + api := input.aws_mq_broker[_] + api.config.publicly_accessible == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_organizations_policy/AWS.Organizations.IAM.MEDIUM.0590.json b/pkg/policies/opa/rego/aws/aws_organizations_policy/AWS.Organizations.IAM.MEDIUM.0590.json new file mode 100755 index 000000000..3330bdc28 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_organizations_policy/AWS.Organizations.IAM.MEDIUM.0590.json @@ -0,0 +1,12 @@ +{ + "name": "scpFullAccess", + "file": "scpFullAccess.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure that All Features is enabled within your Amazon Organizations to achieve full control over the use of AWS services and actions across multiple AWS accounts using Service Control Policies (SCPs).", + "referenceId": "AWS.Organizations.IAM.MEDIUM.0590", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_organizations_policy/scpFullAccess.rego b/pkg/policies/opa/rego/aws/aws_organizations_policy/scpFullAccess.rego new file mode 100755 index 000000000..9b825adc0 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_organizations_policy/scpFullAccess.rego @@ -0,0 +1,67 @@ +package accurics + +# this is still buggy, logic is still unstable + +{{.prefix}}scpFullAccess[retVal]{ + org_policy = input.aws_organizations_policy[_] + org_policy.config.type == "SERVICE_CONTROL_POLICY" + content := json_unmarshal(org_policy.config.content) + # policyCheck(content, "*", "Allow", "*") == true + + statements := [ content | content := replace_if_needed(content.Statement) ] + expected := object.union(content, {"Statement": statements}) + traverse = "content" + retVal := { "Id": org_policy.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "content", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + not policyCheck(statement, "*", "Allow", "*") == true + value := object.union(statement, { "Resource": "*" }) +} + +replace_if_needed(statement) = value { + policyCheck(statement, "*", "Allow", "*") + value := statement +} + +policyCheck(s, r, e, a) = true { + s.Effect == e + action := is_array(s.Action) + s.Action[_] == a + resource := is_array(s.Resource) + s.Resource[_] == r +} + +policyCheck(s, r, e, a) = true { + s.Effect == e + action := is_string(s.Action) + s.Action == a + resource := is_array(s.Resource) + s.Resource[_] == r +} + +policyCheck(s, r, e, a) = true { + s.Effect == e + action := is_array(s.Action) + s.Action[_] == a + resource := is_string(s.Resource) + s.Resource == r +} + +policyCheck(s, r, e, a) = true { + s.Effect == e + action := is_string(s.Action) + s.Action == a + resource := is_string(s.Resource) + s.Resource == r +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_rds_cluster/AWS.RDS.EncryptionandKeyManagement.High.0414.json b/pkg/policies/opa/rego/aws/aws_rds_cluster/AWS.RDS.EncryptionandKeyManagement.High.0414.json new file mode 100755 index 000000000..c56e73b31 --- 
/dev/null +++ b/pkg/policies/opa/rego/aws/aws_rds_cluster/AWS.RDS.EncryptionandKeyManagement.High.0414.json @@ -0,0 +1,12 @@ +{ + "name": "storageNotEncrypted", + "file": "storageNotEncrypted.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Encrypt Amazon RDS instances and snapshots at rest by enabling the encryption option for your Amazon RDS DB instance", + "referenceId": "AWS.RDS.EncryptionandKeyManagement.High.0414", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_rds_cluster/storageNotEncrypted.rego b/pkg/policies/opa/rego/aws/aws_rds_cluster/storageNotEncrypted.rego new file mode 100755 index 000000000..80ad42c12 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_rds_cluster/storageNotEncrypted.rego @@ -0,0 +1,25 @@ +package accurics + +{{.prefix}}storageNotEncrypted[retVal]{ + rds = input.aws_rds_cluster[_] + not Encrypted(rds.config) + traverse = "storage_encrypted" + retVal := { "Id": rds.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "storage_encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": rds.config.storage_encrypted } +} + +{{.prefix}}storageNotEncrypted[retVal]{ + rds = input.aws_db_instance[_] + not Encrypted(rds.config) + traverse = "storage_encrypted" + retVal := { "Id": rds.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "storage_encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": rds.config.storage_encrypted } +} + +Encrypted(config) = true { + config.storage_encrypted != null + config.storage_encrypted == true +} + +Encrypted(config) = true { + config.storage_encrypted == null + config.kms_key_id != "" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.EncryptionandKeyManagement.High.0415.json b/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.EncryptionandKeyManagement.High.0415.json new file mode 100755 index 000000000..67f8d28bd --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.EncryptionandKeyManagement.High.0415.json @@ -0,0 +1,12 @@ +{ + "name": "redshiftEncryptedFalse", + "file": "redshiftEncryptedFalse.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Use customer-managed KMS keys instead of AWS-managed keys to have granular control over encrypting and decrypting data. Encrypt Redshift clusters with a Customer-managed KMS key. 
This is a recommended best practice.", + "referenceId": "AWS.Redshift.EncryptionandKeyManagement.High.0415", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.Logging.Medium.0565.json b/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.Logging.Medium.0565.json new file mode 100755 index 000000000..e03a420a6 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.Logging.Medium.0565.json @@ -0,0 +1,12 @@ +{ + "name": "redshiftAuditLogs", + "file": "redshiftAuditLogs.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure audit logging is enabled for Redshift clusters for security and troubleshooting purposes.", + "referenceId": "AWS.Redshift.Logging.Medium.0565", + "category": "Logging", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.NetworkSecurity.HIGH.0564.json b/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.NetworkSecurity.HIGH.0564.json new file mode 100755 index 000000000..d4b1cd65a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/AWS.Redshift.NetworkSecurity.HIGH.0564.json @@ -0,0 +1,12 @@ +{ + "name": "redshiftPublicAccess", + "file": "redshiftPublicAccess.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure Redshift clusters are not publicly accessible to minimise security risks.", + "referenceId": "AWS.Redshift.NetworkSecurity.HIGH.0564", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftAuditLogs.rego b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftAuditLogs.rego new file mode 100755 index 000000000..facf9a56c --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftAuditLogs.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}redshiftAuditLogs[retVal]{ + redshift = input.aws_redshift_cluster[_] + redshift.config.logging == [] + rc = "ewogICJsb2dnaW5nIjogewogICAgImVuYWJsZSI6IHRydWUsCiAgICAiYnVja2V0X25hbWUiOiAiPGJ1Y2tldF9uYW1lPiIsCiAgICAiczNfa2V5X3ByZWZpeCI6ICI8czNfa2V5X3ByZWZpeD4iCiAgfQp9" + traverse = "" + retVal := { "Id": redshift.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "", "AttributeDataType": "block", "Expected": rc, "Actual": null } +} + +{{.prefix}}redshiftAuditLogs[retVal]{ + redshift = input.aws_redshift_cluster[_] + redshift.config.logging[_].enable == false + rc = "ewogICJlbmFibGUiOiB0cnVlLAogICJidWNrZXRfbmFtZSI6ICI8YnVja2V0X25hbWU+IiwKICAiczNfa2V5X3ByZWZpeCI6ICI8czNfa2V5X3ByZWZpeD4iCn0=" + traverse = "logging" + retVal := { "Id": redshift.id, "ReplaceType": "edit", "CodeType": "block", "Traverse": traverse, "Attribute": "logging", "AttributeDataType": "block", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedFalse.rego b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedFalse.rego new file mode 100755 index 000000000..9a5a28700 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedFalse.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}redshiftEncryptedFalse[retVal]{ + redshift_cluster = input.aws_redshift_cluster[_] + redshift_cluster.config.encrypted == false + traverse = "encrypted" + retVal := { "Id": redshift_cluster.id, 
"ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": redshift_cluster.config.encrypted } +} + +{{.prefix}}redshiftEncryptedFalse[retVal]{ + redshift_cluster = input.aws_redshift_cluster[_] + not redshift_cluster.config.encrypted + traverse = "encrypted" + retVal := { "Id": redshift_cluster.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "encrypted", "AttributeDataType": "bool", "Expected": true, "Actual": false } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedWithNoKms.rego b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedWithNoKms.rego new file mode 100755 index 000000000..20a895f3e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftEncryptedWithNoKms.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}{{.name}}[retVal]{ + redshift_cluster = input.aws_redshift_cluster[_] + redshift_cluster.config.encrypted == true + not redshift_cluster.config.kms_key_id + traverse = "kms_key_id" + retVal := { "Id": redshift_cluster.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_key_id", "AttributeDataType": "string", "Expected": "", "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftPublicAccess.rego b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftPublicAccess.rego new file mode 100755 index 000000000..a5e23b948 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_redshift_cluster/redshiftPublicAccess.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}redshiftPublicAccess[retVal]{ + redshift = input.aws_redshift_cluster[_] + redshift.config.publicly_accessible == true + traverse = "publicly_accessible" + retVal := { "Id": redshift.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": redshift.config.publicly_accessible } +} + +{{.prefix}}redshiftPublicAccess[retVal] { + redshift = input.aws_redshift_cluster[_] + not redshift.config.publicly_accessible + traverse = "publicly_accessible" + retVal := { "Id": redshift.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "publicly_accessible", "AttributeDataType": "bool", "Expected": false, "Actual": true } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_route53_query_log/AWS.Route53 query logs.Logging.Medium.0574.json b/pkg/policies/opa/rego/aws/aws_route53_query_log/AWS.Route53 query logs.Logging.Medium.0574.json new file mode 100755 index 000000000..3b2e84187 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_route53_query_log/AWS.Route53 query logs.Logging.Medium.0574.json @@ -0,0 +1,12 @@ +{ + "name": "route53LoggingDisabled", + "file": "route53LoggingDisabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure CloudWatch logging is enabled for Route53 hosted zones.", + "referenceId": "AWS.Route53 query logs.Logging.Medium.0574", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_route53_query_log/route53LoggingDisabled.rego b/pkg/policies/opa/rego/aws/aws_route53_query_log/route53LoggingDisabled.rego new file mode 100755 index 000000000..a8a6356eb --- /dev/null +++ 
b/pkg/policies/opa/rego/aws/aws_route53_query_log/route53LoggingDisabled.rego @@ -0,0 +1,12 @@ +package accurics + +{{.prefix}}route53LoggingDisabled[route.id] { + route := input.aws_route53_zone[_] + not input.aws_route53_query_log +} + +{{.prefix}}route53LoggingDisabled[route.id] { + route := input.aws_route53_query_log[_] + logName := route.config.cloudwatch_log_group_arn + not re_match(route.name, logName) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_route53_record/AWS.Route53HostedZone.DNSManagement.High.0422.json b/pkg/policies/opa/rego/aws/aws_route53_record/AWS.Route53HostedZone.DNSManagement.High.0422.json new file mode 100755 index 000000000..63a9bd81f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_route53_record/AWS.Route53HostedZone.DNSManagement.High.0422.json @@ -0,0 +1,12 @@ +{ + "name": "noRoute53RecordSet", + "file": "noRoute53RecordSet.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Route53HostedZone should have recordSets.", + "referenceId": "AWS.Route53HostedZone.DNSManagement.High.0422", + "category": "DNS Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_route53_record/noRoute53RecordSet.rego b/pkg/policies/opa/rego/aws/aws_route53_record/noRoute53RecordSet.rego new file mode 100755 index 000000000..6587fe52d --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_route53_record/noRoute53RecordSet.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}noRoute53RecordSet[retVal] { + route := input.aws_route53_record[_] + check_empty_records(route.config.records) + traverse = "records" + retVal := { "Id": route.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "records", "AttributeDataType": "list", "Expected": [""], "Actual": null } +} + +check_empty_records(records) = true { + records == null +} + +check_empty_records(records) = true { + records != null + count(records) <= 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.DS.High.1043.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.DS.High.1043.json new file mode 100755 index 000000000..c6886c6a5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.DS.High.1043.json @@ -0,0 +1,12 @@ +{ + "name": "s3EnforceUserACL", + "file": "s3EnforceUserACL.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "S3 bucket Access is allowed to all AWS Account Users.", + "referenceId": "AWS.S3Bucket.DS.High.1043", + "category": "S3", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json new file mode 100755 index 000000000..ffc4645d6 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.EncryptionandKeyManagement.High.0405.json @@ -0,0 +1,12 @@ +{ + "name": "s3BucketSseRulesWithKmsNull", + "file": "s3BucketSseRulesWithKmsNull.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that S3 Buckets have server side encryption at rest enabled with KMS key to protect sensitive data.", + "referenceId": "AWS.S3Bucket.EncryptionandKeyManagement.High.0405", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json 
b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json new file mode 100755 index 000000000..262a45897 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0370.json @@ -0,0 +1,12 @@ +{ + "name": "s3Versioning", + "file": "s3Versioning.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Enabling S3 versioning will enable easy recovery from both unintended user actions, like deletes and overwrites", + "referenceId": "AWS.S3Bucket.IAM.High.0370", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json new file mode 100755 index 000000000..3354d2579 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0377.json @@ -0,0 +1,14 @@ +{ + "name": "allUsersReadAccess", + "file": "s3AclGrants.rego", + "templateArgs": { + "access": "public-read", + "name": "allUsersReadAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "referenceId": "AWS.S3Bucket.IAM.High.0377", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json new file mode 100755 index 000000000..1879b8dc3 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0378.json @@ -0,0 +1,14 @@ +{ + "name": "authUsersReadAccess", + "file": "s3AclGrants.rego", + "templateArgs": { + "access": "authenticated-read", + "name": "authUsersReadAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "referenceId": "AWS.S3Bucket.IAM.High.0378", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json new file mode 100755 index 000000000..1fcbb5622 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0379.json @@ -0,0 +1,14 @@ +{ + "name": "allUsersWriteAccess", + "file": "s3AclGrants.rego", + "templateArgs": { + "access": "public-read-write", + "name": "allUsersWriteAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "referenceId": "AWS.S3Bucket.IAM.High.0379", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json new file mode 100755 index 000000000..7d8313989 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.IAM.High.0381.json @@ -0,0 +1,14 @@ +{ + "name": "allUsersReadWriteAccess", + "file": "s3AclGrants.rego", + "templateArgs": { + "access": "public-read-write", + "name": "allUsersReadWriteAccess", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / 
deletion", + "referenceId": "AWS.S3Bucket.IAM.High.0381", + "category": "IAM", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json new file mode 100755 index 000000000..11f0c2764 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/AWS.S3Bucket.NetworkSecurity.High.0417.json @@ -0,0 +1,12 @@ +{ + "name": "s3BucketNoWebsiteIndexDoc", + "file": "s3BucketNoWebsiteIndexDoc.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that there are not any static websites being hosted on buckets you aren't aware of", + "referenceId": "AWS.S3Bucket.NetworkSecurity.High.0417", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego new file mode 100755 index 000000000..2661fa0a9 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/noS3BucketSseRules.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}noS3BucketSseRules[retVal] { + bucket := input.aws_s3_bucket[_] + bucket.config.server_side_encryption_configuration == [] + rc = "ewogICJzZXJ2ZXJfc2lkZV9lbmNyeXB0aW9uX2NvbmZpZ3VyYXRpb24iOiB7CiAgICAicnVsZSI6IHsKICAgICAgImFwcGx5X3NlcnZlcl9zaWRlX2VuY3J5cHRpb25fYnlfZGVmYXVsdCI6IHsKICAgICAgICAic3NlX2FsZ29yaXRobSI6ICJBRVMyNTYiCiAgICAgIH0KICAgIH0KICB9Cn0=" + traverse = "" + retVal := { "Id": bucket.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "server_side_encryption_configuration", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego new file mode 100755 index 000000000..fc83f4a0f --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3AclGrants.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}{{.name}}[retVal] { + bucket := input.aws_s3_bucket[_] + bucket.config.acl == "{{.access}}" + traverse = "acl" + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "acl", "AttributeDataType": "string", "Expected": "private", "Actual": bucket.config.acl } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego new file mode 100755 index 000000000..7ee714f1e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketNoWebsiteIndexDoc.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}s3BucketNoWebsiteIndexDoc[retVal] { + bucket := input.aws_s3_bucket[_] + count(bucket.config.website) > 0 + traverse = "website" + retVal := { "Id": bucket.id, "ReplaceType": "delete", "CodeType": "block", "Traverse": traverse, "Attribute": "website", "AttributeDataType": "block", "Expected": null, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketSseRulesWithKmsNull.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketSseRulesWithKmsNull.rego new file mode 100755 index 000000000..26e4d934b --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3BucketSseRulesWithKmsNull.rego @@ -0,0 +1,30 @@ +package accurics + +{{.prefix}}s3BucketSseRulesWithKmsNull[retVal] { + bucket := input.aws_s3_bucket[_] + some 
i, j, k + sse := bucket.config.server_side_encryption_configuration[i] + sse_rule := sse.rule[j] + sse_apply := sse_rule.apply_server_side_encryption_by_default[k] + + not hasEncryption(sse_apply) + + sse_apply.kms_master_key_id == null + traverse := sprintf("server_side_encryption_configuration[%d].rule[%d].apply_server_side_encryption_by_default[%d].kms_master_key_id", [i, j, k]) + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "server_side_encryption_configuration.rule.apply_server_side_encryption_by_default.kms_master_key_id", "AttributeDataType": "string", "Expected": "", "Actual": sse_apply.kms_master_key_id } +} + +hasEncryption(sse) { + not check_empty(sse.kms_master_key_id) +} + +hasEncryption(sse) { + check_empty(sse.kms_master_key_id) + sse.sse_algorithm == "AES256" +} +check_empty(key) { + key == null +} +check_empty(key) { + key == "" +} diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3EnforceUserACL.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3EnforceUserACL.rego new file mode 100755 index 000000000..971c61c7a --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3EnforceUserACL.rego @@ -0,0 +1,16 @@ +package accurics + +{{.prefix}}s3EnforceUserACL[retVal] { + bucket := input.aws_s3_bucket[_] + + bucket_policies_set := { policy_id | policy_id := split(input.aws_s3_bucket_policy[_].id, "." )[1] } + + not bucket_policies_set[split(bucket.id, ".")[1]] + + rc = "cmVzb3VyY2UgImF3c19zM19idWNrZXRfcG9saWN5IiAiIyNyZXNvdXJjZV9uYW1lIyNQb2xpY3kiIHsKICBidWNrZXQgPSAiJHthd3NfczNfYnVja2V0LiMjcmVzb3VyY2VfbmFtZSMjLmlkfSIKCiAgcG9saWN5ID0gPDxQT0xJQ1kKewogICJWZXJzaW9uIjogIjIwMTItMTAtMTciLAogICJTdGF0ZW1lbnQiOiBbCiAgICB7CiAgICAgICJTaWQiOiAiIyNyZXNvdXJjZV9uYW1lIyMtcmVzdHJpY3QtYWNjZXNzLXRvLXVzZXJzLW9yLXJvbGVzIiwKICAgICAgIkVmZmVjdCI6ICJBbGxvdyIsCiAgICAgICJQcmluY2lwYWwiOiBbCiAgICAgICAgewogICAgICAgICAgIkFXUyI6IFsKICAgICAgICAgICAgImFybjphd3M6aWFtOjojI2Fjb3VudF9pZCMjOnJvbGUvIyNyb2xlX25hbWUjIyIsCiAgICAgICAgICAgICJhcm46YXdzOmlhbTo6IyNhY291bnRfaWQjIzp1c2VyLyMjdXNlcl9uYW1lIyMiCiAgICAgICAgICBdCiAgICAgICAgfQogICAgICBdLAogICAgICAiQWN0aW9uIjogInMzOkdldE9iamVjdCIsCiAgICAgICJSZXNvdXJjZSI6ICJhcm46YXdzOnMzOjo6JHthd3NfczNfYnVja2V0LiMjcmVzb3VyY2VfbmFtZSMjLmlkfS8qIgogICAgfQogIF0KfQpQT0xJQ1kKfQ==" + decode_rc = base64.decode(rc) + replaced_resource_name := replace(decode_rc, "##resource_name##", bucket.name) + + traverse = "" + retVal := { "Id": bucket.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": base64.encode(replaced_resource_name), "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3Versioning.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3Versioning.rego new file mode 100755 index 000000000..707241099 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3Versioning.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}s3Versioning[retVal] { + bucket := input.aws_s3_bucket[_] + some i + ver := bucket.config.versioning[i] + ver.enabled == false + traverse := sprintf("versioning[%d].enabled", [i]) + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "versioning.enabled", "AttributeDataType": "bool", "Expected": true, "Actual": ver.enabled } +} + +{{.prefix}}s3Versioning[retVal] { + bucket := input.aws_s3_bucket[_] + not bucket.config.versioning + rc := "ewogICJ2ZXJzaW9uaW5nIjogewogICAgImVuYWJsZWQiOiB0cnVlCiAgfQp9" + retVal := { 
"Id": bucket.id, "ReplaceType": "add", "CodeType": "block", "Attribute": "", "AttributeDataType": "block", "Expected": rc } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego new file mode 100755 index 000000000..d2c28b5b5 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket/s3VersioningMfaFalse.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}s3VersioningMfaFalse[retVal] { + bucket := input.aws_s3_bucket[_] + some i + mfa := bucket.config.versioning[i] + mfa.mfa_delete == false + traverse := sprintf("versioning[%d].mfa_delete", [i]) + retVal := { "Id": bucket.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "versioning.mfa_delete", "AttributeDataType": "bool", "Expected": true, "Actual": mfa.mfa_delete } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0373.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0373.json new file mode 100755 index 000000000..8395d227d --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0373.json @@ -0,0 +1,14 @@ +{ + "name": "allowGetActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "templateArgs": { + "Action": "s3:Get", + "name": "allowGetActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "referenceId": "AWS.IamPolicy.IAM.High.0373", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0375.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0375.json new file mode 100755 index 000000000..5faced8be --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0375.json @@ -0,0 +1,14 @@ +{ + "name": "allowPutActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "templateArgs": { + "Action": "s3:Put", + "name": "allowPutActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "referenceId": "AWS.IamPolicy.IAM.High.0375", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0376.json b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0376.json new file mode 100755 index 000000000..990945694 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_s3_bucket_policy/AWS.IamPolicy.IAM.High.0376.json @@ -0,0 +1,14 @@ +{ + "name": "allowWriteACPActionFromAllPrncpls", + "file": "actionsFromAllPrincipals.rego", + "templateArgs": { + "Action": "s3:PutBucketAcl", + "name": "allowWriteACPActionFromAllPrncpls", + "prefix": "" + }, + "severity": "HIGH", + "description": "Misconfigured S3 buckets can leak private information to the entire internet or allow unauthorized data tampering / deletion", + "referenceId": "AWS.IamPolicy.IAM.High.0376", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_sns_topic/AWS.SNS.NS.Medium.1044.json 
b/pkg/policies/opa/rego/aws/aws_sns_topic/AWS.SNS.NS.Medium.1044.json new file mode 100755 index 000000000..4ea0d17e0 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_sns_topic/AWS.SNS.NS.Medium.1044.json @@ -0,0 +1,12 @@ +{ + "name": "snsPublicAccess", + "file": "snsPublicAccess.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "MEDIUM", + "description": "Ensure SNS Topic is not Publicly Accessible For Subscription", + "referenceId": "AWS.SNS.NS.Medium.1044", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_sns_topic/snsPublicAccess.rego b/pkg/policies/opa/rego/aws/aws_sns_topic/snsPublicAccess.rego new file mode 100755 index 000000000..f231033e2 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_sns_topic/snsPublicAccess.rego @@ -0,0 +1,38 @@ +package accurics + +{{.prefix}}snsPublicAccess[retVal] { + sns := input.aws_sns_topic[_] + policy := json_unmarshal(sns.config.policy) + statement = policy.Statement[_] + check_role(statement, "Allow", "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union( policy, {"Statement": statements} ) + traverse = "policy" + retVal := { "Id": sns.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + check_role(statement, "Allow", "*") == true + value := object.union(statement, { "Principal": {"AWS": "##principal##"} }) +} + +replace_if_needed(statement) = value { + not check_role(statement, "Allow", "*") + value := statement +} + +check_role(s, e, p) = true { + s.Effect == e + s.Principal.AWS == p +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0569.json b/pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0569.json new file mode 100755 index 000000000..40f74167e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0569.json @@ -0,0 +1,12 @@ +{ + "name": "sqsQueueExposed", + "file": "sqsQueueExposed.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Identify any publicly accessible SQS queues available in your AWS account and update their permissions in order to protect against unauthorized users.", + "referenceId": "AWS.SQS.NetworkSecurity.High.0569", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0570.json b/pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0570.json new file mode 100755 index 000000000..1138ac326 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_sqs_queue/AWS.SQS.NetworkSecurity.High.0570.json @@ -0,0 +1,12 @@ +{ + "name": "sqsSseDisabled", + "file": "sqsSseDisabled.rego", + "templateArgs": { + "prefix": "" + }, + "severity": "HIGH", + "description": "Ensure that your Amazon Simple Queue Service (SQS) queues are protecting the contents of their messages using Server-Side Encryption (SSE). The SQS service uses an AWS KMS Customer Master Key (CMK) to generate data keys required for the encryption/decryption process of SQS messages.
There is no additional charge for using SQS Server-Side Encryption; however, there is a charge for using AWS KMS.", + "referenceId": "AWS.SQS.NetworkSecurity.High.0570", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_sqs_queue/sqsQueueExposed.rego b/pkg/policies/opa/rego/aws/aws_sqs_queue/sqsQueueExposed.rego new file mode 100755 index 000000000..64f55690e --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_sqs_queue/sqsQueueExposed.rego @@ -0,0 +1,37 @@ +package accurics + +{{.prefix}}sqsQueueExposed[retVal] { + sqs := input.aws_sqs_queue[_] + policy := json_unmarshal(sqs.config.policy) + statement = policy.Statement[_] + check_role(statement, "*") == true + + statements := [ statement | statement := replace_if_needed(policy.Statement[_]) ] + expected := object.union(policy, {"Statement": statements}) + traverse = "policy" + retVal := { "Id": sqs.id, "ReplaceType": "edit", "CodeType": "document", "Traverse": traverse, "Attribute": "policy", "AttributeDataType": "base64", "Expected": base64.encode(json.marshal(expected))} +} + +json_unmarshal(s) = result { + s == null + result := json.unmarshal("{}") +} + +json_unmarshal(s) = result { + s != null + result := json.unmarshal(s) +} + +replace_if_needed(statement) = value { + check_role(statement, "*") == true + value := object.union(statement, { "Principal": "##principal##" }) +} + +replace_if_needed(statement) = value { + not check_role(statement, "*") + value := statement +} + +check_role(s, p) = true { + s.Principal == p +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/aws/aws_sqs_queue/sqsSseDisabled.rego b/pkg/policies/opa/rego/aws/aws_sqs_queue/sqsSseDisabled.rego new file mode 100755 index 000000000..eca11e516 --- /dev/null +++ b/pkg/policies/opa/rego/aws/aws_sqs_queue/sqsSseDisabled.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}sqsSseDisabled[retVal] { + sqs := input.aws_sqs_queue[_] + check_empty(sqs.config.kms_master_key_id) + traverse = "kms_master_key_id" + retVal := { "Id": sqs.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "kms_master_key_id", "AttributeDataType": "string", "Expected": "" } +} + +check_empty(key) { + key == null +} +check_empty(key) { + key == "" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_application_gateway/accurics.azure.NS.147.json b/pkg/policies/opa/rego/azure/azurerm_application_gateway/accurics.azure.NS.147.json new file mode 100755 index 000000000..30a41c9be --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_application_gateway/accurics.azure.NS.147.json @@ -0,0 +1,12 @@ +{ + "name": "reme_appGatewayWAFEnabled", + "file": "appGatewayWAFEnabled.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure Azure Application Gateway Web application firewall (WAF) is enabled", + "referenceId": "accurics.azure.NS.147", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_application_gateway/appGatewayWAFEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_application_gateway/appGatewayWAFEnabled.rego new file mode 100755 index 000000000..7f0f2f584 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_application_gateway/appGatewayWAFEnabled.rego @@ -0,0 +1,30 @@ +package accurics + +{{.prefix}}appGatewayWAFEnabled[retVal] { + ag := input.azurerm_application_gateway[_] + ag.type = "azurerm_application_gateway" +
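+ # This first rule covers gateways that define no waf_configuration block at all; the rc
+ # value below is the base64-encoded remediation block, which (decoded) is roughly:
+ #   { "waf_configuration": { "enabled": true, "firewall_mode": "Detection", "rule_set_type": "OWASP" } }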
object.get(ag.config, "waf_configuration", "undefined") == "undefined" + rc = "ewogICJ3YWZfY29uZmlndXJhdGlvbiI6IHsKICAgICJlbmFibGVkIjogdHJ1ZSwKICAgICJmaXJld2FsbF9tb2RlIjogIkRldGVjdGlvbiIsCiAgICAicnVsZV9zZXRfdHlwZSI6ICJPV0FTUCIKICB9Cn0=" + retVal := { "Id": ag.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "waf_configuration", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}appGatewayWAFEnabled[retVal] { + ag := input.azurerm_application_gateway[_] + ag.type = "azurerm_application_gateway" + object.get(ag.config, "waf_configuration", "undefined") != "undefined" + count(ag.config.waf_configuration) <= 0 + rc = "ewogICJ3YWZfY29uZmlndXJhdGlvbiI6IHsKICAgICJlbmFibGVkIjogdHJ1ZSwKICAgICJmaXJld2FsbF9tb2RlIjogIkRldGVjdGlvbiIsCiAgICAicnVsZV9zZXRfdHlwZSI6ICJPV0FTUCIKICB9Cn0=" + retVal := { "Id": ag.id, "ReplaceType": "add", "CodeType": "block", "Traverse": "", "Attribute": "waf_configuration", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}appGatewayWAFEnabled[retVal] { + ag := input.azurerm_application_gateway[_] + ag.type = "azurerm_application_gateway" + object.get(ag.config, "waf_configuration", "undefined") != "undefined" + count(ag.config.waf_configuration) > 0 + some i + waf_config := ag.config.waf_configuration[i] + object.get(waf_config, "enabled", "undefined") == false + traverse := sprintf("waf_configuration[%d].enabled", [i]) + retVal := { "Id": ag.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "waf_configuration.enabled", "AttributeDataType": "boolean", "Expected": true, "Actual": waf_config.enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.AKS.3.json b/pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.AKS.3.json new file mode 100755 index 000000000..ffbb52b7d --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.AKS.3.json @@ -0,0 +1,12 @@ +{ + "name": "reme_containerRegistryResourceLock", + "file": "containerRegistryResourceLock.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure Container Registry has locks", + "referenceId": "accurics.azure.AKS.3", + "category": "Azure Container Services", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.EKM.164.json b/pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.EKM.164.json new file mode 100755 index 000000000..6e3bc0b93 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_container_registry/accurics.azure.EKM.164.json @@ -0,0 +1,12 @@ +{ + "name": "reme_containerRegistryAdminEnabled", + "file": "containerRegistryAdminEnabled.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that admin user is disabled for Container Registry", + "referenceId": "accurics.azure.EKM.164", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryAdminEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryAdminEnabled.rego new file mode 100755 index 000000000..c8d778ea5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryAdminEnabled.rego @@ -0,0 +1,8 @@ +package accurics + 
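+# Flags container registries that leave the admin user enabled. A minimal, hypothetical
+# Terraform example that this rule would report (resource and names are placeholders only,
+# not part of the test suite):
+#
+#   resource "azurerm_container_registry" "example" {
+#     name                = "exampleacr"
+#     resource_group_name = "example-rg"
+#     location            = "East US"
+#     sku                 = "Basic"
+#     admin_enabled       = true   # reported; the suggested edit sets this to false
+#   }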
+{{.prefix}}containerRegistryAdminEnabled[retVal] { + acr := input.azurerm_container_registry[_] + acr.config.admin_enabled == true + traverse = "admin_enabled" + retVal := { "Id": acr.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "bool", "Expected": false, "Actual": acr.config.admin_enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryResourceLock.rego b/pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryResourceLock.rego new file mode 100755 index 000000000..fffa7f7c3 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_container_registry/containerRegistryResourceLock.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}containerRegistryResourceLock[retVal] { + registry := input.azurerm_container_registry[_] + registry_input := input + registry.type == "azurerm_container_registry" + + not resourceLockExist(registry, registry_input) + + rc = "cmVzb3VyY2UgImF6dXJlcm1fbWFuYWdlbWVudF9sb2NrIiAiIyNyZXNvdXJjZV9uYW1lIyMiIHsKICBuYW1lICAgICAgID0gImF6dXJlcm1fbWFuYWdlbWVudF9sb2NrLiMjcmVzb3VyY2VfbmFtZSMjIgogIHNjb3BlICAgICAgPSBhenVyZXJtX2NvbnRhaW5lcl9yZWdpc3RyeS4jI3Jlc291cmNlX25hbWUjIy5pZAogIGxvY2tfbGV2ZWwgPSAiQ2FuTm90RGVsZXRlIgogICMgYXp1cmVybV9tYW5hZ2VtZW50X2xvY2sgZG9lcyBub3QgY29udGFpbiB0YWdzLCBhbmQgd2UgY2Fubm90IG1hdGNoIHRoZW0gbm90IHVubGVzcyB0aGUgcmVzb3VyY2UgaXMgZGVwbG95ZWQgaW4gdGhlIGNsb3VkLgogIG5vdGVzICAgICAgPSAiQ2Fubm90IERlbGV0ZSBSZXNvdXJjZSIKfQ==" + decode_rc = base64.decode(rc) + replaced_registry_id := replace(decode_rc, "##resource_name##", registry.name) + + traverse = "" + retVal := { "Id": registry.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": base64.encode(replaced_registry_id), "Actual": null } +} + +resourceLockExist(registry, registry_input) = exists { + resource_lock_exist_set := { resource_lock_id | input.azurerm_management_lock[i].type == "azurerm_management_lock"; resource_lock_id := input.azurerm_management_lock[i].config.scope } + resource_lock_exist_set[registry.id] + exists = true +} else = exists { + resource_lock_exist_set := { resource_id | input.azurerm_management_lock[i].type == "azurerm_management_lock"; resource_id := input.azurerm_management_lock[i].config.name } + registry_name := sprintf("azurerm_container_registry.%s", [registry.name]) + resource_lock_exist_set[registry_name] + exists = true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.164.json b/pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.164.json new file mode 100755 index 000000000..a1c581dfa --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.164.json @@ -0,0 +1,12 @@ +{ + "name": "reme_keyVaultSoftDeleteEnabled", + "file": "keyVaultSoftDeleteEnabled.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure the key vault is recoverable - enable \"Soft Delete\" setting for a Key Vault", + "referenceId": "accurics.azure.EKM.164", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.20.json b/pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.20.json new file mode 100755 index 000000000..d3f39083b --- /dev/null +++ 
b/pkg/policies/opa/rego/azure/azurerm_key_vault/accurics.azure.EKM.20.json @@ -0,0 +1,12 @@ +{ + "name": "reme_keyVaultAuditLoggingEnabled", + "file": "keyVaultAuditLoggingEnabled.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that logging for Azure KeyVault is 'Enabled'", + "referenceId": "accurics.azure.EKM.20", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultAuditLoggingEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultAuditLoggingEnabled.rego new file mode 100755 index 000000000..f8e2aa63e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultAuditLoggingEnabled.rego @@ -0,0 +1,23 @@ +package accurics + +{{.prefix}}keyVaultAuditLoggingEnabled[retVal] { + kv := input.azurerm_key_vault[_] + kv.type == "azurerm_key_vault" + not loggingExist(kv) + rc = "cmVzb3VyY2UgImF6dXJlcm1fbW9uaXRvcl9kaWFnbm9zdGljX3NldHRpbmciICIjI3Jlc291cmNlX25hbWUjIyIgewogIG5hbWUgICAgICAgICAgICAgICA9ICJhenVyZXJtX2tleV92YXVsdC4jI3Jlc291cmNlX25hbWUjIy5sb2ciCiAgdGFyZ2V0X3Jlc291cmNlX2lkID0gYXp1cmVybV9rZXlfdmF1bHQuIyNyZXNvdXJjZV9uYW1lIyMuaWQKICBzdG9yYWdlX2FjY291bnRfaWQgPSAjI3N0b3JhZ2VfYWNjb3VudF9pZCMjCiAgbG9nIHsKICAgIGNhdGVnb3J5ID0gIkF1ZGl0RXZlbnQiCiAgICBlbmFibGVkICA9IHRydWUKICB9Cn0=" + decode_rc = base64.decode(rc) + replaced_vpc_id := replace(decode_rc, "##resource_name##", kv.name) + traverse = "" + retVal := { "Id": kv.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": base64.encode(replaced_vpc_id), "Actual": null } +} + +loggingExist(key_vault) = exists { + log_set := { key_vault_id | key_vault_id := input.azurerm_monitor_diagnostic_setting[i].config.target_resource_id } + log_set[key_vault.id] + exists = true +} else = exists { + log_set := { resource_name | resource_name := input.azurerm_monitor_diagnostic_setting[i].name } + log_name := sprintf("azurerm_key_vault.%s.log", [key_vault.name]) + log_set[log_name] + exists = true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultSoftDeleteEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultSoftDeleteEnabled.rego new file mode 100755 index 000000000..141b385d0 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault/keyVaultSoftDeleteEnabled.rego @@ -0,0 +1,9 @@ +package accurics + +{{.prefix}}keyVaultSoftDeleteEnabled[retVal] { + kv := input.azurerm_key_vault[_] + kv.type == "azurerm_key_vault" + traverse = "soft_delete_enabled" + kv.config.soft_delete_enabled == false + retVal := { "Id": kv.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "bool", "Expected": true, "Actual": kv.config.soft_delete_enabled } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault_key/accurics.azure.EKM.25.json b/pkg/policies/opa/rego/azure/azurerm_key_vault_key/accurics.azure.EKM.25.json new file mode 100755 index 000000000..0dee8bd34 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault_key/accurics.azure.EKM.25.json @@ -0,0 +1,12 @@ +{ + "name": "reme_checkKeyExpirationIsSet", + "file": "checkKeyExpirationIsSet.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that the expiration date is set on all keys", + "referenceId": "accurics.azure.EKM.25", + "category": "Key 
Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault_key/checkKeyExpirationIsSet.rego b/pkg/policies/opa/rego/azure/azurerm_key_vault_key/checkKeyExpirationIsSet.rego new file mode 100755 index 000000000..0a2751d85 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault_key/checkKeyExpirationIsSet.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}checkKeyExpirationIsSet[retVal] { + vault_key := input.azurerm_key_vault_key[_] + vault_key.config.expiration_date == null + traverse = "expiration_date" + expected := getExpiryRfc3339(time.now_ns()) + retVal := { "Id": vault_key.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "expiration_date", "AttributeDataType": "string", "Expected": expected, "Actual": null } +} + +{{.prefix}}checkKeyExpirationIsSet[retVal] { + vault_key := input.azurerm_key_vault_key[_] + vault_key.config.expiration_date != null + now := time.now_ns() + expiration := time.parse_rfc3339_ns(vault_key.config.expiration_date) + (expiration - now) > (2 * 365 * 24 * 60 * 60 * 1000000000) # 2 years + traverse = "expiration_date" + expected := getExpiryRfc3339(now) + retVal := { "Id": vault_key.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "expiration_date", "AttributeDataType": "string", "Expected": expected, "Actual": null } +} + +getExpiryRfc3339(curtime) = expiry { + expiryNs := time.add_date(curtime, 1, 0, 1) + dateAr := time.date(expiryNs) + timeAr := time.clock(expiryNs) + expiry := sprintf("%d-%d-%dT%d:%d:%dZ", array.concat(dateAr, timeAr)) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault_secret/accurics.azure.EKM.26.json b/pkg/policies/opa/rego/azure/azurerm_key_vault_secret/accurics.azure.EKM.26.json new file mode 100755 index 000000000..c0412daf9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault_secret/accurics.azure.EKM.26.json @@ -0,0 +1,12 @@ +{ + "name": "reme_checkSecretExpirationIsSet", + "file": "checkSecretExpirationIsSet.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that the expiration date is set on all secrets", + "referenceId": "accurics.azure.EKM.26", + "category": "Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_key_vault_secret/checkSecretExpirationIsSet.rego b/pkg/policies/opa/rego/azure/azurerm_key_vault_secret/checkSecretExpirationIsSet.rego new file mode 100755 index 000000000..1be813117 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_key_vault_secret/checkSecretExpirationIsSet.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}checkSecretExpirationIsSet[retVal] { + vault_key := input.azurerm_key_vault_secret[_] + vault_key.config.expiration_date == null + traverse = "expiration_date" + expected := getExpiryRfc3339(time.now_ns()) + retVal := { "Id": vault_key.id, "ReplaceType": "add", "CodeType": "attribute", "Traverse": traverse, "Attribute": "expiration_date", "AttributeDataType": "string", "Expected": expected, "Actual": null } +} + +{{.prefix}}checkSecretExpirationIsSet[retVal] { + vault_key := input.azurerm_key_vault_secret[_] + vault_key.config.expiration_date != null + now := time.now_ns() + expiration := time.parse_rfc3339_ns(vault_key.config.expiration_date) + (expiration - now) > (2 * 365 * 24 * 60 * 60 * 1000000000) # 2 years + traverse = "expiration_date" + expected := getExpiryRfc3339(now) + retVal := { 
"Id": vault_key.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "expiration_date", "AttributeDataType": "string", "Expected": expected, "Actual": null } +} + +getExpiryRfc3339(curtime) = expiry { + expiryNs := time.add_date(curtime, 1, 0, 1) + dateAr := time.date(expiryNs) + timeAr := time.clock(expiryNs) + expiry := sprintf("%d-%d-%dT%d:%d:%dZ", array.concat(dateAr, timeAr)) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/KubeDashboardDisabled.rego b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/KubeDashboardDisabled.rego new file mode 100755 index 000000000..4780c50b5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/KubeDashboardDisabled.rego @@ -0,0 +1,8 @@ +package accurics + +KubeDashboardDisabled[api.id]{ + api := input.azurerm_kubernetes_cluster[_] + var := api.config.addon_profile[_] + data := var.kube_dashboard[_] + not data.enabled == false +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.382.json b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.382.json new file mode 100755 index 000000000..557554d9e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.382.json @@ -0,0 +1,10 @@ +{ + "name": "networkPolicyEnabled", + "file": "networkPolicyEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure AKS cluster has Network Policy configured.", + "referenceId": "accurics.azure.NS.382", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.383.json b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.383.json new file mode 100755 index 000000000..5d68d0ff0 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/accurics.azure.NS.383.json @@ -0,0 +1,10 @@ +{ + "name": "KubeDashboardDisabled", + "file": "KubeDashboardDisabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure Kube Dashboard is disabled", + "referenceId": "accurics.azure.NS.383", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/networkPolicyEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/networkPolicyEnabled.rego new file mode 100755 index 000000000..a9d838380 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_kubernetes_cluster/networkPolicyEnabled.rego @@ -0,0 +1,8 @@ +package accurics + +networkPolicyEnabled[api.id]{ + api := input.azurerm_kubernetes_cluster[_] + var := api.config.network_profile[_] + not var.network_policy == "azure" + not var.network_policy == "calico" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_watcher/accurics.azure.NS.387.json b/pkg/policies/opa/rego/azure/azurerm_network_watcher/accurics.azure.NS.387.json new file mode 100755 index 000000000..24ea25ff7 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_watcher/accurics.azure.NS.387.json @@ -0,0 +1,12 @@ +{ + "name": "reme_networkWatcherExist", + "file": "networkWatcherExist.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Network Watcher is 'Enabled'", + "referenceId": "accurics.azure.NS.387", + "category": "Network Security", + "version": 2 +} \ No 
newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_watcher/networkWatcherExist.rego b/pkg/policies/opa/rego/azure/azurerm_network_watcher/networkWatcherExist.rego new file mode 100755 index 000000000..21085e73c --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_watcher/networkWatcherExist.rego @@ -0,0 +1,15 @@ +package accurics + +{{.prefix}}networkWatcherExist[retVal] { + not input.azurerm_network_watcher + rc = "cmVzb3VyY2UgImF6dXJlcm1fbmV0d29ya193YXRjaGVyIiAibmV0d29ya193YXRjaGVyIiB7CiAgbmFtZSAgICAgICAgICAgICAgICA9ICJuZXR3b3JrX3dhdGNoZXIiCiAgbG9jYXRpb24gICAgICAgICAgICA9ICMjcmVzb3VyY2VfZ3JvdXBfbG9jYXRpb24jIwogIHJlc291cmNlX2dyb3VwX25hbWUgPSAjI3Jlc291cmNlX2dyb3VwX25hbWUjIwp9" + traverse = "" + retVal := { "Id": "network_watcher_does_not_exist", "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +{{.prefix}}networkWatcherExist[retVal] { + count(input.azurerm_network_watcher) <= 0 + rc = "cmVzb3VyY2UgImF6dXJlcm1fbmV0d29ya193YXRjaGVyIiAibmV0d29ya193YXRjaGVyIiB7CiAgbmFtZSAgICAgICAgICAgICAgICA9ICJuZXR3b3JrX3dhdGNoZXIiCiAgbG9jYXRpb24gICAgICAgICAgICA9ICMjcmVzb3VyY2VfZ3JvdXBfbG9jYXRpb24jIwogIHJlc291cmNlX2dyb3VwX25hbWUgPSAjI3Jlc291cmNlX2dyb3VwX25hbWUjIwp9" + traverse = "" + retVal := { "Id": "network_watcher_does_not_exist", "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.11.json b/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.11.json new file mode 100755 index 000000000..31f0e88c9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.11.json @@ -0,0 +1,12 @@ +{ + "name": "reme_networkWatcherEnabled", + "file": "networkWatcherCheck.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Enable Network Watcher for Azure subscriptions. Network diagnostic and visualization tools available with Network Watcher help users understand, diagnose, and gain insights to the network in Azure.", + "referenceId": "accurics.azure.NS.11", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.342.json b/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.342.json new file mode 100755 index 000000000..c28bb3117 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/accurics.azure.NS.342.json @@ -0,0 +1,12 @@ +{ + "name": "reme_logRetensionGraterThan90Days", + "file": "networkWatcherCheck.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Network Security Group Flow Logs should be enabled and the retention period is set to greater than or equal to 90 days. Flow logs enable capturing information about IP traffic flowing in and out of network security groups. 
Logs can be used to check for anomalies and give insight into suspected breaches.", + "referenceId": "accurics.azure.NS.342", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/networkWatcherCheck.rego b/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/networkWatcherCheck.rego new file mode 100755 index 000000000..c3b4a3117 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_network_watcher_flow_log/networkWatcherCheck.rego @@ -0,0 +1,12 @@ +package accurics + +{{.prefix}}logRetensionGraterThan90Days[log_object.id] { + log_object := input.azurerm_network_watcher_flow_log[_] + retention_policy := log_object.config.retention_policy[_] + retention_policy.days < 90 +} + +{{.prefix}}networkWatcherEnabled[log_object.id] { + log_object := input.azurerm_network_watcher_flow_log[_] + log_object.config.enabled == false +} diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json deleted file mode 100755 index 3559154bd..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.151.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "reme_connectionThrottling", - "file": "connectionThrottling.rego", - "templateArgs": { - "prefix": "reme_" - }, - "severity": "MEDIUM", - "description": "Ensure server parameter 'connection_throttling' is set to 'ON' for PostgreSQL Database Server", - "referenceId": "accurics.azure.LOG.151", - "category": "Logging", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json deleted file mode 100755 index e11c479d8..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.152.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "reme_logConnections", - "file": "logConnections.rego", - "templateArgs": { - "prefix": "reme_" - }, - "severity": "MEDIUM", - "description": "Ensure server parameter 'log_connections' is set to 'ON' for PostgreSQL Database Server", - "referenceId": "accurics.azure.LOG.152", - "category": "Logging", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json deleted file mode 100755 index 72c6c8852..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/accurics.azure.LOG.155.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "name": "reme_logRetention", - "file": "logRetention.rego", - "templateArgs": { - "prefix": "reme_" - }, - "severity": "MEDIUM", - "description": "Ensure server parameter 'log_retention_days' is greater than 3 days for PostgreSQL Database Server", - "referenceId": "accurics.azure.LOG.155", - "category": "Logging", - "version": 2 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego deleted file mode 100755 index 54beb31fd..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/connectionThrottling.rego +++ /dev/null @@ -1,10 +0,0 @@ -package accurics - 
-{{.prefix}}connectionThrottling[retVal] { - psql_config := input.azurerm_postgresql_configuration[_] - psql_config.config.name == "connection_throttling" - psql_config.config.value != "on" - - traverse = "value" - retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "on", "Actual": psql_config.config.value } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego deleted file mode 100755 index dfc2dc516..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logConnections.rego +++ /dev/null @@ -1,10 +0,0 @@ -package accurics - -{{.prefix}}logConnections[retVal] { - psql_config := input.azurerm_postgresql_configuration[_] - psql_config.config.name == "log_connections" - psql_config.config.value != "on" - - traverse = "value" - retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "on", "Actual": psql_config.config.value } -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego b/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego deleted file mode 100755 index ce6644b5e..000000000 --- a/pkg/policies/opa/rego/azure/azurerm_postgresql_configuration/logRetention.rego +++ /dev/null @@ -1,14 +0,0 @@ -package accurics - -{{.prefix}}logRetention[retVal] { - psql_config := input.azurerm_postgresql_configuration[_] - psql_config.config.name == "log_retention_days" - not checkValid(psql_config.config.value) - - traverse = "value" - retVal := { "Id": psql_config.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": "value", "AttributeDataType": "string", "Expected": "4", "Actual": psql_config.config.value } -} - -checkValid(val) = true { - val == ["4", "5", "6", "7"][_] -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.13.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.13.json new file mode 100755 index 000000000..9320b6859 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.13.json @@ -0,0 +1,12 @@ +{ + "name": "reme_redisCacheNoUpdatePatchSchedule", + "file": "redisCacheNoUpdatePatchSchedule.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Redis is updated regularly with security and operational updates.\n\nNote this feature is only available to Premium tier Redis Caches.", + "referenceId": "accurics.azure.NS.13", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.166.json b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.166.json new file mode 100755 index 000000000..d41afddf9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/accurics.azure.NS.166.json @@ -0,0 +1,12 @@ +{ + "name": "reme_allowLessHosts", + "file": "allowLessHosts.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure there are no firewall rules allowing Redis Cache access for a large number of source IPs", + "referenceId": "accurics.azure.NS.166", + 
"category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/allowLessHosts.rego b/pkg/policies/opa/rego/azure/azurerm_redis_cache/allowLessHosts.rego new file mode 100755 index 000000000..a5a8259f0 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/allowLessHosts.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}allowLessHosts[retVal] { + redis := input.azurerm_redis_firewall_rule[_] + sHosts := calculateHosts(redis.config.start_ip) + eHosts := calculateHosts(redis.config.end_ip) + abs(eHosts - sHosts) >= 256 + rc := "ewogICJzdGFydF9pcCI6ICI8c3RhcnRfaXA+IiwKICAiZW5kX2lwIjogIjxlbmRfaXA+Igp9" + retVal := { "Id": redis.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": "", "Attribute": "", "AttributeDataType": "base64", "Expected": rc, "Actual": { "start_ip": redis.config.start_ip, "end_ip": redis.config.end_ip } } +} + +calculateHosts(val) = ans { + ipVals := split(val, ".") + # 2^24 = 16777216, 2^16 = 65536, 2^8 = 256 + # no of hosts in IP p.q.r.s = (p * 2^24) + (q * 2^16) + (r * 2^8) + s + ans = (to_number(ipVals[0]) * 16777216) + (to_number(ipVals[1]) * 65536) + (to_number(ipVals[2]) * 256) + to_number(ipVals[3]) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_redis_cache/redisCacheNoUpdatePatchSchedule.rego b/pkg/policies/opa/rego/azure/azurerm_redis_cache/redisCacheNoUpdatePatchSchedule.rego new file mode 100755 index 000000000..3cbe98d10 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_redis_cache/redisCacheNoUpdatePatchSchedule.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}redisCacheNoUpdatePatchSchedule[retVal] { + redis := input.azurerm_redis_cache[_] + count(redis.config.patch_schedule) <= 0 + emptyPatchSchedule(redis) == true + rc = "ewogICJwYXRjaF9zY2hlZHVsZSI6IHsKICAgICJkYXlfb2Zfd2VlayI6ICJTdW5kYXkiLAogICAgInN0YXJ0X2hvdXJfdXRjIjogMAogIH0KfQ==" + traverse = "patch_schedule" + retVal := { "Id": redis.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "base64", "Expected": rc, "Actual": redis.config.patch_schedule } +} + +emptyPatchSchedule(redis) = true { + not redis.config.patch_schedule +} +emptyPatchSchedule(redis) = true { + count(redis.config.patch_schedule) <= 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_resource_group/accurics.azure.NS.272.json b/pkg/policies/opa/rego/azure/azurerm_resource_group/accurics.azure.NS.272.json new file mode 100755 index 000000000..76505b6d2 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_resource_group/accurics.azure.NS.272.json @@ -0,0 +1,12 @@ +{ + "name": "reme_resourceGroupLock", + "file": "resourceGroupLock.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "LOW", + "description": "Ensure that Azure Resource Group has resource lock enabled", + "referenceId": "accurics.azure.NS.272", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_resource_group/resourceGroupLock.rego b/pkg/policies/opa/rego/azure/azurerm_resource_group/resourceGroupLock.rego new file mode 100755 index 000000000..2ab392054 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_resource_group/resourceGroupLock.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}resourceGroupLock[retVal] { + resource_group := input.azurerm_resource_group[_] + registry_input := input + resource_group.type == "azurerm_resource_group" + 
+ not resourceLockExist(resource_group) + + rc = "cmVzb3VyY2UgImF6dXJlcm1fbWFuYWdlbWVudF9sb2NrIiAiIyNyZXNvdXJjZV9uYW1lIyMiIHsKICBuYW1lICAgICAgID0gImF6dXJlcm1fcmVzb3VyY2VfZ3JvdXAuIyNyZXNvdXJjZV9uYW1lIyMiCiAgc2NvcGUgICAgICA9IGF6dXJlcm1fcmVzb3VyY2VfZ3JvdXAuIyNyZXNvdXJjZV9uYW1lIyMuaWQKICBsb2NrX2xldmVsID0gIkNhbk5vdERlbGV0ZSIKICAjIGF6dXJlcm1fbWFuYWdlbWVudF9sb2NrIGRvZXMgbm90IGNvbnRhaW4gdGFncywgYW5kIHdlIGNhbm5vdCBtYXRjaCB0aGVtIG5vdCB1bmxlc3MgdGhlIHJlc291cmNlIGlzIGRlcGxveWVkIGluIHRoZSBjbG91ZC4KICBub3RlcyAgICAgID0gIkNhbm5vdCBEZWxldGUgUmVzb3VyY2UiCn0=" + decode_rc = base64.decode(rc) + replaced_resource_group_id := replace(decode_rc, "##resource_name##", resource_group.name) + + traverse = "" + retVal := { "Id": resource_group.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": "", "AttributeDataType": "resource", "Expected": base64.encode(replaced_resource_group_id), "Actual": null } +} + +resourceLockExist(resource_group) = exists { + resource_lock_exist_set := { resource_lock_id | resource_lock_id := input.azurerm_management_lock[i].config.scope } + resource_lock_exist_set[resource_group.id] + exists = true +} else = exists { + resource_lock_exist_set := { resource_id | resource_id := input.azurerm_management_lock[i].config.name } + resource_group_name := sprintf("azurerm_resource_group.%s", [resource_group.name]) + resource_lock_exist_set[resource_group_name] + exists = true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_role_assignment/accurics.azure.IAM.388.json b/pkg/policies/opa/rego/azure/azurerm_role_assignment/accurics.azure.IAM.388.json new file mode 100755 index 000000000..783742eb9 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_role_assignment/accurics.azure.IAM.388.json @@ -0,0 +1,12 @@ +{ + "name": "reme_checkGuestUser", + "file": "checkGuestUser.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that there are no guest users", + "referenceId": "accurics.azure.IAM.388", + "category": "Identity and Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_role_assignment/checkGuestUser.rego b/pkg/policies/opa/rego/azure/azurerm_role_assignment/checkGuestUser.rego new file mode 100755 index 000000000..092ed0c87 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_role_assignment/checkGuestUser.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}checkGuestUser[role_assignment.id] { + role_assignment := input.azurerm_role_assignment[_] + role_assignment.config.role_definition_name == "Guest" +} diff --git a/pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/accurics.azure.OPS.349.json b/pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/accurics.azure.OPS.349.json new file mode 100755 index 000000000..93539ac0e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/accurics.azure.OPS.349.json @@ -0,0 +1,12 @@ +{ + "name": "reme_securityCenterPrincingTier", + "file": "securityCenterPrincingTier.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that standard pricing tiers are selected", + "referenceId": "accurics.azure.OPS.349", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/securityCenterPrincingTier.rego 
b/pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/securityCenterPrincingTier.rego new file mode 100755 index 000000000..27a3f9fb8 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_security_center_subscription_pricing/securityCenterPrincingTier.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}securityCenterPrincingTier[subscription.id] { + subscription := input.azurerm_security_center_subscription_pricing[_] + subscription.config.tier != "Standard" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/accurics.azure.IAM.137.json b/pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/accurics.azure.IAM.137.json new file mode 100755 index 000000000..1ed03fc76 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/accurics.azure.IAM.137.json @@ -0,0 +1,12 @@ +{ + "name": "reme_sqlServerADPredictableAccount", + "file": "sqlServerADPredictableAccount.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Avoid using names like 'Admin' for an Azure SQL Server Active Directory Administrator account", + "referenceId": "accurics.azure.IAM.137", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/sqlServerADPredictableAccount.rego b/pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/sqlServerADPredictableAccount.rego new file mode 100755 index 000000000..c5f4b8301 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_active_directory_administrator/sqlServerADPredictableAccount.rego @@ -0,0 +1,12 @@ +package accurics + +{{.prefix}}sqlServerADPredictableAccount[retVal] { + known_user = { "azure_superuser", "azure_pg_admin", "admin", "administrator", "root", "guest", "public" } + sql_server := input.azurerm_sql_active_directory_administrator[_] + sql_server.type == "azurerm_sql_active_directory_administrator" + user := lower(sql_server.config.login) + known_user[user] + uuid_user = uuid.rfc4122(sql_server.config.login) + traverse := "login" + retVal := { "Id": sql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": uuid_user, "Actual": sql_server.config.login } +} diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.169.json b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.169.json new file mode 100755 index 000000000..c5ee6dfb0 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/accurics.azure.NS.169.json @@ -0,0 +1,12 @@ +{ + "name": "reme_moreHostsAllowed", + "file": "moreHostsAllowed.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Restrict Azure SQL Server accessibility to a minimal address range", + "referenceId": "accurics.azure.NS.169", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/moreHostsAllowed.rego b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/moreHostsAllowed.rego new file mode 100755 index 000000000..eb4b580cb --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_firewall_rule/moreHostsAllowed.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}moreHostsAllowed[retVal] { + sql_rule := input.azurerm_sql_firewall_rule[_] + 
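+ # Convert both endpoints of the firewall rule's IP range into absolute host counts and flag
+ # ranges spanning 256 or more addresses. Hypothetical example: 10.0.0.0 maps to 167772160
+ # and 10.0.1.0 maps to 167772416, a span of 256, so such a rule would be reported.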
sHosts := calculateHosts(sql_rule.config.start_ip_address) + eHosts := calculateHosts(sql_rule.config.end_ip_address) + abs(eHosts - sHosts) >= 256 + rc := "ewogICJzdGFydF9pcF9hZGRyZXNzIjogIjxzdGFydF9pcF9hZGRyZXNzPiIsCiAgImVuZF9pcF9hZGRyZXNzIjogIjxlbmRfaXBfYWRkcmVzcz4iCn0=" + retVal := { "Id": sql_rule.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": "", "Attribute": "", "AttributeDataType": "base64", "Expected": rc, "Actual": { "start_ip_address": sql_rule.config.start_ip_address, "end_ip_address": sql_rule.config.end_ip_address } } +} + +calculateHosts(val) = ans { + ipVals := split(val, ".") + # 2^24 = 16777216, 2^16 = 65536, 2^8 = 256 + # no of hosts in IP p.q.r.s = (p * 2^24) + (q * 2^16) + (r * 2^8) + s + ans = (to_number(ipVals[0]) * 16777216) + (to_number(ipVals[1]) * 65536) + (to_number(ipVals[2]) * 256) + to_number(ipVals[3]) +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.10.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.10.json new file mode 100755 index 000000000..834c84439 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.10.json @@ -0,0 +1,12 @@ +{ + "name": "reme_sqlServerADAdminConfigured", + "file": "sqlServerADAdminConfigured.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that Azure Active Directory Admin is configured for SQL Server", + "referenceId": "accurics.azure.IAM.10", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.138.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.138.json new file mode 100755 index 000000000..6270c62f0 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.IAM.138.json @@ -0,0 +1,12 @@ +{ + "name": "reme_sqlServerPredictableAccount", + "file": "sqlServerPredictableAccount.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Avoid using names like 'Admin' for an Azure SQL Server admin account login", + "referenceId": "accurics.azure.IAM.138", + "category": "Identity and Access Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.LOG.356.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.LOG.356.json new file mode 100755 index 000000000..142f604a6 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.LOG.356.json @@ -0,0 +1,10 @@ +{ + "name": "sqlAuditingRetention", + "file": "sqlAuditingRetention.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that 'Auditing' Retention is 'greater than 90 days' for SQL servers.", + "referenceId": "accurics.azure.LOG.356", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.MON.354.json b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.MON.354.json new file mode 100755 index 000000000..00c00b181 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/accurics.azure.MON.354.json @@ -0,0 +1,10 @@ +{ + "name": "sqlServerAuditingEnabled", + "file": "sqlServerAuditingEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that 'Auditing' is set to 'On' for SQL servers", + "referenceId": 
"accurics.azure.MON.354", + "category": "Monitoring", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlAuditingRetention.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlAuditingRetention.rego new file mode 100755 index 000000000..cab3fc64f --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlAuditingRetention.rego @@ -0,0 +1,7 @@ +package accurics + +sqlAuditingRetention[api.id]{ + api := input.azurerm_sql_server[_] + var := api.config.extended_auditing_policy[_] + var.retention_in_days < 90 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego new file mode 100755 index 000000000..ed63dd4ee --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerADAdminConfigured.rego @@ -0,0 +1,21 @@ +package accurics + +{{.prefix}}sqlServerADAdminConfigured[retVal] { + sql_server := input.azurerm_sql_server[_] + sql_server.type == "azurerm_sql_server" + key := concat("-", [sql_server.config.resource_group_name, sql_server.config.name]) + not adAdminExist(key) + rc = "ZGF0YSAiYXp1cmVybV9jbGllbnRfY29uZmlnIiAiY3VycmVudCIge30KCnJlc291cmNlICJhenVyZXJtX3NxbF9hY3RpdmVfZGlyZWN0b3J5X2FkbWluaXN0cmF0b3IiICIjI3Jlc291cmNlX25hbWUjIyIgewogIHNlcnZlcl9uYW1lICAgICAgICAgPSBhenVyZXJtX3NxbF9zZXJ2ZXIuIyNyZXNvdXJjZV9uYW1lIyMubmFtZQogIHJlc291cmNlX2dyb3VwX25hbWUgPSBhenVyZXJtX3Jlc291cmNlX2dyb3VwLiMjcmVzb3VyY2VfbmFtZSMjLm5hbWUKICBsb2dpbiAgICAgICAgICAgICAgID0gInNxbGFkbWluIgogIHRlbmFudF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50LnRlbmFudF9pZAogIG9iamVjdF9pZCAgICAgICAgICAgPSBkYXRhLmF6dXJlcm1fY2xpZW50X2NvbmZpZy5jdXJyZW50Lm9iamVjdF9pZAp9" + decode_rc = base64.decode(rc) + replaced := replace(decode_rc, "##resource_name##", sql_server.name) + traverse = "" + retVal := { "Id": sql_server.id, "ReplaceType": "add", "CodeType": "resource", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "resource", "Expected": base64.encode(replaced), "Actual": null } +} + +adAdminExist(rg_servername) = exists { + ad_admin_set := { ad_id | input.azurerm_sql_active_directory_administrator[i].type == "azurerm_sql_active_directory_administrator"; ad_id := concat("-", [input.azurerm_sql_active_directory_administrator[i].config.resource_group_name, input.azurerm_sql_active_directory_administrator[i].config.server_name]) } + ad_admin_set[rg_servername] + exists = true +} else = false { + true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerAuditingEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerAuditingEnabled.rego new file mode 100755 index 000000000..24acd7db4 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerAuditingEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +sqlServerAuditingEnabled[api.id]{ + api := input.azurerm_sql_server[_] + count(api.config.extended_auditing_policy) == 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerPredictableAccount.rego b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerPredictableAccount.rego new file mode 100755 index 000000000..5d1585362 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_sql_server/sqlServerPredictableAccount.rego @@ -0,0 +1,47 @@ +package accurics + +{{.prefix}}sqlServerPredictableAccount[retVal] { + known_user = { "azure_superuser", "azure_pg_admin", 
"admin", "administrator", "root", "guest", "public" } + sql_server := input.azurerm_sql_server[_] + sql_server.type == "azurerm_sql_server" + user := lower(sql_server.config.administrator_login) + known_user[user] + uuid_user = uuid.rfc4122(sql_server.config.administrator_login) + traverse := "administrator_login" + retVal := { "Id": sql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": uuid_user, "Actual": sql_server.config.administrator_login } +} + +{{.prefix}}sqlServerPredictableAccount[retVal] { + known_user = { "azure_superuser", "azure_pg_admin", "admin", "administrator", "root", "guest", "public" } + sql_server := input.azurerm_mysql_server[_] + sql_server.type == "azurerm_mysql_server" + user := lower(sql_server.config.administrator_login) + known_user[user] + uuid_user = uuid.rfc4122(sql_server.config.administrator_login) + traverse := "administrator_login" + retVal := { "Id": sql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": uuid_user, "Actual": sql_server.config.administrator_login } +} + + +{{.prefix}}sqlServerPredictableAccount[retVal] { + known_user = { "azure_superuser", "azure_pg_admin", "admin", "administrator", "root", "guest", "public" } + sql_server := input.azurerm_postgresql_server[_] + sql_server.type == "azurerm_postgresql_server" + user := lower(sql_server.config.administrator_login) + known_user[user] + uuid_user = uuid.rfc4122(sql_server.config.administrator_login) + traverse := "administrator_login" + retVal := { "Id": sql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": uuid_user, "Actual": sql_server.config.administrator_login } +} + +{{.prefix}}sqlServerPredictableAccount[retVal] { + known_user = { "azure_superuser", "azure_pg_admin", "admin", "administrator", "root", "guest", "public" } + sql_server := input.azurerm_mssql_server[_] + sql_server.type == "azurerm_mssql_server" + user := lower(sql_server.config.administrator_login) + known_user[user] + uuid_user = uuid.rfc4122(sql_server.config.administrator_login) + traverse := "administrator_login" + retVal := { "Id": sql_server.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": uuid_user, "Actual": sql_server.config.administrator_login } +} + diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.EKM.7.json b/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.EKM.7.json new file mode 100755 index 000000000..2dd9cb18e --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.EKM.7.json @@ -0,0 +1,12 @@ +{ + "name": "reme_storageAccountEnableHttps", + "file": "storageAccountEnableHttps.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that 'Secure transfer required' is enabled for Storage Accounts", + "referenceId": "accurics.azure.EKM.7", + "category": "Encryption and Key Management", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.2.json b/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.2.json new file mode 100755 index 000000000..89040ea40 --- /dev/null +++ 
b/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.2.json @@ -0,0 +1,12 @@ +{ + "name": "reme_storageAccountTrustedMicrosoftServicesEnabled", + "file": "storageAccountTrustedMicrosoftServicesEnabled.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure 'Trusted Microsoft Services' is enabled for Storage Account access", + "referenceId": "accurics.azure.NS.2", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.4.json b/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.4.json new file mode 100755 index 000000000..bd3ca6a16 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_account/accurics.azure.NS.4.json @@ -0,0 +1,12 @@ +{ + "name": "reme_storageAccountOpenToPublic", + "file": "storageAccountOpenToPublic.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure default network access rule for Storage Accounts is not open to public", + "referenceId": "accurics.azure.NS.4", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountCheckNetworkDefaultRule.rego b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountCheckNetworkDefaultRule.rego new file mode 100755 index 000000000..b6f5098c5 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountCheckNetworkDefaultRule.rego @@ -0,0 +1,10 @@ +package accurics + +{{.prefix}}storageAccountCheckNetworkDefaultRule[retVal] { + storage_account := input.azurerm_storage_account[_] + some i + network_rule := storage_account.config.network_rules[i] + network_rule.default_action == "Allow" + traverse = sprintf("network_rules[%d].default_action", [i]) + retVal := { "Id": storage_account.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": "Deny", "Actual": network_rule.default_action } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountEnableHttps.rego b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountEnableHttps.rego new file mode 100755 index 000000000..1fedf66cf --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountEnableHttps.rego @@ -0,0 +1,8 @@ +package accurics + +{{.prefix}}storageAccountEnableHttps[retVal] { + enablehttp := input.azurerm_storage_account[_] + enablehttp.config.enable_https_traffic_only == false + traverse := "enable_https_traffic_only" + retVal := { "Id": enablehttp.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "string", "Expected": true, "Actual": enablehttp.config.enable_https_traffic_only } +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountOpenToPublic.rego b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountOpenToPublic.rego new file mode 100755 index 000000000..06ac915fa --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountOpenToPublic.rego @@ -0,0 +1,27 @@ +package accurics + +{{.prefix}}storageAccountOpenToPublic[retVal] { + storage_account := input.azurerm_storage_account[_] + some i + network_rule := storage_account.config.network_rules[i] + 
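+ # Flags any network_rules entry whose ip_rules list opens the account to all IPv4 addresses
+ # ("0.0.0.0/0"); the suggested fix rebuilds ip_rules via replace_cidr (below), blanking out
+ # the open CIDR while keeping every other entry unchanged.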
arrayContains(network_rule.ip_rules, "0.0.0.0/0") == true + expected := [ item | item := replace_cidr(storage_account.config.network_rules[i].ip_rules[_]) ] + traverse := sprintf("network_rules[%d].ip_rules", [i]) + retVal := { "Id": storage_account.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "list", "Expected": expected, "Actual": storage_account.config.network_rules[i].ip_rules } +} + +arrayContains(items, elem) = true { + items[_] = elem +} else = false { + true +} + +replace_cidr(cidr) = value { + cidr == "0.0.0.0/0" + value := "" +} + +replace_cidr(cidr) = value { + cidr != "0.0.0.0/0" + value := cidr +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountTrustedMicrosoftServicesEnabled.rego b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountTrustedMicrosoftServicesEnabled.rego new file mode 100755 index 000000000..aae9e0c74 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_account/storageAccountTrustedMicrosoftServicesEnabled.rego @@ -0,0 +1,17 @@ +package accurics + +{{.prefix}}storageAccountTrustedMicrosoftServicesEnabled[retVal] { + storage_account := input.azurerm_storage_account[_] + some i + network_rule := storage_account.config.network_rules[i] + arrayContains(network_rule.bypass, "AzureServices") == false + traverse = sprintf("network_rules[%d].bypass", [i]) + expected := array.concat(network_rule.bypass, ["AzureServices"]) + retVal := { "Id": storage_account.id, "ReplaceType": "edit", "CodeType": "attribute", "Traverse": traverse, "Attribute": traverse, "AttributeDataType": "list", "Expected": expected, "Actual": network_rule.bypass } +} + +arrayContains(items, elem) = true { + items[_] = elem +} else = false { + true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_container/accurics.azure.IAM.368.json b/pkg/policies/opa/rego/azure/azurerm_storage_container/accurics.azure.IAM.368.json new file mode 100755 index 000000000..3cce9018b --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_container/accurics.azure.IAM.368.json @@ -0,0 +1,12 @@ +{ + "name": "reme_checkStorageContainerAccess", + "file": "checkStorageContainerAccess.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Anonymous, public read access to a container and its blobs can be enabled in Azure Blob storage. 
This is only recommended if absolutely necessary.", + "referenceId": "accurics.azure.IAM.368", + "category": "Identity and Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_storage_container/checkStorageContainerAccess.rego b/pkg/policies/opa/rego/azure/azurerm_storage_container/checkStorageContainerAccess.rego new file mode 100755 index 000000000..0331f15dd --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_storage_container/checkStorageContainerAccess.rego @@ -0,0 +1,6 @@ +package accurics + +{{.prefix}}checkStorageContainerAccess[storage_container.id] { + storage_container := input.azurerm_storage_container[_] + storage_container.config.container_access_type != "private" +} diff --git a/pkg/policies/opa/rego/azure/azurerm_virtual_machine/accurics.azure.NS.18.json b/pkg/policies/opa/rego/azure/azurerm_virtual_machine/accurics.azure.NS.18.json new file mode 100755 index 000000000..346e27da1 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_virtual_machine/accurics.azure.NS.18.json @@ -0,0 +1,12 @@ +{ + "name": "reme_vmAttachedToNetwork", + "file": "vmAttachedToNetwork.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "HIGH", + "description": "Ensure that at least one Network Security Group is attached to all VMs and subnets that are public", + "referenceId": "accurics.azure.NS.18", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_virtual_machine/vmAttachedToNetwork.rego b/pkg/policies/opa/rego/azure/azurerm_virtual_machine/vmAttachedToNetwork.rego new file mode 100755 index 000000000..d7a577b84 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_virtual_machine/vmAttachedToNetwork.rego @@ -0,0 +1,7 @@ +package accurics + +{{.prefix}}vmAttachedToNetwork[vm.id] { + vm := input.azurerm_virtual_machine[_] + vm.type == "azurerm_virtual_machine" + count(object.get(vm.config, "network_interface_ids", "undefined")) <= 0 +} diff --git a/pkg/policies/opa/rego/azure/azurerm_virtual_network/accurics.azure.NS.161.json b/pkg/policies/opa/rego/azure/azurerm_virtual_network/accurics.azure.NS.161.json new file mode 100755 index 000000000..7fb2acf75 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_virtual_network/accurics.azure.NS.161.json @@ -0,0 +1,12 @@ +{ + "name": "reme_noSecurityGroupAssociated", + "file": "noSecurityGroupAssociated.rego", + "templateArgs": { + "prefix": "reme_" + }, + "severity": "MEDIUM", + "description": "Ensure that Azure Virtual Network subnet is configured with a Network Security Group", + "referenceId": "accurics.azure.NS.161", + "category": "Network Security", + "version": 2 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/azure/azurerm_virtual_network/noSecurityGroupAssociated.rego b/pkg/policies/opa/rego/azure/azurerm_virtual_network/noSecurityGroupAssociated.rego new file mode 100755 index 000000000..b9924e7a7 --- /dev/null +++ b/pkg/policies/opa/rego/azure/azurerm_virtual_network/noSecurityGroupAssociated.rego @@ -0,0 +1,30 @@ +package accurics + +{{.prefix}}noSecurityGroupAssociated[retVal] { + vn := input.azurerm_virtual_network[_] + vn.type = "azurerm_virtual_network" + object.get(vn.config, "subnet", "undefined") != "undefined" + not sgExists(vn.config) + + traverse = "subnet[0].security_group" + retVal := { "Id": vn.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "subnet.security_group", "AttributeDataType": "string", "Expected": 
"${azurerm_network_security_group..id}", "Actual": "" } +} + +{{.prefix}}noSecurityGroupAssociated[retVal] { + vn := input.azurerm_virtual_network[_] + vn.type = "azurerm_virtual_network" + object.get(vn.config, "subnet", "undefined") == "undefined" + rc = "ewogICJzdWJuZXQiOiB7CiAgICAibmFtZSI6ICJzdWJuZXQzIiwKICAgICJhZGRyZXNzX3ByZWZpeCI6ICI8Y2lkcj4iLAogICAgInNlY3VyaXR5X2dyb3VwIjogIiR7YXp1cmVybV9uZXR3b3JrX3NlY3VyaXR5X2dyb3VwLjxzZWN1cml0eV9ncm91cF9uYW1lPi5pZH0iCiAgfQp9" + traverse = "" + retVal := { "Id": vn.id, "ReplaceType": "add", "CodeType": "block", "Traverse": traverse, "Attribute": "subnet", "AttributeDataType": "base64", "Expected": rc, "Actual": null } +} + +sgExists(cfg) = true { + subs = cfg.subnet[_] + subs.security_group != "" +} + +sgExists(cfg) = true { + subs = cfg.subnet[_] + object.get(subs, "security_group", "undefined") == "undefined" +} \ No newline at end of file From 36742e591daccece52efa8ca7ab5e419a8a34f10 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Fri, 14 Aug 2020 07:32:38 -0700 Subject: [PATCH 186/188] add first drop of gcp policies --- .../accurics.gcp.IAM.145.json | 10 +++++++ .../github_repository/privateRepoEnabled.rego | 7 +++++ .../accurics.gcp.IAM.106.json | 10 +++++++ .../bqDatasetPubliclyAccessible.rego | 7 +++++ .../accurics.gcp.EKM.131.json | 10 +++++++ .../vmEncryptedwithCsek.rego | 7 +++++ .../accurics.gcp.NS.017.json | 13 +++++++++ .../accurics.gcp.NS.111.json | 13 +++++++++ .../accurics.gcp.NS.123.json | 10 +++++++ .../google_compute_firewall/portIsOpen.rego | 10 +++++++ .../unrestrictedRdpAccess.rego | 11 ++++++++ .../accurics.gcp.EKM.132.json | 10 +++++++ .../accurics.gcp.IAM.124.json | 10 +++++++ .../accurics.gcp.IAM.128.json | 10 +++++++ .../accurics.gcp.NS.125.json | 10 +++++++ .../accurics.gcp.NS.126.json | 10 +++++++ .../accurics.gcp.NS.129.json | 10 +++++++ .../accurics.gcp.NS.130.json | 10 +++++++ .../accurics.gcp.NS.133.json | 10 +++++++ .../checkIpForward.rego | 7 +++++ .../checkVM_NoFullCloudAccess.rego | 8 ++++++ .../defaultServiceAccountUsed.rego | 9 +++++++ .../encryptedwithCsek.rego | 7 +++++ .../osLoginEnabled.rego | 17 ++++++++++++ .../projectWideSshKeysUsed.rego | 17 ++++++++++++ .../serialPortEnabled.rego | 17 ++++++++++++ .../shieldedVmEenabled.rego | 22 +++++++++++++++ .../accurics.gcp.IAM.127.json | 10 +++++++ .../checkOSLoginEnabled.rego | 14 ++++++++++ .../accurics.gcp.EKM.134.json | 10 +++++++ .../weakCipherSuitesEnabled.rego | 7 +++++ .../accurics.gcp.LOG.118.json | 10 +++++++ .../vpcFlowLogEnabled.rego | 6 +++++ .../accurics.gcp.IAM.104.json | 10 +++++++ .../accurics.gcp.IAM.110.json | 10 +++++++ .../accurics.gcp.IAM.142.json | 10 +++++++ .../accurics.gcp.LOG.100.json | 10 +++++++ .../accurics.gcp.MON.143.json | 10 +++++++ .../accurics.gcp.NS.103.json | 10 +++++++ .../accurics.gcp.NS.109.json | 10 +++++++ .../accurics.gcp.NS.112.json | 10 +++++++ .../accurics.gcp.NS.117.json | 10 +++++++ .../accurics.gcp.OPS.113.json | 10 +++++++ .../accurics.gcp.OPS.115.json | 10 +++++++ .../accurics.gcp.OPS.116.json | 10 +++++++ .../clientCertificateEnabled.rego | 7 +++++ .../clusterLabelsEnabled.rego | 6 +++++ .../gkeBasicAuthDisabled.rego | 8 ++++++ .../gkeControlPlaneNotPublic.rego | 7 +++++ .../ipAliasingEnabled.rego | 6 +++++ .../legacyAuthEnabled.rego | 6 +++++ .../masterAuthEnabled.rego | 7 +++++ .../networkPolicyEnabled.rego | 6 +++++ .../podSecurityPolicyEnabled.rego | 7 +++++ .../privateClusterEnabled.rego | 8 ++++++ .../stackDriverLoggingEnabled.rego | 6 +++++ .../stackDriverMonitoringEnabled.rego | 6 +++++ 
.../accurics.gcp.OPS.101.json | 10 +++++++ .../accurics.gcp.OPS.114.json | 10 +++++++ .../accurics.gcp.OPS.144.json | 10 +++++++ .../autoNodeRepairEnabled.rego | 8 ++++++ .../autoNodeUpgradeEnabled.rego | 7 +++++ .../cosNodeImageUsed.rego | 13 +++++++++ .../accurics.gcp.EKM.108.json | 10 +++++++ .../accurics.gcp.NS.107.json | 10 +++++++ .../dnsStateIsNotOn.rego | 6 +++++ .../rsaSha1NotUsedDNSSEC.rego | 8 ++++++ .../accurics.gcp.EKM.007.json | 10 +++++++ .../accurics.gcp.EKM.139.json | 10 +++++++ .../checkRotation365Days.rego | 6 +++++ .../checkRotation90Days.rego | 7 +++++ .../google_project/accurics.gcp.NS.119.json | 10 +++++++ .../google_project/autoCreateNetDisabled.rego | 8 ++++++ .../accurics.gcp.LOG.010.json | 10 +++++++ .../checkAuditLoggingConfig.rego | 24 +++++++++++++++++ .../accurics.gcp.IAM.002.json | 10 +++++++ .../accurics.gcp.IAM.136.json | 10 +++++++ .../iamServiceAccountUsed.rego | 13 +++++++++ .../noGmailAccount.rego | 7 +++++ .../accurics.gcp.IAM.137.json | 10 +++++++ .../accurics.gcp.IAM.138.json | 10 +++++++ .../iamServiceAccountUsed.rego | 13 +++++++++ .../serviceAccountAdminPriviledges.rego | 8 ++++++ .../accurics.gcp.BDR.105.json | 10 +++++++ .../accurics.gcp.EKM.141.json | 10 +++++++ .../accurics.gcp.NS.102.json | 10 +++++++ .../backupConfigEnabled.rego | 8 ++++++ .../checkDatabaseSettings.rego | 27 +++++++++++++++++++ .../accurics.gcp.IAM.122.json | 10 +++++++ .../accurics.gcp.LOG.012.json | 10 +++++++ .../accurics.gcp.LOG.023.json | 10 +++++++ .../checkStorageBucketConfig.rego | 12 +++++++++ .../uniformBucketEnabled.rego | 7 +++++ .../accurics.gcp.IAM.121.json | 10 +++++++ .../checkPubliclyAccessible.rego | 15 +++++++++++ .../accurics.gcp.IAM.120.json | 10 +++++++ .../checkNoPublicAccess.rego | 9 +++++++ 97 files changed, 965 insertions(+) create mode 100755 pkg/policies/opa/rego/gcp/github_repository/accurics.gcp.IAM.145.json create mode 100755 pkg/policies/opa/rego/gcp/github_repository/privateRepoEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_bigquery_dataset/accurics.gcp.IAM.106.json create mode 100755 pkg/policies/opa/rego/gcp/google_bigquery_dataset/bqDatasetPubliclyAccessible.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_disk/accurics.gcp.EKM.131.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_disk/vmEncryptedwithCsek.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.017.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.111.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.123.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_firewall/portIsOpen.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_firewall/unrestrictedRdpAccess.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.EKM.132.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.124.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.125.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.130.json create mode 100755 
pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.133.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/checkIpForward.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/checkVM_NoFullCloudAccess.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/defaultServiceAccountUsed.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/encryptedwithCsek.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/shieldedVmEenabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_project_metadata/accurics.gcp.IAM.127.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_project_metadata/checkOSLoginEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_ssl_policy/accurics.gcp.EKM.134.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_ssl_policy/weakCipherSuitesEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_compute_subnetwork/accurics.gcp.LOG.118.json create mode 100755 pkg/policies/opa/rego/gcp/google_compute_subnetwork/vpcFlowLogEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.104.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.110.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.142.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.LOG.100.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.MON.143.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.103.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.109.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.112.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.117.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.113.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.115.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.116.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/clientCertificateEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/clusterLabelsEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/gkeBasicAuthDisabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/gkeControlPlaneNotPublic.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/ipAliasingEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/legacyAuthEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/masterAuthEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/networkPolicyEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/podSecurityPolicyEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/privateClusterEnabled.rego create mode 100755 
pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverLoggingEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverMonitoringEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.101.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.114.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.144.json create mode 100755 pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeRepairEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeUpgradeEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_container_node_pool/cosNodeImageUsed.rego create mode 100755 pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.EKM.108.json create mode 100755 pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.NS.107.json create mode 100755 pkg/policies/opa/rego/gcp/google_dns_managed_zone/dnsStateIsNotOn.rego create mode 100755 pkg/policies/opa/rego/gcp/google_dns_managed_zone/rsaSha1NotUsedDNSSEC.rego create mode 100755 pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.007.json create mode 100755 pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.139.json create mode 100755 pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation365Days.rego create mode 100755 pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation90Days.rego create mode 100755 pkg/policies/opa/rego/gcp/google_project/accurics.gcp.NS.119.json create mode 100755 pkg/policies/opa/rego/gcp/google_project/autoCreateNetDisabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_audit_config/accurics.gcp.LOG.010.json create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_audit_config/checkAuditLoggingConfig.rego create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.002.json create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.136.json create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_binding/iamServiceAccountUsed.rego create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_binding/noGmailAccount.rego create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.137.json create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.138.json create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_member/iamServiceAccountUsed.rego create mode 100755 pkg/policies/opa/rego/gcp/google_project_iam_member/serviceAccountAdminPriviledges.rego create mode 100755 pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.BDR.105.json create mode 100755 pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.EKM.141.json create mode 100755 pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.NS.102.json create mode 100755 pkg/policies/opa/rego/gcp/google_sql_database_instance/backupConfigEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_sql_database_instance/checkDatabaseSettings.rego create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.IAM.122.json create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.012.json create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.023.json create mode 100755 
pkg/policies/opa/rego/gcp/google_storage_bucket/checkStorageBucketConfig.rego create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket/uniformBucketEnabled.rego create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/accurics.gcp.IAM.121.json create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/checkPubliclyAccessible.rego create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/accurics.gcp.IAM.120.json create mode 100755 pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/checkNoPublicAccess.rego diff --git a/pkg/policies/opa/rego/gcp/github_repository/accurics.gcp.IAM.145.json b/pkg/policies/opa/rego/gcp/github_repository/accurics.gcp.IAM.145.json new file mode 100755 index 000000000..e20203af8 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/github_repository/accurics.gcp.IAM.145.json @@ -0,0 +1,10 @@ +{ + "name": "privateRepoEnabled", + "file": "privateRepoEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Repository is Not Private.", + "referenceId": "accurics.gcp.IAM.145", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/github_repository/privateRepoEnabled.rego b/pkg/policies/opa/rego/gcp/github_repository/privateRepoEnabled.rego new file mode 100755 index 000000000..78a4678f5 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/github_repository/privateRepoEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +privateRepoEnabled[api.id] +{ + api := input.github_repository[_] + not api.config.private == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_bigquery_dataset/accurics.gcp.IAM.106.json b/pkg/policies/opa/rego/gcp/google_bigquery_dataset/accurics.gcp.IAM.106.json new file mode 100755 index 000000000..8679e8cec --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_bigquery_dataset/accurics.gcp.IAM.106.json @@ -0,0 +1,10 @@ +{ + "name": "bqDatasetPubliclyAccessible", + "file": "bqDatasetPubliclyAccessible.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "BigQuery datasets may be anonymously or publicly accessible.", + "referenceId": "accurics.gcp.IAM.106", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_bigquery_dataset/bqDatasetPubliclyAccessible.rego b/pkg/policies/opa/rego/gcp/google_bigquery_dataset/bqDatasetPubliclyAccessible.rego new file mode 100755 index 000000000..93732f482 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_bigquery_dataset/bqDatasetPubliclyAccessible.rego @@ -0,0 +1,7 @@ +package accurics + +bqDatasetPubliclyAccessible[api.id]{ + api := input.google_bigquery_dataset[_] + data := api.config.access[_] + data.special_group == "allAuthenticatedUsers" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_disk/accurics.gcp.EKM.131.json b/pkg/policies/opa/rego/gcp/google_compute_disk/accurics.gcp.EKM.131.json new file mode 100755 index 000000000..ea9e827e6 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_disk/accurics.gcp.EKM.131.json @@ -0,0 +1,10 @@ +{ + "name": "vmEncryptedwithCsek", + "file": "vmEncryptedwithCsek.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure VM disks for critical VMs are encrypted with Customer Supplied Encryption Keys (CSEK) .", + "referenceId": "accurics.gcp.EKM.131", + "category": "Encryption \u0026 Key Management", + 
"version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_disk/vmEncryptedwithCsek.rego b/pkg/policies/opa/rego/gcp/google_compute_disk/vmEncryptedwithCsek.rego new file mode 100755 index 000000000..c59a04e62 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_disk/vmEncryptedwithCsek.rego @@ -0,0 +1,7 @@ +package accurics + +vmEncryptedwithCsek[api.id] +{ + api := input.google_compute_disk[_] + not api.config.disk_encryption_key == null +} diff --git a/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.017.json b/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.017.json new file mode 100755 index 000000000..0d9381450 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.017.json @@ -0,0 +1,13 @@ +{ + "name": "port22Open", + "file": "portIsOpen.rego", + "templateArgs": { + "name": "port22Open", + "port_number": "22" + }, + "severity": "MEDIUM", + "description": "Ensure that SSH access is restricted from the internet", + "referenceId": "accurics.gcp.NS.017", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.111.json b/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.111.json new file mode 100755 index 000000000..e62d58bb6 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.111.json @@ -0,0 +1,13 @@ +{ + "name": "port3389Open", + "file": "portIsOpen.rego", + "templateArgs": { + "name": "port3389Open", + "port_number": "3389" + }, + "severity": "MEDIUM", + "description": "Ensure that SSH access is restricted from the internet", + "referenceId": "accurics.gcp.NS.111", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.123.json b/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.123.json new file mode 100755 index 000000000..df56fcbfe --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_firewall/accurics.gcp.NS.123.json @@ -0,0 +1,10 @@ +{ + "name": "unrestrictedRdpAccess", + "file": "unrestrictedRdpAccess.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure Google compute firewall ingress does not allow unrestricted rdp access.", + "referenceId": "accurics.gcp.NS.123", + "category": "Network Security ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_firewall/portIsOpen.rego b/pkg/policies/opa/rego/gcp/google_compute_firewall/portIsOpen.rego new file mode 100755 index 000000000..00f0817ca --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_firewall/portIsOpen.rego @@ -0,0 +1,10 @@ +package accurics + +{{.name}}[rule.id] { + rule := input.google_compute_firewall[_] + config := rule.config + config.direction == "INGRESS" + config.source_ranges[_] == "0.0.0.0/0" + fire_rule := config.allow[_] + fire_rule.ports[_] == "{{.port_number}}" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_firewall/unrestrictedRdpAccess.rego b/pkg/policies/opa/rego/gcp/google_compute_firewall/unrestrictedRdpAccess.rego new file mode 100755 index 000000000..c657b1002 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_firewall/unrestrictedRdpAccess.rego @@ -0,0 +1,11 @@ +package accurics + +unrestrictedRdpAccess[api.id] +{ + api := input.google_compute_firewall[_] + data := api.config + 
data.direction == "INGRESS" + fire_rule := data.allow[_] + fire_rule.protocol == "tcp" + fire_rule.ports[_] == "3389" +} diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.EKM.132.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.EKM.132.json new file mode 100755 index 000000000..d390cfbb8 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.EKM.132.json @@ -0,0 +1,10 @@ +{ + "name": "encryptedwithCsek", + "file": "encryptedwithCsek.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure VM disks for critical VMs are encrypted with Customer Supplied Encryption Keys (CSEK) .", + "referenceId": "accurics.gcp.EKM.132", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.124.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.124.json new file mode 100755 index 000000000..3d5e5ab47 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.124.json @@ -0,0 +1,10 @@ +{ + "name": "defaultServiceAccountUsed", + "file": "defaultServiceAccountUsed.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Instances may have been configured to use the default service account with full access to all Cloud APIs", + "referenceId": "accurics.gcp.IAM.124", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json new file mode 100755 index 000000000..30cd8f9e0 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json @@ -0,0 +1,10 @@ +{ + "name": "osLoginEnabled", + "file": "osLoginEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that no instance in the project overrides the project setting for enabling OSLogin", + "referenceId": "accurics.gcp.IAM.128", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.125.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.125.json new file mode 100755 index 000000000..a1488c856 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.125.json @@ -0,0 +1,10 @@ +{ + "name": "checkVM_NoFullCloudAccess", + "file": "checkVM_NoFullCloudAccess.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Instances may have been configured to use the default service account with full access to all Cloud APIs", + "referenceId": "accurics.gcp.NS.125", + "category": "Access Control", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json new file mode 100755 index 000000000..fdec91878 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json @@ -0,0 +1,10 @@ +{ + "name": "projectWideSshKeysUsed", + "file": "projectWideSshKeysUsed.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure 'Block Project-wide SSH keys' is enabled for VM instances.", + "referenceId": "accurics.gcp.NS.126", + "category": "Network Security", + 
"version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json new file mode 100755 index 000000000..01f037c84 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json @@ -0,0 +1,10 @@ +{ + "name": "serialPortEnabled", + "file": "serialPortEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure 'Enable connecting to serial ports' is not enabled for VM instances.", + "referenceId": "accurics.gcp.NS.129", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.130.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.130.json new file mode 100755 index 000000000..bcedd7c86 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.130.json @@ -0,0 +1,10 @@ +{ + "name": "checkIpForward", + "file": "checkIpForward.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure IP forwarding is not enabled on Instances.", + "referenceId": "accurics.gcp.NS.130", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.133.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.133.json new file mode 100755 index 000000000..6afeccef6 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.133.json @@ -0,0 +1,10 @@ +{ + "name": "shieldedVmEenabled", + "file": "shieldedVmEenabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure Compute instances are launched with Shielded VM enabled.", + "referenceId": "accurics.gcp.NS.133", + "category": "Network Security ", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/checkIpForward.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/checkIpForward.rego new file mode 100755 index 000000000..8f5b59734 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/checkIpForward.rego @@ -0,0 +1,7 @@ +package accurics + +checkIpForward[api.id] +{ + api := input.google_compute_instance[_] + not api.config.can_ip_forward == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/checkVM_NoFullCloudAccess.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/checkVM_NoFullCloudAccess.rego new file mode 100755 index 000000000..a0e59db5b --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/checkVM_NoFullCloudAccess.rego @@ -0,0 +1,8 @@ +package accurics + +checkVM_NoFullCloudAccess[log_object.id] { + log_object := input.google_compute_instance[_] + service_account := log_object.config.service_account[_] + scope := service_account.scopes[_] + contains(scope, "cloud-platform") +} diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/defaultServiceAccountUsed.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/defaultServiceAccountUsed.rego new file mode 100755 index 000000000..ed44d0550 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/defaultServiceAccountUsed.rego @@ -0,0 +1,9 @@ +package accurics + +defaultServiceAccountUsed[api.id] +{ + api := input.google_compute_instance[_] + data := api.config + fire_rule := data.service_account[_] + 
contains(fire_rule.email, "@developer.gserviceaccount.com") +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/encryptedwithCsek.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/encryptedwithCsek.rego new file mode 100755 index 000000000..99774c09f --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/encryptedwithCsek.rego @@ -0,0 +1,7 @@ +package accurics + +encryptedwithCsek[api.id] +{ + api := input.google_compute_disk[_] + not api.config.disk_encryption_key == null +} diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego new file mode 100755 index 000000000..44be172f7 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego @@ -0,0 +1,17 @@ +package accurics + +osLoginEnabled[api.id] +{ + api := input.google_compute_instance[_] + api.config.metadata != null + meta_str := lower(object.get(api.config.metadata,"enable-oslogin","undefined")) + meta_str == "false" +} + +osLoginEnabled[api.id] +{ + api := input.google_compute_project_metadata[_] + api.config.metadata != null + meta_str := lower(object.get(api.config.metadata,"enable-oslogin","undefined")) + meta_str == "false" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego new file mode 100755 index 000000000..7e8b938f3 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego @@ -0,0 +1,17 @@ +package accurics + +projectWideSshKeysUsed[api.id] +{ + api := input.google_compute_instance[_] + api.config.metadata != null + meta_str := lower(object.get(api.config.metadata,"block-project-ssh-keys","undefined")) + meta_str == "false" +} + +projectWideSshKeysUsed[api.id] +{ + api := input.google_compute_project_metadata[_] + api.config.metadata != null + meta_str := lower(object.get(api.config.metadata,"block-project-ssh-keys","undefined")) + meta_str == "false" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego new file mode 100755 index 000000000..60157161b --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego @@ -0,0 +1,17 @@ +package accurics + +serialPortEnabled[api.id] +{ + api := input.google_compute_instance[_] + api.config.metadata != null + meta_str := lower(object.get(api.config.metadata,"serial-port-enable","undefined")) + meta_str == "false" +} + +serialPortEnabled[api.id] +{ + api := input.google_compute_project_metadata[_] + api.config.metadata != null + meta_str := lower(object.get(api.config.metadata,"serial-port-enable","undefined")) + meta_str == "false" +} diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/shieldedVmEenabled.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/shieldedVmEenabled.rego new file mode 100755 index 000000000..864745a11 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_instance/shieldedVmEenabled.rego @@ -0,0 +1,22 @@ +package accurics + +shieldedVmEenabled[api.id] +{ + api := input.google_compute_instance[_] + count(api.config.shielded_instance_config) == 0 + +} + +shieldedVmEenabled[api.id] +{ + api := input.google_compute_instance[_] + data := api.config.shielded_instance_config[_] + not 
data.enable_integrity_monitoring == true +} + +shieldedVmEenabled[api.id] +{ + api := input.google_compute_instance[_] + data := api.config.shielded_instance_config[_] + not data.enable_vtpm == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_project_metadata/accurics.gcp.IAM.127.json b/pkg/policies/opa/rego/gcp/google_compute_project_metadata/accurics.gcp.IAM.127.json new file mode 100755 index 000000000..9525cc591 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_project_metadata/accurics.gcp.IAM.127.json @@ -0,0 +1,10 @@ +{ + "name": "checkOSLoginEnabled", + "file": "checkOSLoginEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure oslogin is enabled for a Project", + "referenceId": "accurics.gcp.IAM.127", + "category": "Access Control", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_project_metadata/checkOSLoginEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_project_metadata/checkOSLoginEnabled.rego new file mode 100755 index 000000000..773c4146e --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_project_metadata/checkOSLoginEnabled.rego @@ -0,0 +1,14 @@ +package accurics + +checkOSLoginEnabled[metadata.id] { + metadata := input.google_compute_project_metadata[_] + metadata.config.metadata == null +} { + metadata := input.google_compute_project_metadata[_] + metadata.config.metadata != null + not metadata.config.metadata["enable-oslogin"] +} { + metadata := input.google_compute_project_metadata[_] + metadata.config.metadata != null + metadata.config.metadata["enable-oslogin"] != "TRUE" +} diff --git a/pkg/policies/opa/rego/gcp/google_compute_ssl_policy/accurics.gcp.EKM.134.json b/pkg/policies/opa/rego/gcp/google_compute_ssl_policy/accurics.gcp.EKM.134.json new file mode 100755 index 000000000..137749fdb --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_ssl_policy/accurics.gcp.EKM.134.json @@ -0,0 +1,10 @@ +{ + "name": "weakCipherSuitesEnabled", + "file": "weakCipherSuitesEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure no HTTPS or SSL proxy load balancers permit SSL policies with weak cipher suites.", + "referenceId": "accurics.gcp.EKM.134", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_ssl_policy/weakCipherSuitesEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_ssl_policy/weakCipherSuitesEnabled.rego new file mode 100755 index 000000000..ee935555c --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_ssl_policy/weakCipherSuitesEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +weakCipherSuitesEnabled[api.id] +{ + api := input.google_compute_ssl_policy[_] + not api.config.min_tls_version == "TLS_1_2" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_subnetwork/accurics.gcp.LOG.118.json b/pkg/policies/opa/rego/gcp/google_compute_subnetwork/accurics.gcp.LOG.118.json new file mode 100755 index 000000000..c467a5a51 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_subnetwork/accurics.gcp.LOG.118.json @@ -0,0 +1,10 @@ +{ + "name": "vpcFlowLogEnabled", + "file": "vpcFlowLogEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that VPC Flow Logs is enabled for every subnet in a VPC Network.", + "referenceId": "accurics.gcp.LOG.118", + "category": "Logging ", + "version": 1 +} \ No newline at end of 
file diff --git a/pkg/policies/opa/rego/gcp/google_compute_subnetwork/vpcFlowLogEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_subnetwork/vpcFlowLogEnabled.rego new file mode 100755 index 000000000..e10d81529 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_compute_subnetwork/vpcFlowLogEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +vpcFlowLogEnabled[api.id]{ + api := input.google_compute_subnetwork[_] + count(api.config.log_config) == 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.104.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.104.json new file mode 100755 index 000000000..8fabda924 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.104.json @@ -0,0 +1,10 @@ +{ + "name": "clientCertificateEnabled", + "file": "clientCertificateEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Kubernetes Cluster is created with Client Certificate enabled.", + "referenceId": "accurics.gcp.IAM.104", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.110.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.110.json new file mode 100755 index 000000000..a8cdc2e08 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.110.json @@ -0,0 +1,10 @@ +{ + "name": "gkeBasicAuthDisabled", + "file": "gkeBasicAuthDisabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure GKE basic auth is disabled.", + "referenceId": "accurics.gcp.IAM.110", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.142.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.142.json new file mode 100755 index 000000000..ca81dd704 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.IAM.142.json @@ -0,0 +1,10 @@ +{ + "name": "legacyAuthEnabled", + "file": "legacyAuthEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Legacy Authorization is set to disabled on Kubernetes Engine Clusters.", + "referenceId": "accurics.gcp.IAM.142", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.LOG.100.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.LOG.100.json new file mode 100755 index 000000000..e2249957c --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.LOG.100.json @@ -0,0 +1,10 @@ +{ + "name": "stackDriverLoggingEnabled", + "file": "stackDriverLoggingEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Stackdriver Logging is enabled on Kubernetes Engine Clusters.", + "referenceId": "accurics.gcp.LOG.100", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.MON.143.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.MON.143.json new file mode 100755 index 000000000..5c985b757 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.MON.143.json @@ -0,0 +1,10 @@ +{ + "name": "stackDriverMonitoringEnabled", + 
"file": "stackDriverMonitoringEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Stackdriver Logging is enabled on Kubernetes Engine Clusters.", + "referenceId": "accurics.gcp.MON.143", + "category": "Monitoring", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.103.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.103.json new file mode 100755 index 000000000..8996bea8b --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.103.json @@ -0,0 +1,10 @@ +{ + "name": "networkPolicyEnabled", + "file": "networkPolicyEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Network policy is enabled on Kubernetes Engine Clusters.", + "referenceId": "accurics.gcp.NS.103", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.109.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.109.json new file mode 100755 index 000000000..9e7cd8b38 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.109.json @@ -0,0 +1,10 @@ +{ + "name": "gkeControlPlaneNotPublic", + "file": "gkeControlPlaneNotPublic.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure GKE Control Plane is not public.", + "referenceId": "accurics.gcp.NS.109", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.112.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.112.json new file mode 100755 index 000000000..9006a735d --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.112.json @@ -0,0 +1,10 @@ +{ + "name": "masterAuthEnabled", + "file": "masterAuthEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Master Authentication is set to enabled on Kubernetes Engine Clusters.", + "referenceId": "accurics.gcp.NS.112", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.117.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.117.json new file mode 100755 index 000000000..edb4f73e5 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.NS.117.json @@ -0,0 +1,10 @@ +{ + "name": "privateClusterEnabled", + "file": "privateClusterEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Kubernetes Cluster is created with Private cluster enabled.", + "referenceId": "accurics.gcp.NS.117", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.113.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.113.json new file mode 100755 index 000000000..9bed69456 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.113.json @@ -0,0 +1,10 @@ +{ + "name": "clusterLabelsEnabled", + "file": "clusterLabelsEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Kubernetes Clusters are configured with Labels.", + "referenceId": "accurics.gcp.OPS.113", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end 
of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.115.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.115.json new file mode 100755 index 000000000..e47f6ef62 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.115.json @@ -0,0 +1,10 @@ +{ + "name": "ipAliasingEnabled", + "file": "ipAliasingEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Kubernetes Cluster is created with Alias IP ranges enabled", + "referenceId": "accurics.gcp.OPS.115", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.116.json b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.116.json new file mode 100755 index 000000000..4fc96fcb9 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/accurics.gcp.OPS.116.json @@ -0,0 +1,10 @@ +{ + "name": "podSecurityPolicyEnabled", + "file": "podSecurityPolicyEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure PodSecurityPolicy controller is enabled on the Kubernetes Engine Clusters.", + "referenceId": "accurics.gcp.OPS.116", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/clientCertificateEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/clientCertificateEnabled.rego new file mode 100755 index 000000000..e57df922d --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/clientCertificateEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +clientCertificateEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + master := container_cluster.config.master_auth[_] + master.client_certificate_config[_].issue_client_certificate == false +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/clusterLabelsEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/clusterLabelsEnabled.rego new file mode 100755 index 000000000..e2b95e60e --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/clusterLabelsEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +clusterLabelsEnabled[api.id]{ + api := input.google_container_cluster[_] + api.config.resource_labels == null +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/gkeBasicAuthDisabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/gkeBasicAuthDisabled.rego new file mode 100755 index 000000000..bf253ebaf --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/gkeBasicAuthDisabled.rego @@ -0,0 +1,8 @@ +package accurics + +gkeBasicAuthDisabled[api.id]{ + api := input.google_container_cluster[_] + data := api.config.master_auth[_] + not data.username == null + not data.password == null +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/gkeControlPlaneNotPublic.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/gkeControlPlaneNotPublic.rego new file mode 100755 index 000000000..826e38884 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/gkeControlPlaneNotPublic.rego @@ -0,0 +1,7 @@ +package accurics + +gkeControlPlaneNotPublic[api.id]{ + api := input.google_container_cluster[_] + data := api.config.private_cluster_config[_] + not data.enable_private_endpoint == true +} \ No newline at end of file diff --git 
a/pkg/policies/opa/rego/gcp/google_container_cluster/ipAliasingEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/ipAliasingEnabled.rego new file mode 100755 index 000000000..3953d5d0c --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/ipAliasingEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +ipAliasingEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + container_cluster.config.ip_allocation_policy == [] +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/legacyAuthEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/legacyAuthEnabled.rego new file mode 100755 index 000000000..5aeada177 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/legacyAuthEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +legacyAuthEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + container_cluster.config.enable_legacy_abac == true +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/masterAuthEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/masterAuthEnabled.rego new file mode 100755 index 000000000..4d35b82d3 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/masterAuthEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +masterAuthEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + master := container_cluster.config.master_auth[_] + master.username == null +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/networkPolicyEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/networkPolicyEnabled.rego new file mode 100755 index 000000000..3b623c272 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/networkPolicyEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +networkPolicyEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + container_cluster.config.network_policy[_].enabled == false +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/podSecurityPolicyEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/podSecurityPolicyEnabled.rego new file mode 100755 index 000000000..93ea6e4d1 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/podSecurityPolicyEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +podSecurityPolicyEnabled[api.id]{ + api := input.google_container_cluster[_] + data := api.config.pod_security_policy_config[_] + not data.enabled == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/privateClusterEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/privateClusterEnabled.rego new file mode 100755 index 000000000..9017d1fe5 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/privateClusterEnabled.rego @@ -0,0 +1,8 @@ +package accurics + +privateClusterEnabled[api.id]{ + api := input.google_container_cluster[_] + data := api.config.private_cluster_config[_] + not data.enable_private_endpoint == true + not data.enable_private_nodes == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverLoggingEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverLoggingEnabled.rego new file mode 100755 index 000000000..27491c669 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverLoggingEnabled.rego @@ -0,0 +1,6 @@ +package accurics + 
+stackDriverLoggingEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + container_cluster.config.logging_service != "logging.googleapis.com/kubernetes" +} diff --git a/pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverMonitoringEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverMonitoringEnabled.rego new file mode 100755 index 000000000..c573a2a87 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_cluster/stackDriverMonitoringEnabled.rego @@ -0,0 +1,6 @@ +package accurics + +stackDriverMonitoringEnabled[container_cluster.id] { + container_cluster := input.google_container_cluster[_] + container_cluster.config.monitoring_service != "monitoring.googleapis.com/kubernetes" +} diff --git a/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.101.json b/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.101.json new file mode 100755 index 000000000..a2c455b1c --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.101.json @@ -0,0 +1,10 @@ +{ + "name": "autoNodeUpgradeEnabled", + "file": "autoNodeUpgradeEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure 'Automatic node upgrade' is enabled for Kubernetes Clusters.", + "referenceId": "accurics.gcp.OPS.101", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.114.json b/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.114.json new file mode 100755 index 000000000..eadea4754 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.114.json @@ -0,0 +1,10 @@ +{ + "name": "cosNodeImageUsed", + "file": "cosNodeImageUsed.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Container-Optimized OS (cos) is used for Kubernetes Engine Clusters Node image.", + "referenceId": "accurics.gcp.OPS.114", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.144.json b/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.144.json new file mode 100755 index 000000000..113362121 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_node_pool/accurics.gcp.OPS.144.json @@ -0,0 +1,10 @@ +{ + "name": "autoNodeRepairEnabled", + "file": "autoNodeRepairEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure 'Automatic node repair' is enabled for Kubernetes Clusters.", + "referenceId": "accurics.gcp.OPS.144", + "category": "Operational Efficiency", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeRepairEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeRepairEnabled.rego new file mode 100755 index 000000000..9d9eb448b --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeRepairEnabled.rego @@ -0,0 +1,8 @@ +package accurics + +autoNodeRepairEnabled[api.id] +{ + api := input.google_container_node_pool[_] + data := api.config.management[_] + data.auto_repair == false +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeUpgradeEnabled.rego b/pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeUpgradeEnabled.rego new file mode 100755 index 
000000000..b384ac9e7 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_node_pool/autoNodeUpgradeEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +autoNodeUpgradeEnabled[api.id]{ + api := input.google_container_node_pool[_] + data := api.config.management[_] + data.auto_upgrade == false +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_container_node_pool/cosNodeImageUsed.rego b/pkg/policies/opa/rego/gcp/google_container_node_pool/cosNodeImageUsed.rego new file mode 100755 index 000000000..24812115f --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_container_node_pool/cosNodeImageUsed.rego @@ -0,0 +1,13 @@ +package accurics + +cosNodeImageUsed[api.id]{ + api := input.google_container_node_pool[_] + data := api.config.node_config[_] + not data.image_type == "cos" +} + +# cosNodeImageUsed[api.id]{ +# api := input.google_container_node_pool[_] +# data := api.config.node_config[_] +# not data.image_type +#} diff --git a/pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.EKM.108.json b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.EKM.108.json new file mode 100755 index 000000000..d3a5b282b --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.EKM.108.json @@ -0,0 +1,10 @@ +{ + "name": "rsaSha1NotUsedDNSSEC", + "file": "rsaSha1NotUsedDNSSEC.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that RSASHA1 is not used for the zone-signing and key-signing keys in Cloud DNS DNSSEC.", + "referenceId": "accurics.gcp.EKM.108", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.NS.107.json b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.NS.107.json new file mode 100755 index 000000000..b8b66baf7 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/accurics.gcp.NS.107.json @@ -0,0 +1,10 @@ +{ + "name": "dnsStateIsNotOn", + "file": "dnsStateIsNotOn.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that DNSSEC is enabled for Cloud DNS.", + "referenceId": "accurics.gcp.NS.107", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_dns_managed_zone/dnsStateIsNotOn.rego b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/dnsStateIsNotOn.rego new file mode 100755 index 000000000..8f0528ce0 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/dnsStateIsNotOn.rego @@ -0,0 +1,6 @@ +package accurics + +dnsStateIsNotOn[dnsconfig.id] { + dnsconfig := input.google_dns_managed_zone[_] + state := dnsconfig.config.dnssec_config[_].state != "on" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_dns_managed_zone/rsaSha1NotUsedDNSSEC.rego b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/rsaSha1NotUsedDNSSEC.rego new file mode 100755 index 000000000..d394c679f --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_dns_managed_zone/rsaSha1NotUsedDNSSEC.rego @@ -0,0 +1,8 @@ +package accurics + +rsaSha1NotUsedDNSSEC[api.id]{ + api := input.google_dns_managed_zone[_] + data := api.config.dnssec_config[_] + var := data.default_key_specs[_] + var.algorithm == "rsasha1" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.007.json b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.007.json new file mode 100755 index 000000000..2c69a2437 --- 
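[Editor's note] The DNSSEC rules walk nested blocks (dnssec_config, then default_key_specs) with the [_] iterator, so a zone is flagged if any of its key specs uses RSASHA1. A small illustrative check, with a zone id and values that are not taken from this patch:

    package accurics

    # Illustrative only: a key spec using rsasha1 should be reported.
    test_rsasha1_key_flagged {
        rsaSha1NotUsedDNSSEC["zone-1"] with input as {
            "google_dns_managed_zone": [
                {"id": "zone-1", "config": {"dnssec_config": [
                    {"state": "on", "default_key_specs": [{"algorithm": "rsasha1"}]}
                ]}}
            ]
        }
    }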
/dev/null +++ b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.007.json @@ -0,0 +1,10 @@ +{ + "name": "checkRotation365Days", + "file": "checkRotation365Days.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure Encryption keys are rotated within a period of 365 days.", + "referenceId": "accurics.gcp.EKM.007", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.139.json b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.139.json new file mode 100755 index 000000000..ea1a93d7d --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/accurics.gcp.EKM.139.json @@ -0,0 +1,10 @@ +{ + "name": "checkRotation90Days", + "file": "checkRotation90Days.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure Encryption keys are rotated within a period of 90 days.", + "referenceId": "accurics.gcp.EKM.139", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation365Days.rego b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation365Days.rego new file mode 100755 index 000000000..e8d6961fb --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation365Days.rego @@ -0,0 +1,6 @@ +package accurics + +checkRotation365Days[kms.id] { + kms := input.google_kms_crypto_key[_] + kms.config.rotation_period <= "31536000s" +} diff --git a/pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation90Days.rego b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation90Days.rego new file mode 100755 index 000000000..9f6c82c9a --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_kms_crypto_key/checkRotation90Days.rego @@ -0,0 +1,7 @@ +package accurics + +checkRotation90Days[api.id] +{ + api := input.google_kms_crypto_key[_] + api.config.rotation_period <= "7776000s" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project/accurics.gcp.NS.119.json b/pkg/policies/opa/rego/gcp/google_project/accurics.gcp.NS.119.json new file mode 100755 index 000000000..31ed7b676 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project/accurics.gcp.NS.119.json @@ -0,0 +1,10 @@ +{ + "name": "autoCreateNetDisabled", + "file": "autoCreateNetDisabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that the default network does not exist in a project.", + "referenceId": "accurics.gcp.NS.119", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project/autoCreateNetDisabled.rego b/pkg/policies/opa/rego/gcp/google_project/autoCreateNetDisabled.rego new file mode 100755 index 000000000..924baf1e9 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project/autoCreateNetDisabled.rego @@ -0,0 +1,8 @@ +package accurics + +autoCreateNetDisabled[api.id] +{ + api := input.google_project[_] + not api.config.auto_create_network == false +} + diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_audit_config/accurics.gcp.LOG.010.json b/pkg/policies/opa/rego/gcp/google_project_iam_audit_config/accurics.gcp.LOG.010.json new file mode 100755 index 000000000..249bedcc7 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_audit_config/accurics.gcp.LOG.010.json @@ -0,0 +1,10 @@ +{ + "name": "checkAuditLoggingConfig", + "file": 
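[Editor's note] rotation_period is a Terraform string such as "7776000s", so the <= in checkRotation90Days and checkRotation365Days compares strings rather than numbers. If a numeric comparison were wanted, one possible helper, purely a sketch that is not part of this patch and that assumes the value always ends in "s", could be:

    package accurics

    # Sketch only: convert a period such as "7776000s" to a number before comparing.
    rotation_longer_than(period, max_seconds) {
        to_number(trim_suffix(period, "s")) > max_seconds
    }

With that helper, rotation_longer_than("31536000s", 7776000) holds for a key rotated yearly when checked against the 90-day bound.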
"checkAuditLoggingConfig.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that Cloud Audit Logging is configured properly across all services and all users from a project.", + "referenceId": "accurics.gcp.LOG.010", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_audit_config/checkAuditLoggingConfig.rego b/pkg/policies/opa/rego/gcp/google_project_iam_audit_config/checkAuditLoggingConfig.rego new file mode 100755 index 000000000..ee6dc3fab --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_audit_config/checkAuditLoggingConfig.rego @@ -0,0 +1,24 @@ +package accurics + +checkAuditLoggingConfig[iam_audit.id] { + iam_audit := input.google_project_iam_audit_config[_] + iam_audit.config.service != "allServices" +} { + iam_audit := input.google_project_iam_audit_config[_] + count(iam_audit.config.audit_log_config) < 3 + audit_log_config := iam_audit.config.audit_log_config[_] + not check_log_type_value(audit_log_config) + count(audit_log_config.exempted_members) != 0 +} + +check_log_type_value(item) { + item.log_type == "ADMIN_READ" +} + +check_log_type_value(item) { + item.log_type == "DATA_READ" +} + +check_log_type_value(item) { + item.log_type == "DATA_WRITE" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.002.json b/pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.002.json new file mode 100755 index 000000000..406bb6e6f --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.002.json @@ -0,0 +1,10 @@ +{ + "name": "noGmailAccount", + "file": "noGmailAccount.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that corporate login credentials are used instead of Gmail accounts.", + "referenceId": "accurics.gcp.IAM.002", + "category": "Identity and Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.136.json b/pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.136.json new file mode 100755 index 000000000..1720dd60d --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_binding/accurics.gcp.IAM.136.json @@ -0,0 +1,10 @@ +{ + "name": "iamServiceAccountUsed", + "file": "iamServiceAccountUsed.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that IAM users are not assigned the Service Account User or Service Account Token Creator roles at project level.", + "referenceId": "accurics.gcp.IAM.136", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_binding/iamServiceAccountUsed.rego b/pkg/policies/opa/rego/gcp/google_project_iam_binding/iamServiceAccountUsed.rego new file mode 100755 index 000000000..c8481e6c8 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_binding/iamServiceAccountUsed.rego @@ -0,0 +1,13 @@ +package accurics + +iamServiceAccountUsed[api.id] +{ + api := input.google_project_iam_binding[_] + api.config.role == "roles/iam.serviceAccountUser" +} + +iamServiceAccountUsed[api.id] +{ + api := input.google_project_iam_binding[_] + api.config.role == "roles/iam.serviceAccountTokenCreator" +} diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_binding/noGmailAccount.rego b/pkg/policies/opa/rego/gcp/google_project_iam_binding/noGmailAccount.rego new 
file mode 100755 index 000000000..e0218e56a --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_binding/noGmailAccount.rego @@ -0,0 +1,7 @@ +package accurics + +noGmailAccount[member.id] { + member := input.google_project_iam_binding[_] + mail := member.config.members[_] + contains(mail, "gmail.com") +} diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.137.json b/pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.137.json new file mode 100755 index 000000000..6a21bf281 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.137.json @@ -0,0 +1,10 @@ +{ + "name": "iamServiceAccountUsed", + "file": "iamServiceAccountUsed.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that IAM users are not assigned the Service Account User or Service Account Token Creator roles at project level.", + "referenceId": "accurics.gcp.IAM.137", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.138.json b/pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.138.json new file mode 100755 index 000000000..5a4001f91 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_member/accurics.gcp.IAM.138.json @@ -0,0 +1,10 @@ +{ + "name": "serviceAccountAdminPriviledges", + "file": "serviceAccountAdminPriviledges.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that Service Account has no Admin privileges.", + "referenceId": "accurics.gcp.IAM.138", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_member/iamServiceAccountUsed.rego b/pkg/policies/opa/rego/gcp/google_project_iam_member/iamServiceAccountUsed.rego new file mode 100755 index 000000000..15b659f6e --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_member/iamServiceAccountUsed.rego @@ -0,0 +1,13 @@ +package accurics + +iamServiceAccountUsed[api.id] +{ + api := input.google_project_iam_member[_] + api.config.role == "roles/iam.serviceAccountUser" +} + +iamServiceAccountUsed[api.id] +{ + api := input.google_project_iam_member[_] + api.config.role == "roles/iam.serviceAccountTokenCreator" +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_project_iam_member/serviceAccountAdminPriviledges.rego b/pkg/policies/opa/rego/gcp/google_project_iam_member/serviceAccountAdminPriviledges.rego new file mode 100755 index 000000000..37e7166bd --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_project_iam_member/serviceAccountAdminPriviledges.rego @@ -0,0 +1,8 @@ +package accurics + +serviceAccountAdminPriviledges[api.id] +{ + api := input.google_project_iam_member[_] + api.config.role == "roles/editor" + endswith(api.config.member, ".gserviceaccount.com") +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.BDR.105.json b/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.BDR.105.json new file mode 100755 index 000000000..c4df825a1 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.BDR.105.json @@ -0,0 +1,10 @@ +{ + "name": "backupConfigEnabled", + "file": "backupConfigEnabled.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure all Cloud SQL database instance have backup configuration enabled.", 
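[Editor's note] serviceAccountAdminPriviledges pairs two conditions: the bound role must be roles/editor and the member must end in .gserviceaccount.com. A hedged check with an invented project and service account:

    package accurics

    # Illustrative only: an editor-role grant to a service account should be reported.
    test_editor_service_account_flagged {
        serviceAccountAdminPriviledges["member-1"] with input as {
            "google_project_iam_member": [
                {"id": "member-1", "config": {
                    "role": "roles/editor",
                    "member": "serviceAccount:app@my-project.iam.gserviceaccount.com"
                }}
            ]
        }
    }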
+ "referenceId": "accurics.gcp.BDR.105", + "category": "Backup \u0026 Disaster Recovery", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.EKM.141.json b/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.EKM.141.json new file mode 100755 index 000000000..11e1063be --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.EKM.141.json @@ -0,0 +1,10 @@ +{ + "name": "checkRequireSSLEnabled", + "file": "checkDatabaseSettings.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that Cloud SQL database instance requires all incoming connections to use SSL", + "referenceId": "accurics.gcp.EKM.141", + "category": "Encryption \u0026 Key Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.NS.102.json b/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.NS.102.json new file mode 100755 index 000000000..d030b5ee1 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_sql_database_instance/accurics.gcp.NS.102.json @@ -0,0 +1,10 @@ +{ + "name": "checkNoPublicAccess", + "file": "checkDatabaseSettings.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that Cloud SQL database Instances are not open to the world.", + "referenceId": "accurics.gcp.NS.102", + "category": "Network Security", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_sql_database_instance/backupConfigEnabled.rego b/pkg/policies/opa/rego/gcp/google_sql_database_instance/backupConfigEnabled.rego new file mode 100755 index 000000000..8932543c0 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_sql_database_instance/backupConfigEnabled.rego @@ -0,0 +1,8 @@ +package accurics + +backupConfigEnabled[api.id]{ + api := input.google_sql_database_instance[_] + data := api.config.settings[_] + var := data.backup_configuration[_] + var.enabled == false +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_sql_database_instance/checkDatabaseSettings.rego b/pkg/policies/opa/rego/gcp/google_sql_database_instance/checkDatabaseSettings.rego new file mode 100755 index 000000000..4cc03bd89 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_sql_database_instance/checkDatabaseSettings.rego @@ -0,0 +1,27 @@ +package accurics + +checkRequireSSLEnabled[db_instance.id] { + db_instance := input.google_sql_database_instance[_] + setting := db_instance.config.settings[_] + not setting.ip_configuration +} { + db_instance := input.google_sql_database_instance[_] + setting := db_instance.config.settings[_] + ip_configuration = setting.ip_configuration[_] + not ip_configuration.require_ssl +} { + db_instance := input.google_sql_database_instance[_] + setting := db_instance.config.settings[_] + ip_configuration = setting.ip_configuration[_] + ip_configuration.require_ssl == false +} + +checkNoPublicAccess[db_instance.id] { + db_instance := input.google_sql_database_instance[_] + setting := db_instance.config.settings[_] + count(setting.ip_configuration) > 0 + ip_configuration = setting.ip_configuration[_] + count(ip_configuration.authorized_networks) > 0 + authorized_network = ip_configuration.authorized_networks[_] + authorized_network.value == "0.0.0.0" +} diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.IAM.122.json b/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.IAM.122.json new file 
mode 100755 index 000000000..7a745caef --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.IAM.122.json @@ -0,0 +1,10 @@ +{ + "name": "uniformBucketEnabled", + "file": "uniformBucketEnabled.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that Cloud Storage buckets have uniform bucket-level access enabled.", + "referenceId": "accurics.gcp.IAM.122", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.012.json b/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.012.json new file mode 100755 index 000000000..f1940d10a --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.012.json @@ -0,0 +1,10 @@ +{ + "name": "checkVersioningEnabled", + "file": "checkStorageBucketConfig.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that object versioning is enabled on log-buckets.", + "referenceId": "accurics.gcp.LOG.012", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.023.json b/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.023.json new file mode 100755 index 000000000..4ed0ae3b9 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket/accurics.gcp.LOG.023.json @@ -0,0 +1,10 @@ +{ + "name": "checkLoggingEnabled", + "file": "checkStorageBucketConfig.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that logging is enabled for Cloud storage buckets.", + "referenceId": "accurics.gcp.LOG.023", + "category": "Logging", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket/checkStorageBucketConfig.rego b/pkg/policies/opa/rego/gcp/google_storage_bucket/checkStorageBucketConfig.rego new file mode 100755 index 000000000..122f1b949 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket/checkStorageBucketConfig.rego @@ -0,0 +1,12 @@ +package accurics + +checkVersioningEnabled[log_object.id] { + log_object := input.google_storage_bucket[_] + versioning := log_object.config.versioning[_] + versioning.enabled == false +} + +checkLoggingEnabled[log_object.id] { + log_object := input.google_storage_bucket[_] + count(log_object.config.logging) <= 0 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket/uniformBucketEnabled.rego b/pkg/policies/opa/rego/gcp/google_storage_bucket/uniformBucketEnabled.rego new file mode 100755 index 000000000..7db39b09d --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket/uniformBucketEnabled.rego @@ -0,0 +1,7 @@ +package accurics + +uniformBucketEnabled[api.id] +{ + api := input.google_storage_bucket[_] + not api.config.bucket_policy_only == true +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/accurics.gcp.IAM.121.json b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/accurics.gcp.IAM.121.json new file mode 100755 index 000000000..9399307c8 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/accurics.gcp.IAM.121.json @@ -0,0 +1,10 @@ +{ + "name": "checkPubliclyAccessible", + "file": "checkPubliclyAccessible.rego", + "templateArgs": null, + "severity": "MEDIUM", + "description": "Ensure that Cloud Storage bucket is not anonymously or publicly accessible.", + "referenceId": 
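[Editor's note] checkStorageBucketConfig.rego flags buckets whose versioning block is disabled or whose logging block is empty, while uniformBucketEnabled checks the bucket_policy_only flag. An illustrative check for the logging rule, with an invented bucket id:

    package accurics

    # Illustrative only: a bucket with no logging block should be reported.
    test_bucket_without_logging_flagged {
        checkLoggingEnabled["bucket-1"] with input as {
            "google_storage_bucket": [
                {"id": "bucket-1", "config": {"logging": []}}
            ]
        }
    }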
"accurics.gcp.IAM.121", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/checkPubliclyAccessible.rego b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/checkPubliclyAccessible.rego new file mode 100755 index 000000000..905fb859d --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_binding/checkPubliclyAccessible.rego @@ -0,0 +1,15 @@ +package accurics + +checkPubliclyAccessible[api.id] +{ + api := input.google_storage_bucket_iam_binding[_] + data := api.config.members[_] + contains(data,"allUsers") +} + +checkPubliclyAccessible[api.id] +{ + api := input.google_storage_bucket_iam_binding[_] + data := api.config.members[_] + contains(data,"allAuthenticatedUsers") +} diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/accurics.gcp.IAM.120.json b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/accurics.gcp.IAM.120.json new file mode 100755 index 000000000..a6f625866 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/accurics.gcp.IAM.120.json @@ -0,0 +1,10 @@ +{ + "name": "checkNoPublicAccess", + "file": "checkNoPublicAccess.rego", + "templateArgs": null, + "severity": "HIGH", + "description": "Ensure that Cloud Storage bucket is not anonymously or publicly Accessible.", + "referenceId": "accurics.gcp.IAM.120", + "category": "Identity \u0026 Access Management", + "version": 1 +} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/checkNoPublicAccess.rego b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/checkNoPublicAccess.rego new file mode 100755 index 000000000..c563fa089 --- /dev/null +++ b/pkg/policies/opa/rego/gcp/google_storage_bucket_iam_member/checkNoPublicAccess.rego @@ -0,0 +1,9 @@ +package accurics + +checkNoPublicAccess[bucket_iam.id] { + bucket_iam := input.google_storage_bucket_iam_member[_] + bucket_iam_members := bucket_iam.config.members + public_access_users := ["allUsers", "allAuthenticatedUsers"] + some i, j + bucket_iam_members[i] == public_access_users[j] +} \ No newline at end of file From 45c7fcca6a1791639b18a83c22cb7cbd6b98f5e1 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Fri, 14 Aug 2020 11:58:11 -0700 Subject: [PATCH 187/188] Add the gcp provider support into terrascan --- pkg/policy/gcp.go | 26 ++++++++++++++++++++++++++ 1 file changed, 26 insertions(+) create mode 100644 pkg/policy/gcp.go diff --git a/pkg/policy/gcp.go b/pkg/policy/gcp.go new file mode 100644 index 000000000..a9fab5778 --- /dev/null +++ b/pkg/policy/gcp.go @@ -0,0 +1,26 @@ +/* + Copyright (C) 2020 Accurics, Inc. + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
+*/ + +package policy + +const ( + gcp supportedCloudType = "gcp" +) + +func init() { + // Register gcp as a cloud provider with terrascan + RegisterCloudProvider(gcp) +} From 593f5804dc0d414195b1f00d2e8be64c0b2c6f93 Mon Sep 17 00:00:00 2001 From: Willie Sana Date: Fri, 14 Aug 2020 12:04:53 -0700 Subject: [PATCH 188/188] removed 3 gcp rules with warnings for now --- .../accurics.gcp.IAM.128.json | 10 ---------- .../accurics.gcp.NS.126.json | 10 ---------- .../accurics.gcp.NS.129.json | 10 ---------- .../google_compute_instance/osLoginEnabled.rego | 17 ----------------- .../projectWideSshKeysUsed.rego | 17 ----------------- .../serialPortEnabled.rego | 17 ----------------- 6 files changed, 81 deletions(-) delete mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json delete mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json delete mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json delete mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego delete mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego delete mode 100755 pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json deleted file mode 100755 index 30cd8f9e0..000000000 --- a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.IAM.128.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "osLoginEnabled", - "file": "osLoginEnabled.rego", - "templateArgs": null, - "severity": "MEDIUM", - "description": "Ensure that no instance in the project overrides the project setting for enabling OSLogin", - "referenceId": "accurics.gcp.IAM.128", - "category": "Identity \u0026 Access Management", - "version": 1 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json deleted file mode 100755 index fdec91878..000000000 --- a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.126.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "projectWideSshKeysUsed", - "file": "projectWideSshKeysUsed.rego", - "templateArgs": null, - "severity": "MEDIUM", - "description": "Ensure 'Block Project-wide SSH keys' is enabled for VM instances.", - "referenceId": "accurics.gcp.NS.126", - "category": "Network Security", - "version": 1 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json b/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json deleted file mode 100755 index 01f037c84..000000000 --- a/pkg/policies/opa/rego/gcp/google_compute_instance/accurics.gcp.NS.129.json +++ /dev/null @@ -1,10 +0,0 @@ -{ - "name": "serialPortEnabled", - "file": "serialPortEnabled.rego", - "templateArgs": null, - "severity": "MEDIUM", - "description": "Ensure 'Enable connecting to serial ports' is not enabled for VM instances.", - "referenceId": "accurics.gcp.NS.129", - "category": "Network Security", - "version": 1 -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego deleted file mode 100755 index 44be172f7..000000000 --- 
a/pkg/policies/opa/rego/gcp/google_compute_instance/osLoginEnabled.rego +++ /dev/null @@ -1,17 +0,0 @@ -package accurics - -osLoginEnabled[api.id] -{ - api := input.google_compute_instance[_] - api.config.metadata != null - meta_str := lower(object.get(api.config.metadata,"enable-oslogin","undefined")) - meta_str == "false" -} - -osLoginEnabled[api.id] -{ - api := input.google_compute_project_metadata[_] - api.config.metadata != null - meta_str := lower(object.get(api.config.metadata,"enable-oslogin","undefined")) - meta_str == "false" -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego deleted file mode 100755 index 7e8b938f3..000000000 --- a/pkg/policies/opa/rego/gcp/google_compute_instance/projectWideSshKeysUsed.rego +++ /dev/null @@ -1,17 +0,0 @@ -package accurics - -projectWideSshKeysUsed[api.id] -{ - api := input.google_compute_instance[_] - api.config.metadata != null - meta_str := lower(object.get(api.config.metadata,"block-project-ssh-keys","undefined")) - meta_str == "false" -} - -projectWideSshKeysUsed[api.id] -{ - api := input.google_compute_project_metadata[_] - api.config.metadata != null - meta_str := lower(object.get(api.config.metadata,"block-project-ssh-keys","undefined")) - meta_str == "false" -} \ No newline at end of file diff --git a/pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego b/pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego deleted file mode 100755 index 60157161b..000000000 --- a/pkg/policies/opa/rego/gcp/google_compute_instance/serialPortEnabled.rego +++ /dev/null @@ -1,17 +0,0 @@ -package accurics - -serialPortEnabled[api.id] -{ - api := input.google_compute_instance[_] - api.config.metadata != null - meta_str := lower(object.get(api.config.metadata,"serial-port-enable","undefined")) - meta_str == "false" -} - -serialPortEnabled[api.id] -{ - api := input.google_compute_project_metadata[_] - api.config.metadata != null - meta_str := lower(object.get(api.config.metadata,"serial-port-enable","undefined")) - meta_str == "false" -}
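[Editor's note] For reference, the three rules removed here (osLoginEnabled, projectWideSshKeysUsed, serialPortEnabled) all read instance or project metadata through object.get with an "undefined" fallback and matched when the lower-cased value was "false". An illustrative check of the pattern they used, with an invented instance id, would have been:

    package accurics

    # Illustrative only: shows how the removed metadata-based rules matched.
    test_oslogin_override_matched {
        osLoginEnabled["vm-1"] with input as {
            "google_compute_instance": [
                {"id": "vm-1", "config": {"metadata": {"enable-oslogin": "FALSE"}}}
            ]
        }
    }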