├── requirements.txt
├── .github
│   └── contributing.md
├── examples_to_test
│   ├── test.yaml
│   └── test_folder
│       ├── rds.yaml
│       ├── s3.yaml
│       └── vpc.yaml
├── diff_tool.py
├── .gitignore
├── cli_converter.py
├── README.md
├── security_analyzer.py
├── docs_generator.py
├── templates
│   └── index.html
├── app.py
├── state_file_generator.py
└── cf_to_tf_converter.py

/requirements.txt:
--------------------------------------------------------------------------------
Flask==2.0.1
Werkzeug==2.0.1
PyYAML==5.4.1
boto3  # required by state_file_generator.py
--------------------------------------------------------------------------------
/.github/contributing.md:
--------------------------------------------------------------------------------
# Contributing
Contributions are welcome! Please feel free to submit a Pull Request.

1. Fork the repository
2. Create your feature branch (`git checkout -b feature/AmazingFeature`)
3. Commit your changes (`git commit -m 'Add some AmazingFeature'`)
4. Push to the branch (`git push origin feature/AmazingFeature`)
5. Open a Pull Request
--------------------------------------------------------------------------------
/examples_to_test/test.yaml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: '2010-09-09'
Description: 'CloudFormation template to create an S3 bucket'

Parameters:
  BucketName:
    Type: String
    Description: 'Name of the S3 bucket to create'

Resources:
  MyS3Bucket:
    Type: 'AWS::S3::Bucket'
    Properties:
      BucketName: !Ref BucketName
      AccessControl: Private
      VersioningConfiguration:
        Status: Enabled
      BucketEncryption:
        ServerSideEncryptionConfiguration:
          - ServerSideEncryptionByDefault:
              SSEAlgorithm: AES256

Outputs:
  BucketName:
    Description: 'Name of the S3 bucket'
    Value: !Ref MyS3Bucket
  BucketARN:
    Description: 'ARN of the S3 bucket'
    Value: !GetAtt MyS3Bucket.Arn
--------------------------------------------------------------------------------
/diff_tool.py:
--------------------------------------------------------------------------------
import difflib
import io

import yaml

from cf_to_tf_converter import convert_to_terraform, CloudFormationLoader

def compare_cf_tf(cf_content, tf_content):
    # cf_content is the template text, not a path, so parse the string with the
    # same loader cf_to_tf_converter uses for files. CloudFormationLoader reads
    # stream.name, so give the in-memory stream one.
    stream = io.StringIO(cf_content)
    stream.name = '<cloudformation>'
    cf_template = yaml.load(stream, Loader=CloudFormationLoader)

    # Convert CloudFormation to Terraform
    cf_as_tf = convert_to_terraform(cf_template)

    # Compare
    diff = difflib.unified_diff(
        cf_as_tf.splitlines(keepends=True),
        tf_content.splitlines(keepends=True),
        fromfile='CloudFormation (converted)',
        tofile='Existing Terraform'
    )

    return ''.join(diff)

def generate_diff_report(cf_content, tf_content):
    diff = compare_cf_tf(cf_content, tf_content)
    report = f"Diff between converted CloudFormation and existing Terraform:\n\n{diff}"
    return report

if __name__ == "__main__":
    cf_file = "path/to/cloudformation.yaml"
    tf_file = "path/to/terraform.tf"
    with open(cf_file, 'r') as cf, open(tf_file, 'r') as tf:
        report = generate_diff_report(cf.read(), tf.read())
        print(report)
--------------------------------------------------------------------------------
/examples_to_test/test_folder/rds.yaml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: '2010-09-09'
Description: 'CloudFormation template for RDS instance with CloudWatch alarm'
Parameters:
  DBName:
    Type: String
    Description: 'The database name'
  DBUsername:
    Type: String
    Description: 'The database admin account username'
  DBPassword:
    Type: String
    NoEcho: true
    Description: 'The database admin account password'
  DBInstanceClass:
    Type: String
    Default: db.t3.micro
    Description: 'The database instance type'

Resources:
  MyDBInstance:
    Type: AWS::RDS::DBInstance
    Properties:
      DBName: !Ref DBName
      Engine: mysql
      MasterUsername: !Ref DBUsername
      MasterUserPassword: !Ref DBPassword
      DBInstanceClass: !Ref DBInstanceClass
      AllocatedStorage: '20'
      PubliclyAccessible: false
      BackupRetentionPeriod: 7
      MultiAZ: false

  CPUUtilizationAlarm:
    Type: AWS::CloudWatch::Alarm
    Properties:
      AlarmDescription: 'Alarm if CPU exceeds 75% for 5 minutes'
      Namespace: 'AWS/RDS'
      MetricName: CPUUtilization
      Dimensions:
        - Name: DBInstanceIdentifier
          Value: !Ref MyDBInstance
      Statistic: Average
      Period: 300
      EvaluationPeriods: 1
      Threshold: 75
      ComparisonOperator: GreaterThanThreshold
      AlarmActions:
        - !Ref AlarmTopic

  AlarmTopic:
    Type: AWS::SNS::Topic
    Properties:
      DisplayName: 'RDS Alarm Topic'
      TopicName: 'RDSAlarmTopic'

Outputs:
  DBInstanceEndpoint:
    Description: 'Connection endpoint for the database'
    Value: !GetAtt MyDBInstance.Endpoint.Address
  DBInstancePort:
    Description: 'Port for the database connection'
    Value: !GetAtt MyDBInstance.Endpoint.Port
  AlarmTopicARN:
    Description: 'ARN of the SNS topic for alarms'
    Value: !Ref AlarmTopic
--------------------------------------------------------------------------------
/examples_to_test/test_folder/s3.yaml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: '2010-09-09'
Description: 'CloudFormation template for S3 bucket with Lambda trigger'

Parameters:
  BucketName:
    Type: String
    Description: 'Name of the S3 bucket to create'
  LambdaFunctionName:
    Type: String
    Description: 'Name of the Lambda function'

Resources:
  MyS3Bucket:
    Type: 'AWS::S3::Bucket'
    Properties:
      BucketName: !Ref BucketName
      VersioningConfiguration:
        Status: Enabled
      BucketEncryption:
        ServerSideEncryptionConfiguration:
          - ServerSideEncryptionByDefault:
              SSEAlgorithm: AES256

  LambdaFunction:
    Type: 'AWS::Lambda::Function'
    Properties:
      FunctionName: !Ref LambdaFunctionName
      Handler: index.handler
      Role: !GetAtt LambdaExecutionRole.Arn
      Code:
        ZipFile: |
          exports.handler = async (event) => {
            console.log('Received S3 event:', JSON.stringify(event, null, 2));
            return { statusCode: 200, body: 'Hello from Lambda!' };
          };
      Runtime: nodejs14.x

  LambdaExecutionRole:
    Type: 'AWS::IAM::Role'
    Properties:
      AssumeRolePolicyDocument:
        Version: '2012-10-17'
        Statement:
          - Effect: Allow
            Principal:
              Service: lambda.amazonaws.com
            Action: 'sts:AssumeRole'
      Policies:
        - PolicyName: LambdaS3Access
          PolicyDocument:
            Version: '2012-10-17'
            Statement:
              - Effect: Allow
                Action:
                  - 's3:GetObject'
                  - 's3:PutObject'
                Resource: !Sub 'arn:aws:s3:::${MyS3Bucket}/*'

  S3BucketPermission:
    Type: 'AWS::Lambda::Permission'
    Properties:
      FunctionName: !Ref LambdaFunction
      Action: 'lambda:InvokeFunction'
      Principal: 's3.amazonaws.com'
      SourceArn: !GetAtt MyS3Bucket.Arn

Outputs:
  BucketName:
    Description: 'Name of the created S3 bucket'
    Value: !Ref MyS3Bucket
  LambdaFunctionArn:
    Description: 'ARN of the Lambda function'
    Value: !GetAtt LambdaFunction.Arn
--------------------------------------------------------------------------------
/examples_to_test/test_folder/vpc.yaml:
--------------------------------------------------------------------------------
AWSTemplateFormatVersion: '2010-09-09'
Description: 'CloudFormation template for VPC with EC2 instance'

Parameters:
  VpcCIDR:
    Type: String
    Default: 10.0.0.0/16
    Description: 'CIDR block for the VPC'
  PublicSubnetCIDR:
    Type: String
    Default: 10.0.1.0/24
    Description: 'CIDR block for the public subnet'
  InstanceType:
    Type: String
    Default: t2.micro
    Description: 'EC2 instance type'
  KeyName:
    Type: AWS::EC2::KeyPair::KeyName
    Description: 'Name of an existing EC2 KeyPair to enable SSH access to the instance'

Resources:
  MyVPC:
    Type: AWS::EC2::VPC
    Properties:
      CidrBlock: !Ref VpcCIDR
      EnableDnsHostnames: true
      Tags:
        - Key: Name
          Value: MyVPC

  PublicSubnet:
    Type: AWS::EC2::Subnet
    Properties:
      VpcId: !Ref MyVPC
      CidrBlock: !Ref PublicSubnetCIDR
      MapPublicIpOnLaunch: true
      Tags:
        - Key: Name
          Value: Public Subnet

  InternetGateway:
    Type: AWS::EC2::InternetGateway

  AttachGateway:
    Type: AWS::EC2::VPCGatewayAttachment
    Properties:
      VpcId: !Ref MyVPC
      InternetGatewayId: !Ref InternetGateway

  PublicRouteTable:
    Type: AWS::EC2::RouteTable
    Properties:
      VpcId: !Ref MyVPC
      Tags:
        - Key: Name
          Value: Public Route Table

  PublicRoute:
    Type: AWS::EC2::Route
    DependsOn: AttachGateway
    Properties:
      RouteTableId: !Ref PublicRouteTable
      DestinationCidrBlock: 0.0.0.0/0
      GatewayId: !Ref InternetGateway

  PublicSubnetRouteTableAssociation:
    Type: AWS::EC2::SubnetRouteTableAssociation
    Properties:
      SubnetId: !Ref PublicSubnet
      RouteTableId: !Ref PublicRouteTable

  EC2Instance:
    Type: AWS::EC2::Instance
    Properties:
      InstanceType: !Ref InstanceType
      KeyName: !Ref KeyName
      ImageId: ami-0aa7d40eeae50c9a9 # Amazon Linux 2 AMI in us-east-1
      NetworkInterfaces:
        - AssociatePublicIpAddress: "true"
          DeviceIndex: "0"
          GroupSet:
            - !Ref EC2SecurityGroup
          SubnetId: !Ref PublicSubnet
      Tags:
        - Key: Name
          Value: My EC2 Instance

  EC2SecurityGroup:
    Type: AWS::EC2::SecurityGroup
    Properties:
      GroupDescription: Allow SSH and HTTP
      VpcId: !Ref MyVPC
      SecurityGroupIngress:
        - IpProtocol: tcp
          FromPort: 22
          ToPort: 22
          CidrIp: 0.0.0.0/0
        - IpProtocol: tcp
          FromPort: 80
          ToPort: 80
          CidrIp: 0.0.0.0/0

Outputs:
  VPC:
    Description: A reference to the created VPC
    Value: !Ref MyVPC
    Export:
      Name: !Sub "${AWS::StackName}-VPCID"
  PublicSubnet:
    Description: A reference to the public subnet
    Value: !Ref PublicSubnet
    Export:
      Name: !Sub "${AWS::StackName}-PublicSubnet"
  EC2InstancePublicDNS:
    Description: Public DNS name of the newly created EC2 instance
    Value: !GetAtt EC2Instance.PublicDnsName
  EC2InstancePublicIP:
    Description: Public IP address of the newly created EC2 instance
    Value: !GetAtt EC2Instance.PublicIp
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
# Gitignore template located at https://github.com/github/gitignore/blob/main/Python.gitignore

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
.pybuilder/
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/

# pytype static type analyzer
.pytype/

# Cython debug symbols
cython_debug/

# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
--------------------------------------------------------------------------------
/cli_converter.py:
--------------------------------------------------------------------------------
import os
import sys
import argparse
import zipfile
from cf_to_tf_converter import process_cf_file
from docs_generator import generate_docs, save_docs
from state_file_generator import generate_state_file
from diff_tool import generate_diff_report

def convert_files(input_path, output_dir, regions):
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if os.path.isfile(input_path):
        if input_path.endswith('.zip'):
            with zipfile.ZipFile(input_path, 'r') as zip_ref:
                zip_ref.extractall(output_dir)
            for root, _, files in os.walk(output_dir):
                for file in files:
                    if file.endswith(('.yaml', '.yml', '.json')):
                        file_path = os.path.join(root, file)
                        convert_single_file(file_path, output_dir)
        else:
            convert_single_file(input_path, output_dir)
    elif os.path.isdir(input_path):
        for root, _, files in os.walk(input_path):
            for file in files:
                if file.endswith(('.yaml', '.yml', '.json')):
                    file_path = os.path.join(root, file)
                    convert_single_file(file_path, output_dir)
    else:
        print(f"Error: {input_path} is not a valid file or directory")
        sys.exit(1)

    # Generate state file
    resource_types = ["aws_s3_bucket", "aws_ec2_instance", "aws_vpc", "aws_subnet", "aws_security_group"]
    state_file = generate_state_file(regions, resource_types)
    state_file_path = os.path.join(output_dir, 'terraform.tfstate')
    with open(state_file_path, 'w') as f:
        f.write(state_file)
    print(f"State file generated: {state_file_path}")

def convert_single_file(file_path, output_dir):
    try:
        result = process_cf_file(file_path)
        tf_output = result["terraform_code"]
        security_report = result["security_report"]
        security_score = result["security_score"]
        security_issues = result["security_issues"]

        output_filename = os.path.splitext(os.path.basename(file_path))[0]
        tf_output_path = os.path.join(output_dir, f"{output_filename}.tf")
        report_output_path = os.path.join(output_dir, f"{output_filename}_security_report.txt")
        docs_output_path = os.path.join(output_dir, f"{output_filename}_docs.md")
        diff_output_path = os.path.join(output_dir, f"{output_filename}_diff.txt")

        with open(tf_output_path, 'w') as f:
            f.write(tf_output)
        with open(report_output_path, 'w') as f:
            f.write(f"Security Score: {security_score}/100\n\n")
            f.write(security_report)

        # Generate and save documentation
        docs = generate_docs(tf_output, security_issues)
        save_docs(docs, docs_output_path)

        # Generate diff report
        with open(file_path, 'r') as cf_file, open(tf_output_path, 'r') as tf_file:
            diff_report = generate_diff_report(cf_file.read(), tf_file.read())
        with open(diff_output_path, 'w') as f:
            f.write(diff_report)

        print(f"Converted {file_path} to {tf_output_path}")
        print(f"Security report saved to {report_output_path}")
        print(f"Documentation saved to {docs_output_path}")
        print(f"Diff report saved to {diff_output_path}")
        print(f"Security Score: {security_score}/100")
    except Exception as e:
        print(f"Error converting {file_path}: {str(e)}")

def main():
    parser = argparse.ArgumentParser(description='Convert CloudFormation templates to Terraform')
    parser.add_argument('input', help='Input file or directory path')
    parser.add_argument('-o', '--output', default='converted_files', help='Output directory (default: converted_files)')
    parser.add_argument('-r', '--regions', nargs='+', default=['us-west-2'], help='AWS regions for state file generation (default: us-west-2)')
    args = parser.parse_args()

    input_path = os.path.abspath(args.input)
    output_dir = os.path.abspath(args.output)

    convert_files(input_path, output_dir, args.regions)
    print(f"Conversion complete. Converted files are in {output_dir}")

if __name__ == '__main__':
    main()
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# CloudFormation to Terraform Converter

## Table of Contents
1. [Introduction](#introduction)
2. [Features](#features)
3. [Prerequisites](#prerequisites)
4. [Installation](#installation)
5. [Usage](#usage)
   - [Web Application](#web-application)
   - [Command-Line Interface](#command-line-interface)
6. [Project Structure](#project-structure)
7. [How It Works](#how-it-works)
8. [Customization](#customization)
9. [Limitations](#limitations)
10. [Troubleshooting](#troubleshooting)
11. [Contributing](#contributing)

## Introduction

The CloudFormation to Terraform Converter is a tool that simplifies the process of migrating AWS CloudFormation templates to Terraform configuration files. It is designed for cloud engineers and DevOps professionals who are transitioning from AWS-specific infrastructure-as-code to a more cloud-agnostic approach using Terraform.

## Features

- Web-based interface for easy file uploads
- Command-line interface for local file conversion
- Supports single file, multiple files, and ZIP file uploads
- Converts CloudFormation (YAML/JSON) to Terraform (.tf) format
- Generates a security analysis report with a 0-100 score for each template
- Produces Markdown documentation for converted resources, variables, and outputs
- Creates a diff report comparing the converted output against existing Terraform
- Generates a skeleton `terraform.tfstate` from live AWS resources (requires AWS credentials)
- Provides immediate download of converted files
- Allows multiple conversions without page refresh in the web interface
- Responsive design for various device sizes

## Prerequisites

- Python 3.7+
- Flask
- Werkzeug
- PyYAML
- boto3 (for state file generation)
- A modern web browser (Chrome, Firefox, Safari, or Edge) for the web interface

## Installation

1. Clone the repository:
   ```
   git clone https://github.com/aperswal/CloudFormation_To_Terraform.git
   cd CloudFormation_To_Terraform
   ```

2. Create a virtual environment (optional but recommended):
   ```
   python -m venv venv
   source venv/bin/activate  # On Windows, use `venv\Scripts\activate`
   ```

3. Install the required packages:
   ```
   pip install -r requirements.txt
   ```

## Usage

### Web Application

1. Start the Flask server:
   ```
   python app.py
   ```

2. Open a web browser and navigate to `http://localhost:5000`

3. Click on the "Pick Your Files" button and select your CloudFormation template(s) or a ZIP file containing multiple templates.

4. The conversion will start automatically, and you'll receive a ZIP file with the converted Terraform files.

### Command-Line Interface

1. Run the CLI converter:
   ```
   python cli_converter.py <input_path> [-o <output_directory>] [-r <region> ...]
   ```

   Examples:
   ```
   python cli_converter.py my_template.yaml
   python cli_converter.py my_templates_folder
   python cli_converter.py my_templates.zip -o converted_terraform
   ```

2. The converted files, security reports, documentation, and diff reports will be placed in the specified output directory (or `converted_files` by default).

## Project Structure

```
CloudFormation_To_Terraform/
│
├── app.py                    # Main Flask application
├── cli_converter.py          # Command-line interface for conversion
├── cf_to_tf_converter.py     # Core conversion logic
├── security_analyzer.py      # Security checks and scoring
├── docs_generator.py         # Markdown documentation generator
├── diff_tool.py              # Diff between converted and existing Terraform
├── state_file_generator.py   # Builds terraform.tfstate from live AWS resources
├── examples_to_test/         # Sample CloudFormation templates
├── templates/
│   └── index.html            # Main page template
├── static/
│   └── css/
│       └── styles.css        # (Optional) Additional styles
├── requirements.txt          # Python dependencies
└── README.md                 # This file
```

## How It Works

1. The user selects CloudFormation files through the web interface or specifies them via the command line.
2. Files are processed (either uploaded to the server or read locally).
3. The `cf_to_tf_converter.py` script processes each file, converting CloudFormation syntax to Terraform and running the security analyzer, documentation generator, and diff tool.
4. Converted files are either zipped and sent back to the user's browser (web interface) or saved to a local directory (CLI).
5. Temporary files are cleaned up after processing.
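The same pipeline can also be driven directly from Python; a minimal sketch using the bundled example template:

```python
from cf_to_tf_converter import process_cf_file

result = process_cf_file("examples_to_test/test.yaml")
print(result["terraform_code"])           # converted HCL
print(result["security_report"])          # findings from security_analyzer.py
print(f"Security Score: {result['security_score']}/100")
```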
## Customization

- Modify `index.html` to change the user interface.
- Adjust styles in `static/css/styles.css` (if you decide to separate CSS from HTML).
- Extend `cf_to_tf_converter.py` to support additional CloudFormation resource types or improve conversion accuracy.

## Limitations

- Not all CloudFormation resources may have direct Terraform equivalents.
- Complex CloudFormation templates with custom resources or intrinsic functions may require manual adjustment after conversion.
- The tool does not currently support AWS-specific features that don't have Terraform counterparts.

## Troubleshooting

- If conversions fail, check the server logs or command-line output for detailed error messages.
- Ensure your CloudFormation templates are valid before attempting conversion.
- For large files or many concurrent users, you may need to adjust Flask's configuration for better performance.

## Contributing

Please see [contributing.md](.github/contributing.md).
--------------------------------------------------------------------------------
/security_analyzer.py:
--------------------------------------------------------------------------------
import re
from typing import List, Dict, Any

def _line_of(code: str, needle: str) -> int:
    """Return the 1-based line number of the first occurrence of needle."""
    return code.count('\n', 0, code.index(needle)) + 1

def analyze_security(terraform_code: str) -> List[Dict[str, Any]]:
    issues = []

    # Check for hardcoded secrets
    secret_pattern = r'(password|secret|key)\s*=\s*"[^"]*"'
    for match in re.finditer(secret_pattern, terraform_code, re.IGNORECASE):
        issues.append({
            "severity": "High",
            "type": "Hardcoded Secret",
            "description": f"Potential hardcoded secret detected: {match.group(0)}",
            "line": terraform_code.count('\n', 0, match.start()) + 1
        })

    # Check for public S3 buckets
    if 'acl = "public-read"' in terraform_code or 'acl = "public-read-write"' in terraform_code:
        needle = 'acl = "public-read"' if 'acl = "public-read"' in terraform_code else 'acl = "public-read-write"'
        issues.append({
            "severity": "High",
            "type": "Public S3 Bucket",
            "description": "S3 bucket with public read or read-write access detected",
            "line": _line_of(terraform_code, needle)
        })

    # Check for unrestricted security group ingress
    if 'ingress {' in terraform_code and 'cidr_blocks = ["0.0.0.0/0"]' in terraform_code:
        issues.append({
            "severity": "Medium",
            "type": "Unrestricted Ingress",
            "description": "Unrestricted security group ingress rule detected",
            "line": _line_of(terraform_code, 'ingress {')
        })

    # Check for unencrypted resources
    if 'encrypted = false' in terraform_code:
        issues.append({
            "severity": "Medium",
            "type": "Unencrypted Resource",
            "description": "Unencrypted resource detected",
            "line": _line_of(terraform_code, 'encrypted = false')
        })

    # Check for use of default VPC
    if 'vpc_id = aws_default_vpc' in terraform_code:
        issues.append({
            "severity": "Low",
            "type": "Default VPC Usage",
            "description": "Usage of default VPC detected. Consider creating a custom VPC for better security",
            "line": _line_of(terraform_code, 'vpc_id = aws_default_vpc')
        })

    # Check for unencrypted S3 bucket
    if 'resource "aws_s3_bucket"' in terraform_code and 'server_side_encryption_configuration {' not in terraform_code:
        issues.append({
            "severity": "Medium",
            "type": "Unencrypted S3 Bucket",
            "description": "S3 bucket without server-side encryption detected",
            "line": _line_of(terraform_code, 'resource "aws_s3_bucket"')
        })

    # Check for unrestricted outbound traffic
    if 'egress {' in terraform_code and 'cidr_blocks = ["0.0.0.0/0"]' in terraform_code:
        issues.append({
            "severity": "Low",
            "type": "Unrestricted Egress",
            "description": "Unrestricted outbound traffic detected in security group",
            "line": _line_of(terraform_code, 'egress {')
        })

    return issues

def generate_security_report(terraform_code: str) -> str:
    issues = analyze_security(terraform_code)

    if not issues:
        return "No security issues detected."

    report = "Security Analysis Report:\n\n"
    for issue in issues:
        report += f"[{issue['severity']}] {issue['type']} (Line {issue['line']}):\n"
        report += f"  {issue['description']}\n\n"

    return report

def get_security_score(issues: List[Dict[str, Any]]) -> int:
    severity_scores = {"High": 10, "Medium": 5, "Low": 2}
    total_score = 100 - sum(severity_scores[issue['severity']] for issue in issues)
    return max(0, total_score)  # Ensure score doesn't go below 0

if __name__ == "__main__":
    # For testing purposes
    test_code = """
resource "aws_s3_bucket" "example" {
  bucket = "my-bucket"
  acl    = "public-read"
}

resource "aws_security_group" "example" {
  name        = "allow_all"
  description = "Allow all inbound traffic"

  ingress {
    from_port   = 0
    to_port     = 0
    protocol    = "-1"
    cidr_blocks = ["0.0.0.0/0"]
  }
}

resource "aws_db_instance" "example" {
  engine              = "mysql"
  instance_class      = "db.t3.micro"
  name                = "mydb"
  username            = "foo"
  password            = "foobarbaz"
  skip_final_snapshot = true
  encrypted           = false
}
"""

    issues = analyze_security(test_code)
    report = generate_security_report(test_code)
    score = get_security_score(issues)

    print(report)
    print(f"Security Score: {score}/100")
--------------------------------------------------------------------------------
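`get_security_score()` starts at 100 and subtracts 10, 5, or 2 points per High, Medium, or Low finding. A worked example of that arithmetic, assuming the module above is on the import path:

```python
from security_analyzer import analyze_security, get_security_score

code = '''
resource "aws_db_instance" "db" {
  password  = "super-secret"
  encrypted = false
}
'''
issues = analyze_security(code)    # one High (hardcoded secret), one Medium (unencrypted)
print(get_security_score(issues))  # 100 - 10 - 5 = 85
```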
docs.append(f"Description: {var_details['description']}\n") 25 | if 'type' in var_details: 26 | docs.append(f"Type: {var_details['type']}\n") 27 | if 'default' in var_details: 28 | docs.append(f"Default: {var_details['default']}\n") 29 | docs.append("\n") 30 | 31 | # Add a section for outputs 32 | outputs = parse_outputs(terraform_code) 33 | if outputs: 34 | docs.append("## Outputs\n") 35 | for output_name, output_details in outputs.items(): 36 | docs.append(f"### {output_name}\n") 37 | if 'description' in output_details: 38 | docs.append(f"Description: {output_details['description']}\n") 39 | docs.append(f"Value: {output_details['value']}\n\n") 40 | 41 | # Add a section for security analysis 42 | docs.append("## Security Analysis\n") 43 | if security_issues: 44 | for issue in security_issues: 45 | docs.append(f"- **{issue['type']}** (Severity: {issue['severity']})\n") 46 | docs.append(f" - Description: {issue['description']}\n") 47 | docs.append(f" - Line: {issue['line']}\n") 48 | docs.append("\n") 49 | else: 50 | docs.append("No security issues detected.\n") 51 | 52 | return "\n".join(docs) 53 | 54 | def parse_resources(terraform_code: str) -> Dict[str, List[str]]: 55 | """Parse the Terraform code to extract resource types and names.""" 56 | resources = {} 57 | resource_pattern = r'resource\s+"(\w+)"\s+"(\w+)"\s+{' 58 | for match in re.finditer(resource_pattern, terraform_code): 59 | resource_type, resource_name = match.groups() 60 | if resource_type not in resources: 61 | resources[resource_type] = [] 62 | resources[resource_type].append(resource_name) 63 | return resources 64 | 65 | def parse_variables(terraform_code: str) -> Dict[str, Dict[str, str]]: 66 | """Parse the Terraform code to extract variables.""" 67 | variables = {} 68 | variable_pattern = r'variable\s+"(\w+)"\s+{([^}]*)}' 69 | for match in re.finditer(variable_pattern, terraform_code, re.DOTALL): 70 | var_name, var_block = match.groups() 71 | variables[var_name] = {} 72 | if 'description' in var_block: 73 | variables[var_name]['description'] = re.search(r'description\s*=\s*"([^"]*)"', var_block).group(1) 74 | if 'type' in var_block: 75 | variables[var_name]['type'] = re.search(r'type\s*=\s*(\w+)', var_block).group(1) 76 | if 'default' in var_block: 77 | variables[var_name]['default'] = re.search(r'default\s*=\s*([^\n]+)', var_block).group(1) 78 | return variables 79 | 80 | def parse_outputs(terraform_code: str) -> Dict[str, Dict[str, str]]: 81 | """Parse the Terraform code to extract outputs.""" 82 | outputs = {} 83 | output_pattern = r'output\s+"(\w+)"\s+{([^}]*)}' 84 | for match in re.finditer(output_pattern, terraform_code, re.DOTALL): 85 | output_name, output_block = match.groups() 86 | outputs[output_name] = {} 87 | if 'description' in output_block: 88 | outputs[output_name]['description'] = re.search(r'description\s*=\s*"([^"]*)"', output_block).group(1) 89 | if 'value' in output_block: 90 | outputs[output_name]['value'] = re.search(r'value\s*=\s*([^\n]+)', output_block).group(1) 91 | return outputs 92 | 93 | def save_docs(docs: str, output_path: str): 94 | """Save the generated documentation to a file.""" 95 | with open(output_path, 'w') as f: 96 | f.write(docs) 97 | 98 | if __name__ == "__main__": 99 | # For testing purposes 100 | test_code = """ 101 | variable "example_var" { 102 | description = "An example variable" 103 | type = string 104 | default = "example" 105 | } 106 | 107 | resource "aws_s3_bucket" "example" { 108 | bucket = "my-bucket" 109 | acl = "private" 110 | } 111 | 112 | output "bucket_name" { 113 
| description = "The name of the S3 bucket" 114 | value = aws_s3_bucket.example.id 115 | } 116 | """ 117 | 118 | test_security_issues = [ 119 | { 120 | "severity": "Low", 121 | "type": "Example Issue", 122 | "description": "This is an example security issue", 123 | "line": 5 124 | } 125 | ] 126 | 127 | docs = generate_docs(test_code, test_security_issues) 128 | print(docs) -------------------------------------------------------------------------------- /templates/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | CloudFormation to Terraform Converter 7 | 8 | 96 | 97 | 98 |
/templates/index.html:
--------------------------------------------------------------------------------
[The HTML source (~155 lines) did not survive this export: the markup was stripped, leaving only bare line numbers. What remains recoverable: the page is titled "CloudFormation to Terraform Converter", shows the prompt "Upload your CloudFormation template(s) to convert them to Terraform, receive a security analysis, and get documentation.", and contains a file-picker form plus inline CSS and the JavaScript that posts the selected files to the /convert endpoint and downloads the resulting ZIP.]
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
import os
import time
import uuid
from flask import Flask, request, render_template, send_file, jsonify
from werkzeug.utils import secure_filename
import tempfile
import shutil
import zipfile
from cf_to_tf_converter import process_cf_file
from docs_generator import generate_docs, save_docs
from state_file_generator import generate_state_file
from diff_tool import generate_diff_report


app = Flask(__name__)

ALLOWED_EXTENSIONS = {'yaml', 'yml', 'json', 'zip'}
TEMP_DIR = os.path.join(tempfile.gettempdir(), 'cf2tf_converter')

def allowed_file(filename):
    return '.' in filename and filename.rsplit('.', 1)[1].lower() in ALLOWED_EXTENSIONS

@app.route('/', methods=['GET'])
def index():
    return render_template('index.html')

@app.route('/convert', methods=['POST'])
def convert_files():
    if 'file' not in request.files:
        return jsonify({'error': 'No file uploaded'}), 400

    files = request.files.getlist('file')
    if not files or files[0].filename == '':
        return jsonify({'error': 'No selected file'}), 400

    conversion_id = str(uuid.uuid4())
    conversion_dir = os.path.join(TEMP_DIR, conversion_id)
    input_dir = os.path.join(conversion_dir, 'input')
    output_dir = os.path.join(conversion_dir, 'output')
    os.makedirs(input_dir, exist_ok=True)
    os.makedirs(output_dir, exist_ok=True)

    try:
        results = []
        processed = set()  # paths already converted, so the walk below doesn't redo them
        for file in files:
            if file and allowed_file(file.filename):
                filename = secure_filename(file.filename)
                file_path = os.path.join(input_dir, filename)
                file.save(file_path)

                if filename.endswith('.zip'):
                    with zipfile.ZipFile(file_path, 'r') as zip_ref:
                        zip_ref.extractall(input_dir)
                    os.remove(file_path)
                else:
                    result = process_cf_file(file_path)
                    results.append(result)
                    processed.add(file_path)

                    # Generate and save documentation
                    docs = generate_docs(result["terraform_code"], result["security_issues"])
                    docs_filename = os.path.splitext(filename)[0] + '_docs.md'
                    docs_path = os.path.join(output_dir, docs_filename)
                    save_docs(docs, docs_path)

                    # Save Terraform code
                    tf_filename = os.path.splitext(filename)[0] + '.tf'
                    tf_path = os.path.join(output_dir, tf_filename)
                    with open(tf_path, 'w') as f:
                        f.write(result["terraform_code"])

                    # Generate diff report
                    with open(file_path, 'r') as cf_file, open(tf_path, 'r') as tf_file:
                        diff_report = generate_diff_report(cf_file.read(), tf_file.read())
                    diff_filename = os.path.splitext(filename)[0] + '_diff.txt'
                    diff_path = os.path.join(output_dir, diff_filename)
                    with open(diff_path, 'w') as f:
                        f.write(diff_report)

        # Process any extracted files from zip archives, skipping files already
        # handled above and any nested archives
        for root, _, extracted in os.walk(input_dir):
            for name in extracted:
                file_path = os.path.join(root, name)
                if file_path in processed or not allowed_file(name) or name.endswith('.zip'):
                    continue
                result = process_cf_file(file_path)
                results.append(result)

                # Generate and save documentation
                docs = generate_docs(result["terraform_code"], result["security_issues"])
                docs_filename = os.path.splitext(name)[0] + '_docs.md'
                docs_path = os.path.join(output_dir, docs_filename)
                save_docs(docs, docs_path)

                # Save Terraform code
                tf_filename = os.path.splitext(name)[0] + '.tf'
                tf_path = os.path.join(output_dir, tf_filename)
                with open(tf_path, 'w') as f:
                    f.write(result["terraform_code"])

                # Generate diff report
                with open(file_path, 'r') as cf_file, open(tf_path, 'r') as tf_file:
                    diff_report = generate_diff_report(cf_file.read(), tf_file.read())
                diff_filename = os.path.splitext(name)[0] + '_diff.txt'
                diff_path = os.path.join(output_dir, diff_filename)
                with open(diff_path, 'w') as f:
                    f.write(diff_report)

        # Generate state file
        regions = ["us-west-2", "us-east-1"]  # You might want to make this configurable
        resource_types = ["aws_s3_bucket", "aws_ec2_instance", "aws_vpc", "aws_subnet", "aws_security_group"]
        state_file = generate_state_file(regions, resource_types)
        state_file_path = os.path.join(output_dir, 'terraform.tfstate')
        with open(state_file_path, 'w') as f:
            f.write(state_file)

        # Create a zip file of all converted files
        zip_filename = f'converted_files_{conversion_id}.zip'
        zip_path = os.path.join(conversion_dir, zip_filename)
        with zipfile.ZipFile(zip_path, 'w') as zipf:
            for root, _, out_files in os.walk(output_dir):
                for file in out_files:
                    file_path = os.path.join(root, file)
                    arcname = os.path.relpath(file_path, output_dir)
                    zipf.write(file_path, arcname)

        return jsonify({
            "results": results,
            "download_url": f"/download_converted_files/{conversion_id}"
        })

    except Exception as e:
        return jsonify({'error': f'Conversion failed: {str(e)}'}), 500
    finally:
        # Don't remove the conversion directory here, as we need it for the download
        pass

@app.route('/download_converted_files/<conversion_id>', methods=['GET'])
def download_converted_files(conversion_id):
    conversion_dir = os.path.join(TEMP_DIR, conversion_id)
    zip_filename = f'converted_files_{conversion_id}.zip'
    zip_path = os.path.join(conversion_dir, zip_filename)

    if not os.path.exists(zip_path):
        return jsonify({'error': 'Converted files not found'}), 404

    return send_file(zip_path, as_attachment=True, download_name=zip_filename)

def safe_remove(path):
    try:
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif os.path.isfile(path):
            os.remove(path)
    except Exception as e:
        app.logger.error(f"Error removing {path}: {str(e)}")

@app.teardown_appcontext
def cleanup_temp_files(error):
    if os.path.exists(TEMP_DIR):
        for d in os.listdir(TEMP_DIR):
            path = os.path.join(TEMP_DIR, d)
            try:
                if os.path.isdir(path) and (time.time() - os.path.getmtime(path)) > 3600:
                    safe_remove(path)
            except Exception as e:
                app.logger.error(f"Error cleaning up {path}: {str(e)}")

if __name__ == '__main__':
    os.makedirs(TEMP_DIR, exist_ok=True)
    app.run(debug=True)
--------------------------------------------------------------------------------
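A quick way to exercise the endpoints above from a script; a sketch that assumes the third-party `requests` package (not in requirements.txt) and a server running locally:

```python
import requests

with open("examples_to_test/test.yaml", "rb") as f:
    resp = requests.post("http://localhost:5000/convert",
                         files={"file": ("test.yaml", f)})
resp.raise_for_status()
download_url = resp.json()["download_url"]

# Fetch the ZIP of converted files that /convert prepared for this conversion ID
archive = requests.get(f"http://localhost:5000{download_url}")
with open("converted_files.zip", "wb") as out:
    out.write(archive.content)
```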
/state_file_generator.py:
--------------------------------------------------------------------------------
import boto3
import json
from botocore.exceptions import ClientError
from typing import List, Dict, Any

def generate_state_file(regions: List[str], resource_types: List[str]) -> str:
    state = {
        "version": 4,
        "terraform_version": "1.0.0",
        "serial": 1,
        "lineage": "",
        "outputs": {},
        "resources": []
    }

    for region in regions:
        session = boto3.Session(region_name=region)

        for resource_type in resource_types:
            try:
                resources = fetch_resources(session, resource_type)
                state['resources'].extend(resources)
            except Exception as e:
                print(f"Error fetching {resource_type} in {region}: {str(e)}")

    return json.dumps(state, indent=2)

def fetch_resources(session: boto3.Session, resource_type: str) -> List[Dict[str, Any]]:
    resources = []

    if resource_type == 'aws_s3_bucket':
        resources.extend(fetch_s3_buckets(session))
    elif resource_type == 'aws_ec2_instance':
        resources.extend(fetch_ec2_instances(session))
    elif resource_type == 'aws_vpc':
        resources.extend(fetch_vpcs(session))
    elif resource_type == 'aws_subnet':
        resources.extend(fetch_subnets(session))
    elif resource_type == 'aws_security_group':
        resources.extend(fetch_security_groups(session))
    # Add more resource types here as needed

    return resources

def fetch_s3_buckets(session: boto3.Session) -> List[Dict[str, Any]]:
    # Note: list_buckets() is a global call, so running this for several
    # regions will add the same buckets to the state more than once.
    s3 = session.client('s3')
    resources = []
    try:
        response = s3.list_buckets()
        for bucket in response['Buckets']:
            resources.append({
                "mode": "managed",
                "type": "aws_s3_bucket",
                "name": bucket['Name'],
                "provider": "provider[\"registry.terraform.io/hashicorp/aws\"]",
                "instances": [
                    {
                        "schema_version": 0,
                        "attributes": {
                            "bucket": bucket['Name'],
                            "arn": f"arn:aws:s3:::{bucket['Name']}",
                            "region": s3.get_bucket_location(Bucket=bucket['Name'])['LocationConstraint'] or 'us-east-1'
                        }
                    }
                ]
            })
    except ClientError as e:
        print(f"Error fetching S3 buckets: {e}")
    return resources

def fetch_ec2_instances(session: boto3.Session) -> List[Dict[str, Any]]:
    ec2 = session.resource('ec2')
    resources = []
    try:
        for instance in ec2.instances.all():
            resources.append({
                "mode": "managed",
                "type": "aws_instance",
                "name": instance.id,
                "provider": "provider[\"registry.terraform.io/hashicorp/aws\"]",
                "instances": [
                    {
                        "schema_version": 1,
                        "attributes": {
                            "id": instance.id,
                            "instance_type": instance.instance_type,
                            "ami": instance.image_id,
                            "vpc_id": instance.vpc_id,
                            "subnet_id": instance.subnet_id,
                            "private_ip": instance.private_ip_address,
                            "public_ip": instance.public_ip_address,
                        }
                    }
                ]
            })
    except ClientError as e:
        print(f"Error fetching EC2 instances: {e}")
    return resources

def fetch_vpcs(session: boto3.Session) -> List[Dict[str, Any]]:
    ec2 = session.resource('ec2')
    resources = []
    try:
        for vpc in ec2.vpcs.all():
            resources.append({
                "mode": "managed",
                "type": "aws_vpc",
                "name": vpc.id,
                "provider": "provider[\"registry.terraform.io/hashicorp/aws\"]",
                "instances": [
                    {
                        "schema_version": 1,
                        "attributes": {
                            "id": vpc.id,
                            "cidr_block": vpc.cidr_block,
                            "enable_dns_hostnames": vpc.describe_attribute(Attribute='enableDnsHostnames')['EnableDnsHostnames']['Value'],
                            "enable_dns_support": vpc.describe_attribute(Attribute='enableDnsSupport')['EnableDnsSupport']['Value'],
                        }
                    }
                ]
            })
    except ClientError as e:
        print(f"Error fetching VPCs: {e}")
    return resources

def fetch_subnets(session: boto3.Session) -> List[Dict[str, Any]]:
    ec2 = session.resource('ec2')
    resources = []
    try:
        for subnet in ec2.subnets.all():
            resources.append({
                "mode": "managed",
                "type": "aws_subnet",
                "name": subnet.id,
                "provider": "provider[\"registry.terraform.io/hashicorp/aws\"]",
                "instances": [
                    {
                        "schema_version": 1,
                        "attributes": {
                            "id": subnet.id,
                            "vpc_id": subnet.vpc_id,
                            "cidr_block": subnet.cidr_block,
                            "availability_zone": subnet.availability_zone,
                            "map_public_ip_on_launch": subnet.map_public_ip_on_launch,
                        }
                    }
                ]
            })
    except ClientError as e:
        print(f"Error fetching subnets: {e}")
    return resources

def fetch_security_groups(session: boto3.Session) -> List[Dict[str, Any]]:
    ec2 = session.client('ec2')
    resources = []
    try:
        response = ec2.describe_security_groups()
        for sg in response['SecurityGroups']:
            resources.append({
                "mode": "managed",
                "type": "aws_security_group",
                "name": sg['GroupName'],
                "provider": "provider[\"registry.terraform.io/hashicorp/aws\"]",
                "instances": [
                    {
                        "schema_version": 1,
                        "attributes": {
                            "id": sg['GroupId'],
                            "name": sg['GroupName'],
                            "description": sg['Description'],
                            "vpc_id": sg['VpcId'],
                            "ingress": [rule for rule in sg.get('IpPermissions', [])],
                            "egress": [rule for rule in sg.get('IpPermissionsEgress', [])]
                        }
                    }
                ]
            })
    except ClientError as e:
        print(f"Error fetching security groups: {e}")
    return resources

def save_state_file(state: str, filename: str):
    with open(filename, 'w') as f:
        f.write(state)

if __name__ == "__main__":
    regions = ["us-west-2", "us-east-1"]  # Add or modify regions as needed
    resource_types = ["aws_s3_bucket", "aws_ec2_instance", "aws_vpc", "aws_subnet", "aws_security_group"]
    state = generate_state_file(regions, resource_types)
    save_state_file(state, "terraform.tfstate")
    print("Terraform state file generated: terraform.tfstate")
--------------------------------------------------------------------------------
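If you only need part of the inventory, the generator can be scoped; a small sketch (it uses whatever AWS credentials boto3 finds, so expect real API calls):

```python
from state_file_generator import generate_state_file, save_state_file

# One region, one resource type: a quick way to smoke-test credentials
# and inspect the shape of the generated state document.
state_json = generate_state_file(["us-east-1"], ["aws_s3_bucket"])
save_state_file(state_json, "s3-only.tfstate")
```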
/cf_to_tf_converter.py:
--------------------------------------------------------------------------------
import json
import re
import yaml
from typing import Dict, Any, List
from security_analyzer import analyze_security, generate_security_report, get_security_score

class CloudFormationLoader(yaml.SafeLoader):
    def __init__(self, stream):
        self._root = stream.name
        super(CloudFormationLoader, self).__init__(stream)

def construct_cfn_tag(loader, node):
    if isinstance(node, yaml.ScalarNode):
        return f"${{{node.value}}}"
    elif isinstance(node, yaml.SequenceNode):
        return [construct_cfn_tag(loader, item) for item in node.value]
    elif isinstance(node, yaml.MappingNode):
        return {construct_cfn_tag(loader, k): construct_cfn_tag(loader, v) for k, v in node.value}

for tag in ['!Ref', '!GetAtt', '!Sub', '!Join', '!Select', '!Split', '!FindInMap', '!If', '!Equals', '!And', '!Or', '!Not', '!ImportValue']:
    CloudFormationLoader.add_constructor(tag, construct_cfn_tag)

def load_cloudformation_template(file_path: str) -> Dict[str, Any]:
    with open(file_path, 'r') as f:
        if file_path.endswith('.json'):
            return json.load(f)
        else:
            return yaml.load(f, Loader=CloudFormationLoader)

def convert_resource_type(cf_type: str) -> str:
    type_mapping = {
        'AWS::S3::Bucket': 'aws_s3_bucket',
        'AWS::EC2::Instance': 'aws_instance',
        'AWS::IAM::Role': 'aws_iam_role',
        'AWS::Lambda::Function': 'aws_lambda_function',
        'AWS::DynamoDB::Table': 'aws_dynamodb_table',
        'AWS::RDS::DBInstance': 'aws_db_instance',
        'AWS::ElasticLoadBalancingV2::LoadBalancer': 'aws_lb',
        'AWS::ElasticLoadBalancingV2::TargetGroup': 'aws_lb_target_group',
        'AWS::ElasticLoadBalancingV2::Listener': 'aws_lb_listener',
        'AWS::EC2::SecurityGroup': 'aws_security_group',
        'AWS::EC2::VPC': 'aws_vpc',
        'AWS::EC2::Subnet': 'aws_subnet',
        'AWS::EC2::InternetGateway': 'aws_internet_gateway',
        'AWS::EC2::RouteTable': 'aws_route_table',
        'AWS::EC2::Route': 'aws_route',
        'AWS::EC2::EIP': 'aws_eip',
        'AWS::EC2::NatGateway': 'aws_nat_gateway',
        'AWS::IAM::Policy': 'aws_iam_policy',
        'AWS::CloudWatch::Alarm': 'aws_cloudwatch_metric_alarm',
        'AWS::SNS::Topic': 'aws_sns_topic',
        'AWS::SQS::Queue': 'aws_sqs_queue',
        'AWS::KMS::Key': 'aws_kms_key',
        # Add more mappings here
    }
    return type_mapping.get(cf_type, f"{cf_type.lower().replace('::', '_')}")

def convert_property_name(name: str) -> str:
    name_mapping = {
        'BucketName': 'bucket',
        'AccessControl': 'acl',
        'VersioningConfiguration': 'versioning',
        'ServerSideEncryptionConfiguration': 'server_side_encryption_configuration',
        # Add more mappings here
    }
    if name in name_mapping:
        return name_mapping[name]
    # Fall back to CamelCase -> snake_case for unmapped names,
    # e.g. AllocatedStorage -> allocated_storage
    return re.sub(r'(?<!^)(?=[A-Z])', '_', name).lower()

def convert_property_value(value: Any, property_name: str) -> Any:
    if isinstance(value, dict):
        if 'Ref' in value:
            return f"${{var.{value['Ref']}}}"
        elif 'Fn::GetAtt' in value:
            attrs = value['Fn::GetAtt']
            if not isinstance(attrs, list):
                attrs = attrs.split('.')
            # Keep the logical resource name as-is; resolving it to the
            # converted Terraform address would need the whole template.
            return f"${{{attrs[0]}.{attrs[1].lower()}}}"
        elif 'Fn::Join' in value:
            delimiter, parts = value['Fn::Join']
            return f"${{join(\"{delimiter}\", {parts})}}"
        elif 'Fn::Sub' in value:
            # Escape the CloudFormation placeholders inside the substitution
            # string so they don't collide with Terraform interpolation
            sub_str = value['Fn::Sub'].replace("${", "$${")
            return f"${{format(\"{sub_str}\", {{}})}}"
        # Add more intrinsic function handlers here
    elif isinstance(value, list):
        if property_name == 'SecurityGroups':
            return [f"${{aws_security_group.{item.lower()}.id}}" for item in value]
        return [convert_property_value(item, property_name) for item in value]
    elif isinstance(value, str):
        return f'"{value}"'
    return value

def convert_resource(name: str, resource: Dict[str, Any]) -> List[str]:
    resource_type = convert_resource_type(resource['Type'])
    properties = resource.get('Properties', {})

    tf_resources = []

    if resource_type == 'aws_s3_bucket':
        tf_resources.append(f'resource "aws_s3_bucket" "{name}" {{')
        tf_resources.append(f'  bucket = {convert_property_value(properties.get("BucketName", name), "BucketName")}')
        tf_resources.append('}')

        if 'AccessControl' in properties:
            tf_resources.append(f'resource "aws_s3_bucket_acl" "{name}_acl" {{')
            tf_resources.append(f'  bucket = aws_s3_bucket.{name}.id')
            tf_resources.append(f'  acl    = {convert_property_value(properties["AccessControl"], "AccessControl")}')
            tf_resources.append('}')

        if 'VersioningConfiguration' in properties:
            tf_resources.append(f'resource "aws_s3_bucket_versioning" "{name}_versioning" {{')
            tf_resources.append(f'  bucket = aws_s3_bucket.{name}.id')
            tf_resources.append('  versioning_configuration {')
            tf_resources.append(f'    status = {convert_property_value(properties["VersioningConfiguration"]["Status"], "Status")}')
            tf_resources.append('  }')
            tf_resources.append('}')

        # CloudFormation nests the SSE rules under BucketEncryption; accept the
        # bare key too for templates that use the flattened form.
        encryption = (properties.get('BucketEncryption', {}).get('ServerSideEncryptionConfiguration')
                      or properties.get('ServerSideEncryptionConfiguration'))
        if encryption:
            tf_resources.append(f'resource "aws_s3_bucket_server_side_encryption_configuration" "{name}_encryption" {{')
            tf_resources.append(f'  bucket = aws_s3_bucket.{name}.id')
            tf_resources.append('  rule {')
            tf_resources.append('    apply_server_side_encryption_by_default {')
            tf_resources.append(f'      sse_algorithm = {convert_property_value(encryption[0]["ServerSideEncryptionByDefault"]["SSEAlgorithm"], "SSEAlgorithm")}')
            tf_resources.append('    }')
            tf_resources.append('  }')
            tf_resources.append('}')
    else:
        tf_resources.append(f'resource "{resource_type}" "{name}" {{')
        for prop_name, prop_value in properties.items():
            tf_name = convert_property_name(prop_name)
            tf_value = convert_property_value(prop_value, prop_name)
            tf_resources.append(f'  {tf_name} = {tf_value}')
        tf_resources.append('}')

    return tf_resources

def convert_output(name: str, output: Dict[str, Any]) -> str:
    value = convert_property_value(output.get('Value'), 'Output')
    description = output.get('Description', '')

    tf_output = [f'output "{name}" {{']
    if description:
        tf_output.append(f'  description = "{description}"')
    tf_output.append(f'  value = {value}')
    tf_output.append('}')

    return '\n'.join(tf_output)

def convert_to_terraform(cf_template: Dict[str, Any]) -> str:
    tf_output = []

    if 'Parameters' in cf_template:
        tf_output.append("# Variables")
        for param_name, param_data in cf_template['Parameters'].items():
            default_value = param_data.get('Default', '')
            tf_output.append(f'variable "{param_name}" {{')
            if 'Description' in param_data:
                tf_output.append(f'  description = "{param_data["Description"]}"')
            tf_output.append('  type = string')
            if default_value:
                tf_output.append(f'  default = "{default_value}"')
            tf_output.append('}')
        tf_output.append("")

    if 'Resources' in cf_template:
        tf_output.append("# Resources")
        for resource_name, resource_data in cf_template['Resources'].items():
            tf_output.extend(convert_resource(resource_name, resource_data))
        tf_output.append("")

    if 'Outputs' in cf_template:
        tf_output.append("# Outputs")
        for output_name, output_data in cf_template['Outputs'].items():
            tf_output.append(convert_output(output_name, output_data))
        tf_output.append("")

    return '\n'.join(tf_output)

def process_cf_file(file_path: str) -> Dict[str, Any]:
    cf_template = load_cloudformation_template(file_path)
    tf_code = convert_to_terraform(cf_template)
    security_issues = analyze_security(tf_code)
    security_report = generate_security_report(tf_code)
    security_score = get_security_score(security_issues)

    return {
        "terraform_code": tf_code,
        "security_report": security_report,
        "security_score": security_score,
        "security_issues": security_issues
    }

if __name__ == "__main__":
    import sys
    if len(sys.argv) != 2:
        print("Usage: python cf_to_tf_converter.py <cloudformation_template>")
        sys.exit(1)

    input_file = sys.argv[1]
    result = process_cf_file(input_file)
    print(result["terraform_code"])
    print("\nSecurity Report:")
    print(result["security_report"])
    print(f"\nSecurity Score: {result['security_score']}/100")
--------------------------------------------------------------------------------
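A small end-to-end sketch of the module above, using the bundled example template. Note that by the time conversion runs, CloudFormationLoader has already rewritten intrinsic tags such as `!Ref BucketName` into placeholder strings like `${BucketName}`:

```python
from cf_to_tf_converter import load_cloudformation_template, convert_to_terraform

template = load_cloudformation_template("examples_to_test/test.yaml")
print(convert_to_terraform(template))  # variable, aws_s3_bucket resources, outputs
```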