├── app ├── __init__.py ├── utils.py ├── tests │ ├── __init__.py │ ├── test_docker_compose.py │ ├── test_iac_bugfix.py │ ├── test_iac_basic.py │ ├── test_helm_template.py │ ├── test_iac_install.py │ ├── test_ansible.py │ ├── test_iac_template.py │ └── conftest.py ├── models │ ├── grafana │ │ ├── __init__.py │ │ ├── terraform_alert.py │ │ ├── loki_models.py │ │ ├── mimir_models.py │ │ ├── elastcsearch_models.py │ │ ├── prometheus_models.py │ │ ├── postgresql_models.py │ │ ├── mysql_models.py │ │ ├── alert_managers_models.py │ │ └── tempo_models.py │ ├── jcasc.py │ ├── gitlab_models.py │ ├── __init__.py │ ├── jenkins.py │ ├── docker_installation_models.py │ ├── utils.py │ ├── compose_models.py │ ├── ansible_models.py │ ├── helm_models.py │ └── terraform_models.py ├── directory_generators │ ├── __init__.py │ ├── compose_generator.py │ ├── jcasc_generator.py │ ├── ansible_generator.py │ ├── terraform_generator.py │ └── helm_generator.py ├── template_generators │ ├── __init__.py │ ├── docker │ │ ├── __init__.py │ │ ├── compose.py │ │ └── installation.py │ ├── terraform │ │ ├── __init__.py │ │ ├── aws │ │ │ ├── __init__.py │ │ │ ├── ELB.py │ │ │ ├── KeyPair.py │ │ │ ├── ALB.py │ │ │ ├── SNS.py │ │ │ ├── IAM.py │ │ │ ├── s3.py │ │ │ ├── Route53.py │ │ │ ├── SQS.py │ │ │ ├── AutoScaling.py │ │ │ ├── CloudFront.py │ │ │ ├── RDS.py │ │ │ ├── EFS.py │ │ │ └── ec2.py │ │ ├── Installation │ │ │ ├── __init__.py │ │ │ └── main.py │ │ ├── docker.py │ │ ├── argocd.py │ │ └── tfvars │ │ │ └── grafana.py │ ├── jenkins │ │ ├── jcasc.py │ │ └── installation.py │ ├── ansible │ │ └── install │ │ │ ├── main.py │ │ │ ├── nginx.py │ │ │ └── docker.py │ ├── grafana_data_sources │ │ ├── tempo.py │ │ ├── elasticsearch.py │ │ ├── postgresql.py │ │ ├── prometheus.py │ │ ├── loki.py │ │ ├── alertmanager.py │ │ ├── mimir.py │ │ └── mysql.py │ └── gitlab │ │ └── installation.py ├── media │ ├── MyAnsible │ │ ├── roles │ │ │ ├── k8s │ │ │ │ ├── defaults │ │ │ │ │ └── main.yml │ │ │ │ ├── files │ │ │ 
│ │ └── sample.sh │ │ │ │ ├── tasks │ │ │ │ │ ├── main.yml │ │ │ │ │ └── k8s.yml │ │ │ │ └── handlers │ │ │ │ │ └── main.yml │ │ │ ├── init_k8s │ │ │ │ ├── defaults │ │ │ │ │ └── main.yml │ │ │ │ ├── files │ │ │ │ │ └── sample.sh │ │ │ │ ├── handlers │ │ │ │ │ └── main.yml │ │ │ │ ├── tasks │ │ │ │ │ ├── main.yml │ │ │ │ │ ├── cni.yml │ │ │ │ │ └── initk8s.yml │ │ │ │ └── templates │ │ │ │ │ └── kubeadmcnf.yml.j2 │ │ │ ├── preinstall │ │ │ │ ├── files │ │ │ │ │ └── sample.sh │ │ │ │ ├── defaults │ │ │ │ │ └── main.yml │ │ │ │ ├── handlers │ │ │ │ │ └── main.yml │ │ │ │ ├── tasks │ │ │ │ │ ├── main.yml │ │ │ │ │ └── basic.yml │ │ │ │ └── templates │ │ │ │ │ └── resolv.conf.j2 │ │ │ ├── join_master │ │ │ │ ├── defaults │ │ │ │ │ └── main.yml │ │ │ │ ├── files │ │ │ │ │ └── join-command │ │ │ │ ├── handlers │ │ │ │ │ └── main.yml │ │ │ │ ├── tasks │ │ │ │ │ ├── main.yml │ │ │ │ │ └── join_master.yml │ │ │ │ └── templates │ │ │ │ │ └── kubeadmcnf-join.yml.j2 │ │ │ └── join_worker │ │ │ │ ├── defaults │ │ │ │ └── main.yml │ │ │ │ ├── files │ │ │ │ └── join-command │ │ │ │ ├── handlers │ │ │ │ └── main.yml │ │ │ │ └── tasks │ │ │ │ ├── main.yml │ │ │ │ └── join_worker.yml │ │ ├── ansible.cfg │ │ ├── hosts │ │ ├── kubernetes_playbook.yml │ │ └── group_vars │ │ │ └── all │ ├── MyHelm │ │ ├── templates │ │ │ ├── helpers.tpl │ │ │ ├── secrets.yaml │ │ │ ├── service.yaml │ │ │ ├── pvc.yaml │ │ │ └── deployment.yaml │ │ ├── Chart.yaml │ │ └── values.yaml │ ├── MyHelm_zip.zip │ ├── MyTerraform_zip.zip │ ├── Installation_base │ │ ├── Terraform │ │ │ ├── centos.sh │ │ │ ├── amazon_linux.sh │ │ │ ├── fedora.sh │ │ │ └── ubuntu.sh │ │ ├── Jenkins │ │ │ ├── RHEL.sh │ │ │ ├── fedora.sh │ │ │ ├── ubuntu.sh │ │ │ └── docker-compose.yml │ │ ├── Docker │ │ │ ├── centos.sh │ │ │ ├── RHEL.sh │ │ │ ├── fedora.sh │ │ │ └── ubuntu.sh │ │ └── Gitlab │ │ │ └── docker-compose.yaml │ ├── MyTerraform │ │ ├── versions.tf │ │ ├── modules │ │ │ └── docker │ │ │ │ ├── versions.tf │ │ │ │ ├── 
terraform.tfvars │ │ │ │ ├── main.tf │ │ │ │ └── variables.tf │ │ ├── terraform.tfvars │ │ ├── main.tf │ │ └── variables.tf │ ├── terraform.tfvars │ ├── MyGrafana │ │ └── mimir.yml │ ├── kuber_configs │ │ ├── kubeadmcnf-join.yml.j2 │ │ ├── check_apiserveer.sh.j2 │ │ ├── kubeadmcnf.yml.j2 │ │ ├── resolv.conf.j2 │ │ ├── keepalived.conf.j2 │ │ └── haproxy.cfg.j2 │ ├── grafana_datasources │ │ ├── elasticsearch.yml │ │ ├── loki.yml │ │ ├── mimir.yml │ │ ├── mysql.yml │ │ ├── alertmanager.yml │ │ ├── postgresql.yml │ │ ├── prometheus.yml │ │ └── tempo.yml │ ├── MyBash │ │ └── bash.sh │ ├── MyJcasc │ │ └── jcasc │ │ │ └── config.yaml │ └── MyCompose │ │ └── docker-compose.yaml ├── app_instance.py ├── main.py ├── routes │ ├── gitlab.py │ ├── jenkins.py │ ├── jcasc.py │ ├── helm.py │ ├── docker.py │ ├── grafana_terraform.py │ ├── utils.py │ ├── ansible.py │ └── grafana_data_sources.py ├── gpt_services.py ├── services.py └── prompt_generators.py ├── .gitignore ├── devopsgpt.jpg ├── requirements.txt ├── .gitmodules ├── helm ├── Chart.yaml ├── templates │ ├── app │ │ ├── secret.yaml │ │ ├── service.yaml │ │ ├── ingress.yaml │ │ └── deployment.yaml │ ├── web │ │ ├── ingress.yaml │ │ ├── service.yaml │ │ └── deployment.yaml │ └── _helpers.tpl └── values.yaml ├── Dockerfile ├── Makefile ├── docker-compose.yml ├── .github ├── workflows │ ├── labeler.yml │ ├── pylint.yml │ ├── unit-test.yml │ └── cicd.yml └── labeler.yml ├── crawl ├── main.py └── crawled_data │ └── Amazon EC2 instance types - Amazon EC2.txt └── CONTRIBUTING.md /app/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/utils.py: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /app/tests/__init__.py: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/models/grafana/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/directory_generators/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/template_generators/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/template_generators/docker/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/directory_generators/compose_generator.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/k8s/defaults/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/k8s/files/sample.sh: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/template_generators/terraform/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/init_k8s/defaults/main.yml: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /app/media/MyAnsible/roles/init_k8s/files/sample.sh: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/init_k8s/handlers/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/preinstall/files/sample.sh: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/template_generators/terraform/aws/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_master/defaults/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_master/files/join-command: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_master/handlers/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_worker/defaults/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_worker/files/join-command: -------------------------------------------------------------------------------- 1 
| -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_worker/handlers/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/preinstall/defaults/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/preinstall/handlers/main.yml: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /app/template_generators/terraform/Installation/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | venv 2 | data 3 | .pytest* 4 | .env 5 | **/__pycache__/ -------------------------------------------------------------------------------- /app/media/MyAnsible/ansible.cfg: -------------------------------------------------------------------------------- 1 | [defaults] 2 | host_key_checking=false 3 | -------------------------------------------------------------------------------- /app/media/MyHelm/templates/helpers.tpl: -------------------------------------------------------------------------------- 1 | {{/* Add your helper functions here */}} 2 | -------------------------------------------------------------------------------- /devopsgpt.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/devopshobbies/devops-gpt/HEAD/devopsgpt.jpg -------------------------------------------------------------------------------- 
/app/template_generators/terraform/aws/ELB.py: -------------------------------------------------------------------------------- 1 | def IaC_template_generator_elb(input) -> str: 2 | pass -------------------------------------------------------------------------------- /app/media/MyHelm_zip.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/devopshobbies/devops-gpt/HEAD/app/media/MyHelm_zip.zip -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/preinstall/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: basic setup 3 | include_tasks: basic.yml 4 | -------------------------------------------------------------------------------- /app/media/MyTerraform_zip.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/devopshobbies/devops-gpt/HEAD/app/media/MyTerraform_zip.zip -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/k8s/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | - name: Install kubernetes packages 3 | include_tasks: k8s.yml 4 | -------------------------------------------------------------------------------- /app/media/MyHelm/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: MyHelm 3 | description: A Helm chart for Kubernetes 4 | version: 0.1.0 5 | -------------------------------------------------------------------------------- /app/template_generators/jenkins/jcasc.py: -------------------------------------------------------------------------------- 1 | 2 | def jcasc_template_generator(input): 3 | prompt = """M""" 4 | return prompt -------------------------------------------------------------------------------- 
/requirements.txt: -------------------------------------------------------------------------------- 1 | ruamel.yaml<0.18.0 2 | fastapi[standard]>=0.113.0,<0.114.0 3 | pydantic>=2.7.0,<3.0.0 4 | openai 5 | pytest 6 | 7 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "admin-panel"] 2 | path = admin-panel 3 | url = https://github.com/devopsgpt/admin-panel.git 4 | branch = master 5 | -------------------------------------------------------------------------------- /helm/Chart.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v2 2 | name: devopsgpt 3 | description: The Helm chart for DevOps ChatBot 4 | type: application 5 | version: 1.0.0 6 | appVersion: "1.0.0" 7 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_master/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for join_master 3 | 4 | - name: Join master(s) node to cluster 5 | include_tasks: join_master.yml 6 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_worker/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for join_worker 3 | 4 | - name: Join worker(s) node to cluster 5 | include_tasks: join_worker.yml 6 | -------------------------------------------------------------------------------- /app/media/MyHelm/templates/secrets.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: {{ .Release.Name }}-secret 5 | type: Opaque 6 | data: 7 | # Insert your base64 encoded secrets here 8 | 
-------------------------------------------------------------------------------- /app/media/MyAnsible/hosts: -------------------------------------------------------------------------------- 1 | [all] 2 | string private_ip=x.x.x.x 3 | string private_ip=x.x.x.x 4 | 5 | [k8s] 6 | string 7 | string 8 | 9 | [k8s_masters] 10 | string 11 | 12 | [k8s_workers] 13 | string -------------------------------------------------------------------------------- /app/media/Installation_base/Terraform/centos.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | sudo yum install -y yum-utils 5 | 6 | sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/RHEL/hashicorp.repo 7 | 8 | sudo yum -y install terraform -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/init_k8s/tasks/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # tasks file for init_k8s 3 | 4 | - name: Initialize kubernetes cluster 5 | include_tasks: initk8s.yml 6 | 7 | - name: Initialize Calico CNI 8 | include_tasks: cni.yml 9 | -------------------------------------------------------------------------------- /app/media/MyTerraform/versions.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_version = ">= 1.0" 3 | 4 | required_providers { 5 | docker = { 6 | source = "kreuzwerker/docker" 7 | version = ">= 2.8.0" 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /app/media/Installation_base/Terraform/amazon_linux.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | sudo yum install -y yum-utils 5 | 6 | sudo yum-config-manager --add-repo https://rpm.releases.hashicorp.com/AmazonLinux/hashicorp.repo 7 | 8 | sudo yum -y install terraform 
-------------------------------------------------------------------------------- /app/media/Installation_base/Terraform/fedora.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | 4 | sudo dnf install -y dnf-plugins-core 5 | 6 | sudo dnf config-manager --add-repo https://rpm.releases.hashicorp.com/fedora/hashicorp.repo 7 | 8 | sudo dnf -y install terraform -------------------------------------------------------------------------------- /app/media/MyTerraform/modules/docker/versions.tf: -------------------------------------------------------------------------------- 1 | terraform { 2 | required_version = ">= 1.0" 3 | 4 | required_providers { 5 | docker = { 6 | source = "kreuzwerker/docker" 7 | version = ">= 2.8.0" 8 | } 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /app/media/MyHelm/templates/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ .Release.Name }}-web 5 | spec: 6 | ports: 7 | - port: 80 8 | targetPort: {{ .Values.web.targetPort }} 9 | selector: 10 | app: {{ .Release.Name }}-web 11 | -------------------------------------------------------------------------------- /app/media/terraform.tfvars: -------------------------------------------------------------------------------- 1 | create_db_instance = true 2 | create_db_option_group = true 3 | create_db_parameter_group = true 4 | create_db_subnet_group = true 5 | create_monitoring_role = true 6 | create_cloudwatch_log_group = true 7 | manage_master_user_password_rotation = false 8 | -------------------------------------------------------------------------------- /app/media/MyHelm/values.yaml: -------------------------------------------------------------------------------- 1 | web: 2 | image: nginx 3 | targetPort: 80 4 | replicas: 1 5 | persistence: 6 | size: 1Gi 7 | accessModes: 8 | - ReadWriteOnce 
9 | stateless: 10 | enabled: true 11 | env: 12 | - name: ENV1 13 | value: Hi 14 | -------------------------------------------------------------------------------- /app/app_instance.py: -------------------------------------------------------------------------------- 1 | from fastapi import FastAPI 2 | from fastapi.middleware.cors import CORSMiddleware 3 | app = FastAPI() 4 | origins = ["*"] 5 | app.add_middleware( CORSMiddleware, allow_origins=origins, allow_credentials=True, allow_methods=["*"], allow_headers=["*"], ) 6 | 7 | 8 | -------------------------------------------------------------------------------- /app/models/jcasc.py: -------------------------------------------------------------------------------- 1 | from pydantic import BaseModel 2 | from typing import List, Optional 3 | 4 | 5 | class Jcasc(BaseModel): 6 | allowsSignup:bool = True 7 | allowAnonymousRead:bool = True 8 | cache_size:int = 1 9 | executators:int = 1 10 | required_plugins:List[str] 11 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/k8s/handlers/main.yml: -------------------------------------------------------------------------------- 1 | --- 2 | # handlers file for k8s 3 | 4 | - name: Remove temporary GPG key file 5 | file: 6 | path: "/tmp/docker.list" 7 | state: absent 8 | 9 | - name: Restart kubelet 10 | service: 11 | name: kubelet 12 | state: restarted 13 | -------------------------------------------------------------------------------- /app/media/MyGrafana/mimir.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | datasources: 3 | - name: Mimir 4 | uid: mimir 5 | type: prometheus 6 | access: proxy 7 | orgId: 1 8 | url: http://mimir-nginx.mimir.svc.cluster.local/prometheus 9 | editable: true 10 | version: 1 11 | jsonData: 12 | alertmanagerUid: alertmanager 13 | -------------------------------------------------------------------------------- 
/app/media/kuber_configs/kubeadmcnf-join.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: kubeadm.k8s.io/v1beta3 3 | kind: JoinConfiguration 4 | nodeRegistration: 5 | criSocket: {{ cri_socket }} 6 | 7 | --- 8 | kind: ClusterConfiguration 9 | apiVersion: kubeadm.k8s.io/v1beta3 10 | kubernetesVersion: "{{ k8s_version }}" 11 | 12 | 13 | -------------------------------------------------------------------------------- /app/media/MyHelm/templates/pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | name: {{ .Release.Name }}-web-pvc 5 | spec: 6 | accessModes: 7 | - {{ .Values.web.persistence.accessModes | first }} 8 | resources: 9 | requests: 10 | storage: {{ .Values.web.persistence.size }} 11 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.11-alpine 2 | RUN apk add --no-cache shadow 3 | RUN useradd -ms /bin/bash admin 4 | WORKDIR /code 5 | COPY ./requirements.txt /code/requirements.txt 6 | RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt 7 | COPY ./app /code/app 8 | RUN chown -R admin:admin /code/app/* 9 | USER admin 10 | -------------------------------------------------------------------------------- /helm/templates/app/secret.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Secret 3 | metadata: 4 | name: {{ include "app.secretName" . }} 5 | labels: 6 | {{- include "app.labels" . 
| nindent 4 }} 7 | type: Opaque 8 | data: 9 | {{- range $key, $value := .Values.app.environment }} 10 | {{ $key }}: {{ $value | b64enc | quote }} 11 | {{- end }} 12 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/join_master/templates/kubeadmcnf-join.yml.j2: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: kubeadm.k8s.io/v1beta3 3 | kind: JoinConfiguration 4 | nodeRegistration: 5 | criSocket: {{ cri_socket }} 6 | 7 | --- 8 | kind: ClusterConfiguration 9 | apiVersion: kubeadm.k8s.io/v1beta3 10 | kubernetesVersion: "{{ k8s_version }}" 11 | 12 | 13 | -------------------------------------------------------------------------------- /app/media/MyTerraform/terraform.tfvars: -------------------------------------------------------------------------------- 1 | create_image = true 2 | image_name = "my-image" 3 | image_force_remove = true 4 | image_build = { 5 | context = "./" 6 | tag = ["my-image:latest"] 7 | } 8 | 9 | create_container = false 10 | container_image = "my-image" 11 | container_name = "my-container" 12 | container_hostname = "my-host" 13 | container_restart = "always" 14 | -------------------------------------------------------------------------------- /app/main.py: -------------------------------------------------------------------------------- 1 | from app.routes.utils import * 2 | from app.routes.terraform import * 3 | from app.routes.helm import * 4 | from app.routes.ansible import * 5 | from app.routes.jcasc import * 6 | from app.routes.docker import * 7 | from app.routes.jenkins import * 8 | from app.routes.gitlab import * 9 | from app.routes.grafana_data_sources import * 10 | from app.routes.grafana_terraform import * -------------------------------------------------------------------------------- /app/media/MyTerraform/modules/docker/terraform.tfvars: -------------------------------------------------------------------------------- 1 | 
create_image = true 2 | image_name = "my-image" 3 | image_force_remove = true 4 | image_build = { 5 | context = "./" 6 | tag = ["my-image:latest"] 7 | } 8 | 9 | create_container = false 10 | container_image = "my-image" 11 | container_name = "my-container" 12 | container_hostname = "my-host" 13 | container_restart = "always" 14 | -------------------------------------------------------------------------------- /app/template_generators/terraform/aws/KeyPair.py: -------------------------------------------------------------------------------- 1 | def IaC_template_generator_key_pair(input) -> str: 2 | 3 | aws_key_pair_create = 'true' if input.key_pair else 'false' 4 | aws_key_pair_create_private_key = 'true' if input.private_key else 'false' 5 | 6 | tfvars_file = f"""create = {aws_key_pair_create} 7 | create_private_key = {aws_key_pair_create_private_key} 8 | """ 9 | return tfvars_file 10 | -------------------------------------------------------------------------------- /app/template_generators/terraform/aws/ALB.py: -------------------------------------------------------------------------------- 1 | def IaC_template_generator_alb(input) -> str: 2 | 3 | aws_alb_create_resources = 'true' if input.alb_resources else 'false' 4 | aws_alb_create_security_group = 'true' if input.security_group else 'false' 5 | 6 | tfvars_file = f"""alb_create = {aws_alb_create_resources} 7 | create_security_group = {aws_alb_create_security_group} 8 | """ 9 | return tfvars_file 10 | -------------------------------------------------------------------------------- /app/media/grafana_datasources/elasticsearch.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: elasticsearch-v7-filebeat 5 | type: elasticsearch 6 | access: proxy 7 | url: http://localhost:9200 8 | editable: [true|false] 9 | jsonData: 10 | index: '[filebeat-]YYYY.MM.DD' 11 | interval: Daily 12 | timeField: '@timestamp' 13 | logMessageField: message 14 
| logLevelField: fields.level 15 | -------------------------------------------------------------------------------- /app/routes/gitlab.py: -------------------------------------------------------------------------------- 1 | from app.app_instance import app 2 | from app.models import (GitLabInstallation,Output) 3 | from app.template_generators.gitlab.installation import select_install_gitlab 4 | import os 5 | 6 | 7 | 8 | 9 | @app.post("/api/gitlab/installation") 10 | async def gitlab_installation(request:GitLabInstallation) -> Output: 11 | 12 | select_install_gitlab(request) 13 | 14 | return Output(output='output') -------------------------------------------------------------------------------- /app/media/grafana_datasources/loki.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Loki 5 | uid: loki 6 | type: loki 7 | orgId: 1 8 | access: proxy 9 | editable: [true|false] 10 | url: http://localhost:3100 11 | jsonData: 12 | timeout: 60 13 | maxLines: 1000 14 | 15 | # optionally 16 | basicAuth: true 17 | basicAuthUser: my_user 18 | secureJsonData: 19 | basicAuthPassword: test_password 20 | -------------------------------------------------------------------------------- /app/media/kuber_configs/check_apiserveer.sh.j2: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | errorExit() { 4 | echo "*** $@" 1>&2 5 | exit 1 6 | } 7 | 8 | curl --silent --max-time 2 --insecure https://localhost:6443/ -o /dev/null || errorExit "Error GET https://localhost:6443/" 9 | if ip addr | grep -q {{ virtual_ip }}; then 10 | curl --silent --max-time 2 --insecure https://{{ virtual_ip }}:6443/ -o /dev/null || errorExit "Error GET https://{{ virtual_ip }}:6443/" 11 | fi 12 | -------------------------------------------------------------------------------- /app/media/grafana_datasources/mimir.yml: 
-------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Mimir 5 | uid: mimir 6 | type: prometheus 7 | access: proxy 8 | orgId: 1 9 | url: http://mimir-nginx.mimir.svc.cluster.local/prometheus 10 | editable: [true|false] 11 | version: 1 12 | jsonData: 13 | httpHeaderName1: "X-Scope-OrgID" 14 | alertmanagerUid: "alertmanager" 15 | secureJsonData: 16 | httpHeaderValue1: "pods" 17 | -------------------------------------------------------------------------------- /app/media/kuber_configs/kubeadmcnf.yml.j2: -------------------------------------------------------------------------------- 1 | kind: InitConfiguration 2 | apiVersion: kubeadm.k8s.io/v1beta3 3 | nodeRegistration: 4 | criSocket: {{ cri_socket }} 5 | imagePullPolicy: IfNotPresent 6 | --- 7 | kind: ClusterConfiguration 8 | apiVersion: kubeadm.k8s.io/v1beta3 9 | kubernetesVersion: "{{ k8s_version }}" 10 | controlPlaneEndpoint: "{{ apiserver_url }}" 11 | certificatesDir: /etc/kubernetes/pki 12 | networking: 13 | podSubnet: {{ pod_network_cidr }} 14 | -------------------------------------------------------------------------------- /app/models/grafana/terraform_alert.py: -------------------------------------------------------------------------------- 1 | from typing import Optional, List 2 | from pydantic import BaseModel,PrivateAttr,Field 3 | 4 | class ContactPoint(BaseModel): 5 | use_email:bool = True 6 | use_slack:bool = True 7 | class GrafanaTerraform(BaseModel): 8 | create_contact_point:Optional[ContactPoint] 9 | create_message_template:bool = True 10 | create_mute_timing:bool = True 11 | create_notification_policy:bool = True 12 | 13 | 14 | -------------------------------------------------------------------------------- /app/media/Installation_base/Jenkins/RHEL.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo wget -O /etc/yum.repos.d/jenkins.repo \ 3 | 
https://pkg.jenkins.io/redhat-stable/jenkins.repo 4 | sudo rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io-2023.key 5 | sudo yum upgrade -y 6 | # Add required dependencies for the jenkins package 7 | sudo yum install -y fontconfig java-17-openjdk 8 | sudo yum install -y jenkins 9 | sudo systemctl daemon-reload 10 | sudo systemctl enable --now jenkins -------------------------------------------------------------------------------- /app/media/Installation_base/Jenkins/fedora.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo wget -O /etc/yum.repos.d/jenkins.repo \ 3 | https://pkg.jenkins.io/redhat-stable/jenkins.repo 4 | sudo rpm --import https://pkg.jenkins.io/redhat-stable/jenkins.io-2023.key 5 | sudo dnf upgrade -y 6 | # Add required dependencies for the jenkins package 7 | sudo dnf install -y fontconfig java-17-openjdk 8 | sudo dnf install -y jenkins 9 | sudo systemctl daemon-reload 10 | sudo systemctl enable --now jenkins -------------------------------------------------------------------------------- /app/routes/jenkins.py: -------------------------------------------------------------------------------- 1 | from app.app_instance import app 2 | from app.models import (DockerCompose,JenkinsInstallation,Output) 3 | from app.template_generators.jenkins.installation import select_install_jenkins 4 | import os 5 | 6 | 7 | 8 | 9 | @app.post("/api/jenkins/installation") 10 | async def jenkins_installation(request:JenkinsInstallation) -> Output: 11 | 12 | select_install_jenkins(request) 13 | 14 | return Output(output='output') -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | namespace ?= default 2 | releaseName ?= devopsgpt 3 | 4 | all: update-submodule build up 5 | 6 | update-submodule: 7 | git submodule init && git submodule update 8 | 9 | build: 10 | docker compose build 
11 | up: 12 | docker compose up -d 13 | down: 14 | docker compose down -v 15 | helm-install: 16 | helm install $(releaseName) helm/ -f helm/values.yaml -n $(namespace) --create-namespace 17 | helm-uninstall: 18 | helm uninstall $(releaseName) -n $(namespace) 19 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/init_k8s/templates/kubeadmcnf.yml.j2: -------------------------------------------------------------------------------- 1 | kind: InitConfiguration 2 | apiVersion: kubeadm.k8s.io/v1beta3 3 | nodeRegistration: 4 | criSocket: {{ cri_socket }} 5 | imagePullPolicy: IfNotPresent 6 | --- 7 | kind: ClusterConfiguration 8 | apiVersion: kubeadm.k8s.io/v1beta3 9 | kubernetesVersion: "{{ k8s_version }}" 10 | controlPlaneEndpoint: "{{ apiserver_url }}" 11 | certificatesDir: /etc/kubernetes/pki 12 | networking: 13 | podSubnet: {{ pod_network_cidr }} 14 | -------------------------------------------------------------------------------- /helm/templates/app/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ include "app.serviceName" . }} 5 | labels: 6 | {{- include "app.labels" . | nindent 4 }} 7 | spec: 8 | type: {{ .Values.app.service.type }} 9 | ports: 10 | - port: {{ .Values.app.service.port }} 11 | targetPort: {{ .Values.app.service.targetPort }} 12 | protocol: {{ .Values.app.service.protocol }} 13 | selector: 14 | {{- include "app.labels" . 
class GitLabInstallation(BaseModel):
    """Request model for GitLab installation; only Docker is supported."""

    # Target environment for the GitLab install (currently only 'Docker').
    environment:str = 'Docker'

    @validator("environment")
    def validator_environment(cls, value):
        """Reject environments outside the supported set."""
        env = ['Docker']
        if value not in env:
            # Fixed typo in the user-facing message ("Environemnt" -> "Environment").
            raise ValueError(f"your selected Environment must be in {env}")
        return value
def IaC_template_generator_sns(input) -> str:
    """Build terraform.tfvars content for the AWS SNS module.

    input: request object with boolean flags `sns_topic`, `topic_policy`
    and `subscription`.
    Returns the tfvars text, one `key = true|false` line per flag,
    with a trailing newline.
    """
    def as_tf_bool(flag) -> str:
        # Terraform expects lowercase boolean literals.
        return 'true' if flag else 'false'

    settings = [
        ("create", input.sns_topic),
        ("create_topic_policy", input.topic_policy),
        ("create_subscription", input.subscription),
    ]
    return "".join(f"{key} = {as_tf_bool(flag)}\n" for key, flag in settings)
-------------------------------------------------------------------------------- /app/media/Installation_base/Jenkins/ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo apt update -y 3 | sudo apt install -y fontconfig openjdk-17-jre 4 | 5 | 6 | sudo wget -O /usr/share/keyrings/jenkins-keyring.asc \ 7 | https://pkg.jenkins.io/debian-stable/jenkins.io-2023.key 8 | echo "deb [signed-by=/usr/share/keyrings/jenkins-keyring.asc]" \ 9 | https://pkg.jenkins.io/debian-stable binary/ | sudo tee \ 10 | /etc/apt/sources.list.d/jenkins.list > /dev/null 11 | sudo apt-get update -y 12 | sudo apt-get install -y jenkins 13 | sudo systemctl enable --now jenkins -------------------------------------------------------------------------------- /app/media/Installation_base/Jenkins/docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.8' 2 | services: 3 | jenkins: 4 | image: jenkins/jenkins:lts 5 | privileged: true 6 | user: root 7 | ports: 8 | - 8080:8080 9 | - 50000:50000 10 | container_name: jenkins 11 | volumes: 12 | - /home/${myname}/jenkins_compose/jenkins_configuration:/var/jenkins_home 13 | - /var/run/docker.sock:/var/run/docker.sock 14 | 15 | # Replace "/home/${myname}/jenkins_compose/jenkins_configuration" with the path you want to use to store your jenkins data -------------------------------------------------------------------------------- /app/models/grafana/mimir_models.py: -------------------------------------------------------------------------------- 1 | from typing import List, Optional 2 | from pydantic import BaseModel, validator, ValidationError 3 | 4 | 5 | 6 | class MultiTenancy(BaseModel): 7 | tenant_name:str = "pods" 8 | httpHeaderName1:str = "X-Scope-OrgID" 9 | class MimirInput(BaseModel): 10 | name:str = "Mimir" 11 | uid:str = "mimir" 12 | url:str = "http://mimir-nginx.mimir.svc.cluster.local/prometheus" 13 | editable: bool = True 14 | 
class ElasticSearchInput(BaseModel):
    """Grafana Elasticsearch datasource provisioning input.

    Defaults describe a local Elasticsearch with Filebeat-style daily
    indices (the "v7" in the default name suggests Elasticsearch 7.x --
    confirm against the datasource template).
    """
    name:str = "elasticsearch-v7-filebeat"
    url:str = "http://localhost:9200"
    editable: bool = True
    # Daily-rotated Filebeat index pattern; bracketed part is literal.
    index:str = "[filebeat-]YYYY.MM.DD"
    interval:str = "Daily"
    timeField:str = "@timestamp"
    logMessageField:str = "message"
    logLevelField:str = "fields.level"
class PrometheusInput(BaseModel):
    """Grafana Prometheus datasource provisioning input.

    Defaults target a local Prometheus instance; field names follow
    Grafana's datasource jsonData keys.
    """
    name:str = "Prometheus"
    url:str = "http://localhost:9090"
    editable: bool = True
    httpMethod:str = "POST"
    manageAlerts:bool = True
    prometheusType:str = "Prometheus"
    prometheusVersion:str = "2.44.0"
    cacheLevel:str = "High"
    disableRecordingRules:bool = False
    incrementalQueryOverlapWindow:str = "10m"
class TestDockerCompose:
    """API tests for the docker-compose generation endpoint.

    `client` and the sample-input fixtures come from tests/conftest.py.
    """
    def setup_method(self):
        # Endpoint under test; pytest calls this before every test method.
        self.url = '/api/docker-compose/'

    def test_docker_compose_input(self, client, docker_compose_sample_input):
        """A valid payload is accepted with HTTP 200."""
        response = client.post(self.url, json=docker_compose_sample_input)
        assert response.status_code == 200

    def test_docker_compose_invalid_input(self, client, docker_compose_invalid_sample_input):
        """A malformed payload fails request validation with HTTP 422."""
        response = client.post(self.url, json=docker_compose_invalid_sample_input)
        assert response.status_code == 422
docker-client \ 5 | docker-client-latest \ 6 | docker-common \ 7 | docker-latest \ 8 | docker-latest-logrotate \ 9 | docker-logrotate \ 10 | docker-selinux \ 11 | docker-engine-selinux \ 12 | docker-engine 13 | 14 | 15 | sudo dnf -y install dnf-plugins-core 16 | sudo dnf-3 config-manager --add-repo https://download.docker.com/linux/fedora/docker-ce.repo 17 | 18 | 19 | sudo dnf -y install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin -------------------------------------------------------------------------------- /app/media/MyTerraform/main.tf: -------------------------------------------------------------------------------- 1 | provider "docker" { 2 | host = "unix:///var/run/docker.sock" 3 | } 4 | 5 | module "docker" { 6 | source = "./modules/docker" 7 | 8 | create_image = var.create_image 9 | image_name = var.image_name 10 | image_force_remove = var.image_force_remove 11 | image_build = var.image_build 12 | 13 | create_container = var.create_container 14 | container_image = var.container_image 15 | container_name = var.container_name 16 | container_hostname = var.container_hostname 17 | container_restart = var.container_restart 18 | } 19 | -------------------------------------------------------------------------------- /helm/templates/web/service.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: Service 3 | metadata: 4 | name: {{ include "web.serviceName" . }} 5 | labels: 6 | {{- include "web.labels" . | nindent 4 }} 7 | spec: 8 | type: {{ .Values.web.service.type }} 9 | ports: 10 | - port: {{ .Values.web.service.port }} 11 | targetPort: {{ .Values.web.service.targetPort }} 12 | protocol: {{ .Values.web.service.protocol }} 13 | {{- if eq .Values.web.service.type "NodePort" }} 14 | nodePort: {{ .Values.web.service.nodePort }} 15 | {{- end }} 16 | selector: 17 | {{- include "web.labels" . 
class PostgresInput(BaseModel):
    """Grafana PostgreSQL datasource provisioning input.

    Defaults mirror app/media/grafana_datasources/postgresql.yml.
    """
    name:str = "Postgres"
    url:str = "localhost:5432"
    user:str = "grafana"
    editable: bool = True
    database:str = "grafana"
    # NOTE(review): value keeps inner quotes ("'disable'") to match the YAML
    # template verbatim -- confirm the downstream renderer expects them.
    sslmode:str = "'disable'"
    password:str = "Password!"
    maxOpenConns:int = 100
    maxIdleConns:int = 100
    maxIdleConnsAuto:bool = True
    connMaxLifetime:int = 14400
    # Grafana's numeric version encoding: 903 = 9.3, 904 = 9.4, 1000 = 10.
    postgresVersion:int = 903
    timescaledb:bool = False
def IaC_template_generator_iam(input) -> str:
    """Build terraform.tfvars content for the AWS IAM module.

    input: request object with boolean flags `iam_user` / `iam_group`.
    Returns the tfvars text; the user/group entries are fixed samples.
    """
    def as_tf_bool(flag) -> str:
        # Terraform expects lowercase boolean literals.
        return 'true' if flag else 'false'

    create_user = as_tf_bool(input.iam_user)
    create_group = as_tf_bool(input.iam_group)

    # Sample HCL objects emitted verbatim into the tfvars lists.
    user_block = """ {
        name = "devopshobbies"
        path = "/"
    }"""
    group_block = """{
        name = "developers"
        path = "/"
    }
    """

    return f"""iam_create_user = {create_user}
iam_users = [
    {user_block}
]

iam_create_group = {create_group}
iam_groups = [
    {group_block}
]"""
-------------------------------------------------------------------------------- 1 | variable "create_image" { 2 | type = bool 3 | } 4 | 5 | variable "image_name" { 6 | type = string 7 | } 8 | 9 | variable "image_force_remove" { 10 | type = bool 11 | } 12 | 13 | variable "image_build" { 14 | type = object({ 15 | context = string 16 | tag = list(string) 17 | }) 18 | } 19 | 20 | variable "create_container" { 21 | type = bool 22 | } 23 | 24 | variable "container_image" { 25 | type = string 26 | } 27 | 28 | variable "container_name" { 29 | type = string 30 | } 31 | 32 | variable "container_hostname" { 33 | type = string 34 | } 35 | 36 | variable "container_restart" { 37 | type = string 38 | } 39 | -------------------------------------------------------------------------------- /app/media/kuber_configs/resolv.conf.j2: -------------------------------------------------------------------------------- 1 | 2 | # {{ ansible_managed }} 3 | 4 | {% if resolv_search is defined and resolv_search | length > 0 %} 5 | search {{ resolv_search|join(' ') }} 6 | {% endif %} 7 | {% if resolv_domain is defined and resolv_domain != "" %} 8 | domain {{ resolv_domain }} 9 | {% endif %} 10 | {% for ns in resolv_nameservers %} 11 | nameserver {{ ns }} 12 | {% endfor %} 13 | {% if resolv_sortlist is defined and resolv_sortlist | length > 0 %} 14 | {% for sl in resolv_sortlist %} 15 | sortlist {{ sl }} 16 | {% endfor %} 17 | {% endif %} 18 | {% if resolv_options is defined and resolv_options | length > 0 %} 19 | options {{ resolv_options|join(' ') }} 20 | {% endif %} 21 | -------------------------------------------------------------------------------- /app/media/MyTerraform/modules/docker/variables.tf: -------------------------------------------------------------------------------- 1 | variable "create_image" { 2 | type = bool 3 | } 4 | 5 | variable "image_name" { 6 | type = string 7 | } 8 | 9 | variable "image_force_remove" { 10 | type = bool 11 | } 12 | 13 | variable "image_build" { 14 | type = 
def ansible_install_template(input_, tool:str):
    """Dispatch an Ansible install-template generation to the tool-specific
    generator.

    input_: tool-specific request payload, passed through unchanged.
    tool: one of 'nginx', 'docker', 'kuber'.
    Raises HTTPException(400) for any unrecognized tool name.
    """
    generators = {
        'nginx': ansible_nginx_install,
        'docker': ansible_docker_install,
        'kuber': ansible_kuber_install,
    }
    generator = generators.get(tool)
    if generator is None:
        raise HTTPException(400,"please select a valid tool for installation")
    return generator(input_)
-------------------------------------------------------------------------------- /app/media/grafana_datasources/alertmanager.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Alertmanager 5 | uid: alertmanager 6 | type: alertmanager 7 | url: http://localhost:9093 8 | access: proxy 9 | orgId: 1 10 | jsonData: 11 | # Valid options for implementation include mimir, cortex and prometheus 12 | implementation: [prometheus|cortex|mimir] 13 | # Whether or not Grafana should send alert instances to this Alertmanager 14 | handleGrafanaManagedAlerts: [false|true] 15 | 16 | editable: [true|false] 17 | # optionally 18 | basicAuth: true 19 | basicAuthUser: my_user 20 | secureJsonData: 21 | basicAuthPassword: test_password 22 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/preinstall/templates/resolv.conf.j2: -------------------------------------------------------------------------------- 1 | 2 | # {{ ansible_managed }} 3 | 4 | {% if resolv_search is defined and resolv_search | length > 0 %} 5 | search {{ resolv_search|join(' ') }} 6 | {% endif %} 7 | {% if resolv_domain is defined and resolv_domain != "" %} 8 | domain {{ resolv_domain }} 9 | {% endif %} 10 | {% for ns in resolv_nameservers %} 11 | nameserver {{ ns }} 12 | {% endfor %} 13 | {% if resolv_sortlist is defined and resolv_sortlist | length > 0 %} 14 | {% for sl in resolv_sortlist %} 15 | sortlist {{ sl }} 16 | {% endfor %} 17 | {% endif %} 18 | {% if resolv_options is defined and resolv_options | length > 0 %} 19 | options {{ resolv_options|join(' ') }} 20 | {% endif %} 21 | -------------------------------------------------------------------------------- /app/media/grafana_datasources/postgresql.yml: -------------------------------------------------------------------------------- 1 | apiVersion: 1 2 | 3 | datasources: 4 | - name: Postgres 5 | type: postgres 6 | url: 
localhost:5432 7 | user: grafana # Database user’s login/username 8 | editable: [true|false] 9 | secureJsonData: 10 | password: 'Password!' 11 | jsonData: 12 | database: grafana 13 | sslmode: 'disable' # disable/require/verify-ca/verify-full 14 | maxOpenConns: 100 # Grafana v5.4+ 15 | maxIdleConns: 100 # Grafana v5.4+ 16 | maxIdleConnsAuto: true # Grafana v9.5.1+ 17 | connMaxLifetime: 14400 # Grafana v5.4+ 18 | postgresVersion: 903 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10 19 | timescaledb: false 20 | -------------------------------------------------------------------------------- /helm/values.yaml: -------------------------------------------------------------------------------- 1 | app: 2 | replicaCount: 1 3 | image: 4 | repository: 81318131/fastapi_gpt 5 | tag: latest 6 | service: 7 | type: ClusterIP 8 | port: 8080 9 | targetPort: 8080 10 | protocol: TCP 11 | environment: 12 | OPENAI_API_KEY: "" 13 | nodeSelector: {} 14 | affinity: {} 15 | 16 | web: 17 | replicaCount: 1 18 | image: 19 | repository: 81318131/web_gpt 20 | tag: kubernetes-local 21 | service: 22 | type: ClusterIP 23 | port: 80 24 | targetPort: 4173 25 | protocol: TCP 26 | nodePort: 30080 27 | nodeSelector: {} 28 | affinity: {} 29 | 30 | ingress: 31 | enabled: true 32 | host: "devopsgpt.local" 33 | ingressClassName: "nginx" 34 | -------------------------------------------------------------------------------- /app/media/MyAnsible/roles/init_k8s/tasks/cni.yml: -------------------------------------------------------------------------------- 1 | - block: 2 | - name: Check if Calico CRDs exist 3 | command: kubectl get crd felixconfigurations.crd.projectcalico.org 4 | register: calico_crd_check 5 | ignore_errors: true 6 | delegate_to: "{{ groups['k8s_masters'][0] }}" 7 | 8 | - block: 9 | - name: Apply CNI plugin (Calico) 10 | command: kubectl create -f {{ calico_operator_url }} 11 | retries: 3 12 | delay: 3 13 | 14 | - name: Apply CNI plugin (Calico) 15 | command: kubectl create -f {{ calico_crd_url }} 
def gpt_service(prompt, token=None):
    """Run *prompt* through the OpenAI chat-completions API and return the
    generated text.

    prompt: user prompt string, sent as a single chat message.
    token: optional API key. Defaults to None, in which case the OpenAI
        client falls back to the OPENAI_API_KEY environment variable.
        This default is required because the route handlers (e.g.
        routes/helm.py, routes/jcasc.py) call gpt_service(prompt) with a
        single argument, which previously raised TypeError at runtime.
    Raises HTTPException(400) with the provider/client error message on
    any failure.
    """
    try:
        client = OpenAI(api_key=token)
        chat_completion = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": [{"type": "text", "text": prompt}],
                }
            ],
            model="gpt-4o-mini",
        )
        # One completion requested, so only the first choice is meaningful.
        generated_text = chat_completion.choices[0].message.content

        return generated_text

    except Exception as e:
        # Surface any provider/client failure as a 400 for the API layer,
        # preserving the original exception as the cause.
        raise HTTPException(status_code=400, detail=str(e)) from e
def remove_none_values(d):
    """Recursively strip None values from dicts and None items from lists.

    Non-container values are returned unchanged.
    """
    if isinstance(d, dict):
        return {k: remove_none_values(v) for k, v in d.items() if v is not None}
    elif isinstance(d, list):
        return [remove_none_values(i) for i in d if i is not None]
    return d

def tempo_template(input):
    """Render the Tempo datasource request to app/media/MyGrafana/tempo.yaml.

    input: pydantic model; fields set to None are excluded from the dump.
    Side effect only -- creates the output directory if needed and writes
    the YAML file.
    """
    dir = 'app/media/MyGrafana'
    compose_total = input.dict(exclude_none=True)

    # exist_ok: the original os.makedirs(dir) raised FileExistsError on any
    # repeat call once the directory existed.
    os.makedirs(dir, exist_ok=True)
    # Use the joined path (the original discarded os.path.join's result and
    # reopened a hard-coded duplicate of the same path).
    path = os.path.join(dir, 'tempo.yaml')

    # Context manager guarantees the file is closed even if yaml.dump raises.
    with open(path, "w") as file:
        yaml.dump(compose_total, file, default_flow_style=False, sort_keys=False)
["my-image:latest"] 9 | } 10 | """ 11 | tfvars_file = f"""create_image = {create_docker_image} 12 | image_name = "my-image" 13 | image_force_remove = true 14 | image_build = {image_build} 15 | 16 | create_container = {create_docker_container} 17 | container_image = "my-image" 18 | container_name = "my-container" 19 | container_hostname = "my-host" 20 | container_restart = "always" 21 | 22 | 23 | """ 24 | return tfvars_file -------------------------------------------------------------------------------- /app/media/kuber_configs/keepalived.conf.j2: -------------------------------------------------------------------------------- 1 | vrrp_script check_apiserver { 2 | script "/etc/keepalived/check_apiserver.sh" 3 | interval 3 # check api server every 3 seconds 4 | timeout 10 # timeout second if api server doesn't answered 5 | fall 5 # failed time 6 | rise 2 # success 2 times 7 | weight -2 # if failed is done it reduce 2 of the weight 8 | } 9 | 10 | vrrp_instance VI_1 { 11 | state BACKUP 12 | interface {{ interface_name }} # set your interface 13 | virtual_router_id 1 14 | priority 100 15 | advert_int 5 16 | authentication { 17 | auth_type PASS 18 | auth_pass mysecret 19 | } 20 | virtual_ipaddress { 21 | {{ virtual_ip }} 22 | } 23 | track_script { 24 | check_apiserver 25 | } 26 | } -------------------------------------------------------------------------------- /app/media/Installation_base/Docker/ubuntu.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | sudo apt-get update -y 3 | sudo apt-get install ca-certificates curl -y 4 | sudo install -m 0755 -d /etc/apt/keyrings 5 | sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc 6 | sudo chmod a+r /etc/apt/keyrings/docker.asc 7 | 8 | # Add the repository to Apt sources: 9 | echo \ 10 | "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \ 11 | $(. 
@app.post("/api/jcasc-template/")
async def jcasc_template_generation(request: Jcasc) -> Output:
    """Generate a Jenkins Configuration-as-Code project from the request.

    Short-circuits with a stub response when the TEST environment
    variable is set, so the test suite never reaches the GPT backend.
    """
    if os.environ.get("TEST"):
        return Output(output='output')

    # Prompt -> GPT -> generator script -> executed into app/media/MyJcasc.
    generated_code = gpt_service(jcasc_template_generator(request))
    edit_directory_generator("jcasc_generator", generated_code)
    execute_pythonfile("MyJcasc", "jcasc_generator")
    return Output(output='output')
class JenkinsInstallation(BaseModel):
    """Input model for the Jenkins installation endpoint.

    Attributes:
        os: target operating system; one of Ubuntu, Fedora, RHEL.
        environment: installation target; 'Linux' (host install) or 'Docker'.
    """

    os: str | None = 'Ubuntu'

    environment: str = 'Linux'

    @validator("os")
    def validator_os(cls, value):
        # Restrict to the distros the project ships installation assets for.
        valid_oss = ['Ubuntu', 'Fedora', 'RHEL']
        if value not in valid_oss:
            raise ValueError(f"your selected OS must be in {valid_oss}")
        return value

    @validator("environment")
    def validator_environment(cls, value):
        env = ['Linux', 'Docker']
        if value not in env:
            # Bug fix: corrected typo in the user-facing message
            # ("Environemnt" -> "Environment").
            raise ValueError(f"your selected Environment must be in {env}")
        return value
class DockerInstallationInput(BaseModel):
    """Input model for the Docker installation endpoint."""

    # Defaults mirror the most common target: Docker on an Ubuntu host.
    os: str = "Ubuntu"
    environment: str = "Linux"

    @validator("os")
    def validate_os(cls, value):
        # Only distros with a matching script under app/media/Installation_base.
        allowed_os = ['Ubuntu', 'Centos', 'Fedora', 'RHEL']
        if value not in allowed_os:
            raise ValueError(f"OS must be one of {allowed_os}.")
        return value

    @validator("environment")
    def validate_environment(cls, value):
        # Renamed the local from the misleading `allowed_os`; the rendered
        # error text is unchanged because the f-string interpolates the value.
        allowed_envs = ['Linux']
        if value not in allowed_envs:
            raise ValueError(f"Environment must be one of {allowed_envs}.")
        return value
def IaC_template_generator_sqs(input) -> str:
    """Build terraform.tfvars content for the AWS SQS module.

    Each truthy flag on ``input`` toggles the matching ``create_*``
    variable in the emitted tfvars text (lowercase Terraform booleans).
    """
    flags = [
        ("create", input.sqs_queue),
        ("create_queue_policy", input.queue_policy),
        ("create_dlq", input.dlq),
        ("create_dlq_redrive_allow_policy", input.dlq_redrive_allow_policy),
        ("create_dlq_queue_policy", input.dlq_queue_policy),
    ]
    rendered = [f"{name} = {'true' if enabled else 'false'}" for name, enabled in flags]
    # Trailing newline matches the original triple-quoted template.
    return "\n".join(rendered) + "\n"
folder = f"app/media/{folder}" 18 | if os.path.isdir(folder): 19 | try: 20 | 21 | shutil.rmtree(folder) 22 | print(f"Successfully removed '{folder}' and all its contents.") 23 | except Exception as e: 24 | raise HTTPException(status_code=400, detail='please try again') 25 | 26 | os.system(f"python3 app/directory_generators/{gen_file}.py") 27 | os.system(f"python3 app/directory_generators/{gen_file}.py") 28 | -------------------------------------------------------------------------------- /app/routes/docker.py: -------------------------------------------------------------------------------- 1 | from app.app_instance import app 2 | from app.models import (DockerCompose,DockerInstallationInput,Output) 3 | from app.template_generators.docker.compose import docker_compose_generator 4 | from app.template_generators.docker.installation import docker_installation_selection 5 | import os 6 | 7 | @app.post("/api/docker-compose/") 8 | async def docker_compose_template(request:DockerCompose) -> Output: 9 | 10 | if os.environ.get("TEST"): 11 | return Output(output='output') 12 | docker_compose_generator(request) 13 | 14 | return Output(output='output') 15 | 16 | 17 | @app.post("/api/docker/installation") 18 | async def docker_installation(request:DockerInstallationInput) -> Output: 19 | 20 | docker_installation_selection(request) 21 | 22 | return Output(output='output') -------------------------------------------------------------------------------- /app/media/MyAnsible/kubernetes_playbook.yml: -------------------------------------------------------------------------------- 1 | 2 | - hosts: all 3 | roles: 4 | - role: preinstall 5 | gather_facts: yes 6 | any_errors_fatal: true 7 | tags: [preinstall] 8 | 9 | - hosts: k8s 10 | roles: 11 | - role: k8s 12 | gather_facts: yes 13 | any_errors_fatal: true 14 | tags: [k8s] 15 | 16 | - hosts: k8s 17 | roles: 18 | - role: init_k8s 19 | gather_facts: yes 20 | any_errors_fatal: true 21 | tags: [init_k8s] 22 | 23 | - hosts: k8s_masters 24 | 
class BasicAuth(BaseModel):
    """HTTP basic-auth credentials for the Alertmanager datasource."""

    basicAuthUser: str
    basicAuthPassword: str


class AlertManagerInput(BaseModel):
    """Grafana Alertmanager datasource definition."""

    name: str = "Alertmanager"
    url: str = "http://localhost:9093"
    uid: str = "alertmanager"
    implementation: str

    handleGrafanaManagedAlerts: bool = True
    editable: bool = True
    basic_auth: Optional[BasicAuth]

    @validator("implementation")
    def validator_implementation(cls, value):
        # Alertmanager flavours accepted by the Grafana datasource.
        supported = ['prometheus', 'cortex', 'mimir']
        if value not in supported:
            raise ValueError(f"implementation must be in {supported}")
        return value
    url: http://localhost:9090
    editable: true  # set to false to lock this datasource in the Grafana UI
def create_directory(folder: str, filename: str):
    """Ensure app/media/<folder> exists and return the path to <filename> in it.

    Bug fixes: the joined path was previously computed and discarded (dead
    code) — it is now returned so callers can use it; existing callers that
    ignore the return value are unaffected. The exists-check/makedirs pair
    is replaced by an idempotent makedirs(exist_ok=True).
    """
    dir = f"app/media/{folder}"

    # exist_ok avoids the race between os.path.exists() and os.makedirs().
    os.makedirs(dir, exist_ok=True)
    return os.path.join(dir, filename)
def IaC_template_generator_autoscaling(input) -> str:
    """Build terraform.tfvars content for the AWS Auto Scaling module."""

    def tf_bool(enabled):
        # Terraform expects lowercase boolean literals.
        return 'true' if enabled else 'false'

    settings = {
        "create": input.autoscaling_group,
        "create_launch_template": input.launch_template,
        "create_schedule": input.schedule,
        "create_scaling_policy": input.scaling_policy,
        "create_iam_instance_profile": input.iam_instance_profile,
    }
    # One "name = bool" line per entry, trailing newline included.
    return "".join(f"{key} = {tf_bool(value)}\n" for key, value in settings.items())
    jsonData:
      httpMethod: GET
      tracesToLogsV2: # Configure this section to link tracing data with logs
        datasourceUid: 'loki'
        spanStartTimeShift: '-2m'
        spanEndTimeShift: '2m'
        filterByTraceID: true
        filterBySpanID: true
      serviceMap: # Configure this section to enable the service-graph feature for Tempo
        datasourceUid: 'Mimir-OtelMetrics-Tenant'
      nodeGraph: # Enable this section to add the node-graph feature to Tempo
        enabled: true
import os

# Generator script: writes a Jenkins Configuration-as-Code (JCasC) file to
# app/media/MyJcasc/jcasc/config.yaml. Executed as a standalone script by
# execute_pythonfile() in app/services.py.
project_name = "app/media/MyJcasc"
jcasc_dir = os.path.join(project_name, "jcasc")
os.makedirs(jcasc_dir, exist_ok=True)

# JCasC payload written verbatim after stripping the literal's surrounding
# newlines.
# NOTE(review): YAML nesting below assumes standard 2-space indentation —
# confirm against the Jenkins instance that consumes this file.
jcasc_content = """
systemMessage: "Welcome to Jenkins configured via JCasC"
author:
  name: "admin"
  password: "password"
allowSignup: true
allowAnonymousRead: true
cache_size: 1
executors: 1
required_plugins:
  - "string"
views:
  - list:
      name: "All"
authorizationStrategy:
  projectMatrix:
    grantedPermissions:
      - "Overall/Administer:admin"
      - "Job/Read:developer"
      - "Job/Build:developer"
tools:
  git:
    installations:
      - name: "Default"
        home: "/usr/bin/git"
security:
  globalJobDslSecurityConfiguration:
    useScriptSecurity: false
"""

with open(os.path.join(jcasc_dir, "config.yaml"), "w") as f:
    f.write(jcasc_content.strip())
def remove_none_values(d):
    """Recursively drop None values from nested dicts and lists."""
    if isinstance(d, dict):
        return {k: remove_none_values(v) for k, v in d.items() if v is not None}
    if isinstance(d, list):
        return [remove_none_values(i) for i in d if i is not None]
    return d


def docker_compose_generator(input):
    """Serialize the compose model to app/media/MyCompose/docker-compose.yaml.

    Keys are emitted in declaration order (sort_keys=False) so the file
    reads like a hand-written compose file.
    """
    dir = 'app/media/MyCompose'

    compose_total = remove_none_values(input.model_dump(mode="json"))
    # exist_ok keeps the call idempotent across repeated requests.
    os.makedirs(dir, exist_ok=True)
    out_path = os.path.join(dir, 'docker-compose.yaml')

    # Bug fix: the file was previously written twice — first with sorted
    # keys, then immediately overwritten unsorted. A single context-managed
    # write is sufficient and guarantees the handle is closed.
    with open(out_path, "w") as file:
        yaml.dump(compose_total, file, default_flow_style=False, sort_keys=False)
class TestIaCBugFix:
    """Endpoint tests for /api/IaC-bugfix/ with the OpenAI client mocked out."""

    def setup_method(self):
        fake_client = MagicMock()
        fake_client.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content='Mocked OpenAI Response'))]
        )

        # Patch both the high-level service and the raw OpenAI client so no
        # network call can escape the test.
        self.mock_gpt_service = patch('app.main.gpt_service', return_value='Mocked GPT Response').start()
        self.mock_openai = patch('app.gpt_services.OpenAI', return_value=fake_client).start()

        self.url = '/api/IaC-bugfix/'

    def teardown_method(self):
        # Stops every patch started via .start() in setup_method.
        patch.stopall()

    def test_iac_bugfix(self, client, ias_bugfix_sample_input):
        assert client.post(self.url, json=ias_bugfix_sample_input).status_code == 200

    def test_iac_bugfix_invalid_input(self, client, iac_bugfix_invalid_sample_input):
        assert client.post(self.url, json=iac_bugfix_invalid_sample_input).status_code == 422
7 | mock_client_instance.chat.completions.create.return_value = MagicMock( 8 | choices=[MagicMock(message=MagicMock(content='Mocked OpenAI Response'))] 9 | ) 10 | 11 | self.mock_gpt_service = patch('app.main.gpt_service', return_value='Mocked GPT Response').start() 12 | self.mock_openai = patch('app.gpt_services.OpenAI', return_value=mock_client_instance).start() 13 | 14 | self.url = '/api/IaC-basic/' 15 | 16 | def teardown_method(self): 17 | patch.stopall() 18 | 19 | def test_iac_basic_generation(self, client, iac_basic_sample_input): 20 | response = client.post(self.url, json=iac_basic_sample_input) 21 | assert response.status_code == 200 22 | 23 | def test_basic_generation_invalid_service(self, client, iac_basic_invalid_sample_input): 24 | response = client.post(self.url, json=iac_basic_invalid_sample_input) 25 | assert response.status_code == 422 26 | -------------------------------------------------------------------------------- /helm/templates/app/deployment.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: apps/v1 2 | kind: Deployment 3 | metadata: 4 | name: {{ include "app.deploymentName" . }} 5 | labels: 6 | {{- include "app.labels" . | nindent 4 }} 7 | spec: 8 | replicas: {{ .Values.app.replicaCount }} 9 | selector: 10 | matchLabels: 11 | {{- include "app.labels" . | nindent 8 }} 12 | template: 13 | metadata: 14 | labels: 15 | {{- include "app.labels" . | nindent 8 }} 16 | spec: 17 | containers: 18 | - name: {{ include "app.deploymentName" . }} 19 | image: {{ .Values.app.image.repository }}:{{ .Values.app.image.tag }} 20 | command: ["fastapi", "run", "app/main.py", "--port", "8080"] 21 | ports: 22 | - containerPort: {{ .Values.app.service.targetPort }} 23 | envFrom: 24 | - secretRef: 25 | name: {{ include "app.secretName" . 
def zip_folder(folder_path: str, output_zip_path: str):
    """Zip the entire folder."""
    with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as archive:
        for root, _dirs, filenames in os.walk(folder_path):
            for name in filenames:
                absolute = os.path.join(root, name)
                # Store entries relative to the folder root so the archive
                # unpacks without the leading directory prefix.
                archive.write(absolute, os.path.relpath(absolute, folder_path))
def elasticsearch_template(input):
    """Render a Grafana Elasticsearch datasource provisioning file.

    Writes app/media/MyGrafana/elasticsearch.yml from the fields of
    ``input`` (name, url, index/interval settings, log field mappings).
    """
    json_template = {
        "apiVersion": 1,
        "datasources": [
            {
                "name": input.name,
                "type": "elasticsearch",
                "url": input.url,
                "access": "proxy",
                "jsonData": {
                    "index": input.index,
                    "interval": input.interval,
                    "timeField": input.timeField,
                    "logMessageField": input.logMessageField,
                    "logLevelField": input.logLevelField,
                },
                "editable": input.editable,
            }
        ]
    }
    dir = "app/media/MyGrafana"
    # Bug fix: os.makedirs without exist_ok raised FileExistsError when the
    # directory already existed (e.g. created by another generator).
    os.makedirs(dir, exist_ok=True)
    out_path = os.path.join(dir, 'elasticsearch.yml')

    # Bug fix: the file handle was previously never closed; the context
    # manager flushes and closes it even if yaml.dump raises.
    with open(out_path, "w") as file:
        yaml.dump(json_template, file, default_flow_style=False, sort_keys=False)
def IaC_template_generator_rds(input) -> str:
    """Build the terraform.tfvars content for the AWS RDS module.

    Each boolean flag on the request model is rendered as a lowercase HCL
    boolean controlling whether the corresponding resource is created.
    """
    def hcl_bool(flag):
        # Terraform expects lowercase true/false literals.
        return 'true' if flag else 'false'

    rendered = (
        f"create_db_instance = {hcl_bool(input.db_instance)}",
        f"create_db_option_group = {hcl_bool(input.db_option_group)}",
        f"create_db_parameter_group = {hcl_bool(input.db_parameter_group)}",
        f"create_db_subnet_group = {hcl_bool(input.db_subnet_group)}",
        f"create_monitoring_role = {hcl_bool(input.monitoring_role)}",
        f"create_cloudwatch_log_group = {hcl_bool(input.cloudwatch_log_group)}",
        f"manage_master_user_password_rotation = {hcl_bool(input.master_user_password_rotation)}",
    )
    # Original template ends with a trailing newline; keep it.
    return "\n".join(rendered) + "\n"
def create_MyBash_directory():
    """Ensure the app/media/MyBash output directory exists."""
    # exist_ok makes the call idempotent across repeated requests.
    os.makedirs('app/media/MyBash', exist_ok=True)


def docker_installation_selection(input):
    """Copy the Docker installation script matching ``input.os`` into MyBash.

    Raises:
        ValueError: if the requested OS has no installation script.
    """
    create_MyBash_directory()

    # One table instead of four identical copy branches.
    scripts = {
        "Ubuntu": 'app/media/Installation_base/Docker/ubuntu.sh',
        "Fedora": 'app/media/Installation_base/Docker/fedora.sh',
        "Centos": 'app/media/Installation_base/Docker/centos.sh',
        "RHEL": 'app/media/Installation_base/Docker/RHEL.sh',
    }
    try:
        source = scripts[input.os]
    except KeyError:
        # Same exception type as before, now with a diagnosable message.
        raise ValueError(f"unsupported OS for Docker installation: {input.os!r}") from None
    shutil.copyfile(source, 'app/media/MyBash/bash.sh')
class TracesToLogsV2(BaseModel):
    """Trace-to-logs correlation settings (``tracesToLogsV2`` jsonData block)."""
    # UID of the logs datasource to jump to; default targets Loki.
    datasourceUid: str = 'loki'
    spanStartTimeShift: str = '-2m'
    spanEndTimeShift: str = '2m'
    filterByTraceID: bool = True
    filterBySpanID: bool = True

class ServiceMap(BaseModel):
    """Service-map view backed by a metrics datasource."""
    datasourceUid: str = 'Mimir-OtelMetrics-Tenant'

class NodeGraph(BaseModel):
    """Node-graph visualisation toggle."""
    enabled: bool = True

class JsonData(BaseModel):
    """``jsonData`` payload of the Tempo datasource entry."""
    httpMethod: str = 'GET'
    tracesToLogsV2: Optional[TracesToLogsV2] = TracesToLogsV2()
    serviceMap: Optional[ServiceMap] = ServiceMap()
    nodeGraph: Optional[NodeGraph] = NodeGraph()

class Datasource(BaseModel):
    """A single Grafana Tempo datasource provisioning entry."""
    name: str = 'Tempo'
    type: str = Field(default='tempo')
    access: str = Field(default="proxy")
    orgId: int = Field(default=1)
    # Default points at the Tempo query-frontend in-cluster service.
    url: str = 'http://tempo-query-frontend.tempo.svc.cluster.local:3100'
    basicAuth: bool = False
    version: int = Field(default=1)
    editable: bool = True
    apiVersion: int = Field(default=1)
    uid: str = Field(default="tempo")
    jsonData: JsonData = JsonData()

class TempoInput(BaseModel):
    """Top-level provisioning document for the Tempo datasource."""
    apiVersion: int = Field(default=1)
    # NOTE(review): mutable default is safe here because pydantic deep-copies
    # field defaults per instance.
    datasources: List[Datasource] = [Datasource()]
def IaC_template_generator_efs(input) -> str:
    """Build the terraform.tfvars content for the AWS EFS module.

    Boolean request flags are rendered as lowercase HCL booleans; the
    security-group and EFS blocks are fixed templates the user can edit
    after download.
    """
    aws_efs_create_file_system = 'true' if input.efs_file_system else 'false'
    aws_efs_create_mount_target = 'true' if input.efs_mount_target else 'false'
    aws_efs_create_backup_policy = 'true' if input.efs_backup_policy else 'false'
    # NFS (2049/tcp) ingress open to the world by default; users are
    # expected to tighten cidr_blocks for real deployments.
    ingress_rules = """{
    efs_rule = {
        description = "EFS Ingress"
        from_port   = 2049
        to_port     = 2049
        protocol    = "tcp"
        cidr_blocks = ["0.0.0.0/0"]
    }
}"""
    egress_rules = """ {
    from_port   = 0
    to_port     = 0
    protocol    = "-1"
    cidr_blocks = ["0.0.0.0/0"]
}
"""
    efs = """{
    creation_token   = "terraform"
    encrypted        = true
    performance_mode = "generalPurpose"
    throughput_mode  = "elastic"
    backup_policy    = "ENABLED"
}
"""

    tfvars_file = f"""security_group_name = "efs_rule"
security_group_ingress_rules = {ingress_rules}
security_group_egress_rule = {egress_rules}

file_system_create = {aws_efs_create_file_system}
efs = {efs}

mount_target_create = {aws_efs_create_mount_target}
backup_policy_create = {aws_efs_create_backup_policy}"""
    return tfvars_file
def postgres_template(input):
    """Render a Grafana PostgreSQL datasource provisioning file.

    Builds the provisioning dict from the request model and writes it as
    YAML to app/media/MyGrafana/postgresql.yml.  The password goes into
    ``secureJsonData`` so Grafana stores it encrypted.
    """
    json_template = {
        "apiVersion": 1,
        "datasources": [
            {
                "name": input.name,
                "type": "postgres",
                "url": input.url,
                "user": input.user,
                "editable": input.editable,
                "secureJsonData": {
                    "password": input.password
                },
                "jsonData": {
                    "database": input.database,
                    "sslmode": input.sslmode,
                    "maxOpenConns": input.maxOpenConns,
                    "maxIdleConns": input.maxIdleConns,
                    "maxIdleConnsAuto": input.maxIdleConnsAuto,
                    "connMaxLifetime": input.connMaxLifetime,
                    "postgresVersion": input.postgresVersion,
                    "timescaledb": input.timescaledb
                }
            }
        ]
    }

    out_dir = "app/media/MyGrafana"
    # exist_ok avoids FileExistsError when the directory already exists.
    os.makedirs(out_dir, exist_ok=True)

    # Context manager guarantees the handle is flushed and closed.
    with open(os.path.join(out_dir, "postgresql.yml"), "w") as file:
        yaml.dump(json_template, file, default_flow_style=False, sort_keys=False)
class AnsibleBase(BaseModel):
    """Connection settings shared by every Ansible generation request."""
    # SSH user and port written into the generated inventory/group_vars.
    ansible_user:str = 'root'
    ansible_port:int = 22

class AnsibleInstallNginx(AnsibleBase):
    """Request model for generating an nginx installation playbook."""

    os: str = 'ubuntu'
    hosts:List[str] = ['www.example.com']
    version:str = 'latest'

    @validator("os")
    def validator_os(cls, value):
        # Only Ubuntu targets have an nginx role implemented today.
        valid_oss = ['ubuntu']
        if value not in valid_oss:
            raise ValueError(f"your selected OS must be in {valid_oss}")
        return value


class AnsibleInstallDocker(AnsibleBase):
    """Request model for generating a Docker installation playbook."""
    os: str = 'ubuntu'
    hosts:List[str] = ['www.example.com']


    @validator("os")
    def validator_os(cls, value):
        # Only Ubuntu targets have a docker role implemented today.
        valid_oss = ['ubuntu']
        if value not in valid_oss:
            raise ValueError(f"your selected OS must be in {valid_oss}")
        return value



class AnsibleInstallKuber(AnsibleBase):
    """Request model for generating a Kubernetes cluster playbook.

    Master and worker node lists are required; there are no defaults.
    """
    os: str = 'ubuntu'
    k8s_worker_nodes: List[str]
    k8s_master_nodes: List[str]
    # Kubernetes minor version used for apt repo/key selection.
    version:str = "1.31"


    @validator("os")
    def validator_os(cls, value):
        # Only Ubuntu targets have a k8s role implemented today.
        valid_oss = ['ubuntu']
        if value not in valid_oss:
            raise ValueError(f"your selected OS must be in {valid_oss}")
        return value
def fetch_and_save(url):
    """Download *url*, extract its title and paragraph text, and save to disk.

    The output file is ``<save_dir>/<sanitized title>.txt``.  Network and
    HTTP errors are reported and swallowed so one bad URL does not abort
    the whole crawl.
    """
    import re

    try:
        # Bounded timeout so a hung server cannot stall the crawl forever.
        response = requests.get(url, timeout=30)
        response.raise_for_status()  # Check if the request was successful

        # Parse the HTML content
        soup = BeautifulSoup(response.text, 'html.parser')

        # For demonstration, we are fetching the page title and all paragraphs
        title = soup.title.string if soup.title else "no_title"
        paragraphs = soup.find_all('p')

        # Strip path separators and other unsafe characters so the title is
        # always a valid single file name on the local filesystem.
        safe_title = re.sub(r'[^\w\- .]', '_', title).strip() or "no_title"
        file_name = os.path.join(save_dir, f"{safe_title}.txt")

        # Write the content to the file
        with open(file_name, 'w', encoding='utf-8') as file:
            file.write(f"Title: {title}\n\n")
            for para in paragraphs:
                file.write(para.get_text() + "\n")

        print(f"Saved content from {url} to {file_name}")

    except requests.RequestException as e:
        print(f"Failed to fetch {url}: {e}")
calico_operator_url: "https://raw.githubusercontent.com/projectcalico/calico/v3.29.0/manifests/tigera-operator.yaml" 9 | calico_crd_url: "https://raw.githubusercontent.com/projectcalico/calico/v3.29.0/manifests/custom-resources.yaml" 10 | pod_network_cidr: "192.168.0.0/16" 11 | 12 | # DNS 13 | resolv_nameservers: [8.8.8.8, 4.2.2.4] # 403.online 14 | 15 | # Sanction shekan 16 | use_iran: "true" # change it to "false" if you are outside of iran 17 | 18 | # Docker 19 | docker_gpg_key_url: "https://download.docker.com/linux/ubuntu/gpg" 20 | docker_gpg_key_path: "/etc/apt/keyrings/docker.gpg" 21 | docker_apt_repo: "https://download.docker.com/linux/ubuntu" 22 | 23 | # Kubernetes 24 | kubernetes_gpg_keyring_path: "/etc/apt/keyrings/kubernetes-apt-keyring.gpg" 25 | kubernetes_gpg_key_url: "https://pkgs.k8s.io/core:/stable:/v1.31/deb/Release.key" 26 | kubernetes_apt_repo: "https://pkgs.k8s.io/core:/stable:/v1.31/deb/" 27 | k8s_version: 1.31 # see here https://kubernetes.io/releases/patch-releases/ and https://github.com/kubernetes/kubernetes/releases 28 | 29 | # CRI 30 | cri_socket: unix:///var/run/containerd/containerd.sock 31 | 32 | # Ansible Connection 33 | ansible_user: root 34 | ansible_port: 22 35 | ansible_python_interpreter: "/usr/bin/python3" 36 | domain: "devopsgpt.com" 37 | apiserver_url: "devopsgpt.com" 38 | -------------------------------------------------------------------------------- /app/routes/utils.py: -------------------------------------------------------------------------------- 1 | from app.app_instance import app 2 | from fastapi import FastAPI, HTTPException,Response 3 | from fastapi.responses import FileResponse 4 | import os 5 | import zipfile 6 | import shutil 7 | 8 | 9 | def zip_folder(folder_path: str, output_zip_path: str): 10 | """Zip the entire folder.""" 11 | with zipfile.ZipFile(output_zip_path, 'w', zipfile.ZIP_DEFLATED) as zip_file: 12 | for root, dirs, files in os.walk(folder_path): 13 | for file in files: 14 | file_path = 
def add_files_to_folder(files: list, folder: str):
    """Copy each file in *files* into *folder*, creating the folder if needed.

    Only the base name of each source path is kept, so sources from
    different directories land flat inside the destination folder.
    """
    os.makedirs(folder, exist_ok=True)

    for filename in files:
        destination_file = os.path.join(folder, os.path.basename(filename))
        shutil.copy(filename, destination_file)
def create_directory(folder: str, filename: str):
    """Ensure the app/media/<folder> output directory exists.

    *filename* names the artifact later written into the folder; the
    directory itself is all that is created here.
    """
    # exist_ok makes the call idempotent across repeated requests.
    os.makedirs(f"app/media/{folder}", exist_ok=True)


def select_install_jenkins(input):
    """Stage the Jenkins installation artifact for the requested target.

    Docker environments get a docker-compose file copied into MyCompose;
    any other environment gets a per-OS shell script copied into MyBash.

    Raises:
        ValueError: if ``input.os`` is not a supported operating system.
    """
    create_directory("MyBash", 'bash.sh')
    create_directory("MyCompose", 'docker-compose.yaml')

    if input.environment == 'Docker':
        shutil.copyfile(
            'app/media/Installation_base/Jenkins/docker-compose.yml',
            'app/media/MyCompose/docker-compose.yaml',
        )
        return

    # One table instead of three identical copy branches.
    scripts = {
        "Ubuntu": 'app/media/Installation_base/Jenkins/ubuntu.sh',
        "Fedora": 'app/media/Installation_base/Jenkins/fedora.sh',
        "RHEL": 'app/media/Installation_base/Jenkins/RHEL.sh',
    }
    try:
        source = scripts[input.os]
    except KeyError:
        # Same exception type as before, now with a diagnosable message.
        raise ValueError(f"unsupported OS for Jenkins installation: {input.os!r}") from None
    shutil.copyfile(source, 'app/media/MyBash/bash.sh')
-------------------------------------------------------------------------------- 1 | template_generator_terraform_aws: 2 | - changed-files: 3 | - any-glob-to-any-file: 'app/template_generators/terraform/aws/*.py' 4 | 5 | template_generator_terraform_argocd: 6 | - changed-files: 7 | - any-glob-to-any-file: 'app/template_generators/terraform/argocd.py' 8 | 9 | 10 | template_generator_terraform_docker: 11 | - changed-files: 12 | - any-glob-to-any-file: 'app/template_generators/terraform/docker.py' 13 | 14 | crawler: 15 | - changed-files: 16 | - any-glob-to-any-file: 'crawl/**' 17 | 18 | 19 | helm: 20 | - changed-files: 21 | - any-glob-to-any-file: helm/** 22 | 23 | web-ui: 24 | - changed-files: 25 | - any-glob-to-any-file: web/** 26 | 27 | doc: 28 | - changed-files: 29 | - any-glob-to-any-file: '**/*.md' 30 | 31 | unit-test: 32 | - changed-files: 33 | - any-glob-to-any-file: 'app/tests/*.py' 34 | 35 | docker-compose: 36 | - changed-files: 37 | - any-glob-to-any-file: '**/docker-compose.yml' 38 | 39 | dockerfile: 40 | - changed-files: 41 | - any-glob-to-any-file: '**/Dockerfile' 42 | 43 | 44 | requirements: 45 | - changed-files: 46 | - any-glob-to-any-file: '**/requirements.txt' 47 | 48 | gpt: 49 | - changed-files: 50 | - any-glob-to-any-file: 'app/gpt_services.py' 51 | 52 | services: 53 | - changed-files: 54 | - any-glob-to-any-file: 'app/services.py' 55 | 56 | fastapi: 57 | - changed-files: 58 | - any-glob-to-any-file: 'app/main.py' 59 | 60 | prompt: 61 | - changed-files: 62 | - any-glob-to-any-file: 'app/prompt_generators.py' 63 | 64 | directory-generators: 65 | - changed-files: 66 | - any-glob-to-any-file: 'app/directory_generators/**' 67 | 68 | pydantic-models: 69 | - changed-files: 70 | - any-glob-to-any-file: 'app/models/**' 71 | -------------------------------------------------------------------------------- /app/tests/test_helm_template.py: -------------------------------------------------------------------------------- 1 | from unittest.mock import MagicMock, 
class TestHelmTemplate:
    """API tests for the /api/Helm-template/ endpoint with all external
    collaborators (OpenAI, generators, filesystem) mocked out."""

    def setup_method(self):
        # Fake OpenAI client whose chat-completion call returns a canned
        # object shaped like the real SDK result (choices[0].message.content).
        mock_client_instance = MagicMock()
        mock_client_instance.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content='Mocked OpenAI Response'))]
        )

        # Patch every collaborator the endpoint touches; patches stay
        # active until teardown_method calls patch.stopall().
        self.mock_execute_python_file = patch('app.main.execute_pythonfile').start()
        self.mock_edit_directory_generator = patch('app.main.edit_directory_generator').start()
        self.mock_gpt_service = patch('app.main.gpt_service', return_value='Mocked GPT Response').start()
        self.mock_openai = patch('app.gpt_services.OpenAI', return_value=mock_client_instance).start()
        self.mock_builtin_open = patch('builtins.open', mock_open()).start()
        self.mock_shutil_rm = patch('shutil.rmtree').start()

        self.url = '/api/Helm-template/'

    def teardown_method(self):
        # Undo every patch started in setup_method.
        patch.stopall()

    def test_helm_template_generation(self, client, helm_template_sample_input):
        # Happy path: a valid payload is accepted.
        response = client.post(self.url, json=helm_template_sample_input)
        assert response.status_code == 200

    def test_helm_invalid_api(self, client, helm_template_invalid_sample_input):
        # Invalid payload is rejected by request validation (422).
        resource = client.post(self.url, json=helm_template_invalid_sample_input)
        assert resource.status_code == 422

    def test_helm_invalid_port(self, client, helm_template_invalid_sample_input):
        # Ports above 65535 must fail model validation.
        helm_template_invalid_sample_input['pods'][0]['target_port'] = 70000
        resource = client.post(self.url, json=helm_template_invalid_sample_input)
        assert resource.status_code == 422
def IaC_template_generator_argocd(input) -> str:
    """Build the terraform.tfvars content for the ArgoCD module.

    Repository/application creation flags come from the request model;
    when no application block is supplied every application-related flag
    defaults to "false".
    """
    argocd_create_repository = 'true' if input.argocd_repository else 'false'
    # Identity check (`is not None`) instead of `!= None`: pydantic models
    # override __eq__, and presence is what we actually want to test.
    if input.argocd_application is not None:
        argocd_create_application = 'true'
        argocd_application_auto_prune = 'true' if input.argocd_application.sync_policy.auto_prune else 'false'
        argocd_application_selfheal = 'true' if input.argocd_application.sync_policy.self_heal else 'false'
    else:
        argocd_create_application = 'false'
        argocd_application_auto_prune = "false"
        argocd_application_selfheal = "false"

    # Placeholder connection/repo/application blocks the user edits after
    # download; the credential values are intentionally dummy.
    argocd_instance_info = """{
    server_addr = "http://argocd.local"
    username    = "username"
    password    = "password"
    insecure    = true
}
"""
    argocd_repository_info = """{
    repo     = "https://your_repo.git"
    username = "username"
    password = "token"
}
"""
    argocd_application = """{
    name                  = "myapp"
    destination_server    = "https://kubernetes.default.svc"
    destination_namespace = "default"
    source_repo_url       = "https://your_repo.git"
    source_path           = "myapp/manifests"
    source_target_revision = "master"
}
"""

    tfvars_file = f"""
argocd_instance_info = {argocd_instance_info}

repository_create = {argocd_create_repository}
argocd_repository_info = {argocd_repository_info}

application_create = {argocd_create_application}
argocd_application = {argocd_application}

argocd_sync_options = ["CreateNamespace=true", "ApplyOutOfSyncOnly=true", "FailOnSharedResource=true"]

auto_prune = {argocd_application_auto_prune}
self_heal = {argocd_application_selfheal} """
    return tfvars_file
| - name: Init cluster | Initiate cluster on node groups['kube_master'][0] 15 | shell: kubeadm init --config=/root/kubeadmcnf.yaml 16 | register: kubeadm_init 17 | # Retry is because upload config sometimes fails 18 | until: kubeadm_init is succeeded or "field is immutable" in kubeadm_init.stderr 19 | notify: Restart kubelet 20 | 21 | when: inventory_hostname == groups['k8s_masters'][0] and not kubeadm_already_run.stat.exists 22 | delegate_to: "{{ groups['k8s_masters'][0] }}" 23 | 24 | - block: 25 | - name: Create kubectl directory 26 | file: 27 | path: /root/.kube 28 | state: directory 29 | 30 | - name: Configure kubectl 31 | copy: 32 | src: /etc/kubernetes/admin.conf 33 | dest: /root/.kube/config 34 | remote_src: yes 35 | 36 | - name: Fetch kubeconfig 37 | fetch: 38 | src: /etc/kubernetes/admin.conf 39 | dest: kubeconfig/ 40 | flat: yes 41 | when: inventory_hostname == groups['k8s_masters'][0] 42 | delegate_to: "{{ groups['k8s_masters'][0] }}" 43 | 44 | - name: Sleep for 300 seconds and reboot the Master1 server 45 | wait_for: 46 | timeout: 300 47 | delegate_to: localhost 48 | 49 | - name: Reboot the servers 50 | command: reboot 51 | async: 1 52 | poll: 0 53 | # ignore_errors: yes 54 | delegate_to: "{{ groups['k8s_masters'][0] }}" 55 | 56 | - name: Sleep for 300 seconds to Master1 up and running 57 | wait_for: 58 | timeout: 300 59 | delegate_to: localhost 60 | # when: use_iran == "true" 61 | 62 | - name: Example Task After Reboot 63 | debug: 64 | msg: "Server back online and ready for tasks." 
def loki_template(input):
    """Render a Grafana Loki datasource provisioning file.

    Writes app/media/MyGrafana/loki.yml.  When basic-auth credentials are
    supplied on the request model, the basicAuth fields are added to the
    datasource entry; otherwise they are omitted entirely.
    """
    # Common entry shared by both the anonymous and basic-auth variants;
    # the original duplicated this whole dict in each branch.
    datasource = {
        "name": input.name,
        "uid": input.uid,
        "type": "loki",
        "orgId": 1,
        "url": input.url,
        "access": "proxy",

        "jsonData": {
            "timeout": input.timeout,
            "maxLines": input.maxLines
        },
        "editable": input.editable,
    }
    if input.basic_auth is not None:
        datasource["basicAuth"] = True
        datasource["basicAuthUser"] = input.basic_auth.basicAuthUser
        datasource["secureJsonData"] = {
            "basicAuthPassword": input.basic_auth.basicAuthPassword
        }

    json_template = {
        "apiVersion": 1,
        "datasources": [datasource]
    }

    out_dir = "app/media/MyGrafana"
    # exist_ok avoids FileExistsError when the directory already exists.
    os.makedirs(out_dir, exist_ok=True)

    # Context manager guarantees the handle is flushed and closed.
    with open(os.path.join(out_dir, "loki.yml"), "w") as file:
        yaml.dump(json_template, file, default_flow_style=False, sort_keys=False)
def alert_manager_template(input):
    """Write a Grafana provisioning file for an Alertmanager datasource.

    Renders ``app/media/MyGrafana/alertmanager.yml``.  When
    ``input.basic_auth`` is set, basic-auth credentials are added with the
    password kept in ``secureJsonData``.

    Args:
        input: request model with ``name``, ``uid``, ``url``,
            ``implementation``, ``handleGrafanaManagedAlerts``, ``editable``
            and an optional ``basic_auth`` object.
    """
    # Single datasource body; the optional auth keys are layered on top
    # instead of duplicating the whole dict in both branches.
    datasource = {
        "name": input.name,
        "uid": input.uid,
        "type": "alertmanager",
        "url": input.url,
        "access": "proxy",
        "orgId": 1,
        "jsonData": {
            "implementation": input.implementation,
            "handleGrafanaManagedAlerts": input.handleGrafanaManagedAlerts,
        },
        "editable": input.editable,
    }
    if input.basic_auth is not None:
        datasource["basicAuth"] = True
        datasource["basicAuthUser"] = input.basic_auth.basicAuthUser
        datasource["secureJsonData"] = {
            "basicAuthPassword": input.basic_auth.basicAuthPassword,
        }

    json_template = {"apiVersion": 1, "datasources": [datasource]}

    out_dir = "app/media/MyGrafana"
    # Bug fix: os.makedirs(dir) raised FileExistsError on a second call.
    os.makedirs(out_dir, exist_ok=True)
    path = os.path.join(out_dir, "alertmanager.yml")  # was computed then discarded
    with open(path, "w") as fh:  # was left open and never closed
        yaml.dump(json_template, fh, default_flow_style=False, sort_keys=False)
def mimir_template(input):
    """Write a Grafana provisioning file for a Mimir datasource.

    Mimir is queried through Grafana's ``prometheus`` datasource type.  When
    ``input.multi_tenancy`` is set, the tenant is passed via the configured
    HTTP header pair, with the header value stored in ``secureJsonData``.

    Args:
        input: request model with ``name``, ``uid``, ``url``, ``editable``,
            ``alertmanagerUid`` and an optional ``multi_tenancy`` object
            carrying ``httpHeaderName1`` / ``tenant_name``.
    """
    datasource = {
        "name": input.name,
        "uid": input.uid,
        "type": "prometheus",  # Mimir speaks the Prometheus query API
        "access": "proxy",
        "orgId": 1,
        "url": input.url,
        "editable": input.editable,
        "version": 1,
        "jsonData": {
            "alertmanagerUid": input.alertmanagerUid,
        },
    }
    if input.multi_tenancy is not None:
        # Tenant header (e.g. X-Scope-OrgID); the value is secret material.
        datasource["jsonData"]["httpHeaderName1"] = input.multi_tenancy.httpHeaderName1
        datasource["secureJsonData"] = {
            "httpHeaderValue1": input.multi_tenancy.tenant_name,
        }

    json_template = {"apiVersion": 1, "datasources": [datasource]}

    out_dir = "app/media/MyGrafana"
    # Bug fix: os.makedirs(dir) raised FileExistsError on a second call.
    os.makedirs(out_dir, exist_ok=True)
    path = os.path.join(out_dir, "mimir.yml")  # was computed then discarded
    with open(path, "w") as fh:  # was left open and never closed
        yaml.dump(json_template, fh, default_flow_style=False, sort_keys=False)
def _reset_ansible_dir() -> None:
    """Delete any previously generated MyAnsible project so a fresh one can be written.

    Extracted because the identical cleanup block was copy-pasted into all
    three handlers below.
    """
    dir = 'app/media/MyAnsible'
    if os.path.exists(dir):
        shutil.rmtree(dir)


@app.post("/api/ansible-install/nginx/")
async def ansible_install_generation_nginx(request: AnsibleInstallNginx) -> Output:
    """Generate an Ansible project that installs nginx."""
    # Under TEST the generators are skipped entirely; the route only has to
    # validate the payload and answer.
    if os.environ.get("TEST"):
        return Output(output='output')

    _reset_ansible_dir()
    ansible_install_template(request, "nginx")
    return Output(output='output')


@app.post("/api/ansible-install/docker/")
async def ansible_install_generation_docker(request: AnsibleInstallDocker) -> Output:
    """Generate an Ansible project that installs Docker."""
    if os.environ.get("TEST"):
        return Output(output='output')

    _reset_ansible_dir()
    ansible_install_template(request, "docker")
    return Output(output='output')


@app.post("/api/ansible-install/kuber/")
async def ansible_install_generation_kuber(request: AnsibleInstallKuber) -> Output:
    """Generate an Ansible project that installs Kubernetes.

    After rendering the project, static Jinja2 config templates (which the
    generator does not create itself) are copied into the role directories.
    """
    if os.environ.get("TEST"):
        return Output(output='output')

    _reset_ansible_dir()
    ansible_install_template(request, "kuber")
    add_files_to_folder(files=['app/media/kuber_configs/resolv.conf.j2'],
                        folder='app/media/MyAnsible/roles/preinstall/templates/')
    add_files_to_folder(files=['app/media/kuber_configs/kubeadmcnf.yml.j2'],
                        folder='app/media/MyAnsible/roles/init_k8s/templates/')
    add_files_to_folder(files=['app/media/kuber_configs/kubeadmcnf-join.yml.j2'],
                        folder='app/media/MyAnsible/roles/join_master/templates/')
    return Output(output='output')
class TestIaCInastall:  # NOTE(review): name looks like a typo of "TestIaCInstall"; kept as-is (pytest discovers it either way)
    """Route tests for /api/IaC-install/ with the OpenAI client fully mocked."""

    def setup_method(self):
        # Fake OpenAI client whose chat-completion call returns a canned response.
        mock_client_instance = MagicMock()
        mock_client_instance.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content='Mocked OpenAI Response'))]
        )

        # Patch the GPT wrapper and the OpenAI constructor for this test's lifetime.
        self.mock_gpt_service = patch('app.main.gpt_service', return_value='Mocked GPT Response').start()
        self.mock_openai = patch('app.gpt_services.OpenAI', return_value=mock_client_instance).start()

        self.url = '/api/IaC-install/'

    def teardown_method(self):
        # Stop every patch started in setup_method so mocks don't leak.
        patch.stopall()

    def test_iac_install(self, client, iac_install_sample_input):
        # Valid payload (conftest fixture) should be accepted.
        response = client.post(self.url, json=iac_install_sample_input)
        assert response.status_code == 200

    def test_iac_install_invalid_input(self, client, iac_install_invalid_sample_input):
        # Invalid payload should be rejected by request-model validation.
        response = client.post(self.url, json=iac_install_invalid_sample_input)
        assert response.status_code == 422
class TestAnsibleInstall:
    """Route tests for the /api/ansible-install/* endpoints.

    Every collaborator that would touch the network or the filesystem (GPT
    service, OpenAI client, file I/O, shutil) is patched in setup_method.
    """

    def setup_method(self):
        # Fake OpenAI client whose chat-completion call returns a canned response.
        mock_client_instance = MagicMock()
        mock_client_instance.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content='Mocked OpenAI Response'))]
        )

        self.mock_execute_python_file = patch('app.main.execute_pythonfile').start()
        self.mock_edit_directory_generator = patch('app.main.edit_directory_generator').start()
        self.mock_gpt_service = patch('app.main.gpt_service', return_value='Mocked GPT Response').start()
        self.mock_openai = patch('app.gpt_services.OpenAI', return_value=mock_client_instance).start()
        self.mock_builtin_open = patch('builtins.open', mock_open()).start()
        self.mock_os_makedirs = patch('os.makedirs').start()
        self.mock_os_path_join = patch('os.path.join', side_effect=lambda *args: '/'.join(args)).start()
        self.mock_shutil_copy = patch('shutil.copy').start()
        self.mock_shutil_rmtree = patch('shutil.rmtree').start()

        self.ansible_nginx_url = '/api/ansible-install/nginx/'
        self.ansible_docker_url = '/api/ansible-install/docker/'
        self.ansible_kuber_url = '/api/ansible-install/kuber/'

    def teardown_method(self):
        # Bug fix: the patches started above (including builtins.open and
        # os.makedirs!) were never stopped, so they leaked into every test
        # that ran after this class. Sibling test classes already do this.
        patch.stopall()

    def test_ansible_nginx_input(self, client, ansible_nginx_sample_input):
        response = client.post(self.ansible_nginx_url, json=ansible_nginx_sample_input)
        assert response.status_code == 200

    def test_ansible_nginx_invalid_input(self, client, ansible_nginx_invalid_sample_input):
        response = client.post(self.ansible_nginx_url, json=ansible_nginx_invalid_sample_input)
        assert response.status_code == 422

    def test_ansible_docker_input(self, client, ansible_docker_sample_input):
        response = client.post(self.ansible_docker_url, json=ansible_docker_sample_input)
        assert response.status_code == 200

    def test_ansible_docker_invalid_input(self, client, ansible_docker_invalid_sample_input):
        response = client.post(self.ansible_docker_url, json=ansible_docker_invalid_sample_input)
        assert response.status_code == 422

    def test_ansible_kuber_input(self, client, ansible_kuber_sample_input):
        response = client.post(self.ansible_kuber_url, json=ansible_kuber_sample_input)
        assert response.status_code == 200

    def test_ansible_kuber_invalid_input(self, client, ansible_kuber_invalid_sample_input):
        response = client.post(self.ansible_kuber_url, json=ansible_kuber_invalid_sample_input)
        assert response.status_code == 422
class TestIaCTemplates:
    """Route tests for the /api/IaC-template/* endpoints with GPT and file I/O mocked."""

    def setup_method(self):
        # Fake OpenAI client whose chat-completion call returns a canned response.
        mock_client_instance = MagicMock()
        mock_client_instance.chat.completions.create.return_value = MagicMock(
            choices=[MagicMock(message=MagicMock(content='Mocked OpenAI Response'))]
        )

        # Patch collaborators that would run code or touch the filesystem.
        self.mock_execute_python_file = patch('app.main.execute_pythonfile').start()
        self.mock_edit_directory_generator = patch('app.main.edit_directory_generator').start()
        self.mock_gpt_service = patch('app.main.gpt_service', return_value='Mocked GPT Response').start()
        self.mock_openai = patch('app.gpt_services.OpenAI', return_value=mock_client_instance).start()
        self.mock_builtin_open = patch('builtins.open', mock_open()).start()

        # One URL per template backend under test.
        self.iac_template_docker_url = '/api/IaC-template/docker'
        self.iac_template_ec2_url = '/api/IaC-template/aws/ec2'
        self.iac_template_s3_url = '/api/IaC-template/aws/s3'
        self.iac_template_iam_url = '/api/IaC-template/aws/iam'
        self.iac_template_argocd_url = '/api/IaC-template/argocd'
        self.iac_template_elb_url = '/api/IaC-template/aws/elb'
        self.iac_template_efs_url = '/api/IaC-template/aws/efs'

    def teardown_method(self):
        # Stop every patch started in setup_method so mocks don't leak.
        patch.stopall()

    def test_iac_template_docker(self, client, iac_template_docker_sample_input):
        response = client.post(self.iac_template_docker_url, json=iac_template_docker_sample_input)
        assert response.status_code == 200

    def test_iac_template_ec2(self, client, iac_template_ec2_sample_input):
        response = client.post(self.iac_template_ec2_url, json=iac_template_ec2_sample_input)
        assert response.status_code == 200

    def test_iac_template_s3(self, client, iac_template_s3_sample_input):
        response = client.post(self.iac_template_s3_url, json=iac_template_s3_sample_input)
        assert response.status_code == 200

    def test_iac_template_iam(self, client, iac_template_iam_sample_input):
        response = client.post(self.iac_template_iam_url, json=iac_template_iam_sample_input)
        assert response.status_code == 200

    def test_iac_template_argocd(self, client, iac_template_argocd_sample_input):
        response = client.post(self.iac_template_argocd_url, json=iac_template_argocd_sample_input)
        assert response.status_code == 200

    def test_iac_template_elb(self, client, iac_template_elb_sample_input):
        response = client.post(self.iac_template_elb_url, json=iac_template_elb_sample_input)
        assert response.status_code == 200

    def test_iac_template_efs(self, client, iac_template_efs_sample_input):
        response = client.post(self.iac_template_efs_url, json=iac_template_efs_sample_input)
        assert response.status_code == 200
def mysql_template(input):
    """Write a Grafana provisioning file for a MySQL datasource.

    Renders ``app/media/MyGrafana/mysql.yml``.  When ``input.tls`` is set,
    TLS options are added to ``jsonData`` and the client cert / CA cert go
    into ``secureJsonData`` alongside the password.

    Args:
        input: request model with ``name``, ``url``, ``user``, ``password``,
            ``database``, connection-pool settings and an optional ``tls``
            object (``tlsAuth``, ``tlsSkipVerify``, ``tlsClientCert``,
            ``tlsCACert``).
    """
    # Build the common datasource body once; TLS keys are layered on top
    # instead of duplicating the entire dict in both branches.
    datasource = {
        "name": input.name,
        "type": "mysql",
        "url": input.url,
        "user": input.user,
        "editable": input.editable,
        "jsonData": {
            "database": input.database,
            "maxOpenConns": input.maxOpenConns,
            "maxIdleConns": input.maxIdleConns,
            "maxIdleConnsAuto": input.maxIdleConnsAuto,
            "connMaxLifetime": input.connMaxLifetime,
        },
        "secureJsonData": {
            "password": input.password,
        },
    }
    if input.tls is not None:
        datasource["jsonData"]["tlsAuth"] = input.tls.tlsAuth
        datasource["jsonData"]["tlsSkipVerify"] = input.tls.tlsSkipVerify
        datasource["secureJsonData"]["tlsClientCert"] = input.tls.tlsClientCert
        datasource["secureJsonData"]["tlsCACert"] = input.tls.tlsCACert

    json_template = {"apiVersion": 1, "datasources": [datasource]}

    out_dir = "app/media/MyGrafana"
    # Bug fix: os.makedirs(dir) raised FileExistsError when the directory
    # already existed.
    os.makedirs(out_dir, exist_ok=True)
    path = os.path.join(out_dir, "mysql.yml")  # was computed then discarded
    with open(path, "w") as fh:  # was left open and never closed
        yaml.dump(json_template, fh, default_flow_style=False, sort_keys=False)
class Persistance(BaseModel):
    """Persistent-volume (PVC) settings for a pod.

    NOTE(review): class name looks like a typo of "Persistence"; kept as-is
    because it is part of the request schema.
    """

    size: str = "1Gi"  # volume size, e.g. "1Gi", "512Mi", "2Ti"
    accessModes: str = "ReadWriteOnce"  # Kubernetes PVC access mode

    @validator("size")
    def validate_size(cls, value):
        # Accept only "<digits>" followed by a binary unit suffix Gi/Mi/Ti.
        if not isinstance(value, str) or not value.endswith(('Gi', 'Mi', 'Ti')) or value[:-2].isdigit() == False:
            raise ValueError("Size must be a valid string ending with 'Gi', 'Mi', or 'Ti'.")
        return value

    @validator("accessModes")
    def validate_access_modes(cls, value):
        # Restrict to the standard Kubernetes PVC access modes.
        allowed_modes = ['ReadWriteOnce', 'ReadOnlyMany', 'ReadWriteMany']
        if value not in allowed_modes:
            raise ValueError(f"Access mode must be one of {allowed_modes}.")
        return value

class Environment(BaseModel):
    """A single name/value environment variable injected into a pod."""

    name: str = "ENV1"
    value: str = "Hi"

    @validator("name")
    def validate_name(cls, value):
        # Reject empty names (pydantic already guarantees str type).
        if not value:
            raise ValueError("Name is str")
        return value

    @validator("value")
    def validate_value(cls, value):
        # Reject empty values.
        if not value:
            raise ValueError("Value is str")
        return value

class Ingress(BaseModel):
    """Optional ingress exposure for a pod."""

    enabled: bool = False  # when False, no ingress.yaml is generated for the pod
    host: str = "www.example.com"  # virtual host routed to the pod's service

    @validator("host")
    def validate_host(cls, value):
        # Reject empty host strings.
        if not value:
            raise ValueError("Host must be a string.")
        return value




class Pod(BaseModel):
    """One workload in the generated Helm chart."""

    name: str = "web"  # becomes the template directory name
    image: str = "nginx"  # container image
    target_port: int = 80  # container/service port
    replicas: int = 1
    persistance: Persistance  # PVC settings (schema typo kept, see Persistance)
    environment: List[Environment]
    stateless: bool = True  # True -> deployment.yaml, False -> statefulset.yaml
    ingress: Ingress

    @validator("name")
    def validate_name(cls, value):
        if not value:
            raise ValueError("Name is str")
        return value

    @validator("image")
    def validate_image(cls, value):
        if not value:
            raise ValueError("Image is str")
        return value

    @validator("target_port")
    def validate_target_port(cls, value):
        # Must be a valid TCP/UDP port number.
        if value <= 0 or value > 65535:
            raise ValueError("Target port must be between 1 and 65535.")
        return value

    @validator("replicas")
    def validate_replicas(cls, value):
        if value < 1:
            raise ValueError("Replicas must be at least 1.")
        return value

class HelmTemplateGeneration(BaseModel):
    """Request payload for the Helm chart generation endpoint."""

    api_version: int = 2  # Chart.yaml apiVersion (rendered as "v<api_version>")
    pods: List[Pod]

    @validator("api_version")
    def validate_api_version(cls, value):
        if value < 1:
            raise ValueError("API version must be a positive integer.")
        return value

    @validator("pods", each_item=True)
    def validate_pods(cls, value):
        # Defensive check; pydantic normally coerces list items to Pod already.
        if not isinstance(value, Pod):
            raise ValueError("Each item in pods must be a Pod instance.")
        return value
- block:
    - name: Copy the join command to server location
      copy:
        src: roles/join_master/files/join-command
        dest: /root/join-command.sh
        mode: "0777"
  when:
    - inventory_hostname != groups['k8s_masters'][0]
    - inventory_hostname in groups['k8s_masters']
    - not kubeadm_already_run.stat.exists

- block:
    - name: get certificate key
      shell: kubeadm init phase upload-certs --upload-certs --config=/root/kubeadm-config.yaml
      register: kubeadm_cert_key

    - name: Print certificate key
      debug:
        msg: "{{ kubeadm_cert_key.stdout_lines[2] }}"

    - name: register the cert key
      set_fact:
        control_plane_certkey: "{{ kubeadm_cert_key.stdout_lines[2] }}"

  when:
    # Bug fix: was `inventory_hostname in groups['k8s_masters'][0]`, which
    # does *substring* matching against a single hostname string (e.g.
    # "master" in "master1" is true). Compare for equality instead.
    - inventory_hostname == groups['k8s_masters'][0]
  delegate_to: "{{ groups['k8s_masters'][0] }}"
  run_once: false
  delegate_facts: true

- name: Join | Join control-plane to cluster
  command: "sh /root/join-command.sh --control-plane --certificate-key={{ hostvars[groups['k8s_masters'][0]].control_plane_certkey }} --cri-socket={{ cri_socket }}"
  when:
    - inventory_hostname != groups['k8s_masters'][0]
    - inventory_hostname in groups['k8s_masters']
    - not kubeadm_already_run.stat.exists
name: Release Version  # typo fix: was "Realse Version"

on:
  push:
    branches:
      - master
      - dev
    paths:
      - 'app/**'
      - 'crawl/**'

jobs:
  releaseGithub:
    if: github.ref == 'refs/heads/master'
    runs-on: ubuntu-22.04
    permissions:
      contents: write
      pull-requests: write
    steps:
      - name: Bump version and push tag
        id: bump
        uses: mathieudutour/github-tag-action@v6.2
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          default_bump: patch

      - name: Build Changelog
        id: github_release
        uses: mikepenz/release-changelog-builder-action@v5
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          # Bug fix: fromTag/toTag are action *inputs* and belong under
          # `with:`; placed under `env:` they were silently ignored.
          fromTag: ${{ steps.bump.outputs.previous_tag }}
          toTag: ${{ steps.bump.outputs.new_tag }}

      - name: Create Release
        uses: softprops/action-gh-release@v2
        with:
          # Bug fix: use the changelog produced by the "Build Changelog" step
          # above; referencing steps.bump made that step dead code.
          body: ${{ steps.github_release.outputs.changelog }}
          tag_name: ${{ steps.bump.outputs.new_tag }}

  releaseDockerWithDeployment:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout code
        uses: actions/checkout@v3

      - name: Log in to DockerHub
        uses: docker/login-action@v2
        with:
          username: ${{ secrets.DOCKER_USERNAME }}
          password: ${{ secrets.DOCKER_PASSWORD }}

      - name: Build and Push Docker Image
        uses: docker/build-push-action@v5
        with:
          context: .
          push: true
          tags: |
            ${{ secrets.DOCKER_USERNAME }}/devopsgpt-fastapi:${{ github.ref_name }}

      - name: Deploying engine on production server
        if: github.ref == 'refs/heads/master'
        uses: appleboy/ssh-action@v1.2.0
        with:
          host: ${{ secrets.HOST_PROD }}
          username: ${{ secrets.HOST_USERNAME_PROD }}
          password: ${{ secrets.HOST_PASSWORD_PROD }}
          port: ${{ secrets.HOST_PORT }}
          script: docker compose -f devopsgpt-fastapi/docker-compose-fastapi.yml down fastapi_prod && docker compose -f devopsgpt-fastapi/docker-compose-fastapi.yml up -d --pull always fastapi_prod

      - name: Deploying engine on dev server
        if: github.ref == 'refs/heads/dev'
        uses: appleboy/ssh-action@v1.2.0
        with:
          host: ${{ secrets.HOST_DEV }}
          username: ${{ secrets.HOST_USERNAME_DEV }}
          password: ${{ secrets.HOST_PASSWORD_DEV }}
          port: ${{ secrets.HOST_PORT }}
          script: docker compose -f devopsgpt-fastapi/docker-compose-fastapi.yml down fastapi_dev && docker compose -f devopsgpt-fastapi/docker-compose-fastapi.yml up -d --pull always fastapi_dev

      - name: Removing dangle images on production server
        if: github.ref == 'refs/heads/master'
        uses: appleboy/ssh-action@v1.2.0
        with:
          host: ${{ secrets.HOST_PROD }}
          username: ${{ secrets.HOST_USERNAME_PROD }}
          password: ${{ secrets.HOST_PASSWORD_PROD }}
          port: ${{ secrets.HOST_PORT }}
          script: docker image prune -f

      - name: Removing dangle images on dev server
        if: github.ref == 'refs/heads/dev'
        uses: appleboy/ssh-action@v1.2.0
        with:
          host: ${{ secrets.HOST_DEV }}
          username: ${{ secrets.HOST_USERNAME_DEV }}
          password: ${{ secrets.HOST_PASSWORD_DEV }}
          port: ${{ secrets.HOST_PORT }}
          script: docker image prune -f
def _reset_grafana_dir() -> None:
    """Delete any previously generated Grafana datasource bundle.

    Extracted because the identical cleanup block was copy-pasted into all
    eight handlers below.
    """
    dir = 'app/media/MyGrafana'
    if os.path.exists(dir):
        shutil.rmtree(dir)


@app.post("/api/grafana/alertmanager")
async def alertmanager_template_route(request: AlertManagerInput) -> Output:
    """Generate a Grafana Alertmanager datasource provisioning file."""
    _reset_grafana_dir()
    alert_manager_template(request)
    return Output(output='output')


@app.post("/api/grafana/elasticsearch")
async def elastic_template_route(request: ElasticSearchInput) -> Output:
    """Generate a Grafana Elasticsearch datasource provisioning file."""
    _reset_grafana_dir()
    elasticsearch_template(request)
    return Output(output='output')


@app.post("/api/grafana/loki")
async def loki_template_route(request: LokiInput) -> Output:
    """Generate a Grafana Loki datasource provisioning file."""
    _reset_grafana_dir()
    loki_template(request)
    return Output(output='output')


@app.post("/api/grafana/mimir")
async def mimir_template_route(request: MimirInput) -> Output:
    """Generate a Grafana Mimir datasource provisioning file."""
    _reset_grafana_dir()
    mimir_template(request)
    return Output(output='output')


@app.post("/api/grafana/mysql")
async def mysql_template_route(request: MysqlInput) -> Output:
    """Generate a Grafana MySQL datasource provisioning file."""
    _reset_grafana_dir()
    mysql_template(request)
    return Output(output='output')


@app.post("/api/grafana/postgres")
async def postgres_template_route(request: PostgresInput) -> Output:
    """Generate a Grafana PostgreSQL datasource provisioning file."""
    _reset_grafana_dir()
    postgres_template(request)
    return Output(output='output')


@app.post("/api/grafana/prometheus")
async def prometheus_template_route(request: PrometheusInput) -> Output:
    """Generate a Grafana Prometheus datasource provisioning file."""
    # NOTE(review): the generator is named `pormetheus_template` (typo) in
    # app/template_generators/grafana_data_sources/prometheus.py; the call
    # must match that name until the generator itself is renamed.
    _reset_grafana_dir()
    pormetheus_template(request)
    return Output(output='output')


@app.post("/api/grafana/tempo")
async def tempo_template_route(request: TempoInput) -> Output:
    """Generate a Grafana Tempo datasource provisioning file."""
    _reset_grafana_dir()
    tempo_template(request)
    return Output(output='output')
f""" 25 | generate a clear shell script about installation {input.service} in {input.os} based on {input.service} document. 26 | without any additional note. just script for installation. please consider new lines without any additional comment. 27 | 28 | """ 29 | return prompt 30 | 31 | 32 | 33 | def helm_template_generator(input : HelmTemplateGeneration) -> str: 34 | 35 | templates = [i.name for i in input.pods] 36 | docker_images = [{i.name:i.image} for i in input.pods] 37 | target_ports = [{i.name:i.target_port} for i in input.pods] 38 | replicas_ = [{i.name:i.replicas} for i in input.pods] 39 | persistance = [{i.name:i.persistance} for i in input.pods] 40 | envs = [{i.name:i.environment} for i in input.pods] 41 | status = [{i.name:i.stateless} for i in input.pods] 42 | ingress_ = [{i.name:i.ingress} for i in input.pods] 43 | 44 | prompt = f""" 45 | 46 | generate a correct python code to generate a helm project structure (project name: app/media/MyHelm) 47 | based on the latest version of helm chart. Only provide Python code, no explanations or markdown formatting. 48 | just generate a code to generate a folder as project template. don't consider base_dir 49 | 50 | consider these directories : [charts/,templates/] 51 | consider these files : Chart.yaml & values.yaml 52 | in the templates/ directory create these directories: {templates}. 53 | set the api_version in the Chart.yaml : v{input.api_version}. 54 | initialize values.yaml based on these dict of templates and docker images, 55 | please provide other informations related to values.yaml : {docker_images}, 56 | the target port of pods in the dict format are here : {target_ports} 57 | for each template, initialize this file => service.yaml. 58 | set replicas of pods following this dict format : {replicas_}. 
59 | set persistance (pvc) of pods following this dict fomrat : {persistance} 60 | set environment variables of pods following this dict format : {envs} based on helm standard environment setting.( 61 | for example something like that: 62 | env: 63 | name=value 64 | ) 65 | initialize ingress with a default host for pod if the pod ingress is true in here {ingress_}. 66 | set stateless in pod based on {status}. 67 | 68 | 69 | Based on values.yaml (Dont put json in .yaml files), create all necessary Kubernetes templates in the templates directory: 70 | if stateless.enabled is true, create deployment.yaml; if stateless.enabled is false, create statefulset.yaml. 71 | If a persistence block exists, include pvc.yaml. If the ingress block is defined and ingress.enabled 72 | is true, create ingress.yaml. if ingress.enabled is false, do not create ingress.yaml. Always create 73 | secrets.yaml for secure data storage. 74 | 75 | Ensure each template is fully parameterized to match values from values.yaml for flexible configuration. 76 | 77 | in the final stage, put helpers.tpl in all templates and set the content based on information given. 78 | """ 79 | return prompt 80 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ### Commit message guidelines 2 | 3 | #### Atomic commits 4 | 5 | If possible, make [atomic commits](https://en.wikipedia.org/wiki/Atomic_commit), which means: 6 | 7 | - a commit should contain exactly one self-contained functional change 8 | - a functional change should be contained in exactly one commit 9 | - a commit should not create an inconsistent state (such as test errors, linting errors, partial fix, feature without documentation, etc...) 10 | 11 | A complex feature can be broken down into multiple commits as long as each one maintains a consistent state and consists of a self-contained change. 
#### Commit message format

Each commit message consists of a **header**, a **body** and a **footer**.
The header has a special format that includes a **type**, a **scope** and a **subject**:

```commit
<type>(<scope>): <subject>

<body>

<footer>
```