├── .gitignore ├── templates ├── NOTES.txt ├── argo-workflow-template-asset │ ├── level1 │ │ ├── subdomain │ │ │ ├── config.yaml │ │ │ ├── logstash-config.yaml │ │ │ ├── probe-from-response.yaml │ │ │ ├── save.yaml │ │ │ └── subdomain.yaml │ │ ├── probe-tls.yaml │ │ ├── url │ │ │ ├── config.yaml │ │ │ ├── get-url.yaml │ │ │ └── get-web-service.yaml │ │ ├── cidr │ │ │ ├── config.yaml │ │ │ └── get-cidr.yaml │ │ ├── port │ │ │ ├── save-fofa-result │ │ │ │ ├── save.yaml │ │ │ │ └── logstash-config.yaml │ │ │ ├── probe-port.yaml │ │ │ └── fofa.yaml │ │ ├── get-ip.yaml │ │ ├── second-level-domain │ │ │ ├── get-from-es.yaml │ │ │ ├── save.yaml │ │ │ ├── probe-org-sld.yaml │ │ │ ├── get-from-console.yaml │ │ │ └── logstash-config.yaml │ │ ├── screenshot.yaml │ │ ├── httpx.yaml │ │ ├── logstash │ │ │ ├── logstash.yaml │ │ │ └── config.yaml │ │ └── katana.yaml │ ├── util │ │ ├── install-tools │ │ │ ├── pvc.yaml │ │ │ └── install-tool.yaml │ │ ├── es-query │ │ │ ├── config.yaml │ │ │ └── general-query.yaml │ │ ├── user-input.yaml │ │ └── cidr.yaml │ ├── cron │ │ ├── subdomain │ │ │ ├── org.yaml │ │ │ └── from-response.yaml │ │ ├── tls │ │ │ └── tls.yaml │ │ ├── proxify │ │ │ └── delete.yaml │ │ ├── web-service │ │ │ └── org.yaml │ │ └── org │ │ │ ├── update-org-asset-map.yaml │ │ │ └── config.yaml │ ├── level2 │ │ ├── probe-favicon.yaml │ │ ├── probe-tls.yaml │ │ ├── subdomain.yaml │ │ └── webservice │ │ │ ├── probe-webservice-from-port-asset.yaml │ │ │ ├── probe-webservice-from-es-subdomain.yaml │ │ │ ├── fofa.yaml │ │ │ ├── ip.yaml │ │ │ └── domain.yaml │ └── level3 │ │ ├── webservice.yaml │ │ ├── probe-cidr-webservice.yaml │ │ └── subdomain │ │ └── probe-subdomain.yaml ├── console │ ├── pvc.yaml │ ├── console.yaml │ ├── mysql.yaml │ └── config.yaml ├── xray │ ├── pvc.yaml │ ├── xray-webhook.yaml │ ├── xray-reverse.yaml │ └── xray-proxy.yaml ├── argo-workflow-template │ ├── cron │ │ ├── nuclei-template-update │ │ │ ├── pvc.yaml │ │ │ ├── cron.yaml │ │ │ └── util.yaml │ │ 
├── nuclei.yaml │ │ └── crawl-scan │ │ │ └── org.yaml │ ├── retest.yaml │ ├── util │ │ └── util.yaml │ ├── level1 │ │ ├── webscan │ │ │ └── nuclei-fuzz.yaml │ │ ├── save-alarm.yaml │ │ ├── identify-ms.yaml │ │ └── nuclei.yaml │ ├── level2 │ │ ├── nuclei │ │ │ ├── es-nuclei.yaml │ │ │ ├── fofa-nuclei.yaml │ │ │ └── domain-nuclei.yaml │ │ ├── katana │ │ │ ├── es-katana.yaml │ │ │ └── domain-katana.yaml │ │ ├── es-webscan.yaml │ │ └── domain-ms.yaml │ └── level3 │ │ ├── domain-ms.yaml │ │ ├── nuclei.yaml │ │ └── crawl-scan.yaml ├── _helper.tpl ├── argo-workflow │ ├── rb.yaml │ ├── minio.yaml │ └── config.yaml ├── elk │ ├── kibana.yaml │ ├── pipeline.yaml │ └── es-configmap.yaml ├── browser │ └── browserless.yaml ├── proxify │ └── egress.yaml ├── ingress.yaml └── crawler │ ├── coredns-config.yaml │ └── crawler.yaml ├── .helmignore ├── Chart.lock ├── README.md ├── LICENSE ├── README_ENG.md ├── Chart.yaml └── values.yaml /.gitignore: -------------------------------------------------------------------------------- 1 | charts/* 2 | .idea/* 3 | -------------------------------------------------------------------------------- /templates/NOTES.txt: -------------------------------------------------------------------------------- 1 | 1. 
Get Console Server external IP/domain by running: 2 | 3 | kubectl describe ingress console -n {{.Release.Namespace}} 4 | 5 | docs see https://github.com/leveryd-asm/asm -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/level1/subdomain/config.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: config-nuclei 5 | data: 6 | provider-config.yaml: | 7 | fofa: 8 | - {{.Values.fofa_email}}:{{.Values.fofa_key}} -------------------------------------------------------------------------------- /templates/console/pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | finalizers: 5 | - kubernetes.io/pvc-protection 6 | name: db-data 7 | spec: 8 | accessModes: 9 | - ReadWriteOnce 10 | resources: 11 | requests: 12 | storage: 30Gi 13 | storageClassName: local 14 | -------------------------------------------------------------------------------- /templates/xray/pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | finalizers: 5 | - kubernetes.io/pvc-protection 6 | name: xray-reverse-db 7 | spec: 8 | accessModes: 9 | - ReadWriteOnce 10 | resources: 11 | requests: 12 | storage: 1Gi 13 | storageClassName: local 14 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/util/install-tools/pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | finalizers: 5 | - kubernetes.io/pvc-protection 6 | name: tools 7 | spec: 8 | accessModes: 9 | - ReadWriteOnce 10 | resources: 11 | requests: 12 | storage: 10Gi 13 | storageClassName: local 14 | 
-------------------------------------------------------------------------------- /templates/argo-workflow-template/cron/nuclei-template-update/pvc.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: PersistentVolumeClaim 3 | metadata: 4 | finalizers: 5 | - kubernetes.io/pvc-protection 6 | name: nuclei-template 7 | spec: 8 | accessModes: 9 | - ReadWriteOnce 10 | resources: 11 | requests: 12 | storage: 1Gi 13 | storageClassName: local 14 | 15 | -------------------------------------------------------------------------------- /templates/_helper.tpl: -------------------------------------------------------------------------------- 1 | {{ define "kafkaURL" }} 2 | {{- if .Values.kafka_broker_service -}} 3 | {{- .Values.kafka_broker_service -}} 4 | {{- else -}} 5 | release-name-kafka-headless.{{- .Release.Namespace -}}.svc.cluster.local:9092 6 | {{- end -}} 7 | {{- end -}} 8 | 9 | {{ define "interactsh_server" }} 10 | {{- if .Values.interactsh_server -}} 11 | -iserver '{{ .Values.interactsh_server }}' 12 | {{- end -}} 13 | {{- end -}} 14 | -------------------------------------------------------------------------------- /.helmignore: -------------------------------------------------------------------------------- 1 | # Patterns to ignore when building packages. 2 | # This supports shell glob matching, relative path matching, and 3 | # negation (prefixed with !). Only one pattern per line. 
4 | .DS_Store 5 | # Common VCS dirs 6 | .git/ 7 | .gitignore 8 | .bzr/ 9 | .bzrignore 10 | .hg/ 11 | .hgignore 12 | .svn/ 13 | # Common backup files 14 | *.swp 15 | *.bak 16 | *.tmp 17 | *.orig 18 | *~ 19 | # Various IDEs 20 | .project 21 | .idea/ 22 | *.tmproj 23 | .vscode/ 24 | -------------------------------------------------------------------------------- /templates/argo-workflow-template/retest.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: retest 5 | spec: 6 | templates: 7 | - name: xray告警复测 8 | inputs: 9 | parameters: 10 | - name: url 11 | - name: plugin 12 | script: 13 | image: leveryd/xray:v2023.2.21 14 | command: [ "/bin/bash", "-c", "/tools/xray webscan --plugin {{`{{inputs.parameters.plugin}}`}} --url {{`{{inputs.parameters.url}}`}}"] 15 | -------------------------------------------------------------------------------- /templates/argo-workflow/rb.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: rbac.authorization.k8s.io/v1 2 | kind: ClusterRoleBinding 3 | metadata: 4 | name: admin-{{.Release.Namespace}} 5 | roleRef: 6 | apiGroup: rbac.authorization.k8s.io 7 | kind: ClusterRole 8 | {{ if .Values.user_can_deploy_workflowtemplate }} 9 | name: admin 10 | {{ else }} 11 | name: admin-no-create-template 12 | {{ end }} 13 | subjects: 14 | - kind: ServiceAccount 15 | name: default 16 | namespace: {{ .Release.Namespace }} 17 | --- -------------------------------------------------------------------------------- /templates/elk/kibana.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: networking.k8s.io/v1 2 | kind: Ingress 3 | metadata: 4 | name: kibana 5 | spec: 6 | rules: 7 | - host: kibana.{{.Values.console_domain}} 8 | http: 9 | paths: 10 | - path: / # match all paths that begin with /argo/ 11 | pathType: ImplementationSpecific 12 | 
backend: 13 | service: 14 | name: release-name-kibana # name of the service 15 | port: 16 | number: 5601 17 | -------------------------------------------------------------------------------- /Chart.lock: -------------------------------------------------------------------------------- 1 | dependencies: 2 | - name: kafka 3 | repository: https://charts.bitnami.com/bitnami 4 | version: 20.0.2 5 | - name: argo-workflows 6 | repository: https://argoproj.github.io/argo-helm 7 | version: 0.27.0 8 | - name: elasticsearch 9 | repository: https://helm.elastic.co 10 | version: 7.17.3 11 | - name: kibana 12 | repository: https://helm.elastic.co 13 | version: 7.17.3 14 | digest: sha256:80d403586fef0030d155834d94f4b5d1c1efb59aa8be48b8c2f1e6fa754e25b4 15 | generated: "2023-05-18T15:13:08.234074+08:00" 16 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/cron/subdomain/org.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: cron-probe-subdomain-org 5 | spec: 6 | schedule: "0 */1 * * *" # run every hour 7 | suspend: false 8 | concurrencyPolicy: Forbid 9 | workflowSpec: 10 | workflowTemplateRef: 11 | name: probe-asset-level3-subdomain 12 | arguments: 13 | parameters: 14 | - name: org 15 | value: '* AND NOT enable:0' 16 | - name: get-subdomain-way 17 | value: "subfinder" 18 | entrypoint: 获取组织的二级域名-获取子域名-保存结果 19 | -------------------------------------------------------------------------------- /templates/argo-workflow-template/cron/nuclei-template-update/cron.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: update-nuclei-template 5 | spec: 6 | # run everyd two hour 7 | schedule: "0 */2 * * *" 8 | suspend: false 9 | workflowSpec: 10 | entrypoint: update-nuclei-template 11 | 
workflowTemplateRef: 12 | name: util-update-nuclei-template 13 | arguments: 14 | parameters: 15 | - name: force-update 16 | value: false 17 | podGC: 18 | strategy: OnPodCompletion 19 | ttlStrategy: 20 | secondsAfterCompletion: 300 -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/level1/probe-tls.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: probe-asset-level1-tls 5 | spec: 6 | arguments: {} 7 | templates: 8 | - name: probe-tls-from-urlfile 9 | inputs: 10 | artifacts: 11 | - name: hosts 12 | path: /tmp/hosts 13 | outputs: 14 | artifacts: 15 | - name: result 16 | path: /tmp/tls.txt 17 | container: 18 | image: projectdiscovery/tlsx:v1.0.4 19 | command: 20 | - 'sh' 21 | args: 22 | - "-c" 23 | - "tlsx -l /tmp/hosts -tv -cipher -wc -tps -ex -ss -mm -re -so -json -o /tmp/tls.txt" 24 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/level1/url/config.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: logstash-config-url 5 | data: 6 | url.conf: | 7 | input { 8 | # Read all documents from Elasticsearch matching the given query 9 | elasticsearch { 10 | hosts => "elasticsearch-master:9200" 11 | index => "web-service" 12 | query => '{ 13 | "_source": "url", 14 | "query": { 15 | "query_string": { 16 | "query": "${QUERY}" 17 | } 18 | } 19 | }' 20 | } 21 | } 22 | 23 | output { 24 | file { 25 | path => "/tmp/url.txt" 26 | codec => line { 27 | format => "%{url}" 28 | } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/util/es-query/config.yaml: -------------------------------------------------------------------------------- 1 | 
apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: logstash-config-es-general-query 5 | data: 6 | general_query.conf: | 7 | input { 8 | # Read all documents from Elasticsearch matching the given query 9 | elasticsearch { 10 | hosts => "elasticsearch-master:9200" 11 | index => "${INDEX}" 12 | query => '{ 13 | "_source": "${SOURCE}", 14 | "query": { 15 | "query_string": { 16 | "query": "${QUERY}" 17 | } 18 | }, 19 | "size": ${SIZE} 20 | }' 21 | } 22 | } 23 | output { 24 | 25 | file { 26 | path => "/tmp/result.txt" 27 | codec => line { 28 | format => "%{${FORMAT_SOURCE}}" 29 | } 30 | } 31 | } -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/cron/tls/tls.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: cron-probe-tls-new 5 | spec: 6 | schedule: "0 */1 * * *" 7 | suspend: false 8 | workflowSpec: 9 | workflowTemplateRef: 10 | name: level2-probe-tls 11 | arguments: 12 | parameters: 13 | - name: q 14 | value: 'first_create_time:[now-1h TO now] AND scheme:https' 15 | entrypoint: url-from-es 16 | --- 17 | apiVersion: argoproj.io/v1alpha1 18 | kind: CronWorkflow 19 | metadata: 20 | name: cron-probe-tls-all 21 | spec: 22 | schedule: "0 0 */3 * *" 23 | suspend: false 24 | workflowSpec: 25 | workflowTemplateRef: 26 | name: level2-probe-tls 27 | arguments: 28 | parameters: 29 | - name: q 30 | value: "scheme:https AND NOT enable:0" 31 | entrypoint: url-from-es -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/level1/cidr/config.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: v1 2 | kind: ConfigMap 3 | metadata: 4 | name: logstash-config-cidr 5 | data: 6 | asn.conf: | 7 | input { 8 | # Read all documents from Elasticsearch matching the given query 9 | 
elasticsearch { 10 | hosts => "elasticsearch-master:9200" 11 | index => "web-service" 12 | query => '{ 13 | "_source": "asn.as-range", 14 | "query": { 15 | "query_string": { 16 | "query": "INPUT_QUERY" 17 | } 18 | } 19 | }' 20 | } 21 | } 22 | output { 23 | file { 24 | path => "/tmp/json.txt" 25 | codec => json_lines 26 | } 27 | 28 | file { 29 | path => "/tmp/asn.txt" 30 | codec => line { 31 | format => "%{[asn][as-range]}" 32 | } 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/level1/port/save-fofa-result/save.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: save-asset-level1-fofa-to-es 5 | spec: 6 | templates: 7 | - name: fofa 8 | volumes: 9 | - name: logstash-config-fofa 10 | configMap: 11 | name: logstash-config-fofa 12 | inputs: 13 | artifacts: 14 | - name: port 15 | path: /tmp/port.txt # host,port,ip,lastupdatetime 16 | - name: subdomain 17 | path: /tmp/subdomain.txt # host,lastupdatetime 18 | script: 19 | image: logstash:7.17.3 20 | command: 21 | - 'sh' 22 | source: | 23 | logstash -f /tmp/logstash/port.conf 24 | logstash -f /tmp/logstash/subdomain.conf 25 | volumeMounts: 26 | - name: logstash-config-fofa 27 | mountPath: /tmp/logstash -------------------------------------------------------------------------------- /templates/browser/browserless.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: browserless 6 | spec: 7 | replicas: 1 8 | template: 9 | metadata: 10 | labels: 11 | app: browserless 12 | spec: 13 | containers: 14 | - name: chrome 15 | image: browserless/chrome:1.57-puppeteer-13.1.3 16 | imagePullPolicy: IfNotPresent 17 | env: 18 | - name: "CONNECTION_TIMEOUT" 19 | value: "15000" 20 | resources: 21 | requests: 22 | cpu: 
"10m" 23 | limits: 24 | cpu: "1" 25 | selector: 26 | matchLabels: 27 | app: browserless 28 | --- 29 | apiVersion: v1 30 | kind: Service 31 | metadata: 32 | name: browserless-service 33 | spec: 34 | selector: 35 | app: browserless 36 | ports: 37 | - protocol: TCP 38 | port: 80 39 | targetPort: 3000 40 | -------------------------------------------------------------------------------- /templates/argo-workflow-template/cron/nuclei.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: cron-nuclei-scan-org-new 5 | spec: 6 | schedule: "*/20 * * * *" 7 | suspend: false 8 | workflowSpec: 9 | workflowTemplateRef: 10 | name: level2-es-nuclei 11 | arguments: 12 | parameters: 13 | - name: q 14 | value: 'first_create_time:[now-22m TO now] AND org:* AND NOT enable:0' 15 | entrypoint: es查询webservice获取url列表-nuclei扫描-保存结果 16 | --- 17 | apiVersion: argoproj.io/v1alpha1 18 | kind: CronWorkflow 19 | metadata: 20 | name: cron-nuclei-scan-org-all 21 | spec: 22 | schedule: "0 0 */7 * *" 23 | suspend: false 24 | workflowSpec: 25 | workflowTemplateRef: 26 | name: level2-es-nuclei 27 | arguments: 28 | parameters: 29 | - name: q 30 | value: 'org:* AND NOT enable:0' 31 | entrypoint: es查询webservice获取url列表-nuclei扫描-保存结果 32 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/level1/get-ip.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: get-asset-level1-ip 5 | spec: 6 | templates: 7 | - name: get-ip-from-es-by-query 8 | inputs: 9 | parameters: 10 | - name: q 11 | description: 'elasticsearch query, e.g. 
"*"' 12 | default: "*" 13 | outputs: 14 | artifacts: 15 | - name: result 16 | path: /tmp/ip.txt 17 | parameters: 18 | - name: count 19 | valueFrom: 20 | path: /tmp/count.txt 21 | script: 22 | image: leveryd/x-tool:v2023.1.16 23 | command: 24 | - 'sh' 25 | source: | 26 | x es -esURL http://elasticsearch-master:9200 -index web-service -num 10000 -q '{{`{{inputs.parameters.q}}`}}' -source host -of /tmp/ip.txt 27 | wc -l /tmp/ip.txt | awk '{print $1}' > /tmp/count.txt -------------------------------------------------------------------------------- /templates/argo-workflow-template/util/util.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: util 5 | spec: 6 | arguments: {} 7 | templates: 8 | - name: get-file-lines-count # 获取文件行数 9 | inputs: 10 | artifacts: 11 | - name: file 12 | path: /tmp/file 13 | container: 14 | image: ubuntu:lunar-20221207 15 | command: 16 | - "/bin/sh" 17 | - "-c" 18 | - "wc -l /tmp/file|awk '{print $1}'" 19 | 20 | - name: get-file-line-from-index # 根据文件行数获取文件内容 21 | inputs: 22 | artifacts: 23 | - name: file 24 | path: /tmp/file 25 | parameters: 26 | - name: index 27 | container: 28 | image: ubuntu:lunar-20221207 29 | command: ["awk"] 30 | args: 31 | - 'NR=={{`{{inputs.parameters.index}}+1`}}' 32 | - '/tmp/file' 33 | 34 | - name: generate-http-prefix 35 | data: 36 | -------------------------------------------------------------------------------- /templates/argo-workflow-template/level1/webscan/nuclei-fuzz.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: webscan-level1-fuzz 5 | spec: 6 | arguments: 7 | parameters: 8 | - name: tags 9 | default: "sqli,xss" 10 | templates: 11 | - name: fuzz 12 | inputs: 13 | artifacts: 14 | - name: hosts 15 | path: /tmp/host 16 | outputs: 17 | artifacts: 18 | - name: 
nuclei-result 19 | path: /tmp/nuclei-result.json 20 | script: 21 | image: 'projectdiscovery/nuclei:v2.9.4' 22 | command: [sh] 23 | source: | 24 | nuclei -duc -l /tmp/host -stats -tags {{`{{workflow.parameters.tags}} `}} -ni -o /tmp/nuclei-result.json 25 | volumeMounts: 26 | - name: template-dir 27 | mountPath: /root/nuclei-templates 28 | volumes: 29 | - name: template-dir 30 | persistentVolumeClaim: 31 | claimName: nuclei-template 32 | -------------------------------------------------------------------------------- /templates/argo-workflow-template-asset/cron/proxify/delete.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: cron-delete-proxify-index 5 | spec: 6 | schedule: "0 */1 * * *" # run every hour 7 | suspend: false 8 | workflowSpec: 9 | workflowTemplateRef: 10 | name: x-util-delete-proxify-index 11 | arguments: 12 | parameters: 13 | - name: q 14 | value: "timestamp:\\[now-1h%20TO%20now\\]" 15 | entrypoint: x-util-delete-proxify-index 16 | --- 17 | apiVersion: argoproj.io/v1alpha1 18 | kind: WorkflowTemplate 19 | metadata: 20 | name: x-util-delete-proxify-index 21 | spec: 22 | templates: 23 | - name: x-util-delete-proxify-index 24 | inputs: 25 | parameters: 26 | - name: q 27 | default: "*" 28 | script: 29 | image: alpine/curl:3.14 30 | command: [sh] 31 | source: | 32 | curl -X POST "{{.Values.kibana.elasticsearchHosts}}/proxify/_delete_by_query?q={{`{{inputs.parameters.q}}`}}" 33 | -------------------------------------------------------------------------------- /templates/argo-workflow-template/cron/crawl-scan/org.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: CronWorkflow 3 | metadata: 4 | name: cron-crawl-scan-new-org 5 | spec: 6 | schedule: "0 */1 * * *" 7 | suspend: false 8 | workflowSpec: 9 | workflowTemplateRef: 10 | name: level2-es-katana 11 | 
arguments: 12 | parameters: 13 | - name: q 14 | value: "first_create_time:[now-1h TO now] AND status-code:200 AND content-type:'html' AND words:>10 AND org:* AND NOT enable:0" 15 | entrypoint: 获取web服务-katana爬虫-xray扫描-保存结果 16 | --- 17 | apiVersion: argoproj.io/v1alpha1 18 | kind: CronWorkflow 19 | metadata: 20 | name: cron-crawl-scan-org-all 21 | spec: 22 | schedule: "0 0 */3 * *" 23 | suspend: false 24 | workflowSpec: 25 | workflowTemplateRef: 26 | name: level2-es-katana 27 | arguments: 28 | parameters: 29 | - name: q 30 | value: "status-code:200 AND content-type:'html' AND words:>10 AND org:* AND NOT enable:0" 31 | entrypoint: 获取web服务-katana爬虫-xray扫描-保存结果 32 | -------------------------------------------------------------------------------- /templates/proxify/egress.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: external-proxy 6 | spec: 7 | replicas: 1 8 | template: 9 | metadata: 10 | labels: 11 | app: external-proxy 12 | spec: 13 | containers: 14 | - name: proxy 15 | image: leveryd/proxify:v0.0.8 16 | ports: 17 | - containerPort: 10080 18 | command: 19 | - "sh" 20 | args: 21 | - "-c" 22 | - "ip=`nslookup elasticsearch-master|grep -i Address|grep -v ':53'|awk '{print $2}'` && proxify -elastic-address ${ip}:9200 -sa 0.0.0.0:10080" 23 | resources: 24 | limits: 25 | memory: 3000Mi 26 | selector: 27 | matchLabels: 28 | app: external-proxy 29 | --- 30 | apiVersion: v1 31 | kind: Service 32 | metadata: 33 | name: external-proxy-service 34 | spec: 35 | selector: 36 | app: external-proxy 37 | ports: 38 | - protocol: TCP 39 | port: 1080 40 | targetPort: 10080 41 | -------------------------------------------------------------------------------- /templates/xray/xray-webhook.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | apiVersion: apps/v1 3 | kind: Deployment 4 | metadata: 5 | name: xray-webhook 6 | spec: 7 | 
replicas: 1 8 | template: 9 | metadata: 10 | labels: 11 | app: xray-webhook 12 | spec: 13 | containers: 14 | - name: webhook 15 | image: leveryd/xray-webhook:v2022.12.12 16 | imagePullPolicy: IfNotPresent 17 | env: 18 | - name: WEBHOOK_URL 19 | value: {{.Values.weixin_webhook_url}} 20 | - name: api 21 | value: "http://{{.Values.console_api_service_name}}:{{.Values.console_api_service_port}}" 22 | ports: 23 | - containerPort: 8080 24 | selector: 25 | matchLabels: 26 | app: xray-webhook 27 | --- 28 | apiVersion: v1 29 | kind: Service 30 | metadata: 31 | name: {{.Values.xray_webhook_service}} 32 | spec: 33 | selector: 34 | app: xray-webhook 35 | ports: 36 | - protocol: TCP 37 | port: {{.Values.xray_webhook_port}} 38 | targetPort: 8080 39 | -------------------------------------------------------------------------------- /templates/argo-workflow-template/level2/nuclei/es-nuclei.yaml: -------------------------------------------------------------------------------- 1 | apiVersion: argoproj.io/v1alpha1 2 | kind: WorkflowTemplate 3 | metadata: 4 | name: level2-es-nuclei 5 | spec: 6 | templates: 7 | - name: es查询webservice获取url列表-nuclei扫描-保存结果 8 | inputs: 9 | parameters: 10 | - name: q 11 | description: 'elasticsearch query, e.g. "*"' 12 | default: "*" 13 | steps: 14 | - - name: call-url-from-es 15 | arguments: 16 | parameters: 17 | - name: q 18 | value: {{`'{{inputs.parameters.q}}'`}} 19 | templateRef: 20 | name: get-asset-level1-url 21 | template: url-from-es 22 | - - name: call-nuclei-scan 23 | arguments: 24 | artifacts: 25 | - name: hosts 26 | from: {{`'{{steps.call-url-from-es.outputs.artifacts.result}}'`}} 27 | templateRef: 28 | name: level1-nuclei 29 | template: nuclei-scan-big-file 30 | 31 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 |4 | 5 | 本项目致力于成为一个面向企业的外网攻击面管理产品,企业可以用它来发现自己暴露在互联网的资产、感知资产存在的安全漏洞并运营漏洞。 6 | 7 | 使用说明见 [文档](https://leveryd-asm.github.io/asm-document/#/) 。 8 | 9 | ## 贡献者 ✨ 10 | 11 | 欢迎任何形式的贡献! 感谢这些优秀的贡献者,是他们让我们的项目快速成长。 12 | 13 | 14 | 15 | 16 |中文 | English
3 |
Evilran |
20 |
35 |
36 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License
2 |
3 | Copyright (c) 2017-present leveryd, https://github.com/leveryd/go-sec-code
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level1/save-alarm.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level1-save-alarm
5 | spec:
6 | arguments: {}
7 | templates:
8 | - name: save-nuclei
9 | inputs:
10 | artifacts:
11 | - name: result
12 | path: /tmp/result
13 | container:
14 | image: 'leveryd/x-tool:v2023.1.16'
15 | command:
16 | - "x"
17 | args:
18 | - 'nuclei'
19 | - '-f'
20 | - /tmp/result
21 | - '-api'
22 | - 'http://{{.Values.console_api_service_name}}:{{.Values.console_api_service_port}}'
23 |
24 | - name: save-ms-screenshot
25 | inputs:
26 | artifacts:
27 | - name: result
28 | path: /tmp/result
29 | container:
30 | image: 'leveryd/x-tool:v2023.1.16'
31 | command:
32 | - "x"
33 | args:
34 | - 'console'
35 | - '-if'
36 | - '/tmp/result'
37 | - '-module'
38 | - 'ms'
39 | - '-url'
40 | - 'http://{{.Values.console_api_service_name}}:{{.Values.console_api_service_port}}'
41 |
--------------------------------------------------------------------------------
/templates/xray/xray-reverse.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | name: xray-reverse
6 | spec:
7 | replicas: 1
8 | template:
9 | metadata:
10 | labels:
11 | app: xray-reverse
12 | spec:
13 | hostNetwork: true
14 | containers:
15 | - name: reverse
16 | image: leveryd/xray:v2023.2.21
17 | ports:
18 | - containerPort: 80
19 | command: ["/bin/bash", "-c", "/tools/xray reverse"]
20 | resources:
21 | requests:
22 | cpu: "10m"
23 | limits:
24 | cpu: "1"
25 | volumeMounts:
26 | - name: config
27 | mountPath: /tools/config.yaml
28 | subPath: config.yaml
29 | - name: db
30 | mountPath: /db
31 | volumes:
32 | - name: config
33 | configMap:
34 | name: xray-reverse-config
35 | items:
36 | - key: config.yaml
37 | path: config.yaml
38 | - name: db
39 | persistentVolumeClaim:
40 | claimName: xray-reverse-db
41 | selector:
42 | matchLabels:
43 | app: xray-reverse
44 |
45 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/cron/web-service/org.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: CronWorkflow
3 | metadata:
4 | name: cron-probe-web-service-org
5 | spec:
6 | schedule: "0 */1 * * *" # run every hour
7 | suspend: false
8 | workflowSpec:
9 | workflowTemplateRef:
10 | name: probe-asset-level2-web-service-from-es-subdomain
11 | arguments:
12 | parameters:
13 | - name: q
14 | description: 'elasticsearch query, e.g. "*"'
15 | value: "org:* AND first_create_time:[now-1h TO now] AND NOT enable:0"
16 | - name: index
17 | value: "subdomain"
18 | - name: size
19 | value: "10000000"
20 | - name: source
21 | description: field to return
22 | value: "host"
23 | - name: format_source
24 |           description: logstash field reference format, e.g. "[asn][as-range]"
25 | value: "[host]"
26 | - name: passive
27 |           description: "passive mode when getting ports (default: false)"
28 | value: "false"
29 | enum:
30 | - "true"
31 | - "false"
32 | - name: port-range
33 | value: "80,443,8000-9000"
34 | entrypoint: 根据查询获取主机端口信息-收集首页信息-存储结果
35 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level1/identify-ms.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level1-identify-ms  # runs the x-tool "ims" subcommand; presumably identifies middleware/services -- TODO confirm
5 | spec:
6 | arguments: {}
7 | templates:
8 | - name: level1-identify-ms-from-url-file  # batch mode: urls supplied as an input artifact file
9 | inputs:
10 | artifacts:
11 | - name: urls
12 | path: /tmp/urls.txt
13 | outputs:
14 | artifacts:
15 | - name: result
16 | path: /tmp/result
17 | container:
18 | image: 'leveryd/x-tool:v2023.1.16'
19 | command:
20 | - "x"
21 | args:
22 | - 'ims'
23 | - '-if'  # input file
24 | - '/tmp/urls.txt'
25 | - '-of'  # output file
26 | - '/tmp/result'
27 |
28 | - name: level1-identify-ms-from-one-url  # single-url mode: url passed as a parameter instead of a file
29 | inputs:
30 | parameters:
31 | - name: url
32 | description: 'url eg. https://www.baidu.com'
33 | outputs:
34 | artifacts:
35 | - name: result
36 | path: /tmp/result
37 | container:
38 | image: 'leveryd/x-tool:v2023.1.16'
39 | command:
40 | - "x"
41 | args:
42 | - 'ims'
43 | - '-u'
44 | - {{`'{{inputs.parameters.url}}'`}}
45 | - '-of'
46 | - '/tmp/result'
47 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/url/get-url.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: get-asset-level1-url  # query elasticsearch via logstash and emit a deduplicated url list
5 | spec:
6 | templates:
7 | - name: url-from-es
8 | volumes:
9 | - name: logstash-config-url
10 | configMap:
11 | name: logstash-config-url
12 | inputs:
13 | parameters:
14 | - name: q
15 | description: 'elasticsearch query, e.g. "*"'
16 | default: "*"
17 | outputs:
18 | artifacts:
19 | - name: result
20 | path: /tmp/url.txt
21 | parameters:
22 | - name: count  # number of unique urls, for callers that fan out per line
23 | valueFrom:
24 | path: /tmp/count.txt
25 | script:
26 | image: logstash:7.17.3
27 | command:
28 | - 'sh'
29 | env:
30 | - name: QUERY  # consumed by url.conf from the logstash-config-url ConfigMap
31 | value: {{`"{{inputs.parameters.q}}"`}}
32 | source: |
33 | logstash --api.enabled false -w 1 --log.level error -f /tmp/logstash/url.conf  # same flags as the sld.conf invocation: no http api, one worker, quiet logs
34 |
35 | sort -u /tmp/url.txt > /tmp/url.txt.tmp  # dedupe in place
36 | mv /tmp/url.txt.tmp /tmp/url.txt
37 | wc -l /tmp/url.txt | awk '{print $1}' > /tmp/count.txt
38 | volumeMounts:
39 | - name: logstash-config-url
40 | mountPath: /tmp/logstash
41 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/second-level-domain/get-from-es.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: get-asset-level1-sld-domain-from-es  # list an org's second-level domains from elasticsearch
5 | spec:
6 | templates:
7 | - name: from-es
8 | volumes:
9 | - name: logstash-config-sld
10 | configMap:
11 | name: logstash-config-sld
12 | inputs:
13 | parameters:
14 | - name: org
15 | description: "组织名"  # "organization name"
16 | outputs:
17 | artifacts:
18 | - name: result
19 | path: /tmp/host.txt
20 | parameters:
21 | - name: count  # number of unique hosts found
22 | valueFrom:
23 | path: /tmp/count.txt
24 | script:
25 | image: logstash:7.17.3
26 | command:
27 | - sh
28 | env:
29 | - name: ORG  # consumed by sld.conf from the logstash-config-sld ConfigMap
30 | value: {{`"{{inputs.parameters.org}}"`}}
31 | source: |
32 | logstash --api.enabled false -w 1 --log.level error -f /tmp/logstash/sld.conf
33 |
34 | cat /tmp/host.txt | sort | uniq > /tmp/host.txt.tmp  # dedupe in place
35 | mv /tmp/host.txt.tmp /tmp/host.txt
36 | wc -l /tmp/host.txt | awk '{print $1}' > /tmp/count.txt
37 | volumeMounts:
38 | - name: logstash-config-sld
39 | mountPath: /tmp/logstash
40 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level2/nuclei/fofa-nuclei.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level2-fofa-nuclei  # fofa search -> url list -> nuclei scan
5 | spec:
6 | arguments:
7 | parameters:
8 | - name: fofa_key
9 | default: "{{.Values.fofa_key}}"
10 | description: "fofa key, can be specified when operator install asm instance"
11 | - name: fofa_email
12 | default: "{{.Values.fofa_email}}"
13 | description: "fofa email, can be specified when operator install asm instance"
14 | - name: query
15 | description: "fofa query, do not use single quotes"
16 | default: 'host=".apple.com"'
17 | - name: size
18 | default: "10000"  # quoted: Argo parameter defaults are strings; a bare integer is inconsistent with the quoted numeric defaults elsewhere in this chart
19 | description: "number of results to return"
20 | templates:
21 | - name: fofa查询获取url列表-nuclei扫描-保存结果 # fofa获取的host可能有误报 ("fofa query for urls -> nuclei scan -> save results"; fofa hosts may include false positives)
22 | steps:
23 | - - name: call-fofa  # reads fofa_key/fofa_email/query/size from the workflow-level arguments above
24 | templateRef:
25 | name: probe-asset-level2-webservice-from-fofa
26 | template: fofa
27 | - - name: call-nuclei-scan
28 | arguments:
29 | artifacts:
30 | - name: hosts
31 | from: {{`'{{steps.call-fofa.outputs.artifacts.urls}}'`}}
32 | templateRef:
33 | name: level1-nuclei
34 | template: nuclei-scan-big-file
35 |
36 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/screenshot.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level1-screenshot  # take website screenshots through the browserless service
5 | spec:
6 | arguments: {}
7 | templates:
8 | - name: level1-screenshot-from-url-file  # batch mode: one csv result file covering all urls
9 | inputs:
10 | artifacts:
11 | - name: urls
12 | path: /tmp/urls.txt
13 | outputs:
14 | artifacts:
15 | - name: result
16 | path: /tmp/result
17 | container:
18 | image: 'leveryd/x-tool:v2023.1.16'
19 | command:
20 | - "sh"
21 | args:
22 | - "-c"
23 | - 'touch /tmp/result && x ss -sssUrl http://browserless-service -if /tmp/urls.txt -of /tmp/result -ot csv'  # touch first so the output artifact exists even when the url list is empty
24 |
25 | - name: level1-screenshot-from-one-url  # single-url mode: output written as files under /tmp/ (-ot dir)
26 | inputs:
27 | parameters:
28 | - name: url
29 | description: 'url eg. https://www.baidu.com'
30 | outputs:
31 | artifacts:
32 | - name: result
33 | path: /tmp/  # the whole directory is collected as the artifact
34 | container:
35 | image: 'leveryd/x-tool:v2023.1.16'
36 | command:
37 | - "x"
38 | args:
39 | - 'ss'
40 | - '-sssUrl'  # presumably the screenshot-service url -- TODO confirm flag meaning
41 | - 'http://browserless-service'
42 | - '-u'
43 | - {{`'{{inputs.parameters.url}}'`}}
44 | - '-of'
45 | - '/tmp/'
46 | - '-ot'
47 | - 'dir'
48 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/subdomain/logstash-config.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ConfigMap
3 | metadata:
4 | name: logstash-config-subdomain  # logstash pipeline: csv subdomain file -> elasticsearch "subdomain" index
5 | data:
6 | subdomain.conf: |
7 | input {
8 | file {
9 | path => "/tmp/subdomains.txt"
10 | start_position => "beginning"
11 | sincedb_path => "/dev/null"  # do not persist read offsets; always re-read the file
12 | exit_after_read => true
13 | mode => "read"
14 | file_chunk_size => 3145728 # 3MB
15 | }
16 | }
17 |
18 | filter {
19 | csv {
20 | columns => ["host"]
21 | }
22 | }
23 | output {
24 | elasticsearch {
25 | hosts => ["elasticsearch-master:9200"]
26 | index => "subdomain"
27 | document_id => "%{host}"  # one document per host, so re-runs upsert instead of duplicating
28 |
29 | scripted_upsert => true  # painless script below keeps first_create_time stable and refreshes last_update_time
30 | action => "update"
31 | script_lang => "painless"
32 | script_type => "inline"
33 | script => "
34 | if(ctx.op == 'create') {
35 | ctx._source=params.event;
36 | ctx._source.first_create_time = params.event.get('@timestamp');
37 | } else {
38 | String old = ctx._source.get('first_create_time');
39 | for (entry in params.event.entrySet()) {
40 | ctx._source[entry.getKey()] = entry.getValue()
41 | }
42 | ctx._source.last_update_time = params.event.get('@timestamp');
43 | ctx._source.first_create_time = old;
44 | }
45 | "
46 | }
47 | }
48 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/probe-favicon.yaml:
--------------------------------------------------------------------------------
1 | # probe web service status for a domain
2 | apiVersion: argoproj.io/v1alpha1
3 | kind: WorkflowTemplate
4 | metadata:
5 | name: level2-probe-favicon  # es query -> url list -> favicon probe -> index results via logstash
6 | spec:
7 | volumes:
8 | - name: logstash-config-map
9 | configMap:
10 | name: logstash
11 | templates:
12 | - name: url-from-es
13 | inputs:
14 | parameters:
15 | - name: q
16 | description: "es query"
17 | steps:
18 | - - name: call-get-urls-from-es-by-query
19 | arguments:
20 | parameters:
21 | - name: q
22 | value: {{`'{{inputs.parameters.q}}'`}}
23 | templateRef:
24 | name: asset-level1-web-service
25 | template: get-urls-from-es-by-query
26 | - - name: call-httpx-scan  # despite the step name, this runs the favicon probe from the tls template
27 | arguments:
28 | artifacts:
29 | - name: hosts
30 | from: {{`'{{steps.call-get-urls-from-es-by-query.outputs.artifacts.result}}'`}}
31 | templateRef:
32 | name: probe-asset-level1-tls
33 | template: probe-favicon-from-urlfile
34 | - - name: logstash  # ship the probe output into elasticsearch
35 | arguments:
36 | artifacts:
37 | - name: httpx-result
38 | from: {{`'{{steps.call-httpx-scan.outputs.artifacts.result}}'`}}
39 | templateRef:
40 | name: level1-logstash
41 | template: httpx-probe-favicon
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/probe-tls.yaml:
--------------------------------------------------------------------------------
1 | # probe web service status for a domain
2 | apiVersion: argoproj.io/v1alpha1
3 | kind: WorkflowTemplate
4 | metadata:
5 | name: level2-probe-tls  # es query -> https url list -> tls probe -> index results via logstash
6 | spec:
7 | volumes:
8 | - name: logstash-config-map
9 | configMap:
10 | name: logstash
11 | templates:
12 | - name: url-from-es
13 | inputs:
14 | parameters:
15 | - name: q
16 | description: "es query"
17 | default: "scheme:https"  # only https services carry certificates to probe
18 | steps:
19 | - - name: call-get-urls-from-es-by-query
20 | arguments:
21 | parameters:
22 | - name: q
23 | value: {{`'{{inputs.parameters.q}}'`}}
24 | templateRef:
25 | name: asset-level1-web-service
26 | template: get-urls-from-es-by-query
27 | - - name: call-probe-tls
28 | arguments:
29 | artifacts:
30 | - name: hosts
31 | from: {{`'{{steps.call-get-urls-from-es-by-query.outputs.artifacts.result}}'`}}
32 | templateRef:
33 | name: probe-asset-level1-tls
34 | template: probe-tls-from-urlfile
35 | - - name: logstash  # ship the tls probe output into elasticsearch
36 | arguments:
37 | artifacts:
38 | - name: tls-result
39 | from: {{`'{{steps.call-probe-tls.outputs.artifacts.result}}'`}}
40 | templateRef:
41 | name: level1-logstash
42 | template: probe-tls
--------------------------------------------------------------------------------
/README_ENG.md:
--------------------------------------------------------------------------------
1 | English | 中文
2 |
3 | # Project Introduction
4 | This project aims to become an enterprise-oriented external attack surface management product. Enterprises can use it to discover their assets exposed on the Internet, perceive security vulnerabilities in these assets, and manage these vulnerabilities.
5 | For instructions on how to use the product, please refer to the [documentation](https://leveryd-asm.github.io/english-document/#/). To prevent malicious scanning, please contact me via WeChat at `happy_leveryd` to obtain the demo environment address.
6 |
7 | ## Contributors ✨
8 | We welcome all forms of contributions! Thanks to these outstanding contributors, our project has grown rapidly.
9 |
10 | Evilran
--------------------------------------------------------------------------------
/templates/ingress.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: networking.k8s.io/v1
2 | kind: Ingress
3 | metadata:
4 | name: argo  # exposes the argo-workflows ui/api under /argo/ on the console domain
5 | annotations:
6 | nginx.ingress.kubernetes.io/rewrite-target: /$2  # $2 = second capture group of the path regex below, i.e. everything after /argo/
7 | spec:
8 | rules:
9 | - host: {{.Values.console_domain}}
10 | http:
11 | paths:
12 | - path: /argo(/|$)(.*) # match all paths that begin with /argo/
13 | pathType: ImplementationSpecific
14 | backend:
15 | service:
16 | name: argo-workflows-app-server # name of the service
17 | port:
18 | number: 2746
19 | ---
20 | apiVersion: networking.k8s.io/v1
21 | kind: Ingress
22 | metadata:
23 | name: console  # /api -> console api backend, everything else -> console frontend
24 | spec:
25 | rules:
26 | - host: {{.Values.console_domain}}
27 | http:
28 | paths:
29 | - backend:
30 | service:
31 | name: {{.Values.console_api_service_name}}
32 | port:
33 | number: 80
34 | path: /api
35 | pathType: ImplementationSpecific
36 | - backend:
37 | service:
38 | name: console-fe-service
39 | port:
40 | number: 65533  # console-fe-service listens on 65533 and forwards to the fe container's port 80
41 | path: /
42 | pathType: ImplementationSpecific
43 | {{/* - path: /argo*/}}
44 | {{/* pathType: ImplementationSpecific*/}}
45 | {{/* backend:*/}}
46 | {{/* service:*/}}
47 | {{/* name: release-name-argo-workflows-server*/}}
48 | {{/* port:*/}}
49 | {{/* number: 2746*/}}
--------------------------------------------------------------------------------
/templates/crawler/coredns-config.yaml:
--------------------------------------------------------------------------------
1 | {{/*kind: ConfigMap*/}} {{/* NOTE: the whole manifest is intentionally commented out; when enabled it patches the kube-system CoreDNS config with rewrite rules mapping names under leveryd.com to in-cluster services */}}
2 | {{/*apiVersion: v1*/}}
3 | {{/*metadata:*/}}
4 | {{/* name: coredns*/}}
5 | {{/* namespace: kube-system*/}}
6 | {{/*data:*/}}
7 | {{/* Corefile: |*/}}
8 | {{/* .:53 {*/}}
9 | {{/* errors*/}}
10 | {{/* health {*/}}
11 | {{/* lameduck 5s*/}}
12 | {{/* }*/}}
13 | {{/* ready*/}}
14 | {{/* kubernetes cluster.local in-addr.arpa ip6.arpa {*/}}
15 | {{/* pods insecure*/}}
16 | {{/* fallthrough in-addr.arpa ip6.arpa*/}}
17 | {{/* ttl 30*/}}
18 | {{/* }*/}}
19 | {{/* # rewrite stop {*/}}
20 | {{/* # name leveryd.proxy.com xray-proxy-service.asm.svc.cluster.local*/}}
21 | {{/* # answer name xray-proxy-service.asm.svc.cluster.local leveryd.proxy.com*/}}
22 | {{/* # }*/}}
23 | {{/* # rewrite stop {*/}}
24 | {{/* # name regex (.*)\.my\.domain\.$ {1}.asm.svc.cluster.local*/}}
25 | {{/* # answer name (.*)\.asm\.svc\.cluster\.local\.$ {1}.my.domain*/}}
26 | {{/* # }*/}}
27 | {{/* rewrite stop {*/}}
28 | {{/* name regex (.*)\.(.*)\.leveryd.com {1}.{2}.svc.cluster.local*/}}
29 | {{/* answer name (.*)\.(.*)\.svc\.cluster\.local {1}.{2}.leveryd.com*/}}
30 | {{/* }*/}}
31 | {{/* # log*/}}
32 | {{/* prometheus :9153*/}}
33 | {{/* forward . /etc/resolv.conf {*/}}
34 | {{/* max_concurrent 1000*/}}
35 | {{/* }*/}}
36 | {{/* cache 30*/}}
37 | {{/* loop*/}}
38 | {{/* reload*/}}
39 | {{/* loadbalance*/}}
40 | {{/* }*/}}
41 |
--------------------------------------------------------------------------------
/templates/crawler/crawler.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | name: crawler  # katana crawler worker + http api producer, coupled through one kafka topic
6 | spec:
7 | replicas: 1
8 | template:
9 | metadata:
10 | labels:
11 | app: crawler
12 | spec:
13 | containers:
14 | - name: katana  # consumes crawl jobs from kafka; traffic goes through the xray proxy
15 | image: leveryd/katana:v2022.11.23
16 | imagePullPolicy: IfNotPresent
17 | env:
18 | - name: kafkaURL
19 | value: {{ include "kafkaURL" . }}
20 | - name: topic
21 | value: {{.Values.crawler_topic}}
22 | - name: group_id
23 | value: {{.Values.crawler_group_id}}
24 | - name: proxy
25 | value: "https://{{.Values.xray_proxy_service}}:{{.Values.xray_proxy_port}}"  # NOTE(review): full https url here vs bare service name in crawler-api below -- confirm which format each binary expects
26 |
27 | - name: crawler-api  # http api (port 8080) that enqueues crawl jobs onto the same topic
28 | image: leveryd/katana-api:v2022.11.23
29 | imagePullPolicy: IfNotPresent
30 | ports:
31 | - containerPort: 8080
32 | env:
33 | - name: kafkaURL
34 | value: {{ include "kafkaURL" . }}
35 | - name: topic
36 | value: {{.Values.crawler_topic}}
37 | - name: group_id
38 | value: {{.Values.crawler_group_id}}
39 | - name: proxy
40 | value: {{.Values.xray_proxy_service}}
41 | selector:
42 | matchLabels:
43 | app: crawler
44 | ---
45 | apiVersion: v1
46 | kind: Service
47 | metadata:
48 | name: crawler-service  # fronts the crawler-api container
49 | spec:
50 | selector:
51 | app: crawler
52 | ports:
53 | - protocol: TCP
54 | port: 80
55 | targetPort: 8080
56 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/util/user-input.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: util-user-input  # helpers that normalize user input into a JSON list (e.g. for withParam fan-out)
5 | spec:
6 | templates:
7 | - name: convert-lines-to-list  # newline-separated parameter -> JSON array artifact + printed result
8 | inputs:
9 | parameters:
10 | - name: lines
11 | description: "lines \n separated"
12 | outputs:
13 | artifacts:
14 | - name: result
15 | path: /tmp/result.txt
16 | script:
17 | image: python:3.8
18 | command:
19 | - python
20 | source: |
21 | import json
22 | user_input = """{{`{{inputs.parameters.lines}}`}}"""  # NOTE(review): verbatim substitution -- input containing triple quotes breaks (or injects into) this script; consider an artifact input
23 | user_input_list = user_input.splitlines()
24 |
25 | with open("/tmp/result.txt", "w") as f:
26 | f.write(json.dumps(user_input_list))
27 | print(json.dumps(user_input_list))  # printed so callers can also read the script's result output
28 |
29 | - name: convert-file-to-list  # file artifact (one item per line) -> JSON array artifact + printed result
30 | inputs:
31 | artifacts:
32 | - name: file
33 | path: /tmp/input.txt
34 | outputs:
35 | artifacts:
36 | - name: result
37 | path: /tmp/result.txt
38 | script:
39 | image: python:3.8
40 | command:
41 | - python
42 | source: |
43 | import json
44 | user_input_list = []
45 | with open("/tmp/input.txt", "r") as f:
46 | for line in f:
47 | user_input_list.append(line.strip())
48 |
49 | with open("/tmp/result.txt", "w") as f:
50 | f.write(json.dumps(user_input_list))
51 | print(json.dumps(user_input_list))
52 |
--------------------------------------------------------------------------------
/Chart.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v2
2 | name: helm
3 | description: A Helm chart for Kubernetes
4 |
5 | # A chart can be either an 'application' or a 'library' chart.
6 | #
7 | # Application charts are a collection of templates that can be packaged into versioned archives
8 | # to be deployed.
9 | #
10 | # Library charts provide useful utilities or functions for the chart developer. They're included as
11 | # a dependency of application charts to inject those utilities and functions into the rendering
12 | # pipeline. Library charts do not define any templates and therefore cannot be deployed.
13 | type: application
14 |
15 | # This is the chart version. This version number should be incremented each time you make changes
16 | # to the chart and its templates, including the app version.
17 | # Versions are expected to follow Semantic Versioning (https://semver.org/)
18 | version: 0.0.1
19 |
20 | # This is the version number of the application being deployed. This version number should be
21 | # incremented each time you make changes to the application. Versions are not expected to
22 | # follow Semantic Versioning. They should reflect the version the application is using.
23 | # It is recommended to use it with quotes.
24 | appVersion: "0.0.1"
25 |
26 | dependencies:
27 | - name: kafka
28 | version: "20.0.2"
29 | repository: "https://charts.bitnami.com/bitnami"
30 | - name: argo-workflows
31 | version: "0.27.0"
32 | repository: "https://argoproj.github.io/argo-helm"
33 | - name: elasticsearch
34 | version: 7.17.3  # keep aligned with the logstash:7.17.3 image used by the workflow templates
35 | repository: "https://helm.elastic.co"
36 | - name: kibana
37 | version: 7.17.3  # keep aligned with the elasticsearch dependency version above
38 | repository: "https://helm.elastic.co"
39 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/cron/nuclei-template-update/util.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | labels:
5 | workflows.argoproj.io/creator: system-serviceaccount-argo-argo-server
6 | name: util-update-nuclei-template  # refresh the shared nuclei-template PVC
7 | spec:
8 | volumes:
9 | - name: template-dir
10 | persistentVolumeClaim:
11 | claimName: nuclei-template
12 |
13 | templates:
14 | - name: update-nuclei-template
15 | inputs:
16 | parameters:
17 | - name: force-update
18 | default: "false"  # quoted string: matches the string comparison in the script below and the quoted boolean defaults used elsewhere in this chart
19 | script:
20 | image: projectdiscovery/nuclei:v2.9.4
21 | command:
22 | - 'sh'
23 | source: |
24 | cd /root/nuclei-templates
25 | nuclei --target 127.0.0.1 -id CVE-2021-44228 -debug -ut  # -ut updates the template dir; the dummy target just forces the engine to run
26 |
27 | if [ "{{`{{inputs.parameters.force-update}}`}}" = "true" ]
28 | then
29 | wget -c https://github.com/projectdiscovery/fuzzing-templates/archive/refs/tags/v0.0.2.zip -O /root/nuclei-templates/main.zip
30 | unzip /root/nuclei-templates/main.zip -d /root/nuclei-templates/
31 | rm -rf /root/nuclei-templates/main.zip
32 | fi
33 |
34 | ls -al /root/nuclei-templates
35 | volumeMounts:
36 | - name: template-dir
37 | mountPath: /root/nuclei-templates
38 |
39 | - name: lookup-nuclei-template  # keeps a container alive so an operator can exec in and inspect the volume
40 | script:
41 | image: projectdiscovery/nuclei:v2.9.4
42 | command:
43 | - 'sh'
44 | source: |
45 | tail -f /dev/null
46 | volumeMounts:
47 | - name: template-dir
48 | mountPath: /root/nuclei-templates
49 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level2/katana/es-katana.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level2-es-katana
5 | spec:
6 | templates:
7 | - name: 获取web服务-katana爬虫-xray扫描-保存结果  # "get web services -> katana crawl -> xray scan -> save results"
8 | parallelism: 3  # at most 3 concurrent crawl steps
9 | inputs:
10 | parameters:
11 | - name: q
12 | description: 'elasticsearch query from web-service index, e.g. "*". escape double quote with backslash'
13 | default: "*"
14 | steps:
15 | - - name: call-url-from-es
16 | arguments:
17 | parameters:
18 | - name: q
19 | value: {{`'{{inputs.parameters.q}}'`}}
20 | templateRef:
21 | name: get-asset-level1-url
22 | template: url-from-es
23 | - - name: call-get-file-lines-count  # drives the withSequence fan-out below
24 | arguments:
25 | artifacts:
26 | - name: file
27 | from: {{`'{{steps.call-url-from-es.outputs.artifacts.result}}'`}}
28 | templateRef:
29 | name: util
30 | template: get-file-lines-count
31 | - - name: call-katana-from-file-index  # one crawl step per line of the url file
32 | arguments:
33 | artifacts:
34 | - name: domains
35 | from: {{`'{{steps.call-url-from-es.outputs.artifacts.result}}'`}}
36 | parameters:
37 | - name: index
38 | value: {{`"{{item}}"`}}
39 | templateRef:
40 | name: level1-crawler
41 | template: katana-from-file-index
42 | withSequence:
43 | count: {{`"{{steps.call-get-file-lines-count.outputs.result}}"`}}
44 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/subdomain.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level2-subdomain
5 | spec:
6 | templates:
7 | - name: get-subdomains-save
8 | description: 'Get subdomains and save to console'
9 | inputs:
10 | parameters:
11 | - name: domain
12 | description: '主域名, 如 apple.com'  # "root domain, e.g. apple.com"
13 | - name: get-subdomain-way
14 | description: "哪些方式扫描子域名,可选值 subfinder,oneforall,es,mysql"  # "which subdomain enumeration methods to use"
15 | default: "subfinder,es,mysql"
16 | - name: save-subdomain-result
17 | description: "是否保存子域名结果,可选值 true,false"  # "whether to save the subdomain results (true/false)"
18 | default: "true"
19 | steps:
20 | - - name: get-subdomains
21 | templateRef:
22 | name: probe-asset-level1-subdomain
23 | template: get-subdomain
24 | arguments:
25 | parameters:
26 | - name: domain
27 | value: {{`'{{inputs.parameters.domain}}'`}}
28 | - name: get-subdomain-way
29 | value: {{`'{{inputs.parameters.get-subdomain-way}}'`}}
30 | - - name: save-subdomains
31 | when: {{`"{{inputs.parameters.save-subdomain-result}} == true"`}}  # argo 'when' compares the rendered string against the literal true
32 | templateRef:
33 | name: level1-save-subdomains
34 | template: save-subdomains
35 | arguments:
36 | parameters:
37 | - name: domain
38 | value: {{`'{{inputs.parameters.domain}}'`}}
39 | artifacts:
40 | - name: subdomains
41 | from: {{`'{{steps.get-subdomains.outputs.artifacts.subdomains}}'`}}
42 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/webservice/probe-webservice-from-port-asset.yaml:
--------------------------------------------------------------------------------
1 | # probe web service status for ip segment
2 | apiVersion: argoproj.io/v1alpha1
3 | kind: WorkflowTemplate
4 | metadata:
5 | name: probe-asset-level2-web-service-from-port  # port index links -> httpx probe -> store via logstash
6 | spec:
7 | arguments:
8 | parameters:
9 | - name: q
10 | description: 'elasticsearch query, e.g. "*"'
11 | default: "host:apple.com"
12 | - name: index
13 | default: "port"
14 | - name: size
15 | default: "10000000"
16 | - name: source
17 | description: field to return
18 | default: "link"
19 | - name: format_source
20 | description: logstash field reference format, eg "[asn][as-range]"
21 | default: "[link]"
22 | volumes:
23 | - name: logstash-config-map
24 | configMap:
25 | name: logstash
26 | templates:
27 | - name: 根据查询获取主机端口信息-收集首页信息-存储结果  # "get host ports by query -> collect homepage info -> save results"
28 | steps:
29 | - - name: call-url-from-es  # reads q/index/size/source/format_source from the workflow-level arguments above
30 | templateRef:
31 | name: util-asset-es-query
32 | template: query-es
33 | - - name: call-httpx-scan
34 | arguments:
35 | artifacts:
36 | - name: hosts
37 | from: {{`'{{steps.call-url-from-es.outputs.artifacts.result}}'`}}
38 | templateRef:
39 | name: level1-httpx
40 | template: probe-all-from-file-no-port
41 | - - name: logstash
42 | arguments:
43 | artifacts:
44 | - name: httpx-result
45 | from: {{`'{{steps.call-httpx-scan.outputs.artifacts.urls}}'`}}  # NOTE(review): sibling templates read this artifact as 'result'; confirm probe-all-from-file-no-port really names it 'urls'
46 | templateRef:
47 | name: level1-logstash
48 | template: httpx2elasticsearch
49 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level2/es-webscan.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level2-general-es-nuclei  # es urls -> nuclei fuzz -> save alarms
5 | spec:
6 | volumes:
7 | - name: logstash-config-es-general-query
8 | configMap:
9 | name: logstash-config-es-general-query
10 | arguments:
11 | parameters:
12 | - name: q
13 | description: 'elasticsearch query, e.g. "*"'
14 | default: "response_content_type :'html' AND status_code:200"  # NOTE(review): stray space before the colon -- confirm the query still matches as intended
15 | - name: index
16 | default: "proxify"
17 | - name: size
18 | default: "10000000"
19 | - name: source
20 | description: field to return
21 | default: "url"
22 | - name: format_source
23 | description: logstash field reference format, eg "[asn][as-range]"
24 | default: "[url]"
25 | - name: tags
26 | default: "sqli,xss"
27 | templates:
28 | - name: es查询webservice获取url列表-nuclei扫描-保存结果  # "query es for urls -> nuclei scan -> save results"
29 | steps:
30 | - - name: call-url-from-es  # reads the workflow-level arguments above
31 | templateRef:
32 | name: util-asset-es-query
33 | template: query-es
34 | - - name: call-nuclei-scan
35 | arguments:
36 | artifacts:
37 | - name: hosts
38 | from: {{`'{{steps.call-url-from-es.outputs.artifacts.result}}'`}}
39 | templateRef:
40 | name: webscan-level1-fuzz
41 | template: fuzz
42 | - - name: call-save-nuclei-alarm
43 | arguments:
44 | artifacts:
45 | - from: {{`'{{steps.call-nuclei-scan.outputs.artifacts.nuclei-result}}'`}}
46 | name: result
47 | templateRef:
48 | name: level1-save-alarm
49 | template: save-nuclei
--------------------------------------------------------------------------------
/templates/console/console.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | name: console  # api + frontend containers in one pod
6 | spec:
7 | replicas: 1
8 | template:
9 | metadata:
10 | labels:
11 | app: console
12 | spec:
13 | containers:
14 | - name: console-api
15 | image: leveryd/console-api:2022.12.9
16 | args:
17 | - -f
18 | - /etc/console/console-api.yaml
19 | imagePullPolicy: IfNotPresent
20 | env:
21 | - name: MYSQL_DATASOURCE
22 | value: root:{{.Values.console_db_root_password}}@tcp({{.Values.console_db_host}})/cute
23 | volumeMounts:
24 | - name: console-api-conf
25 | mountPath: /etc/console
26 | livenessProbe:  # cheap api query to detect a wedged process
27 | periodSeconds: 5
28 | httpGet:
29 | path: /api/alarm/bbscan/query?limit=1
30 | port: 8080
31 | - name: console-fe
32 | image: leveryd/console-fe:2022.12.27
33 | imagePullPolicy: IfNotPresent
34 | ports:
35 | - containerPort: 80
36 | volumes:
37 | - name: console-api-conf
38 | configMap:
39 | name: console-api-conf
40 | selector:
41 | matchLabels:
42 | app: console
43 | ---
44 | apiVersion: v1
45 | kind: Service
46 | metadata:
47 | name: {{.Values.console_api_service_name}}
48 | spec:
49 | selector:
50 | app: console
51 | ports:
52 | - protocol: TCP
53 | port: {{.Values.console_api_service_port}}
54 | targetPort: 8080
55 | ---
56 | apiVersion: v1
57 | kind: Service
58 | metadata:
59 | name: console-fe-service
60 | spec:
61 | selector:
62 | app: console
63 | ports:
64 | - protocol: TCP
65 | port: 65533  # matched by the console ingress frontend backend
66 | targetPort: 80
67 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/cron/subdomain/from-response.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: CronWorkflow
3 | metadata:
4 | name: cron-probe-subdomain-from-response  # mine new subdomains out of recent http responses
5 | spec:
6 | schedule: "*/10 * * * *"  # every 10 minutes; the 11m query window below overlaps runs slightly so records are not missed
7 | suspend: false
8 | workflowSpec:
9 | workflowTemplateRef:
10 | name: probe-asset-level1-subdomain-from-response
11 | arguments:
12 | parameters:
13 | - name: q
14 | value: "timestamp:[now-11m TO now]"
15 | entrypoint: probe-subdomain-from-response
16 | ---
17 | apiVersion: argoproj.io/v1alpha1
18 | kind: CronWorkflow
19 | metadata:
20 | name: cron-probe-subdomain-from-tls-subject  # same workflow, fed by tls certificate subject alternative names
21 | spec:
22 | schedule: "*/10 * * * *"
23 | suspend: false
24 | workflowSpec:
25 | workflowTemplateRef:
26 | name: probe-asset-level1-subdomain-from-response
27 | arguments:
28 | parameters:
29 | - name: q
30 | value: "timestamp:[now-11m TO now]"
31 | - name: index
32 | value: "tls"
33 | - name: source
34 | value: "subject_an"
35 | - name: format_source
36 | value: "[subject_an]"
37 | entrypoint: probe-subdomain-from-response
38 | ---
39 | apiVersion: argoproj.io/v1alpha1
40 | kind: CronWorkflow
41 | metadata:
42 | name: cron-probe-subdomain-from-tls-sni  # same workflow, fed by tls sni values
43 | spec:
44 | schedule: "*/10 * * * *"
45 | suspend: false
46 | workflowSpec:
47 | workflowTemplateRef:
48 | name: probe-asset-level1-subdomain-from-response
49 | arguments:
50 | parameters:
51 | - name: q
52 | value: "timestamp:[now-11m TO now]"
53 | - name: index
54 | value: "tls"
55 | - name: source
56 | value: "sni"
57 | - name: format_source
58 | value: "[sni]"
59 | entrypoint: probe-subdomain-from-response
60 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/webservice/probe-webservice-from-es-subdomain.yaml:
--------------------------------------------------------------------------------
1 | # probe web service status for ip segment
2 | apiVersion: argoproj.io/v1alpha1
3 | kind: WorkflowTemplate
4 | metadata:
5 | name: probe-asset-level2-web-service-from-es-subdomain  # subdomain index -> port scan -> web service probe
6 | spec:
7 | arguments:
8 | parameters:
9 | - name: q
10 | description: 'elasticsearch query, e.g. "*"'
11 | default: "host:apple.com"
12 | - name: index
13 | default: "subdomain"
14 | - name: size
15 | default: "10000000"
16 | - name: source
17 | description: field to return
18 | default: "host"
19 | - name: format_source
20 | description: logstash field reference format, eg "[asn][as-range]"
21 | default: "[host]"
22 | - name: passive
23 | description: "passive mode when get port (default: false)"
24 | default: "false"
25 | enum:
26 | - "true"
27 | - "false"
28 | - name: port-range
29 | default: "80,443,8000-9000"
30 | volumes:
31 | - name: logstash-config-map
32 | configMap:
33 | name: logstash
34 | templates:
35 | - name: 根据查询获取主机端口信息-收集首页信息-存储结果  # "get host ports by query -> collect homepage info -> save results"
36 | steps:
37 | - - name: call-url-from-es  # reads the workflow-level arguments above
38 | templateRef:
39 | name: util-asset-es-query
40 | template: query-es
41 | - - name: call-port-scan
42 | arguments:
43 | artifacts:
44 | - name: hosts
45 | from: {{`"{{steps.call-url-from-es.outputs.artifacts.result}}"`}}
46 | parameters:
47 | - name: port-range
48 | value: {{`"{{workflow.parameters.port-range}}"`}}
49 | templateRef:
50 | name: probe-asset-level2-web-service
51 | template: from-host-file
--------------------------------------------------------------------------------
/templates/console/mysql.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: apps/v1
2 | kind: Deployment
3 | metadata:
4 | name: db  # console backing mysql
5 | spec:
6 | replicas: 1
7 | selector:
8 | matchLabels:
9 | app: db
10 | template:
11 | metadata:
12 | labels:
13 | app: db
14 | spec:
15 | containers:
16 | - name: mysql
17 | image: mysql:5.7
18 | imagePullPolicy: IfNotPresent
19 | ports:
20 | - containerPort: 3306
21 | env:
22 | - name: MYSQL_ROOT_PASSWORD
23 | value: {{.Values.console_db_root_password}}
24 | volumeMounts:
25 | - mountPath: /etc/localtime
26 | name: localtime  # share the node's timezone with the container
27 | {{- if .Values.console_db_persistence_enabled }}  # persistence on: hostPath-backed data dir plus a tmpdir mount; off: data lives on the db-data PVC
28 | - mountPath: /var/lib/mysql
29 | name: data
30 | - mountPath: /tmp/tempdir
31 | name: tmpdir
32 | {{- else }}
33 | - mountPath: /var/lib/mysql
34 | name: data
35 | {{- end }}
36 | - name: mysql-initdb
37 | mountPath: /docker-entrypoint-initdb.d  # schema/seed sql from the mysql-initdb-config ConfigMap, run on first start
38 | volumes:
39 | - name: localtime
40 | hostPath:
41 | path: /etc/localtime
42 | {{- if .Values.console_db_persistence_enabled }}
43 | - name: data
44 | hostPath:
45 | path: {{.Values.console_db_persistence_data_dir}}
46 | - name: tmpdir
47 | hostPath:
48 | path: /tmp
49 | {{- else }}
50 | - name: data
51 | persistentVolumeClaim:
52 | claimName: db-data
53 | {{- end }}
54 | - name: mysql-initdb
55 | configMap:
56 | name: mysql-initdb-config
57 |
58 | ---
59 | apiVersion: v1
60 | kind: Service
61 | metadata:
62 | name: mysql-service
63 | spec:
64 | selector:
65 | app: db
66 | ports:
67 | - protocol: TCP
68 | port: 3306
69 | targetPort: 3306
70 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level3/domain-ms.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 |   name: level3-domain-ms
5 | spec:
6 |   templates:
7 |     - name: 从API获取兄弟域名-获取子域名-ms扫描-保存结果
8 |       inputs:
9 |         parameters:
10 |           - name: query
11 |             value: 'limit=10000&offset=0&accurate=是'
12 |             description: "console api查询子域名时的条件"
13 |           - name: get-subdomain-way
14 |             description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
15 |             default: "subfinder,es,mysql"
16 |           - name: save-subdomain-result
17 |             description: "是否保存子域名结果"
18 |             default: "false"
19 |             enum: ["true", "false"]
20 |           - name: port-range
21 |             # example now matches the actual protocol:port format of the default value
22 |             description: "端口范围 例如: http:80,https:443"
23 |             default: "http:80,https:443"
24 |       parallelism: 3
25 |       steps:
26 |         - - name: call-getrootdomain-from-console-api
27 |             arguments:
28 |               parameters:
29 |                 - name: query
30 |                   value: {{`"{{inputs.parameters.query}}"`}}
31 |             templateRef:
32 |               name: get-asset-level1-sld-domain-from-console
33 |               template: from-console-api
34 |         - - name: call-domain-ms
35 |             arguments:
36 |               parameters:
37 |                 - name: domain
38 |                   value: {{`'{{item}}'`}}
39 |                 - name: get-subdomain-way
40 |                   value: {{`"{{inputs.parameters.get-subdomain-way}}"`}}
41 |                 - name: save-subdomain-result
42 |                   value: {{`"{{inputs.parameters.save-subdomain-result}}"`}}
43 |                 - name: port-range
44 |                   value: {{`"{{inputs.parameters.port-range}}"`}}
45 |             templateRef:
46 |               name: level2-domain-ms
47 |               template: 获取子域名-ms扫描-保存结果
48 |             withParam: {{`'{{steps.call-getrootdomain-from-console-api.outputs.result}}'`}}
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/cron/org/update-org-asset-map.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: update-org-asset-map
5 | spec:
6 | volumes:
7 | - name: logstash-config-update-org-asset-map
8 | configMap:
9 | name: logstash-config-update-org-asset-map
10 | templates:
11 | - name: update-org-asset-map
12 | steps:
13 | - - name: update-parsed-domain
14 | template: x-update-parsed-domain
15 | - - name: update-org-asset-map
16 | template: x-update-org-asset-map
17 |
18 | - name: x-update-parsed-domain
19 | script:
20 | image: alpine/curl:3.14
21 | command:
22 | - 'sh'
23 | source: |
24 | curl -s -X POST "{{.Values.kibana.elasticsearchHosts}}/subdomain,port,web-service/_update_by_query" -H 'Content-Type: application/json' -d '
25 | {
26 | "query": {
27 | "bool": {
28 | "must_not": {
29 | "exists": {
30 | "field": "parsed-domain.registered_domain"
31 | }
32 | }
33 | }
34 | }
35 | }
36 | '
37 |
38 | - name: x-update-org-asset-map
39 | script:
40 | image: logstash:7.17.3
41 | command:
42 | - 'sh'
43 | source: |
44 | # https://www.elastic.co/guide/en/logstash/current/running-logstash-command-line.html
45 | logstash --log.level error -f /tmp/logstash/update-org-asset-map.conf --api.enabled false -w 1
46 | volumeMounts:
47 | - name: logstash-config-update-org-asset-map
48 | mountPath: /tmp/logstash
49 | ---
50 | apiVersion: argoproj.io/v1alpha1
51 | kind: CronWorkflow
52 | metadata:
53 | name: cron-update-org-asset-map
54 | spec:
55 | schedule: "*/30 * * * *"
56 | suspend: false
57 | workflowSpec:
58 | workflowTemplateRef:
59 | name: update-org-asset-map
60 | entrypoint: update-org-asset-map
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/util/es-query/general-query.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 |   name: util-asset-es-query
5 | spec:
6 |   entrypoint: query-es
7 |   arguments:
8 |     parameters:
9 |       - name: q
10 |         description: 'elasticsearch query, e.g. "*"'
11 |         value: "*"
12 |       - name: index
13 |         value: "web-service"
14 |       - name: size
15 |         value: "10000000"
16 |       - name: source
17 |         description: field to return
18 |         value: "url"
19 |       - name: format_source
20 |         description: logstash field reference format, eg "[asn][as-range]"
21 |         value: "[url]"
22 |
23 |   templates:
24 |     - name: query-es
25 |       outputs:
26 |         artifacts:
27 |           - name: result
28 |             path: /tmp/result.txt
29 |         parameters:
30 |           - name: count
31 |             valueFrom:
32 |               path: /tmp/count.txt
33 |       script:
34 |         image: logstash:7.17.3
35 |         env:
36 |           - name: INDEX
37 |             value: {{`"{{workflow.parameters.index}}"`}}
38 |           - name: SIZE
39 |             value: {{`"{{workflow.parameters.size}}"`}}
40 |           - name: QUERY
41 |             value: {{`"{{workflow.parameters.q}}"`}}
42 |           - name: SOURCE
43 |             value: {{`"{{workflow.parameters.source}}"`}}
44 |           - name: FORMAT_SOURCE
45 |             value: {{`"{{workflow.parameters.format_source}}"`}}
46 |         command:
47 |           - 'sh'
48 |         source: |
49 |           logstash -f /tmp/logstash/general_query.conf
50 |
51 |           # dedupe the result in place; sort -u replaces the cat | sort | uniq
52 |           # pipeline plus temp-file shuffle in one step
53 |           sort -u -o /tmp/result.txt /tmp/result.txt
54 |           wc -l < /tmp/result.txt > /tmp/count.txt
55 |         volumeMounts:
56 |           - name: logstash-config-es-general-query
57 |             mountPath: /tmp/logstash
58 |       volumes:
59 |         - name: logstash-config-es-general-query
60 |           configMap:
61 |             name: logstash-config-es-general-query
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/webservice/fofa.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 |   name: probe-asset-level2-webservice-from-fofa
5 | spec:
6 |   arguments:
7 |     parameters:
8 |       - name: fofa_key
9 |         default: "{{.Values.fofa_key}}"
10 |         description: "fofa key, can be specified when operator install asm instance"
11 |       - name: fofa_email
12 |         default: "{{.Values.fofa_email}}"
13 |         description: "fofa email, can be specified when operator install asm instance"
14 |       - name: query
15 |         description: "fofa query, do not use single quotes"
16 |         default: 'host=".apple.com"'
17 |       - name: size
18 |         # argo parameter values must be strings; a bare 10 is a YAML integer
19 |         default: "10"
20 |         description: "number of results to return"
21 |   templates:
22 |     - name: fofa
23 |       outputs:
24 |         parameters:
25 |           - name: count
26 |             valueFrom:
27 |               parameter: {{`"{{steps.call-fofa.outputs.parameters.count}}"`}}
28 |         artifacts:
29 |           - name: result
30 |             description: "csv type"
31 |             from: {{`"{{steps.call-fofa.outputs.artifacts.result}}"`}}
32 |           - name: urls
33 |             from: {{`'{{steps.call-httpx-scan.outputs.artifacts.urls}}'`}}
34 |       steps:
35 |         - - name: call-fofa
36 |             templateRef:
37 |               name: get-asset-level1-fofa
38 |               template: fofa-client
39 |         - - name: call-httpx-scan
40 |             arguments:
41 |               artifacts:
42 |                 - name: hosts
43 |                   from: {{`'{{steps.call-fofa.outputs.artifacts.urls}}'`}}
44 |             templateRef:
45 |               name: level1-httpx
46 |               template: probe-all-from-file-no-port
47 |         - - name: logstash
48 |             arguments:
49 |               artifacts:
50 |                 - name: httpx-result
51 |                   from: {{`'{{steps.call-httpx-scan.outputs.artifacts.urls}}'`}}
52 |             templateRef:
53 |               name: level1-logstash
54 |               template: httpx2elasticsearch
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level3/webservice.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 |   name: level3-probe-web-service
5 | spec:
6 |   arguments: {}
7 |   volumes:
8 |     - name: logstash-config-map
9 |       configMap:
10 |         name: logstash
11 |   templates:
12 |     - name: 从API获取兄弟域名-获取子域名-web服务扫描-保存结果
13 |       inputs:
14 |         parameters:
15 |           - name: query
16 |             value: 'limit=10000&offset=0&accurate=是'
17 |             description: "console api查询子域名时的条件"
18 |           - name: get-subdomain-way
19 |             description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
20 |             default: "subfinder,es,mysql"
21 |           - name: save-subdomain-result
22 |             # description reattached here: it had been copy-pasted onto port-range
23 |             description: "是否保存子域名结果"
24 |             default: "false"
25 |             enum: [ "true", "false" ]
26 |           - name: port-range
27 |             description: "端口范围 例如: 80,443,8000-9000"
28 |             default: "80,443,8000-9000"
29 |
30 |       parallelism: 4 # 并发数
31 |       steps:
32 |         - - name: call-getrootdomain-from-console-api
33 |             arguments:
34 |               parameters:
35 |                 - name: query
36 |                   value: {{`'{{inputs.parameters.query}}'`}}
37 |             templateRef:
38 |               name: get-asset-level1-sld-domain-from-console
39 |               template: from-console-api
40 |         - - name: call-subdomain-portscan-save
41 |             arguments:
42 |               parameters:
43 |                 - name: domain
44 |                   value: {{`'{{item}}'`}}
45 |                 - name: get-subdomain-way
46 |                   value: {{`'{{inputs.parameters.get-subdomain-way}}'`}}
47 |                 - name: save-subdomain-result
48 |                   value: {{`'{{inputs.parameters.save-subdomain-result}}'`}}
49 |                 - name: port-range
50 |                   value: {{`'{{inputs.parameters.port-range}}'`}}
51 |             templateRef:
52 |               name: probe-asset-level2-web-service
53 |               template: 获取子域名-端口扫描-存储结果
54 |             withParam: {{`'{{steps.call-getrootdomain-from-console-api.outputs.result}}'`}}
55 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/subdomain/probe-from-response.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level1-subdomain-from-response
5 | spec:
6 | entrypoint: probe-subdomain-from-response
7 | arguments:
8 | parameters:
9 | - name: q
10 | description: 'elasticsearch query, e.g. "*"'
11 | value: "*"
12 | - name: index
13 | value: "proxify"
14 | - name: size
15 | value: "10000000"
16 | - name: source
17 | description: field to return
18 | value: "response"
19 | - name: format_source
20 | description: logstash field reference format, eg "[asn][as-range]"
21 | value: "[response]"
22 | templates:
23 | - name: probe-subdomain-from-response
24 | steps:
25 | - - name: get-response
26 | templateRef:
27 | name: util-asset-es-query
28 | template: query-es
29 | - - name: get-info
30 | arguments:
31 | artifacts:
32 | - name: response
33 | from: {{`"{{steps.get-response.outputs.artifacts.result}}"`}}
34 | template: x-find-info
35 | - - name: save-subdomains
36 | templateRef:
37 | name: level1-save-subdomains
38 | template: save-subdomains
39 | arguments:
40 | parameters:
41 | - name: domain
42 | value: ""
43 | artifacts:
44 | - name: subdomains
45 | from: {{`'{{steps.get-info.outputs.artifacts.subdomain}}'`}}
46 |
47 | - name: x-find-info
48 | inputs:
49 | artifacts:
50 | - name: response
51 | path: /tmp/response.txt
52 | outputs:
53 | artifacts:
54 | - name: subdomain
55 | path: /output/subdomain.txt
56 | - name: sld
57 | path: /output/tldDomain.txt
58 | script:
59 | image: 'leveryd/x-tool:v2023.1.16'
60 | command:
61 | - sh
62 | source: |
63 | x txt -d /tmp/ -od /output
64 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/subdomain/save.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level1-save-subdomains
5 | spec:
6 | templates:
7 | - name: save-subdomains
8 | inputs:
9 | artifacts:
10 | - name: subdomains
11 | parameters:
12 | - name: domain
13 | description: 'parent domain, eg apple.com'
14 | steps:
15 | - - name: console
16 | arguments:
17 | artifacts:
18 | - name: subdomains
19 | from: {{`"{{inputs.artifacts.subdomains}}"`}}
20 | parameters:
21 | - name: domain
22 | value: {{`"{{inputs.parameters.domain}}"`}}
23 | template: x-console
24 | - - name: elasticsearch
25 | arguments:
26 | artifacts:
27 | - name: subdomains
28 | from: {{`"{{inputs.artifacts.subdomains}}"`}}
29 | template: x-es
30 |
31 | - name: x-es
32 | volumes:
33 | - name: logstash-config-subdomain
34 | configMap:
35 | name: logstash-config-subdomain
36 | inputs:
37 | artifacts:
38 | - name: subdomains
39 | path: /tmp/subdomains.txt
40 | script:
41 | image: logstash:7.17.3
42 | command:
43 | - 'sh'
44 | source: |
45 | logstash -f /tmp/logstash/subdomain.conf
46 | volumeMounts:
47 | - name: logstash-config-subdomain
48 | mountPath: /tmp/logstash
49 |
50 | - name: x-console
51 | inputs:
52 | artifacts:
53 | - name: subdomains
54 | path: /tmp/subdomains
55 | parameters:
56 | - name: domain
57 | description: 'parent domain, eg apple.com'
58 | script:
59 | image: 'leveryd/x-tool:v2023.1.16'
60 | command:
61 | - 'sh'
62 | source: |
63 | x subdomain -action save -source console -domain {{`{{inputs.parameters.domain}}`}} -if /tmp/subdomains -consoleUrl http://{{.Values.console_api_service_name}}:{{.Values.console_api_service_port}}
64 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/second-level-domain/save.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level1-sld-from-text
5 | spec:
6 | templates:
7 | - name: manual-add-sld
8 | steps:
9 | - - name: approve
10 | template: x-approve
11 | - - name: add-sld
12 | template: save
13 | arguments:
14 | parameters:
15 | - name: org
16 | value: "{{`{{steps.approve.outputs.parameters.org}}`}}"
17 | artifacts:
18 | - name: sld
19 | raw:
20 | data: "{{`{{steps.approve.outputs.parameters.sld}}`}}"
21 | when: "{{`{{steps.approve.outputs.parameters.approve}}`}} == YES"
22 |
23 | - name: x-approve
24 | suspend: {}
25 | inputs:
26 | parameters:
27 | - name: approve
28 | default: 'NO'
29 | enum:
30 | - 'YES'
31 | - 'NO'
32 | - name: org
33 | default: ""
34 | - name: sld
35 | default: ""
36 | outputs:
37 | parameters:
38 | - name: approve
39 | valueFrom:
40 | supplied: {}
41 | - name: org
42 | valueFrom:
43 | supplied: {}
44 | - name: sld
45 | valueFrom:
46 | supplied: {}
47 |
48 | - name: save
49 | volumes:
50 | - name: logstash-config-sld
51 | configMap:
52 | name: logstash-config-sld
53 | inputs:
54 | parameters:
55 | - name: org
56 | - name: source
57 | default: ""
58 | artifacts:
59 | - name: sld
60 | path: /tmp/sld.txt
61 | script:
62 | image: logstash:7.17.3
63 | command:
64 | - sh
65 | env:
66 | - name: ORG
67 | value: {{`"{{inputs.parameters.org}}"`}}
68 | - name: SOURCE
69 | value: {{`"{{inputs.parameters.source}}"`}}
70 | source: |
71 | logstash --api.enabled false -w 1 --log.level error -f /tmp/logstash/save.conf
72 | volumeMounts:
73 | - name: logstash-config-sld
74 | mountPath: /tmp/logstash
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/util/cidr.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 |   name: util-asset
5 | spec:
6 |   templates:
7 |     - name: mapcidr
8 |       inputs:
9 |         parameters:
10 |           - name: cidr
11 |             description: "CIDR to map, eg 173.0.84.0/24"
12 |       outputs:
13 |         artifacts:
14 |           - name: ip-result
15 |             path: /tmp/cidr.txt
16 |       script:
17 |         image: projectdiscovery/mapcidr:v1.0.3
18 |         command:
19 |           - "/bin/sh"
20 |         source: |
21 |           mapcidr -cidr {{`{{inputs.parameters.cidr}}`}} -skip-base -skip-broadcast -silent -o /tmp/cidr.txt
22 |           cat /tmp/cidr.txt
23 |
24 |     - name: compute-cidr-aggregate-approx
25 |       inputs:
26 |         artifacts:
27 |           - name: compute-cidr-input
28 |             path: /tmp/cidr.txt
29 |             description: "Aggregate sparse IPs/CIDRs into minimum approximated subnet"
30 |       outputs:
31 |         artifacts:
32 |           - name: result
33 |             path: /tmp/cidr.txt
34 |         parameters:
35 |           - name: count
36 |             valueFrom:
37 |               path: /tmp/count.txt
38 |       script:
39 |         image: projectdiscovery/mapcidr:v1.0.3
40 |         command:
41 |           - "/bin/sh"
42 |         source: |
43 |           # aggregate into a temp file first: pointing -cidr and -o at the same
44 |           # path risks the output truncating the input before it is fully read
45 |           mapcidr -cidr /tmp/cidr.txt -aa -o /tmp/cidr.agg.txt
46 |           mv /tmp/cidr.agg.txt /tmp/cidr.txt
47 |           cat /tmp/cidr.txt
48 |           wc -l /tmp/cidr.txt | awk '{print $1}' > /tmp/count.txt
49 |
50 |     - name: print-file-content
51 |       inputs:
52 |         artifacts:
53 |           - name: file
54 |             path: /tmp/file.txt
55 |             description: "input file"
56 |       script:
57 |         image: ubuntu:lunar-20221207
58 |         command:
59 |           - "/bin/sh"
60 |         source: |
61 |           cat /tmp/file.txt
62 |
63 |     - name: mapcidr-from-file
64 |       inputs:
65 |         artifacts:
66 |           - name: cidr
67 |             description: "CIDR file to map, eg 173.0.84.0/24"
68 |             path: /tmp/cidr.txt
69 |       outputs:
70 |         artifacts:
71 |           - name: ip-result
72 |             path: /tmp/ip.txt
73 |       script:
74 |         image: projectdiscovery/mapcidr:v1.0.3
75 |         command:
76 |           - "/bin/sh"
77 |         source: |
78 |           mapcidr -cidr /tmp/cidr.txt -skip-base -skip-broadcast -silent -o /tmp/ip.txt
79 |           cat /tmp/ip.txt
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/cron/org/config.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ConfigMap
3 | metadata:
4 | name: logstash-config-update-org-asset-map
5 | data:
6 | update-org-asset-map.conf: |
7 | input {
8 | file {
9 | path => "/tmp/logstash/org.txt"
10 | codec => "json"
11 | start_position => "beginning"
12 | sincedb_path => "/dev/null"
13 | exit_after_read => true
14 | mode => "read"
15 | file_chunk_size => 3145728 # 3MB
16 | }
17 | }
18 |
19 | filter {
20 | http {
21 | url => "http://elasticsearch-master:9200/%{index}/_update_by_query"
22 | verb => "POST"
23 | body_format => "json"
24 | socket_timeout => 60
25 | body => {
26 | "script" => {
27 | "lang" => "painless"
28 | "source" => "
29 | ArrayList org;
30 |
31 | if (ctx._source.org != null) {
32 | org = ctx._source.org
33 | } else {
34 | org = new ArrayList();
35 | }
36 | org.add(params.org);
37 |
38 | // https://www.geeksforgeeks.org/how-to-remove-duplicates-from-arraylist-in-java/
39 | LinkedHashSet newOrg = new LinkedHashSet();
40 | newOrg.addAll(org);
41 | org.clear();
42 | org.addAll(newOrg);
43 |
44 | ctx._source.org=org;
45 | ctx._source.org_num=org.size();
46 | "
47 | "params" => {
48 | "org" => "%{org}"
49 | "index" => "%{index}"
50 | }
51 | }
52 | "query" => {
53 | "query_string" => {
54 | "query" => "%{query}"
55 | }
56 | }
57 | }
58 | }
59 |
60 | sleep {
61 | # sleep 2 second for every event.
62 | time => "2"
63 | }
64 | }
65 |
66 | org.txt: |
67 | { "org": "百度", "query": "subject_org:baidu OR subject_org:百度", "index": "tls" }
68 | { "org": "百度", "query": "title:baidu OR title:百度", "index": "web-service" }
69 | { "org": "百度", "query": "response-body:baidu OR response-body:百度", "index": "web-service" }
70 | { "org": "百度", "query": "parsed-domain.registered_domain:baidu.com", "index": "web-service" }
71 | { "org": "百度", "query": "asn.as-name:baidu OR asn.as-name:百度", "index": "web-service" }
72 | { "org": "百度", "query": "parsed-domain.registered_domain.keyword :*baidu*", "index": "subdomain,port,web-service,proxify" }
73 |
--------------------------------------------------------------------------------
/templates/elk/pipeline.yaml:
--------------------------------------------------------------------------------
1 | # to manage elasticsearch, bash code is better than python, easier to debug and read
2 | # that's why we use bash for the elasticsearch management
3 |
4 | apiVersion: argoproj.io/v1alpha1
5 | kind: WorkflowTemplate
6 | metadata:
7 | name: service-elasticsearch-pipeline
8 | spec:
9 | templates:
10 | - name: create-and-set-default-pipeline
11 | script:
12 | image: alpine/curl:3.14
13 | command: [sh]
14 | source: |
15 | #!/bin/sh
16 |
17 | echo "Creating pipeline"
18 |
19 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/_ingest/pipeline/subdomain-parse-domain" -H 'Content-Type: application/json' -d '
20 | {
21 | "processors" : [
22 | {
23 | "registered_domain" : {
24 | "field" : "host",
25 | "target_field" : "parsed-domain",
26 | "ignore_failure" : true
27 | }
28 | }
29 | ]
30 | }'
31 |
32 | echo "Creating index"
33 |
34 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/subdomain"
35 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/port"
36 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/proxify"
37 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/tls"
38 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/web-service"
39 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/sld"
40 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/xray"
41 |
42 | echo "Setting default pipeline"
43 |
44 | # set the "subdomain-parse-domain" pipeline as default pipeline
45 | curl -s -X PUT "{{.Values.kibana.elasticsearchHosts}}/subdomain,port,tls,sld/_settings" -H 'Content-Type: application/json' -d '
46 | {
47 | "index": {
48 | "default_pipeline": "subdomain-parse-domain"
49 | }
50 | }
51 | '
52 |
53 | {{/* # set mapping*/}}
54 | {{/* '*/}}
55 | {{/* {*/}}
56 | {{/* "properties": {*/}}
57 | {{/* "request": {*/}}
58 | {{/* "type": "keyword"*/}}
59 | {{/* },*/}}
60 | {{/* "response": {*/}}
61 | {{/* "type": "keyword"*/}}
62 | {{/* },*/}}
63 | {{/* "timestamp": {*/}}
64 | {{/* "type": "date"*/}}
65 | {{/* }*/}}
66 | {{/* }*/}}
67 | {{/* }*/}}
68 | {{/* '*/}}
69 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/url/get-web-service.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: asset-level1-web-service
5 | spec:
6 | templates:
7 | - name: get-urls-from-es
8 | inputs:
9 | parameters:
10 | - name: domain
11 | description: rootdomain to search for, e.g. example.com
12 | outputs:
13 | artifacts:
14 | - name: result
15 | path: /tmp/result
16 | script:
17 | image: leveryd/x-tool:v2023.1.16
18 | command:
19 | - 'sh'
20 | source: |
21 | x es -esURL http://elasticsearch-master:9200 -index web-service -num 10000 -q 'parsed-domain.registered_domain:"{{`{{inputs.parameters.domain}}`}}"' -source url -of /tmp/result
22 |
23 | - name: get-website-from-es
24 | inputs:
25 | parameters:
26 | - name: domain
27 | description: rootdomain to search for, e.g. example.com
28 | - name: min_port
29 | description: min port to search for, e.g. 8000
30 | - name: max_port
31 | description: max port to search for, e.g. 9000
32 | outputs:
33 | artifacts:
34 | - name: result
35 | path: /tmp/result
36 | script:
37 | image: leveryd/x-tool:v2023.1.16
38 | command:
39 | - 'sh'
40 | source: |
41 | #query='status-code:200 AND content-type:"html" AND words:>10 AND parsed-domain.registered_domain:"{{`{{inputs.parameters.domain}}`}}"'
42 | #query=$query' AND port:>={{`{{inputs.parameters.min_port}}`}} AND port:<={{`{{inputs.parameters.max_port}}`}}'
43 | #echo $query
44 | x es -esURL http://elasticsearch-master:9200 -index web-service -num 10000 -q 'status-code:200 AND content-type:"html" AND words:>10 AND parsed-domain.registered_domain:"{{`{{inputs.parameters.domain}}`}}" AND port:>={{`{{inputs.parameters.min_port}}`}} AND port:<={{`{{inputs.parameters.max_port}}`}}' -source url -of /tmp/result
45 |
46 | - name: get-urls-from-es-by-query
47 | inputs:
48 | parameters:
49 | - name: q
50 | description: "es query"
51 | outputs:
52 | artifacts:
53 | - name: result
54 | path: /tmp/urls.txt
55 | script:
56 | image: leveryd/x-tool:v2023.1.16
57 | command:
58 | - 'sh'
59 | source: |
60 | x es -esURL http://elasticsearch-master:9200 -index web-service -num 10000 -q '{{`{{inputs.parameters.q}}`}}' -source url -of /tmp/urls.txt
61 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/second-level-domain/probe-org-sld.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level1-sld-from-org
5 | spec:
6 | templates:
7 | - name: enscan
8 | inputs:
9 | parameters:
10 | - name: keyword
11 | description: "enscan工具支持pid或者关键字列表"
12 | - name: extra_args
13 | description: "eg --is-pid"
14 | default: ""
15 | steps:
16 | - - name: enscan
17 | arguments:
18 | parameters:
19 | - name: keyword
20 | value: {{`"{{inputs.parameters.keyword}}"`}}
21 | - name: extra_args
22 | value: {{`"{{inputs.parameters.extra_args}}"`}}
23 | template: x-enscan
24 | - - name: save
25 | arguments:
26 | artifacts:
27 | - name: result
28 | from: {{`"{{steps.enscan.outputs.artifacts.result}}"`}}
29 | template: x-save
30 | - name: x-enscan
31 | inputs:
32 | parameters:
33 | - name: keyword
34 | - name: extra_args
35 | default: ""
36 | outputs:
37 | artifacts:
38 | - name: result
39 | path: /tmp/outs
40 | script:
41 | image: docker/whalesay:latest
42 | command: [ "bash" ]
43 | source: |
44 | cp /tmp/tools/enscan/enscan-0.0.10-linux-amd64 /bin/enscan
45 | enscan -v
46 |
47 | cat <<< '
48 | {{`{{inputs.parameters.keyword}}`}}
49 | ' > /tmp/query.txt
50 |
51 | enscan -field icp -f /tmp/query.txt -json -o /tmp/outs/ -no-merge {{`{{inputs.parameters.extra_args}}`}}
52 | volumeMounts:
53 | - name: tools
54 | mountPath: /tmp/tools
55 | volumes:
56 | - name: tools
57 | persistentVolumeClaim:
58 | claimName: tools
59 |
60 | - name: x-save
61 | volumes:
62 | - name: logstash-config-sld
63 | configMap:
64 | name: logstash-config-sld
65 | inputs:
66 | artifacts:
67 | - name: result
68 | path: /tmp/outs
69 | outputs:
70 | artifacts:
71 | - name: sld
72 | path: /tmp/sld
73 | script:
74 | image: logstash:7.17.3
75 | command: ["sh"]
76 | source: |
77 | logstash --api.enabled false -w 1 --log.level error -f /tmp/logstash/save-enscan.conf
78 | volumeMounts:
79 | - name: logstash-config-sld
80 | mountPath: /tmp/logstash
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/second-level-domain/get-from-console.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: get-asset-level1-sld-domain-from-console
5 | spec:
6 | templates:
7 | - name: from-console-api
8 | inputs:
9 | parameters:
10 | - name: query
11 | value: limit=10000&offset=0 # 默认获取1w条数据
12 | script:
13 | image: python:3.8
14 | command:
15 | - python
16 | source: |
17 | import json
18 | import sys
19 | import urllib.parse
20 | import urllib.request
21 | from urllib.parse import quote
22 |
23 | query = "{{`{{inputs.parameters.query}}`}}"
24 | query = quote(query, safe='/=&')
25 |
26 | url = 'http://{{.Values.console_api_service_name}}:{{.Values.console_api_service_port}}/api/info/brotherdomain/query?'+query
27 | request = urllib.request.Request(url)
28 | response = urllib.request.urlopen(request).read()
29 |
30 | domains = response.decode()
31 |
32 | domains = json.loads(domains)
33 | ret = [domain['rootdomain'] for domain in domains['rows']]
34 | json.dump(ret, sys.stdout)
35 |
36 | - name: from-db
37 | inputs:
38 | parameters:
39 | - name: sql
40 | value: "select rootdomain from brotherdomain where corpname in (select corpname from corp where corelevel='1级') and accurate='是';"
41 | description: "sql语句, 注意: 最好使用单引号, 双引号需要自己转义"
42 | container:
43 | image: mysql:5.7
44 | command:
45 | - 'sh'
46 | args:
47 | - "-c"
48 | - |
49 | echo "export lang=UTF8" > sql.sh
50 | echo "mysql -s -h mysql-service -u root --password={{.Values.console_db_root_password}} -e \"use cute;set character_set_client=utf8;set character_set_connection=utf8;set character_set_results=utf8;set character_set_server=utf8;{{`{{inputs.parameters.sql}}`}}\" > /tmp/result 2>/dev/null" >> sql.sh
51 |
52 | echo "sed -i '1d' /tmp/result" >> sql.sh
53 | #echo "printf \"'\"" >> sql.sh
54 | echo "printf '['" >> sql.sh
55 | echo "result=\`cat /tmp/result\`" >> sql.sh
56 | echo "for i in \${result[@]}" >> sql.sh
57 | echo "do" >> sql.sh
58 | echo "printf '\"'\$i'\",'" >> sql.sh
59 | echo "done" >> sql.sh
60 | echo "printf '\"\"'" >> sql.sh
61 | #echo "echo \"\"" >> sql.sh
62 | echo "printf ']'" >> sql.sh
63 | echo "echo \"\"" >> sql.sh
64 | bash sql.sh
65 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/port/probe-port.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level1-port
5 | spec:
6 | arguments:
7 | parameters:
8 | - name: passive
9 | description: "passive mode (default: false)"
10 | default: "false"
11 | enum:
12 | - "true"
13 | - "false"
14 | templates:
15 | - name: port-scan
16 | inputs:
17 | artifacts:
18 | - name: hosts
19 | parameters:
20 | - name: port-range
21 | default: "8000-9000"
22 | outputs:
23 | artifacts:
24 | - name: result # every line in the result file will be like "host:port"
25 | from: {{`"{{steps.result-to-es.outputs.artifacts.result}}"`}}
26 | steps:
27 | - - name: naabu
28 | arguments:
29 | parameters:
30 | - name: port-range
31 | value: {{`"{{inputs.parameters.port-range}}"`}}
32 | artifacts:
33 | - name: hosts
34 | from: {{`"{{inputs.artifacts.hosts}}"`}}
35 | template: port-scan-no-save
36 | - - name: result-to-es
37 | arguments:
38 | artifacts:
39 | - name: port-result
40 | from: {{`"{{steps.naabu.outputs.artifacts.result}}"`}}
41 | templateRef:
42 | name: level1-logstash
43 | template: probe-port
44 |
45 | - name: port-scan-no-save
46 | retryStrategy: # because '-ec' options maybe fail
47 | limit: "2"
48 | retryPolicy: "Always"
49 | inputs:
50 | artifacts:
51 | - name: hosts
52 | path: /tmp/hosts
53 | parameters:
54 | - name: port-range
55 | default: "8000-9000"
56 | outputs:
57 | artifacts:
58 | - name: result # every line in the result file will be like "host:port"
59 | path: /tmp/result
60 | script:
61 | image: projectdiscovery/naabu:v2.1.1
62 | command:
63 | - 'sh'
64 | source: |
65 | num=`wc -l /tmp/hosts | awk '{print $1}'`
66 | if [ "$num" = "0" ]; then
67 | echo "no hosts"
68 | touch /tmp/result
69 | exit 0
70 | fi
71 |
72 | if [ "{{`{{workflow.parameters.passive}}`}}" = "true" ]
73 | then
74 | naabu -l /tmp/hosts -r 114.114.114.114,8.8.8.8 -ec -passive -json -o /tmp/result # 使用passive时-p参数、-stats无效
75 | else
76 | naabu -l /tmp/hosts -r 114.114.114.114,8.8.8.8 -ec -stats -p {{`{{inputs.parameters.port-range}}`}} -json -o /tmp/result
77 | fi
78 |
--------------------------------------------------------------------------------
/templates/xray/xray-proxy.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | apiVersion: apps/v1
3 | kind: Deployment
4 | metadata:
5 | name: xray-proxy
6 | spec:
7 | replicas: 1
8 | template:
9 | metadata:
10 | labels:
11 | app: xray-proxy
12 | spec:
13 | securityContext:
14 | runAsUser: 0
15 | runAsGroup: 0
16 | containers:
17 | - name: proxy
18 | image: leveryd/xray:v2023.2.21
19 | ports:
20 | - containerPort: 80
21 | command: ["/bin/bash", "-c", "/tools/xray webscan --listen 0.0.0.0:80 --webhook-output http://{{.Values.xray_webhook_service}}:{{.Values.xray_webhook_port}}/webhook --json-output /xray-output/xray.json --html-output /xray-output/xray.html"]
22 | resources:
23 | requests:
24 | cpu: "10m"
25 | limits:
26 | cpu: "3"
27 | volumeMounts:
28 | - mountPath: /tools/ca.crt
29 | subPath: ca.crt
30 | name: ca-crt
31 | - mountPath: /tools/ca.key
32 | subPath: ca.key
33 | name: ca-key
34 | - mountPath: /tools/config.yaml
35 | subPath: config.yaml
36 | name: config
37 | - mountPath: /xray-output
38 | name: xray-output
39 | - name: logstash
40 | image: logstash:7.17.3
41 | command: ["sh", "-c", "logstash --api.enabled false -w 1 --log.level error -f /xray-logstash/logstash.conf"]
42 | volumeMounts:
43 | - mountPath: /xray-logstash
44 | name: xray-output-logstash-config
45 | - mountPath: /xray-output
46 | name: xray-output
47 | volumes:
48 | - name: ca-crt
49 | configMap:
50 | name: xray-proxy-ca
51 | items:
52 | - key: ca.crt
53 | path: ca.crt
54 | - name: ca-key
55 | configMap:
56 | name: xray-proxy-ca
57 | items:
58 | - key: ca.key
59 | path: ca.key
60 | - name: config
61 | configMap:
62 | name: xray-proxy-config
63 | items:
64 | - key: config.yaml
65 | path: config.yaml
66 | - name: xray-output-logstash-config
67 | configMap:
68 | name: xray-output-logstash-config
69 | - name: xray-output
70 | emptyDir: {}
71 |
72 | selector:
73 | matchLabels:
74 | app: xray-proxy
75 | ---
76 | apiVersion: v1
77 | kind: Service
78 | metadata:
79 | name: {{.Values.xray_proxy_service}}
80 | spec:
81 | selector:
82 | app: xray-proxy
83 | ports:
84 | - protocol: TCP
85 | port: {{.Values.xray_proxy_port}}
86 | targetPort: 80
87 | nodePort: {{.Values.xray_proxy_nodeport}}
88 | type: NodePort
89 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level2/domain-ms.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level2-domain-ms
5 | spec:
6 | templates:
7 | - name: 获取子域名-ms扫描-保存结果
8 | inputs:
9 | parameters:
10 | - name: domain
11 | description: "域名 例如: apple.com"
12 | - name: get-subdomain-way
13 | description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
14 | default: "subfinder,es,mysql"
15 | - name: save-subdomain-result
16 | description: "是否保存子域名结果"
17 | default: "false"
18 | enum: ["true", "false"]
19 | - name: port-range
20 | description: "探测web服务的端口范围 例如: http:80,https:443"
21 | default: "http:80,https:443"
22 | steps:
23 | - - name: call-get-subdomains-save
24 | templateRef:
25 | name: probe-asset-level2-subdomain
26 | template: get-subdomains-save
27 | arguments:
28 | parameters:
29 | - name: domain
30 | value: {{`'{{inputs.parameters.domain}}'`}}
31 | - name: get-subdomain-way
32 | value: {{`'{{inputs.parameters.get-subdomain-way}}'`}}
33 | - name: save-subdomain-result
34 | value: {{`'{{inputs.parameters.save-subdomain-result}}'`}}
35 | - - name: call-httpx
36 | templateRef:
37 | name: level1-httpx
38 | template: probe-from-file
39 | arguments:
40 | parameters:
41 | - name: port-range
42 | value: {{`'{{inputs.parameters.port-range}}'`}}
43 | artifacts:
44 | - name: hosts
45 | from: {{`"{{workflow.outputs.artifacts.subdomains}}"`}}
46 | - - name: call-level1-identify-ms-from-url-file
47 | templateRef:
48 | name: level1-identify-ms
49 | template: level1-identify-ms-from-url-file
50 | arguments:
51 | artifacts:
52 | - name: urls
53 | from: {{`'{{steps.call-httpx.outputs.artifacts.urls}}'`}}
54 | - - name: call-level1-screenshot-from-urls
55 | templateRef:
56 | name: level1-screenshot
57 | template: level1-screenshot-from-url-file
58 | arguments:
59 | artifacts:
60 | - name: urls
61 | from: {{`'{{steps.call-level1-identify-ms-from-url-file.outputs.artifacts.result}}'`}}
62 | - - name: call-save-ms-screenshot
63 | templateRef:
64 | name: level1-save-alarm
65 | template: save-ms-screenshot
66 | arguments:
67 | artifacts:
68 | - name: result
69 | from: {{`'{{steps.call-level1-screenshot-from-urls.outputs.artifacts.result}}'`}}
70 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/httpx.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level1-httpx
5 | spec:
6 | arguments: {}
7 | templates:
8 | - name: probe-80-443-from-file # 只被katana调用
9 | inputs:
10 | artifacts:
11 | - name: hosts
12 | path: /tmp/hosts
13 | outputs:
14 | artifacts:
15 | - name: urls
16 | path: /tmp/result
17 | container:
18 | image: projectdiscovery/httpx:v1.2.4
19 | command:
20 | - 'sh'
21 | args:
22 | - "-c"
23 | - "httpx -stats -l /tmp/hosts -p http:80,https:443 -fc 404,403,500,301,302 -ct -o /tmp/1 && grep -i html /tmp/1 | awk '{print $1}' > /tmp/result"
24 |
25 | - name: probe-from-file
26 | inputs:
27 | artifacts:
28 | - name: hosts
29 | path: /tmp/hosts
30 | parameters:
31 | - name: port-range
32 | value: "http:80,https:443"
33 | outputs:
34 | artifacts:
35 | - name: urls
36 | path: /tmp/result
37 | container:
38 | image: projectdiscovery/httpx:v1.2.4
39 | command:
40 | - 'sh'
41 | args:
42 | - "-c"
43 | - "httpx -stats -l /tmp/hosts -p {{`{{inputs.parameters.port-range}}`}} -fc 404,403,500 -ct -o /tmp/1 && grep -i html /tmp/1 | awk '{print $1}' > /tmp/result"
44 |
45 | - name: probe-from-file-no-port
46 | inputs:
47 | artifacts:
48 | - name: hosts
49 | path: /tmp/hosts
50 | outputs:
51 | artifacts:
52 | - name: urls
53 | path: /tmp/result
54 | container:
55 | image: projectdiscovery/httpx:v1.2.4
56 | command:
57 | - 'sh'
58 | args:
59 | - "-c"
60 | - "httpx -stats -l /tmp/hosts -ec -o /tmp/result"
61 |
62 | - name: probe-all-from-file-no-port
63 | inputs:
64 | artifacts:
65 | - name: hosts
66 | path: /tmp/hosts
67 | outputs:
68 | artifacts:
69 | - name: urls
70 | path: /tmp/result
71 | container:
72 | image: projectdiscovery/httpx:v1.2.4
73 | command:
74 | - 'sh'
75 | args:
76 | - "-c"
77 | - "httpx -stats -fc 0 -l /tmp/hosts -sc -cl -ct -location -lc -wc -rt -title -server -td -ip -cname -asn -cdn -probe -json -ec -o /tmp/result"
78 |
79 | - name: probe-favicon-from-urlfile
80 | inputs:
81 | artifacts:
82 | - name: hosts
83 | path: /tmp/hosts
84 | outputs:
85 | artifacts:
86 | - name: result
87 | path: /tmp/favicon.txt
88 | container:
89 | image: projectdiscovery/httpx:v1.2.7
90 | command:
91 | - 'sh'
92 | args:
93 | - "-c"
94 | - "httpx -stats -fc 0 -l /tmp/hosts -sc -probe -irrb -ec -path favicon.ico -json -o /tmp/favicon.txt"
95 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level3/probe-cidr-webservice.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level3-webservice-from-cidr
5 | spec:
6 | arguments:
7 | parameters:
8 | - name: passive
9 | description: "passive mode (default: false)"
10 | default: "false"
11 | enum:
12 | - "true"
13 | - "false"
14 | volumes:
15 | - name: logstash-config-map
16 | configMap:
17 | name: logstash
18 | - name: logstash-config-cidr
19 | configMap:
20 | name: logstash-config-cidr
21 | templates:
22 | - name: es查询后聚合ip生成cidr列表-探测web服务-保存结果
23 | inputs:
24 | parameters:
25 | - name: q
26 | description: 'elasticsearch query, e.g. "*"'
27 | default: "*"
28 | - name: port-range
29 | description: "探测web服务的端口范围"
30 | default: "80,443,8000-9000"
31 |
32 | steps:
33 | - - name: call-get-cidr-from-es-by-compute-ip
34 | arguments:
35 | parameters:
36 | - name: q
37 | value: {{`'{{inputs.parameters.q}}'`}}
38 | templateRef:
39 | name: get-asset-level1-cidr
40 | template: aggregate-ip-from-es
41 | - - name: call-probe-web-service
42 | arguments:
43 | artifacts:
44 | - name: cidr
45 | from: {{`"{{steps.call-get-cidr-from-es-by-compute-ip.outputs.artifacts.result}}"`}}
46 | parameters:
47 | - name: port-range
48 | value: {{`'{{inputs.parameters.port-range}}'`}}
49 | templateRef:
50 | name: level2-probe-web-service-from-ip-segment
51 | template: 根据cidr文件获取ip列表-端口扫描-存储结果
52 |
53 | - name: es查询as-range后生成cidr列表-探测web服务-保存结果
54 | inputs:
55 | parameters:
56 | - name: q
57 | description: 'elasticsearch query, e.g. "*"'
58 | default: "*"
59 | - name: port-range
60 | description: "探测web服务的端口范围"
61 | default: "80,443,8000-9000"
62 | steps:
63 | - - name: call-get-cidr-from-es
64 | arguments:
65 | parameters:
66 | - name: q
67 | value: {{`'{{inputs.parameters.q}}'`}}
68 | templateRef:
69 | name: get-asset-level1-cidr
70 | template: asn-range-from-es
71 | - - name: call-probe-web-service
72 | arguments:
73 | artifacts:
74 | - name: cidr
75 | from: {{`"{{steps.call-get-cidr-from-es.outputs.artifacts.result}}"`}}
76 | parameters:
77 | - name: port-range
78 | value: {{`'{{inputs.parameters.port-range}}'`}}
79 | templateRef:
80 | name: level2-probe-web-service-from-ip-segment
81 | template: 根据cidr文件获取ip列表-端口扫描-存储结果
82 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/logstash/logstash.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level1-logstash
5 | spec:
6 | templates:
7 | - name: httpx2elasticsearch
8 | volumes:
9 | - name: logstash-config-map
10 | configMap:
11 | name: logstash
12 | inputs:
13 | artifacts:
14 | - name: httpx-result
15 | path: /tmp/httpx-result.txt
16 | script:
17 | image: logstash:7.17.3
18 | command:
19 | - 'sh'
20 | source: |
21 | logstash -f /tmp/logstash/httpx.conf
22 | volumeMounts:
23 | - name: logstash-config-map
24 | mountPath: /tmp/logstash
25 |
26 | - name: httpx-probe-favicon
27 | volumes:
28 | - name: logstash-config-map
29 | configMap:
30 | name: logstash
31 | inputs:
32 | artifacts:
33 | - name: httpx-result
34 | path: /tmp/httpx-result.txt
35 | script:
36 | image: logstash:7.17.3
37 | command:
38 | - 'sh'
39 | source: |
40 | logstash -f /tmp/logstash/probe_favicon.conf
41 | volumeMounts:
42 | - name: logstash-config-map
43 | mountPath: /tmp/logstash
44 |
45 | - name: probe-tls
46 | volumes:
47 | - name: logstash-config-map
48 | configMap:
49 | name: logstash
50 | inputs:
51 | artifacts:
52 | - name: tls-result
53 | path: /tmp/tls-result.txt
54 | script:
55 | image: logstash:7.17.3
56 | command:
57 | - 'sh'
58 | source: |
59 | logstash -f /tmp/logstash/probe_tls.conf
60 | volumeMounts:
61 | - name: logstash-config-map
62 | mountPath: /tmp/logstash
63 |
64 | - name: probe-port
65 | volumes:
66 | - name: logstash-config-map
67 | configMap:
68 | name: logstash
69 | inputs:
70 | artifacts:
71 | - name: port-result
72 | path: /tmp/port-result.txt
73 | outputs:
74 | artifacts:
75 | - name: result
76 | path: /tmp/url.txt
77 | script:
78 | image: logstash:7.17.3
79 | command:
80 | - 'sh'
81 | source: |
82 | logstash -f /tmp/logstash/probe_port.conf
83 | volumeMounts:
84 | - name: logstash-config-map
85 | mountPath: /tmp/logstash
86 |
87 | - name: subfinder
88 | volumes:
89 | - name: logstash-config-map
90 | configMap:
91 | name: logstash
92 | inputs:
93 | artifacts:
94 | - name: subdomain-result
95 | path: /tmp/subdomain-result.txt
96 | outputs:
97 | artifacts:
98 | - name: result
99 | path: /tmp/subdomain.txt
100 | script:
101 | image: logstash:7.17.3
102 | command:
103 | - 'sh'
104 | source: |
105 | logstash -f /tmp/logstash/subfinder.conf
106 | volumeMounts:
107 | - name: logstash-config-map
108 | mountPath: /tmp/logstash
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/util/install-tools/install-tool.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: util-install-tool
5 | spec:
6 | entrypoint: install-tool
7 | templates:
8 | - name: install-tool
9 | inputs:
10 | parameters:
11 | - name: tool-name
12 | description: "tools to install, eg 'fofax,enscan,crawlergo,chromium'"
13 | default: "fofax,enscan,crawlergo,chromium"
14 | - name: suspend
15 | description: "suspend for debug after install"
16 | default: "true"
17 | enum:
18 | - "true"
19 | - "false"
20 | script:
21 | image: busybox:glibc
22 | command:
23 | - 'sh'
24 | source: |
25 | result=$(echo {{`{{inputs.parameters.tool-name}}`}} | grep "fofax")
26 | if [[ "$result" != "" ]]
27 | then
28 | wget https://github.com/xiecat/fofax/releases/download/v0.1.42/fofax_v0.1.42_linux_amd64.tar.gz
29 | mkdir -p /tmp/tools/fofax
30 | tar -xvf fofax_v0.1.42_linux_amd64.tar.gz -C /tmp/tools/fofax
31 | rm fofax_v0.1.42_linux_amd64.tar.gz
32 | ls -al /tmp/tools/fofax
33 | else
34 | echo "ignore install fofax"
35 | fi
36 |
37 | result=$(echo {{`{{inputs.parameters.tool-name}}`}} | grep "enscan")
38 | if [[ "$result" != "" ]]
39 | then
40 | wget https://github.com/wgpsec/ENScan_GO/releases/download/0.0.10/enscan-0.0.10-linux-amd64.tar.gz
41 | mkdir -p /tmp/tools/enscan
42 | tar -xvf enscan-0.0.10-linux-amd64.tar.gz -C /tmp/tools/enscan
43 | rm enscan-0.0.10-linux-amd64.tar.gz
44 | ls -al /tmp/tools/enscan
45 | else
46 | echo "ignore install enscan"
47 | fi
48 |
49 | result=$(echo {{`{{inputs.parameters.tool-name}}`}} | grep "crawlergo")
50 | if [[ "$result" != "" ]]
51 | then
52 | wget https://github.com/Qianlitp/crawlergo/releases/download/v0.4.4/crawlergo_linux_amd64
53 | mkdir -p /tmp/tools/crawlergo
54 | mv crawlergo_linux_amd64 /tmp/tools/crawlergo/crawlergo
55 | chmod +x /tmp/tools/crawlergo/crawlergo
56 | ls -al /tmp/tools/crawlergo
57 | else
58 | echo "ignore install crawlergo"
59 | fi
60 |
61 | result=$(echo {{`{{inputs.parameters.tool-name}}`}} | grep "chromium")
62 | if [[ "$result" != "" ]]; then  # fix: the if-guard was missing, leaving a bare `then` (shell syntax error that aborted the whole script)
63 | wget https://registry.npmmirror.com/-/binary/chromium-browser-snapshots/Linux_x64/1108766/chrome-linux.zip
64 | unzip chrome-linux.zip -d /tmp/tools/crawlergo/
65 | rm chrome-linux.zip
66 | ls -al /tmp/tools/crawlergo/chrome-linux
67 | else
68 | echo "ignore install chromium"
69 | fi
70 |
71 | result=$(echo {{`{{inputs.parameters.suspend}}`}} | grep "true")
72 | if [[ "$result" != "" ]]
73 | then
74 | echo "suspend"
75 | tail -f /dev/null
76 | else
77 | echo "not suspend"
78 | fi
79 |
80 | volumeMounts:
81 | - name: tools
82 | mountPath: /tmp/tools
83 | volumes:
84 | - name: tools
85 | persistentVolumeClaim:
86 | claimName: tools
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/cidr/get-cidr.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: get-asset-level1-cidr
5 | spec:
6 | volumes:
7 | - name: logstash-config-cidr
8 | configMap:
9 | name: logstash-config-cidr
10 | templates:
11 | - name: asn-range-from-es
12 | inputs:
13 | parameters:
14 | - name: q
15 | description: 'elasticsearch query, e.g. "*"'
16 | default: "*"
17 | outputs:
18 | artifacts:
19 | - name: result
20 | path: /tmp/asn.txt
21 | parameters:
22 | - name: count
23 | valueFrom:
24 | path: /tmp/count.txt
25 | script:
26 | image: logstash:7.17.3
27 | command:
28 | - 'sh'
29 | source: |
30 | cp /tmp/logstash/* /tmp/
31 | sed /tmp/asn.conf -i -e "s/INPUT_QUERY/{{`{{inputs.parameters.q}}`}}/g"
32 | logstash -f /tmp/asn.conf
33 |
34 | cat /tmp/asn.txt | sort | uniq > /tmp/asn.txt.tmp
35 | mv /tmp/asn.txt.tmp /tmp/asn.txt
36 |
37 | wc -l /tmp/asn.txt | awk '{print $1}' > /tmp/count.txt
38 | volumeMounts:
39 | - name: logstash-config-cidr
40 | mountPath: /tmp/logstash
41 |
42 | - name: aggregate-ip-from-es
43 | inputs:
44 | parameters:
45 | - name: q
46 | description: 'elasticsearch query, e.g. "*"'
47 | default: "*"
48 | outputs:
49 | artifacts:
50 | - name: result
51 | fromExpression: "steps.cidr.outputs.artifacts.result"
52 | parameters:
53 | - name: count
54 | valueFrom:
55 | expression: "steps.cidr.outputs.parameters.count"
56 | steps:
57 | - - name: get-ips
58 | arguments:
59 | parameters:
60 | - name: q
61 | value: '{{`{{inputs.parameters.q}}`}}'
62 | templateRef:
63 | name: get-asset-level1-ip
64 | template: get-ip-from-es-by-query
65 | - - name: cidr # name can not be "get-cidr", i don't know why
66 | arguments:
67 | artifacts:
68 | - name: compute-cidr-input
69 | from: '{{`{{steps.get-ips.outputs.artifacts.result}}`}}'
70 | templateRef:
71 | name: util-asset
72 | template: compute-cidr-aggregate-approx
73 |
74 | {{/* - name: query*/}}
75 | {{/* inputs:*/}}
76 | {{/* parameters:*/}}
77 | {{/* - name: q*/}}
78 | {{/* description: 'elasticsearch query, e.g. "*"'*/}}
79 | {{/* default: "*"*/}}
80 | {{/* outputs:*/}}
81 | {{/* artifacts:*/}}
82 | {{/* - name: result*/}}
83 | {{/* path: /tmp/cidr.txt*/}}
84 | {{/* parameters:*/}}
85 | {{/* - name: count*/}}
86 | {{/* valueFrom:*/}}
87 | {{/* path: /tmp/count.txt*/}}
88 | {{/* script:*/}}
89 | {{/* image: leveryd/x-tool:v2023.1.16*/}}
90 | {{/* command:*/}}
91 | {{/* - 'sh'*/}}
92 | {{/* source: |*/}}
93 | {{/* x es -esURL http://elasticsearch-master:9200 -index web-service -num 10000 -q '{{`{{inputs.parameters.q}}`}}' -source "asn.as-number" -of /tmp/cidr.txt*/}}
94 | {{/* wc -l /tmp/cidr.txt | awk '{print $1}' > /tmp/count.txt*/}}
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/port/fofa.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: get-asset-level1-fofa
5 | spec:
6 | arguments:
7 | parameters:
8 | - name: fofa_key
9 | default: "{{.Values.fofa_key}}"
10 | description: "fofa key, can be specified when operator install asm instance"
11 | - name: fofa_email
12 | default: "{{.Values.fofa_email}}"
13 | description: "fofa email, can be specified when operator install asm instance"
14 | - name: query
15 | description: "multiple fofa query, do not use single quotes"
16 | - name: size
17 | default: 10
18 | description: "number of results to return"
19 | templates:
20 | - name: fofa-client-no-save
21 | outputs:
22 | parameters:
23 | - name: count
24 | valueFrom:
25 | path: /tmp/count.txt
26 | artifacts:
27 | - name: result
28 | description: "csv type"
29 | path: /tmp/result.txt
30 | - name: urls
31 | path: /tmp/urls.txt
32 | - name: hosts
33 | path: /tmp/hosts.txt
34 | script:
35 | image: ubuntu:lunar-20221207
36 | command: ["bash"]
37 | env:
38 | - name: FOFA_KEY
39 | value: {{`"{{workflow.parameters.fofa_key}}"`}}
40 | - name: FOFA_EMAIL
41 | value: {{`"{{workflow.parameters.fofa_email}}"`}}
42 | source: |
43 | cp /tmp/tools/fofax/fofax /bin/fofax
44 | fofax -disable-update -silent # create config
45 |
46 | # https://stackoverflow.com/questions/22697688/how-to-cat-eof-a-file-containing-code
47 | cat <<< '
48 | {{`{{workflow.parameters.query}}`}}
49 | ' > /tmp/query.txt
50 |
51 | fofax -disable-update -qf /tmp/query.txt -fs {{`{{workflow.parameters.size}}`}} -ff host,port,ip,lastupdatetime > /tmp/result.txt
52 |
53 | awk -F',' '{printf "%s:%s\n",$1,$2;}' /tmp/result.txt | sort | uniq > /tmp/urls.txt
54 |
55 | awk -F',' '{printf "%s,%s\n",$1,$4;}' /tmp/result.txt | sort | uniq > /tmp/hosts.txt
56 | sed /tmp/hosts.txt -i -e "s/http:\/\///g"
57 | sed /tmp/hosts.txt -i -e "s/https:\/\///g"
58 |
59 | wc -l /tmp/result.txt | awk '{print $1}' > /tmp/count.txt
60 | volumeMounts:
61 | - name: tools
62 | mountPath: /tmp/tools
63 | volumes:
64 | - name: tools
65 | persistentVolumeClaim:
66 | claimName: tools
67 |
68 | - name: fofa-client
69 | outputs:
70 | parameters:
71 | - name: count
72 | valueFrom:
73 | parameter: {{`"{{steps.call-fofa-client-no-save.outputs.parameters.count}}"`}}
74 | artifacts:
75 | - name: result
76 | description: "csv type"
77 | from: {{`"{{steps.call-fofa-client-no-save.outputs.artifacts.result}}"`}}
78 | - name: urls
79 | from: {{`"{{steps.call-fofa-client-no-save.outputs.artifacts.urls}}"`}}
80 | steps:
81 | - - name: call-fofa-client-no-save
82 | template: fofa-client-no-save
83 | - - name: call-save-port
84 | arguments:
85 | artifacts:
86 | - name: port
87 | from: {{`"{{steps.call-fofa-client-no-save.outputs.artifacts.result}}"`}}
88 | - name: subdomain
89 | from: {{`"{{steps.call-fofa-client-no-save.outputs.artifacts.hosts}}"`}}
90 | templateRef:
91 | name: save-asset-level1-fofa-to-es
92 | template: fofa
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/port/save-fofa-result/logstash-config.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ConfigMap
3 | metadata:
4 | name: logstash-config-fofa
5 | data:
6 | port.conf: |
7 | input {
8 | file {
9 | path => "/tmp/port.txt"
10 | start_position => "beginning"
11 | sincedb_path => "/dev/null"
12 | exit_after_read => true
13 | mode => "read"
14 | }
15 | }
16 | filter {
17 | csv {
18 | columns => ["host", "port", "ip", "fofa-lastupdatetime"]
19 | convert => {
20 | "port" => "integer"
21 | "fofa-lastupdatetime" => "date"
22 | }
23 | add_field => {
24 | "source" => "fofa"
25 | "link" => "%{host}:%{port}"
26 | }
27 | remove_field => ["message", "path"]
28 | }
29 |
30 | mutate {
31 | gsub => [
32 | "host", "https?://", "",
33 | "host", ":[\d]+", ""
34 | ]
35 | }
36 | }
37 | output {
38 | elasticsearch {
39 | hosts => ["elasticsearch-master:9200"]
40 | index => "port"
41 | document_id => "%{ip}_%{host}_%{port}"
42 |
43 | scripted_upsert => true
44 | action => "update"
45 | script_lang => "painless"
46 | script_type => "inline"
47 | script => "
48 | if(ctx.op == 'create') {
49 | ctx._source = params.event;
50 | ctx._source.first_create_time = params.event.get('@timestamp');
51 | } else {
52 | String old = ctx._source.get('first_create_time');
53 |
54 | for (entry in params.event.entrySet()) {
55 | ctx._source[entry.getKey()] = entry.getValue()
56 | }
57 |
58 | ctx._source.last_update_time = params.event.get('@timestamp');
59 | ctx._source.first_create_time = old;
60 | }
61 | "
62 | }
63 | }
64 | subdomain.conf: |
65 | input {
66 | file {
67 | path => "/tmp/subdomain.txt"
68 | start_position => "beginning"
69 | sincedb_path => "/dev/null"
70 | exit_after_read => true
71 | mode => "read"
72 | }
73 | }
74 | filter {
75 | csv {
76 | columns => ["host", "fofa-lastupdatetime"]
77 | convert => {
78 | "fofa-lastupdatetime" => "date"
79 | }
80 | add_field => {
81 | "source" => "fofa"
82 | }
83 | remove_field => ["message", "path"]
84 | }
85 |
86 | mutate {
87 | gsub => [
88 | "host", "https?://", "",
89 | "host", ":[\d]+", ""
90 | ]
91 | }
92 | }
93 | output {
94 | elasticsearch {
95 | hosts => ["elasticsearch-master:9200"]
96 | index => "subdomain"
97 | document_id => "%{host}"
98 |
99 | scripted_upsert => true
100 | action => "update"
101 | script_lang => "painless"
102 | script_type => "inline"
103 | script => "
104 | if(ctx.op == 'create') {
105 | ctx._source = params.event;
106 | ctx._source.first_create_time = params.event.get('@timestamp');
107 | } else {
108 | String old = ctx._source.get('first_create_time');
109 | for (entry in params.event.entrySet()) {
110 | ctx._source[entry.getKey()] = entry.getValue()
111 | }
112 | ctx._source.last_update_time = params.event.get('@timestamp');
113 | ctx._source.first_create_time = old;
114 | }
115 | "
116 | }
117 | }
118 |
--------------------------------------------------------------------------------
/templates/argo-workflow/minio.yaml:
--------------------------------------------------------------------------------
1 | # from https://github.com/argoproj/argo-workflows/blob/master/manifests/quick-start-postgres.yaml
2 | ---
3 | apiVersion: apps/v1
4 | kind: Deployment
5 | metadata:
6 | labels:
7 | app: minio
8 | name: minio
9 | spec:
10 | selector:
11 | matchLabels:
12 | app: minio
13 | template:
14 | metadata:
15 | labels:
16 | app: minio
17 | spec:
18 | automountServiceAccountToken: false
19 | containers:
20 | - command:
21 | - minio
22 | - server
23 | - --console-address
24 | - :9001
25 | - /data
26 | env:
27 | - name: MINIO_ROOT_USER  # MINIO_ACCESS_KEY was deprecated and removed in current minio/minio images; the image tag here is unpinned, so the old var is silently ignored
28 | value: admin
29 | - name: MINIO_ROOT_PASSWORD  # replaces the removed MINIO_SECRET_KEY; value unchanged so existing consumers (admin/password) still work
30 | value: password
31 | image: minio/minio
32 | volumeMounts:
33 | - mountPath: /data
34 | name: data
35 | lifecycle:
36 | postStart:
37 | exec:
38 | command:
39 | - mkdir
40 | - -p
41 | - /data/my-bucket
42 | livenessProbe:
43 | httpGet:
44 | path: /minio/health/live
45 | port: 9000
46 | initialDelaySeconds: 5
47 | periodSeconds: 10
48 | name: main
49 | ports:
50 | - containerPort: 9000
51 | name: api
52 | - containerPort: 9001
53 | name: dashboard
54 | readinessProbe:
55 | httpGet:
56 | path: /minio/health/ready
57 | port: 9000
58 | initialDelaySeconds: 5
59 | periodSeconds: 10
60 | volumes:
61 | - name: data
62 | persistentVolumeClaim:
63 | claimName: minio-data
64 | ---
65 | apiVersion: apps/v1
66 | kind: Deployment
67 | metadata:
68 | labels:
69 | app: postgres
70 | name: postgres
71 | spec:
72 | selector:
73 | matchLabels:
74 | app: postgres
75 | template:
76 | metadata:
77 | labels:
78 | app: postgres
79 | name: postgres
80 | spec:
81 | containers:
82 | - env:
83 | - name: POSTGRES_PASSWORD
84 | value: password
85 | image: postgres:12-alpine
86 | name: main
87 | ports:
88 | - containerPort: 5432
89 | readinessProbe:
90 | exec:
91 | command:
92 | - psql
93 | - -U
94 | - postgres
95 | - -c
96 | - SELECT 1
97 | initialDelaySeconds: 15
98 | timeoutSeconds: 2
99 | nodeSelector:
100 | kubernetes.io/os: linux
101 | ---
102 |
103 | ---
104 | apiVersion: v1
105 | kind: Service
106 | metadata:
107 | labels:
108 | app: minio
109 | name: minio
110 | spec:
111 | ports:
112 | - name: api
113 | port: 9000
114 | protocol: TCP
115 | targetPort: 9000
116 | - name: dashboard
117 | port: 9001
118 | protocol: TCP
119 | targetPort: 9001
120 | selector:
121 | app: minio
122 | ---
123 | apiVersion: v1
124 | kind: Service
125 | metadata:
126 | labels:
127 | app: postgres
128 | name: postgres
129 | spec:
130 | ports:
131 | - port: 5432
132 | protocol: TCP
133 | targetPort: 5432
134 | selector:
135 | app: postgres
136 | ---
137 | apiVersion: v1
138 | kind: PersistentVolumeClaim
139 | metadata:
140 | finalizers:
141 | - kubernetes.io/pvc-protection
142 | name: minio-data
143 | spec:
144 | accessModes:
145 | - ReadWriteOnce
146 | resources:
147 | requests:
148 | storage: 50Gi
149 | storageClassName: local
150 |
--------------------------------------------------------------------------------
/values.yaml:
--------------------------------------------------------------------------------
1 | # Default values for helm.
2 | # This is a YAML-formatted file.
3 | # Declare variables to be passed into your templates.
4 |
5 | # redis password
6 | password: redis_password
7 | redis_service_name: redis-service
8 |
9 | # proxy console manage username/password
10 | proxy_username: root
11 | proxy_password: proxy_password
12 |
13 | # console db
14 | console_db_host: mysql-service
15 | console_db_root_password: console_db_root_password
16 | console_db_persistence_enabled: false # true: use console_db_persistence_data_dir as hostpath
17 | console_db_persistence_data_dir: /root/data
18 | # console api
19 | console_api_service_name: console-api-service
20 | console_api_service_port: 80
21 |
22 | poc_api_endpoint: console-api-service
23 |
24 | # kafka
25 | kafka_broker_service: ""
26 | crawler_topic: crawler
27 | crawler_group_id: crawler
28 |
29 | # xray
30 | xray_proxy_service: xray-proxy-service # https/http proxy (attention: it should be better not to modify this name)
31 | xray_proxy_port: 58088
32 | xray_proxy_nodeport: 30088 # nodeport for xray_proxy_service, should be in range 30000-32767 on default k8s cluster
33 |
34 | xray_webhook_service: xray-webhook-service
35 | xray_webhook_port: 58089
36 |
37 | # xray reverse
38 | reverse_http_enable: false
39 | reverse_dns_enable: false
40 | reverse_client_enable: false
41 |
42 | reverse_http_base_url: "" # eg "leveryd.xxx.com"
43 | reverse_listen_ip: "192.168.0.110"
44 | reverse_dns_domain: "" # eg "xxx.com"
45 |
46 | # sensitive info: override this at install time (e.g. helm --set weixin_webhook_url=...); do not commit a real webhook key to version control
47 | weixin_webhook_url: https://qyapi.weixin.qq.com/cgi-bin/webhook/send?key=07d4613c-45ef-46e2-9379-a7b2aade3132
48 |
49 | # install
50 | console: true
51 | crawler: true
52 | xray: true
53 |
54 | # argo workflows server and controller config
55 | argo-workflows:
56 | fullnameOverride: argo-workflows-app # app name, attention: it should be better not to modify this name
57 | singleNamespace: true
58 | controller:
59 | serviceAccount:
60 | create: false
61 | workflow:
62 | rbac:
63 | create: false
64 | server:
65 | serviceAccount:
66 | create: false
67 | extraArgs:
68 | - "--auth-mode=client"
69 | - "--auth-mode=server"
70 | baseHref: /argo/
71 | crds:
72 | install: true
73 | server:
74 | name: argo-server
75 |
76 | console_domain: console.com # ingress domain
77 |
78 | # nuclei
79 | # visit https://app.interactsh.com/ to get your unique domain
80 | interactsh_server: "oast.fun"
81 |
82 | # security
83 | user_can_deploy_workflowtemplate: true # if u set false, it will be more secure, because user can only run certain task
84 |
85 | # elasticsearch
86 | elasticsearch:
87 | esConfig:
88 | elasticsearch.yml: |
89 | http:
90 | cors.allow-origin: "*"
91 | cors.enabled : true
92 | cors.allow-methods: OPTIONS, HEAD, GET, POST, PUT, DELETE
93 | cors.allow-headers: X-Requested-With,X-Auth-Token,Content-Type,Content-Length
94 | replicas: 1 # single node
95 | extraVolumes:
96 | - name: init-script
97 | configMap:
98 | name: init-script
99 | items:
100 | - key: init.py
101 | path: init.py
102 | extraContainers:
103 | - name: init-es-pipeline
104 | image: python:3.8
105 | command:
106 | - python
107 | args:
108 | - '-u'
109 | - /tmp/init.py
110 | env:
111 | - name: PYTHONUNBUFFERED
112 | value: '1'
113 | volumeMounts:
114 | - name: init-script
115 | mountPath: /tmp/init.py
116 | subPath: init.py
117 | rbac:
118 | create: true
119 |
120 | kibana:
121 | elasticsearchHosts: "http://elasticsearch-master:9200"
122 | kibanaConfig:
123 | kibana.yml: |
124 | server.basePath="/kibana"
125 | server.host="0.0.0.0"
126 | server.rewriteBasePath=true
127 |
128 | # passive asset
129 | fofa_key: ""
130 | fofa_email: ""
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/webservice/ip.yaml:
--------------------------------------------------------------------------------
1 | # probe web service status for ip segment
2 | apiVersion: argoproj.io/v1alpha1
3 | kind: WorkflowTemplate
4 | metadata:
5 | name: level2-probe-web-service-from-ip-segment
6 | spec:
7 | arguments:
8 | parameters:
9 | - name: passive
10 | description: "passive mode (default: false)"
11 | default: "false"
12 | enum:
13 | - "true"
14 | - "false"
15 | volumes:
16 | - name: logstash-config-map
17 | configMap:
18 | name: logstash
19 | - name: logstash-config-cidr
20 | configMap:
21 | name: logstash-config-cidr
22 | templates:
23 | - name: 根据cidr获取ip列表-端口扫描-存储结果
24 | inputs:
25 | parameters:
26 | - name: cidr
27 | description: "CIDR to map, eg 173.0.84.0/24"
28 | - name: port-range
29 | value: "80,443,8000-9000"
30 | steps:
31 | - - name: call-util-mapcidr
32 | arguments:
33 | parameters:
34 | - name: cidr
35 | value: {{`'{{inputs.parameters.cidr}}'`}}
36 | templateRef:
37 | name: util-asset
38 | template: mapcidr
39 | - - name: call-port-scan
40 | arguments:
41 | artifacts:
42 | - name: hosts
43 | from: {{`'{{steps.call-util-mapcidr.outputs.artifacts.ip-result}}'`}}
44 | parameters:
45 | - name: port-range
46 | value: {{`'{{inputs.parameters.port-range}}'`}}
47 | templateRef:
48 | name: probe-asset-level1-port
49 | template: port-scan
50 | - - name: call-httpx-scan
51 | arguments:
52 | artifacts:
53 | - name: hosts
54 | from: {{`'{{steps.call-port-scan.outputs.artifacts.result}}'`}}
55 | templateRef:
56 | name: level1-httpx
57 | template: probe-all-from-file-no-port
58 | - - name: logstash
59 | arguments:
60 | artifacts:
61 | - name: httpx-result
62 | from: {{`'{{steps.call-httpx-scan.outputs.artifacts.urls}}'`}}
63 | templateRef:
64 | name: level1-logstash
65 | template: httpx2elasticsearch
66 |
67 | - name: 根据cidr文件获取ip列表-端口扫描-存储结果
68 | inputs:
69 | artifacts:
70 | - name: cidr
71 | parameters:
72 | - name: port-range
73 | value: "80,443,8000-9000"
74 | steps:
75 | - - name: call-util-mapcidr
76 | arguments:
77 | artifacts:
78 | - name: cidr
79 | from: {{`'{{inputs.artifacts.cidr}}'`}}
80 | templateRef:
81 | name: util-asset
82 | template: mapcidr-from-file
83 | - - name: call-port-scan
84 | arguments:
85 | artifacts:
86 | - name: hosts
87 | from: {{`'{{steps.call-util-mapcidr.outputs.artifacts.ip-result}}'`}}
88 | parameters:
89 | - name: port-range
90 | value: {{`'{{inputs.parameters.port-range}}'`}}
91 | templateRef:
92 | name: probe-asset-level1-port
93 | template: port-scan
94 | - - name: call-httpx-scan
95 | arguments:
96 | artifacts:
97 | - name: hosts
98 | from: {{`'{{steps.call-port-scan.outputs.artifacts.result}}'`}}
99 | templateRef:
100 | name: level1-httpx
101 | template: probe-all-from-file-no-port
102 | - - name: logstash
103 | arguments:
104 | artifacts:
105 | - name: httpx-result
106 | from: {{`'{{steps.call-httpx-scan.outputs.artifacts.urls}}'`}}
107 | templateRef:
108 | name: level1-logstash
109 | template: httpx2elasticsearch
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level3/subdomain/probe-subdomain.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: probe-asset-level3-subdomain # level-3 subdomain discovery entry points, invoked by callers via templateRef
5 | spec:
6 | templates:
7 | - name: 从API获取兄弟域名-获取子域名-保存结果 # fetch root domains from the console API, then enumerate and save subdomains for each
8 | inputs:
9 | parameters:
10 | - name: query
11 | value: 'limit=10000&offset=0&accurate=是'
12 | description: "console api查询子域名时的条件"
13 | - name: get-subdomain-way
14 | description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
15 | default: "subfinder,es"
16 | parallelism: 6 # max concurrent fan-out steps; TODO: make configurable via values.yaml
17 | steps:
18 | - - name: call-getrootdomain-from-console-api # step 1: list second-level domains from the console API (JSON list result)
19 | arguments:
20 | parameters:
21 | - name: query
22 | value: {{`"{{inputs.parameters.query}}"`}}
23 | templateRef:
24 | name: get-asset-level1-sld-domain-from-console
25 | template: from-console-api
26 | - - name: call-get-subdomains-save # step 2: fan out over the returned list, one level-2 scan per domain
27 | templateRef:
28 | name: probe-asset-level2-subdomain
29 | template: get-subdomains-save
30 | arguments:
31 | parameters:
32 | - name: domain
33 | value: {{`'{{item}}'`}}
34 | - name: get-subdomain-way
35 | value: {{`"{{inputs.parameters.get-subdomain-way}}"`}}
36 | withParam: {{`'{{steps.call-getrootdomain-from-console-api.outputs.result}}'`}}
37 |
38 | - name: 输入域名列表-获取子域名-保存结果 # take a user-supplied newline-separated domain list, then enumerate and save subdomains
39 | inputs:
40 | parameters:
41 | - name: domains
42 | description: "域名列表"
43 | - name: get-subdomain-way
44 | description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
45 | default: "subfinder,es"
46 | parallelism: 6 # todo:config
47 | steps:
48 | - - name: generate-domain-list # step 1: convert the raw multi-line input into a JSON list usable by withParam
49 | arguments:
50 | parameters:
51 | - name: lines
52 | value: {{`"{{inputs.parameters.domains}}"`}}
53 | templateRef:
54 | name: util-user-input
55 | template: convert-lines-to-list
56 | - - name: call-get-subdomains-save # step 2: fan out, one level-2 scan per domain
57 | templateRef:
58 | name: probe-asset-level2-subdomain
59 | template: get-subdomains-save
60 | arguments:
61 | parameters:
62 | - name: domain
63 | value: {{`'{{item}}'`}}
64 | - name: get-subdomain-way
65 | value: {{`"{{inputs.parameters.get-subdomain-way}}"`}}
66 | withParam: {{`'{{steps.generate-domain-list.outputs.result}}'`}}
67 |
68 | - name: 获取组织的二级域名-获取子域名-保存结果 # look up an organization's second-level domains in ES, then enumerate and save subdomains
69 | inputs:
70 | parameters:
71 | - name: org
72 | description: "组织名"
73 | - name: get-subdomain-way
74 | description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
75 | default: "subfinder,es"
76 | parallelism: 3 # lower fan-out than the other two templates; NOTE(review): reason undocumented — confirm
77 | steps:
78 | - - name: call-get-sld # step 1: query ES for the org's second-level domains (file artifact output)
79 | arguments:
80 | parameters:
81 | - name: org
82 | value: {{`"{{inputs.parameters.org}}"`}}
83 | templateRef:
84 | name: get-asset-level1-sld-domain-from-es
85 | template: from-es
86 | - - name: convert-file-to-list # step 2: turn the file artifact into a JSON list usable by withParam
87 | arguments:
88 | artifacts:
89 | - name: file
90 | from: {{`'{{steps.call-get-sld.outputs.artifacts.result}}'`}}
91 | templateRef:
92 | name: util-user-input
93 | template: convert-file-to-list
94 | - - name: call-get-subdomains-save # step 3: fan out, one level-2 scan per second-level domain
95 | templateRef:
96 | name: probe-asset-level2-subdomain
97 | template: get-subdomains-save
98 | arguments:
99 | parameters:
100 | - name: domain
101 | value: {{`'{{item}}'`}}
102 | - name: get-subdomain-way
103 | value: {{`"{{inputs.parameters.get-subdomain-way}}"`}}
104 | withParam: {{`'{{steps.convert-file-to-list.outputs.result}}'`}}
105 |
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level1/second-level-domain/logstash-config.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: v1
2 | kind: ConfigMap
3 | metadata:
4 | name: logstash-config-sld # Logstash pipelines for second-level-domain (sld) assets
5 | data:
6 | sld.conf: | # export the registered domains of an org from the "subdomain" index to /tmp/host.txt
7 | input {
8 | # Read all documents from Elasticsearch matching the given query
9 | elasticsearch {
10 | hosts => "{{.Values.kibana.elasticsearchHosts}}" # use the chart value, consistent with the outputs below (was hard-coded "elasticsearch-master:9200")
11 | index => "subdomain"
12 | query => '{
13 | "_source": "parsed-domain.registered_domain",
14 | "query": {
15 | "query_string": {
16 | "query": "org:${ORG}"
17 | }
18 | }
19 | }'
20 | }
21 | }
22 |
23 | output {
24 | file {
25 | path => "/tmp/host.txt"
26 | codec => line {
27 | format => "%{[parsed-domain][registered_domain]}"
28 | }
29 | }
30 | }
31 |
32 | save.conf: | # read slds from /tmp/sld.txt and upsert them into the "sld" index
33 | input {
34 | file {
35 | path => "/tmp/sld.txt"
36 | start_position => "beginning"
37 | sincedb_path => "/dev/null" # do not persist read offsets; always re-read the file
38 | exit_after_read => true
39 | mode => "read"
40 | }
41 | }
42 |
43 | filter {
44 | csv {
45 | columns => ["sld"]
46 | }
47 |
48 | mutate {
49 | add_field => { "source" => "${SOURCE}" }
50 | add_field => { "org" => ["${ORG}"] }
51 | add_field => { "host" => "%{sld}" } # NOTE(review): "host" is also listed in remove_field below; add_field is applied before remove_field, so this value is dropped — confirm intent
52 | remove_field => ["host", "path", "message"]
53 | }
54 | }
55 |
56 | output {
57 | elasticsearch {
58 | hosts => "{{.Values.kibana.elasticsearchHosts}}"
59 | index => "sld"
60 | document_id => "%{sld}" # dedupe documents by sld value
61 |
62 | scripted_upsert => true # painless script below preserves first_create_time across updates
63 | action => "update"
64 | script_lang => "painless"
65 | script_type => "inline"
66 | script => "
67 | if(ctx.op == 'create') {
68 | ctx._source=params.event;
69 | ctx._source.first_create_time = params.event.get('timestamp');
70 | } else {
71 | String old = ctx._source.get('first_create_time');
72 |
73 | for (entry in params.event.entrySet()) {
74 | ctx._source[entry.getKey()] = entry.getValue()
75 | }
76 | //ctx._source = params.event;
77 |
78 | ctx._source.last_update_time = params.event.get('timestamp');
79 | ctx._source.first_create_time = old;
80 | }
81 | "
82 | }
83 | }
84 |
85 | save-enscan.conf: | # ingest enscan JSON output: extract ICP domains, write per-company files, upsert into "sld"
86 | input {
87 | file {
88 | path => "/tmp/outs/*.json"
89 | start_position => "beginning"
90 | sincedb_path => "/dev/null"
91 | exit_after_read => true
92 | mode => "read"
93 | codec => "json"
94 | file_chunk_size => 3145728 # 3MB
95 | }
96 | }
97 |
98 | filter {
99 | split {
100 | field => ["icp"]
101 | }
102 | mutate {
103 | add_field => {
104 | "sld" => "%{[icp][domain]}"
105 | }
106 | remove_field => ["[icp][company_name]", "host"] # bracket syntax required for nested fields; "icp.company_name" would only match a literal dotted top-level field
107 | }
108 | }
109 |
110 | output {
111 | file {
112 | path => "/tmp/sld/%{[enterprise_info][0][name]}.txt" # one output file per company name
113 | codec => line {
114 | format => "%{[icp][domain]}"
115 | }
116 | }
117 | elasticsearch {
118 | hosts => "{{.Values.kibana.elasticsearchHosts}}"
119 | index => "sld"
120 | document_id => "%{sld}"
121 |
122 | scripted_upsert => true
123 | action => "update"
124 | script_lang => "painless"
125 | script_type => "inline"
126 | script => "
127 | if(ctx.op == 'create') {
128 | ctx._source=params.event;
129 | ctx._source.first_create_time = params.event.get('timestamp');
130 | } else {
131 | String old = ctx._source.get('first_create_time');
132 |
133 | for (entry in params.event.entrySet()) {
134 | ctx._source[entry.getKey()] = entry.getValue()
135 | }
136 | //ctx._source = params.event;
137 |
138 | ctx._source.last_update_time = params.event.get('timestamp');
139 | ctx._source.first_create_time = old;
140 | }
141 | "
142 | }
143 | }
--------------------------------------------------------------------------------
/templates/argo-workflow-template/level2/katana/domain-katana.yaml:
--------------------------------------------------------------------------------
1 | apiVersion: argoproj.io/v1alpha1
2 | kind: WorkflowTemplate
3 | metadata:
4 | name: level2-domain-katana # level-2 crawl pipelines driven by a single domain
5 | spec:
6 | templates:
7 | - name: 获取子域名-katana爬虫-xray扫描-保存结果 # enumerate subdomains with subfinder, then crawl them via the katana service
8 | inputs:
9 | parameters:
10 | - name: domain
11 | description: "二级域名,比如 baidu.com"
12 | steps:
13 | - - name: call-subfinder # step 1: enumerate subdomains of the input domain (file artifact output)
14 | arguments:
15 | parameters:
16 | - name: domain
17 | value: {{`'{{inputs.parameters.domain}}'`}}
18 | templateRef:
19 | name: probe-asset-level1-subdomain
20 | template: subfinder
21 | - - name: call-katana-service-from-file # step 2: crawl every discovered subdomain via the katana service
22 | arguments:
23 | artifacts:
24 | - name: domains
25 | from: {{`'{{steps.call-subfinder.outputs.artifacts.subdomains}}'`}}
26 | templateRef:
27 | name: level1-crawler
28 | template: katana-service-from-file
29 |
30 | - name: 获取子域名-katana爬虫二进制-xray扫描-保存结果 # difference: the crawl step invokes the katana binary instead of the katana service
31 | inputs:
32 | parameters:
33 | - name: domain
34 | description: "二级域名,比如 baidu.com"
35 | - name: get-subdomain-way
36 | description: "哪种方式扫描子域名 subfinder,oneforall,es,mysql"
37 | default: "subfinder,es,mysql"
38 | - name: save-subdomain-result
39 | default: "false"
40 | enum: ["true", "false"]
41 | steps:
42 | - - name: call-get-subdomains-save # step 1: enumerate (and optionally persist) subdomains; result is exposed as a global artifact
43 | templateRef:
44 | name: probe-asset-level2-subdomain
45 | template: get-subdomains-save
46 | arguments:
47 | parameters:
48 | - name: domain
49 | value: {{`'{{inputs.parameters.domain}}'`}}
50 | - name: get-subdomain-way
51 | value: {{`"{{inputs.parameters.get-subdomain-way}}"`}}
52 | - name: save-subdomain-result
53 | value: {{`"{{inputs.parameters.save-subdomain-result}}"`}}
54 | - - name: call-katana-binary # step 2: crawl the subdomains with the katana binary
55 | arguments:
56 | artifacts:
57 | - name: domains
58 | from: {{`'{{workflow.outputs.artifacts.subdomains}}'`}} # global artifacts
59 | templateRef:
60 | name: level1-crawler
61 | template: katana-binary
62 |
63 | - name: 获取web服务-katana爬虫-xray扫描-保存结果 # crawl web-service URLs already recorded in ES for the domain/port range
64 | parallelism: 3 # at most 3 concurrent crawl steps
65 | inputs:
66 | parameters:
67 | - name: domain
68 | description: "eg : example.com"
69 | - name: min_port
70 | description: min port to search for, e.g. 8000
71 | - name: max_port
72 | description: max port to search for, e.g. 9000
73 | steps:
74 | - - name: call-get-urls-from-es # step 1: fetch known web-service URLs from Elasticsearch (file artifact output)
75 | arguments:
76 | parameters:
77 | - name: domain
78 | value: {{`'{{inputs.parameters.domain}}'`}}
79 | - name: min_port
80 | value: {{`'{{inputs.parameters.min_port}}'`}}
81 | - name: max_port
82 | value: {{`'{{inputs.parameters.max_port}}'`}}
83 | templateRef:
84 | name: asset-level1-web-service
85 | template: get-website-from-es
86 | - - name: call-get-file-lines-count # step 2: count result lines to size the crawl fan-out below
87 | arguments:
88 | artifacts:
89 | - name: file
90 | from: {{`"{{steps.call-get-urls-from-es.outputs.artifacts.result}}"`}}
91 | templateRef:
92 | name: util
93 | template: get-file-lines-count
94 | - - name: call-katana-from-file-index # step 3: one crawl step per line of the URL file, indexed by the sequence item
95 | arguments:
96 | artifacts:
97 | - name: domains
98 | from: {{`"{{steps.call-get-urls-from-es.outputs.artifacts.result}}"`}}
99 | parameters:
100 | - name: index
101 | value: {{`"{{item}}"`}}
102 | templateRef:
103 | name: level1-crawler
104 | template: katana-from-file-index
105 | withSequence:
106 | count: {{`"{{steps.call-get-file-lines-count.outputs.result}}"`}}
107 |
--------------------------------------------------------------------------------
/templates/argo-workflow/config.yaml:
--------------------------------------------------------------------------------
1 | # from https://github.com/argoproj/argo-workflows/blob/master/manifests/quick-start-postgres.yaml
2 | ---
3 | apiVersion: v1
4 | data:
5 | default-v1: | # default artifact repository entry (selected by the annotation below)
6 | archiveLogs: true
7 | s3:
8 | bucket: my-bucket
9 | endpoint: minio:9000
10 | insecure: true
11 | accessKeySecret:
12 | name: my-minio-cred
13 | key: accesskey
14 | secretKeySecret:
15 | name: my-minio-cred
16 | key: secretkey
17 | empty: ""
18 | my-key: | # alternate repository entry, same backing bucket
19 | archiveLogs: true
20 | s3:
21 | bucket: my-bucket
22 | endpoint: minio:9000
23 | insecure: true
24 | accessKeySecret:
25 | name: my-minio-cred
26 | key: accesskey
27 | secretKeySecret:
28 | name: my-minio-cred
29 | key: secretkey
30 | kind: ConfigMap
31 | metadata:
32 | annotations:
33 | workflows.argoproj.io/default-artifact-repository: default-v1 # workflows use "default-v1" unless they name another entry
34 | name: artifact-repositories
35 | ---
36 | apiVersion: v1
37 | data:
38 | # https://argoproj.github.io/argo-workflows/default-workflow-specs/
39 | workflowDefaults: | # defaults merged into every submitted Workflow
40 | metadata:
41 | labels:
42 | workflows.argoproj.io/creator: system-serviceaccount-argo-argo-server
43 | spec:
44 | podGC:
45 | strategy: OnPodCompletion # delete pods as soon as they complete
46 | ttlStrategy:
47 | secondsAfterCompletion: 30 # delete Workflow objects 30s after completion (persistence archive below keeps history)
48 | workflowRestrictions: |
49 | templateReferencing: Strict # only workflows referencing WorkflowTemplates are processed
50 | artifactRepository: | # controller-level artifact repository (same minio bucket as above)
51 | s3:
52 | bucket: my-bucket
53 | endpoint: minio:9000
54 | insecure: true
55 | accessKeySecret:
56 | name: my-minio-cred
57 | key: accesskey
58 | secretKeySecret:
59 | name: my-minio-cred
60 | key: secretkey
61 | executor: | # resource requests for the executor (init/wait) containers
62 | resources:
63 | requests:
64 | cpu: 10m
65 | memory: 64Mi
66 | images: |
67 | docker/whalesay:latest:
68 | cmd: [cowsay]
69 | links: | # extra UI links rendered by argo-server
70 | - name: Workflow Link
71 | scope: workflow
72 | url: http://logging-facility?namespace=${metadata.namespace}&workflowName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt}
73 | - name: Pod Link
74 | scope: pod
75 | url: http://{{.Values.console_domain}}:30880/terminal/cluster/default/projects/${metadata.namespace}/pods/${metadata.name}/containers/main
76 | - name: Pod Logs Link
77 | scope: pod-logs
78 | url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt}
79 | - name: Event Source Logs Link
80 | scope: event-source-logs
81 | url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt}
82 | - name: Sensor Logs Link
83 | scope: sensor-logs
84 | url: http://logging-facility?namespace=${metadata.namespace}&podName=${metadata.name}&startedAt=${status.startedAt}&finishedAt=${status.finishedAt}
85 | metricsConfig: | # Prometheus metrics endpoint of the controller
86 | enabled: true
87 | path: /metrics
88 | port: 9090
89 | namespaceParallelism: "10" # max workflows running concurrently per namespace
90 | persistence: | # offload node status and archive completed workflows to Postgres
91 | connectionPool:
92 | maxIdleConns: 100
93 | maxOpenConns: 0
94 | connMaxLifetime: 0s
95 | nodeStatusOffLoad: true
96 | archive: true
97 | archiveTTL: 7d # archived workflow records are kept for 7 days
98 | postgresql:
99 | host: postgres
100 | port: 5432
101 | database: postgres
102 | tableName: argo_workflows
103 | userNameSecret:
104 | name: argo-postgres-config
105 | key: username
106 | passwordSecret:
107 | name: argo-postgres-config
108 | key: password
109 | retentionPolicy: | # max completed/failed/errored Workflow objects kept in the cluster
110 | completed: 10
111 | failed: 3
112 | errored: 3
113 | kind: ConfigMap
114 | metadata:
115 | name: "argo-workflows-app-workflow-controller-configmap" # controller and server will use the same configmap
116 | ---
117 | apiVersion: v1
118 | kind: Secret
119 | metadata:
120 | labels:
121 | app: postgres
122 | name: argo-postgres-config
123 | stringData:
124 | password: password # NOTE(review): hard-coded default credential; consider sourcing from values.yaml or an external secret
125 | username: postgres
126 | type: Opaque
127 | ---
128 | apiVersion: v1
129 | kind: Secret
130 | metadata:
131 | labels:
132 | app: minio
133 | name: my-minio-cred
134 | stringData:
135 | accesskey: admin # NOTE(review): hard-coded default credentials; consider sourcing from values.yaml or an external secret
136 | secretkey: password
137 | type: Opaque
138 | ---
--------------------------------------------------------------------------------
/templates/argo-workflow-template-asset/level2/webservice/domain.yaml:
--------------------------------------------------------------------------------
1 | # probe web service status for a domain
2 | apiVersion: argoproj.io/v1alpha1
3 | kind: WorkflowTemplate
4 | metadata:
5 | name: probe-asset-level2-web-service
6 | spec:
7 | arguments:
8 | parameters:
9 | - name: passive
10 | description: "passive mode (default: false)"
11 | default: "false"
12 | enum:
13 | - "true"
14 | - "false"
15 | volumes:
16 | - name: logstash-config-map
17 | configMap:
18 | name: logstash
19 | templates:
20 | - name: port-scan
21 | inputs:
22 | parameters:
23 | - name: hosts
24 | description: "domains or ips, eg www.apple.com