├── .env.default
├── .gitignore
├── .gitlab-ci.yml
├── LICENSE
├── Makefile
├── README.md
├── behat.default.yml
├── composer.json
├── content
├── .gitkeep
├── taxonomy_term-category.csv
└── user.csv
├── docker
├── 90-mail.ini
├── Caddyfile
├── docker-compose.override.yml.default
├── docker-compose.yml
├── load-extension.sh
├── unit.json
└── xx-php.ini
├── drush
├── .drushrc.php
├── README.md
└── policy.drush.inc
├── features
├── bootstrap
│ └── FeatureContext.php
└── generic_tests.feature
├── lighthouserc.yml
├── private-files
└── .htaccess
├── rector.php
├── scripts
├── composer
│ └── ScriptHandler.php
├── delivery-archive
│ ├── .gitlab-ci.delivery_via_archive_example.yml
│ └── README.md
├── delivery-docker
│ ├── .gitlab-ci.delivery_via_docker_example.yml
│ ├── Dockerfile
│ └── README.md
├── delivery-git
│ ├── .gitlab-ci.delivery_via_git_example.yml
│ ├── README.md
│ └── deliver_current_tag_via_git.sh
├── git_hooks
│ └── sniffers.sh
├── makefile
│ ├── backup.mk
│ ├── baseconfig-langcode.sh
│ ├── blackfire.sh
│ ├── config-inspector-validation.sh
│ ├── contentgen.sh
│ ├── front.mk
│ ├── help.mk
│ ├── newlineeof.sh
│ ├── newrelic.sh
│ ├── patchval.sh
│ ├── reload.sh
│ ├── status-report-validation.sh
│ ├── system-detection.mk
│ ├── tests.mk
│ ├── upgrade-status-validation.sh
│ ├── watchdog-validation.sh
│ └── xdebug.sh
├── mirroring
│ ├── .gitlab-ci.mirroring_example.yml
│ ├── README.md
│ └── mirror_current_branch.sh
└── multisite
│ ├── .gitlab-ci.split_switch_example.yml
│ ├── README.md
│ ├── config_split.mk
│ ├── config_split_disable_all.sh
│ └── config_split_list_all.sh
├── settings
├── settings.dev.php
├── settings.local.php
└── settings.redis.php
├── translations
├── en.po
└── fr.po
└── web
├── modules
└── custom
│ ├── README.txt
│ └── project_default_content
│ ├── README.md
│ ├── content
│ └── shortcut
│ │ └── ebb7c60a-b052-41ba-9a82-0ed73475a33b.yml
│ └── project_default_content.info.yml
└── themes
└── custom
└── README.txt
/.env.default:
--------------------------------------------------------------------------------
1 | COMPOSE_FILE=./docker/docker-compose.yml:./docker/docker-compose.override.yml
2 | COMPOSE_PROJECT_NAME=projectname
3 | PROFILE_NAME=druxxy
4 | THEME_NAME=NA
5 | SITE_NAME=Example
6 | SITE_MAIL=admin@example.com
7 | ADMIN_NAME=admin
8 | ADMIN_MAIL=admin@example.com
9 | PROJECT_INSTALL=
10 | IMAGE_PHP=skilldlabs/php:83-unit
11 | #IMAGE_PHP=skilldlabs/php:83-frankenphp
12 | #EXEC_SHELL=/bin/bash
13 | #PKGMAN=apt
14 | ADDITIONAL_PHP_PACKAGES=graphicsmagick
15 | IMAGE_NGINX=skilldlabs/nginx:1.24
16 | IMAGE_FRONT=node:lts-alpine
17 | IMAGE_SOLR=solr:8-slim
18 | IMAGE_REDIS=redis:5-alpine
19 | IMAGE_DRIVER=zenika/alpine-chrome
20 | CLEAR_FRONT_PACKAGES=no
21 | MAIN_DOMAIN_NAME=docker.localhost
22 | DB_URL=sqlite://./../.cache/db.sqlite
23 | # Faster but data will be lost on php container recreation
24 | #DB_URL=sqlite:///dev/shm/db.sqlite
25 | #DB_URL=mysql://db:db@mysql/db
26 | #DB_URL=pgsql://db:dbroot@postgresql/db
27 | # Include path to this folder to your .gitignore if you override it
28 | DB_DATA_DIR=../.cache
29 | #DB_DATA_DIR=/dev/shm
30 | # We're connecting through TCP. Use "redis" as host, and "6379" as port.
31 | REDIS_HOST=redis
32 | REDIS_PORT=6379
33 | REDIS_PASSWD=1234567890
34 | # See readme for basicauth convention
35 | RA_BASIC_AUTH=
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore directories generated by Composer
2 | vendor
3 | web/*.*
4 | web/core/
5 | web/libraries/
6 | web/modules/*.*
7 | web/modules/contrib/
8 | web/themes/*.*
9 | web/themes/contrib/
10 | web/profiles/*.*
11 | web/profiles/contrib/
12 | web/sites/
13 |
14 | # Ignore environmental files
15 | .env
16 | docker/docker-compose.override.yml
17 | behat.yml
18 | features/*.html
19 | features/*.png
20 | features/junit/
21 | .cache/
22 | .editorconfig
23 | .gitattributes
24 | config/sync/.htaccess
25 | mysql/
26 | node_modules
27 | web/themes/custom/*/dist/
28 | private-files/*/
29 | private/*/
30 | *.sql
31 | .lighthouseci/
32 | web/.lighthouseci/
33 | *.zip
34 | *.tgz
35 | *.tar.gz
36 |
37 | # Ignore personal directories
38 | .idea/
39 | .buildpath
40 | .settings
41 | .project
42 |
43 |
--------------------------------------------------------------------------------
/.gitlab-ci.yml:
--------------------------------------------------------------------------------
1 | # Variables to add in Gitlab UI: Settings > CI/CD
2 | # - NEW_RELIC_LICENSE_KEY // Optional
3 | # - TEST_UPDATE_DEPLOYMENTS // Optional, enables test:deploy job
4 | # - GITLAB_PROJECT_ACCESS_TOKEN // Required by test:deploy job. To be created in Gitlab project UI with read_api and read_repository permissions
5 | # - GITLAB_PROJECT_BASIC_AUTH // Required by test:deploy job. Encoded credentials to pass Gitlab basic auth, if any
6 |
7 | variables:
8 | GIT_DEPTH: "1"
9 | THEME_PATH: "" # Update to enable front jobs (web/themes/custom/XXX)
10 | STORYBOOK_PATH: "" # Update to enable storybook job (themes/custom/XXX/dist/storybook/index.html)
11 | GIT_STRATEGY: fetch
12 | REVIEW_DOMAIN: "XXX.XXX.com" # Mandatory, should equal to DNS of available runner server with docker + compose + traefik
13 | IMAGE_PHP: skilldlabs/php:83
14 |
15 | image: $IMAGE_PHP
16 |
17 | # Tags defines which runner to use (expected shell runner)
18 | .runner_tag_selection:
19 | tags:
20 | - XXX # Mandatory, should equal to tag of available runner server with docker + compose + traefik
21 |
22 | before_script:
23 | - date
24 | - pwd
25 | - ls -lah
26 | - whoami
27 | - id
28 | - cat /etc/os-release
29 | - env
30 | - echo $CI_PIPELINE_SOURCE
31 |
32 | stages:
33 | - sniffers
34 | - prepare
35 | - build
36 | - update
37 | - tests
38 | - more tests
39 |
40 | sniffers:clang:
41 | stage: sniffers
42 | script:
43 | - make clang
44 | rules:
45 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
46 | changes:
47 | - {{ project.path }}/**/*
48 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
49 | changes:
50 | - {{ project.path }}/**/*
51 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
52 | changes:
53 | - {{ project.path }}/**/*
54 |
55 | sniffers:compose:
56 | stage: sniffers
57 | script:
58 | - composer -vvv -V
59 | - time composer validate --profile
60 | rules:
61 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
62 | changes:
63 | - {{ project.path }}/**/*
64 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
65 | changes:
66 | - {{ project.path }}/**/*
67 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
68 | changes:
69 | - {{ project.path }}/**/*
70 |
71 | sniffers:front:
72 | stage: sniffers
73 | script:
74 | - make front-install # Dependencies are required for js imports to pass linters
75 | - make lintval
76 | cache:
77 | key:
78 | files:
79 | # Until https://gitlab.com/gitlab-org/gitlab/issues/118466 lands ...
80 | - web/themes/custom/XXX/package.json # ... this path has to be hardcoded
81 | - web/themes/custom/XXX/yarn.lock # ... this path has to be hardcoded
82 | paths:
83 | - ${THEME_PATH}/node_modules/ # Populated during yarn install
84 | artifacts:
85 | name: "${CI_COMMIT_REF_NAME}:${CI_COMMIT_SHA}:front"
86 | expire_in: 1d
87 | paths:
88 | - ${THEME_PATH}/node_modules/ # Populated during yarn install
89 | extends: .runner_tag_selection
90 | rules:
91 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $THEME_PATH
92 | changes:
93 | - {{ project.path }}/**/*
94 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $THEME_PATH
95 | changes:
96 | - {{ project.path }}/**/*
97 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $THEME_PATH
98 | changes:
99 | - {{ project.path }}/**/*
100 | - if: $CI_COMMIT_TAG && $THEME_PATH
101 |
102 | sniffers:phpcs:
103 | stage: sniffers
104 | image: skilldlabs/docker-phpcs-drupal
105 | script:
106 | - docker -v
107 | - make phpcs
108 | extends: .runner_tag_selection
109 | rules:
110 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
111 | changes:
112 | - {{ project.path }}/**/*
113 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
114 | changes:
115 | - {{ project.path }}/**/*
116 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
117 | changes:
118 | - {{ project.path }}/**/*
119 |
120 | sniffers:newlineeof:
121 | stage: sniffers
122 | script:
123 | - make newlineeof
124 | rules:
125 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
126 | changes:
127 | - {{ project.path }}/**/*
128 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
129 | changes:
130 | - {{ project.path }}/**/*
131 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
132 | changes:
133 | - {{ project.path }}/**/*
134 |
135 |
136 | prepare:back:
137 | stage: prepare
138 | script:
139 | - docker --version
140 | - docker run -e "COMPOSER_MEMORY_LIMIT=1G" --user 1000:1000 --rm --init -v `pwd`:`pwd` -w `pwd` "$IMAGE_PHP" /bin/ash -c "composer --version && composer install --no-interaction --prefer-dist -o -v --ignore-platform-reqs && composer create-required-files" # Make back on the fly, using --ignore-platform-reqs as additional PHP-extensions could be missing
141 | cache:
142 | key:
143 | files:
144 | - composer.json
145 | - composer.lock
146 | paths:
147 | - vendor/
148 | - web/core/
149 | - web/libraries/
150 | - web/modules/contrib/
151 | - web/profiles/contrib/
152 | - web/themes/contrib/
153 | - drush/contrib/
154 | dependencies: []
155 | artifacts:
156 | name: "${CI_COMMIT_REF_NAME}:${CI_COMMIT_SHA}:back"
157 | expire_in: 1d
158 | paths:
159 | - vendor/
160 | - web/
161 | - drush/
162 | exclude:
163 | - web/modules/custom/
164 | - web/themes/custom/
165 | - web/profiles/contrib/
166 | extends: .runner_tag_selection
167 | rules:
168 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
169 | changes:
170 | - {{ project.path }}/**/*
171 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
172 | changes:
173 | - {{ project.path }}/**/*
174 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
175 | changes:
176 | - {{ project.path }}/**/*
177 | - if: $CI_COMMIT_TAG
178 |
179 | prepare:front:
180 | stage: prepare
181 | script:
182 | - make front-install
183 | - make front-build
184 | dependencies:
185 | - sniffers:front
186 | artifacts:
187 | name: "${CI_COMMIT_REF_NAME}:${CI_COMMIT_SHA}:front"
188 | expire_in: 1d
189 | paths:
190 | - ${THEME_PATH}/dist/ # Populated during yarn build
191 | extends: .runner_tag_selection
192 | rules:
193 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $THEME_PATH
194 | changes:
195 | - {{ project.path }}/**/*
196 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $THEME_PATH
197 | changes:
198 | - {{ project.path }}/**/*
199 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $THEME_PATH
200 | changes:
201 | - {{ project.path }}/**/*
202 | - if: $CI_COMMIT_TAG && $THEME_PATH
203 |
204 |
205 | .deploy_template: &deploy_template
206 | stage: build
207 | script:
208 | - echo "Deploy ${CI_ENVIRONMENT_URL} review app to ${BUILD_DIR}."
209 | - echo "CI_ENVIRONMENT_NAME=${CI_ENVIRONMENT_NAME}"
210 | - echo "CI_ENVIRONMENT_SLUG=${CI_ENVIRONMENT_SLUG}"
211 | - echo "CI_COMMIT_REF_SLUG=${CI_COMMIT_REF_SLUG}"
212 | - echo "CI_ENVIRONMENT_URL=${CI_ENVIRONMENT_URL}"
213 | - echo "CI_PROJECT_PATH_SLUG=${CI_PROJECT_PATH_SLUG}"
214 | - echo "CI_PROJECT_NAME=${CI_PROJECT_NAME}"
215 | - echo "REVIEW_DOMAIN=${REVIEW_DOMAIN}"
216 | - mkdir -p ${BUILD_DIR}
217 | - rsync -ah --exclude=.git --exclude=.cache --delete ./ ${BUILD_DIR}
218 | - cd ${BUILD_DIR}
219 | - echo "COMPOSE_PROJECT_NAME=${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}" >> .env.default
220 | - echo "MAIN_DOMAIN_NAME=${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}" >> .env.default
221 | - docker --version
222 | - docker compose version
223 | - make all_ci
224 | - make drush config-set system.site name '${CI_COMMIT_REF_SLUG}' -- -y
225 | - echo "Copying DB and files to make them accessible to artifact creator"
226 | - docker compose exec -T --user root php ash -c "chmod -R 775 .cache/ web/sites/"
227 | - cp -r .cache/ ${CI_PROJECT_DIR} && cp -r web/sites/ ${CI_PROJECT_DIR}/web/ # https://gitlab.com/gitlab-org/gitlab-foss/-/issues/15530#note_533732001
228 | after_script:
229 | - docker network prune -f
230 | - docker container prune -f
231 | - echo "Started ${CI_ENVIRONMENT_URL} composition in ${BUILD_DIR} from Makefile."
232 | dependencies:
233 | - prepare:back
234 | - prepare:front
235 | allow_failure: false # https://gitlab.com/gitlab-org/gitlab-foss/-/issues/25892#note_26909808
236 | extends: .runner_tag_selection
237 |
238 | .deploy_template_environment_ttl_long: &deploy_template_environment_ttl_long
239 | environment:
240 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
241 | name: review/${CI_COMMIT_REF_NAME}
242 | on_stop: stop_review
243 | auto_stop_in: 1 month
244 |
245 | .deploy_template_environment_ttl_mid: &deploy_template_environment_ttl_mid
246 | environment:
247 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
248 | name: review/${CI_COMMIT_REF_NAME}
249 | on_stop: stop_review
250 | auto_stop_in: 1 week
251 |
252 | .deploy_template_environment_ttl_short: &deploy_template_environment_ttl_short
253 | environment:
254 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
255 | name: review/${CI_COMMIT_REF_NAME}
256 | on_stop: stop_review
257 | auto_stop_in: 1 day
258 |
259 | build:review:
260 | <<: *deploy_template
261 | <<: *deploy_template_environment_ttl_mid
262 | when: manual
263 | rules:
264 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $CI_MERGE_REQUEST_IID
265 | changes:
266 | - {{ project.path }}/**/*
267 |
268 | build:master:
269 | <<: *deploy_template
270 | <<: *deploy_template_environment_ttl_long
271 | when: always
272 | rules:
273 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $CI_COMMIT_REF_NAME == $CI_DEFAULT_BRANCH
274 | changes:
275 | - {{ project.path }}/**/*
276 |
277 | build:tag:
278 | <<: *deploy_template
279 | <<: *deploy_template_environment_ttl_short
280 | when: manual
281 | rules:
282 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $CI_COMMIT_TAG
283 | artifacts:
284 | name: "${CI_PROJECT_NAME}-raw_build_db_and_files-tag-artifact"
285 | expire_in: 1 week
286 | paths:
287 | - web/sites/*/files/
288 | - .cache
289 |
290 | stop_review:
291 | stage: build
292 | variables:
293 | GIT_STRATEGY: none
294 | dependencies: []
295 | script:
296 | - echo "Clean-up build dir ${BUILD_DIR}"
297 | - (if [ -d ${BUILD_DIR} ]; then date; cd ${BUILD_DIR}; pwd; make clean; cd -; rm -rf ${BUILD_DIR}; fi)
298 | - echo "Removed previous review app ${CI_ENVIRONMENT_URL} from ${BUILD_DIR}."
299 | when: manual
300 | environment:
301 | name: review/${CI_COMMIT_REF_NAME}
302 | action: stop
303 | extends: .runner_tag_selection
304 | rules:
305 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
306 | changes:
307 | - {{ project.path }}/**/*
308 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
309 | changes:
310 | - {{ project.path }}/**/*
311 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
312 | changes:
313 | - {{ project.path }}/**/*
314 | - if: $CI_COMMIT_TAG
315 |
316 | generate:logins:
317 | stage: build
318 | dependencies: []
319 | script:
320 | - cd ${BUILD_DIR}
321 | - make info
322 | when: manual
323 | environment:
324 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
325 | name: review/${CI_COMMIT_REF_NAME}
326 | on_stop: stop_review
327 | extends: .runner_tag_selection
328 | rules:
329 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
330 | changes:
331 | - {{ project.path }}/**/*
332 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
333 | changes:
334 | - {{ project.path }}/**/*
335 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
336 | changes:
337 | - {{ project.path }}/**/*
338 |
339 | test:deploy:
340 | stage: update
341 | extends: .runner_tag_selection
342 | environment:
343 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
344 | name: review/${CI_COMMIT_REF_NAME}
345 | on_stop: stop_review
346 | script:
347 | - cd ${BUILD_DIR}
348 | - ls -lah
349 | - ls -lah .cache/
350 | - ls -lah web/sites/*/files/
351 | - printf "\033[1m- Getting name of last tag created...\n\033[0m"
352 | - export LAST_TAG=$(curl --header "Authorization:${GITLAB_PROJECT_BASIC_AUTH}" --header "PRIVATE-TOKEN:${GITLAB_PROJECT_ACCESS_TOKEN}" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/repository/tags" | jq -r '.[0] | .name')
353 | - |
354 | if [ "${LAST_TAG}" = "null" ]; then printf "\033[1m- No tag was found. Nothing to do here.\n\033[0m"; else \
355 | printf "\033[1m- Tag found! (${LAST_TAG}) Downloading it's artifacts...\n\033[0m" && \
356 | curl --location --output artifacts.zip --header "Authorization:${GITLAB_PROJECT_BASIC_AUTH}" --header "PRIVATE-TOKEN:${GITLAB_PROJECT_ACCESS_TOKEN}" "${CI_API_V4_URL}/projects/${CI_PROJECT_ID}/jobs/artifacts/${LAST_TAG}/download?job=build:tag" && \
357 | printf "\033[1m- Artifacts were successfully downloaded from tag ${LAST_TAG}\n\033[0m" && \
358 | printf "\033[1m- Replacing DB and files of ${CI_ENVIRONMENT_URL} from those of ${LAST_TAG} tag artifacts\n\033[0m" && \
359 | docker compose exec -T --user root php ash -c "chmod -R 775 .cache/ web/sites/*/files/" && \
360 | rm -rf .cache/* web/sites/*/files/* && \
361 | unzip -o artifacts.zip && \
362 | printf "\033[1m- DB and files from ${CI_ENVIRONMENT_URL} were successfully replaced from those of the artifacts\n\033[0m" && \
363 | make drush cr && \
364 | # Disabling config_ignore so that all configs will be imported below
365 | if [ $(docker compose exec -T php sh -c "drush config:status --state=Any --format=list | grep config_ignore.settings | wc -l") -gt 0 ]; then make drush config:delete config_ignore.settings -- -y && make drush cr; fi && \
366 | printf "\033[1m- Simulating deployment against tag ${LAST_TAG}...\n\033[0m" && \
367 | make drush deploy -- -y && \
368 | printf "\033[1m- Successful deployment simulation of ${CI_COMMIT_REF_NAME} against tag ${LAST_TAG} using ${CI_ENVIRONMENT_URL} review app.\n\033[0m"; fi
369 | rules:
370 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $TEST_UPDATE_DEPLOYMENTS == "TRUE"
371 | changes:
372 | - {{ project.path }}/**/*
373 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $TEST_UPDATE_DEPLOYMENTS == "TRUE"
374 | changes:
375 | - {{ project.path }}/**/*
376 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $TEST_UPDATE_DEPLOYMENTS == "TRUE"
377 | changes:
378 | - {{ project.path }}/**/*
379 |
380 | test:storybook:
381 | stage: tests
382 | environment:
383 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
384 | name: review/${CI_COMMIT_REF_NAME}
385 | on_stop: stop_review
386 | script:
387 | - cd ${BUILD_DIR}
388 | - make build-storybook
389 | - echo "- Storybook is accessible here :" && echo "${CI_ENVIRONMENT_URL}/${STORYBOOK_PATH}"
390 | extends: .runner_tag_selection
391 | rules:
392 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $STORYBOOK_PATH
393 | changes:
394 | - {{ project.path }}/**/*
395 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $STORYBOOK_PATH
396 | changes:
397 | - {{ project.path }}/**/*
398 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $STORYBOOK_PATH
399 | changes:
400 | - {{ project.path }}/**/*
401 |
402 | test:behat:
403 | stage: tests
404 | environment:
405 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
406 | name: review/${CI_COMMIT_REF_NAME}
407 | on_stop: stop_review
408 | script:
409 | - echo "Starting job script in ${BUILD_DIR}"
410 | - cd ${BUILD_DIR}
411 | - BEHAT_ARGS='--colors -f pretty -o std -f junit -o features/junit' make behat
412 | after_script:
413 | - cd ${BUILD_DIR}
414 | - make browser_driver_stop
415 | - rm -rf web/screenshots
416 | - mkdir web/screenshots
417 | - mv features/*.png web/screenshots/
418 | - echo "- Screenshots are available here :" && cd web/screenshots && ls -h *.png | xargs -i echo "${CI_ENVIRONMENT_URL}/screenshots/{}"
419 | - mv ${BUILD_DIR}/features/junit ${CI_PROJECT_DIR}
420 | retry:
421 | max: 2
422 | when: script_failure
423 | dependencies:
424 | - build:review
425 | extends: .runner_tag_selection
426 | rules:
427 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
428 | changes:
429 | - {{ project.path }}/**/*
430 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
431 | changes:
432 | - {{ project.path }}/**/*
433 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
434 | changes:
435 | - {{ project.path }}/**/*
436 | artifacts:
437 | expire_in: 1 week
438 | when: always
439 | paths:
440 | - junit/*.xml
441 | reports:
442 | junit: junit/*.xml
443 |
444 | test:cinsp:
445 | stage: tests
446 | environment:
447 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
448 | name: review/${CI_COMMIT_REF_NAME}
449 | on_stop: stop_review
450 | script:
451 | - echo "Starting job script in ${BUILD_DIR}"
452 | - cd ${BUILD_DIR}
453 | - make cinsp
454 | dependencies:
455 | - build:review
456 | extends: .runner_tag_selection
457 | rules:
458 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
459 | changes:
460 | - {{ project.path }}/**/*
461 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
462 | changes:
463 | - {{ project.path }}/**/*
464 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
465 | changes:
466 | - {{ project.path }}/**/*
467 |
468 | test:drupalrector:
469 | stage: tests
470 | environment:
471 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
472 | name: review/${CI_COMMIT_REF_NAME}
473 | on_stop: stop_review
474 | script:
475 | - echo "Starting job script in ${BUILD_DIR}"
476 | - cd ${BUILD_DIR}
477 | - make drupalrectorval
478 | dependencies:
479 | - build:review
480 | extends: .runner_tag_selection
481 | rules:
482 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
483 | changes:
484 | - {{ project.path }}/**/*
485 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
486 | changes:
487 | - {{ project.path }}/**/*
488 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
489 | changes:
490 | - {{ project.path }}/**/*
491 |
492 | test:lighthouse:
493 | stage: tests
494 | environment:
495 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
496 | name: review/${CI_COMMIT_REF_NAME}
497 | on_stop: stop_review
498 | script:
499 | - echo "Starting job script in ${BUILD_DIR}"
500 | - cd ${BUILD_DIR}
501 | - cat lighthouserc.yml
502 | - docker run --tty --rm --init -d -v `pwd`:`pwd` -w `pwd` -u $(id -u):$(id -g) --entrypoint="/bin/bash" --name "${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse" cypress/browsers:node14.15.0-chrome86-ff82
503 | - docker exec --tty ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse node -v
504 | - docker exec --tty ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse npm -v
505 | - docker exec --tty -u root ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse npm install -g @lhci/cli@0.6.x
506 | - docker exec --tty ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse lhci --version
507 | - docker exec --tty ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse lhci healthcheck
508 | - docker exec --tty ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse lhci collect --url=https://${RA_BASIC_AUTH_USERNAME}:${RA_BASIC_AUTH_PASSWORD}@${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
509 | - docker exec --tty ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse lhci assert
510 | after_script:
511 | - if [ -n `docker ps -f 'name=${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse' -q` ]; then echo 'Stopping container'; docker rm --force ${CI_PROJECT_NAME}-review-${CI_COMMIT_REF_SLUG}_lighthouse; fi
512 | - cd ${BUILD_DIR}
513 | - if [ -d .lighthouseci ]; then mv .lighthouseci web/lighthouseci; echo "- Reports are available here :" && for i in $(find web/lighthouseci/ -name "*.html"); do basename $i;done | xargs -i echo "${CI_ENVIRONMENT_URL}/lighthouseci/{}"; fi
514 | dependencies:
515 | - build:review
516 | extends: .runner_tag_selection
517 | rules:
518 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
519 | changes:
520 | - {{ project.path }}/**/*
521 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
522 | changes:
523 | - {{ project.path }}/**/*
524 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
525 | changes:
526 | - {{ project.path }}/**/*
527 |
528 | test:contentgen:
529 | stage: tests
530 | environment:
531 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
532 | name: review/${CI_COMMIT_REF_NAME}
533 | on_stop: stop_review
534 | script:
535 | - echo "Starting job script in ${BUILD_DIR}"
536 | - cd ${BUILD_DIR}
537 | - make contentgen
538 | dependencies:
539 | - build:review
540 | extends: .runner_tag_selection
541 | rules:
542 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
543 | changes:
544 | - {{ project.path }}/**/*
545 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
546 | changes:
547 | - {{ project.path }}/**/*
548 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
549 | changes:
550 | - {{ project.path }}/**/*
551 | when: manual
552 |
553 | test:patch:
554 | stage: tests
555 | environment:
556 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
557 | name: review/${CI_COMMIT_REF_NAME}
558 | on_stop: stop_review
559 | script:
560 | - echo "Starting job script in ${BUILD_DIR}"
561 | - cd ${BUILD_DIR}
562 | - make patchval
563 | extends: .runner_tag_selection
564 | rules:
565 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline' && $RUN_PATCHVAL_CI_JOB != "FALSE"
566 | changes:
567 | - {{ project.path }}/**/*
568 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event' && $RUN_PATCHVAL_CI_JOB != "FALSE"
569 | changes:
570 | - {{ project.path }}/**/*
571 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH && $RUN_PATCHVAL_CI_JOB != "FALSE"
572 | changes:
573 | - {{ project.path }}/**/*
574 | # Use $RUN_PATCHVAL_CI_JOB=TRUE only in case patching upstream is not possible at all, like private packages
575 |
576 | test:statusreport:
577 | stage: 'more tests'
578 | environment:
579 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
580 | name: review/${CI_COMMIT_REF_NAME}
581 | on_stop: stop_review
582 | script:
583 | - echo "Starting job script in ${BUILD_DIR}"
584 | - cd ${BUILD_DIR}
585 | - make statusreportval
586 | dependencies:
587 | - build:review
588 | extends: .runner_tag_selection
589 | rules:
590 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
591 | changes:
592 | - {{ project.path }}/**/*
593 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
594 | changes:
595 | - {{ project.path }}/**/*
596 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
597 | changes:
598 | - {{ project.path }}/**/*
599 |
600 | test:upgradestatus:
601 | stage: 'more tests'
602 | environment:
603 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
604 | name: review/${CI_COMMIT_REF_NAME}
605 | on_stop: stop_review
606 | script:
607 | - echo "Starting job script in ${BUILD_DIR}"
608 | - cd ${BUILD_DIR}
609 | - make upgradestatusval
610 | dependencies:
611 | - build:review
612 | extends: .runner_tag_selection
613 | rules:
614 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
615 | changes:
616 | - {{ project.path }}/**/*
617 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
618 | changes:
619 | - {{ project.path }}/**/*
620 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
621 | changes:
622 | - {{ project.path }}/**/*
623 |
624 | test:watchdog:
625 | stage: 'more tests'
626 | environment:
627 | url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
628 | name: review/${CI_COMMIT_REF_NAME}
629 | on_stop: stop_review
630 | script:
631 | - echo "Starting job script in ${BUILD_DIR}"
632 | - cd ${BUILD_DIR}
633 | - make watchdogval
634 | dependencies:
635 | - build:review
636 | extends: .runner_tag_selection
637 | rules:
638 | - if: $CI_PIPELINE_SOURCE == 'parent_pipeline'
639 | changes:
640 | - {{ project.path }}/**/*
641 | - if: $CI_PIPELINE_SOURCE == 'merge_request_event'
642 | changes:
643 | - {{ project.path }}/**/*
644 | - if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
645 | changes:
646 | - {{ project.path }}/**/*
647 |
648 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Skilld SAS
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | # Add utility functions and scripts to the container
2 | include scripts/makefile/*.mk
3 |
4 | .PHONY: all fast allfast provision si exec exec0 down clean dev drush info phpcs phpcbf hooksymlink clang cinsp compval watchdogval drupalrectorval upgradestatusval behat sniffers tests front front-install front-build clear-front lintval lint storybook back behatdl behatdi browser_driver browser_driver_stop statusreportval contentgen newlineeof localize local-settings redis-settings content patchval diff
5 | .DEFAULT_GOAL := help
6 |
7 | # https://stackoverflow.com/a/6273809/1826109
8 | %:
9 | @:
10 |
11 | # Prepare environment variables from defaults
12 | $(shell false | cp -i \.env.default \.env 2>/dev/null)
13 | $(shell false | cp -i \.\/docker\/docker-compose\.override\.yml\.default \.\/docker\/docker-compose\.override\.yml 2>/dev/null)
14 | include .env
15 | $(shell sed -i -e '/COMPOSE_PROJECT_NAME=/ s/=.*/=$(shell echo "$(COMPOSE_PROJECT_NAME)" | tr -cd '[a-zA-Z0-9]' | tr '[:upper:]' '[:lower:]')/' .env)
16 |
17 | # Get user/group id to manage permissions between host and containers
18 | LOCAL_UID := $(shell id -u)
19 | LOCAL_GID := $(shell id -g)
20 |
21 | # Evaluate recursively
22 | CUID ?= $(LOCAL_UID)
23 | CGID ?= $(LOCAL_GID)
24 |
25 | # Define current directory only once
26 | CURDIR=$(shell pwd)
27 |
28 | # Define network name.
29 | COMPOSE_NET_NAME := $(COMPOSE_PROJECT_NAME)_front
30 |
31 | compose = docker compose --env-file .env ${1}
32 |
33 | SDC_SERVICES=$(call compose, config --services)
34 | # Determine database data directory if defined
35 | DB_MOUNT_DIR=$(shell echo $(CURDIR))/$(shell basename $(DB_DATA_DIR))
36 | ifeq ($(findstring mysql,$(SDC_SERVICES)),mysql)
37 | DB_MOUNT_DIR=$(shell echo $(CURDIR))/$(shell basename $(DB_DATA_DIR))/$(COMPOSE_PROJECT_NAME)_mysql
38 | endif
39 | ifeq ($(findstring postgresql,$(SDC_SERVICES)),postgresql)
40 | DB_MOUNT_DIR=$(shell echo $(CURDIR))/$(shell basename $(DB_DATA_DIR))/$(COMPOSE_PROJECT_NAME)_pgsql
41 | endif
42 |
43 |
44 | # Execute php container as regular user
45 | php = docker compose --env-file .env exec -T --user $(CUID):$(CGID) php ${1}
46 | # Execute php container as root user
47 | php-0 = docker compose --env-file .env exec -T --user 0:0 php ${1}
48 |
49 | ADDITIONAL_PHP_PACKAGES ?= # php83-intl php83-redis php83-pdo_pgsql postgresql-client
50 | DC_MODULES := project_default_content default_content serialization
51 | MG_MODULES := migrate_generator migrate migrate_plus migrate_source_csv
52 |
53 | EXEC_SHELL?=/bin/ash
54 | PKGMAN?=apk
55 | apk = apk add --no-cache $(1)
56 | apt = $(EXEC_SHELL) -c "DEBIAN_FRONTEND=noninteractive && apt-get update && apt-get install -qy $(1) && rm -rf /var/lib/apt/lists/*"
57 |
58 | ## Full site install from scratch
59 | all: | provision back front si localize hooksymlink info
60 | # Install for CI deploy:review. Back & Front tasks are run in a dedicated previous step in order to leverage CI cache
61 | all_ci: | provision si localize hooksymlink info
62 | # Full site install from scratch with DB in RAM (makes data NOT persistent)
63 | allfast: | fast provision back front si localize hooksymlink info
64 |
65 | ## Update .env to build DB in RAM (makes data NOT persistent)
66 | fast:
67 | $(shell sed -i "s|^#DB_URL=sqlite:///dev/shm/db.sqlite|DB_URL=sqlite:///dev/shm/db.sqlite|g" .env)
68 | $(shell sed -i "s|^DB_URL=sqlite://./../.cache/db.sqlite|#DB_URL=sqlite://./../.cache/db.sqlite|g" .env)
69 |
70 | ## Provision environment
71 | provision:
72 | # Check if environment variables have been defined
73 | ifeq ($(strip $(COMPOSE_PROJECT_NAME)),projectname)
74 | $(eval COMPOSE_PROJECT_NAME = $(strip $(shell read -p "- Please customize project name: " REPLY;echo -n $$REPLY)))
75 | $(shell sed -i -e '/COMPOSE_PROJECT_NAME=/ s/=.*/=$(shell echo "$(COMPOSE_PROJECT_NAME)" | tr -cd '[a-zA-Z0-9]' | tr '[:upper:]' '[:lower:]')/' .env)
76 | $(info - Run `make all` again.)
77 | @echo
78 | exit 1
79 | endif
80 | ifdef DB_MOUNT_DIR
81 | $(shell [ ! -d $(DB_MOUNT_DIR) ] && mkdir -p $(DB_MOUNT_DIR) && chmod 777 $(DB_MOUNT_DIR))
82 | endif
83 | make -s down
84 | @echo "Build and run containers..."
85 | $(call compose, up -d --remove-orphans)
86 | ifneq ($(strip $(ADDITIONAL_PHP_PACKAGES)),)
87 | $(call php-0, $(call $(PKGMAN),$(ADDITIONAL_PHP_PACKAGES)))
88 | endif
89 | # Install newrelic PHP extension if NEW_RELIC_LICENSE_KEY is defined
90 | make -s newrelic reload
91 |
92 | ## Install backend dependencies
93 | back:
94 | @echo "Installing composer dependencies, without dev ones"
95 | $(call php, composer install --no-interaction --prefer-dist -o --no-dev)
96 | $(call php, composer create-required-files)
97 | @echo "Restarting web-server after getting new source"
98 | $(call php-0, /bin/sh ./scripts/makefile/reload.sh)
99 |
100 | $(eval TESTER_NAME := tester)
101 | $(eval TESTER_ROLE := contributor)
102 | ## Install drupal
103 | si:
104 | @echo "Installing from: $(PROJECT_INSTALL)"
105 | make -s local-settings
106 | ifeq ($(PROJECT_INSTALL), config)
107 | $(call php, drush si --existing-config --db-url="$(DB_URL)" --account-name="$(ADMIN_NAME)" --account-mail="$(ADMIN_MAIL)" -y)
108 | # install_import_translations() overwrites config translations so we need to reimport.
109 | $(call php, drush cim -y)
110 | else
111 | $(call php, drush si $(PROFILE_NAME) --db-url="$(DB_URL)" --account-name="$(ADMIN_NAME)" --account-mail="$(ADMIN_MAIL)" -y --site-name="$(SITE_NAME)" --site-mail="$(SITE_MAIL)" install_configure_form.site_default_country=FR install_configure_form.date_default_timezone=Europe/Paris)
112 | endif
113 | make content
114 | #make -s redis-settings
115 | $(call php, drush user:create "$(TESTER_NAME)")
116 | $(call php, drush user:role:add "$(TESTER_ROLE)" "$(TESTER_NAME)")
117 |
118 | content:
119 | ifneq ($(strip $(DC_MODULES)),)
120 | $(call php, drush en $(DC_MODULES) -y)
121 | $(call php, drush pmu $(DC_MODULES) -y)
122 | endif
123 | ifneq ($(strip $(MG_MODULES)),)
124 | $(call php, drush en $(MG_MODULES) -y)
125 | $(call php, drush migrate_generator:generate_migrations /var/www/html/content --update)
126 | $(call php, drush migrate:import --tag=mgg)
127 | $(call php, drush migrate_generator:clean_migrations mgg)
128 | $(call php, drush pmu $(MG_MODULES) -y)
129 | endif
130 |
131 | local-settings:
132 | ifneq ("$(wildcard settings/settings.local.php)","")
133 | @echo "Turn on settings.local"
134 | $(call php-0, chmod ug+w web/sites/default web/sites/default/settings.local.php || true)
135 | $(call php, cp settings/settings.local.php web/sites/default/settings.local.php)
136 | $(call php-0, sed -i "/settings.local.php';/s/# //g" web/sites/default/settings.php)
137 | endif
138 |
139 | REDIS_IS_INSTALLED := $(shell grep "redis.connection" web/sites/default/settings.php 2> /dev/null | tail -1 | wc -l || echo "0")
140 | redis-settings:
141 | ifeq ($(REDIS_IS_INSTALLED), 1)
142 | @echo "Redis settings already installed, nothing to do"
143 | else
144 | @echo "Turn on Redis settings"
145 | $(call php-0, chmod -R +w web/sites/)
146 | $(call php, cat settings/settings.redis.php >> web/sites/default/settings.php)
147 | endif
148 |
149 | ## Import online & local translations
150 | localize:
151 | @echo "Checking & importing online translations..."
152 | $(call php, drush locale:check)
153 | $(call php, drush locale:update)
154 | @echo "Importing custom translations..."
155 | $(call php, drush locale:import:all /var/www/html/translations/ --type=customized --override=all)
156 | @echo "Localization finished"
157 |
158 | define get_login_url
159 | $(shell $(call php, drush user:login --name="$(1)" /admin/content/) | grep -v ERROR | head -n1 | sed 's|http://default||')
160 | endef
161 | ## Display project's information
162 | info:
163 | $(info )
164 | $(info Containers for "$(COMPOSE_PROJECT_NAME)" info:)
165 | $(eval CONTAINERS = $(shell docker ps -f name=$(COMPOSE_PROJECT_NAME) --format "{{ .ID }}" -f 'label=traefik.enable=true'))
166 | $(foreach CONTAINER, $(CONTAINERS),$(info http://$(shell printf '%-19s \n' $(shell docker inspect --format='{{(index .NetworkSettings.Networks "$(COMPOSE_NET_NAME)").IPAddress}}:{{index .Config.Labels "sdc.port"}} {{range $$p, $$conf := .NetworkSettings.Ports}}{{$$p}}{{end}} {{.Name}}' $(CONTAINER) | rev | sed "s/pct\//,pct:/g" | sed "s/,//" | rev | awk '{ print $0}')) ))
167 | $(info )
168 | ifdef REVIEW_DOMAIN
169 | $(eval BASE_URL := $(MAIN_DOMAIN_NAME))
170 | else
171 | $(eval BASE_URL := $(shell docker inspect --format='{{(index .NetworkSettings.Networks "$(COMPOSE_NET_NAME)").IPAddress}}:{{index .Config.Labels "sdc.port"}}' $(COMPOSE_PROJECT_NAME)_web))
172 | endif
173 | $(info Login as System Admin: http://$(BASE_URL)$(call get_login_url,$(ADMIN_NAME)))
174 | $(info Login as Contributor: http://$(BASE_URL)$(call get_login_url,$(TESTER_NAME)))
175 | $(info )
176 | ifneq ($(shell diff .env .env.default -q),)
177 | @echo -e "\x1b[33mWARNING\x1b[0m - .env and .env.default files differ. Use 'make diff' to see details."
178 | endif
179 | ifneq ($(shell diff docker/docker-compose.override.yml docker/docker-compose.override.yml.default -q),)
180 | @echo -e "\x1b[33mWARNING\x1b[0m - docker/docker-compose.override.yml and docker/docker-compose.override.yml.default files differ. Use 'make diff' to see details."
181 | endif
182 |
183 | ## Output diff between local and versioned files
184 | diff:
185 | diff -u0 --color .env .env.default || true; echo ""
186 | diff -u0 --color docker/docker-compose.override.yml docker/docker-compose.override.yml.default || true; echo ""
187 |
188 | ## Run interactive shell ($(EXEC_SHELL)) in PHP container as regular user ($(CUID):$(CGID))
189 | exec:
190 | $(call compose, exec --user $(CUID):$(CGID) php $(EXEC_SHELL))
191 |
192 | ## Run interactive shell ($(EXEC_SHELL)) in PHP container as root
193 | exec0:
194 | $(call compose, exec --user 0:0 php $(EXEC_SHELL))
195 |
196 | down: # Stop & remove project containers, network, volumes and locally-built images; also stops a running browser driver
197 | @echo "Removing network & containers for $(COMPOSE_PROJECT_NAME)"
198 | $(call compose, down -v --remove-orphans --rmi local)
199 | @if [ ! -z "$(shell docker ps -f 'name=$(COMPOSE_PROJECT_NAME)_chrome' --format '{{.Names}}')" ]; then \
200 | echo 'Stopping browser driver.' && make -s browser_driver_stop; fi
201 |
202 | DIRS = web/core web/libraries web/modules/contrib web/profiles/contrib web/sites web/themes/contrib vendor
203 |
204 | ## Totally remove project build folder, docker containers and network
205 | clean: info
206 | make -s down
207 | ifdef CURDIR
208 | $(eval SCAFFOLD = $(shell docker run --rm -v $(CURDIR):/mnt -w /mnt --user $(CUID):$(CGID) $(IMAGE_PHP) composer run-script list-scaffold-files | grep -P '^(?!>)'))
209 | @docker run --rm --user 0:0 -v $(CURDIR):/mnt -w /mnt -e RMLIST="$(addprefix web/,$(SCAFFOLD)) $(DIRS)" $(IMAGE_PHP) sh -c 'for i in $$RMLIST; do rm -fr $$i && echo "Removed $$i"; done'
210 | endif
211 | ifdef DB_MOUNT_DIR
212 | @echo "Clean-up database data from $(DB_MOUNT_DIR) ..."
213 | docker run --rm --user 0:0 -v $(shell dirname $(DB_MOUNT_DIR)):/mnt $(IMAGE_PHP) sh -c "rm -fr /mnt/`basename $(DB_MOUNT_DIR)`"
214 | endif
215 | ifeq ($(CLEAR_FRONT_PACKAGES), yes)
216 | make clear-front
217 | endif
218 |
219 | ## Enable development mode and disable caching
220 | dev:
221 | @echo "Dev tasks..."
222 | $(call php, composer install --no-interaction --prefer-dist -o)
223 | @$(call php-0, chmod +w web/sites/default)
224 | @$(call php, cp web/sites/default/default.services.yml web/sites/default/services.yml)
225 | @$(call php, sed -i -e 's/debug: false/debug: true/g' web/sites/default/services.yml)
226 | @$(call php, cp web/sites/example.settings.local.php web/sites/default/settings.local.php)
227 | @echo "Including settings.local.php."
228 | @$(call php-0, sed -i "/settings.local.php';/s/# //g" web/sites/default/settings.php)
229 | @$(call php, drush -y -q config-set system.performance css.preprocess 0)
230 | @$(call php, drush -y -q config-set system.performance js.preprocess 0)
231 | @echo "Enabling devel module."
232 | @$(call php, drush -y -q en devel devel_generate)
233 | @echo "Disabling caches."
234 | @$(call php, drush -y -q pm-uninstall dynamic_page_cache page_cache)
235 | @$(call php, drush cr)
236 |
237 | ## Run drush command in PHP container. To pass arguments use double dash: "make drush dl devel -- -y"
238 | drush:
239 | $(call php, $(filter-out "$@",$(MAKECMDGOALS)))
240 | $(info "To pass arguments use double dash: "make drush en devel -- -y"")
241 |
242 | ## Reconfigure app-server via directory with config file
243 | # unit https://unit.nginx.org/configuration/#process-management
244 | # frankenphp/caddy https://caddyserver.com/docs/api
245 | reload:
246 | $(call php-0, /bin/sh ./scripts/makefile/reload.sh /var/www/html/docker)
247 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Skilld docker container
2 |
3 | ---
4 |
5 | - [Skilld docker container](#Skilld-docker-container)
6 | - [Overview](#Overview)
7 | - [What is this?](#What-is-this)
8 | - [What is this not?](#What-is-this-not)
9 | - [Quickstart](#Quickstart)
10 | - [Used variables](#Used-variables)
11 | - [Persistent Mysql](#Persistent-Mysql)
12 | - [Network](#Network)
13 | - [Usage](#Usage)
14 | - [Additional goals](#Additional-goals)
15 | - [Support](#Support)
16 | - [Drush commands](#Drush-commands)
17 | - [Troubleshooting](#Troubleshooting)
18 | - [Git hooks](#Git-hooks)
19 | - [License](#License)
20 |
21 |
22 | ## Overview
23 |
24 | **Skilld docker container** is a developer starterkit for your Drupal project.
25 |
26 | ## What is this?
27 |
28 | * This is a developer starterkit which can be used for local drupal development or/and integration into your CI/CD processes.
29 |
30 | ## What is this not?
31 |
32 | * This is not a `ready to use` tool; a list of such tools can be found in this google doc
33 | * Another quick solution https://gist.github.com/andypost/f8e359f2e80cb7d4737350189f009646
34 |
35 |
36 | ## Quickstart
37 |
38 | * Install docker for Linux, Mac, Windows
39 | * Check post-installation steps for Linux version 18.06.0 or later
40 | * Install Docker Compose V2 version **2.0** or later
41 |
42 | * Copy **.env.default** to **.env**, more information about the environment file can be found at docs.docker.com
43 | * Copy **docker-compose.override.yml.default** to **docker-compose.override.yml**, update parts you want to overwrite.
44 | * **docker-compose.yml** contains the base requirements of a working Drupal site. It should not be updated.
45 | * Update **.gitlab-ci.yml** `variables` section THEME_PATH to make front gitlab CI works.
46 | * Run `make all`
47 |
48 |
49 | #### Used variables
50 |
51 | | Variable name | Description | Default value |
52 | | --------------- | ----------------------- | ------------- |
53 | | COMPOSE_FILE | Path to a Compose file(s) | `./docker/docker-compose.yml:./docker/docker-compose.override.yml` |
54 | | COMPOSE_PROJECT_NAME | Your project name | - |
55 | | PROFILE_NAME | Profile used for site install | druxxy |
56 | | MODULES | Additional modules to enable after site install | project_default_content |
57 | | THEME_NAME | Name of theme directory in /web/themes | `NA` |
58 | | SITE_NAME | Site name | Example |
59 | | SITE_MAIL | Site e-mail address | admin@example.com |
60 | | ADMIN_NAME | Admin username | admin |
61 | | PROJECT_INSTALL | Way to install site - standard (straight) install or from existing config | - |
62 | | IMAGE_PHP | Php image to use | `skilldlabs/php:83-unit` |
63 | | EXEC_SHELL | Shell to use in PHP-container (`ash`/`bash`) | `/bin/ash` |
64 | | PKGMAN | Package manager to use in PHP-container (`apk`/`apt`) | `apk` |
65 | | ADDITIONAL_PHP_PACKAGES | Additional php extensions and tools to install | `graphicsmagick` |
66 | | IMAGE_NGINX | Image to use for nginx container | `skilldlabs/nginx:1.24` |
67 | | IMAGE_APACHE | Image to use for apache container | `skilldlabs/skilld-docker-apache` |
68 | | IMAGE_FRONT | Image to use for front tasks | `skilldlabs/frontend:zen` |
69 | | IMAGE_DRIVER | Image to use for automated testing webdriver | `zenika/alpine-chrome` |
70 | | MAIN_DOMAIN_NAME | Domain name used for traefik | `docker.localhost` |
71 | | DB_URL | Url to connect to database | `sqlite:///dev/shm/db.sqlite` |
72 | | DB_DATA_DIR | Full path to database storage | `/dev/shm` |
73 | | CLEAR_FRONT_PACKAGES | Set it to `no` to keep `/node_modules` directory in theme after `make front` task to save build time. | yes |
74 | | RA_BASIC_AUTH | username:hashed-password format defining BasicAuth in Traefik. Password hashed using `htpasswd -nibB username password!` as [described here](https://doc.traefik.io/traefik/middlewares/basicauth/#general) | - |
75 |
76 | #### Persistent Mysql
77 |
78 | * By default, sqlite storage is used, which is created inside the php container. If you need persistent data to be saved:
79 | * Update `docker-compose.override.yml`, set
80 | ```yaml
81 | php:
82 | depends_on:
83 | - mysql
84 | ```
85 | and update mysql container part
86 | ```yaml
87 | mysql:
88 | image: mysql:8.0-oraclelinux8
89 | ...
90 | ```
91 | * Update `.env` file, and set `DB_URL=mysql://db:db@mysql/db`
92 |
93 | #### Network
94 |
95 | * Every time the project is built, it takes a new available IP address. If you want a persistent IP, uncomment these lines in `docker-compose.override.yml`
96 | ```yaml
97 | networks:
98 | front:
99 | driver: bridge
100 | ipam:
101 | driver: default
102 | config:
103 | - subnet: "172.18.0.5"
104 | ```
105 |
106 | ## Usage
107 |
108 | * `make` - Show this info.
109 | * `make all` - Full project install from the scratch.
110 | * `make clean` - Totally remove project build folder, files, docker containers and network.
111 | * `make si` - Install/reinstall site.
112 | * `make info` - Show project services IP addresses.
113 | * `make diff` - Show changes in overrides (needs local `diff` command).
114 | * `make exec` - `docker exec` into php container.
115 | * `make exec0` - `docker exec` into php container as root.
116 | * `make dev` - Devel + kint setup, and config for Twig debug mode, disable aggregation.
117 | * `make drush [command]` - execute drush command.
118 | * `make phpcs` - Check codebase with `phpcs` sniffers to make sure it conforms https://www.drupal.org/docs/develop/standards.
119 | * `make phpcbf` - Fix codebase according to Drupal standards https://www.drupal.org/docs/develop/standards.
120 | * `make front` - Builds frontend tasks.
121 | * `make lint` - Runs frontend linters.
122 | * `make storybook` - Runs storybook in current theme.
123 | * `make blackfire` - Adds and enables blackfire.io php extension, needs [configuration](https://blackfire.io/docs/configuration/php) in docker-compose.override.yml.
124 | * `make newrelic` - Adds and enables newrelic.com php extension, needs [configuration](https://docs.newrelic.com/docs/agents/php-agent/getting-started/introduction-new-relic-php#configuration) `NEW_RELIC_LICENSE_KEY` environment variable defined with valid license key.
125 | * `make xdebug (on|off|status)` - Enable, disable or report status of [Xdebug](https://xdebug.org/docs/) PHP extension.
126 |
127 | #### Additional goals
128 |
129 | * If you need to add your custom/specific project goal, create new file in `scripts/makefile/myfile.mk` and describe goal inside. Example can be found at `scripts/makefile/backup.mk`
130 |
131 | ## Support
132 |
133 | * This project is supported by © Skilld SAS
134 |
135 | ## Drush commands
136 |
137 | * You can run any drush command `make drush [command -- -argument]`
138 |
139 | ## Troubleshooting
140 |
141 | * Use our issue queue, which is public, to search or add new issues.
142 |
143 | ## Git hooks
144 |
145 | * Project includes [git hooks](https://git-scm.com/book/en/v2/Customizing-Git-Git-Hooks) to perform automatic validation when certain git commands are executed
146 | * You can bypass this validation with option `--no-verify`
147 |
148 | ## License
149 |
150 | This project is licensed under the MIT open source license.
151 |
--------------------------------------------------------------------------------
/behat.default.yml:
--------------------------------------------------------------------------------
1 | default:
2 | suites:
3 | default:
4 | contexts:
5 | - FeatureContext
6 | - Drupal\DrupalExtension\Context\DrupalContext
7 | - Drupal\DrupalExtension\Context\MinkContext
8 | - Drupal\DrupalExtension\Context\MessageContext
9 | - Drupal\DrupalExtension\Context\DrushContext
10 | - FailAid\Context\FailureContext
11 | - espend\Behat\PlaceholderExtension\Context\PlaceholderContext
12 | extensions:
13 | DMore\ChromeExtension\Behat\ServiceContainer\ChromeExtension: ~
14 | Behat\MinkExtension:
15 | browser_name: chrome
16 | base_url: URL_TO_TEST # Will be replaced automatically by running "make behat" command
17 | sessions:
18 | default:
19 | chrome:
20 | api_url: http://0.0.0.0:9222
21 | validate_certificate: false
22 | socket_timeout: 20
23 | # selenium2: ~
24 | FailAid\Extension:
25 | screenshot:
26 | directory: /var/www/html/features/
27 | mode: default
28 | autoClean: true
29 | Drupal\DrupalExtension:
30 | # text:
31 | # log_in: "Log in"
32 | # password_field: "Password"
33 | # username_field: "Login by username/email address"
34 | blackbox: ~
35 | api_driver: 'drupal'
36 | drupal:
37 | drupal_root: 'web'
38 | selectors:
39 | message_selector: '.messages'
40 | error_message_selector: '.messages.messages-error'
41 | success_message_selector: '.messages.messages-status'
42 | espend\Behat\PlaceholderExtension\PlaceholderExtension: ~
43 |
--------------------------------------------------------------------------------
/composer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "skilld-labs/sdc",
3 |   "description": "Project template for Drupal 10 projects with composer",
4 | "type": "project",
5 | "license": "MIT",
6 | "authors": [
7 | {
8 | "name": "",
9 | "role": ""
10 | }
11 | ],
12 | "repositories": [
13 | {
14 | "type": "composer",
15 | "url": "https://packages.drupal.org/8"
16 | }
17 | ],
18 | "require": {
19 | "composer/installers": "^1.9 || ^2.0",
20 | "cweagans/composer-patches": "^1.7",
21 | "drupal/ckeditor": "^1.0",
22 | "drupal/color": "^1.0",
23 | "drupal/core-composer-scaffold": "^10.3.1",
24 | "drupal/core-vendor-hardening": "^10.3.1",
25 | "drupal/default_content": "^2",
26 | "drupal/imagemagick": "^4.0",
27 | "drupal/migrate_generator": "^2.0",
28 | "drupal/seven": "^1.0",
29 | "drush/drush": "^13.2",
30 | "skilldlabs/drupal-cleanup": "^1",
31 | "skilldlabs/druxxy": "^1.1",
32 | "webflo/drupal-finder": "^1.3"
33 | },
34 | "require-dev": {
35 | "dmore/behat-chrome-extension": "^1.3",
36 | "drupal/config_inspector": "^2.1",
37 | "drupal/devel": "^5.2",
38 | "drupal/drupal-extension": "^5.0",
39 | "drupal/upgrade_status": "^4.3",
40 | "espend/behat-placeholder-extension": "^1.1",
41 | "genesis/behat-fail-aid": "^3.7",
42 | "palantirnet/drupal-rector": "^0.20.3",
43 | "phpspec/prophecy-phpunit": "^2",
44 | "phpunit/phpunit": "^9"
45 | },
46 | "conflict": {
47 | "drupal/drupal": "*"
48 | },
49 | "minimum-stability": "dev",
50 | "prefer-stable": true,
51 | "config": {
52 | "sort-packages": true,
53 | "preferred-install": {
54 | "*": "dist"
55 | },
56 | "allow-plugins": {
57 | "composer/installers": true,
58 | "cweagans/composer-patches": true,
59 | "drupal/core-composer-scaffold": true,
60 | "drupal/core-project-message": true,
61 | "drupal/core-vendor-hardening": true,
62 | "skilldlabs/drupal-cleanup": true
63 | }
64 | },
65 | "autoload": {
66 | "classmap": [
67 | "scripts/composer/ScriptHandler.php"
68 | ]
69 | },
70 | "scripts": {
71 | "create-required-files": "SkilldDrupal\\composer\\ScriptHandler::createRequiredFiles",
72 | "list-scaffold-files": [
73 | "SkilldDrupal\\composer\\ScriptHandler::listScaffoldFiles"
74 | ],
75 | "pre-install-cmd": [
76 | "SkilldDrupal\\composer\\ScriptHandler::checkComposerVersion"
77 | ],
78 | "pre-update-cmd": [
79 | "SkilldDrupal\\composer\\ScriptHandler::checkComposerVersion"
80 | ]
81 | },
82 | "extra": {
83 | "installer-paths": {
84 | "web/core": ["type:drupal-core"],
85 | "web/libraries/{$name}": ["type:drupal-library"],
86 | "web/modules/contrib/{$name}": ["type:drupal-module"],
87 | "web/profiles/contrib/{$name}": ["type:drupal-profile"],
88 | "web/themes/contrib/{$name}": ["type:drupal-theme"],
89 | "drush/Commands/contrib/{$name}": ["type:drupal-drush"]
90 | },
91 | "drupal-scaffold": {
92 | "locations": {
93 | "web-root": "web/"
94 | },
95 | "file-mapping": {
96 | "[web-root]/web.config": false
97 | }
98 | },
99 | "composer-exit-on-patch-failure": true,
100 | "patchLevel": {
101 | "drupal/core": "-p2"
102 | },
103 | "drupal-cleanup": {
104 | "drupal-core": [
105 | "modules/*/tests",
106 | "modules/*/src/Tests",
107 | "profiles/demo_umami",
108 | "profiles/*/tests",
109 | "profiles/*testing*",
110 | "lib/Drupal/Core/Test",
111 | "scripts/test",
112 | "tests"
113 | ],
114 | "drupal-module": [
115 | "tests",
116 | "src/Tests"
117 | ],
118 | "exclude": [
119 | "web/core/tests"
120 | ]
121 | },
122 | "patches": {
123 | "drupal/default_content": {
124 | "Do not reimport existing entities": "https://www.drupal.org/files/issues/2022-07-29/default_content-fix-uuid-duplicate-entry-2698425.patch"
125 | }
126 | }
127 | }
128 | }
129 |
--------------------------------------------------------------------------------
/content/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/skilld-labs/skilld-docker-container/61b02df82c7b81bf079cf7e3a8ffec76c491e65a/content/.gitkeep
--------------------------------------------------------------------------------
/content/taxonomy_term-category.csv:
--------------------------------------------------------------------------------
1 | id;name;status;langcode
2 | 1;Article;1;en
3 | 2;Video;1;en
4 | 3;Document;1;en
5 | 4;Tools;1;en
6 |
--------------------------------------------------------------------------------
/content/user.csv:
--------------------------------------------------------------------------------
1 | id;name;mail;created;status;uuid
2 | 2;Default content author;;;1;4bad48eb-ff5b-45b4-b30c-ecabff09591a
3 |
--------------------------------------------------------------------------------
/docker/90-mail.ini:
--------------------------------------------------------------------------------
1 | sendmail_path = /usr/sbin/sendmail -t -i -S mailhog:1025
2 |
--------------------------------------------------------------------------------
/docker/Caddyfile:
--------------------------------------------------------------------------------
1 | # Adapted from https://caddy.community/t/caddyfile-for-drupal-10/21607/5
2 | {
3 | {$CADDY_GLOBAL_OPTIONS}
4 |
5 | frankenphp {
6 | {$FRANKENPHP_CONFIG}
7 | }
8 |
9 | # https://caddyserver.com/docs/caddyfile/directives#sorting-algorithm
10 | order php_server before file_server
11 | order php before file_server
12 | }
13 |
14 | {$CADDY_EXTRA_CONFIG}
15 |
16 | {$SERVER_NAME:localhost} {
17 | encode zstd br gzip
18 |
19 | @hiddenPhpFilesRegexp path_regexp \..*/.*\.php$
20 | error @hiddenPhpFilesRegexp 403
21 |
22 | @notFoundPhpFiles path_regexp /vendor/.*\.php$
23 | error @notFoundPhpFiles 404
24 |
25 | @notFoundPhpFilesRegexp path_regexp ^/sites/[^/]+/files/.*\.php$
26 | error @notFoundPhpFilesRegexp 404
27 |
28 | @privateDirRegexp path_regexp ^/sites/.*/private/
29 | error @privateDirRegexp 403
30 |
31 | @protectedFilesRegexp {
32 | not path /.well-known*
33 | path_regexp \.(engine|inc|install|make|module|profile|po|sh|.*sql|theme|twig|tpl(\.php)?|xtmpl|yml)(~|\.sw[op]|\.bak|\.orig|\.save)?$|^/(\..*|Entries.*|Repository|Root|Tag|Template|composer\.(json|lock)|web\.config|yarn\.lock|package\.json)$|^\/#.*#$|\.php(~|\.sw[op]|\.bak|\.orig|\.save)$
34 | }
35 | error @protectedFilesRegexp 403
36 |
37 | @static {
38 | file
39 | path *.avif *.css *.eot *.gif *.gz *.ico *.jpg *.jpeg *.js *.otf *.pdf *.png *.svg *.ttf *.webp *.woff *.woff2
40 | }
41 | header @static Cache-Control "max-age=31536000,public,immutable"
42 |
43 | {$CADDY_SERVER_EXTRA_DIRECTIVES}
44 |
45 | php_server
46 | }
47 |
--------------------------------------------------------------------------------
/docker/docker-compose.override.yml.default:
--------------------------------------------------------------------------------
1 | x-service-defaults:
2 | &service-defaults
3 | restart: always
4 | networks:
5 | - front
6 |
7 | x-service-newrelic:
8 | &service-newrelic
9 | newrelic:
10 | <<: *service-defaults
11 | image: newrelic/php-daemon
12 | container_name: "${COMPOSE_PROJECT_NAME}_newrelic"
13 |
14 | services:
15 | # <<: *service-newrelic
16 |
17 | # Override base service.
18 | php:
19 | environment:
20 | COMPOSER_MEMORY_LIMIT: "1G"
21 | # BLACKFIRE_CLIENT_ID: x
22 | # BLACKFIRE_CLIENT_TOKEN: x
23 | # NEW_RELIC_APPNAME: "${COMPOSE_PROJECT_NAME}"
24 | volumes:
25 | # Uncomment for MacOS.
26 | # - 'nfsmount:/var/www/html'
27 | - "./90-mail.ini:/etc/php83/conf.d/90-mail.ini:z"
28 | - "./xx-php.ini:/etc/php83/conf.d/xx-php.ini:z"
29 | # depends_on:
30 | # - mysql
31 | # - postgresql
32 | # Uncomment for MacOS.
33 | # ports:
34 | # - "8088:80"
35 | labels:
36 | - 'sdc.port=80'
37 | - 'traefik.enable=true'
38 |       # Traefik 2.x
39 | - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.rule=Host(`${MAIN_DOMAIN_NAME}`)'
40 | - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.tls.certresolver=dns'
41 | - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.tls=true'
42 | - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.middlewares=web-${COMPOSE_PROJECT_NAME}@docker'
43 | - 'traefik.http.middlewares.web-${COMPOSE_PROJECT_NAME}.basicauth.users=${RA_BASIC_AUTH}'
44 | - 'traefik.http.middlewares.web-${COMPOSE_PROJECT_NAME}.basicauth.removeheader=true'
45 |
46 | # Get access keys from https://blackfire.io/my/profiles
47 | # Then download extension using "make blackfire"
48 |
49 | # blackfire:
50 | # <<: *service-defaults
51 | # image: blackfire/blackfire
52 | # container_name: "${COMPOSE_PROJECT_NAME}_blackfire"
53 | # environment:
54 | # BLACKFIRE_SERVER_ID: x
55 | # BLACKFIRE_SERVER_TOKEN: x
56 | # BLACKFIRE_LOG_LEVEL: 1
57 |
58 | # adminer:
59 | # <<: *service-defaults
60 | # image: adminer:standalone
61 | # container_name: "${COMPOSE_PROJECT_NAME}_adminer"
62 | # links:
63 | # - mysql:mysql
64 | # depends_on:
65 | # - mysql
66 |
67 | # mysql:
68 | # <<: *service-defaults
69 | # image: mysql:8.0-oraclelinux8
70 | # container_name: "${COMPOSE_PROJECT_NAME}_mysql"
71 | # volumes:
72 | # - ${DB_DATA_DIR}/${COMPOSE_PROJECT_NAME}_mysql:/var/lib/mysql:Z
73 | # environment:
74 | # MYSQL_DATABASE: db
75 | # MYSQL_USER: db
76 | # MYSQL_PASSWORD: db
77 | # MYSQL_ROOT_PASSWORD: dbroot
78 |
79 | # postgresql:
80 | # <<: *service-defaults
81 | # image: postgres:14-alpine
82 | # container_name: "${COMPOSE_PROJECT_NAME}_pgsql"
83 | # volumes:
84 | # - ${DB_DATA_DIR}/${COMPOSE_PROJECT_NAME}_pgsql:/var/lib/postgresql/data:Z
85 | # - ./load-extension.sh:/docker-entrypoint-initdb.d/load-extension.sh
86 | # environment:
87 | # POSTGRES_DB: db
88 | # POSTGRES_USER: db
89 | # POSTGRES_PASSWORD: dbroot
90 | # PGDATA: /var/lib/postgresql/data
91 |
92 | mailhog:
93 | <<: *service-defaults
94 | image: axllent/mailpit
95 | container_name: "${COMPOSE_PROJECT_NAME}_mail"
96 | labels:
97 | - 'sdc.port=8025'
98 | - 'traefik.enable=true'
99 |       # Traefik 2.x
100 | - 'traefik.http.routers.mailhog-${COMPOSE_PROJECT_NAME}.rule=Host(`mail-${MAIN_DOMAIN_NAME}`)'
101 | - 'traefik.http.routers.mailhog-${COMPOSE_PROJECT_NAME}.tls.certresolver=dns'
102 | - 'traefik.http.routers.mailhog-${COMPOSE_PROJECT_NAME}.tls=true'
103 | - 'traefik.http.routers.mailhog-${COMPOSE_PROJECT_NAME}.middlewares=mailhog-${COMPOSE_PROJECT_NAME}@docker'
104 | - 'traefik.http.services.mailhog-${COMPOSE_PROJECT_NAME}.loadbalancer.server.port=8025'
105 | - 'traefik.http.middlewares.mailhog-${COMPOSE_PROJECT_NAME}.basicauth.users=${RA_BASIC_AUTH}'
106 | - 'traefik.http.middlewares.mailhog-${COMPOSE_PROJECT_NAME}.basicauth.removeheader=true'
107 |
108 | # nginx:
109 | ## Uncomment for MacOS.
110 | ## ports:
111 | ## - "8088:80"
112 | ## Mount local folder with ssl keys.
113 | ## volumes:
114 | ## - ./nginx/ssl:/etc/nginx/ssl:Z
115 | # labels:
116 | # - 'sdc.port=80'
117 | # - 'traefik.enable=true'
118 | #      # Traefik 2.x
119 | # - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.rule=Host(`${MAIN_DOMAIN_NAME}`)'
120 | # - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.tls.certresolver=dns'
121 | # - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.tls=true'
122 | # - 'traefik.http.routers.web-${COMPOSE_PROJECT_NAME}.middlewares=web-${COMPOSE_PROJECT_NAME}@docker'
123 | # - 'traefik.http.middlewares.web-${COMPOSE_PROJECT_NAME}.basicauth.users=${RA_BASIC_AUTH}'
124 | # - 'traefik.http.middlewares.web-${COMPOSE_PROJECT_NAME}.basicauth.removeheader=true'
125 |
126 | # apache:
127 | # Uncomment for MacOS.
128 | # ports:
129 | # - "8088:80"
130 | # Mount local folder with ssl keys.
131 | # volumes:
132 | # - ./apache/ssl:/etc/apache2/ssl:Z
133 | # labels:
134 | # - 'sdc.port=80'
135 | # - 'traefik.enable=true'
136 | #      # Traefik 2.x
137 | # - 'traefik.http.routers.apache-${COMPOSE_PROJECT_NAME}.rule=Host(`${MAIN_DOMAIN_NAME}`)'
138 | # - 'traefik.http.routers.apache-${COMPOSE_PROJECT_NAME}.tls.certresolver=dns'
139 | # - 'traefik.http.routers.apache-${COMPOSE_PROJECT_NAME}.tls=true'
140 | # - 'traefik.http.routers.apache-${COMPOSE_PROJECT_NAME}.middlewares=apache-${COMPOSE_PROJECT_NAME}@docker'
141 | # - 'traefik.http.middlewares.apache-${COMPOSE_PROJECT_NAME}.basicauth.users=${RA_BASIC_AUTH}'
142 | # - 'traefik.http.middlewares.apache-${COMPOSE_PROJECT_NAME}.basicauth.removeheader=true'
143 |
144 | # solr:
145 | # labels:
146 | # - 'sdc.port=8983'
147 | # - 'traefik.enable=true'
148 | #      # Traefik 2.x
149 | # - 'traefik.http.routers.solr-${COMPOSE_PROJECT_NAME}.rule=Host(`solr-${MAIN_DOMAIN_NAME}`)'
150 | # - 'traefik.http.routers.solr-${COMPOSE_PROJECT_NAME}.tls.certresolver=dns'
151 | # - 'traefik.http.routers.solr-${COMPOSE_PROJECT_NAME}.tls=true'
152 | # - 'traefik.http.routers.solr-${COMPOSE_PROJECT_NAME}.middlewares=solr-${COMPOSE_PROJECT_NAME}@docker'
153 | # - 'traefik.http.services.solr-${COMPOSE_PROJECT_NAME}.loadbalancer.server.port=8983'
154 | # - 'traefik.http.middlewares.solr-${COMPOSE_PROJECT_NAME}.basicauth.users=${RA_BASIC_AUTH}'
155 | # - 'traefik.http.middlewares.solr-${COMPOSE_PROJECT_NAME}.basicauth.removeheader=true'
156 |
157 | # Uncomment for MacOS.
158 | #volumes:
159 | # nfsmount:
160 | # driver: local
161 | # driver_opts:
162 | # type: nfs
163 | # o: addr=host.docker.internal,rw,nolock,hard,nointr,nfsvers=3
164 | # device: ":${PWD}"
165 |
--------------------------------------------------------------------------------
/docker/docker-compose.yml:
--------------------------------------------------------------------------------
1 | x-service-defaults:
2 | &service-defaults
3 | restart: always
4 | networks:
5 | - front
6 |
7 | services:
8 |
9 | php:
10 | <<: *service-defaults
11 | image: ${IMAGE_PHP}
12 | container_name: "${COMPOSE_PROJECT_NAME}_web"
13 | volumes:
14 | - ../:/var/www/html:z
15 | - ../drush/.drushrc.php:/home/www-data/.drushrc.php:z
16 | # environment:
17 | # REDIS_HOST: ${REDIS_HOST}
18 | # REDIS_PORT: ${REDIS_PORT}
19 | # REDIS_PASSWD: ${REDIS_PASSWD}
20 | # depends_on:
21 | # - redis
22 | # links:
23 | # - redis:redis
24 |
25 | # nginx:
26 | # <<: *service-defaults
27 | # image: ${IMAGE_NGINX}
28 | # container_name: "${COMPOSE_PROJECT_NAME}_web"
29 | # depends_on:
30 | # - php
31 | # volumes_from:
32 | # - php
33 |
34 | # apache:
35 | # <<: *service-defaults
36 | # image: ${IMAGE_APACHE}
37 | # container_name: "${COMPOSE_PROJECT_NAME}_web"
38 | # depends_on:
39 | # - php
40 | # volumes_from:
41 | # - php
42 |
43 | # solr:
44 | # <<: *service-defaults
45 | # image: ${IMAGE_SOLR}
46 | # container_name: "${COMPOSE_PROJECT_NAME}_solr"
47 | # volumes:
48 | # - ./solr-conf/8.x:/solr-conf/conf
49 | # entrypoint:
50 | # - docker-entrypoint.sh
51 | # - solr-precreate
52 | # - ${COMPOSE_PROJECT_NAME}
53 | # - /solr-conf
54 |
55 | # redis:
56 | # <<: *service-defaults
57 | # image: ${IMAGE_REDIS}
58 | # container_name: "${COMPOSE_PROJECT_NAME}_redis"
59 | # command: redis-server --maxmemory-policy allkeys-lru --requirepass ${REDIS_PASSWD}
60 |
61 | networks:
62 | front:
63 | driver: bridge
64 |
--------------------------------------------------------------------------------
/docker/load-extension.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | set -e
4 |
5 | psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
6 | CREATE EXTENSION IF NOT EXISTS pg_trgm;
7 | EOSQL
8 |
--------------------------------------------------------------------------------
/docker/unit.json:
--------------------------------------------------------------------------------
1 | {
2 | "access_log": "/dev/stdout",
3 | "listeners": {
4 | "*:80": {
5 | "pass": "routes/main"
6 | }
7 | },
8 |
9 | "routes": {
10 | "main": [
11 | {
12 | "match": {
13 | "uri": [
14 | "!*/.well-known/*",
15 | "/vendor/*",
16 | "/core/profiles/demo_umami/modules/demo_umami_content/default_content/*",
17 | "*.engine",
18 | "*.inc",
19 | "*.install",
20 | "*.make",
21 | "*.module",
22 | "*.po",
23 | "*.profile",
24 | "*.sh",
25 | "*.theme",
26 | "*.tpl",
27 | "*.twig",
28 | "*.xtmpl",
29 | "*.yml",
30 | "*/.*",
31 | "*/Entries*",
32 | "*/Repository",
33 | "*/Root",
34 | "*/Tag",
35 | "*/Template",
36 | "*/composer.json",
37 | "*/composer.lock",
38 | "*/web.config",
"*.sql",
40 | "*.bak",
41 | "*.orig",
42 | "*.save",
43 | "*.swo",
44 | "*.swp",
45 | "*~"
46 | ]
47 | },
48 |
49 | "action": {
50 | "return": 404
51 | }
52 | },
53 | {
54 | "match": {
55 | "uri": [
56 | "/core/authorize.php",
57 | "/core/install.php",
58 | "/core/modules/statistics/statistics.php",
59 | "~^/core/modules/system/tests/https?\\.php",
60 | "/core/rebuild.php",
61 | "/update.php",
62 | "/update.php/*"
63 | ]
64 | },
65 |
66 | "action": {
67 | "pass": "applications/drupal/direct"
68 | }
69 | },
70 | {
71 | "match": {
72 | "uri": [
73 | "!/index.php*",
74 | "*.php"
75 | ]
76 | },
77 |
78 | "action": {
79 | "return": 404
80 | }
81 | },
82 | {
83 | "match": {
84 | "uri": [
85 | "~^.*css_[a-zA-Z0-9-_]+\\.css(?:\\?.*)?$",
86 | "~^.*js_[a-zA-Z0-9-_]+\\.js(?:\\?.*)?$"
87 | ],
88 |
89 | "headers": [
90 | {
91 | "Accept-Encoding": "*gzip*"
92 | }
93 | ]
94 | },
95 |
96 | "action": {
97 | "pass": "routes/assets_gz"
98 | }
99 | },
100 | {
101 | "action": {
102 | "share": "/var/www/html/web$uri",
103 | "fallback": {
104 | "pass": "applications/drupal/index"
105 | }
106 | }
107 | }
108 | ],
109 |
110 | "assets_gz": [
111 | {
112 | "action": {
113 | "share": "/var/www/html/web${uri}.gz",
114 | "response_headers": {
115 | "Content-Encoding": "gzip"
116 | },
117 |
118 | "fallback": {
119 | "pass": "routes/assets"
120 | }
121 | }
122 | }
123 | ],
124 |
125 | "assets": [
126 | {
127 | "action": {
128 | "share": "/var/www/html/web${uri}",
129 | "fallback": {
130 | "pass": "applications/drupal/index"
131 | }
132 | }
133 | }
134 | ]
135 | },
136 |
137 | "applications": {
138 | "drupal": {
139 | "type": "php",
140 | "stdout": "/dev/stdout",
141 | "stderr": "/dev/stderr",
142 | "processes": {
143 | "max": 4,
144 | "spare": 2,
145 | "idle_timeout": 120
146 | },
147 |
148 | "limits": {
149 | "timeout": 300,
150 | "requests": 1500
151 | },
152 |
153 | "options": {
154 | "admin": {
155 | "memory_limit": "1G",
156 | "opcache.jit_buffer_size": "20M"
157 | }
158 | },
159 |
160 | "targets": {
161 | "direct": {
162 | "root": "/var/www/html/web/"
163 | },
164 |
165 | "index": {
166 | "root": "/var/www/html/web/",
167 | "script": "index.php"
168 | }
169 | }
170 | }
171 | }
172 | }
173 |
--------------------------------------------------------------------------------
/docker/xx-php.ini:
--------------------------------------------------------------------------------
1 | zend.assertions=1
2 | ;opcache.jit_buffer_size=20M
3 |
--------------------------------------------------------------------------------
/drush/.drushrc.php:
--------------------------------------------------------------------------------
1 | getSession()->getCurrentUrl();
32 | var_dump($url);
33 | } catch (Exception $e) {
34 | throw new Exception($e);
35 | }
36 | }
37 |
/**
 * Prints the full HTML of the current page to the console.
 *
 * Useful for debugging failing scenarios.
 *
 * @Then /^I want to see the page content$/
 *
 * @throws \Exception
 *   When the page markup cannot be retrieved.
 */
public function iWantToSeeThePageContent() {
  try {
    $html = $this->getSession()->getPage()->getHtml();
    print($html);
  } catch (Exception $e) {
    // Rethrow with the original exception chained as "previous" instead of
    // collapsing it into a string, which lost its type and stack trace.
    throw new Exception($e->getMessage(), 0, $e);
  }
}
51 |
/**
 * Pauses scenario execution for a fixed number of seconds.
 *
 * @Given /^I wait (\d+) seconds$/
 */
public function iWaitSeconds($seconds) {
  // The step regex only captures digits, so the cast is lossless.
  sleep((int) $seconds);
}
58 |
/**
 * Asserts that the current response is a PDF document.
 *
 * @Then a PDF is displayed
 *
 * @throws \Exception
 *   When the Content-Type response header is missing or not a PDF type.
 */
public function assertPdfDisplay()
{
  $headers = $this->getSession()->getResponseHeaders();

  // Match the media type as a prefix so values with parameters, e.g.
  // "application/pdf; charset=binary", are accepted too (the previous
  // exact strcmp() rejected them).
  if (!isset($headers['Content-Type'][0]) || stripos($headers['Content-Type'][0], 'application/pdf') !== 0) {
    throw new Exception('No PDF displayed.');
  }
}
72 |
/**
 * Navigates back to the previous page, like the browser "back" button.
 *
 * @Then I click the back button of the navigator
 */
public function iClickTheBackButtonInNavigator() {
  $driver = $this->getSession()->getDriver();
  $driver->back();
}
79 |
/**
 * Clicks the first element matching the given CSS selector.
 *
 * @Given I click the :arg1 element
 */
public function iClickTheElement($selector) {
  $target = $this->getSession()->getPage()->find('css', $selector);

  if (empty($target)) {
    throw new Exception("No html element found for the selector ('$selector')");
  }

  $target->click();
}
93 |
/**
 * Selects the first real option of a select list, skipping the "_none"
 * placeholder option Drupal adds for optional fields.
 *
 * @Given I select the first element in :arg1 list
 */
public function iSelectTheFirstElement($selector) {
  $page = $this->getSession()->getPage();

  /** @var \Behat\Mink\Element\NodeElement $candidate */
  foreach ($page->findAll('css', "#$selector option") as $candidate) {
    $value = $candidate->getValue();
    if (strcmp($value, "_none") != 0) {
      $page->selectFieldOption($selector, $value);
      return;
    }
  }

  throw new Exception("Unable to find a non empty value.");
}
112 |
/**
 * Opens a jQuery-UI style combobox and selects the option with given text.
 *
 * @Given I select in combobox ":arg1" value ":arg2"
 *
 * @throws \Exception
 *   When the combobox wrapper, trigger or requested option is not found.
 */
public function iSelectCombobox($field_wrapper_selector, $value) {
  $page = $this->getSession()->getPage();
  $element = $page->find('css', $field_wrapper_selector);
  if (empty($element)) {
    // Fail with a readable message instead of a fatal null dereference.
    throw new Exception("No html element found for the selector ('$field_wrapper_selector')");
  }
  $combo = $element->getParent()->find('css', 'span');
  if (empty($combo)) {
    throw new Exception("No combobox trigger found for ('$field_wrapper_selector')");
  }
  $combo->click();

  // Caught this case on RA instance, its chrome can not draw selectlist so
  // fast.
  $this->iWaitSeconds(3);
  $option = $page->find('xpath', "//li [@class='ui-menu-item']/div [text () ='$value']");
  if (empty($option)) {
    throw new Exception("Unable to find the '$value' option in the combobox.");
  }
  $option->mouseOver();
  $option->getParent()->getParent()->click();
  $this->iWaitSeconds(1);
}
130 |
/**
 * Click some text
 *
 * @When /^I click on the text "([^"]*)"$/
 */
public function iClickOnTheText($text)
{
  $session = $this->getSession();
  $xpath = $session->getSelectorsHandler()->selectorToXpath('xpath', '*//*[text()="'. $text .'"]');
  $found = $session->getPage()->find('xpath', $xpath);

  if (null === $found) {
    throw new \InvalidArgumentException(sprintf('Cannot find text: "%s"', $text));
  }

  $found->click();
}
149 |
150 |
/**
 * Checks that a select list contains exactly the given options.
 *
 * The "_none" placeholder option is ignored. The step fails when an option
 * is unexpected, or when the option count differs from the expected list.
 *
 * @Then /^the selectbox "([^"]*)" should have a list containing:$/
 *
 * @throws \Exception
 *   When the select list does not match the expected options.
 */
public function shouldHaveAListContaining($element, \Behat\Gherkin\Node\PyStringNode $list)
{
  $page = $this->getSession()->getPage();
  $validStrings = $list->getStrings();

  $elements = $page->findAll('css', "#$element option");

  $option_none = 0;

  // Use a dedicated loop variable: the original shadowed the $element
  // parameter (the selector), clobbering it inside the loop.
  /** @var \Behat\Mink\Element\NodeElement $option */
  foreach ($elements as $option) {
    $value = $option->getValue();
    if (strcmp($value, '_none') == 0) {
      $option_none = 1;
      continue;
    }

    if (!in_array($value, $validStrings)) {
      throw new Exception ("Element $value not found.");
    }
  }

  if ((count($elements) - $option_none) < count($validStrings)) {
    throw new Exception ("Expected options are missing in the select list.");
  }
  elseif ((count($elements) - $option_none) > count($validStrings)) {
    throw new Exception ("There are more options than expected in the select list.");
  }
}
183 |
184 |
185 | /**
186 | * Wait for AJAX to finish.
187 | *
188 | * @see \Drupal\FunctionalJavascriptTests\JSWebAssert::assertWaitOnAjaxRequest()
189 | *
190 | * @Given I wait max :arg1 seconds for AJAX to finish
191 | */
192 | public function iWaitForAjaxToFinish($seconds) {
193 | $condition = <<getSession()->wait($seconds * 1000, $condition);
212 | if (!$result) {
213 | throw new \RuntimeException('Unable to complete AJAX request.');
214 | }
215 | }
216 |
/**
 * Switches to specific iframe.
 *
 * Subsequent steps will operate inside the named iframe until switched back.
 *
 * @Given I switch to iframe :arg1
 *
 * @param string $name
 *   The "name" attribute of the target iframe.
 */
public function iSwitchToIframe(string $name) {
  $this->getSession()->switchToIFrame($name);
}
225 |
/**
 * Switches to main window.
 *
 * Passing NULL to switchToIFrame() restores the top-level browsing context.
 *
 * @Given I switch back to main window
 */
public function iSwitchToMainWindow() {
  $this->getSession()->switchToIFrame(NULL);
}
234 |
/**
 * Selects the Nth option of a Choices.js dropdown.
 *
 * @Given I select in choices ":arg1" element number ":arg2"
 *
 * @throws \Exception
 *   When the element is missing or fewer options exist than requested.
 */
public function iSelectOptionNumberInChoices($selector, $number) {
  $page = $this->getSession()->getPage();
  $element = $page->find('css', $selector);
  if (!$element) {
    throw new \Exception(sprintf('Element "%s" not found', $selector));
  }
  $trigger = $element->getParent();
  $trigger->click();
  $items = $trigger->getParent()->findAll('css', '.choices__list--dropdown > .choices__list > .choices__item');
  // $number is 1-based; guard against an out-of-range index, which
  // previously raised an undefined-offset error followed by a fatal.
  if (!isset($items[$number - 1])) {
    throw new \Exception(sprintf('Option number "%d" not found in "%s"', $number, $selector));
  }
  $item = $items[$number - 1];
  $item->mouseOver();
  $item->click();
}
252 |
/**
 * Waits up to 10 seconds for document.readyState to become "complete".
 *
 * @When /^wait for the page to be loaded$/
 *
 * @throws \RuntimeException
 *   When the page has not finished loading within 10 seconds.
 */
public function waitForThePageToBeLoaded()
{
  // wait() returns FALSE when the condition never became true; the
  // original silently ignored a load timeout.
  $loaded = $this->getSession()->wait(10000, "document.readyState === 'complete'");
  if (!$loaded) {
    throw new \RuntimeException('The page did not finish loading within 10 seconds.');
  }
}
260 |
/**
 * Polls the page until at least one element matches the CSS selector.
 *
 * @Then I wait :arg1 seconds until element :arg2 appears
 *
 * @throws \Behat\Mink\Exception\ResponseTextException
 *   When no matching element appears within the given number of seconds.
 */
public function waitSecondsUntilElementAppears($seconds, $selector) {
  $startTime = time();
  do {
    try {
      $element = $this->getSession()->getPage()->findAll('css', $selector);
      if (count($element) > 0) {
        return TRUE;
      }
    } catch (ExpectationException $e) {
      /* Intentionally left blank */
    }
    // Throttle the polling loop; the original busy-waited at full CPU.
    usleep(250000);
  } while (time() - $startTime < $seconds);
  throw new ResponseTextException(
    sprintf('Cannot find the element %s after %s seconds', $selector, $seconds),
    $this->getSession()
  );
}
281 |
/**
 * Checks that a map contains an image-map pin with the given title.
 *
 * @Then I should see the pin with title :arg1 on the map :arg2
 *
 * @return \Behat\Mink\Element\NodeElement
 *   The pin element, so other steps can reuse it (e.g. to click it).
 *
 * @throws \Exception
 *   When the map or the pin cannot be found.
 */
public function iShouldSeeThePinWithTitleOnTheMap($pin, $map_selector) {
  $map = $this->getSession()->getPage()->find('css', $map_selector);
  if (!$map) {
    throw new \Exception(sprintf("The page does not contain the map with selector '%s'", $map_selector));
  }
  // Keep the lookup result in its own variable: the original overwrote
  // $pin, so the failure message printed an empty title instead of the
  // one being searched for.
  $pin_element = $map->find('css', 'area[title="' . $pin . '"]');
  if ($pin_element) {
    return $pin_element;
  }
  throw new \Exception(sprintf("The map '%s' does not contain pin with title '%s'", $map_selector, $pin));
}
298 |
/**
 * Clicks the image-map pin with the given title on the given map.
 *
 * @When I click on the pin with title :arg1 on the map :arg2
 */
public function iClickThePinWithTitleOnTheMap($pin, $map_selector) {
  // Delegates lookup (and its "map not found" failure) to the sibling step.
  $pin_element = $this->iShouldSeeThePinWithTitleOnTheMap($pin, $map_selector);
  if (empty($pin_element)) {
    throw new \Exception(sprintf("The map '%s' does not contain pin with title '%s'", $map_selector, $pin));
  }
  $pin_element->click();
}
309 |
/**
 * Switch to the another tab.
 * Example: I switch to "2" tab
 * Example: I switch to "main" tab
 *
 * @Given I switch to ":arg1" tab
 */
public function switchToNextTab($tab) {
  // NOTE(review): "main" maps to index 1 here although getWindowNames()
  // returns a zero-based array — confirm against the driver's ordering.
  $index = ($tab == 'main') ? 1 : $tab;
  $windowNames = $this->getSession()->getWindowNames();
  if (!isset($windowNames[$index])) {
    throw new Exception('There is not a tab to switch.');
  }
  $this->getSession()->switchToWindow($windowNames[$index]);
}
329 |
/**
 * Selects an option of a jQuery UI Selectmenu widget by its displayed value.
 *
 * @Given I select :arg1 with value :arg2 from :arg3 Selectmenu select element
 *
 * @throws \Exception
 *   When the list or the requested item cannot be found.
 */
public function iSelectValueFromSelectmenuList($optionValueSelector, $value, $listSelector) {
  $page = $this->getSession()->getPage();

  $autocompletePanel = $page->find('css', $listSelector);
  if (empty($autocompletePanel)) {
    // Fail with a readable message instead of the original fatal
    // "call to a member function on null" when the panel is absent.
    throw new Exception("Unable to find the autocomplete list '$listSelector'.");
  }
  $autocompleteListOptions = $autocompletePanel->findAll('css', '.ui-menu-item-wrapper');

  /** @var \Behat\Mink\Element\NodeElement $option */
  foreach ($autocompleteListOptions as $option) {
    $optionValue = $option->find('css', $optionValueSelector);
    if ($optionValue && $optionValue->getHtml() == $value) {
      $option->click();
      return;
    }
  }

  throw new Exception("Unable to find the item in the autocomplete list.");
}
350 |
/**
 * Asserts that the input matched by the CSS selector holds the given value.
 *
 * @Given I see :arg1 value in input element :arg2
 */
public function iCheckInputsValue($value, $selector) {
  $input = $this->getSession()->getPage()->find('css', $selector);
  if (empty($input)) {
    throw new Exception("No input element found.");
  }
  // Loose comparison on purpose: step arguments arrive as strings.
  if ($input->getValue() != $value) {
    throw new Exception("No matches.");
  }
}
364 |
365 | }
366 |
--------------------------------------------------------------------------------
/features/generic_tests.feature:
--------------------------------------------------------------------------------
1 | @api
2 |
3 | Feature: Generic tests
4 |
5 | # Availability tests
6 |
7 | Scenario: Homepage is accessible
8 | Given I am an anonymous user
9 | When I am on the homepage
10 | And I take a screenshot
11 | Then I should get a "200" HTTP response
12 |
13 | Scenario: User login page is accessible
14 | Given I am an anonymous user
15 | When I visit "/user"
16 | And I take a screenshot
17 | Then I should get a "200" HTTP response
18 |
19 | Scenario: Run cron
20 | Given I am logged in as a user with the "sysadmin" role
21 | When I run cron
22 | And am on "admin/reports/dblog"
23 | When wait for the page to be loaded
24 | And I take a screenshot
25 | Then I should see the link "Cron run completed"
26 |
27 | Scenario: Clear cache
28 | Given the cache has been cleared
29 | When I am on the homepage
30 | When wait for the page to be loaded
31 | And I take a screenshot
32 | Then I should get a "200" HTTP response
33 |
34 | # Security tests
35 |
36 | Scenario: Submit invalid login credentials
37 | Given I am an anonymous user
38 | When I visit "/user"
39 | And I fill in "edit-name" with "XXXXX"
40 | And I fill in "edit-pass" with "YYYYY"
41 | And I press the "edit-submit" button
42 | Then I am on "/admin/people"
43 | And I take a screenshot
44 | And the response status code should be 403
45 | # See https://www.drupal.org/project/username_enumeration_prevention
46 |
47 | # Global behavior tests
48 |
Scenario: Create users programmatically
50 | Given users:
51 | | name | mail | status |
52 | | John Doe | johndoe@example.com | 1 |
53 | And I am logged in as a user with the "contributor" role
54 | When I visit "/admin/people"
55 | When wait for the page to be loaded
56 | Then I should see the link "John Doe"
57 |
Scenario: Create nodes programmatically
59 | Given "basic_page" content:
60 | | title |
61 | | Page one |
62 | | Page two |
63 | And I am logged in as a user with the "contributor" role
64 | When I go to "admin/content"
65 | Then I should see "Page one"
66 | And I should see "Page two"
67 |
Scenario: Create a node programmatically with listed field(s) and check it displays
69 | Given I am viewing an "basic_page" content:
70 | | title | My node with fields! |
71 | Then I should see the text "My node with fields!"
72 |
73 | Scenario: Target links within table rows
74 | Given I am logged in as a user with the "sysadmin" role
75 | When I am at "/admin/structure/menu/"
76 | And I click "Edit menu" in the "Administration" row
77 | And I should see text matching "menu Administration"
78 |
79 | # User & role tests
80 |
Scenario Outline: Create nodes manually using different roles
Given I am logged in as a user with the "<role_machine_name>" role
When I go to "node/add/basic_page"
And I fill in "Title" with "Test node created by user with <role_machine_name> role"
And I press the "edit-submit--2" button
When I go to "admin/content"
And I take a screenshot
Then I should see "Test node created by user with <role_machine_name> role"
89 |
90 | Examples:
91 | | role_machine_name |
92 | | sysadmin |
93 | | contributor |
94 |
95 |
--------------------------------------------------------------------------------
/lighthouserc.yml:
--------------------------------------------------------------------------------
1 | ci:
2 | collect:
3 | numberOfRuns: 5
4 | settings:
5 | chromeFlags:
6 | - "--no-sandbox"
7 | url:
- URL_TO_TEST # Overridden by CLI argument
9 | assert:
10 | includePassedAssertions: true
11 | # preset: lighthouse:no-pwa # lighthouse:recommended but without PWA audits
12 | assertions: # Additional to preset
13 | categories:performance:
14 | - error
15 | - minScore: 0.9
16 | categories:accessibility:
17 | - error
18 | - minScore: 0.9
19 | categories:best-practices:
20 | - error
21 | - minScore: 0.9
22 | categories:seo:
23 | - error
24 | - minScore: 0.8
25 |
26 |
--------------------------------------------------------------------------------
/private-files/.htaccess:
--------------------------------------------------------------------------------
# Deny all requests from Apache 2.4+.
<IfModule mod_authz_core.c>
  Require all denied
</IfModule>

# Deny all requests from Apache 2.0-2.2.
<IfModule !mod_authz_core.c>
  Deny from all
</IfModule>
9 |
10 |
11 | # Turn off all options we don't need.
12 | Options -Indexes -ExecCGI -Includes -MultiViews
13 |
# Set the catch-all handler to prevent scripts from being executed.
SetHandler Drupal_Security_Do_Not_Remove_See_SA_2006_006
<Files *>
  # Override the handler again if we're run later in the evaluation list.
  SetHandler Drupal_Security_Do_Not_Remove_See_SA_2013_003
</Files>
19 |
20 |
# If we know how to do it safely, disable the PHP engine entirely.
<IfModule mod_php.c>
  php_flag engine off
</IfModule>
<IfModule mod_php7.c>
  php_flag engine off
</IfModule>
24 |
25 |
--------------------------------------------------------------------------------
/rector.php:
--------------------------------------------------------------------------------
1 | sets([
16 | Drupal8SetList::DRUPAL_8,
17 | Drupal9SetList::DRUPAL_9,
18 | Drupal10SetList::DRUPAL_10,
19 | ]);
20 |
21 | $drupalFinder = new DrupalFinder();
22 | $drupalFinder->locateRoot(__DIR__);
23 | $drupalRoot = $drupalFinder->getDrupalRoot();
24 | $rectorConfig->autoloadPaths([
25 | $drupalRoot . '/core',
26 | $drupalRoot . '/modules',
27 | $drupalRoot . '/profiles',
28 | $drupalRoot . '/themes',
29 | __DIR__ . '/vendor/drush/drush/includes/output.inc',
30 | ]);
31 | $rectorConfig->skip(['*/upgrade_status/tests/modules/*']);
32 | $rectorConfig->fileExtensions(['php', 'module', 'theme', 'install', 'profile', 'inc', 'engine']);
33 | $rectorConfig->importNames(true, false);
34 | $rectorConfig->importShortClasses(false);
35 | };
36 |
--------------------------------------------------------------------------------
/scripts/composer/ScriptHandler.php:
--------------------------------------------------------------------------------
1 | getIO()->write("\r\n".implode(' ',$files));
48 | }
49 |
50 |
/**
 * Prepares the Drupal directory structure and settings for installation.
 *
 * Creates the modules/profiles/themes directories, seeds a writable
 * settings.php with the config sync directory, and ensures the public
 * files directory exists with group-writable permissions.
 *
 * @param \Composer\Script\Event $event
 *   The Composer script event.
 *
 * @throws \Exception
 */
public static function createRequiredFiles(Event $event) {
  $fs = new Filesystem();
  $drupalFinder = new DrupalFinder();
  $drupalFinder->locateRoot(getcwd());
  $drupalRoot = $drupalFinder->getDrupalRoot();

  $dirs = [
    'modules',
    'profiles',
    'themes',
  ];

  // Required for unit testing.
  foreach ($dirs as $dir) {
    if (!$fs->exists($drupalRoot . '/' . $dir)) {
      $fs->mkdir($drupalRoot . '/' . $dir);
      $fs->touch($drupalRoot . '/' . $dir . '/.gitkeep');
    }
  }

  // Prepare the settings file for installation.
  if (!$fs->exists($drupalRoot . '/sites/default/settings.php') && $fs->exists($drupalRoot . '/sites/default/default.settings.php')) {
    $fs->copy($drupalRoot . '/sites/default/default.settings.php', $drupalRoot . '/sites/default/settings.php');
    require_once $drupalRoot . '/core/includes/bootstrap.inc';
    require_once $drupalRoot . '/core/includes/install.inc';
    $settings['settings']['config_sync_directory'] = (object) [
      'value' => Path::makeRelative($drupalFinder->getComposerRoot() . '/config/sync', $drupalRoot),
      'required' => TRUE,
    ];
    new Settings([]);
    // Drupal 10.1 replaced drupal_rewrite_settings() with SettingsEditor.
    if (version_compare(\Drupal::VERSION, '10.1', '>=')) {
      SettingsEditor::rewrite($drupalRoot . '/sites/default/settings.php', $settings);
    }
    else {
      drupal_rewrite_settings($settings, $drupalRoot . '/sites/default/settings.php');
    }
    $fs->chmod($drupalRoot . '/sites/default/settings.php', 0666);
    $event->getIO()
      ->write("Create a sites/default/settings.php file with chmod 0666");
  }

  // Create the files directory with chmod 0775 (the old comment claimed
  // 0777, but 0775 is what is actually applied below).
  if (!$fs->exists($drupalRoot . '/sites/default/files')) {
    $oldmask = umask(0);
    $fs->mkdir($drupalRoot . '/sites/default/files', 0775);
    umask($oldmask);
    $event->getIO()
      ->write("Create a sites/default/files directory with chmod 0775");
  }
  else {
    $fs->chmod($drupalRoot . '/sites/default/files', 0775);
  }
}
109 |
/**
 * Checks if the installed version of Composer is compatible.
 *
 * Composer 1.0.0 and higher consider a `composer install` without having a
 * lock file present as equal to `composer update`. We do not ship with a lock
 * file to avoid merge conflicts downstream, meaning that if a project is
 * installed with an older version of Composer the scaffolding of Drupal will
 * not be triggered. We check this here instead of in drupal-scaffold to be
 * able to give immediate feedback to the end user, rather than failing the
 * installation after going through the lengthy process of compiling and
 * downloading the Composer dependencies.
 *
 * @see https://github.com/composer/composer/pull/5035
 */
public static function checkComposerVersion(Event $event) {
  $io = $event->getIO();
  $composer = $event->getComposer();

  $version = $composer::VERSION;

  // A 40-character hex version means Composer was installed from git;
  // fall back to the branch alias for a comparable version number.
  if (preg_match('/^[0-9a-f]{40}$/i', $version)) {
    $version = $composer::BRANCH_ALIAS_VERSION;
  }

  // If Composer is installed through git we have no easy way to determine if
  // it is new enough, just display a warning.
  if ($version === '@package_version@' || $version === '@package_branch_alias_version@') {
    $io->writeError('You are running a development version of Composer. If you experience problems, please update Composer to the latest stable version.');
  }
  elseif (Comparator::lessThan($version, '1.0.0')) {
    $io->writeError('Drupal-project requires Composer version 1.0.0 or higher. Please update your Composer before continuing.');
    exit(1);
  }
}
146 |
147 | }
148 |
--------------------------------------------------------------------------------
/scripts/delivery-archive/.gitlab-ci.delivery_via_archive_example.yml:
--------------------------------------------------------------------------------
1 | # Example of .gitlab-ci.yml jobs
2 |
3 | stages:
4 | - deliver
5 |
6 | .delivery_template: &delivery_template
7 | extends: .runner_tag_selection
8 | stage: deliver
9 | dependencies:
10 | - prepare:front # Where front dependencies are installed and assets are builded
11 | - prepare:back # Where back dependencies are installed
12 | allow_failure: true
13 | retry:
14 | max: 2
15 | only:
16 | - tags
17 | except:
18 | - branches
19 | after_script:
20 | - rm -f ${CI_PROJECT_NAME}-${CI_COMMIT_REF_NAME}.tar.gz
21 |
22 | delivery:
23 | <<: *delivery_template
24 | script:
25 | - echo " - Start of CI job"
26 | - date; pwd; ls -lah;
27 | - echo ${CI_PROJECT_NAME}
28 | - echo ${CI_COMMIT_REF_NAME}
29 | - echo ${DELIVERY_REPOSITORIES_RAW_REGISTRY_DOMAIN_1}
30 | - echo ${DELIVERY_REPOSITORIES_USERNAME}
31 | - ls -lah
32 | - touch ${CI_PROJECT_NAME}-${CI_COMMIT_REF_NAME}.tar.gz # https://stackoverflow.com/a/37993307/12961859
33 | - time tar --exclude=${CI_PROJECT_NAME}-${CI_COMMIT_REF_NAME}.tar.gz --exclude=".git" --exclude=".gitlab" --exclude=".cache" --exclude=".env" -czf ${CI_PROJECT_NAME}-${CI_COMMIT_REF_NAME}.tar.gz .
34 | - curl -v --user "${DELIVERY_REPOSITORIES_USERNAME}:${DELIVERY_REPOSITORIES_PASSWORD}" --upload-file "${CI_PROJECT_NAME}-${CI_COMMIT_REF_NAME}.tar.gz" https://${DELIVERY_REPOSITORIES_RAW_REGISTRY_DOMAIN_1}/repository/${CI_PROJECT_NAME}-raw/releases/${CI_PROJECT_NAME}-$CI_COMMIT_TAG.tar.gz
35 | - echo " - End of CI job"
36 | artifacts:
37 | name: "$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA:delivery"
38 | expire_in: 1d
39 | paths:
40 | - ./*
41 |
42 | Delivery 2: # Job can then be duplicated to deliver to multiples registries (just use a different REPOSITORIES_DOCKER_REGISTRY_DOMAIN_N variable for each)
43 | ...
44 |
45 |
--------------------------------------------------------------------------------
/scripts/delivery-archive/README.md:
--------------------------------------------------------------------------------
1 | # Delivery via archive script
2 |
3 | ## What
4 |
- This script delivers the current tag from a repo to a raw file registry using a Gitlab CI job, as part of continuous deployments
6 | - You can also deliver to multiple registries at the same time using multiple jobs in the same pipeline
7 |
8 | ## Why
9 |
10 | - Because some hosters prefer to receive artifact archives for deployments
- Because artifacts (downloaded and generated files) are usually not versioned in git
12 |
13 | ## Setup
14 |
15 | 2 files are required :
16 | - `.gitlab-ci.yml`
17 |
18 | 1. Define a delivery CI job like "Delivery 1" in .gitlab-ci.yml, as shown in .gitlab-ci.delivery_via_archive_example.yml
   - To include artifact dependencies, this CI job should be positioned after all dependencies have been built and installed and use the [dependencies](https://docs.gitlab.com/ee/ci/yaml/#dependencies) keyword
20 | 1. In Gitlab UI, add the following custom CI/CD variables :
   - DELIVERY_REPOSITORIES_RAW_REGISTRY_DOMAIN_1 : Raw file repository to which the current tag is delivered (you can have multiple ones)
   - DELIVERY_REPOSITORIES_USERNAME : Service account credentials to use to push the release archive
   - DELIVERY_REPOSITORIES_PASSWORD : Service account credentials to use to push the release archive
24 |
25 |
--------------------------------------------------------------------------------
/scripts/delivery-docker/.gitlab-ci.delivery_via_docker_example.yml:
--------------------------------------------------------------------------------
1 | # Example of .gitlab-ci.yml jobs
2 |
3 | stages:
4 | - deliver
5 |
6 | .delivery_template: &delivery_template
7 | extends: .runner_tag_selection
8 | stage: deliver
9 | dependencies:
10 | - prepare:front # Where front dependencies are installed and assets are builded
11 | - prepare:back # Where back dependencies are installed
12 | allow_failure: true
13 | retry:
14 | max: 2
15 | only:
16 | - tags
17 | except:
18 | - branches
19 | after_script:
20 | - docker rmi -f ${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME}
21 | - docker rmi -f ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1}/${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME}
22 | - docker images --quiet --filter=dangling=true | xargs --no-run-if-empty docker rmi -f 2> /dev/null
23 | - docker images
24 |
25 | delivery:
26 | <<: *delivery_template
27 | script:
28 | - echo " - Start of CI job"
29 | - date; pwd; ls -lah;
30 | - echo ${CI_PROJECT_NAME}
31 | - echo ${CI_COMMIT_REF_NAME}
32 | - echo ${CI_PROJECT_URL}
33 | - echo ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1}
34 | - echo ${DELIVERY_REPOSITORIES_USERNAME}
35 | - docker info
36 | - docker build -t ${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME} -f scripts/delivery-docker/Dockerfile . --no-cache --force-rm --build-arg BUILD_DATE=`date -u +"%Y-%m-%dT%H:%M:%SZ"` --build-arg BUILD_URL="${CI_PROJECT_URL}" --build-arg BUILD_DESC="Drupal build artifact" --build-arg BUILD_NAME="${CI_PROJECT_NAME}" --build-arg BUILD_MAINTAINER="${CI_PROJECT_NAME}/mgmt@skilld.cloud"
37 | - docker tag ${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME} ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1}/${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME}
38 | - docker tag ${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME} ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1}/${CI_PROJECT_NAME}/artifact:latest
39 | - docker inspect ${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME}
40 | - docker login ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1} --username ${DELIVERY_REPOSITORIES_USERNAME} --password ${DELIVERY_REPOSITORIES_PASSWORD}
41 | - docker push ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1}/${CI_PROJECT_NAME}/artifact:${CI_COMMIT_REF_NAME}
42 | - docker push ${DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1}/${CI_PROJECT_NAME}/artifact:latest
43 | - echo " - End of CI job"
44 | artifacts:
45 | name: "$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA:delivery"
46 | expire_in: 1d
47 | paths:
48 | - ./*
49 |
50 | Delivery 2: # Job can then be duplicated to deliver to multiples registries (just use a different REPOSITORIES_DOCKER_REGISTRY_DOMAIN_N variable for each)
51 | ...
52 |
53 |
--------------------------------------------------------------------------------
/scripts/delivery-docker/Dockerfile:
--------------------------------------------------------------------------------
1 | # Artifact-only image: FROM scratch carries no OS layers, only the project
2 | # files, so the build result can be pushed to a registry as a deployable unit.
3 | FROM scratch
4 | 
5 | # Build metadata injected by the CI job (see .gitlab-ci.delivery_via_docker_example.yml)
6 | ARG BUILD_DATE
7 | ARG BUILD_NAME
8 | ARG BUILD_DESC
9 | ARG BUILD_URL
10 | ARG BUILD_MAINTAINER
11 | 
12 | # label-schema.org conventions so registries/tooling can show build provenance
13 | LABEL org.label-schema.build-date=$BUILD_DATE \
14 | org.label-schema.name=$BUILD_NAME \
15 | org.label-schema.description=$BUILD_DESC \
16 | org.label-schema.vcs-url=$BUILD_URL \
17 | maintainer=$BUILD_MAINTAINER
18 | 
19 | # Absolute path: a relative WORKDIR is discouraged (hadolint DL3000); on a
20 | # scratch base "src" already resolved to /src, so behavior is unchanged.
21 | WORKDIR /src
22 | # Copy the whole build context (sources + built artifacts) into the image
23 | COPY . .
24 | 
--------------------------------------------------------------------------------
/scripts/delivery-docker/README.md:
--------------------------------------------------------------------------------
1 | # Delivering script
2 |
3 | ## What
4 |
5 | - This script delivers the current tag from a repo to a docker registry using a Gitlab CI job, as part of continuous deployments
6 | - You can also deliver to multiple registries at the same time using multiple jobs in the same pipeline
7 |
8 | ## Why
9 |
10 | Why not use the built-in registry functionality Gitlab-ci and other repository services usually offer ?
11 | 
12 | - Because it doesn't work when your git repo is protected behind a basic authentication
13 | - Because mirroring to multiple registries at once is sometimes a premium (paid) feature
14 | - Because artifacts (downloaded and generated files) are usually not versioned in git
15 |
16 | ## Setup
17 |
18 | 2 files are required :
19 | - `.gitlab-ci.yml`
20 | - `scripts/delivery-docker/Dockerfile`
21 |
22 | 1. Define a delivery CI job like "delivery" in .gitlab-ci.yml, as shown in .gitlab-ci.delivery_via_docker_example.yml
23 | - To include artifact dependencies, this CI job should be positioned after all dependencies have been built and installed and use the [dependencies](https://docs.gitlab.com/ee/ci/yaml/#dependencies) keyword
24 | 1. In Gitlab UI, add the following custom CI/CD variables :
25 | - DELIVERY_REPOSITORIES_DOCKER_REGISTRY_DOMAIN_1 : Docker repository to which deliver current tag (you can have multiple ones)
26 | - DELIVERY_REPOSITORIES_USERNAME : Service account credentials to use to push Docker image
27 | - DELIVERY_REPOSITORIES_PASSWORD : Service account credentials to use to push Docker image
28 |
29 |
--------------------------------------------------------------------------------
/scripts/delivery-git/.gitlab-ci.delivery_via_git_example.yml:
--------------------------------------------------------------------------------
1 | # Example of .gitlab-ci.yml jobs
2 | # Use with scripts/delivery-git/deliver_current_tag_via_git.sh
3 | 
4 | stages:
5 | - deliver
6 | 
7 | .delivery_via_git_template: &delivery_via_git_template
8 | stage: deliver
9 | dependencies:
10 | - prepare:front # Where front dependencies are installed and assets are builded
11 | - prepare:back # Where back dependencies are installed
12 | allow_failure: true # A failed delivery must not block the rest of the pipeline
13 | retry:
14 | max: 2 # Retry to absorb transient network/push failures
15 | only:
16 | - tags # Deliver tagged releases only
17 | except:
18 | - branches
19 | 
20 | Delivery to hoster:
21 | <<: *delivery_via_git_template
22 | script:
23 | - echo " - Start of CI script"
24 | - date; pwd; ls -lah;
25 | - mkdir -p ~/.ssh
26 | - ls -lah ~
27 | - echo "$DELIVERY_REMOTE_REPO_PRIVATE_KEY" > ~/.ssh/id_rsa # Deploy key comes from a CI/CD variable
28 | - chmod 0600 ~/.ssh/id_rsa
29 | - ls -lah ~/.ssh
30 | - apk add --no-cache openssh-client git rsync
31 | - ssh-keyscan -H "$DELIVERY_REMOTE_REPO_IP" >> ~/.ssh/known_hosts # Pre-trust the target host key
32 | - git version
33 | - rsync --version
34 | - export TARGET_GIT_REPO="${DELIVERY_REMOTE_REPO_URL_1}" # Gitlab custom variable to update for each repo to mirror
35 | - echo -e "TARGET_GIT_REPO = $TARGET_GIT_REPO"
36 | - export TARGET_GIT_REPO_BRANCH="${DELIVERY_REMOTE_REPO_BRANCH}" # Gitlab custom variable to update for each repo to mirror
37 | - echo -e "TARGET_GIT_REPO_BRANCH = $TARGET_GIT_REPO_BRANCH"
38 | - export TARGET_GIT_REPO_TYPE="${DELIVERY_REMOTE_REPO_TYPE}"
39 | - echo -e "TARGET_GIT_REPO_TYPE = $TARGET_GIT_REPO_TYPE"
40 | - chmod +x scripts/delivery-git/deliver_current_tag_via_git.sh
41 | - ./scripts/delivery-git/deliver_current_tag_via_git.sh
42 | - echo " - End of CI script"
43 | artifacts:
44 | name: "$CI_COMMIT_REF_NAME:$CI_COMMIT_SHA:delivery"
45 | expire_in: 1d
46 | paths:
47 | - ./*
48 | exclude:
49 | - .cache/**/*
50 | 
51 | Delivery to repo YYY: # Job can then be duplicated to deliver to multiples repos (just use a different DELIVERY_REMOTE_REPO_URL_N variable for each)
52 | ...
53 | 
54 |
--------------------------------------------------------------------------------
/scripts/delivery-git/README.md:
--------------------------------------------------------------------------------
1 | # Delivering script
2 |
3 | ## What
4 |
5 | - This script delivers the current tag from a repo to another one using a Gitlab CI job, as part of continuous deployments
6 | - You can also deliver to multiple repositories at the same time using multiple jobs in the same pipeline
7 |
8 | ## Why
9 |
10 | Why not use the built-in mirroring functionality Gitlab-ci and other repository services usually offer ?
11 | 
12 | - Because it doesn't work when your git repo is protected behind a basic authentication
13 | - Because mirroring to multiple repos at once is sometimes a premium (paid) feature
14 | - Because artifacts (downloaded and generated files) are usually not versioned in git
15 |
16 | ## Setup
17 |
18 | 2 files are required :
19 | - `.gitlab-ci.yml`
20 | - `scripts/delivery-git/deliver_current_tag_via_git.sh`
21 |
22 | 1. Define a delivery CI job like "Delivery to hoster" in .gitlab-ci.yml, as shown in .gitlab-ci.delivery_via_git_example.yml
23 | - To include artifact dependencies, this CI job should be positioned after all dependencies have been built and installed and use the [dependencies](https://docs.gitlab.com/ee/ci/yaml/#dependencies) keyword
24 | 1. In Gitlab UI, add the following custom CI/CD variables :
25 | - DELIVERY_REMOTE_REPO_IP : IP or domain name of target git repos
26 | - DELIVERY_REMOTE_REPO_PRIVATE_KEY : SSH private key matching public key added to git user
27 | - DELIVERY_REMOTE_REPO_TYPE : Possible values : "PLATFORM.SH" only for now, or leave empty if appropriate
28 | - DELIVERY_REMOTE_REPO_URL_1 : Git repo to which deliver current tag (you can have multiple ones)
29 | - DELIVERY_REMOTE_REPO_BRANCH : Git branch to which deliver current tag
30 | - GIT_USER_EMAIL : Email to be used by git user (used to commit)
31 | - GIT_USER_NAME : Name to be used by git user (used to commit)
32 |
33 |
--------------------------------------------------------------------------------
/scripts/delivery-git/deliver_current_tag_via_git.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | # Use with Gitlab CI jobs from scripts/delivery-git/.gitlab-ci.delivery_via_git_example.yml
3 | 
4 | echo -e "\n- Start of delivery script"
5 | # set -x #echo on
6 | 
7 | # Defining functions # For local use only, NOT FOR USE IN CI
8 | 
9 | CURRENT_TAG_FUNC() # Latest tag in the repo (local runs only; CI uses CI_COMMIT_REF_NAME instead)
10 | {
11 | git describe --tags $(git rev-list --tags --max-count=1)
12 | }
13 | 
14 | # Defining variables
15 | echo -e "- Defining variables...\n"
16 | 
17 | PACKAGE_DIR=$(pwd)/files_to_be_deployed
18 | echo -e "PACKAGE_DIR = $PACKAGE_DIR"
19 | 
20 | # TARGET_GIT_REPO=XXX@XXX.git # For local use only, NOT FOR USE IN CI
21 | # For CI use, var is moved to CI job itself, so that same script can be used to clone on multiple repos
22 | echo -e "TARGET_GIT_REPO = $TARGET_GIT_REPO"
23 | 
24 | # TARGET_GIT_REPO_BRANCH=master # For local use only, NOT FOR USE IN CI
25 | # For CI use, var is moved to CI job itself, so that same script can be used to clone on multiple repos
26 | echo -e "TARGET_GIT_REPO_BRANCH = $TARGET_GIT_REPO_BRANCH"
27 | 
28 | # TARGET_GIT_REPO_TYPE=GITLAB # For local use only, NOT FOR USE IN CI
29 | # For CI use, var is moved to CI job itself, so that same script can be used to clone on multiple repos
30 | echo -e "TARGET_GIT_REPO_TYPE = $TARGET_GIT_REPO_TYPE"
31 | 
32 | # CURRENT_TAG=$(CURRENT_TAG_FUNC) # For local use only, NOT FOR USE IN CI
33 | CURRENT_TAG="$CI_COMMIT_REF_NAME" # For CI use only, using Gitlab predefined variable
34 | echo -e "CURRENT_TAG = $CURRENT_TAG"
35 | 
36 | # GIT_USER_EMAIL="XXX@XXX.com" # For local use only, NOT FOR USE IN CI
37 | echo -e "GIT_USER_EMAIL = $GIT_USER_EMAIL" # For CI use only, using Gitlab custom variable
38 | 
39 | # GIT_USER_NAME="XXX CI/CD" # For local use only, NOT FOR USE IN CI
40 | echo -e "GIT_USER_NAME = $GIT_USER_NAME" # For CI use only, using Gitlab custom variable
41 | 
42 | # Preparing delivery dir
43 | echo -e "- Preparing delivery dir...\n"
44 | mkdir "$PACKAGE_DIR"
45 | cd "$PACKAGE_DIR"
46 | 
47 | # Initialising external git repo
48 | echo -e "- Initialising external git repo...\n"
49 | git init && git config --local core.excludesfile false && git config --local core.fileMode true # NOTE(review): excludesfile is set to the literal path "false" -- presumably meant to disable excludes; confirm
50 | git remote add origin $TARGET_GIT_REPO
51 | git pull origin master # NOTE(review): always pulls "master" even when TARGET_GIT_REPO_BRANCH differs -- confirm intended
52 | git fetch
53 | git checkout $TARGET_GIT_REPO_BRANCH
54 | git config --local user.email "$GIT_USER_EMAIL"
55 | git config --local user.name "$GIT_USER_NAME"
56 | 
57 | # Deleting files in delivery dir
58 | echo -e "- Deleting files in delivery dir...\n"
59 | set -x #echo on
60 | find -maxdepth 1 ! -name '.git' -exec rm -rv {} \; 1> /dev/null # GNU find: no start path given (POSIX requires "find .")
61 | ls -lah
62 | 
63 | # Copying files to delivery dir
64 | echo -e "- Copying files to delivery dir...\n"
65 | rsync -av --quiet --progress ../. . --exclude .git/ --exclude files_to_be_deployed/
66 | 
67 | # Making sure everything needed will be included in commit
68 | echo -e "- Making sure everything needed will be included in commit...\n"
69 | echo -e "-- Deleting all .gitignore files...\n"
70 | mv .gitignore .gitig
71 | find . -name '.gitignore' -type f | wc -l
72 | find . -name '.gitignore' -type f -exec rm {} +
73 | # mv .gitig .gitignore // Note that we don't keep the .gitignore file like we usualy do, all project content must be commited here
74 | 
75 | echo -e "-- Deleting all .git directories except root one...\n"
76 | mv .git .got
77 | find . -name '.git' -type d | wc -l
78 | find . -name '.git' -type d -exec rm -rf {} +
79 | mv .got .git
80 | 
81 | # Removing local DB settings from settings.php
82 | sed -i -e "/$databases\['default'\]\['default'\] = array (/,/)/d" web/sites/default/settings.php # NOTE(review): "$databases" is expanded (likely empty) by the shell inside double quotes -- pattern probably meant a literal \$databases; confirm
83 | # Adding install profile value in settings.php
84 | echo "\$settings['install_profile'] = 'druxxy';" >> web/sites/default/settings.php
85 | # Adding settings.local.php to web dir
86 | cp settings/settings.local.php web/sites/default/settings.local.php
87 | sed -i "/settings.local.php';/s/# //g" web/sites/default/settings.php
88 | 
89 | # Preventing platform.sh error "Application name 'app' is not unique"
90 | if [ "$TARGET_GIT_REPO_TYPE" = "PLATFORM.SH" ]; then
91 | echo -e "- Preventing platform.sh error "Application name 'app' is not unique"...\n"
92 | sed -i "s|name: 'app'|name: 'XXX'|g" ../.platform.app.yaml
93 | fi
94 | # Moving hosting env files back at project root
95 | if [ "$TARGET_GIT_REPO_TYPE" = "GITLAB" ]; then
96 | echo -e "- Moving hosting env files back at project root...\n"
97 | mv .gitlab-ci.yml .gitlab-ci-backup.yml
98 | mv hosting/* hosting/.* . # NOTE(review): "hosting/.*" also matches "." and "..", so mv will error on those entries -- confirm
99 | fi
100 | 
101 | # Commiting to external repo
102 | echo -e "- Commiting to external repo...\n"
103 | git add -A 1> /dev/null
104 | git status -s
105 | git commit --quiet -m "$CURRENT_TAG"
106 | git push origin $TARGET_GIT_REPO_BRANCH --quiet
107 | git tag "$CURRENT_TAG"
108 | git push --tag # NOTE(review): git push expects "--tags" -- verify "--tag" is accepted
109 | 
110 | # Cleaning delivery dir
111 | echo -e "- Cleaning delivery dir...\n"
112 | cd ..
113 | rm -rf "$PACKAGE_DIR"
114 | 
115 | echo -e "- End of delivery script"
116 |
117 |
--------------------------------------------------------------------------------
/scripts/git_hooks/sniffers.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Git hook entry point: delegate to the "sniffers" make target. exec replaces
4 | # this shell, so make's exit status becomes the hook's exit status directly.
5 | exec make sniffers
6 | 
6 |
--------------------------------------------------------------------------------
/scripts/makefile/backup.mk:
--------------------------------------------------------------------------------
1 | ## Make backup from current state
2 | mysql_dump_name = $(COMPOSE_PROJECT_NAME).sql
3 | files_dir = web/sites/default/files
4 | datestamp=$(shell echo `date +'%Y-%m-%d'`)
5 | backup_name = $(COMPOSE_PROJECT_NAME)-$(datestamp).tar.gz
6 | 
7 | backup: # Produces $(backup_name) at the project root
8 | rm -f $(backup_name) # drop any archive left over from an earlier run today
9 | $(call php, drush sql-dump --database=default --result-file=../$(mysql_dump_name) --structure-tables-list=cachetags,cache_*,flood,sessions,watchdog) # cache/session/log tables exported structure-only
10 | tar -czvf $(backup_name) --exclude=$(files_dir)/translations --exclude=$(files_dir)/js --exclude=$(files_dir)/css --exclude=$(files_dir)/styles --exclude=$(files_dir)/php $(files_dir) $(mysql_dump_name) # bundle DB dump + user files, minus regenerable assets
11 | rm $(mysql_dump_name) # the dump now lives inside the archive
12 |
--------------------------------------------------------------------------------
/scripts/makefile/baseconfig-langcode.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Checking langcode of base config files
4 | if [ -r config/sync/system.site.yml ]
5 | then
6 | 
7 | # Get the site settings config file (if it exists) + save it in a variable
8 | SITE_SETTINGS_FILE_PATH=$(find config/sync -maxdepth 1 -type f -name "system.site.yml") # NOTE(review): captured but never used below -- confirm it can be dropped
9 | 
10 | # Get Drupal default language as defined in the site settings config file + save it in a variable
11 | # DEFAULT_SITE_LANG_VALUE=$(awk -v pattern="default_langcode" '$1 ~ pattern { print $NF }' config/sync/system.site.yml)
12 | DEFAULT_SITE_LANG_VALUE=$(awk -v pattern="default_langcode" '$1 ~ pattern { print $NF }' config/sync/system.site.yml)
13 | 
14 | # Get the language defined in each of the basic config files + save them in a variable
15 | LANG_VALUE_IN_BASE_CONFIG_FILES=$(grep -E "^langcode:" config/sync/*.yml | awk '{print $2}' | sort | uniq) # unique langcodes used across all sync'd config files
16 | 
17 | # Defining value of MESSAGE_OUTPUT variable
18 | MESSAGE_OUTPUT="\nThe language of some base config files is NOT matching site default language (\e[32m$DEFAULT_SITE_LANG_VALUE\e[0m) :"
19 | FAIL=0
20 | 
21 | # For each file, compare the language of base config files against default site language
22 | for lang in $LANG_VALUE_IN_BASE_CONFIG_FILES; do
23 | if [ "$lang" != "$DEFAULT_SITE_LANG_VALUE" ]
24 | then
25 | FAIL=1
26 | MESSAGE_OUTPUT="$MESSAGE_OUTPUT \n - langcode \e[31m$lang\e[0m was found in $(grep -rE "^langcode: $lang" config/sync/*.yml -l | wc -l) file(s)\n$(grep -rE "^langcode: $lang" config/sync/*.yml -l)"
27 | fi
28 | done
29 | if [ $FAIL -eq 1 ]
30 | then
31 | echo -e "$MESSAGE_OUTPUT \n\n\e[33mBase configs should have the same langcode as default site language.\n" # NOTE(review): "echo -e" is not portable under /bin/sh -- relies on busybox/bash behavior
32 | else
33 | echo "Langcode of config files are valid"
34 | fi
35 | exit $FAIL
36 | fi
37 |
--------------------------------------------------------------------------------
/scripts/makefile/blackfire.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # installs blackfire.io probe extension
4 | 
5 | # use `php -i | grep "additional .ini"` to get it
6 | PHP_INI_DIR=/etc/php83/conf.d
7 | 
8 | set -e
9 | 
10 | # Credentials must come from the environment (docker-compose.override.yml)
11 | env_vars='BLACKFIRE_CLIENT_ID BLACKFIRE_CLIENT_TOKEN'
12 | 
13 | # Fail fast when a credential is unset or still at the placeholder value 'x'.
14 | # Two separate test invocations: "-o" inside [ ] is obsolescent per POSIX.
15 | for var in $env_vars; do
16 | eval "val=\${$var}"
17 | if [ -z "${val}" ] || [ "${val}" = 'x' ]; then
18 | echo "Configure ${var} in docker-compose.override.yml"
19 | echo "Visit https://blackfire.io/my/settings/credentials to get credentials"; exit 1
20 | fi
21 | done
22 | 
23 | # Download the probe matching the local PHP version, install the .so into the
24 | # PHP extension dir, point it at the blackfire agent container, then clean up.
25 | version=$(php -r "echo PHP_MAJOR_VERSION.PHP_MINOR_VERSION;") \
26 | && curl -A "Docker" -o /tmp/blackfire-probe.tar.gz -D - -L -s https://blackfire.io/api/v1/releases/probe/php/alpine/amd64/$version \
27 | && mkdir -p /tmp/blackfire \
28 | && tar zxpf /tmp/blackfire-probe.tar.gz -C /tmp/blackfire \
29 | && mv /tmp/blackfire/blackfire-*.so $(php -r "echo ini_get ('extension_dir');")/blackfire.so \
30 | && printf "extension=blackfire.so\nblackfire.agent_socket=tcp://blackfire:8707\n" > $PHP_INI_DIR/blackfire.ini \
31 | && rm -rf /tmp/blackfire /tmp/blackfire-probe.tar.gz
32 | 
--------------------------------------------------------------------------------
/scripts/makefile/config-inspector-validation.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | # Enable config inspector module before inspection.
3 | drush pm:enable config_inspector -y
4 | 
5 | # Get count of config inspector errors
6 | ERROR_COUNT=$(drush config:inspect --only-error --format=string | tail -n +3 | wc -l) # "tail -n +3" drops the two table-header lines
7 | 
8 | # Exit1 and alert if logs
9 | if [ "$ERROR_COUNT" -gt "0" ]; then
10 | printf "\n- \033[1m$ERROR_COUNT error(s)\033[0m identified by config_inspector to fix :\n"
11 | drush config:inspect --only-error --detail
12 | echo -e "\nConfiguration is not valid : \n- Go to \033[1m/admin/config/development/configuration/inspect\033[0m for more details\n"
13 | exit 1
14 | else
15 | drush pmu config_inspector -y # uninstall only on success, so failures can still be inspected in the UI
16 | echo -e "Configuration is valid"
17 | exit 0
18 | fi
19 |
--------------------------------------------------------------------------------
/scripts/makefile/contentgen.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | # set -x
3 | set -eu
4 | 
5 | # Preparing
6 | echo -e "\nEnabling module..."
7 | drush pm:enable devel_generate -y
8 | 
9 | # Parsing command
10 | PARSING_CMD='echo drush config:status --state=Any --format=list' # the leading "echo" makes the command string substitutable; "$($PARSING_CMD)" below then executes it
11 | 
12 | 
13 | # Looking for languages
14 | echo -e "\nLooking for languages..."
15 | 
16 | # Entity to parse. Can be node.type or taxonomy.vocabulary for ex (see PARSING_CMD for more)
17 | ENTITY_TO_PARSE=language.entity
18 | 
19 | # Languages "undefined" and "zxx" exist by default but are invisible in UI
20 | LANGUAGES_TO_EXCLUDE="und|zxx"
21 | 
22 | # Count languages
23 | LANGUAGE_COUNT=$($($PARSING_CMD) | grep -vE "$LANGUAGES_TO_EXCLUDE" | grep -c $ENTITY_TO_PARSE) # inner $(...) yields the drush command, which is then run
24 | 
25 | # Find languages
26 | LANGUAGES_FOUND=$($($PARSING_CMD) | grep -vE "$LANGUAGES_TO_EXCLUDE" | grep ^$ENTITY_TO_PARSE | awk -F "." '{print $3}' | tr '\n' ',' | sed 's/,$//')
27 | 
28 | if [ "$LANGUAGE_COUNT" -gt "1" ]; then
29 | printf "- \033[1m$LANGUAGE_COUNT languages\033[0m found : "
30 | echo $LANGUAGES_FOUND
31 | printf "All content will be created with their translations !\n"
32 | else
33 | printf "- \033[1mOnly 1 language\033[0m found : $LANGUAGES_FOUND\n"
34 | fi
35 | 
36 | # Looking for bundles
37 | echo -e "\nLooking for bundles..."
38 | 
39 | # Voc entity
40 | 
41 | # Entity to parse. Can be node.type or taxonomy.vocabulary for ex (see PARSING_CMD for more)
42 | ENTITY_TO_PARSE=taxonomy.vocabulary
43 | 
44 | # Count bundles
45 | BUNDLE_COUNT=$($($PARSING_CMD) | grep -c $ENTITY_TO_PARSE)
46 | 
47 | if [ "$BUNDLE_COUNT" -gt "0" ]; then
48 | 
49 | printf "- \033[1m$BUNDLE_COUNT Voc bundle(s)\033[0m found : "
50 | BUNDLES_FOUND=$($($PARSING_CMD) | grep ^$ENTITY_TO_PARSE | awk -F "." '{print $3}' | tr '\n' ',' | sed 's/,$//')
51 | echo $BUNDLES_FOUND
52 | 
53 | echo " Generating content..."
54 | VOC_GENERATE_COUNT=10
55 | 
56 | BUNDLES_FOUND=$($($PARSING_CMD) | grep ^$ENTITY_TO_PARSE | awk -F "." '{print $3}')
57 | for voc_bundles in $BUNDLES_FOUND; do
58 | drush devel-generate-terms $VOC_GENERATE_COUNT --bundles=$voc_bundles --translations=$LANGUAGES_FOUND --quiet
59 | echo " $VOC_GENERATE_COUNT terms have been created for $voc_bundles"
60 | done
61 | 
62 | else
63 | printf "- \033[1mNo Voc bundle\033[0m found\n"
64 | fi
65 | 
66 | # CT entity
67 | 
68 | # Entity to parse. Can be node.type or taxonomy.vocabulary for ex (see PARSING_CMD for more)
69 | ENTITY_TO_PARSE=node.type
70 | 
71 | # Count bundles
72 | BUNDLE_COUNT=$($($PARSING_CMD) | grep -c $ENTITY_TO_PARSE)
73 | 
74 | if [ "$BUNDLE_COUNT" -gt "0" ]; then
75 | 
76 | printf "- \033[1m$BUNDLE_COUNT CT bundle(s)\033[0m found : "
77 | BUNDLES_FOUND=$($($PARSING_CMD) | grep ^$ENTITY_TO_PARSE | awk -F "." '{print $3}' | tr '\n' ',' | sed 's/,$//')
78 | echo $BUNDLES_FOUND
79 | 
80 | echo " Generating content..."
81 | CT_GENERATE_COUNT=30
82 | 
83 | BUNDLES_FOUND=$($($PARSING_CMD) | grep ^$ENTITY_TO_PARSE | awk -F "." '{print $3}')
84 | for ct_bundles in $BUNDLES_FOUND; do
85 | drush devel-generate-content $CT_GENERATE_COUNT --bundles=$ct_bundles --translations=$LANGUAGES_FOUND --quiet
86 | echo " $CT_GENERATE_COUNT nodes have been created for $ct_bundles"
87 | done
88 | 
89 | else
90 | printf "- \033[1mNo CT bundle\033[0m found\n"
91 | fi
92 | 
93 | 
94 | # Cleaning
95 | echo -e "\nDisabling module..."
96 | drush pmu devel_generate devel -y # also removes devel, pulled in as a dependency of devel_generate
97 | 
98 | # Informing
99 | echo -e "\nFor more content, run this job multiple times or use Devel Generate Drupal UI.\n"
100 |
--------------------------------------------------------------------------------
/scripts/makefile/front.mk:
--------------------------------------------------------------------------------
1 | FRONT_PORT?=65200
2 | 
3 | # Execute front container function.
4 | frontexec = docker run \
5 | --rm \
6 | --init \
7 | -u $(CUID):$(CGID) \
8 | -v $(CURDIR)/web/themes/custom/$(THEME_NAME):/app \
9 | --workdir /app \
10 | $(IMAGE_FRONT) ${1}
11 | 
12 | # Execute front container function on localhost:FRONT_PORT. Needed for dynamic storybook.
13 | frontexec-with-port = docker run \
14 | --rm \
15 | --init \
16 | -p $(FRONT_PORT):$(FRONT_PORT) \
17 | -u $(CUID):$(CGID) \
18 | -v $(CURDIR)/web/themes/custom/$(THEME_NAME):/app \
19 | --workdir /app \
20 | $(IMAGE_FRONT) ${1}
21 | 
22 | # Execute front container with TTY. Needed for storybook components creation.
23 | frontexec-with-interactive = docker run \
24 | --rm \
25 | --init \
26 | -u $(CUID):$(CGID) \
27 | -v $(CURDIR)/web/themes/custom/$(THEME_NAME):/app \
28 | --workdir /app \
29 | -it \
30 | $(IMAGE_FRONT) ${1}
31 | 
32 | clear-front: # Wipe installed node_modules and compiled dist
33 | @echo "Clean of node_modules and compiled dist... To skip this action please set CLEAR_FRONT_PACKAGES=no in .env file"
34 | $(call frontexec, rm -rf /app/node_modules /app/dist)
35 | 
36 | ## Install frontend dependencies & build assets
37 | front: | front-install front-build # order-only prerequisites: install, then build
38 | 
39 | front-install: # yarn install inside the front image (skipped when theme dir is absent)
40 | @if [ -d $(CURDIR)/web/themes/custom/$(THEME_NAME) ]; then \
41 | echo "- Theme directory found. Installing yarn dependencies..."; \
42 | docker pull $(IMAGE_FRONT); \
43 | $(call frontexec, node -v); \
44 | $(call frontexec, yarn -v); \
45 | $(call frontexec, yarn install --ignore-optional --check-files --prod); \
46 | else \
47 | echo "- Theme directory defined in .env file was not found. Skipping front-install."; \
48 | fi
49 | 
50 | front-build: # Compile theme assets (skipped when theme dir is absent)
51 | @if [ -d $(CURDIR)/web/themes/custom/$(THEME_NAME) ]; then \
52 | echo "- Theme directory found. Building front assets..."; \
53 | docker pull $(IMAGE_FRONT); \
54 | $(call frontexec, node -v); \
55 | $(call frontexec, yarn -v); \
56 | $(call frontexec, yarn build --stats=verbose); \
57 | else \
58 | echo "- Theme directory defined in .env file was not found. Skipping front-build."; \
59 | fi
60 | 
61 | lintval: # Theme linters, check-only (CI)
62 | @if [ -d $(CURDIR)/web/themes/custom/$(THEME_NAME) ]; then \
63 | echo "- Theme directory found. Running theme linters..."; \
64 | docker pull $(IMAGE_FRONT); \
65 | $(call frontexec, node -v); \
66 | $(call frontexec, yarn -v); \
67 | $(call frontexec, yarn run lint); \
68 | else \
69 | echo "- Theme directory defined in .env file was not found. Skipping theme linters."; \
70 | fi
71 | 
72 | lint: # Theme linters with autofix (local use)
73 | @if [ -d $(CURDIR)/web/themes/custom/$(THEME_NAME) ]; then \
74 | echo "- Theme directory found. Running theme linters with fix..."; \
75 | docker pull $(IMAGE_FRONT); \
76 | $(call frontexec, node -v); \
77 | $(call frontexec, yarn -v); \
78 | $(call frontexec, yarn install --ignore-optional --check-files --prod); \
79 | $(call frontexec, yarn lint-fix); \
80 | else \
81 | echo "- Theme directory defined in .env file was not found. Skipping theme linters with fix."; \
82 | fi
83 | 
84 | storybook: # Serve dynamic storybook on localhost:FRONT_PORT
85 | @if [ -d $(CURDIR)/web/themes/custom/$(THEME_NAME) ]; then \
86 | echo "- Theme directory found. Running dynamic storybook..."; \
87 | docker pull $(IMAGE_FRONT); \
88 | $(call frontexec, node -v); \
89 | $(call frontexec, yarn -v); \
90 | $(call frontexec, yarn install --ignore-optional --check-files); \
91 | $(call frontexec, yarn run build); \
92 | $(call frontexec-with-port, yarn storybook -p $(FRONT_PORT)); \
93 | else \
94 | echo "- Theme directory defined in .env file was not found. Skipping dynamic storybook."; \
95 | fi
96 | 
97 | build-storybook: # Export a static storybook build
98 | @if [ -d $(CURDIR)/web/themes/custom/$(THEME_NAME) ]; then \
99 | echo "- Theme directory found. Exporting static storybook..."; \
100 | docker pull $(IMAGE_FRONT); \
101 | $(call frontexec, node -v); \
102 | $(call frontexec, yarn -v); \
103 | $(call frontexec, yarn install --ignore-optional --check-files); \
104 | $(call frontexec, yarn run build); \
105 | $(call frontexec, yarn run build-storybook); \
106 | else \
107 | echo "- Theme directory defined in .env file was not found. Skipping dynamic storybook."; \
108 | fi
109 | 
110 | create-component: # Interactive component scaffolding dialog
111 | @echo "Create component CLI dialog... It assumed that you already have 'make storybook' or 'make build-storybook' finished"
112 | docker pull $(IMAGE_FRONT)
113 | $(call frontexec-with-interactive, yarn cc)
114 |
--------------------------------------------------------------------------------
/scripts/makefile/help.mk:
--------------------------------------------------------------------------------
1 | TARGET_MAX_CHAR_NUM=25
2 | # ANSI escape sequences used to colorize the help output
3 | RESET=\x1b[0m
4 | GREEN=\x1b[32;01m
5 | RED=\x1b[31;01m
6 | YELLOW=\x1b[33;01m
7 | 
8 | ## Show help
9 | help:
10 | @echo ''
11 | @echo 'Usage:'
12 | @echo -e ' ${YELLOW}make${RESET} ${GREEN}${RESET}'
13 | @echo ''
14 | @echo 'Targets:'
15 | @awk '/^[a-zA-Z\-_0-9]+:/ { \
16 | helpMessage = match(lastLine, /^## (.*)/); \
17 | if (helpMessage) { \
18 | helpCommand = substr($$1, 0, index($$1, ":")); \
19 | helpMessage = substr(lastLine, RSTART + 3, RLENGTH); \
20 | printf " ${YELLOW}%-$(TARGET_MAX_CHAR_NUM)s${RESET} ${GREEN}%s${RESET}\n", helpCommand, helpMessage; \
21 | } \
22 | } \
23 | { lastLine = $$0 }' $(MAKEFILE_LIST)
24 | 
24 |
--------------------------------------------------------------------------------
/scripts/makefile/newlineeof.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Validates that each listed file ends with a trailing newline.
4 | # Separate with comma ","
5 | FILES_TO_VALIDATE=.env.default
6 | 
7 | FILES_TO_VALIDATE_AS_LIST=$(echo "$FILES_TO_VALIDATE" | tr ',' '\n')
8 | 
9 | echo "Validating newline at the end of file(s) $FILES_TO_VALIDATE..."
10 | 
11 | for file in $FILES_TO_VALIDATE_AS_LIST; do
12 | # A missing/unreadable file would make tail output nothing, which the
13 | # empty-string test below would wrongly report as OK -- fail explicitly.
14 | if [ ! -r "$file" ]
15 | then
16 | printf "\e[33mKO : File $file is missing or unreadable !\e[0m\n"
17 | exit 1
18 | fi
19 | # "tail -c 1" prints the last byte; command substitution strips trailing
20 | # newlines, so an empty result means the file ends with "\n"
21 | if [ -z "$(tail -c 1 "$file")" ]
22 | then
23 | echo "OK : Newline found at end of $file"
24 | else
25 | printf "\e[33mKO : No newline found at end of $file !\e[0m\n"
26 | exit 1
27 | fi
28 | done
29 | 
--------------------------------------------------------------------------------
/scripts/makefile/newrelic.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # installs newrelic.com agent extension
4 | 
5 | # use `php -i | grep "additional .ini"` to get it
6 | PHP_INI_DIR=/etc/php83/conf.d
7 | 
8 | # get the latest version from https://download.newrelic.com/php_agent/archive/
9 | NEW_RELIC_AGENT_VERSION="${NEW_RELIC_AGENT_VERSION:-11.9.0.23}"
10 | # change it to 'linux' if docker image is not based on Alpinelinux
11 | NEW_RELIC_LINUX=${NEW_RELIC_LINUX:-linux-musl}
12 | 
13 | set -e
14 | 
15 | # Print help in case parameters are empty
16 | if [ -z "$1" ] || [ -z "$2" ] # $1 = license key, $2 = application name
17 | then
18 | echo "Visit https://newrelic.com 'Account settings' to get the license key";
19 | exit 1 # Exit script after printing help
20 | fi
21 | NEW_RELIC_LICENSE_KEY="$1" # required positional argument
22 | NEW_RELIC_APPNAME="$2" # required positional argument
23 | 
24 | curl -L https://download.newrelic.com/php_agent/archive/${NEW_RELIC_AGENT_VERSION}/newrelic-php5-${NEW_RELIC_AGENT_VERSION}-${NEW_RELIC_LINUX}.tar.gz | tar -C /tmp -zx \
25 | && export NR_INSTALL_USE_CP_NOT_LN=1 \
26 | && export NR_INSTALL_SILENT=1 \
27 | && /tmp/newrelic-php5-${NEW_RELIC_AGENT_VERSION}-${NEW_RELIC_LINUX}/newrelic-install install \
28 | && rm -rf /tmp/newrelic-php5-* /tmp/nrinstall*
29 | 
30 | sed -i -e s/\"REPLACE_WITH_REAL_KEY\"/${NEW_RELIC_LICENSE_KEY}/ \
31 | -e s/newrelic.appname[[:space:]]=[[:space:]].\*/newrelic.appname="${NEW_RELIC_APPNAME}"/ \
32 | $PHP_INI_DIR/newrelic.ini
33 | # -e s/\;newrelic.daemon.address[[:space:]]=[[:space:]].\*/newrelic.daemon.address="${NEW_RELIC_DAEMON_ADDRESS}"/ \
34 | 
35 | chgrp -R 1000 /var/log/newrelic # let the app user's group (gid 1000) write agent logs
36 | chmod -R g+w /var/log/newrelic
37 |
--------------------------------------------------------------------------------
/scripts/makefile/patchval.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | # set -x
3 | 
4 | # Fails when composer.json references a local (non-remote) patch file.
5 | # "// {}" keeps jq from erroring out when no "patches" section exists.
6 | NON_REMOTE_PATCH_COUNT=$(jq --raw-output '(.extra.patches // {})[] | .[]' composer.json | grep -v "http" | wc -l)
7 | 
8 | if [ "$NON_REMOTE_PATCH_COUNT" -gt "0" ]; then
9 | # If patch file was added, throw error ("\033[0m" resets the bold attribute)
10 | printf "\033[1mERROR: A non-remote patch was found in composer.json:\033[0m\n"
11 | printf " - Patches should not be commited to project repos\n"
12 | printf " - They should be systematically published on Drupal.org, Github or any other upstream repo, to a new or existing issue\n\n"
13 | exit 1
14 | else
15 | printf "OK : No local patch added to repo\n"
16 | exit 0
17 | fi
18 | 
--------------------------------------------------------------------------------
/scripts/makefile/reload.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Reloads the PHP runtime run by PID 1 (Nginx Unit, FrankenPHP or php-fpm).
4 | # Optional $1: directory holding the new config (unit.json / Caddyfile).
5 | 
6 | NC='\x1b[0m'
7 | RED='\x1b[31;01m'
8 | GREEN='\x1b[32;01m'
9 | YELLOW='\x1b[33;01m'
10 | 
11 | set -e
12 | 
13 | # PID 1's command name identifies the SAPI serving the site
14 | sapi=$(cat /proc/1/comm)
15 | 
16 | # $sapi is quoted in every test: an empty read would otherwise be a syntax error
17 | if [ "$sapi" = "unitd" ]; then
18 | socket='--unix-socket /run/control.unit.sock'
19 | if [ -z "${1-}" ]; then
20 | # just reload as no new config passed
21 | curl -s -o /dev/null $socket http://localhost/control/applications/drupal/restart
22 | else
23 | file=${1}/unit.json
24 | curl -s -o /dev/null -X PUT --data-binary @$file $socket http://localhost/config
25 | fi
26 | elif [ "$sapi" = "frankenphp" ]; then
27 | frankenphp reload -c ${1:-/etc/caddy}/Caddyfile
28 | elif echo "$sapi" | grep -q '^php-fpm'; then
29 | kill -USR2 1;
30 | else
31 | printf "%b unknown SAPI to restart%b\n" "${RED}" "${NC}" && exit 1
32 | fi
33 | 
--------------------------------------------------------------------------------
/scripts/makefile/status-report-validation.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | 
3 | # Define messages to watch for
4 | ## Possible values : 2=error, 1=warning, and 0/-1=OK
5 | MESSAGES_SEVERITY="2"
6 | 
7 | ## Messages to ignore
8 | ## Separate with pipe : |
9 | # IGNORED_MESSAGES="Trusted Host Settings"
10 | IGNORED_MESSAGES="Trusted Host Settings"
11 | 
12 | # Get count of messages
13 | MESSAGE_COUNT=$(drush status-report --severity=$MESSAGES_SEVERITY --filter="title~=/^(?!$IGNORED_MESSAGES$)/i" --format=string | wc -l) # negative-lookahead regex drops ignored titles
14 | 
15 | # Exit1 and alert if count > 0
16 | if [ "$MESSAGE_COUNT" -gt "0" ]; then
17 | printf "There are \033[1m$MESSAGE_COUNT message(s)\033[0m in status report to fix :\n"
18 | drush status-report --severity=$MESSAGES_SEVERITY --filter="title~=/^(?!$IGNORED_MESSAGES$)/i" # re-run without --format=string to show the full table
19 | exit 1
20 | else
21 | echo -e "Status report is valid : No error listed"
22 | exit 0
23 | fi
24 |
--------------------------------------------------------------------------------
/scripts/makefile/system-detection.mk:
--------------------------------------------------------------------------------
1 | # List of available OS:
2 | # - WINDOWS
3 | # - OS_X
4 | # - LINUX
5 | # List of available processors
6 | # - AMD64
7 | # - IA32 (Intel x86)
8 | # - ARM
9 | ifeq ($(OS),Windows_NT) # Windows always exports OS=Windows_NT
10 | SYSTEM_OS = 'WINDOWS'
11 | ifeq ($(PROCESSOR_ARCHITEW6432),AMD64) # set when 32-bit make runs on 64-bit Windows
12 | SYSTEM_PROCESSOR = 'AMD64'
13 | else
14 | ifeq ($(PROCESSOR_ARCHITECTURE),AMD64)
15 | SYSTEM_PROCESSOR = 'AMD64'
16 | endif
17 | ifeq ($(PROCESSOR_ARCHITECTURE),x86)
18 | SYSTEM_PROCESSOR = 'IA32'
19 | endif
20 | endif
21 | else
22 | UNAME_S := $(shell uname -s)
23 | ifeq ($(UNAME_S),Linux)
24 | SYSTEM_OS = 'LINUX'
25 | endif
26 | ifeq ($(UNAME_S),Darwin) # macOS: fixed ids + larger compose timeout
27 | SYSTEM_OS = 'OS_X'
28 | CUID=1000
29 | CGID=1000
30 | COMPOSE_HTTP_TIMEOUT=1024
31 | endif
32 | UNAME_P := $(shell uname -p)
33 | ifeq ($(UNAME_P),x86_64)
34 | SYSTEM_PROCESSOR = 'AMD64'
35 | endif
36 | ifneq ($(filter %86,$(UNAME_P)),) # any *86 processor string
37 | SYSTEM_PROCESSOR = 'IA32'
38 | endif
39 | ifneq ($(filter arm%,$(UNAME_P)),)
40 | SYSTEM_PROCESSOR = 'ARM'
41 | endif
42 | endif
43 |
--------------------------------------------------------------------------------
/scripts/makefile/tests.mk:
--------------------------------------------------------------------------------
## Run sniffer validations (executed as git hook, by scripts/git_hooks/sniffers.sh)
sniffers: | clang compval phpcs newlineeof

## Run all tests & validations (including sniffers)
tests: | sniffers cinsp drupalrectorval upgradestatusval behat watchdogval statusreportval patchval

IMAGE_PHPCS ?= skilldlabs/docker-phpcs-drupal:10
# Function for code sniffer images.
# $(1) selects the binary run inside the container: "phpcs" (report) or
# "phpcbf" (auto-fix). Only custom modules/themes are mounted and scanned.
phpcsexec = docker run --rm \
	-v $(CURDIR)/web/modules/custom:/work/modules \
	-v $(CURDIR)/web/themes/custom:/work/themes \
	$(IMAGE_PHPCS) ${1} -s --colors -v \
	--standard=Drupal,DrupalPractice \
	--extensions=php,module,inc,install,profile,theme,yml,txt,md,js \
	--ignore=*.min.js,*.css,libraries/*,dist/*,styleguide/*,README.md,README.txt,node_modules/*,work/themes/**.js,work/themes/**.md \
	.

## Validate codebase with phpcs sniffers to make sure it conforms https://www.drupal.org/docs/develop/standards
phpcs:
	@echo "Phpcs validation..."
	@$(call phpcsexec, phpcs)

## Fix codebase according to Drupal standards https://www.drupal.org/docs/develop/standards
phpcbf:
	@$(call phpcsexec, phpcbf)
26 |
## Add symbolic link from custom script(s) to .git/hooks/
hooksymlink:
# Check if .git directory exists (i.e. we are inside a git working copy)
ifneq ($(wildcard .git/.*),)
# Check if script file exists
ifneq ("$(wildcard scripts/git_hooks/sniffers.sh)","")
	@echo "Removing previous git hooks and installing fresh ones"
# NOTE(review): $(shell ...) runs when make expands the recipe, i.e. before
# the echo above is printed — order of output vs. action differs; confirm.
	$(shell find .git/hooks -type l -exec unlink {} \;)
	$(shell ln -sf ../../scripts/git_hooks/sniffers.sh .git/hooks/pre-push)
else
	@echo "scripts/git_hooks/sniffers.sh file does not exist"
	@exit 1
endif
else
	@echo "No git directory found, git hooks won't be installed"
endif
43 |
## Validate langcode of base config files
clang:
ifneq ("$(wildcard scripts/makefile/baseconfig-langcode.sh)","")
	@echo "Base config langcode validation..."
	@/bin/sh ./scripts/makefile/baseconfig-langcode.sh
else
	@echo "scripts/makefile/baseconfig-langcode.sh file does not exist"
	@exit 1
endif

## Validate configuration schema
cinsp:
ifneq ("$(wildcard scripts/makefile/config-inspector-validation.sh)","")
	@echo "Config schema validation..."
# Ensure (dev) dependencies are installed before running the validation.
	$(call php, composer install -o)
	@$(call php, /bin/sh ./scripts/makefile/config-inspector-validation.sh)
else
	@echo "scripts/makefile/config-inspector-validation.sh file does not exist"
	@exit 1
endif
64 |
## Validate composer.json file
compval:
	@echo "Composer.json validation..."
# safe.directory silences git's "dubious ownership" error for the bind mount.
	@docker run --rm -v $(CURDIR):/mnt -w /mnt $(IMAGE_PHP) sh -c "git config --global --add safe.directory /mnt && composer validate"

## Validate watchdog logs
watchdogval:
ifneq ("$(wildcard scripts/makefile/watchdog-validation.sh)","")
	@echo "Watchdog validation..."
	@$(call php, /bin/sh ./scripts/makefile/watchdog-validation.sh)
else
	@echo "scripts/makefile/watchdog-validation.sh file does not exist"
	@exit 1
endif

## Validate status report
statusreportval:
ifneq ("$(wildcard scripts/makefile/status-report-validation.sh)","")
	@echo "Status report validation..."
	@$(call php, /bin/sh ./scripts/makefile/status-report-validation.sh)
else
	@echo "scripts/makefile/status-report-validation.sh file does not exist"
	@exit 1
endif
89 |
## Validate drupal-rector
drupalrectorval:
ifneq ("$(wildcard rector.php)","")
	@echo "Drupal Rector validation..."
	$(call php, composer install -o)
	$(call php, vendor/bin/rector -V)
# --dry-run: report needed changes without modifying any file.
	$(call php, vendor/bin/rector process --dry-run --no-progress-bar web/modules/custom web/themes/custom)
else
	@echo "rector.php file does not exist"
	@exit 1
endif

## Validate upgrade-status
upgradestatusval:
ifneq ("$(wildcard scripts/makefile/upgrade-status-validation.sh)","")
	@echo "Upgrade status validation..."
	$(call php, composer install -o)
	@$(call php, /bin/sh ./scripts/makefile/upgrade-status-validation.sh)
else
	@echo "scripts/makefile/upgrade-status-validation.sh file does not exist"
	@exit 1
endif

## Validate newline at the end of files
newlineeof:
ifneq ("$(wildcard scripts/makefile/newlineeof.sh)","")
	@/bin/sh ./scripts/makefile/newlineeof.sh
else
	@echo "scripts/makefile/newlineeof.sh file does not exist"
	@exit 1
endif

## Validate that no custom patch is added to repo
patchval:
ifneq ("$(wildcard scripts/makefile/patchval.sh)","")
	@echo "Patch validation..."
# jq installed as root (php-0) — presumably required by patchval.sh; confirm.
	@$(call php-0, apk add --no-cache -q jq)
	@$(call php, /bin/sh ./scripts/makefile/patchval.sh)
else
	@echo "scripts/makefile/patchval.sh file does not exist"
	@exit 1
endif
132 |
## Validate Behat scenarios
BEHAT_ARGS ?= --colors
behat:
	@echo "Getting base url"
ifdef REVIEW_DOMAIN
# Review app: reach the site through basic auth on its public domain.
	$(eval BASE_URL := https:\/\/$(RA_BASIC_AUTH_USERNAME):$(RA_BASIC_AUTH_PASSWORD)@$(MAIN_DOMAIN_NAME))
else
# Local: resolve the web container's IP on the compose network.
	$(eval BASE_URL := http:\/\/$(shell docker inspect --format='{{(index .NetworkSettings.Networks "$(COMPOSE_NET_NAME)").IPAddress}}' $(COMPOSE_PROJECT_NAME)_web))
endif
	echo "Base URL: " $(BASE_URL)
# Start the headless-browser container if it is not already running.
	if [ -z `docker ps -f 'name=$(COMPOSE_PROJECT_NAME)_chrome' --format '{{.Names}}'` ]; then \
	echo 'Browser driver is stopped. Running it.'; \
	make -s browser_driver; \
	fi
# BASE_URL already contains its scheme (http:// or https://); the messages
# below previously prepended a spurious extra "http://".
	@echo "Replacing URL_TO_TEST value in behat.yml with $(BASE_URL)"
	$(call php, cp behat.default.yml behat.yml)
	$(call php, sed -i "s/URL_TO_TEST/$(BASE_URL)/g" behat.yml)
	@echo "Running Behat scenarios against $(BASE_URL)"
	$(call php, composer install -o)
	$(call php, vendor/bin/behat -V)
# Flaky-test mitigation: on failure, rerun only the failed scenarios once.
	$(call php, vendor/bin/behat $(BEHAT_ARGS)) || $(call php, vendor/bin/behat $(BEHAT_ARGS) --rerun)
	make browser_driver_stop

## List existing behat definitions
behatdl:
	$(call php, vendor/bin/behat -dl --colors)

## List existing behat definitions with more details
behatdi:
	$(call php, vendor/bin/behat -di --colors)
163 |
## Run browser driver for behat tests
browser_driver:
# Shares the php container's network namespace so Behat can reach Chrome's
# debugging endpoint on localhost:9222.
	docker run -d --init --rm --name $(COMPOSE_PROJECT_NAME)_chrome \
	--network container:$(COMPOSE_PROJECT_NAME)_php \
	--entrypoint "" \
	$(IMAGE_DRIVER) \
	chromium-browser --headless --disable-gpu \
	--remote-debugging-address=0.0.0.0 --remote-debugging-port=9222 --no-sandbox \
	--window-size=1200,2080 \
	--disable-web-security --w3c=false

## Stop browser driver
browser_driver_stop:
	@echo 'Stopping browser driver...'
# Only issue "docker stop" when the container is actually running.
	if [ ! -z `docker ps -f 'name=$(COMPOSE_PROJECT_NAME)_chrome' --format '{{.Names}}'` ]; then \
	docker stop $(COMPOSE_PROJECT_NAME)_chrome; \
	fi
181 |
## Create a high number of random content
contentgen:
ifneq ("$(wildcard scripts/makefile/contentgen.sh)","")
	$(call php, composer install -o)
	@$(call php, /bin/sh ./scripts/makefile/contentgen.sh)
else
# Fixed: the message previously named watchdog-validation.sh by mistake.
	@echo "scripts/makefile/contentgen.sh file does not exist"
	@exit 1
endif

# Enable the Blackfire profiling extension inside the php container.
blackfire:
ifneq ("$(wildcard scripts/makefile/blackfire.sh)","")
	$(call php-0, /bin/sh ./scripts/makefile/blackfire.sh)
	$(call php-0, /bin/sh ./scripts/makefile/reload.sh)
	@echo "Blackfire extension enabled"
else
	@echo "scripts/makefile/blackfire.sh file does not exist"
	@exit 1
endif

# Enable the NewRelic PHP extension when a license key is provided.
newrelic:
ifdef NEW_RELIC_LICENSE_KEY
	$(call php-0, /bin/sh ./scripts/makefile/newrelic.sh $(NEW_RELIC_LICENSE_KEY) '$(COMPOSE_PROJECT_NAME)')
# Uncomment the newrelic service snippet in the compose override file.
	$(call php, sed -i -e 's/# <<: \*service-newrelic/ <<: \*service-newrelic/g' docker/docker-compose.override.yml)
	$(call php-0, /bin/sh ./scripts/makefile/reload.sh)
	@echo "NewRelic PHP extension enabled"
else
	@echo "NewRelic install skipped as NEW_RELIC_LICENSE_KEY is not set"
endif

# Pass-through target: "make xdebug on|off|status" forwards the extra goal
# word to xdebug.sh via $(filter-out $@,$(MAKECMDGOALS)).
xdebug:
	$(call php-0, /bin/sh ./scripts/makefile/xdebug.sh $(filter-out $@, $(MAKECMDGOALS)))
214 |
--------------------------------------------------------------------------------
/scripts/makefile/upgrade-status-validation.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# Run the Upgrade Status analysis and fail (exit 1) when any file is reported.

# Enable Upgrade Status module
drush pm:enable upgrade_status -y

# Clear drush cache so the module's freshly added commands are discovered
drush cc drush

# Analyze the codebase; any "FILE:" line in the report means at least one issue
REPORT=$(drush upgrade_status:analyze --all --ignore-contrib --ignore-uninstalled)
IS_INVALID=$(echo "$REPORT" | grep "FILE:")

# Uninstall the module again; it is only needed for the analysis itself
drush pmu upgrade_status -y

# Exit 1 and alert if at least one file was reported.
if [ -z "$IS_INVALID" ]; then
  # Plain echo: "echo -e" is not portable under plain sh (dash prints the
  # literal "-e"), and this message contains no escape sequences anyway.
  echo "Status report is valid : No error listed"
  exit 0
else
  printf "There are \033[1missue(s)\033[0m in Upgrade Status report to fix : \n- Go to \033[1m/admin/reports/upgrade-status\033[0m for more details\n"
  # printf '%s\n' prints the report verbatim; echo -e would have expanded
  # any backslash sequences occurring in the report text.
  printf '%s\n' "$REPORT"
  exit 1
fi
23 |
--------------------------------------------------------------------------------
/scripts/makefile/watchdog-validation.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# Fail (exit 1) when the Drupal watchdog contains logs of high severity.

# Define severities to watch for
## Separate with pipe : |
## Possible values : Emergency|Alert|Critical|Error|Warning|Notice|Info|Debug
MESSAGES_SEVERITY="Emergency|Alert|Critical|Error"

# Get count of matching watchdog logs (string output yields one per line)
LOG_COUNT=$(drush watchdog-show --filter="severity~=#($MESSAGES_SEVERITY)#" --format=string | wc -l)

# Exit 1 and print the matching logs if any were found
if [ "$LOG_COUNT" -gt "0" ]; then
  # Use a %s placeholder: never interpolate variables into a printf format
  # string, where they would be re-read as format directives.
  printf "There are \033[1m%s messages\033[0m in logs to fix :\n" "$LOG_COUNT"
  drush watchdog-show --filter="severity~=#($MESSAGES_SEVERITY)#" --format=string --extended --count=100
  exit 1
else
  # Plain echo: "echo -e" is not portable under plain sh and this message
  # contains no escape sequences anyway.
  echo "Watchdog is valid : No message of high severity in logs ($MESSAGES_SEVERITY)"
  exit 0
fi
20 |
--------------------------------------------------------------------------------
/scripts/makefile/xdebug.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | NC='\x1b[0m'
4 | RED='\x1b[31;01m'
5 | GREEN='\x1b[32;01m'
6 | YELLOW='\x1b[33;01m'
7 |
8 | ACTION="${1:-na}"
9 |
xdebug_status() {
  # Report whether the Xdebug extension is currently loaded into PHP,
  # based on the module list printed by "php -m".
  if php -m | grep -q 'Xdebug'; then
    printf "%b enabled%b\n" "${GREEN}" "${NC}"
  else
    printf "%b disabled%b\n" "${GREEN}" "${NC}"
  fi
}
17 |
xdebug_find_file() {
  # Locate the Xdebug ini file across supported PHP versions, newest first.
  # Prints the path on stdout; exits 1 when no candidate file exists.
  found=""
  for php in 84 83 82 81 8; do
    ini="/etc/php${php}/conf.d/50_xdebug.ini"
    if [ -f "$ini" ]; then found="$ini"; echo "$ini"; break; fi
  done
  # Bug fix: the previous check tested [ "$php" = "8" ], which also fired
  # when the ini file WAS found under the php8 package. Test the actual
  # search result instead.
  if [ -z "$found" ]; then
    printf "%bXdebug ini file not found%b\n" "${RED}" "${NC}" && exit 1
  fi
}
27 |
xdebug_on() {
  # Activate Xdebug by uncommenting the zend_extension line in the ini
  # file given as $1.
  printf "Enabling Xdebug..."

  # Guard clause: no commented-out line means it is already active.
  if ! grep -q ';zend_extension' "$1"; then
    printf "%b already enabled%b\n" "${YELLOW}" "${NC}" && exit 0
  fi
  sed -i -e "s/;zend_extension/zend_extension/" "$1"
}
37 |
xdebug_off() {
  # Deactivate Xdebug by commenting out the zend_extension line in the ini
  # file given as $1.
  printf "Disabling Xdebug..."

  # Guard clause: a commented-out line means it is already inactive.
  if grep -q ';zend_extension' "$1"; then
    printf "%b already disabled%b\n" "${YELLOW}" "${NC}" && exit 0
  fi
  sed -i -e "s/zend_extension/;zend_extension/" "$1"
}
47 |
xdebug_reload() {
  # Reload the PHP SAPI (via the sibling reload.sh, sourced from this
  # script's own directory) so the ini change takes effect.
  SCRIPT=$(readlink -f "$0")
  SCRIPTPATH=$(dirname "$SCRIPT")
  . "$SCRIPTPATH"/reload.sh
}

# Abort on the first failing command from here on.
set -e

# Dispatch on the requested action; anything else prints usage and fails.
case "$ACTION" in
  on|off) xdebug_"$ACTION" "$(xdebug_find_file)" && xdebug_reload && xdebug_status ;;
  status) printf "Xdebug status..." && xdebug_status ;;
  *) printf "%bRequires [on|off|status] argument%b\n" "${RED}" "${NC}" && exit 1 ;;
esac
61 |
--------------------------------------------------------------------------------
/scripts/mirroring/.gitlab-ci.mirroring_example.yml:
--------------------------------------------------------------------------------
# Example of .gitlab-ci.yml jobs
# Use with scripts/mirroring/mirror_current_branch.sh

stages:
  - mirror

# Shared settings for all mirroring jobs: run in the "mirror" stage, never
# block the pipeline, and retry twice on transient failures.
.mirroring_template: &mirroring_template
  stage: mirror
  allow_failure: true
  retry:
    max: 2

Mirror to repo XXX:
  <<: *mirroring_template
  script:
    - echo " - Start of CI script"
    - date; pwd; ls -lah;
    # Install the deploy key so the runner can push to the mirror over SSH.
    - mkdir -p ~/.ssh
    - ls -lah ~
    - echo "$MIRRORING_REMOTE_REPO_PRIVATE_KEY" > ~/.ssh/id_rsa
    - chmod 0600 ~/.ssh/id_rsa
    - ls -lah ~/.ssh
    - apk add --no-cache openssh-client git rsync
    - ssh-keyscan -H "$MIRRORING_REMOTE_REPO_IP" >> ~/.ssh/known_hosts
    - git version
    - rsync --version
    - export MIRRORING_TARGET_GIT_REPO="${MIRRORING_REMOTE_REPO_URL_1}" # Gitlab custom variable to update for each repo to mirror
    - echo -e "MIRRORING_TARGET_GIT_REPO = $MIRRORING_TARGET_GIT_REPO"
    - export MIRRORING_REMOTE_REPO_TYPE="${MIRRORING_REMOTE_REPO_TYPE}"
    - echo -e "MIRRORING_REMOTE_REPO_TYPE = $MIRRORING_REMOTE_REPO_TYPE"
    - chmod +x scripts/mirroring/mirror_current_branch.sh
    - ./scripts/mirroring/mirror_current_branch.sh
    - echo " - End of CI script"

Mirror to repo YYY: # Job can then be duplicated to clone to multiple repos (just use a different MIRRORING_REMOTE_REPO_URL_N variable for each)
  ...
37 |
38 |
--------------------------------------------------------------------------------
/scripts/mirroring/README.md:
--------------------------------------------------------------------------------
1 | # Mirroring script
2 |
3 | ## What
4 |
5 | - This script mirrors the current branch from a repo to another one using a Gitlab CI job
6 | - You can also mirror to multiple repositories at the same time using multiple jobs in the same pipeline
- Additionally, branches that no longer exist in the local repo are deleted from the remote repo(s)
8 |
9 | ## Why
10 |
Why not use the built-in mirroring functionality that GitLab CI and other repository services usually offer?
12 |
13 | - Because it doesn't work when your git repo is protected behind a basic authentication
- Because mirroring to multiple repos at once is sometimes a premium (paid) feature
15 |
16 | ## Setup
17 |
18 | 2 files are required :
19 | - `.gitlab-ci.yml`
20 | - `mirror_current_branch.sh`
21 |
22 | 1. Define a mirroring CI job like "Mirror to repo XXX" in .gitlab-ci.yml, as shown in .gitlab-ci.mirroring_example.yml
23 | 1. In Gitlab UI, add the following custom CI/CD variables :
24 | - MIRRORING_REMOTE_REPO_IP : IP or domain name of target git repos
25 | - MIRRORING_REMOTE_REPO_PRIVATE_KEY : SSH private key matching public key added to git user
26 | - MIRRORING_REMOTE_REPO_TYPE : Possible values : "PLATFORM.SH" only for now, or leave empty if appropriate
27 | - MIRRORING_REMOTE_REPO_URL_1 : Git repo to which mirror current branch (you can have multiple ones)
28 | - GIT_USER_EMAIL : Email to be used by git user (used to commit)
29 | - GIT_USER_NAME : Name to be used by git user (used to commit)
30 |
31 |
--------------------------------------------------------------------------------
/scripts/mirroring/mirror_current_branch.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# Use with Gitlab CI jobs from scripts/mirroring/.gitlab-ci.mirroring_example.yml
# Mirrors the current branch (and propagates branch deletions) to an
# external git repository.
set -x
echo -e "\n- Start of mirroring script"

# Defining functions # For local use only, NOT FOR USE IN CI

# Print the name of the currently checked-out branch.
CURRENT_BRANCH_FUNC()
{
  git rev-parse --abbrev-ref HEAD
}


# Print the message of the latest commit.
CURRENT_COMMIT_FUNC()
{
  git log -1 --pretty=%B
}
18 |
19 | # Defining variables
20 | echo -e "- Defining variables...\n"
21 |
22 | MIRRORING_DIR=$(pwd)/files_to_mirror
23 | echo -e "MIRRORING_DIR = $MIRRORING_DIR"
24 |
25 | # MIRRORING_TARGET_GIT_REPO=XXX@XXX.git # For local use only, NOT FOR USE IN CI
26 | # For CI use, var is moved to CI job itself, so that same script can be used to clone on multiple repos
27 | echo -e "MIRRORING_TARGET_GIT_REPO = $MIRRORING_TARGET_GIT_REPO"
28 |
29 | # CURRENT_BRANCH=$(CURRENT_BRANCH_FUNC) # For local use only, NOT FOR USE IN CI
30 | CURRENT_BRANCH="$CI_COMMIT_REF_NAME" # For CI use only, using Gitlab predefined variable
31 | echo -e "CURRENT_BRANCH = $CURRENT_BRANCH"
32 |
33 | # CURRENT_COMMIT=$(CURRENT_COMMIT_FUNC) # For local use only, NOT FOR USE IN CI
34 | CURRENT_COMMIT="$CI_COMMIT_MESSAGE" # For CI use only, using Gitlab predefined variable
35 | echo -e "CURRENT_COMMIT = $CURRENT_COMMIT"
36 |
37 | # GIT_USER_EMAIL="XXX@XXX.com" # For local use only, NOT FOR USE IN CI
38 | echo -e "GIT_USER_EMAIL = $GIT_USER_EMAIL" # For CI use only, using Gitlab custom variable
39 |
40 | # GIT_USER_NAME="XXX CI/CD" # For local use only, NOT FOR USE IN CI
41 | echo -e "GIT_USER_NAME = $GIT_USER_NAME" # For CI use only, using Gitlab custom variable
42 |
43 | # Saving list of branches in GitLab before switching directory
44 | echo -e "- Fetching branches of current repo...\n"
45 | git fetch --quiet 1> /dev/null
46 | LOCAL_BRANCHES=$(git branch -r)
47 |
48 | # Preparing mirrorring dir
49 | echo -e "- Preparing mirrorring dir...\n"
50 | mkdir "$MIRRORING_DIR"
51 | cd "$MIRRORING_DIR"
52 |
53 | # Initialising external git repo
54 | echo -e "- Initialising external git repo...\n"
55 | git init && git config --local core.excludesfile false && git config --local core.fileMode true
56 | git remote add origin $MIRRORING_TARGET_GIT_REPO
57 | git fetch origin
58 | git checkout -b $CURRENT_BRANCH
59 | git config --local user.email "$GIT_USER_EMAIL"
60 | git config --local user.name "$GIT_USER_NAME"
61 |
62 |
# Cleaning orphan branches in remote repo, compared to local repo
echo "- Syncing git branches..."
REMOTE_BRANCHES=$(git branch -r)
# Branches present remotely but absent locally, with the "origin/" prefix
# stripped (local branch list was joined into a \|-separated grep pattern).
REMOVED_BRANCHES=$(echo "$REMOTE_BRANCHES" | grep -v "$(echo "$LOCAL_BRANCHES" | sed ':a;N;$!ba;s/\n/\\|/g')" | sed 's/origin\///g;s/\n/ /g')

if [ ! -z "$REMOVED_BRANCHES" ]
then
  echo "- Removing branches from remote git repo :"
  echo "$REMOVED_BRANCHES"
  # Bug fix: "=" is the string comparison operator; the previous "-eq" is
  # integer-only and errors out on these string operands.
  if [ "$MIRRORING_REMOTE_REPO_TYPE" = "PLATFORM.SH" ]; then
    echo "Use platform.sh CLI to remove environments along with git branches"
    curl -sS https://platform.sh/cli/installer | php
    # $REMOVED_BRANCHES intentionally unquoted: word-splits into one
    # argument per branch name.
    "$HOME/"'.platformsh/bin/platform' environment:delete --delete-branch -y $REMOVED_BRANCHES
  else
    echo "Use git to remove branches"
    git branch -D $REMOVED_BRANCHES
    git push origin --delete $REMOVED_BRANCHES
  fi
fi
82 |
83 |
84 | # Copying files to mirrorring dir
85 | echo -e "- Copying files to mirrorring dir...\n"
86 | rsync -av --quiet --progress ../. . --exclude .git/ --exclude files_to_mirror/
87 |
88 |
89 | # Making sure everything needed will be included in commit
90 | echo -e "- Making sure everything needed will be included in commit...\n"
91 | mv .gitignore .gitig
92 | echo -e "- Deleting all .gitignore files except root one...\n"
93 | find . -name '.gitignore' -type f | wc -l
94 | find . -name '.gitignore' -type f -exec rm {} +
95 | mv .gitig .gitignore
96 |
97 | echo -e "- Deleting all .git directories except root one...\n"
98 | mv .git .got
99 | find . -name '.git' -type d | wc -l
100 | find . -name '.git' -type d -exec rm -rf {} +
101 | mv .got .git
102 |
103 |
104 | # Preventing platform.sh error "Application name 'app' is not unique"
105 | if [ "$MIRRORING_REMOTE_REPO_TYPE" -eq "PLATFORM.SH" ]; then
106 | echo -e "- Preventing platform.sh error "Application name 'app' is not unique"...\n"
107 | sed -i "s|name: 'app'|name: 'XXX'|g" ../.platform.app.yaml
108 | fi
109 |
110 |
111 | # Commiting to external repo
112 | echo -e "- Commiting to external repo...\n"
113 | git add -A 1> /dev/null
114 | git status -s
115 | git commit --quiet -m "$CURRENT_COMMIT"
116 | git push origin $CURRENT_BRANCH --quiet -f
117 |
118 |
119 | # Cleaning mirrorring dir
120 | echo -e "- Cleaning mirrorring dir...\n"
121 | cd ..
122 | rm -rf "$MIRRORING_DIR"
123 |
124 | echo -e "- End of mirroring script"
125 |
--------------------------------------------------------------------------------
/scripts/multisite/.gitlab-ci.split_switch_example.yml:
--------------------------------------------------------------------------------
# Example of .gitlab-ci.yml jobs
# Use with config_split.mk and config_split_disable_all.sh

## Overhead config

stages:
  - deploy

# Tags defines which runner to use (expected shell runner)
.ra_tags: &ra_tags
  tags:
    - XXX # Mandatory, should equal to tag of available runner server with docker + compose + traefik

.ra_only: &ra_only
  only:
    - branches

## Multisite config

# Manual job: revert the review app to the shared ("default") config, no split.
Enable split default:
  stage: deploy
  environment:
    url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
    name: review/$CI_COMMIT_REF_NAME
    on_stop: stop_review
  script:
    - echo "Starting job script in ${BUILD_DIR}"
    - cd ${BUILD_DIR}
    - pwd
    - make split default
  when: manual
  <<: *ra_tags
  <<: *ra_only

# Manual job: switch the review app to the "first" site's config split.
Enable split first:
  stage: deploy
  environment:
    url: https://${CI_ENVIRONMENT_SLUG}-${CI_PROJECT_PATH_SLUG}.${REVIEW_DOMAIN}
    name: review/$CI_COMMIT_REF_NAME
    on_stop: stop_review
  script:
    - echo "Starting job script in ${BUILD_DIR}"
    - cd ${BUILD_DIR}
    - pwd
    - make split first # Vary for each site
  when: manual
  <<: *ra_tags
  <<: *ra_only

Enable split second: # Job can then be duplicated to switch between multiple sets of config (just use a different make split command for each)
  ...
53 |
--------------------------------------------------------------------------------
/scripts/multisite/README.md:
--------------------------------------------------------------------------------
1 | # Multisite script
2 |
3 | ## What
4 |
5 | - These are scripts and commands to facilitate the use of different set of config in a multisite setup
- It can be used locally using make commands, as well as in GitLab CI/CD pipelines using manual jobs
7 |
8 | ## Why
9 |
10 | - Being able to quickly switch from one set of config to another is very much useful for testing on a multisite Drupal setup
11 |
12 | ## Setup
13 |
14 | 3 files are required :
15 | - `.gitlab-ci.yml`
16 | - `config_split.mk`
17 | - `config_split_disable_all.sh`
18 |
19 | 1. Install and enable config_split module :
20 | - `composer require drupal/config_split`
21 | - `drush en -y config_split`
22 | 1. In Drupal UI, create split(s) for site where config vary
23 | - Do not create a split for "default" case, in which all shared config should be stored
24 | 1. Create the config directories required by created splits
1. Define a CI job "Enable split default" in .gitlab-ci.yml, as shown in .gitlab-ci.split_switch_example.yml
26 | 1. Define additional CI jobs like "Enable split first" in .gitlab-ci.yml for each split
27 | 1. Move files to scripts/makefile/ directory :
28 | - `mv scripts/multisite/config_split.mk scripts/makefile/`
29 | - `mv scripts/multisite/config_split_disable_all.sh scripts/makefile/`
30 |
31 | ## Usage
32 |
33 | Locally you can use commands like :
34 | - `make split first` to simulate site "first" of multisite
35 | - `make split default` to revert back to no split
36 |
On GitLab CI/CD, Review Apps will be built using the default split :
38 | - Manually click on manual jobs "Enable split first" to simulate site "first" of multisite
39 | - Manually click on manual job "Enable split default" to revert back to no split
40 |
41 |
--------------------------------------------------------------------------------
/scripts/multisite/config_split.mk:
--------------------------------------------------------------------------------
1 |
2 | .PHONY: split
3 |
4 | # If the first argument is "split"
5 | ifeq (split,$(firstword $(MAKECMDGOALS)))
6 | # Then use next strings as arguments
7 | ARGS := $(wordlist 2,$(words $(MAKECMDGOALS)),$(MAKECMDGOALS))
8 | endif
9 |
10 | ## Enable a specific split of config (config_split)
11 | split:
12 | $($@,$(MAKECMDGOALS))
13 |
14 | # Message if no argument passed
15 | ifeq ($(ARGS),)
16 | @printf "\nMachine name of split to enable is expected as argument \nFor exemple : \n - make split SPLIT_MACHINE_NAME // To enable SPLIT_MACHINE_NAME split\nor\n - make split default // To enable default config with no split\nor\n - make split list // To list available splits\n"
17 | @printf "\n"
18 | @exit 1
19 | else
20 |
21 | # List available splits
22 | ifeq ($(ARGS),list)
23 | ifneq ("$(wildcard scripts/makefile/config_split_list_all.sh)","")
24 | @$(call php, /bin/sh ./scripts/makefile/config_split_list_all.sh)
25 | else
26 | @echo "- scripts/makefile/config_split_list_all.sh file does not exist"
27 | @exit 1
28 | endif
29 | @exit 1
30 | else
31 |
32 | # Split is found
33 | @echo "YAY $(ARGS)"
34 | # Disabling all active splits
35 | @echo "Disabling all active splits:"
36 | ifneq ("$(wildcard scripts/makefile/config_split_disable_all.sh)","")
37 | @$(call php, /bin/sh ./scripts/makefile/config_split_disable_all.sh)
38 | else
39 | @echo "- scripts/makefile/config_split_disable_all.sh file does not exist"
40 | @exit 1
41 | endif
42 |
43 | # Enabling selected split
44 | @echo "Enabling selected split : $(ARGS) ..."
45 | ifeq ($(ARGS),default)
46 | @echo "No specific split to enable for $(ARGS) config..."
47 | else
48 | @$(call php, drush config-set config_split.config_split.$(ARGS) status 1 -y)
49 | endif
50 | endif
51 | endif
52 |
53 | # Executing the rest of deploy commands
54 | @echo "Importing $(ARGS) config..."
55 | @$(call php, drush cim -y)
56 | @make localize
57 | @echo "Clearing cache..."
58 | @$(call php, drush cr)
59 | @make info
60 |
61 |
--------------------------------------------------------------------------------
/scripts/multisite/config_split_disable_all.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# Disable every config_split entity known to the site, then rebuild caches.

# Parsing command
# NOTE: the leading "echo" is deliberate — $($PARSING_CMD) first expands to
# the drush command line text, which the enclosing $(...) then executes.
PARSING_CMD='echo drush config:status --state=Any --format=list'

# Entity to parse (see PARSING_CMD for more)
CONFIG_TO_PARSE=config_split.config_split

# Count entities
CONFIG_COUNT=$($($PARSING_CMD) | grep -c $CONFIG_TO_PARSE)

# List entities
# Third dot-separated segment of each config name = split machine name.
CONFIG_LIST=$($($PARSING_CMD) | grep $CONFIG_TO_PARSE | awk -F "." '{print $3}')


# Looking for splits
echo -e "- Looking for splits of config..."

if [ "$CONFIG_COUNT" -gt "0" ]; then

  echo -e "- Some splits were found : Disabling all splits..."

  # Set status=0 (disabled) on every discovered split.
  for bundles in $CONFIG_LIST; do
    drush config-set config_split.config_split.$bundles status 0 -y
  done

else
  printf "- \033[1mNo split of config was\033[0m found\n"
fi

echo -e "- Clearing cache..."
drush cr
33 |
--------------------------------------------------------------------------------
/scripts/multisite/config_split_list_all.sh:
--------------------------------------------------------------------------------
#!/usr/bin/env sh
# List the machine names of every config_split entity known to the site.

# Parsing command
# NOTE: the leading "echo" is deliberate — $($PARSING_CMD) first expands to
# the drush command line text, which the enclosing $(...) then executes.
PARSING_CMD='echo drush config:status --state=Any --format=list'

# Entity to parse (see PARSING_CMD for more)
CONFIG_TO_PARSE=config_split.config_split

# Count entities
CONFIG_COUNT=$($($PARSING_CMD) | grep -c $CONFIG_TO_PARSE)

# List entities
# Third dot-separated segment of each config name = split machine name.
CONFIG_LIST=$($($PARSING_CMD) | grep $CONFIG_TO_PARSE | awk -F "." '{print $3}')


# Looking for splits

if [ "$CONFIG_COUNT" -gt "0" ]; then

  echo -e "- The following splits are available : "

  for bundles in $CONFIG_LIST; do
    echo "  - $bundles"
  done
  printf "\n"

else
  printf "- \033[1mNo split of config was\033[0m found\n"
fi
30 |
--------------------------------------------------------------------------------
/settings/settings.dev.php:
--------------------------------------------------------------------------------
1 | Custom block "
10 | "types, each with its own fields and display settings. After "
11 | "creating a block, place it in a region from the Block layout page."
13 | msgstr " "
14 |
--------------------------------------------------------------------------------
/translations/fr.po:
--------------------------------------------------------------------------------
1 | # French translation of interface strings
2 |
3 | # These empty lines above are required for import to work
4 | msgid ""
5 | msgstr ""
6 | "Plural-Forms: nplurals=2; plural=(n>1);\n"
7 |
8 | # https://www.drupal.org/project/block_content_permissions/issues/2920739#comment-13406815
9 | msgid ""
10 | "Blocks in the block library belong to Custom block "
11 | "types, each with its own fields and display settings. After "
12 | "creating a block, place it in a region from the Block layout page."
14 | msgstr " "
15 |
16 | msgid "Send"
17 | msgstr "Envoyer"
18 |
19 | msgid "in"
20 | msgstr "à"
21 |
--------------------------------------------------------------------------------
/web/modules/custom/README.txt:
--------------------------------------------------------------------------------
Store custom modules here.
2 |
--------------------------------------------------------------------------------
/web/modules/custom/project_default_content/README.md:
--------------------------------------------------------------------------------
1 | # Skilld default content
2 |
3 | ## How to use ?
4 |
5 | 1. Enable module default_content. It should be already installed by composer.
6 | 2. Create content manually on site using UID1 admin user
7 | 3. Use `drush dcer` commands (brought by module) to export selected content :
8 | ```
9 | drush dcer node --folder=modules/custom/project_default_content/content
10 | drush dcer user --folder=modules/custom/project_default_content/content
11 | drush dcer block_content --folder=modules/custom/project_default_content/content
12 | drush dcer menu_link_content --folder=modules/custom/project_default_content/content
13 | drush dcer taxonomy_term --folder=modules/custom/project_default_content/content
14 | drush dcer file --folder=modules/custom/project_default_content/content
15 | drush dcer media --folder=modules/custom/project_default_content/content
16 | ```
17 | 4. Find out what is UUID of admin user : `ls web/modules/custom/project_default_content/content/user`
18 | - `4bad48eb-ff5b-45b4-b30c-ecabff09591a` : UUID of default_content_author user
19 | - Another UUID should be listed here : UUID of admin user
20 | 5. Delete json file of admin user :
21 | - `rm web/modules/custom/project_default_content/content/user/UUID_OF_ADMIN_USER.json`
22 | 6. Use _sed_ commands to replace UID and UUID values of admin author in files of all exported content :
23 | - `cd web/modules/custom/project_default_content/content/`
24 | - `find . -type f -exec sed -i 's/\/user\\\/1/\/user\\\/2/g' {} +`
25 | - `find . -type f -exec sed -i 's/UUID_OF_ADMIN_USER/4bad48eb-ff5b-45b4-b30c-ecabff09591a/g' {} +`
7. Exported default content will be created at build time, and its author should be `default_content_author`
27 |
28 |
--------------------------------------------------------------------------------
/web/modules/custom/project_default_content/content/shortcut/ebb7c60a-b052-41ba-9a82-0ed73475a33b.yml:
--------------------------------------------------------------------------------
1 | _meta:
2 | version: '1.0'
3 | entity_type: shortcut
4 | uuid: ebb7c60a-b052-41ba-9a82-0ed73475a33b
5 | bundle: default
6 | default_langcode: en
7 | default:
8 | title:
9 | -
10 | value: 'Add content'
11 | weight:
12 | -
13 | value: -20
14 | link:
15 | -
16 | uri: 'internal:/node/add'
17 | title: ''
18 | options: { }
19 |
--------------------------------------------------------------------------------
/web/modules/custom/project_default_content/project_default_content.info.yml:
--------------------------------------------------------------------------------
1 | name: Project default content
2 | description: Creates default content fixtures for the project. This module comes with a default user which should be used as author of all default content. Check README.md for more info.
3 | package: Custom
4 |
5 | type: module
6 | core_version_requirement: ^9 || ^10
7 |
8 | dependencies:
9 | - default_content:default_content
10 | - drupal:block_content
11 | - drupal:content_translation
12 |
--------------------------------------------------------------------------------
/web/themes/custom/README.txt:
--------------------------------------------------------------------------------
Store custom themes here.
2 |
--------------------------------------------------------------------------------