├── .codeql └── codeql-config.yml ├── .env.sample ├── .github └── workflows │ ├── backport.yml │ └── codeql.yml ├── .gitignore ├── .override_dev_env.sample ├── Dockerfile ├── README.md ├── create-envfile.py ├── dev_config.yml ├── docker-build.sh ├── docker-compose.yml ├── docker-purge.sh ├── docker ├── geoserver │ └── Dockerfile ├── geoserver_data │ └── Dockerfile ├── letsencrypt │ └── Dockerfile ├── nginx │ └── Dockerfile └── postgresql │ └── Dockerfile └── src ├── Makefile ├── README.md ├── celery-cmd ├── celery.sh ├── dev_config.yml ├── entrypoint.sh ├── fixtures ├── apikey_docker.json ├── default_oauth_apps.json ├── default_oauth_apps_docker.json ├── django_celery_beat.json ├── initial_data.json ├── sample_admin.json └── sites_template.json ├── manage.py ├── manage.sh ├── manage_dev.sh.sample ├── pavement.py ├── paver.sh ├── paver_dev.sh.sample ├── project_name ├── __init__.py ├── apps.py ├── br │ ├── backup.sh │ ├── restore.sh │ └── settings_docker.ini ├── celeryapp.py ├── locale │ └── .gitkeep ├── settings.py ├── static │ ├── README │ ├── css │ │ └── site_base.css │ ├── gulpfile.js │ ├── img │ │ ├── README │ │ ├── bing_aerial_w_labels.png │ │ ├── bing_canvas_dark.png │ │ └── bing_road_on_demand.png │ ├── js │ │ └── README │ ├── less │ │ └── site_base.less │ └── package.json ├── templates │ └── geonode-mapstore-client │ │ └── _geonode_config.html ├── urls.py ├── version.py └── wsgi.py ├── requirements.txt ├── setup.py ├── tasks.py ├── uwsgi.ini └── wait-for-databases.sh /.codeql/codeql-config.yml: -------------------------------------------------------------------------------- 1 | languages: ${{ matrix.language }} 2 | 3 | paths-ignore: 4 | - src/project_name/__init__.py -------------------------------------------------------------------------------- /.env.sample: -------------------------------------------------------------------------------- 1 | COMPOSE_PROJECT_NAME={{project_name}} 2 | # See https://github.com/containers/podman/issues/13889 3 | # DOCKER_BUILDKIT=0 4 | DOCKER_ENV=production 5 | # See https://github.com/geosolutions-it/geonode-generic/issues/28 6 | # to see why we force API version to 1.24 7 | DOCKER_API_VERSION="1.24" 8 | BACKUPS_VOLUME_DRIVER=local 9 | 10 | GEONODE_BASE_IMAGE_VERSION=master 11 | NGINX_BASE_IMAGE_VERSION=1.25.3-latest 12 | LETSENCRYPT_BASE_IMAGE_VERSION=2.6.0-latest 13 | GEOSERVER_BASE_IMAGE_VERSION=2.24.4-latest 14 | GEOSERVER_DATA_BASE_IMAGE_VERSION=2.24.4-latest 15 | POSTGRES_BASE_IMAGE_VERSION=15.3-latest 16 | 17 | C_FORCE_ROOT=1 18 | FORCE_REINIT=false 19 | INVOKE_LOG_STDOUT=true 20 | 21 | # LANGUAGE_CODE=it-it 22 | # LANGUAGES=(('en-us','English'),('it-it','Italiano')) 23 | 24 | DJANGO_SETTINGS_MODULE={{project_name}}.settings 25 | GEONODE_INSTANCE_NAME=geonode 26 | 27 | # ################# 28 | # backend 29 | # ################# 30 | POSTGRES_USER=postgres 31 | POSTGRES_PASSWORD={pgpwd} 32 | GEONODE_DATABASE={{project_name}} 33 | GEONODE_DATABASE_USER={{project_name}} 34 | GEONODE_DATABASE_PASSWORD={dbpwd} 35 | GEONODE_GEODATABASE={{project_name}}_data 36 | GEONODE_GEODATABASE_USER={{project_name}}_data 37 | GEONODE_GEODATABASE_PASSWORD={geodbpwd} 38 | GEONODE_DATABASE_SCHEMA=public 39 | GEONODE_GEODATABASE_SCHEMA=public 40 | DATABASE_HOST=db 41 | DATABASE_PORT=5432 42 | DATABASE_URL=postgis://{{project_name}}:{dbpwd}@db:5432/{{project_name}} 43 | GEODATABASE_URL=postgis://{{project_name}}_data:{geodbpwd}@db:5432/{{project_name}}_data 44 | GEONODE_DB_CONN_MAX_AGE=0 45 | GEONODE_DB_CONN_TOUT=5 46 | DEFAULT_BACKEND_DATASTORE=datastore 47 | 
BROKER_URL=amqp://guest:guest@rabbitmq:5672/ 48 | CELERY_BEAT_SCHEDULER=celery.beat:PersistentScheduler 49 | ASYNC_SIGNALS=True 50 | 51 | SITEURL={siteurl}/ 52 | 53 | ALLOWED_HOSTS="['django', '{hostname}']" 54 | 55 | # Data Uploader 56 | DEFAULT_BACKEND_UPLOADER=geonode.importer 57 | TIME_ENABLED=True 58 | MOSAIC_ENABLED=False 59 | HAYSTACK_SEARCH=False 60 | HAYSTACK_ENGINE_URL=http://elasticsearch:9200/ 61 | HAYSTACK_ENGINE_INDEX_NAME=haystack 62 | HAYSTACK_SEARCH_RESULTS_PER_PAGE=200 63 | 64 | # ################# 65 | # nginx 66 | # HTTPD Server 67 | # ################# 68 | GEONODE_LB_HOST_IP=django 69 | GEONODE_LB_PORT=8000 70 | NGINX_BASE_URL={siteurl} 71 | 72 | # IP or domain name and port where the server can be reached on HTTPS (leave HOST empty if you want to use HTTP only) 73 | # port where the server can be reached on HTTPS 74 | HTTP_HOST={http_host} 75 | HTTPS_HOST={https_host} 76 | 77 | HTTP_PORT=80 78 | HTTPS_PORT=443 79 | 80 | # Let's Encrypt certificates for https encryption. You must have a domain name as HTTPS_HOST (doesn't work 81 | # with an ip) and it must be reachable from the outside. This can be one of the following : 82 | # disabled : we do not get a certificate at all (a placeholder certificate will be used) 83 | # staging : we get staging certificates (are invalid, but allow to test the process completely and have much higher limit rates) 84 | # production : we get a normal certificate (default) 85 | LETSENCRYPT_MODE={letsencrypt_mode} 86 | # LETSENCRYPT_MODE=staging 87 | # LETSENCRYPT_MODE=production 88 | 89 | RESOLVER=127.0.0.11 90 | 91 | # ################# 92 | # geoserver 93 | # ################# 94 | GEOSERVER_LB_HOST_IP=geoserver 95 | GEOSERVER_LB_PORT=8080 96 | GEOSERVER_WEB_UI_LOCATION={siteurl}/geoserver/ 97 | GEOSERVER_PUBLIC_LOCATION={siteurl}/geoserver/ 98 | GEOSERVER_LOCATION=http://${GEOSERVER_LB_HOST_IP}:${GEOSERVER_LB_PORT}/geoserver/ 99 | GEOSERVER_ADMIN_USER=admin 100 | GEOSERVER_ADMIN_PASSWORD={geoserverpwd} 101 | 102 | OGC_REQUEST_TIMEOUT=30 103 | OGC_REQUEST_MAX_RETRIES=1 104 | OGC_REQUEST_BACKOFF_FACTOR=0.3 105 | OGC_REQUEST_POOL_MAXSIZE=10 106 | OGC_REQUEST_POOL_CONNECTIONS=10 107 | 108 | # Java Options & Memory 109 | ENABLE_JSONP=true 110 | outFormat=text/javascript 111 | GEOSERVER_JAVA_OPTS=-Djava.awt.headless=true -Xms4G -Xmx4G -Dgwc.context.suffix=gwc -XX:+UnlockDiagnosticVMOptions -XX:+LogVMOutput -XX:LogFile=/var/log/jvm.log -XX:PerfDataSamplingInterval=500 -XX:SoftRefLRUPolicyMSPerMB=36000 -XX:-UseGCOverheadLimit -XX:ParallelGCThreads=4 -Dfile.encoding=UTF8 -Djavax.servlet.request.encoding=UTF-8 -Djavax.servlet.response.encoding=UTF-8 -Duser.timezone=GMT -Dorg.geotools.shapefile.datetime=false -DGS-SHAPEFILE-CHARSET=UTF-8 -DGEOSERVER_CSRF_DISABLED=true -DPRINT_BASE_URL={siteurl}/geoserver/pdf -DALLOW_ENV_PARAMETRIZATION=true -Xbootclasspath/a:/usr/local/tomcat/webapps/geoserver/WEB-INF/lib/marlin-0.9.3-Unsafe.jar -Dsun.java2d.renderer=org.marlin.pisces.MarlinRenderingEngine 112 | 113 | # ################# 114 | # Security 115 | # ################# 116 | # Admin Settings 117 | # 118 | # ADMIN_PASSWORD is used to overwrite the GeoNode admin password **ONLY** the first time 119 | # GeoNode is run. If you need to overwrite it again, you need to set the env var FORCE_REINIT, 120 | # otherwise the invoke updateadmin task will be skipped and the current password already stored 121 | # in DB will honored. 
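# A minimal sketch (placeholder value, not a real credential): to force a new
# admin password on an already-initialized instance, set FORCE_REINIT=true
# together with the new ADMIN_PASSWORD for one restart, then revert
# FORCE_REINIT to false, e.g.:
# FORCE_REINIT=true
# ADMIN_PASSWORD=my-new-admin-password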
122 | 123 | ADMIN_USERNAME=admin 124 | ADMIN_PASSWORD={geonodepwd} 125 | ADMIN_EMAIL={email} 126 | 127 | # EMAIL Notifications 128 | EMAIL_ENABLE=False 129 | DJANGO_EMAIL_BACKEND=django.core.mail.backends.smtp.EmailBackend 130 | DJANGO_EMAIL_HOST=localhost 131 | DJANGO_EMAIL_PORT=25 132 | DJANGO_EMAIL_HOST_USER= 133 | DJANGO_EMAIL_HOST_PASSWORD= 134 | DJANGO_EMAIL_USE_TLS=False 135 | DJANGO_EMAIL_USE_SSL=False 136 | DEFAULT_FROM_EMAIL='{email}' # eg Company 137 | 138 | # Session/Access Control 139 | LOCKDOWN_GEONODE=False 140 | X_FRAME_OPTIONS="SAMEORIGIN" 141 | SESSION_ENGINE=django.contrib.sessions.backends.cached_db 142 | SESSION_EXPIRED_CONTROL_ENABLED=True 143 | DEFAULT_ANONYMOUS_VIEW_PERMISSION=True 144 | DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION=True 145 | 146 | CORS_ALLOW_ALL_ORIGINS=True 147 | GEOSERVER_CORS_ENABLED=True 148 | GEOSERVER_CORS_ALLOWED_ORIGINS=* 149 | GEOSERVER_CORS_ALLOWED_METHODS=GET,POST,PUT,DELETE,HEAD,OPTIONS 150 | GEOSERVER_CORS_ALLOWED_HEADERS=* 151 | 152 | # Users Registration 153 | ACCOUNT_OPEN_SIGNUP=True 154 | ACCOUNT_EMAIL_REQUIRED=True 155 | ACCOUNT_APPROVAL_REQUIRED=False 156 | ACCOUNT_CONFIRM_EMAIL_ON_GET=False 157 | ACCOUNT_EMAIL_VERIFICATION=none 158 | ACCOUNT_AUTHENTICATION_METHOD=username_email 159 | AUTO_ASSIGN_REGISTERED_MEMBERS_TO_REGISTERED_MEMBERS_GROUP_NAME=True 160 | 161 | # OAuth2 162 | OAUTH2_API_KEY= 163 | OAUTH2_CLIENT_ID={clientid} 164 | OAUTH2_CLIENT_SECRET={clientsecret} 165 | 166 | # GeoNode APIs 167 | API_LOCKDOWN=False 168 | TASTYPIE_APIKEY= 169 | 170 | # ################# 171 | # Production and 172 | # Monitoring 173 | # ################# 174 | DEBUG={debug} 175 | 176 | SECRET_KEY='{secret_key}' 177 | 178 | STATIC_ROOT=/mnt/volumes/statics/static/ 179 | MEDIA_ROOT=/mnt/volumes/statics/uploaded/ 180 | ASSETS_ROOT=/mnt/volumes/statics/assets/ 181 | GEOIP_PATH=/mnt/volumes/statics/geoip.db 182 | 183 | CACHE_BUSTING_STATIC_ENABLED=False 184 | 185 | MEMCACHED_ENABLED=False 186 | MEMCACHED_BACKEND=django.core.cache.backends.memcached.PyLibMCCache 187 | MEMCACHED_LOCATION=memcached:11211 188 | MEMCACHED_LOCK_EXPIRE=3600 189 | MEMCACHED_LOCK_TIMEOUT=10 190 | # 191 | # Options for memcached binary, e.g. 
-vvv to log all requests and cache hits 192 | # 193 | MEMCACHED_OPTIONS= 194 | 195 | MAX_DOCUMENT_SIZE=200 196 | CLIENT_RESULTS_LIMIT=5 197 | API_LIMIT_PER_PAGE=1000 198 | 199 | # GIS Client 200 | GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY=mapstore 201 | MAPBOX_ACCESS_TOKEN= 202 | BING_API_KEY= 203 | GOOGLE_API_KEY= 204 | 205 | # Monitoring 206 | MONITORING_ENABLED=False 207 | MONITORING_DATA_TTL=365 208 | USER_ANALYTICS_ENABLED=True 209 | USER_ANALYTICS_GZIP=True 210 | CENTRALIZED_DASHBOARD_ENABLED=False 211 | MONITORING_SERVICE_NAME=local-geonode 212 | MONITORING_HOST_NAME=geonode 213 | 214 | # Other Options/Contribs 215 | MODIFY_TOPICCATEGORY=True 216 | AVATAR_GRAVATAR_SSL=True 217 | EXIF_ENABLED=True 218 | CREATE_LAYER=True 219 | FAVORITE_ENABLED=True 220 | 221 | # Advanced Workflow 222 | RESOURCE_PUBLISHING=False 223 | ADMIN_MODERATE_UPLOADS=False 224 | 225 | # LDAP 226 | LDAP_ENABLED=False 227 | LDAP_SERVER_URL=ldap:// 228 | LDAP_BIND_DN=uid=ldapinfo,cn=users,dc=ad,dc=example,dc=org 229 | LDAP_BIND_PASSWORD= 230 | LDAP_USER_SEARCH_DN=dc=ad,dc=example,dc=org 231 | LDAP_USER_SEARCH_FILTERSTR=(&(uid=%(user)s)(objectClass=person)) 232 | LDAP_GROUP_SEARCH_DN=cn=groups,dc=ad,dc=example,dc=org 233 | LDAP_GROUP_SEARCH_FILTERSTR=(|(cn=abt1)(cn=abt2)(cn=abt3)(cn=abt4)(cn=abt5)(cn=abt6)) 234 | LDAP_GROUP_PROFILE_MEMBER_ATTR=uniqueMember 235 | 236 | # CELERY 237 | 238 | # expressed in KB 239 | # CELERY__MAX_MEMORY_PER_CHILD="200000" 240 | # ## 241 | # Note right autoscale value must coincide with worker concurrency value 242 | # CELERY__AUTOSCALE_VALUES="15,10" 243 | # CELERY__WORKER_CONCURRENCY="10" 244 | # ## 245 | # CELERY__OPTS="--without-gossip --without-mingle -Ofair -B -E" 246 | # CELERY__BEAT_SCHEDULE="/mnt/volumes/statics/celerybeat-schedule" 247 | # CELERY__LOG_LEVEL="INFO" 248 | # CELERY__LOG_FILE="/var/log/celery.log" 249 | # CELERY__WORKER_NAME="worker1@%h" 250 | 251 | # PostgreSQL 252 | POSTGRESQL_MAX_CONNECTIONS=200 253 | 254 | # Common containers restart policy 255 | RESTART_POLICY_CONDITION="on-failure" 256 | RESTART_POLICY_DELAY="5s" 257 | RESTART_POLICY_MAX_ATTEMPTS="3" 258 | RESTART_POLICY_WINDOW=120s 259 | 260 | DEFAULT_MAX_UPLOAD_SIZE=5368709120 261 | DEFAULT_MAX_PARALLEL_UPLOADS_PER_USER=5 262 | -------------------------------------------------------------------------------- /.github/workflows/backport.yml: -------------------------------------------------------------------------------- 1 | name: Backport 2 | on: 3 | pull_request_target: 4 | types: 5 | - closed 6 | - labeled 7 | 8 | jobs: 9 | backport: 10 | name: Backport 11 | runs-on: ubuntu-latest 12 | # Only react to merged PRs for security reasons. 13 | # See https://docs.github.com/en/actions/using-workflows/events-that-trigger-workflows#pull_request_target. 
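    # In short: the job runs when a merged PR is closed, or when a label whose name contains 'backport' is added to an already-merged PR (see the `if:` expression below).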
14 | if: > 15 | github.event.pull_request.merged 16 | && ( 17 | github.event.action == 'closed' 18 | || ( 19 | github.event.action == 'labeled' 20 | && contains(github.event.label.name, 'backport') 21 | ) 22 | ) 23 | steps: 24 | - name: Backporting 25 | uses: tibdex/backport@v2 26 | with: 27 | github_token: ${{ secrets.GITHUB_TOKEN }} 28 | -------------------------------------------------------------------------------- /.github/workflows/codeql.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | 3 | on: 4 | push: 5 | branches: [ "master", 4.1.x ] 6 | pull_request: 7 | branches: [ "master", 4.1.x ] 8 | schedule: 9 | - cron: '38 4 * * 5' 10 | 11 | jobs: 12 | analyze: 13 | name: Analyze 14 | runs-on: ubuntu-latest 15 | permissions: 16 | actions: read 17 | contents: read 18 | security-events: write 19 | 20 | strategy: 21 | fail-fast: false 22 | matrix: 23 | language: [ 'javascript', 'python' ] 24 | 25 | steps: 26 | - name: Checkout repository 27 | uses: actions/checkout@v3 28 | 29 | - name: Initialize CodeQL 30 | uses: github/codeql-action/init@v2 31 | with: 32 | config-file: .codeql/codeql-config.yml 33 | 34 | - name: Perform CodeQL Analysis 35 | uses: github/codeql-action/analyze@v2 -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | # env/ 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | 26 | # Virtual environment 27 | venv/ 28 | .venv/ 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *,cover 49 | 50 | # Django stuff: 51 | *.log 52 | celerybeat-schedule.* 53 | 54 | # Sphinx documentation 55 | docs/_build/ 56 | 57 | # PyBuilder 58 | target/ 59 | 60 | # Downstream Project 61 | downloaded/ 62 | uploaded/ 63 | static_root/ 64 | development.db 65 | development.db-journal 66 | local_settings.py 67 | 68 | .idea/ 69 | .vscode/ 70 | .vagrant/ 71 | Vagrantfile 72 | /.env 73 | -------------------------------------------------------------------------------- /.override_dev_env.sample: -------------------------------------------------------------------------------- 1 | export DEBUG=True 2 | 3 | export SECRET_KEY="myv-y4#7j-d*p-__@j#*3z@!y24fz8%^z2v6atuy4bo9vqr1_a" 4 | 5 | export SITEURL=http://localhost:8000/ 6 | export ALLOWED_HOSTS="['localhost',]" 7 | 8 | export GEONODE_INSTANCE_NAME=geonode 9 | export DJANGO_SETTINGS_MODULE={{project_name}}.settings 10 | export GEONODE_DATABASE={{project_name}} 11 | export GEONODE_DATABASE_PASSWORD=geonode 12 | export GEONODE_GEODATABASE={{project_name}}_data 13 | export GEONODE_GEODATABASE_PASSWORD=geonode 14 | 15 | export DATABASE_URL=postgis://{{project_name}}:geonode@localhost:5432/{{project_name}} 16 | export GEODATABASE_URL=postgis://{{project_name}}_data:geonode@localhost:5432/{{project_name}}_data 17 | export DEFAULT_BACKEND_DATASTORE=datastore 18 | 19 | export GEOSERVER_WEB_UI_LOCATION=http://localhost:8080/geoserver/ 20 | export GEOSERVER_PUBLIC_LOCATION=http://localhost:8080/geoserver/ 21 | export GEOSERVER_LOCATION=http://localhost:8080/geoserver/ 22 | export GEOSERVER_ADMIN_USER=admin 23 | export GEOSERVER_ADMIN_PASSWORD=geoserver 24 | 25 | export OGC_REQUEST_TIMEOUT=30 26 | export OGC_REQUEST_MAX_RETRIES=1 27 | export OGC_REQUEST_BACKOFF_FACTOR=0.3 28 | export OGC_REQUEST_POOL_MAXSIZE=10 29 | export OGC_REQUEST_POOL_CONNECTIONS=10 30 | 31 | export DEFAULT_BACKEND_UPLOADER=geonode.importer 32 | export TIME_ENABLED=True 33 | export MOSAIC_ENABLED=False 34 | 35 | export ADMIN_PASSWORD=admin 36 | export ADMIN_EMAIL=admin@localhost 37 | 38 | export EMAIL_ENABLE=False 39 | export DJANGO_EMAIL_BACKEND=django.core.mail.backends.smtp.EmailBackend 40 | export DJANGO_EMAIL_HOST=localhost 41 | export DJANGO_EMAIL_PORT=25 42 | export DJANGO_EMAIL_HOST_USER= 43 | export DJANGO_EMAIL_HOST_PASSWORD= 44 | export DJANGO_EMAIL_USE_TLS=False 45 | export DJANGO_EMAIL_USE_SSL=False 46 | export DEFAULT_FROM_EMAIL="GeoNode " 47 | 48 | export LOCKDOWN_GEONODE=False 49 | export X_FRAME_OPTIONS=SAMEORIGIN 50 | export SESSION_EXPIRED_CONTROL_ENABLED=True 51 | export DEFAULT_ANONYMOUS_VIEW_PERMISSION=True 52 | export DEFAULT_ANONYMOUS_DOWNLOAD_PERMISSION=True 53 | 54 | export CORS_ALLOW_ALL_ORIGINS=True 55 | export GEOSERVER_CORS_ENABLED=True 56 | export GEOSERVER_CORS_ALLOWED_ORIGINS=* 57 | export GEOSERVER_CORS_ALLOWED_METHODS=GET,POST,PUT,DELETE,HEAD,OPTIONS 58 | export GEOSERVER_CORS_ALLOWED_HEADERS=* 59 | 60 | export ACCOUNT_OPEN_SIGNUP=True 61 | export ACCOUNT_EMAIL_REQUIRED=True 62 | export ACCOUNT_APPROVAL_REQUIRED=False 63 | export ACCOUNT_CONFIRM_EMAIL_ON_GET=False 64 | export ACCOUNT_EMAIL_VERIFICATION=none 65 | export ACCOUNT_AUTHENTICATION_METHOD=username_email 66 | 67 | export OAUTH2_API_KEY= 68 | export OAUTH2_CLIENT_ID=Jrchz2oPY3akmzndmgUTYrs9gczlgoV20YPSvqaV 69 | export 
OAUTH2_CLIENT_SECRET=rCnp5txobUo83EpQEblM8fVj3QT5zb5qRfxNsuPzCqZaiRyIoxM4jdgMiZKFfePBHYXCLd7B8NlkfDBY9HKeIQPcy5Cp08KQNpRHQbjpLItDHv12GvkSeXp6OxaUETv3 70 | 71 | export API_LOCKDOWN=False 72 | export TASTYPIE_APIKEY= 73 | 74 | export CACHE_BUSTING_STATIC_ENABLED=False 75 | 76 | export MAX_DOCUMENT_SIZE=2 77 | export CLIENT_RESULTS_LIMIT=5 78 | export API_LIMIT_PER_PAGE=1000 79 | 80 | export GEONODE_CLIENT_LAYER_PREVIEW_LIBRARY=mapstore 81 | export MAPBOX_ACCESS_TOKEN= 82 | export BING_API_KEY= 83 | export GOOGLE_API_KEY= 84 | 85 | export MONITORING_ENABLED=False 86 | export MONITORING_DATA_TTL=365 87 | export USER_ANALYTICS_ENABLED=True 88 | export USER_ANALYTICS_GZIP=True 89 | export CENTRALIZED_DASHBOARD_ENABLED=False 90 | export MONITORING_SERVICE_NAME=local-geonode 91 | export MONITORING_HOST_NAME=geonode 92 | 93 | export MODIFY_TOPICCATEGORY=True 94 | export AVATAR_GRAVATAR_SSL=True 95 | export AVATAR_PROVIDERS='avatar.providers.PrimaryAvatarProvider','avatar.providers.GravatarAvatarProvider','avatar.providers.DefaultAvatarProvider' 96 | export EXIF_ENABLED=True 97 | export CREATE_LAYER=True 98 | export FAVORITE_ENABLED=True 99 | 100 | export DEFAULT_MAX_UPLOAD_SIZE=5368709120 101 | export DEFAULT_MAX_PARALLEL_UPLOADS_PER_USER=5 102 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM geonode/geonode-base:latest-ubuntu-22.04 2 | LABEL GeoNode development team 3 | 4 | RUN mkdir -p /usr/src/{{project_name}} 5 | 6 | RUN apt-get update -y && apt-get install curl wget unzip gnupg2 locales -y 7 | 8 | RUN sed -i -e 's/# C.UTF-8 UTF-8/C.UTF-8 UTF-8/' /etc/locale.gen && \ 9 | locale-gen 10 | ENV LC_ALL C.UTF-8 11 | ENV LANG C.UTF-8 12 | 13 | # add bower and grunt command 14 | COPY src /usr/src/{{project_name}}/ 15 | WORKDIR /usr/src/{{project_name}} 16 | 17 | #COPY src/monitoring-cron /etc/cron.d/monitoring-cron 18 | #RUN chmod 0644 /etc/cron.d/monitoring-cron 19 | #RUN crontab /etc/cron.d/monitoring-cron 20 | #RUN touch /var/log/cron.log 21 | #RUN service cron start 22 | 23 | COPY src/wait-for-databases.sh /usr/bin/wait-for-databases 24 | RUN chmod +x /usr/bin/wait-for-databases 25 | RUN chmod +x /usr/src/{{project_name}}/tasks.py \ 26 | && chmod +x /usr/src/{{project_name}}/entrypoint.sh 27 | 28 | COPY src/celery.sh /usr/bin/celery-commands 29 | RUN chmod +x /usr/bin/celery-commands 30 | 31 | COPY src/celery-cmd /usr/bin/celery-cmd 32 | RUN chmod +x /usr/bin/celery-cmd 33 | 34 | # Install "geonode-contribs" apps 35 | # RUN cd /usr/src; git clone https://github.com/GeoNode/geonode-contribs.git -b master 36 | # Install logstash and centralized dashboard dependencies 37 | # RUN cd /usr/src/geonode-contribs/geonode-logstash; pip install --upgrade -e . \ 38 | # cd /usr/src/geonode-contribs/ldap; pip install --upgrade -e . 39 | 40 | RUN yes w | pip install --src /usr/src -r requirements.txt &&\ 41 | yes w | pip install -e . 
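# Note: piping `yes w` is meant to auto-answer pip's "(i)gnore, (w)ipe, (b)ackup" prompt with "wipe" when a source checkout already exists under --src /usr/src.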
42 | 43 | # Cleanup apt update lists 44 | RUN apt-get autoremove --purge &&\ 45 | apt-get clean &&\ 46 | rm -rf /var/lib/apt/lists/* 47 | 48 | # Export ports 49 | EXPOSE 8000 50 | 51 | # We provide no command or entrypoint as this image can be used to serve the django project or run celery tasks 52 | # ENTRYPOINT /usr/src/{{project_name}}/entrypoint.sh 53 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # {{ project_name|title }} 2 | 3 | GeoNode template project. Generates a django project with GeoNode support. 4 | 5 | ## Table of Contents 6 | 7 | - [Quick Docker Start](#quick-docker-start) 8 | - [Developer Workshop](#developer-workshop) 9 | - [Create a custom project](#create-a-custom-project) 10 | - [Start your server using Docker](#start-your-server-using-docker) 11 | - [Run the instance in development mode](#run-the-instance-in-development-mode) 12 | - [Run the instance on a public site](#run-the-instance-on-a-public-site) 13 | - [Stop the Docker Images](#stop-the-docker-images) 14 | - [Backup and Restore from Docker Images](#backup-and-restore-the-docker-images) 15 | - [Recommended: Track your changes](#recommended-track-your-changes) 16 | - [Hints: Configuring `requirements.txt`](#hints-configuring-requirementstxt) 17 | 18 | ## Quick Docker Start 19 | 20 | ```bash 21 | python3.10 -m venv ~/.venvs/project_name 22 | source ~/.venvs/{{ project_name }}/bin/activate 23 | 24 | pip install Django==4.2.9 25 | 26 | mkdir ~/project_name 27 | 28 | GN_VERSION=master # Define the branch or tag you want to generate the project from 29 | django-admin startproject --template=https://github.com/GeoNode/geonode-project/archive/refs/heads/$GN_VERSION.zip -e py,sh,md,rst,json,yml,ini,env,sample,properties -n monitoring-cron -n Dockerfile project_name ~/project_name 30 | 31 | cd ~/project_name 32 | python create-envfile.py 33 | ``` 34 | 35 | The project can also be generated from a local checkout of the geonode-project repository 36 | 37 | ```bash 38 | git clone https://github.com/GeoNode/geonode-project 39 | git checkout $GN_VERSION 40 | django-admin startproject --template=./geonode-project -e py,sh,md,rst,json,yml,ini,env,sample,properties -n monitoring-cron -n Dockerfile project_name ~/project_name 41 | 42 | ``` 43 | 44 | `create-envfile.py` accepts the following arguments: 45 | 46 | - `--https`: Enable SSL. It's disabled by default 47 | - `--env_type`: 48 | - When set to `prod`, `DEBUG` is disabled and a valid `SSL` certificate is requested from Letsencrypt's ACME server 49 | - When set to `test`, `DEBUG` is disabled and a test `SSL` certificate is generated for local testing 50 | - When set to `dev`, `DEBUG` is enabled and no `SSL` certificate is generated 51 | - `--hostname`: The URL that will serve GeoNode (`localhost` by default) 52 | - `--email`: The administrator's email. Notice that a real email and a valid SMTP configuration are required if `--env_type` is set to `prod`. Letsencrypt uses the email for issuing the SSL certificate 53 | - `--geonodepwd`: GeoNode's administrator password. A random value is set if left empty 54 | - `--geoserverpwd`: GeoServer's administrator password. A random value is set if left empty 55 | - `--pgpwd`: PostgreSQL's administrator password. A random value is set if left empty 56 | - `--dbpwd`: GeoNode DB user role's password. A random value is set if left empty 57 | - `--geodbpwd`: GeoNode data DB user role's password.
A random value is set if left empty 58 | - `--clientid`: Client id of Geoserver's GeoNode Oauth2 client. A random value is set if left empty 59 | - `--clientsecret`: Client secret of Geoserver's GeoNode Oauth2 client. A random value is set if left empty 60 | ```bash 61 | docker compose build 62 | docker compose up -d 63 | ``` 64 | 65 | ## Developer Workshop 66 | 67 | Available at 68 | 69 | ```bash 70 | http://geonode.org/dev-workshop 71 | ``` 72 | 73 | ## Create a custom project 74 | 75 | **NOTE**: *You can call your geonode project whatever you like **except 'geonode'**. Follow the naming conventions for python packages (generally lower case with underscores (``_``)). In the examples below, replace ``{{ project_name }}`` with whatever you would like to name your project.* 76 | 77 | To set up your project, follow these instructions: 78 | 79 | 1. Generate the project 80 | 81 | ```bash 82 | git clone https://github.com/GeoNode/geonode-project.git -b 83 | source /usr/share/virtualenvwrapper/virtualenvwrapper.sh 84 | mkvirtualenv --python=/usr/bin/python3 {{ project_name }} 85 | pip install Django==3.2.16 86 | 87 | django-admin startproject --template=./geonode-project -e py,sh,md,rst,json,yml,ini,env,sample,properties -n monitoring-cron -n Dockerfile {{ project_name }} 88 | 89 | cd {{ project_name }} 90 | ``` 91 | 92 | 2. Create the .env file 93 | 94 | An `.env` file is required to run the application. It can be created from the `.env.sample` either manually or with the create-envfile.py script. 95 | 96 | The script accepts several parameters to create the file, in detail: 97 | 98 | - *hostname*: e.g. master.demo.geonode.org, default localhost 99 | - *https*: (boolean), default value is False 100 | - *email*: Admin email (this is required if https is set to True since a valid email is required by Letsencrypt certbot) 101 | - *env_type*: `prod`, `test` or `dev`. It will set the `DEBUG` variable to `False` (`prod`, `test`) or `True` (`dev`) 102 | - *geonodepwd*: GeoNode admin password (required inside the .env) 103 | - *geoserverpwd*: Geoserver admin password (required inside the .env) 104 | - *pgpwd*: PostgreSQL password (required inside the .env) 105 | - *dbpwd*: GeoNode DB user password (required inside the .env) 106 | - *geodbpwd*: Geodatabase user password (required inside the .env) 107 | - *clientid*: Oauth2 client id (required inside the .env) 108 | - *clientsecret*: Oauth2 client secret (required inside the .env) 109 | - *secret key*: Django secret key (required inside the .env) 110 | - *sample_file*: absolute path to an env_sample file used to create the env_file. If not provided, the one inside the GeoNode project is used.
111 | - *file*: absolute path to a json file that contains all the above configuration 112 | 113 | **NOTE:** 114 | - if the same configuration is passed in the json file and as an argument, the CLI one will overwrite the one in the JSON file 115 | - If some value is not provided, a random string is used 116 | 117 | Example USAGE 118 | 119 | ```bash 120 | python create-envfile.py -f /opt/core/geonode-project/file.json \ 121 | --hostname localhost \ 122 | --https \ 123 | --email random@email.com \ 124 | --geonodepwd gn_password \ 125 | --geoserverpwd gs_password \ 126 | --pgpwd pg_password \ 127 | --dbpwd db_password \ 128 | --geodbpwd _db_password \ 129 | --clientid 12345 \ 130 | --clientsecret abc123 131 | ``` 132 | 133 | Example JSON expected: 134 | 135 | ```JSON 136 | { 137 | "hostname": "value", 138 | "https": "value", 139 | "email": "value", 140 | "geonodepwd": "value", 141 | "geoserverpwd": "value", 142 | "pgpwd": "value", 143 | "dbpwd": "value", 144 | "geodbpwd": "value", 145 | "clientid": "value", 146 | "clientsecret": "value" 147 | } 148 | ``` 149 | 150 | ### Start your server 151 | *Skip this part if you want to run the project using Docker instead*; see [Start your server using Docker](#start-your-server-using-docker) 152 | 153 | 1. Set up the Python Dependencies 154 | 155 | **NOTE**: *Important: modify your `requirements.txt` file by adding the `GeoNode` branch before continuing!* 156 | 157 | (see [Hints: Configuring `requirements.txt`](#hints-configuring-requirementstxt)) 158 | 159 | ```bash 160 | cd src 161 | pip install -r requirements.txt --upgrade 162 | pip install -e . --upgrade 163 | 164 | # Install GDAL Utilities for Python 165 | pip install pygdal=="`gdal-config --version`.*" 166 | 167 | # Dev scripts 168 | mv ../.override_dev_env.sample ../.override_dev_env 169 | mv manage_dev.sh.sample manage_dev.sh 170 | mv paver_dev.sh.sample paver_dev.sh 171 | 172 | source ../.override_dev_env 173 | 174 | # Using the Default Settings 175 | sh ./paver_dev.sh reset 176 | sh ./paver_dev.sh setup 177 | sh ./paver_dev.sh sync 178 | sh ./paver_dev.sh start 179 | ``` 180 | 181 | 2. Access GeoNode from browser 182 | 183 | **NOTE**: default admin user is ``admin`` (with pw: ``admin``) 184 | 185 | ```bash 186 | http://localhost:8000/ 187 | ``` 188 | 189 | ### Start your server using Docker 190 | 191 | You need Docker 1.12 or higher; get the latest stable official release for your platform. 192 | Once you have the project configured, run the following command from the root folder of the project. 193 | 194 | 1. Run `docker-compose` to start it up (get a cup of coffee or tea while you wait) 195 | 196 | ```bash 197 | docker-compose build --no-cache 198 | docker-compose up -d 199 | ``` 200 | On Windows, you may need to run 201 | ```bash 202 | set COMPOSE_CONVERT_WINDOWS_PATHS=1 203 | ``` 204 | 205 | before running `docker-compose up` 206 | 207 | 2.
Access the site on http://localhost/ 208 | 209 | ## Run the instance in development mode 210 | 211 | ### Use dedicated docker-compose files while developing 212 | 213 | **NOTE**: In this example we are going to keep localhost as the target IP for GeoNode 214 | 215 | ```bash 216 | docker-compose -f docker-compose.development.yml -f docker-compose.development.override.yml up 217 | ``` 218 | 219 | ## Run the instance on a public site 220 | 221 | ### Preparation of the image (First time only) 222 | 223 | **NOTE**: In this example we are going to publish to the public IP http://123.456.789.111 224 | 225 | ```bash 226 | vim .env 227 | --> replace localhost with 123.456.789.111 everywhere 228 | ``` 229 | 230 | ### Startup the image 231 | 232 | ```bash 233 | docker-compose up --build -d 234 | ``` 235 | 236 | ### Stop the Docker Images 237 | 238 | ```bash 239 | docker-compose stop 240 | ``` 241 | 242 | ### Fully Wipe-out the Docker Images 243 | 244 | **WARNING**: This will wipe out all the repositories created until now. 245 | 246 | **NOTE**: The images must be stopped first 247 | 248 | ```bash 249 | docker system prune -a 250 | ``` 251 | 252 | ## Backup and Restore from Docker Images 253 | 254 | ### Run a Backup 255 | 256 | ```bash 257 | SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/backup.sh $BKP_FOLDER_NAME 258 | ``` 259 | 260 | - BKP_FOLDER_NAME: 261 | Default value = backup_restore 262 | Shared Backup Folder name. 263 | The scripts assume it is located on "root" e.g.: /$BKP_FOLDER_NAME/ 264 | 265 | - SOURCE_URL: 266 | Source Server URL, the one generating the "backup" file. 267 | 268 | - TARGET_URL: 269 | Target Server URL, the one which must be synched. 270 | 271 | e.g.: 272 | 273 | ```bash 274 | docker exec -it django4{{project_name}} sh -c 'SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/backup.sh $BKP_FOLDER_NAME' 275 | ``` 276 | 277 | ### Run a Restore 278 | 279 | ```bash 280 | SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/restore.sh $BKP_FOLDER_NAME 281 | ``` 282 | 283 | - BKP_FOLDER_NAME: 284 | Default value = backup_restore 285 | Shared Backup Folder name. 286 | The scripts assume it is located on "root" e.g.: /$BKP_FOLDER_NAME/ 287 | 288 | - SOURCE_URL: 289 | Source Server URL, the one generating the "backup" file. 290 | 291 | - TARGET_URL: 292 | Target Server URL, the one which must be synched. 293 | 294 | e.g.: 295 | 296 | ```bash 297 | docker exec -it django4{{project_name}} sh -c 'SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/restore.sh $BKP_FOLDER_NAME' 298 | ``` 299 | 300 | ## Recommended: Track your changes 301 | 302 | Step 1. Install Git (for Linux, Mac or Windows). 303 | 304 | Step 2. Init git locally and do the first commit: 305 | 306 | ```bash 307 | git init 308 | git add * 309 | git commit -m "Initial Commit" 310 | ``` 311 | 312 | Step 3. Set up a free account on github or bitbucket and make a copy of the repo there. 313 | 314 | ## Hints: Configuring `requirements.txt` 315 | 316 | You may want to configure your requirements.txt, if you are using additional or custom versions of python packages. 
For example 317 | 318 | ```python 319 | Django==3.2.16 320 | git+git://github.com//geonode.git@ 321 | ``` 322 | 323 | ## Increasing PostgreSQL Max connections 324 | 325 | In case you need to increase the PostgreSQL Max Connections , you can modify 326 | the **POSTGRESQL_MAX_CONNECTIONS** variable in **.env** file as below: 327 | 328 | ``` 329 | POSTGRESQL_MAX_CONNECTIONS=200 330 | ``` 331 | 332 | In this case PostgreSQL will run accepting 200 maximum connections. 333 | 334 | ## Test project generation and docker-compose build Vagrant usage 335 | 336 | Testing with [vagrant](https://www.vagrantup.com/docs) works like this: 337 | What vagrant does: 338 | 339 | Starts a vm for test on docker swarm: 340 | - configures a GeoNode project from template every time from your working directory (so you can develop directly on geonode-project). 341 | - exposes service on localhost port 8888 342 | - rebuilds everytime everything with cache [1] to avoid banning from docker hub with no login. 343 | - starts, reboots to check if docker services come up correctly after reboot. 344 | 345 | ```bash 346 | vagrant plugin install vagrant-reload 347 | #test things for docker-compose 348 | vagrant up 349 | # check services are up upon reboot 350 | vagrant ssh geonode-compose -c 'docker ps' 351 | ``` 352 | 353 | Test geonode on [http://localhost:8888/](http://localhost:8888/) 354 | 355 | To clean up things and delete the vagrant box: 356 | 357 | ```bash 358 | vagrant destroy -f 359 | ``` 360 | 361 | ## Test project generation and Docker swarm build on vagrant 362 | 363 | What vagrant does: 364 | 365 | Starts a vm for test on docker swarm: 366 | - configures a GeoNode project from template every time from your working directory (so you can develop directly on geonode-project). 367 | - exposes service on localhost port 8888 368 | - rebuilds everytime everything with cache [1] to avoid banning from docker hub with no login. 369 | - starts, reboots to check if docker services come up correctly after reboot. 370 | 371 | To test on a docker swarm enable vagrant box: 372 | 373 | ```bash 374 | vagrant up 375 | VAGRANT_VAGRANTFILE=Vagrantfile.stack vagrant up 376 | # check services are up upon reboot 377 | VAGRANT_VAGRANTFILE=Vagrantfile.stack vagrant ssh geonode-compose -c 'docker service ls' 378 | ``` 379 | 380 | Test geonode on [http://localhost:8888/](http://localhost:8888/) 381 | Again, to clean up things and delete the vagrant box: 382 | 383 | ```bash 384 | VAGRANT_VAGRANTFILE=Vagrantfile.stack vagrant destroy -f 385 | ``` 386 | 387 | for direct deveolpment on geonode-project after first `vagrant up` to rebuild after changes to project, you can do `vagrant reload` like this: 388 | 389 | ```bash 390 | vagrant up 391 | ``` 392 | 393 | What vagrant does (swarm or comnpose cases): 394 | 395 | Starts a vm for test on plain docker service with docker-compose: 396 | - configures a GeoNode project from template every time from your working directory (so you can develop directly on geonode-project). 397 | - rebuilds everytime everything with cache [1] to avoid banning from docker hub with no login. 398 | - starts, reboots. 
399 | 400 | [1] to achieve `docker-compose build --no-cache` just destroy vagrant boxes `vagrant destroy -f` 401 | 402 | -------------------------------------------------------------------------------- /create-envfile.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2022 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | import argparse 21 | import json 22 | import logging 23 | import os 24 | import random 25 | import re 26 | import string 27 | import sys 28 | import ast 29 | 30 | dir_path = os.path.dirname(os.path.realpath(__file__)) 31 | 32 | logger = logging.getLogger() 33 | handler = logging.StreamHandler(sys.stdout) 34 | logger.setLevel(logging.INFO) 35 | formatter = logging.Formatter("%(levelname)s - %(message)s") 36 | handler.setFormatter(formatter) 37 | logger.addHandler(handler) 38 | 39 | 40 | def shuffle(chars): 41 | chars_as_list = list(chars) 42 | random.shuffle(chars_as_list) 43 | return "".join(chars_as_list) 44 | 45 | _simple_chars = shuffle(string.ascii_letters + string.digits) 46 | _strong_chars = shuffle(string.ascii_letters + string.digits + "#%*._~") 47 | 48 | 49 | def generate_env_file(args): 50 | # validity checks 51 | if not os.path.exists(args.sample_file): 52 | logger.error(f"File does not exists {args.sample_file}") 53 | raise FileNotFoundError 54 | 55 | if args.file and not os.path.isfile(args.file): 56 | logger.error(f"File does not exists: {args.file}") 57 | raise FileNotFoundError 58 | 59 | if args.https and not args.email: 60 | raise Exception("With HTTPS enabled, the email parameter is required") 61 | 62 | _sample_file = None 63 | with open(args.sample_file, "r+") as sample_file: 64 | _sample_file = sample_file.read() 65 | 66 | if not _sample_file: 67 | raise Exception("Sample file is empty!") 68 | 69 | def _get_vals_to_replace(args): 70 | _config = ["sample_file", "file", "env_type", "https", "email"] 71 | _jsfile = {} 72 | if args.file: 73 | with open(args.file) as _json_file: 74 | _jsfile = json.load(_json_file) 75 | 76 | _vals_to_replace = { 77 | key: _jsfile.get(key, val) 78 | for key, val in vars(args).items() 79 | if key not in _config 80 | } 81 | tcp = ( 82 | "https" 83 | if ast.literal_eval(f"{_jsfile.get('https', args.https)}".capitalize()) 84 | else "http" 85 | ) 86 | 87 | _vals_to_replace["http_host"] = ( 88 | _jsfile.get("hostname", args.hostname) if tcp == "http" else "" 89 | ) 90 | _vals_to_replace["https_host"] = ( 91 | _jsfile.get("hostname", args.hostname) if tcp == "https" else "" 92 | ) 93 | 94 | _vals_to_replace[ 95 | "siteurl" 96 | ] = f"{tcp}://{_jsfile.get('hostname', args.hostname)}" 97 | _vals_to_replace["secret_key"] = _jsfile.get( 98 | "secret_key", args.secret_key 99 | 
) or "".join(random.choice(_strong_chars) for _ in range(50)) 100 | _vals_to_replace["letsencrypt_mode"] = ( 101 | "disabled" 102 | if not _vals_to_replace.get("https_host") 103 | else "staging" 104 | if _jsfile.get("env_type", args.env_type) in ["test"] 105 | else "production" 106 | ) 107 | _vals_to_replace["debug"] = ( 108 | False 109 | if _jsfile.get("env_type", args.env_type) in ["prod", "test"] 110 | else True 111 | ) 112 | _vals_to_replace["email"] = _jsfile.get("email", args.email) 113 | 114 | if tcp == "https" and not _vals_to_replace["email"]: 115 | raise Exception("With HTTPS enabled, the email parameter is required") 116 | 117 | return {**_jsfile, **_vals_to_replace} 118 | 119 | for key, val in _get_vals_to_replace(args).items(): 120 | _val = val or "".join(random.choice(_simple_chars) for _ in range(15)) 121 | if isinstance(val, bool) or key in ["email", "http_host", "https_host"]: 122 | _val = str(val) 123 | _sample_file = re.sub( 124 | "{" + key + "}", 125 | lambda _: _val, 126 | _sample_file, 127 | ) 128 | 129 | with open(f"{dir_path}/.env", "w+") as output_env: 130 | output_env.write(_sample_file) 131 | logger.info(f".env file created: {dir_path}/.env") 132 | 133 | 134 | if __name__ == "__main__": 135 | parser = argparse.ArgumentParser( 136 | prog="ENV file builder", 137 | description="Tool for generate environment file automatically. The information can be passed or via CLI or via JSON file ( --file /path/env.json)", 138 | usage="python create-envfile.py localhost -f /path/to/json/file.json", 139 | allow_abbrev=False 140 | ) 141 | parser.add_argument( 142 | "--noinput", 143 | "--no-input", 144 | action="store_false", 145 | dest="confirmation", 146 | help=("skips prompting for confirmation."), 147 | ) 148 | parser.add_argument( 149 | "-hn", 150 | "--hostname", 151 | help=f"Host name, default localhost", 152 | default="localhost", 153 | ) 154 | 155 | # expected path as a value 156 | parser.add_argument( 157 | "-sf", 158 | "--sample_file", 159 | help=f"Path of the sample file to use as a template. Default is: {dir_path}/.env.sample", 160 | default=f"{dir_path}/.env.sample", 161 | ) 162 | parser.add_argument( 163 | "-f", 164 | "--file", 165 | help="absolute path of the file with the configuration. 
Note: we expect that the keys of the dictionary have the same name as the CLI params", 166 | ) 167 | # booleans 168 | parser.add_argument( 169 | "--https", action="store_true", default=False, help="If provided, https is used" 170 | ) 171 | # strings 172 | parser.add_argument( 173 | "--email", help="Admin email, this field is required if https is enabled" 174 | ) 175 | 176 | parser.add_argument("--geonodepwd", help="GeoNode admin password") 177 | parser.add_argument("--geoserverpwd", help="Geoserver admin password") 178 | parser.add_argument("--pgpwd", help="PostgreSQL password") 179 | parser.add_argument("--dbpwd", help="GeoNode DB user password") 180 | parser.add_argument("--geodbpwd", help="Geodatabase user password") 181 | parser.add_argument("--clientid", help="Oauth2 client id") 182 | parser.add_argument("--clientsecret", help="Oauth2 client secret") 183 | parser.add_argument("--secret_key", help="Django Secret Key") 184 | 185 | parser.add_argument( 186 | "--env_type", 187 | help="Development/production or test", 188 | choices=["prod", "test", "dev"], 189 | default="prod", 190 | ) 191 | 192 | args = parser.parse_args() 193 | 194 | if not args.confirmation: 195 | generate_env_file(args) 196 | else: 197 | overwrite_env = input( 198 | "This action will overwrite any existing .env file. Do you wish to continue? (y/n)" 199 | ) 200 | if overwrite_env not in ["y", "n"]: 201 | logger.error("Please enter a valid response") 202 | if overwrite_env == "y": 203 | generate_env_file(args) 204 | -------------------------------------------------------------------------------- /dev_config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | GEOSERVER_URL: "https://artifacts.geonode.org/geoserver/2.24.3/geoserver.war" 3 | DATA_DIR_URL: "https://artifacts.geonode.org/geoserver/2.24.3/geonode-geoserver-ext-web-app-data.zip" 4 | JETTY_RUNNER_URL: "https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.31.v20200723/jetty-runner-9.4.31.v20200723.jar" 5 | WINDOWS: 6 | py2exe: "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe" 7 | pyproj: "https://pyproj.googlecode.com/files/pyproj-1.9.3.win32-py2.7.exe" 8 | lxml: "https://pypi.python.org/packages/2.7/l/lxml/lxml-3.6.0.win32-py2.7.exe" 9 | -------------------------------------------------------------------------------- /docker-build.sh: -------------------------------------------------------------------------------- 1 | docker-compose build --no-cache; docker-compose stop; docker-compose up -d; docker system prune -a -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.9' 2 | 3 | # Common Django template for GeoNode and Celery services below 4 | x-common-django: 5 | &default-common-django 6 | image: ${COMPOSE_PROJECT_NAME}/geonode:${GEONODE_BASE_IMAGE_VERSION} 7 | restart: unless-stopped 8 | env_file: 9 | - .env 10 | volumes: 11 | # - './src:/usr/src/{{project_name}}' 12 | - statics:/mnt/volumes/statics 13 | - geoserver-data-dir:/geoserver_data/data 14 | - backup-restore:/backup_restore 15 | - data:/data 16 | - tmp:/tmp 17 | depends_on: 18 | db: 19 | condition: service_healthy 20 | 21 | services: 22 | 23 | # Our custom django application. It includes Geonode. 
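  # Note: both the `django` and `celery` services below extend the shared x-common-django block through the YAML merge key (`<< : *default-common-django`), so the image, env_file, volumes and the db dependency are declared only once.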
24 | django: 25 | << : *default-common-django 26 | build: 27 | context: ./ 28 | dockerfile: Dockerfile 29 | container_name: django4${COMPOSE_PROJECT_NAME} 30 | healthcheck: 31 | test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://django:8000/" 32 | start_period: 60s 33 | interval: 60s 34 | timeout: 10s 35 | retries: 2 36 | environment: 37 | - IS_CELERY=False 38 | entrypoint: ["/usr/src/{{project_name}}/entrypoint.sh"] 39 | command: "uwsgi --ini /usr/src/{{project_name}}/uwsgi.ini" 40 | 41 | # Celery worker that executes celery tasks created by Django. 42 | celery: 43 | << : *default-common-django 44 | container_name: celery4${COMPOSE_PROJECT_NAME} 45 | depends_on: 46 | django: 47 | condition: service_healthy 48 | environment: 49 | - IS_CELERY=True 50 | entrypoint: ["/usr/src/{{project_name}}/entrypoint.sh"] 51 | command: "celery-cmd" 52 | 53 | # Nginx is serving django static and media files and proxies to django and geonode 54 | geonode: 55 | image: ${COMPOSE_PROJECT_NAME}/nginx:${NGINX_BASE_IMAGE_VERSION} 56 | build: 57 | context: ./docker/nginx 58 | dockerfile: Dockerfile 59 | args: 60 | - BASE_IMAGE_VERSION=${NGINX_BASE_IMAGE_VERSION} 61 | container_name: nginx4${COMPOSE_PROJECT_NAME} 62 | env_file: 63 | - .env 64 | environment: 65 | - RESOLVER=127.0.0.11 66 | ports: 67 | - "${HTTP_PORT}:80" 68 | - "${HTTPS_PORT}:443" 69 | volumes: 70 | - nginx-confd:/etc/nginx 71 | - nginx-certificates:/geonode-certificates 72 | - statics:/mnt/volumes/statics 73 | restart: unless-stopped 74 | 75 | # memcached service 76 | memcached: 77 | image: memcached:alpine 78 | container_name: memcached4${COMPOSE_PROJECT_NAME} 79 | command: memcached ${MEMCACHED_OPTIONS} 80 | restart: on-failure 81 | healthcheck: 82 | test: nc -z 127.0.0.1 11211 83 | interval: 30s 84 | timeout: 30s 85 | retries: 5 86 | start_period: 30s 87 | 88 | # Gets and installs letsencrypt certificates 89 | letsencrypt: 90 | image: ${COMPOSE_PROJECT_NAME}/letsencrypt:${LETSENCRYPT_BASE_IMAGE_VERSION} 91 | build: 92 | context: ./docker/letsencrypt 93 | dockerfile: Dockerfile 94 | args: 95 | - BASE_IMAGE_VERSION=${LETSENCRYPT_BASE_IMAGE_VERSION} 96 | container_name: letsencrypt4${COMPOSE_PROJECT_NAME} 97 | env_file: 98 | - .env 99 | volumes: 100 | - nginx-certificates:/geonode-certificates 101 | restart: unless-stopped 102 | 103 | # Geoserver backend 104 | geoserver: 105 | image: ${COMPOSE_PROJECT_NAME}/geoserver:${GEOSERVER_BASE_IMAGE_VERSION} 106 | build: 107 | context: ./docker/geoserver 108 | dockerfile: Dockerfile 109 | args: 110 | - BASE_IMAGE_VERSION=${GEOSERVER_BASE_IMAGE_VERSION} 111 | container_name: geoserver4${COMPOSE_PROJECT_NAME} 112 | healthcheck: 113 | test: "curl -m 10 --fail --silent --write-out 'HTTP CODE : %{http_code}\n' --output /dev/null http://geoserver:8080/geoserver/ows" 114 | start_period: 60s 115 | interval: 60s 116 | timeout: 10s 117 | retries: 2 118 | env_file: 119 | - .env 120 | ports: 121 | - "8080:8080" 122 | volumes: 123 | - statics:/mnt/volumes/statics 124 | - geoserver-data-dir:/geoserver_data/data 125 | - backup-restore:/backup_restore 126 | - data:/data 127 | - tmp:/tmp 128 | restart: unless-stopped 129 | depends_on: 130 | data-dir-conf: 131 | condition: service_healthy 132 | django: 133 | condition: service_healthy 134 | 135 | data-dir-conf: 136 | image: ${COMPOSE_PROJECT_NAME}/geoserver_data:${GEOSERVER_DATA_BASE_IMAGE_VERSION} 137 | build: 138 | context: ./docker/geoserver_data 139 | dockerfile: Dockerfile 140 | args: 141 | - 
BASE_IMAGE_VERSION=${GEOSERVER_DATA_BASE_IMAGE_VERSION} 142 | container_name: gsconf4${COMPOSE_PROJECT_NAME} 143 | entrypoint: sleep infinity 144 | volumes: 145 | - geoserver-data-dir:/geoserver_data/data 146 | restart: unless-stopped 147 | healthcheck: 148 | test: "ls -A '/geoserver_data/data' | wc -l" 149 | 150 | # PostGIS database. 151 | db: 152 | image: ${COMPOSE_PROJECT_NAME}/postgis:${POSTGRES_BASE_IMAGE_VERSION} 153 | build: 154 | context: ./docker/postgresql 155 | dockerfile: Dockerfile 156 | args: 157 | - BASE_IMAGE_VERSION=${POSTGRES_BASE_IMAGE_VERSION} 158 | command: postgres -c "max_connections=${POSTGRESQL_MAX_CONNECTIONS}" 159 | container_name: db4${COMPOSE_PROJECT_NAME} 160 | env_file: 161 | - .env 162 | volumes: 163 | - dbdata:/var/lib/postgresql/data 164 | - dbbackups:/pg_backups 165 | restart: unless-stopped 166 | healthcheck: 167 | test: "pg_isready -d postgres -U postgres" 168 | # uncomment to enable remote connections to postgres 169 | #ports: 170 | # - "5432:5432" 171 | 172 | # Vanilla RabbitMQ service. This is needed by celery 173 | rabbitmq: 174 | image: rabbitmq:3-alpine 175 | container_name: rabbitmq4${COMPOSE_PROJECT_NAME} 176 | volumes: 177 | - rabbitmq:/var/lib/rabbitmq 178 | restart: unless-stopped 179 | 180 | volumes: 181 | statics: 182 | name: ${COMPOSE_PROJECT_NAME}-statics 183 | nginx-confd: 184 | name: ${COMPOSE_PROJECT_NAME}-nginxconfd 185 | nginx-certificates: 186 | name: ${COMPOSE_PROJECT_NAME}-nginxcerts 187 | geoserver-data-dir: 188 | name: ${COMPOSE_PROJECT_NAME}-gsdatadir 189 | dbdata: 190 | name: ${COMPOSE_PROJECT_NAME}-dbdata 191 | dbbackups: 192 | name: ${COMPOSE_PROJECT_NAME}-dbbackups 193 | backup-restore: 194 | name: ${COMPOSE_PROJECT_NAME}-backup-restore 195 | data: 196 | name: ${COMPOSE_PROJECT_NAME}-data 197 | tmp: 198 | name: ${COMPOSE_PROJECT_NAME}-tmp 199 | rabbitmq: 200 | name: ${COMPOSE_PROJECT_NAME}-rabbitmq 201 | -------------------------------------------------------------------------------- /docker-purge.sh: -------------------------------------------------------------------------------- 1 | docker kill $(docker ps -q); docker rm $(docker ps -a -q); docker rmi $(docker images -q); docker volume ls -qf dangling=true | xargs -r docker volume rm; docker system prune -a 2 | docker volume prune 3 | -------------------------------------------------------------------------------- /docker/geoserver/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE_VERSION 2 | FROM geonode/geoserver:${BASE_IMAGE_VERSION} -------------------------------------------------------------------------------- /docker/geoserver_data/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE_VERSION 2 | FROM geonode/geoserver_data:${BASE_IMAGE_VERSION} -------------------------------------------------------------------------------- /docker/letsencrypt/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE_VERSION 2 | FROM geonode/letsencrypt:${BASE_IMAGE_VERSION} -------------------------------------------------------------------------------- /docker/nginx/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE_VERSION 2 | FROM geonode/nginx:${BASE_IMAGE_VERSION} -------------------------------------------------------------------------------- /docker/postgresql/Dockerfile: 
-------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE_VERSION 2 | FROM geonode/postgis:${BASE_IMAGE_VERSION} -------------------------------------------------------------------------------- /src/Makefile: -------------------------------------------------------------------------------- 1 | up: 2 | # bring up the services 3 | docker-compose up -d 4 | 5 | build: 6 | docker-compose build django 7 | docker-compose build celery 8 | 9 | sync: 10 | # set up the database tablea 11 | docker-compose run django python manage.py makemigrations --noinput 12 | docker-compose exec django python manage.py migrate account --noinput 13 | docker-compose run django python manage.py migrate --noinput 14 | 15 | wait: 16 | sleep 5 17 | 18 | logs: 19 | docker-compose logs --follow 20 | 21 | down: 22 | docker-compose down 23 | 24 | test: 25 | docker-compose run django python manage.py test --failfast 26 | 27 | reset: down up wait sync 28 | 29 | hardreset: pull build reset 30 | -------------------------------------------------------------------------------- /src/README.md: -------------------------------------------------------------------------------- 1 | # {{ project_name|title }} 2 | -------------------------------------------------------------------------------- /src/celery-cmd: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # A configurable celery command. 3 | # Luca Pasquali 4 | CELERY_BIN=${CELERY_BIN:-"$(which celery||echo celery)"} 5 | CELERY_APP=${CELERY_APP:-"geonode.celery_app:app"} 6 | CELERY__STATE_DB=${CELERY__STATE_DB:-"/mnt/volumes/statics/worker@%h.state"} 7 | # expressed in KB 8 | CELERY__MAX_MEMORY_PER_CHILD=${CELERY__MAX_MEMORY_PER_CHILD:-"200000"} 9 | CELERY__AUTOSCALE_VALUES=${CELERY__AUTOSCALE_VALUES:-"15,10"} 10 | CELERY__MAX_TASKS_PER_CHILD=${CELERY__MAX_TASKS_PER_CHILD:-"10"} 11 | CELERY__OPTS=${CELERY__OPTS:-"--without-gossip --without-mingle -Ofair -B -E"} 12 | CELERY__BEAT_SCHEDULE=${CELERY__BEAT_SCHEDULE:-"celery.beat:PersistentScheduler"} 13 | CELERY__LOG_LEVEL=${CELERY__LOG_LEVEL:-"INFO"} 14 | CELERY__LOG_FILE=${CELERY__LOG_FILE:-"/var/log/celery.log"} 15 | CELERY__WORKER_NAME=${CELERY__WORKER_NAME:-"worker1@%h"} 16 | CELERY__WORKER_CONCURRENCY=${CELERY__WORKER_CONCURRENCY:-"4"} 17 | 18 | $CELERY_BIN -A $CELERY_APP worker --autoscale=$CELERY__AUTOSCALE_VALUES \ 19 | --max-memory-per-child=$CELERY__MAX_MEMORY_PER_CHILD $CELERY__OPTS \ 20 | --statedb=$CELERY__STATE_DB --scheduler=$CELERY__BEAT_SCHEDULE \ 21 | --loglevel=$CELERY__LOG_LEVEL -n $CELERY__WORKER_NAME -f $CELERY__LOG_FILE \ 22 | --concurrency=$CELERY__WORKER_CONCURRENCY --max-tasks-per-child=$CELERY__MAX_TASKS_PER_CHILD 23 | -------------------------------------------------------------------------------- /src/celery.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | nohup celery -A geonode.celery_app:app beat -l DEBUG -f /var/log/celery.log &>/dev/null & 3 | nohup celery -A geonode.celery_app:app worker --without-gossip --without-mingle -Ofair -B -E --statedb=worker.state --scheduler=celery.beat:PersistentScheduler --loglevel=INFO --concurrency=2 -n worker1@%h -f /var/log/celery.log &>/dev/null 4 | -------------------------------------------------------------------------------- /src/dev_config.yml: -------------------------------------------------------------------------------- 1 | --- 2 | GEOSERVER_URL: "https://artifacts.geonode.org/geoserver/2.24.3/geoserver.war" 
3 | DATA_DIR_URL: "https://artifacts.geonode.org/geoserver/2.24.3/geonode-geoserver-ext-web-app-data.zip" 4 | JETTY_RUNNER_URL: "https://repo1.maven.org/maven2/org/eclipse/jetty/jetty-runner/9.4.31.v20200723/jetty-runner-9.4.31.v20200723.jar" 5 | WINDOWS: 6 | py2exe: "http://downloads.sourceforge.net/project/py2exe/py2exe/0.6.9/py2exe-0.6.9.win32-py2.7.exe" 7 | pyproj: "https://pyproj.googlecode.com/files/pyproj-1.9.3.win32-py2.7.exe" 8 | lxml: "https://pypi.python.org/packages/2.7/l/lxml/lxml-3.6.0.win32-py2.7.exe" 9 | -------------------------------------------------------------------------------- /src/entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Exit script in case of error 4 | set -e 5 | 6 | INVOKE_LOG_STDOUT=${INVOKE_LOG_STDOUT:-FALSE} 7 | invoke () { 8 | if [ $INVOKE_LOG_STDOUT = 'true' ] || [ $INVOKE_LOG_STDOUT = 'True' ] 9 | then 10 | /usr/local/bin/invoke $@ 11 | else 12 | /usr/local/bin/invoke $@ > /usr/src/{{project_name}}/invoke.log 2>&1 13 | fi 14 | echo "$@ tasks done" 15 | } 16 | 17 | # Start cron service 18 | service cron restart 19 | 20 | echo $"\n\n\n" 21 | echo "-----------------------------------------------------" 22 | echo "STARTING DJANGO ENTRYPOINT $(date)" 23 | echo "-----------------------------------------------------" 24 | 25 | invoke update 26 | 27 | source $HOME/.bashrc 28 | source $HOME/.override_env 29 | 30 | echo DOCKER_API_VERSION=$DOCKER_API_VERSION 31 | echo POSTGRES_USER=$POSTGRES_USER 32 | echo POSTGRES_PASSWORD=$POSTGRES_PASSWORD 33 | echo DATABASE_URL=$DATABASE_URL 34 | echo GEODATABASE_URL=$GEODATABASE_URL 35 | echo SITEURL=$SITEURL 36 | echo ALLOWED_HOSTS=$ALLOWED_HOSTS 37 | echo GEOSERVER_PUBLIC_LOCATION=$GEOSERVER_PUBLIC_LOCATION 38 | echo MONITORING_ENABLED=$MONITORING_ENABLED 39 | echo MONITORING_HOST_NAME=$MONITORING_HOST_NAME 40 | echo MONITORING_SERVICE_NAME=$MONITORING_SERVICE_NAME 41 | echo MONITORING_DATA_TTL=$MONITORING_DATA_TTL 42 | 43 | # invoke waitfordbs 44 | 45 | cmd="$@" 46 | 47 | if [ ${IS_CELERY} = "true" ] || [ ${IS_CELERY} = "True" ] 48 | then 49 | echo "Executing Celery server $cmd for Production" 50 | else 51 | 52 | invoke migrations 53 | invoke prepare 54 | 55 | if [ ${FORCE_REINIT} = "true" ] || [ ${FORCE_REINIT} = "True" ] || [ ! 
-e "/mnt/volumes/statics/geonode_init.lock" ]; then 56 | invoke fixtures 57 | invoke monitoringfixture 58 | invoke initialized 59 | invoke updateadmin 60 | fi 61 | 62 | invoke statics 63 | 64 | echo "Executing UWSGI server $cmd for Production" 65 | fi 66 | 67 | echo "-----------------------------------------------------" 68 | echo "FINISHED DJANGO ENTRYPOINT --------------------------" 69 | echo "-----------------------------------------------------" 70 | 71 | # Run the CMD 72 | echo "got command $cmd" 73 | exec $cmd 74 | -------------------------------------------------------------------------------- /src/fixtures/apikey_docker.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "user": 1000, 5 | "key": "pyxW5djJ7XsjeFUXduAsGpR4xMGUwpeBGQRqTeT3", 6 | "created": "2018-06-28T14:54:51Z" 7 | }, 8 | "model": "tastypie.apikey", 9 | "pk": 1 10 | } 11 | ] -------------------------------------------------------------------------------- /src/fixtures/default_oauth_apps.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model": "oauth2_provider.application", 4 | "pk": 1001, 5 | "fields": { 6 | "skip_authorization": true, 7 | "created": "2018-05-31T10:00:31.661Z", 8 | "updated": "2018-05-31T11:30:31.245Z", 9 | "algorithm": "RS256", 10 | "redirect_uris": "http://localhost:8080/geoserver/index.html\nhttp://localhost/geoserver/index.html", 11 | "name": "GeoServer", 12 | "authorization_grant_type": "authorization-code", 13 | "client_type": "confidential", 14 | "client_id": "Jrchz2oPY3akmzndmgUTYrs9gczlgoV20YPSvqaV", 15 | "client_secret": "rCnp5txobUo83EpQEblM8fVj3QT5zb5qRfxNsuPzCqZaiRyIoxM4jdgMiZKFfePBHYXCLd7B8NlkfDBY9HKeIQPcy5Cp08KQNpRHQbjpLItDHv12GvkSeXp6OxaUETv3", 16 | "user": [ 17 | "admin" 18 | ] 19 | } 20 | } 21 | ] 22 | -------------------------------------------------------------------------------- /src/fixtures/default_oauth_apps_docker.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model": "oauth2_provider.application", 4 | "pk": 1001, 5 | "fields": { 6 | "skip_authorization": true, 7 | "created": "2018-05-31T10:00:31.661Z", 8 | "updated": "2018-05-31T11:30:31.245Z", 9 | "algorithm": "RS256", 10 | "redirect_uris": "http://geonode/geoserver", 11 | "name": "GeoServer", 12 | "authorization_grant_type": "authorization-code", 13 | "client_type": "confidential", 14 | "client_id": "Jrchz2oPY3akmzndmgUTYrs9gczlgoV20YPSvqaV", 15 | "client_secret": "rCnp5txobUo83EpQEblM8fVj3QT5zb5qRfxNsuPzCqZaiRyIoxM4jdgMiZKFfePBHYXCLd7B8NlkfDBY9HKeIQPcy5Cp08KQNpRHQbjpLItDHv12GvkSeXp6OxaUETv3", 16 | "user": [ 17 | "admin" 18 | ] 19 | } 20 | } 21 | ] 22 | -------------------------------------------------------------------------------- /src/fixtures/django_celery_beat.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "model": "django_celery_beat.intervalschedule", 4 | "pk": 1, 5 | "fields": { 6 | "every": 60, 7 | "period": "seconds" 8 | } 9 | }, 10 | { 11 | "model": "django_celery_beat.crontabschedule", 12 | "pk": 1, 13 | "fields": { 14 | "minute": "0", 15 | "hour": "4", 16 | "day_of_week": "*", 17 | "day_of_month": "*", 18 | "month_of_year": "*" 19 | } 20 | }, 21 | { 22 | "model": "django_celery_beat.periodictasks", 23 | "pk": 1, 24 | "fields": { 25 | "last_update": "2019-10-14T12:56:49.352Z" 26 | } 27 | }, 28 | { 29 | "model": "django_celery_beat.periodictask", 30 | "pk": 1, 31 | "fields": { 32 
| "name": "celery.backend_cleanup", 33 | "task": "celery.backend_cleanup", 34 | "interval": null, 35 | "crontab": 1, 36 | "solar": null, 37 | "args": "[]", 38 | "kwargs": "{}", 39 | "queue": null, 40 | "exchange": null, 41 | "routing_key": null, 42 | "expires": null, 43 | "enabled": true, 44 | "last_run_at": null, 45 | "total_run_count": 0, 46 | "date_changed": "2019-10-14T12:50:54.847Z", 47 | "description": "" 48 | } 49 | }, 50 | { 51 | "model": "django_celery_beat.periodictask", 52 | "pk": 2, 53 | "fields": { 54 | "name": "delayed-security-sync-task", 55 | "task": "geonode.security.tasks.synch_guardian", 56 | "interval": 1, 57 | "crontab": null, 58 | "solar": null, 59 | "args": "[]", 60 | "kwargs": "{}", 61 | "queue": null, 62 | "exchange": null, 63 | "routing_key": null, 64 | "expires": null, 65 | "enabled": true, 66 | "last_run_at": null, 67 | "total_run_count": 0, 68 | "date_changed": "2019-10-14T12:56:37.554Z", 69 | "description": "" 70 | } 71 | } 72 | ] -------------------------------------------------------------------------------- /src/fixtures/sample_admin.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "fields": { 3 | "date_joined": "2011-06-09 15:15:27", 4 | "email": "ad@m.in", 5 | "first_name": "", 6 | "groups": [], 7 | "is_active": true, 8 | "is_staff": true, 9 | "is_superuser": true, 10 | "last_login": "2011-06-09 15:45:34", 11 | "last_name": "", 12 | "password": "pbkdf2_sha256$30000$rjuGt0Obn8on$cxF75frIOSaitNklLZ0IJ/VonUW0fwEFVF96o0M+lGc=", 13 | "user_permissions": [], 14 | "username": "admin" 15 | }, 16 | "model": "people.Profile", 17 | "pk": 1000 18 | }] -------------------------------------------------------------------------------- /src/fixtures/sites_template.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "fields": { 4 | "domain": "example.com", 5 | "name": "example.com" 6 | }, 7 | "model": "sites.site", 8 | "pk": 1 9 | } 10 | ] 11 | -------------------------------------------------------------------------------- /src/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # -*- coding: utf-8 -*- 4 | ######################################################################### 5 | # 6 | # Copyright (C) 2017 OSGeo 7 | # 8 | # This program is free software: you can redistribute it and/or modify 9 | # it under the terms of the GNU General Public License as published by 10 | # the Free Software Foundation, either version 3 of the License, or 11 | # (at your option) any later version. 12 | # 13 | # This program is distributed in the hope that it will be useful, 14 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 15 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 16 | # GNU General Public License for more details. 17 | # 18 | # You should have received a copy of the GNU General Public License 19 | # along with this program. If not, see . 
20 | # 21 | ######################################################################### 22 | 23 | import os 24 | import sys 25 | 26 | 27 | if __name__ == "__main__": 28 | from django.core.management import execute_from_command_line 29 | 30 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings") 31 | execute_from_command_line(sys.argv) 32 | -------------------------------------------------------------------------------- /src/manage.sh: -------------------------------------------------------------------------------- 1 | . $HOME/.override_env 2 | /usr/bin/python /usr/src/{{project_name}}/manage.py $@ 3 | -------------------------------------------------------------------------------- /src/manage_dev.sh.sample: -------------------------------------------------------------------------------- 1 | set -a 2 | source ../.override_dev_env 3 | python manage.py $@ 4 | -------------------------------------------------------------------------------- /src/pavement.py: -------------------------------------------------------------------------------- 1 | ######################################################################### 2 | # 3 | # Copyright (C) 2018 OSGeo 4 | # 5 | # This program is free software: you can redistribute it and/or modify 6 | # it under the terms of the GNU General Public License as published by 7 | # the Free Software Foundation, either version 3 of the License, or 8 | # (at your option) any later version. 9 | # 10 | # This program is distributed in the hope that it will be useful, 11 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 12 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 13 | # GNU General Public License for more details. 14 | # 15 | # You should have received a copy of the GNU General Public License 16 | # along with this program. If not, see . 
17 | # 18 | ######################################################################### 19 | 20 | import fileinput 21 | import glob 22 | import os 23 | import re 24 | import shutil 25 | import subprocess 26 | import signal 27 | import sys 28 | import time 29 | import pytz 30 | import logging 31 | import datetime 32 | from dateutil.parser import parse as parsedate 33 | 34 | from urllib.parse import urlparse 35 | from urllib.request import urlopen, Request 36 | 37 | import zipfile 38 | from tqdm import tqdm 39 | import requests 40 | import math 41 | import psutil 42 | 43 | import yaml 44 | from paver.easy import ( 45 | BuildFailure, 46 | call_task, 47 | cmdopts, 48 | info, 49 | needs, 50 | path, 51 | sh, 52 | task, 53 | ) 54 | from setuptools.command import easy_install 55 | 56 | try: 57 | from {{ project_name }}.local_settings import * 58 | except ImportError: 59 | from {{ project_name }}.settings import * 60 | 61 | try: 62 | from paver.path import pushd 63 | except ImportError: 64 | from paver.easy import pushd 65 | 66 | from geonode.settings import ( 67 | on_travis, 68 | core_tests, 69 | internal_apps_tests, 70 | integration_tests, 71 | integration_server_tests, 72 | integration_upload_tests, 73 | integration_monitoring_tests, 74 | integration_csw_tests, 75 | integration_bdd_tests, 76 | INSTALLED_APPS, 77 | GEONODE_CORE_APPS, 78 | GEONODE_INTERNAL_APPS, 79 | GEONODE_APPS, 80 | OGC_SERVER, 81 | ASYNC_SIGNALS, 82 | MONITORING_ENABLED, 83 | CELERY_BEAT_SCHEDULER, 84 | ) 85 | 86 | try: 87 | from geonode.settings import TEST_RUNNER_KEEPDB, TEST_RUNNER_PARALLEL 88 | 89 | _keepdb = "--keepdb" if TEST_RUNNER_KEEPDB else "" 90 | _parallel = f"--parallel={TEST_RUNNER_PARALLEL}" if TEST_RUNNER_PARALLEL else "" 91 | except Exception: 92 | _keepdb = "" 93 | _parallel = "" 94 | 95 | assert sys.version_info >= (2, 6), SystemError( 96 | "GeoNode Build requires python 2.6 or better" 97 | ) 98 | 99 | dev_config = None 100 | with open("dev_config.yml") as f: 101 | dev_config = yaml.load(f, Loader=yaml.Loader) 102 | 103 | 104 | logger = logging.getLogger(__name__) 105 | 106 | 107 | def grab(src, dest, name): 108 | src, dest, name = map(str, (src, dest, name)) 109 | logger.info(f" src, dest, name --> {src} {dest} {name}") 110 | 111 | if not os.path.exists(dest): 112 | logger.info(f"Downloading {name}") 113 | elif not zipfile.is_zipfile(dest): 114 | logger.info(f"Downloading {name} (corrupt file)") 115 | elif not src.startswith("file://"): 116 | r = requests.head(src) 117 | file_time = datetime.datetime.fromtimestamp(os.path.getmtime(dest)) 118 | url_time = file_time 119 | for _k in ["last-modified", "Date"]: 120 | if _k in r.headers: 121 | url_time = r.headers[_k] 122 | url_date = parsedate(url_time) 123 | utc = pytz.utc 124 | url_date = url_date.replace(tzinfo=utc) 125 | file_time = file_time.replace(tzinfo=utc) 126 | if url_date < file_time: 127 | # Do not download if older than the local one 128 | return 129 | logger.info(f"Downloading updated {name}") 130 | 131 | # Local file does not exist or remote one is newer 132 | if src.startswith("file://"): 133 | src2 = src.replace("file://", "") 134 | if not os.path.exists(src2): 135 | logger.info(f"Source location ({src2}) does not exist") 136 | else: 137 | logger.info(f"Copying local file from {src2}") 138 | shutil.copyfile(src2, dest) 139 | else: 140 | # urlretrieve(str(src), str(dest)) 141 | # Streaming, so we can iterate over the response. 142 | r = requests.get(src, stream=True, timeout=10, verify=False) 143 | # Total size in bytes. 
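# (added explanatory comments, not in the upstream file)
# The block below streams the response in 1 KB chunks into a temporary
# "output.bin", counts the bytes actually written, and only moves the file onto
# `dest` when the count matches the advertised Content-Length; note that
# verify=False disables TLS certificate verification for this artifact download.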
144 | total_size = int(r.headers.get("content-length", 0)) 145 | logger.info(f"Requesting {src}") 146 | block_size = 1024 147 | wrote = 0 148 | with open("output.bin", "wb") as f: 149 | for data in tqdm( 150 | r.iter_content(block_size), 151 | total=math.ceil(total_size // block_size), 152 | unit="KB", 153 | unit_scale=False, 154 | ): 155 | wrote += len(data) 156 | f.write(data) 157 | logger.info(f" total_size [{total_size}] / wrote [{wrote}] ") 158 | if total_size != 0 and wrote != total_size: 159 | logger.error( 160 | f"ERROR, something went wrong. Data could not be written. Expected to write {wrote} but wrote {total_size} instead" 161 | ) 162 | else: 163 | shutil.move("output.bin", dest) 164 | try: 165 | # Cleaning up 166 | os.remove("output.bin") 167 | except OSError: 168 | pass 169 | 170 | 171 | @task 172 | @cmdopts( 173 | [ 174 | ("geoserver=", "g", "The location of the geoserver build (.war file)."), 175 | ("jetty=", "j", "The location of the Jetty Runner (.jar file)."), 176 | ("force_exec=", "", "Force GeoServer Setup."), 177 | ] 178 | ) 179 | def setup_geoserver(options): 180 | """Prepare a testing instance of GeoServer.""" 181 | # only start if using Geoserver backend 182 | if "geonode.geoserver" not in INSTALLED_APPS: 183 | return 184 | if on_travis and not options.get("force_exec", False): 185 | """Will make use of the docker container for the Integration Tests""" 186 | return 187 | else: 188 | download_dir = path("downloaded") 189 | if not download_dir.exists(): 190 | download_dir.makedirs() 191 | geoserver_dir = path("geoserver") 192 | geoserver_bin = download_dir / os.path.basename( 193 | urlparse(dev_config["GEOSERVER_URL"]).path 194 | ) 195 | jetty_runner = download_dir / os.path.basename( 196 | urlparse(dev_config["JETTY_RUNNER_URL"]).path 197 | ) 198 | geoserver_data = download_dir / os.path.basename( 199 | urlparse(dev_config["DATA_DIR_URL"]).path 200 | ) 201 | grab( 202 | options.get("geoserver", dev_config["GEOSERVER_URL"]), 203 | geoserver_bin, 204 | "geoserver binary", 205 | ) 206 | grab( 207 | options.get("jetty", dev_config["JETTY_RUNNER_URL"]), 208 | jetty_runner, 209 | "jetty runner", 210 | ) 211 | grab( 212 | options.get("geoserver data", dev_config["DATA_DIR_URL"]), 213 | geoserver_data, 214 | "geoserver data-dir", 215 | ) 216 | 217 | if not geoserver_dir.exists(): 218 | geoserver_dir.makedirs() 219 | 220 | webapp_dir = geoserver_dir / "geoserver" 221 | if not webapp_dir: 222 | webapp_dir.makedirs() 223 | 224 | logger.info("extracting geoserver") 225 | z = zipfile.ZipFile(geoserver_bin, "r") 226 | z.extractall(webapp_dir) 227 | 228 | logger.info("extracting geoserver data dir") 229 | z = zipfile.ZipFile(geoserver_data, "r") 230 | z.extractall(geoserver_dir) 231 | 232 | _configure_data_dir() 233 | 234 | 235 | def _configure_data_dir(): 236 | try: 237 | config = path("geoserver/data/global.xml") 238 | with open(config) as f: 239 | xml = f.read() 240 | m = re.search("proxyBaseUrl>([^<]+)", xml) 241 | xml = f"{xml[:m.start(1)]}http://localhost:8080/geoserver{xml[m.end(1):]}" 242 | with open(config, "w") as f: 243 | f.write(xml) 244 | except Exception as e: 245 | print(e) 246 | 247 | try: 248 | config = path("geoserver/data/security/filter/geonode-oauth2/config.xml") 249 | with open(config) as f: 250 | xml = f.read() 251 | m = re.search("accessTokenUri>([^<]+)", xml) 252 | xml = f"{xml[:m.start(1)]}http://localhost:8000/o/token/{xml[m.end(1):]}" 253 | m = re.search("userAuthorizationUri>([^<]+)", xml) 254 | xml = ( 255 | 
f"{xml[:m.start(1)]}http://localhost:8000/o/authorize/{xml[m.end(1):]}" 256 | ) 257 | m = re.search("redirectUri>([^<]+)", xml) 258 | xml = f"{xml[:m.start(1)]}http://localhost:8080/geoserver/index.html{xml[m.end(1):]}" 259 | m = re.search("checkTokenEndpointUrl>([^<]+)", xml) 260 | xml = f"{xml[:m.start(1)]}http://localhost:8000/api/o/v4/tokeninfo/{xml[m.end(1):]}" 261 | m = re.search("logoutUri>([^<]+)", xml) 262 | xml = f"{xml[:m.start(1)]}http://localhost:8000/account/logout/{xml[m.end(1):]}" 263 | with open(config, "w") as f: 264 | f.write(xml) 265 | except Exception as e: 266 | print(e) 267 | 268 | try: 269 | config = path( 270 | "geoserver/data/security/role/geonode REST role service/config.xml" 271 | ) 272 | with open(config) as f: 273 | xml = f.read() 274 | m = re.search("baseUrl>([^<]+)", xml) 275 | xml = f"{xml[:m.start(1)]}http://localhost:8000{xml[m.end(1):]}" 276 | with open(config, "w") as f: 277 | f.write(xml) 278 | except Exception as e: 279 | print(e) 280 | 281 | 282 | @task 283 | def static(options): 284 | with pushd("geonode/static"): 285 | sh("grunt production") 286 | 287 | 288 | @task 289 | @needs( 290 | [ 291 | "setup_geoserver", 292 | ] 293 | ) 294 | def setup(options): 295 | """Get dependencies and prepare a GeoNode development environment.""" 296 | 297 | if MONITORING_ENABLED: 298 | updategeoip(options) 299 | 300 | info( 301 | "GeoNode development environment successfully set up." 302 | "If you have not set up an administrative account," 303 | ' please do so now. Use "paver start" to start up the server.' 304 | ) 305 | 306 | 307 | def grab_winfiles(url, dest, packagename): 308 | # Add headers 309 | headers = {"User-Agent": "Mozilla 5.10"} 310 | request = Request(url, None, headers) 311 | response = urlopen(request) 312 | with open(dest, "wb") as writefile: 313 | writefile.write(response.read()) 314 | 315 | 316 | @task 317 | def win_install_deps(options): 318 | """ 319 | Install all Windows Binary automatically 320 | This can be removed as wheels become available for these packages 321 | """ 322 | download_dir = path("downloaded").abspath() 323 | if not download_dir.exists(): 324 | download_dir.makedirs() 325 | win_packages = { 326 | # required by transifex-client 327 | "Py2exe": dev_config["WINDOWS"]["py2exe"], 328 | # the wheel 1.9.4 installs but pycsw wants 1.9.3, which fails to compile 329 | # when pycsw bumps their pyproj to 1.9.4 this can be removed. 330 | "PyProj": dev_config["WINDOWS"]["pyproj"], 331 | "lXML": dev_config["WINDOWS"]["lxml"], 332 | } 333 | failed = False 334 | for package, url in win_packages.items(): 335 | tempfile = download_dir / os.path.basename(url) 336 | logger.info(f"Installing file ... {tempfile}") 337 | grab_winfiles(url, tempfile, package) 338 | try: 339 | easy_install.main([tempfile]) 340 | except Exception as e: 341 | failed = True 342 | logger.error("install failed with error: ", e) 343 | os.remove(tempfile) 344 | if failed and sys.maxsize > 2**32: 345 | logger.error("64bit architecture is not currently supported") 346 | logger.error("try finding the 64 binaries for py2exe, and pyproj") 347 | elif failed: 348 | logger.error("install failed for py2exe, and/or pyproj") 349 | else: 350 | print( 351 | "Windows dependencies now complete. 
Run pip install -e geonode --use-mirrors" 352 | ) 353 | 354 | 355 | @task 356 | @cmdopts([("version=", "v", "Legacy GeoNode version of the existing database.")]) 357 | def upgradedb(options): 358 | """ 359 | Add 'fake' data migrations for existing tables from legacy GeoNode versions 360 | """ 361 | version = options.get("version") 362 | if version in {"1.1", "1.2"}: 363 | sh("python -W ignore manage.py migrate maps 0001 --fake") 364 | sh("python -W ignore manage.py migrate avatar 0001 --fake") 365 | elif version is None: 366 | print("Please specify your GeoNode version") 367 | else: 368 | print(f"Upgrades from version {version} are not yet supported.") 369 | 370 | 371 | @task 372 | @cmdopts([("settings=", "s", "Specify custom DJANGO_SETTINGS_MODULE")]) 373 | def updategeoip(options): 374 | """ 375 | Update geoip db 376 | """ 377 | if MONITORING_ENABLED: 378 | settings = options.get("settings", "") 379 | if settings and "DJANGO_SETTINGS_MODULE" not in settings: 380 | settings = f"DJANGO_SETTINGS_MODULE={settings}" 381 | 382 | sh(f"{settings} python -W ignore manage.py updategeoip -o") 383 | 384 | 385 | @task 386 | @cmdopts([("settings=", "s", "Specify custom DJANGO_SETTINGS_MODULE")]) 387 | def sync(options): 388 | """ 389 | Run the migrate and migrate management commands to create and migrate a DB 390 | """ 391 | settings = options.get("settings", "") 392 | if settings and "DJANGO_SETTINGS_MODULE" not in settings: 393 | settings = f"DJANGO_SETTINGS_MODULE={settings}" 394 | 395 | sh(f"{settings} python -W ignore manage.py makemigrations --noinput") 396 | sh(f"{settings} python -W ignore manage.py migrate --noinput") 397 | sh(f"{settings} python -W ignore manage.py loaddata sample_admin.json") 398 | sh(f"{settings} python -W ignore manage.py loaddata default_oauth_apps.json") 399 | sh(f"{settings} python -W ignore manage.py loaddata initial_data.json") 400 | sh(f"{settings} python -W ignore manage.py set_all_datasets_alternate") 401 | sh(f"{settings} python -W ignore manage.py collectstatic --noinput") 402 | 403 | 404 | @task 405 | def package(options): 406 | """ 407 | Creates a tarball to use for building the system elsewhere 408 | """ 409 | import tarfile 410 | import geonode 411 | 412 | version = geonode.get_version() 413 | # Use GeoNode's version for the package name. 414 | pkgname = f"GeoNode-{version}-all" 415 | 416 | # Create the output directory. 417 | out_pkg = path(pkgname) 418 | out_pkg_tar = path(f"{pkgname}.tar.gz") 419 | 420 | # Create a distribution in zip format for the geonode python package. 421 | dist_dir = path("dist") 422 | dist_dir.rmtree() 423 | sh("python setup.py sdist --formats=zip") 424 | 425 | with pushd("package"): 426 | # Delete old tar files in that directory 427 | for f in glob.glob("GeoNode*.tar.gz"): 428 | old_package = path(f) 429 | if old_package != out_pkg_tar: 430 | old_package.remove() 431 | 432 | if out_pkg_tar.exists(): 433 | info(f"There is already a package for version {version}") 434 | return 435 | 436 | # Clean anything that is in the oupout package tree. 437 | out_pkg.rmtree() 438 | out_pkg.makedirs() 439 | 440 | support_folder = path("support") 441 | install_file = path("install.sh") 442 | 443 | # And copy the default files from the package folder. 444 | justcopy(support_folder, out_pkg / "support") 445 | justcopy(install_file, out_pkg) 446 | 447 | geonode_dist = path("..") / "dist" / f"GeoNode-{version}.zip" 448 | justcopy(geonode_dist, out_pkg) 449 | 450 | # Create a tar file with all files in the output package folder. 
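# (added explanatory comments, not in the upstream file)
# "w:gz" writes a gzip-compressed tarball; every file under the temporary
# package directory is added with its on-disk path, and the top-level README
# is stored inside the archive as <package name>/README.rst via the arcname argument.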
451 | tar = tarfile.open(out_pkg_tar, "w:gz") 452 | for file in out_pkg.walkfiles(): 453 | tar.add(file) 454 | 455 | # Add the README with the license and important links to documentation. 456 | tar.add("README", arcname=f"{out_pkg}/README.rst") 457 | tar.close() 458 | 459 | # Remove all the files in the temporary output package directory. 460 | out_pkg.rmtree() 461 | 462 | # Report the info about the new package. 463 | info(f"{out_pkg_tar.abspath()} created") 464 | 465 | 466 | @task 467 | @needs(["start_geoserver", "start_django"]) 468 | @cmdopts( 469 | [ 470 | ("bind=", "b", "Bind server to provided IP address and port number."), 471 | ("java_path=", "j", "Full path to java install for Windows"), 472 | ("foreground", "f", "Do not run in background but in foreground"), 473 | ("settings=", "s", "Specify custom DJANGO_SETTINGS_MODULE"), 474 | ], 475 | share_with=["start_django", "start_geoserver"], 476 | ) 477 | def start(options): 478 | """ 479 | Start GeoNode (Django, GeoServer & Client) 480 | """ 481 | info("GeoNode is now available.") 482 | 483 | 484 | @task 485 | def stop_django(options): 486 | """ 487 | Stop the GeoNode Django application 488 | """ 489 | if ASYNC_SIGNALS: 490 | kill("python", "celery") 491 | kill("celery", "worker") 492 | kill("python", "runserver") 493 | kill("python", "runmessaging") 494 | 495 | 496 | @task 497 | @cmdopts([("force_exec=", "", "Force GeoServer Stop.")]) 498 | def stop_geoserver(options): 499 | """ 500 | Stop GeoServer 501 | """ 502 | # we use docker-compose for integration tests 503 | if on_travis and not options.get("force_exec", False): 504 | return 505 | 506 | # only start if using Geoserver backend 507 | if "geonode.geoserver" not in INSTALLED_APPS: 508 | return 509 | kill("java", "geoserver") 510 | 511 | # Kill process. 512 | try: 513 | # proc = subprocess.Popen("ps -ef | grep -i -e '[j]ava\|geoserver' | 514 | # awk '{print $2}'", 515 | proc = subprocess.Popen( 516 | "ps -ef | grep -i -e 'geoserver' | awk '{print $2}'", 517 | shell=True, 518 | stdout=subprocess.PIPE, 519 | ) 520 | for pid in map(int, proc.stdout): 521 | info(f"Stopping geoserver (process number {pid})") 522 | os.kill(pid, signal.SIGKILL) 523 | 524 | # Check if the process that we killed is alive. 
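# (added explanatory comments, not in the upstream file)
# psutil.wait_procs() blocks for up to 30 seconds waiting for that PID to
# terminate and returns the (gone, alive) lists; any process still alive after
# the timeout is force-killed with p.kill().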
525 | killed, alive = psutil.wait_procs([psutil.Process(pid=pid)], timeout=30) 526 | for p in alive: 527 | p.kill() 528 | except Exception as e: 529 | info(e) 530 | 531 | 532 | @task 533 | @needs( 534 | [ 535 | "stop_geoserver", 536 | ] 537 | ) 538 | def stop(options): 539 | """ 540 | Stop GeoNode 541 | """ 542 | # windows needs to stop the geoserver first b/c we can't tell which python 543 | # is running, so we kill everything 544 | info("Stopping GeoNode ...") 545 | stop_django(options) 546 | 547 | 548 | @task 549 | @cmdopts([("bind=", "b", "Bind server to provided IP address and port number.")]) 550 | def start_django(options): 551 | """ 552 | Start the GeoNode Django application 553 | """ 554 | settings = options.get("settings", "") 555 | if settings and "DJANGO_SETTINGS_MODULE" not in settings: 556 | settings = f"DJANGO_SETTINGS_MODULE={settings}" 557 | bind = options.get("bind", "0.0.0.0:8000") 558 | port = bind.split(":")[1] 559 | foreground = "" if options.get("foreground", False) else "&" 560 | sh(f"{settings} python -W ignore manage.py runserver {bind} {foreground}") 561 | 562 | if ASYNC_SIGNALS: 563 | sh( 564 | f"{settings} celery -A geonode.celery_app:app worker --autoscale=20,10 --without-gossip --without-mingle -Ofair -B -E \ 565 | --statedb=/tmp/worker.state --scheduler={CELERY_BEAT_SCHEDULER} --loglevel=DEBUG \ 566 | --concurrency=10 --max-tasks-per-child=10 -n worker1@%h -f celery.log {foreground}" 567 | ) 568 | sh(f"{settings} python -W ignore manage.py runmessaging {foreground}") 569 | 570 | # wait for Django to start 571 | started = waitfor(f"http://localhost:{port}") 572 | if not started: 573 | info("Django never started properly or timed out.") 574 | sys.exit(1) 575 | 576 | 577 | @task 578 | def start_messaging(options): 579 | """ 580 | Start the GeoNode messaging server 581 | """ 582 | settings = options.get("settings", "") 583 | if settings and "DJANGO_SETTINGS_MODULE" not in settings: 584 | settings = f"DJANGO_SETTINGS_MODULE={settings}" 585 | foreground = "" if options.get("foreground", False) else "&" 586 | sh(f"{settings} python -W ignore manage.py runmessaging {foreground}") 587 | 588 | 589 | @task 590 | @cmdopts( 591 | [ 592 | ("java_path=", "j", "Full path to java install for Windows"), 593 | ("force_exec=", "", "Force GeoServer Start."), 594 | ] 595 | ) 596 | def start_geoserver(options): 597 | """ 598 | Start GeoServer with GeoNode extensions 599 | """ 600 | # we use docker-compose for integration tests 601 | if on_travis and not options.get("force_exec", False): 602 | return 603 | 604 | # only start if using Geoserver backend 605 | if "geonode.geoserver" not in INSTALLED_APPS: 606 | return 607 | 608 | GEOSERVER_BASE_URL = OGC_SERVER["default"]["LOCATION"] 609 | url = GEOSERVER_BASE_URL 610 | 611 | if urlparse(GEOSERVER_BASE_URL).hostname != "localhost": 612 | logger.warning( 613 | "Warning: OGC_SERVER['default']['LOCATION'] hostname is not equal to 'localhost'" 614 | ) 615 | 616 | if not GEOSERVER_BASE_URL.endswith("/"): 617 | logger.error("Error: OGC_SERVER['default']['LOCATION'] does not end with a '/'") 618 | sys.exit(1) 619 | 620 | download_dir = path("downloaded").abspath() 621 | jetty_runner = download_dir / os.path.basename(dev_config["JETTY_RUNNER_URL"]) 622 | data_dir = path("geoserver/data").abspath() 623 | geofence_dir = path("geoserver/data/geofence").abspath() 624 | web_app = path("geoserver/geoserver").abspath() 625 | log_file = path("geoserver/jetty.log").abspath() 626 | config = path("scripts/misc/jetty-runner.xml").abspath() 627 | 
jetty_port = urlparse(GEOSERVER_BASE_URL).port 628 | 629 | import socket 630 | 631 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 632 | socket_free = True 633 | try: 634 | s.bind(("127.0.0.1", jetty_port)) 635 | except OSError as e: 636 | socket_free = False 637 | if e.errno == 98: 638 | info(f"Port {jetty_port} is already in use") 639 | else: 640 | info( 641 | f"Something else raised the socket.error exception while checking port {jetty_port}" 642 | ) 643 | print(e) 644 | finally: 645 | s.close() 646 | 647 | if socket_free: 648 | # @todo - we should not have set workdir to the datadir but a bug in geoserver 649 | # prevents geonode security from initializing correctly otherwise 650 | with pushd(data_dir): 651 | javapath = "java" 652 | if on_travis: 653 | sh( 654 | "sudo apt install -y openjdk-8-jre openjdk-8-jdk;" 655 | " sudo update-java-alternatives --set java-1.8.0-openjdk-amd64;" 656 | ' export JAVA_HOME=$(readlink -f /usr/bin/java | sed "s:bin/java::");' 657 | " export PATH=$JAVA_HOME'bin/java':$PATH;" 658 | ) 659 | # import subprocess 660 | # result = subprocess.run(['update-alternatives', '--list', 'java'], stdout=subprocess.PIPE) 661 | # javapath = result.stdout 662 | javapath = "/usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java" 663 | loggernullpath = os.devnull 664 | 665 | # checking if our loggernullpath exists and if not, reset it to 666 | # something manageable 667 | if loggernullpath == "nul": 668 | try: 669 | open("../../downloaded/null.txt", "w+").close() 670 | except OSError: 671 | print( 672 | "Chances are that you have Geoserver currently running. You " 673 | "can either stop all servers with paver stop or start only " 674 | "the django application with paver start_django." 675 | ) 676 | sys.exit(1) 677 | loggernullpath = "../../downloaded/null.txt" 678 | 679 | try: 680 | sh(("%(javapath)s -version") % locals()) 681 | except Exception: 682 | logger.warning( 683 | "Java was not found in your path. Trying some other options: " 684 | ) 685 | javapath_opt = None 686 | if os.environ.get("JAVA_HOME", None): 687 | logger.info("Using the JAVA_HOME environment variable") 688 | javapath_opt = os.path.join( 689 | os.path.abspath(os.environ["JAVA_HOME"]), "bin", "java.exe" 690 | ) 691 | elif options.get("java_path"): 692 | javapath_opt = options.get("java_path") 693 | else: 694 | logger.critical( 695 | "Paver cannot find java in the Windows Environment. " 696 | "Please provide the --java_path flag with your full path to " 697 | "java.exe e.g. 
--java_path=C:/path/to/java/bin/java.exe" 698 | ) 699 | sys.exit(1) 700 | # if there are spaces 701 | javapath = f'START /B "" "{javapath_opt}"' 702 | 703 | sh( 704 | "%(javapath)s -Xms512m -Xmx2048m -server -Dgwc.context.suffix=gwc -XX:+UseConcMarkSweepGC -XX:MaxPermSize=512m" 705 | " -DGEOSERVER_DATA_DIR=%(data_dir)s" 706 | " -DGEOSERVER_CSRF_DISABLED=true" 707 | " -Dgeofence.dir=%(geofence_dir)s" 708 | " -Djava.awt.headless=true" 709 | # ' -Dgeofence-ovr=geofence-datasource-ovr.properties' 710 | # workaround for JAI sealed jar issue and jetty classloader 711 | # ' -Dorg.eclipse.jetty.server.webapp.parentLoaderPriority=true' 712 | " -jar %(jetty_runner)s" 713 | " --port %(jetty_port)i" 714 | " --log %(log_file)s" 715 | " %(config)s" 716 | " > %(loggernullpath)s &" % locals() 717 | ) 718 | 719 | info(f"Starting GeoServer on {url}") 720 | 721 | # wait for GeoServer to start 722 | started = waitfor(url) 723 | info(f"The logs are available at {log_file}") 724 | 725 | if not started: 726 | # If applications did not start in time we will give the user a chance 727 | # to inspect them and stop them manually. 728 | info( 729 | "GeoServer never started properly or timed out." 730 | "It may still be running in the background." 731 | ) 732 | sys.exit(1) 733 | 734 | 735 | @task 736 | def test(options): 737 | """ 738 | Run GeoNode's Unit Test Suite 739 | """ 740 | if on_travis: 741 | if core_tests: 742 | _apps = GEONODE_CORE_APPS 743 | if internal_apps_tests: 744 | _apps = GEONODE_INTERNAL_APPS 745 | else: 746 | _apps = GEONODE_APPS 747 | 748 | _apps_to_test = [] 749 | for _app in _apps: 750 | if _app and len(_app) > 0 and "geonode" in _app: 751 | _apps_to_test.append(_app) 752 | if ( 753 | MONITORING_ENABLED 754 | and "geonode.monitoring" in INSTALLED_APPS 755 | and "geonode.monitoring" not in _apps_to_test 756 | ): 757 | _apps_to_test.append("geonode.monitoring") 758 | sh( 759 | f"{options.get('prefix')} manage.py test geonode.tests.smoke \ 760 | {('.tests '.join(_apps_to_test))}.tests --noinput {_keepdb} {_parallel}" 761 | ) 762 | 763 | 764 | @task 765 | @cmdopts([("local=", "l", "Set to True if running bdd tests locally")]) 766 | def test_bdd(options): 767 | """ 768 | Run GeoNode's BDD Test Suite 769 | """ 770 | local = str2bool(options.get("local", "false")) 771 | if local: 772 | call_task("reset_hard") 773 | 774 | call_task("setup") 775 | call_task("sync") 776 | if local: 777 | sh("sleep 30") 778 | 779 | info("GeoNode is now available, running the bdd tests now.") 780 | sh("py.test") 781 | 782 | 783 | @task 784 | def test_javascript(options): 785 | with pushd("geonode/static/geonode"): 786 | sh("./run-tests.sh") 787 | 788 | 789 | @task 790 | @cmdopts( 791 | [ 792 | ("name=", "n", "Run specific tests."), 793 | ("settings=", "s", "Specify custom DJANGO_SETTINGS_MODULE"), 794 | ("local=", "l", "Set to True if running bdd tests locally"), 795 | ] 796 | ) 797 | def test_integration(options): 798 | """ 799 | Run GeoNode's Integration test suite against the external apps 800 | """ 801 | prefix = options.get("prefix") 802 | local = str2bool(options.get("local", "false")) 803 | if local: 804 | call_task("stop_geoserver") 805 | _reset() 806 | 807 | name = options.get("name", None) 808 | settings = options.get("settings", "") 809 | success = False 810 | try: 811 | call_task("setup", options={"settings": settings, "force_exec": True}) 812 | 813 | if not settings: 814 | settings = "REUSE_DB=1 DJANGO_SETTINGS_MODULE=geonode.settings" 815 | 816 | if name and name in ( 817 | "geonode.tests.csw", 818 | 
"geonode.tests.integration", 819 | "geonode.geoserver.tests.integration", 820 | ): 821 | call_task("sync", options={"settings": settings}) 822 | if local: 823 | call_task( 824 | "start_geoserver", 825 | options={"settings": settings, "force_exec": True}, 826 | ) 827 | call_task("start", options={"settings": settings}) 828 | if integration_server_tests: 829 | call_task("setup_data", options={"settings": settings}) 830 | elif "geonode.geoserver" in INSTALLED_APPS: 831 | if local: 832 | sh("cp geonode/upload/tests/test_settings.py geonode/") 833 | settings = "geonode.test_settings" 834 | sh( 835 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py " 836 | "makemigrations --noinput" 837 | ) 838 | sh( 839 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py " 840 | "migrate --noinput" 841 | ) 842 | sh( 843 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py " 844 | "loaddata sample_admin.json" 845 | ) 846 | sh( 847 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py " 848 | "loaddata geonode/base/fixtures/default_oauth_apps.json" 849 | ) 850 | sh( 851 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py " 852 | "loaddata geonode/base/fixtures/initial_data.json" 853 | ) 854 | call_task("start_geoserver") 855 | bind = options.get("bind", "0.0.0.0:8000") 856 | foreground = "" if options.get("foreground", False) else "&" 857 | sh( 858 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py runmessaging {foreground}" 859 | ) 860 | sh( 861 | f"DJANGO_SETTINGS_MODULE={settings} python -W ignore manage.py runserver {bind} {foreground}" 862 | ) 863 | sh("sleep 30") 864 | settings = f"REUSE_DB=1 DJANGO_SETTINGS_MODULE={settings}" 865 | else: 866 | call_task("sync", options={"settings": settings}) 867 | 868 | live_server_option = "" 869 | info("Running the tests now...") 870 | sh( 871 | f"{settings} {prefix} manage.py test {name} -v 3 {_keepdb} --noinput {live_server_option}" 872 | ) 873 | 874 | except BuildFailure as e: 875 | info(f"Tests failed! {str(e)}") 876 | else: 877 | success = True 878 | finally: 879 | if local: 880 | stop(options) 881 | _reset() 882 | 883 | if not success: 884 | sys.exit(1) 885 | 886 | 887 | @task 888 | @needs( 889 | [ 890 | "start_geoserver", 891 | ] 892 | ) 893 | @cmdopts( 894 | [ 895 | ("coverage", "c", "use this flag to generate coverage during test runs"), 896 | ("local=", "l", "Set to True if running tests locally"), 897 | ] 898 | ) 899 | def run_tests(options): 900 | """ 901 | Executes the entire test suite. 
902 | """ 903 | if options.get("coverage"): 904 | prefix = 'coverage run --branch --source=geonode \ 905 | --omit="*/__init__*,*/test*,*/wsgi*,*/version*,*/migrations*,\ 906 | */search_indexes*,*/management/*,*/context_processors*,*/upload/*"' 907 | else: 908 | prefix = "python" 909 | local = options.get("local", "false") # travis uses default to false 910 | 911 | if ( 912 | not integration_tests 913 | and not integration_csw_tests 914 | and not integration_bdd_tests 915 | ): 916 | call_task("test", options={"prefix": prefix}) 917 | elif integration_tests: 918 | if integration_upload_tests: 919 | call_task( 920 | "test_integration", 921 | options={ 922 | "prefix": prefix, 923 | "name": "geonode.upload.tests.integration", 924 | "local": local, 925 | }, 926 | ) 927 | elif integration_monitoring_tests: 928 | call_task( 929 | "test_integration", 930 | options={ 931 | "prefix": prefix, 932 | "name": "geonode.monitoring.tests.integration", 933 | "local": local, 934 | }, 935 | ) 936 | elif integration_csw_tests: 937 | call_task( 938 | "test_integration", 939 | options={"prefix": prefix, "name": "geonode.tests.csw", "local": local}, 940 | ) 941 | elif integration_bdd_tests: 942 | call_task("test_bdd", options={"local": local}) 943 | elif integration_server_tests: 944 | call_task( 945 | "test_integration", 946 | options={ 947 | "prefix": prefix, 948 | "name": "geonode.geoserver.tests.integration", 949 | "local": local, 950 | }, 951 | ) 952 | else: 953 | call_task( 954 | "test_integration", 955 | options={ 956 | "prefix": prefix, 957 | "name": "geonode.tests.integration", 958 | "local": local, 959 | }, 960 | ) 961 | sh("flake8 geonode") 962 | 963 | 964 | @task 965 | @needs(["stop"]) 966 | def reset(options): 967 | """ 968 | Reset a development environment (Database, GeoServer & Catalogue) 969 | """ 970 | _reset() 971 | 972 | 973 | def _reset(): 974 | from geonode import settings 975 | 976 | path = os.path.join(settings.PROJECT_ROOT, "development.db") 977 | sh(f"rm -rf {path}") 978 | sh("rm -rf {{ project_name }}/development.db") 979 | sh("rm -rf {{ project_name }}/uploaded/*") 980 | _configure_data_dir() 981 | 982 | 983 | @needs(["reset"]) 984 | def reset_hard(options): 985 | """ 986 | Reset a development environment (Database, GeoServer & Catalogue) 987 | """ 988 | sh("git clean -dxf") 989 | 990 | 991 | @task 992 | @cmdopts( 993 | [ 994 | ("type=", "t", 'Import specific data type ("vector", "raster", "time")'), 995 | ("settings=", "s", "Specify custom DJANGO_SETTINGS_MODULE"), 996 | ] 997 | ) 998 | def setup_data(options): 999 | """ 1000 | Import sample data (from gisdata package) into GeoNode 1001 | """ 1002 | import gisdata 1003 | 1004 | ctype = options.get("type", None) 1005 | 1006 | data_dir = gisdata.GOOD_DATA 1007 | 1008 | if ctype in {"vector", "raster", "time"}: 1009 | data_dir = os.path.join(gisdata.GOOD_DATA, ctype) 1010 | 1011 | settings = options.get("settings", "") 1012 | if settings and "DJANGO_SETTINGS_MODULE" not in settings: 1013 | settings = f"DJANGO_SETTINGS_MODULE={settings}" 1014 | 1015 | from geonode import settings as geonode_settings 1016 | 1017 | if not os.path.exists(geonode_settings.MEDIA_ROOT): 1018 | info("media root not available, creating...") 1019 | os.makedirs(geonode_settings.MEDIA_ROOT, exist_ok=True) 1020 | 1021 | sh( 1022 | f"{settings} python -W ignore manage.py importlayers -v2 -hh {geonode_settings.SITEURL} {data_dir}" 1023 | ) 1024 | 1025 | 1026 | @needs(["package"]) 1027 | @cmdopts( 1028 | [ 1029 | ("key=", "k", "The GPG key to sign the package"), 1030 | 
("ppa=", "p", "PPA this package should be published to."), 1031 | ] 1032 | ) 1033 | def deb(options): 1034 | """ 1035 | Creates debian packages. 1036 | 1037 | Example uses: 1038 | paver deb 1039 | paver deb -k 12345 1040 | paver deb -k 12345 -p geonode/testing 1041 | """ 1042 | key = options.get("key", None) 1043 | ppa = options.get("ppa", None) 1044 | 1045 | version, simple_version = versions() 1046 | 1047 | info(f"Creating package for GeoNode version {version}") 1048 | 1049 | # Get rid of any uncommitted changes to debian/changelog 1050 | info("Getting rid of any uncommitted changes in debian/changelog") 1051 | sh("git checkout package/debian/changelog") 1052 | 1053 | # Workaround for git-dch bug 1054 | # http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=594580 1055 | sh(f"rm -rf {os.path.realpath('package')}/.git") 1056 | sh(f"ln -s {os.path.realpath('.git')} {os.path.realpath('package')}") 1057 | 1058 | with pushd("package"): 1059 | # Install requirements 1060 | # sh('sudo apt-get -y install debhelper devscripts git-buildpackage') 1061 | 1062 | # sh(('git-dch --spawn-editor=snapshot --git-author --new-version=%s' 1063 | # ' --id-length=6 --ignore-branch --release' % (simple_version))) 1064 | # In case you publish from Ubuntu Xenial (git-dch is removed from upstream) 1065 | # use the following line instead: 1066 | # sh(('gbp dch --spawn-editor=snapshot --git-author --new-version=%s' 1067 | # ' --id-length=6 --ignore-branch --release' % (simple_version))) 1068 | distribution = "bionic" 1069 | # sh(('gbp dch --distribution=%s --force-distribution --spawn-editor=snapshot --git-author --new-version=%s' 1070 | # ' --id-length=6 --ignore-branch --release' % (distribution, simple_version))) 1071 | 1072 | deb_changelog = path("debian") / "changelog" 1073 | for idx, line in enumerate(fileinput.input([deb_changelog], inplace=True)): 1074 | if idx == 0: 1075 | logger.info( 1076 | f"geonode ({simple_version}) {distribution}; urgency=high", end="" 1077 | ) 1078 | else: 1079 | print(line.replace("urgency=medium", "urgency=high"), end="") 1080 | 1081 | # Revert workaround for git-dhc bug 1082 | sh("rm -rf .git") 1083 | 1084 | if key is None and ppa is None: 1085 | print("A local installable package") 1086 | sh("debuild -uc -us -A") 1087 | elif key is None and ppa is not None: 1088 | print("A sources package, signed by daemon") 1089 | sh("debuild -S") 1090 | elif key is not None and ppa is None: 1091 | print("A signed installable package") 1092 | sh(f"debuild -k{key} -A") 1093 | elif key is not None and ppa is not None: 1094 | print("A signed, source package") 1095 | sh(f"debuild -k{key} -S") 1096 | 1097 | if ppa is not None: 1098 | sh(f"dput ppa:{ppa} geonode_{simple_version}_source.changes") 1099 | 1100 | 1101 | @task 1102 | def publish(options): 1103 | if "GPG_KEY_GEONODE" in os.environ: 1104 | key = os.environ["GPG_KEY_GEONODE"] 1105 | else: 1106 | print("You need to set the GPG_KEY_GEONODE environment variable") 1107 | return 1108 | 1109 | if "PPA_GEONODE" in os.environ: 1110 | ppa = os.environ["PPA_GEONODE"] 1111 | else: 1112 | ppa = None 1113 | 1114 | call_task( 1115 | "deb", 1116 | options={ 1117 | "key": key, 1118 | "ppa": ppa, 1119 | # 'ppa': 'geonode/testing', 1120 | # 'ppa': 'geonode/unstable', 1121 | }, 1122 | ) 1123 | 1124 | version, simple_version = versions() 1125 | if ppa: 1126 | sh("git add package/debian/changelog") 1127 | sh(f'git commit -m "Updated changelog for version {version}"') 1128 | sh(f"git tag -f {version}") 1129 | sh(f"git push origin {version}") 1130 | sh(f"git tag -f 
debian/{simple_version}") 1131 | sh(f"git push origin debian/{simple_version}") 1132 | # sh('git push origin master') 1133 | sh("python setup.py sdist upload -r pypi") 1134 | 1135 | 1136 | def versions(): 1137 | import geonode 1138 | from geonode.version import get_git_changeset 1139 | 1140 | raw_version = geonode.__version__ 1141 | version = geonode.get_version() 1142 | timestamp = get_git_changeset() 1143 | 1144 | major, minor, revision, stage, edition = raw_version 1145 | 1146 | branch = "dev" 1147 | 1148 | if stage == "final": 1149 | stage = "thefinal" 1150 | 1151 | if stage == "unstable": 1152 | tail = f"{branch}{timestamp}" 1153 | else: 1154 | tail = f"{stage}{edition}" 1155 | 1156 | simple_version = f"{major}.{minor}.{revision}+{tail}" 1157 | return version, simple_version 1158 | 1159 | 1160 | def kill(arg1, arg2): 1161 | """Stops a proces that contains arg1 and is filtered by arg2""" 1162 | from subprocess import Popen, PIPE 1163 | 1164 | # Wait until ready 1165 | t0 = time.time() 1166 | # Wait no more than these many seconds 1167 | time_out = 30 1168 | running = True 1169 | 1170 | while running and time.time() - t0 < time_out: 1171 | if os.name == "nt": 1172 | p = Popen( 1173 | f'tasklist | find "{arg1}"', 1174 | shell=True, 1175 | stdin=PIPE, 1176 | stdout=PIPE, 1177 | stderr=PIPE, 1178 | close_fds=False, 1179 | ) 1180 | else: 1181 | p = Popen( 1182 | f"ps aux | grep {arg1}", 1183 | shell=True, 1184 | stdin=PIPE, 1185 | stdout=PIPE, 1186 | stderr=PIPE, 1187 | close_fds=True, 1188 | ) 1189 | 1190 | lines = p.stdout.readlines() 1191 | 1192 | running = False 1193 | for line in lines: 1194 | # this kills all java.exe and python including self in windows 1195 | if (f"{arg2}" in str(line)) or (os.name == "nt" and f"{arg1}" in str(line)): 1196 | running = True 1197 | 1198 | # Get pid 1199 | fields = line.strip().split() 1200 | 1201 | info(f"Stopping {arg1} (process number {int(fields[1])})") 1202 | if os.name == "nt": 1203 | kill = f'taskkill /F /PID "{int(fields[1])}"' 1204 | else: 1205 | kill = f"kill -9 {int(fields[1])} 2> /dev/null" 1206 | os.system(kill) 1207 | 1208 | # Give it a little more time 1209 | time.sleep(1) 1210 | 1211 | if running: 1212 | _procs = "\n".join([str(_l).strip() for _l in lines]) 1213 | raise Exception(f"Could not stop {arg1}: " f"Running processes are\n{_procs}") 1214 | 1215 | 1216 | def waitfor(url, timeout=300): 1217 | started = False 1218 | for a in range(timeout): 1219 | try: 1220 | resp = urlopen(url) 1221 | except OSError: 1222 | pass 1223 | else: 1224 | if resp.getcode() == 200: 1225 | started = True 1226 | break 1227 | time.sleep(1) 1228 | return started 1229 | 1230 | 1231 | def _copytree(src, dst, symlinks=False, ignore=None): 1232 | if not os.path.exists(dst): 1233 | os.makedirs(dst, exist_ok=True) 1234 | for item in os.listdir(src): 1235 | s = os.path.join(src, item) 1236 | d = os.path.join(dst, item) 1237 | if os.path.isdir(s): 1238 | try: 1239 | shutil.copytree(s, d, symlinks, ignore) 1240 | except Exception: 1241 | pass 1242 | elif os.path.isfile(s): 1243 | shutil.copy2(s, d) 1244 | 1245 | 1246 | def justcopy(origin, target): 1247 | if os.path.isdir(origin): 1248 | shutil.rmtree(target, ignore_errors=True) 1249 | _copytree(origin, target) 1250 | elif os.path.isfile(origin): 1251 | if not os.path.exists(target): 1252 | os.makedirs(target, exist_ok=True) 1253 | shutil.copy(origin, target) 1254 | 1255 | 1256 | def str2bool(v): 1257 | if v and len(v) > 0: 1258 | return v.lower() in ("yes", "true", "t", "1") 1259 | else: 1260 | return False 1261 | 
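Note: the tasks defined in pavement.py above are driven through Paver. A typical local development cycle (a sketch, assuming a virtualenv with the project requirements installed and a reachable database; this ordering is not prescribed by the file itself) looks roughly like:

    paver setup        # setup_geoserver: download the GeoServer .war, Jetty Runner and data dir
    paver sync         # makemigrations/migrate and load the sample fixtures
    paver start        # start_geoserver + start_django (add -f to stay in the foreground)
    paver run_tests    # unit or integration suites, depending on the flags in geonode.settings
    paver stop         # stop Django, Celery and GeoServer

The wrappers paver.sh and paver_dev.sh.sample shown next simply source the container's .override_env (or a local .override_dev_env) before delegating to paver, so the same tasks can be run inside Docker or on the host.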
-------------------------------------------------------------------------------- /src/paver.sh: -------------------------------------------------------------------------------- 1 | . $HOME/.override_env 2 | paver $@ 3 | -------------------------------------------------------------------------------- /src/paver_dev.sh.sample: -------------------------------------------------------------------------------- 1 | set -a 2 | source ../.override_dev_env 3 | paver $@ 4 | -------------------------------------------------------------------------------- /src/project_name/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2017 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | 21 | import os 22 | 23 | __version__ = (4, 2, 0, "dev", 0) 24 | 25 | 26 | default_app_config = "{{ project_name }}.apps.AppConfig" 27 | 28 | 29 | def get_version(): 30 | import {{ project_name }}.version 31 | 32 | return {{ project_name }}.version.get_version(__version__) 33 | -------------------------------------------------------------------------------- /src/project_name/apps.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2018 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 
18 | # 19 | ######################################################################### 20 | import os 21 | from django.apps import AppConfig as BaseAppConfig 22 | 23 | 24 | def run_setup_hooks(*args, **kwargs): 25 | from django.conf import settings 26 | from .celeryapp import app as celeryapp 27 | 28 | LOCAL_ROOT = os.path.abspath(os.path.dirname(__file__)) 29 | settings.TEMPLATES[0]["DIRS"].insert(0, os.path.join(LOCAL_ROOT, "templates")) 30 | 31 | if celeryapp not in settings.INSTALLED_APPS: 32 | settings.INSTALLED_APPS += (celeryapp,) 33 | 34 | 35 | class AppConfig(BaseAppConfig): 36 | name = "{{ project_name }}" 37 | label = "{{ project_name }}" 38 | 39 | def ready(self): 40 | super(AppConfig, self).ready() 41 | run_setup_hooks() 42 | -------------------------------------------------------------------------------- /src/project_name/br/backup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ########################################################## 3 | # Run a backup 4 | # SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/backup.sh $BKP_FOLDER_NAME 5 | # - BKP_FOLDER_NAME: 6 | # Default value = backup_restore 7 | # Shared Backup Folder name. 8 | # The scripts assume it is located on "root" e.g.: /$BKP_FOLDER_NAME/ 9 | # 10 | # - SOURCE_URL: 11 | # Source Server URL, the one generating the "backup" file. 12 | # 13 | # - TARGET_URL: 14 | # Target Server URL, the one which must be synched. 15 | # 16 | # e.g.: 17 | # docker exec -it django4{{project_name}} sh -c 'SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/backup.sh $BKP_FOLDER_NAME' 18 | # ########################################################## 19 | 20 | # Exit script in case of error 21 | set -e 22 | 23 | echo "-----------------------------------------------------" 24 | echo "STARTING {{project_name}} BACKUP $(date)" 25 | echo "-----------------------------------------------------" 26 | 27 | if [ "$1" != "" ]; then 28 | BKP_FOLDER_NAME="$1" 29 | else 30 | BKP_FOLDER_NAME="backup_restore" 31 | fi 32 | 33 | cd /usr/src/{{project_name}}/ 34 | 35 | ./manage.sh backup -i -f -c $PWD/{{project_name}}/br/settings_docker.ini --backup-dir /$BKP_FOLDER_NAME/ 36 | 37 | BKP_FILE_LATEST=$(find /$BKP_FOLDER_NAME/*.zip -type f -exec stat -c '%Y %n' {} \; | sort -nr | awk 'NR==1,NR==1 {print $2}') 38 | BKP_FILE_NAME=$(echo $BKP_FILE_LATEST | tail -n 1 | grep -oP -m 1 "\/$BKP_FOLDER_NAME\/\K.*" | sed 's|.zip||') 39 | 40 | sed -i 's~$~ /'"$BKP_FOLDER_NAME"'/'"$BKP_FILE_NAME"'.zip~g' /$BKP_FOLDER_NAME/$BKP_FILE_NAME.md5 41 | 42 | echo "-----------------------------------------------------" 43 | cat /$BKP_FOLDER_NAME/$BKP_FILE_NAME.md5 44 | echo "\n" 45 | echo "-----------------------------------------------------" 46 | -------------------------------------------------------------------------------- /src/project_name/br/restore.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | # ########################################################## 3 | # Run a restore 4 | # SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/restore.sh $BKP_FOLDER_NAME 5 | # - BKP_FOLDER_NAME: 6 | # Default value = backup_restore 7 | # Shared Backup Folder name. 8 | # The scripts assume it is located on "root" e.g.: /$BKP_FOLDER_NAME/ 9 | # 10 | # - SOURCE_URL: 11 | # Source Server URL, the one generating the "backup" file. 12 | # 13 | # - TARGET_URL: 14 | # Target Server URL, the one which must be synched. 
15 | # 16 | # e.g.: 17 | # docker exec -it django4{{project_name}} sh -c 'SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/restore.sh $BKP_FOLDER_NAME' 18 | # ########################################################## 19 | 20 | # Exit script in case of error 21 | set -e 22 | 23 | echo "-----------------------------------------------------" 24 | echo "STARTING {{project_name}} RESTORE $(date)" 25 | echo "-----------------------------------------------------" 26 | 27 | if [ "$1" != "" ]; then 28 | BKP_FOLDER_NAME="$1" 29 | else 30 | BKP_FOLDER_NAME="backup_restore" 31 | fi 32 | 33 | if [ -z "$SOURCE_URL" ] || [ -z "$TARGET_URL" ] 34 | then 35 | echo "-----------------------------------------------------" 36 | echo "ERROR: SOURCE_URL and TARGET_URL environment variables not set" 37 | echo " e.g.: SOURCE_URL=test.webgis.adbpo.it TARGET_URL=staging.webgis.adbpo.it" 38 | echo "-----------------------------------------------------" 39 | exit 1 40 | else 41 | echo "$SOURCE_URL --> $TARGET_URL" 42 | fi 43 | 44 | cd /usr/src/{{project_name}}/ 45 | 46 | echo "-----------------------------------------------------" 47 | echo " 1. BACKUP $TARGET_URL" 48 | echo "-----------------------------------------------------" 49 | 50 | NEW_UUID=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1) 51 | mkdir /$BKP_FOLDER_NAME/$NEW_UUID/ 52 | SOURCE_URL=$SOURCE_URL TARGET_URL=$TARGET_URL ./{{project_name}}/br/backup.sh $BKP_FOLDER_NAME/$NEW_UUID 53 | 54 | echo "-----------------------------------------------------" 55 | echo " 2. CHECK BACKUP.md5 $TARGET_URL" 56 | echo "-----------------------------------------------------" 57 | 58 | BKP_FILE_LATEST=$(find /$BKP_FOLDER_NAME/$NEW_UUID/*.zip -type f -exec stat -c '%Y %n' {} \; | sort -nr | awk 'NR==1,NR==1 {print $2}') 59 | BKP_FILE_NAME=$(echo $BKP_FILE_LATEST | tail -n 1 | grep -oP -m 1 "\/$BKP_FOLDER_NAME\/$NEW_UUID\/\K.*" | sed 's|.zip||') 60 | 61 | if md5sum -c /$BKP_FOLDER_NAME/$NEW_UUID/$BKP_FILE_NAME.md5; then 62 | 63 | echo "-----------------------------------------------------" 64 | echo " - Original Backup of $TARGET_URL --> /$BKP_FOLDER_NAME/$NEW_UUID/" 65 | echo " 3. 
RESTORE FROM $SOURCE_URL" 66 | echo "-----------------------------------------------------" 67 | 68 | RECOVERY_FILE_NAME=$BKP_FILE_NAME 69 | BKP_FILE_LATEST=$(find /$BKP_FOLDER_NAME/*.zip -type f -exec stat -c '%Y %n' {} \; | sort -nr | awk 'NR==1,NR==1 {print $2}') 70 | BKP_FILE_NAME=$(echo $BKP_FILE_LATEST | tail -n 1 | grep -oP -m 1 "\/$BKP_FOLDER_NAME\/\K.*" | sed 's|.zip||') 71 | 72 | if md5sum -c /$BKP_FOLDER_NAME/$BKP_FILE_NAME.md5; then 73 | # The MD5 sum matched 74 | ./manage.sh restore -l -n -f --backup-file /$BKP_FOLDER_NAME/$BKP_FILE_NAME.zip --recovery-file /$BKP_FOLDER_NAME/$NEW_UUID/$RECOVERY_FILE_NAME.zip 75 | ./manage.sh migrate_baseurl -f --source-address=$SOURCE_URL --target-address=$TARGET_URL 76 | ./manage.sh create_tile_layers 77 | ./manage.sh set_all_datasets_metadata -d -i 78 | else 79 | # The MD5 sum didn't match 80 | echo "-----------------------------------------------------" 81 | echo " - Original Backup of $TARGET_URL --> /$BKP_FOLDER_NAME/$NEW_UUID/" 82 | echo "ERROR: The MD5 sum didn't match" 83 | echo "-----------------------------------------------------" 84 | exit 1 85 | fi 86 | else 87 | # The MD5 sum didn't match 88 | echo "-----------------------------------------------------" 89 | echo " - Original Backup of $TARGET_URL --> /$BKP_FOLDER_NAME/$NEW_UUID/" 90 | echo "ERROR: Could not save $TARGET_URL" 91 | echo "-----------------------------------------------------" 92 | exit 1 93 | fi 94 | 95 | echo "-----------------------------------------------------" 96 | echo " - Original Backup of $TARGET_URL --> /$BKP_FOLDER_NAME/$NEW_UUID/" 97 | echo "FINISHED {{project_name}} RESTORE $(date)" 98 | echo "-----------------------------------------------------" 99 | -------------------------------------------------------------------------------- /src/project_name/br/settings_docker.ini: -------------------------------------------------------------------------------- 1 | [database] 2 | pgdump = pg_dump 3 | pgrestore = pg_restore 4 | psql = psql 5 | 6 | [geoserver] 7 | datadir = /geoserver_data/data 8 | # datadir_exclude_file_path = {comma separated list of paths to exclude from geoserver catalog} e.g.: /data,/data/geonode,/geonode 9 | dumpvectordata = yes 10 | dumprasterdata = yes 11 | # data_dt_filter = {cmp_operator} {ISO8601} e.g. 
> 2019-04-05T24:00 12 | # data_layername_filter = {comma separated list of layernames, optionally with glob syntax} e.g.: tuscany_*,italy 13 | # data_layername_exclude_filter = {comma separated list of layernames, optionally with glob syntax} e.g.: tuscany_*,italy 14 | 15 | [fixtures] 16 | apps = contenttypes,auth,people,groups,account,guardian,admin,actstream,announcements,avatar,assets,base,documents,geoserver,invitations,pinax_notifications,harvesting,services,layers,maps,oauth2_provider,sites,socialaccount,taggit,tastypie,upload,user_messages,geonode_themes,geoapps,favorite,geonode_client 17 | dumps = contenttypes,auth,people,groups,account,guardian,admin,actstream,announcements,avatar,assets,base,documents,geoserver,invitations,pinax_notifications,harvesting,services,layers,maps,oauth2_provider,sites,socialaccount,taggit,tastypie,upload,user_messages,geonode_themes,geoapps,favorite,geonode_client 18 | -------------------------------------------------------------------------------- /src/project_name/celeryapp.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2017 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | 21 | from __future__ import absolute_import 22 | 23 | import os 24 | from celery import Celery 25 | 26 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings") 27 | 28 | app = Celery("{{ project_name }}") 29 | 30 | # Using a string here means the worker will not have to 31 | # pickle the object when using Windows.
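# With namespace="CELERY", Celery picks up its configuration from Django settings
# prefixed with "CELERY_" (for example, a CELERY_BROKER_URL setting would map to
# the broker_url option).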
32 | app.config_from_object("django.conf:settings", namespace="CELERY") 33 | app.autodiscover_tasks() 34 | 35 | 36 | @app.task(bind=True, name="{{ project_name }}.debug_task", queue="default") 37 | def debug_task(self): 38 | print("Request: {!r}".format(self.request)) 39 | -------------------------------------------------------------------------------- /src/project_name/locale/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/locale/.gitkeep -------------------------------------------------------------------------------- /src/project_name/settings.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2017 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | 21 | # Django settings for the GeoNode project. 22 | import os 23 | import ast 24 | 25 | from urllib.parse import urlparse, urlunparse 26 | from urllib.request import urlopen, Request 27 | # Load more settings from a file called local_settings.py if it exists 28 | try: 29 | from {{ project_name }}.local_settings import * 30 | # from geonode.local_settings import * 31 | except ImportError: 32 | from geonode.settings import * 33 | 34 | # 35 | # General Django development settings 36 | # 37 | PROJECT_NAME = "{{ project_name }}" 38 | 39 | # add trailing slash to site url. geoserver url will be relative to this 40 | if not SITEURL.endswith("/"): 41 | SITEURL = "{}/".format(SITEURL) 42 | 43 | SITENAME = os.getenv("SITENAME", "{{ project_name }}") 44 | 45 | # Defines the directory that contains the settings file as the LOCAL_ROOT 46 | # It is used for relative settings elsewhere. 47 | LOCAL_ROOT = os.path.abspath(os.path.dirname(__file__)) 48 | 49 | WSGI_APPLICATION = "{}.wsgi.application".format(PROJECT_NAME) 50 | 51 | # Language code for this installation. 
All choices can be found here: 52 | # http://www.i18nguy.com/unicode/language-identifiers.html 53 | LANGUAGE_CODE = os.getenv("LANGUAGE_CODE", "en") 54 | 55 | if PROJECT_NAME not in INSTALLED_APPS: 56 | INSTALLED_APPS += (PROJECT_NAME,) 57 | 58 | # Location of url mappings 59 | ROOT_URLCONF = os.getenv("ROOT_URLCONF", "{}.urls".format(PROJECT_NAME)) 60 | 61 | # Additional directories which hold static files 62 | # - Give priority to local geonode-project ones 63 | STATICFILES_DIRS = [ 64 | os.path.join(LOCAL_ROOT, "static"), 65 | ] + STATICFILES_DIRS 66 | 67 | # Location of locale files 68 | LOCALE_PATHS = (os.path.join(LOCAL_ROOT, "locale"),) + LOCALE_PATHS 69 | 70 | TEMPLATES[0]["DIRS"].insert(0, os.path.join(LOCAL_ROOT, "templates")) 71 | loaders = TEMPLATES[0]["OPTIONS"].get("loaders") or [ 72 | "django.template.loaders.filesystem.Loader", 73 | "django.template.loaders.app_directories.Loader", 74 | ] 75 | # loaders.insert(0, 'apptemplates.Loader') 76 | TEMPLATES[0]["OPTIONS"]["loaders"] = loaders 77 | TEMPLATES[0].pop("APP_DIRS", None) 78 | 79 | -------------------------------------------------------------------------------- /src/project_name/static/README: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | This directory is used to store static assets for your project. User media files 4 | (FileFields/ImageFields) are not stored here. 5 | 6 | The convention for this directory is: 7 | 8 | * css/ — stores CSS files 9 | * less/ - stores LESS files 10 | * js/ — stores JavaScript files 11 | * img/ — stores image files 12 | 13 | # Gulp 14 | 15 | Gulp can be used to automatically build css from LESS files. The gulp process will 16 | watch your LESS files for changes and recompile. To install gulp, do 17 | the following 2 steps: 18 | 19 | 1. cd into the project directory and install dependencies with `npm install` 20 | 2. 
install gulp command line globally with `sudo npm install -g gulp` 21 | -------------------------------------------------------------------------------- /src/project_name/static/css/site_base.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/css/site_base.css -------------------------------------------------------------------------------- /src/project_name/static/gulpfile.js: -------------------------------------------------------------------------------- 1 | var gulp = require('gulp'); 2 | var gutil = require('gulp-util'); 3 | var pkg = require('./package.json'); 4 | var concat = require('gulp-concat'); 5 | var less = require('gulp-less'); 6 | var del = require('del'); 7 | 8 | gulp.task('clean:site_base.css', [], function () { 9 | return del([ './css/site_base.css' ]); 10 | }); 11 | 12 | gulp.task('compile:site_base.css', [], function() { 13 | return gulp.src(["./less/site_base.less"], {base: './'}) 14 | .pipe(less({})) 15 | .pipe(concat("site_base.css")) 16 | .pipe(gulp.dest("./css")); 17 | }); 18 | 19 | gulp.task('watch', function() { 20 | gulp.watch("./less/**/*", ['clean:site_base.css', 'compile:site_base.css']); 21 | }); 22 | 23 | gulp.task('default', ['watch', 'clean:site_base.css', 'compile:site_base.css']); 24 | -------------------------------------------------------------------------------- /src/project_name/static/img/README: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/img/README -------------------------------------------------------------------------------- /src/project_name/static/img/bing_aerial_w_labels.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/img/bing_aerial_w_labels.png -------------------------------------------------------------------------------- /src/project_name/static/img/bing_canvas_dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/img/bing_canvas_dark.png -------------------------------------------------------------------------------- /src/project_name/static/img/bing_road_on_demand.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/img/bing_road_on_demand.png -------------------------------------------------------------------------------- /src/project_name/static/js/README: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/js/README -------------------------------------------------------------------------------- /src/project_name/static/less/site_base.less: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/GeoNode/geonode-project/5da24f8c1492e75c0e362895538caf8fca9edf81/src/project_name/static/less/site_base.less 
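Putting together the steps from the static/README and the tasks defined in gulpfile.js above, a minimal sketch of the LESS build workflow (the path is assumed from the generated project layout, and Node.js must already be installed):

    cd src/{{project_name}}/static   # directory holding package.json and gulpfile.js
    npm install                      # install the devDependencies (gulp, gulp-less, del, ...)
    sudo npm install -g gulp         # make the gulp CLI available globally
    gulp                             # default task: cleans and recompiles css/site_base.css, then keeps watching less/

Note that gulpfile.js uses the gulp 3.x task API (task names with dependency arrays), so a gulp 3.x toolchain as pinned in package.json is assumed.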
-------------------------------------------------------------------------------- /src/project_name/static/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "{{ project_name }}", 3 | "version": "0.0.1", 4 | "author": "GeoNode Developers", 5 | "description": "Static code and assets for {{ project_name }}", 6 | "contributors": [ 7 | { 8 | } 9 | ], 10 | "scripts": { 11 | "test": "jshint **.js" 12 | }, 13 | "license": "BSD", 14 | "private": "false", 15 | "dependencies": { 16 | }, 17 | "devDependencies": { 18 | "del": "*", 19 | "gulp": "^3.9.0", 20 | "gulp-util": "*", 21 | "gulp-concat": "*", 22 | "gulp-less": "*", 23 | "path": "*" 24 | } 25 | } 26 | -------------------------------------------------------------------------------- /src/project_name/templates/geonode-mapstore-client/_geonode_config.html: -------------------------------------------------------------------------------- 1 | {% extends 'geonode-mapstore-client/_geonode_config.html' %} 2 | {% block override_local_config %} 3 | 9 | {% endblock %} 10 | -------------------------------------------------------------------------------- /src/project_name/urls.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2017 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | 21 | from geonode.urls import urlpatterns 22 | 23 | """ 24 | # You can register your own urlpatterns here 25 | urlpatterns = [ 26 | url(r'^/?$', 27 | homepage, 28 | name='home'), 29 | ] + urlpatterns 30 | """ 31 | -------------------------------------------------------------------------------- /src/project_name/version.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import os 3 | import subprocess 4 | 5 | 6 | def get_version(version=None): 7 | "Returns a PEP 386-compliant version number from VERSION." 
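    # Illustrative calls (the 5-tuples below are assumed examples, not values
    # defined in this repository):
    #   get_version((4, 2, 0, "final", 0))     -> "4.2"
    #   get_version((4, 2, 1, "rc", 1))        -> "4.2.1rc1"
    #   get_version((4, 2, 0, "unstable", 0))  -> "4.2.dev<timestamp from get_git_changeset()>"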
8 | if version is None: 9 | from geonode import __version__ as version 10 | else: 11 | assert len(version) == 5 12 | assert version[3] in ("unstable", "beta", "rc", "final") 13 | 14 | # Now build the two parts of the version number: 15 | # main = X.Y[.Z] 16 | # sub = .devN - for pre-alpha releases 17 | # | {a|b|c}N - for alpha, beta and rc releases 18 | 19 | parts = 2 if version[2] == 0 else 3 20 | main = ".".join(str(x) for x in version[:parts]) 21 | 22 | sub = "" 23 | if version[3] == "unstable": 24 | git_changeset = get_git_changeset() 25 | if git_changeset: 26 | sub = ".dev%s" % git_changeset 27 | 28 | elif version[3] != "final": 29 | mapping = {"beta": "b", "rc": "rc"} 30 | sub = mapping[version[3]] + str(version[4]) 31 | 32 | return main + sub 33 | 34 | 35 | def get_git_changeset(): 36 | """Returns a numeric identifier of the latest git changeset. 37 | 38 | The result is the UTC timestamp of the changeset in YYYYMMDDHHMMSS format. 39 | This value isn't guaranteed to be unique, but collisions are very unlikely, 40 | so it's sufficient for generating the development version numbers. 41 | """ 42 | repo_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) 43 | git_show = subprocess.Popen( 44 | "git show --pretty=format:%ct --quiet HEAD", 45 | stdout=subprocess.PIPE, 46 | stderr=subprocess.PIPE, 47 | shell=True, 48 | cwd=repo_dir, 49 | universal_newlines=True, 50 | ) 51 | timestamp = git_show.communicate()[0].partition("\n")[0] 52 | try: 53 | timestamp = datetime.datetime.utcfromtimestamp(int(timestamp)) 54 | except ValueError: 55 | return None 56 | return timestamp.strftime("%Y%m%d%H%M%S") 57 | -------------------------------------------------------------------------------- /src/project_name/wsgi.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2017 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | 21 | """ 22 | WSGI config for {{ project_name }} project. 23 | 24 | This module contains the WSGI application used by Django's development server 25 | and any production WSGI deployments. It should expose a module-level variable 26 | named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover 27 | this application via the ``WSGI_APPLICATION`` setting. 28 | 29 | Usually you will have the standard Django WSGI application here, but it also 30 | might make sense to replace the whole Django WSGI application with a custom one 31 | that later delegates to the Django one. For example, you could introduce WSGI 32 | middleware here, or combine a Django application with an application of another 33 | framework. 
34 | 35 | """ 36 | import os 37 | 38 | os.environ.setdefault("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings") 39 | 40 | # This application object is used by any WSGI server configured to use this 41 | # file. This includes Django's development server, if the WSGI_APPLICATION 42 | # setting points here. 43 | from django.core.wsgi import get_wsgi_application 44 | 45 | application = get_wsgi_application() 46 | 47 | # Apply WSGI middleware here. 48 | # from helloworld.wsgi import HelloWorldApplication 49 | # application = HelloWorldApplication(application) 50 | -------------------------------------------------------------------------------- /src/requirements.txt: -------------------------------------------------------------------------------- 1 | -e git+https://github.com/GeoNode/geonode-mapstore-client.git@master#egg=django_geonode_mapstore_client 2 | -e git+https://github.com/GeoNode/geonode.git@master#egg=GeoNode 3 | -------------------------------------------------------------------------------- /src/setup.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2018 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | import os 21 | 22 | from distutils.core import setup 23 | 24 | from setuptools import find_packages 25 | 26 | 27 | def read(*rnames): 28 | return open(os.path.join(os.path.dirname(__file__), *rnames)).read() 29 | 30 | 31 | setup( 32 | name="{{ project_name }}", 33 | version="4.0.0", 34 | author="", 35 | author_email="", 36 | description="{{ project_name }}, based on GeoNode", 37 | long_description=(read("README.md")), 38 | # Full list of classifiers can be found at: 39 | # http://pypi.python.org/pypi?%3Aaction=list_classifiers 40 | classifiers=[ 41 | "Development Status :: 1 - Planning", 42 | ], 43 | license="GPL", 44 | keywords="{{ project_name }} geonode django", 45 | url="https://github.com/{{ project_name }}/{{ project_name }}", 46 | packages=find_packages(), 47 | dependency_links=["git+https://github.com/GeoNode/geonode.git#egg=geonode"], 48 | include_package_data=True, 49 | ) 50 | -------------------------------------------------------------------------------- /src/tasks.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | ######################################################################### 3 | # 4 | # Copyright (C) 2016 OSGeo 5 | # 6 | # This program is free software: you can redistribute it and/or modify 7 | # it under the terms of the GNU General Public License as published by 8 | # the Free Software Foundation, either version 3 of the License, or 9 | # (at your option) any later version. 
10 | # 11 | # This program is distributed in the hope that it will be useful, 12 | # but WITHOUT ANY WARRANTY; without even the implied warranty of 13 | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 14 | # GNU General Public License for more details. 15 | # 16 | # You should have received a copy of the GNU General Public License 17 | # along with this program. If not, see . 18 | # 19 | ######################################################################### 20 | import os 21 | import re 22 | import ast 23 | import json 24 | import time 25 | import docker 26 | import socket 27 | import ipaddress 28 | import logging 29 | import datetime 30 | from pathlib import Path 31 | 32 | from urllib.parse import urlparse, urlunparse 33 | from invoke import task 34 | 35 | BOOTSTRAP_IMAGE_CHEIP = "codenvy/che-ip:nightly" 36 | 37 | logger = logging.getLogger(__name__) 38 | 39 | 40 | @task 41 | def waitfordbs(ctx): 42 | print("**************************databases*******************************") 43 | db_host = os.getenv("DATABASE_HOST", "db") 44 | ctx.run(f"/usr/bin/wait-for-databases {db_host}", pty=True) 45 | 46 | 47 | @task 48 | def update(ctx): 49 | print("***************************setting env*********************************") 50 | ctx.run("env", pty=True) 51 | pub_host = _geonode_public_host() 52 | print(f"Public Hostname or IP is {pub_host}") 53 | pub_port = _geonode_public_port() 54 | print(f"Public PORT is {pub_port}") 55 | pub_protocol = "https" if pub_port == "443" else "http" 56 | if pub_protocol == "https" or pub_port == "80": 57 | pub_port = None 58 | db_url = _update_db_connstring() 59 | geodb_url = _update_geodb_connstring() 60 | geonode_docker_host = None 61 | for _cnt in range(1, 29): 62 | try: 63 | geonode_docker_host = str(socket.gethostbyname("geonode")) 64 | except Exception: 65 | print(f"...waiting for NGINX to pop-up...{_cnt}") 66 | time.sleep(1) 67 | 68 | override_env = "$HOME/.override_env" 69 | if os.path.exists(override_env): 70 | os.remove(override_env) 71 | else: 72 | print(f"Cannot delete the {override_env} file as it doesn't exist") 73 | 74 | if pub_port: 75 | siteurl = f"{pub_protocol}://{pub_host}:{pub_port}/" 76 | gs_pub_loc = f"http://{pub_host}:{pub_port}/geoserver/" 77 | else: 78 | siteurl = f"{pub_protocol}://{pub_host}/" 79 | gs_pub_loc = f"http://{pub_host}/geoserver/" 80 | envs = { 81 | "local_settings": str(_localsettings()), 82 | "siteurl": os.environ.get("SITEURL", siteurl), 83 | "geonode_docker_host": geonode_docker_host, 84 | "public_protocol": pub_protocol, 85 | "public_fqdn": str(pub_host) + str(f":{pub_port}" if pub_port else ""), 86 | "public_host": str(pub_host), 87 | "dburl": os.environ.get("DATABASE_URL", db_url), 88 | "geodburl": os.environ.get("GEODATABASE_URL", geodb_url), 89 | "static_root": os.environ.get("STATIC_ROOT", "/mnt/volumes/statics/static/"), 90 | "media_root": os.environ.get("MEDIA_ROOT", "/mnt/volumes/statics/uploaded/"), 91 | "geoip_path": os.environ.get("GEOIP_PATH", "/mnt/volumes/statics/geoip.db"), 92 | "monitoring": os.environ.get("MONITORING_ENABLED", False), 93 | "monitoring_host_name": os.environ.get("MONITORING_HOST_NAME", "geonode"), 94 | "monitoring_service_name": os.environ.get( 95 | "MONITORING_SERVICE_NAME", "local-geonode" 96 | ), 97 | "monitoring_data_ttl": os.environ.get("MONITORING_DATA_TTL", 7), 98 | "geonode_geodb_passwd": os.environ.get( 99 | "GEONODE_GEODATABASE_PASSWORD", "geonode_data" 100 | ), 101 | "default_backend_datastore": os.environ.get( 102 | "DEFAULT_BACKEND_DATASTORE", "datastore" 103
| ), 104 | "geonode_db_passwd": os.environ.get("GEONODE_DATABASE_PASSWORD", "geonode"), 105 | "geonode_geodb": os.environ.get("GEONODE_GEODATABASE", "geonode_data"), 106 | "db_url": os.environ.get( 107 | "DATABASE_URL", "postgis://geonode:geonode@db:5432/geonode" 108 | ), 109 | "geodb_url": os.environ.get( 110 | "GEODATABASE_URL", "postgis://geonode:geonode@db:5432/geonode_data" 111 | ), 112 | "geonode_db": os.environ.get("GEONODE_DATABASE", "geonode"), 113 | "gs_loc": os.environ.get( 114 | "GEOSERVER_LOCATION", "http://geoserver:8080/geoserver/" 115 | ), 116 | "gs_web_ui_loc": os.environ.get("GEOSERVER_WEB_UI_LOCATION", gs_pub_loc), 117 | "gs_pub_loc": os.environ.get("GEOSERVER_PUBLIC_LOCATION", gs_pub_loc), 118 | "gs_admin_pwd": os.environ.get("GEOSERVER_ADMIN_PASSWORD", "geoserver"), 119 | "override_fn": override_env, 120 | } 121 | try: 122 | current_allowed = ast.literal_eval( 123 | os.getenv("ALLOWED_HOSTS") 124 | or "['{public_fqdn}', '{public_host}', 'localhost', 'django', 'geonode',]".format( 125 | **envs 126 | ) 127 | ) 128 | except ValueError: 129 | current_allowed = [] 130 | current_allowed.extend([str(pub_host), f"{pub_host}:{pub_port}"]) 131 | allowed_hosts = [str(c) for c in current_allowed] + ['"geonode"', '"django"'] 132 | 133 | ctx.run( 134 | "echo export DJANGO_SETTINGS_MODULE=\ 135 | {local_settings} >> {override_fn}".format( 136 | **envs 137 | ), 138 | pty=True, 139 | ) 140 | ctx.run( 141 | "echo export MONITORING_ENABLED=\ 142 | {monitoring} >> {override_fn}".format( 143 | **envs 144 | ), 145 | pty=True, 146 | ) 147 | ctx.run( 148 | "echo export MONITORING_HOST_NAME=\ 149 | {monitoring_host_name} >> {override_fn}".format( 150 | **envs 151 | ), 152 | pty=True, 153 | ) 154 | ctx.run( 155 | "echo export MONITORING_SERVICE_NAME=\ 156 | {monitoring_service_name} >> {override_fn}".format( 157 | **envs 158 | ), 159 | pty=True, 160 | ) 161 | ctx.run( 162 | "echo export MONITORING_DATA_TTL=\ 163 | {monitoring_data_ttl} >> {override_fn}".format( 164 | **envs 165 | ), 166 | pty=True, 167 | ) 168 | ctx.run( 169 | "echo export GEOIP_PATH=\ 170 | {geoip_path} >> {override_fn}".format( 171 | **envs 172 | ), 173 | pty=True, 174 | ) 175 | ctx.run( 176 | "echo export GEONODE_GEODATABASE_PASSWORD=\ 177 | {geonode_geodb_passwd} >> {override_fn}".format( 178 | **envs 179 | ), 180 | pty=True, 181 | ) 182 | ctx.run( 183 | "echo export DEFAULT_BACKEND_DATASTORE=\ 184 | {default_backend_datastore} >> {override_fn}".format( 185 | **envs 186 | ), 187 | pty=True, 188 | ) 189 | ctx.run( 190 | "echo export GEONODE_DATABASE_PASSWORD=\ 191 | {geonode_db_passwd} >> {override_fn}".format( 192 | **envs 193 | ), 194 | pty=True, 195 | ) 196 | ctx.run( 197 | "echo export GEONODE_GEODATABASE=\ 198 | {geonode_geodb} >> {override_fn}".format( 199 | **envs 200 | ), 201 | pty=True, 202 | ) 203 | ctx.run( 204 | "echo export DATABASE_URL=\ 205 | {db_url} >> {override_fn}".format( 206 | **envs 207 | ), 208 | pty=True, 209 | ) 210 | ctx.run( 211 | "echo export GEODATABASE_URL=\ 212 | {geodb_url} >> {override_fn}".format( 213 | **envs 214 | ), 215 | pty=True, 216 | ) 217 | ctx.run( 218 | "echo export GEONODE_DATABASE=\ 219 | {geonode_db} >> {override_fn}".format( 220 | **envs 221 | ), 222 | pty=True, 223 | ) 224 | ctx.run( 225 | "echo export GEOSERVER_LOCATION=\ 226 | {gs_loc} >> {override_fn}".format( 227 | **envs 228 | ), 229 | pty=True, 230 | ) 231 | ctx.run( 232 | "echo export GEOSERVER_WEB_UI_LOCATION=\ 233 | {gs_web_ui_loc} >> {override_fn}".format( 234 | **envs 235 | ), 236 | pty=True, 237 | ) 238 | ctx.run( 
239 | "echo export GEOSERVER_PUBLIC_LOCATION=\ 240 | {gs_pub_loc} >> {override_fn}".format( 241 | **envs 242 | ), 243 | pty=True, 244 | ) 245 | ctx.run( 246 | "echo export GEOSERVER_ADMIN_PASSWORD=\ 247 | {gs_admin_pwd} >> {override_fn}".format( 248 | **envs 249 | ), 250 | pty=True, 251 | ) 252 | ctx.run( 253 | "echo export SITEURL=\ 254 | {siteurl} >> {override_fn}".format( 255 | **envs 256 | ), 257 | pty=True, 258 | ) 259 | ctx.run( 260 | 'echo export ALLOWED_HOSTS=\ 261 | "\\"{}\\"" >> {override_fn}'.format( 262 | allowed_hosts, **envs 263 | ), 264 | pty=True, 265 | ) 266 | ctx.run( 267 | "echo export DATABASE_URL=\ 268 | {dburl} >> {override_fn}".format( 269 | **envs 270 | ), 271 | pty=True, 272 | ) 273 | ctx.run( 274 | "echo export GEODATABASE_URL=\ 275 | {geodburl} >> {override_fn}".format( 276 | **envs 277 | ), 278 | pty=True, 279 | ) 280 | ctx.run( 281 | "echo export STATIC_ROOT=\ 282 | {static_root} >> {override_fn}".format( 283 | **envs 284 | ), 285 | pty=True, 286 | ) 287 | ctx.run( 288 | "echo export MEDIA_ROOT=\ 289 | {media_root} >> {override_fn}".format( 290 | **envs 291 | ), 292 | pty=True, 293 | ) 294 | ctx.run( 295 | "echo export GEOIP_PATH=\ 296 | {geoip_path} >> {override_fn}".format( 297 | **envs 298 | ), 299 | pty=True, 300 | ) 301 | ctx.run( 302 | "echo export LOGIN_URL=\ 303 | {siteurl}account/login/ >> {override_fn}".format( 304 | **envs 305 | ), 306 | pty=True, 307 | ) 308 | ctx.run( 309 | "echo export LOGOUT_URL=\ 310 | {siteurl}account/logout/ >> {override_fn}".format( 311 | **envs 312 | ), 313 | pty=True, 314 | ) 315 | ctx.run( 316 | "echo export LOGIN_REDIRECT_URL=\ 317 | {siteurl} >> {override_fn}".format( 318 | **envs 319 | ), 320 | pty=True, 321 | ) 322 | ctx.run( 323 | "echo export LOGOUT_REDIRECT_URL=\ 324 | {siteurl} >> {override_fn}".format( 325 | **envs 326 | ), 327 | pty=True, 328 | ) 329 | ctx.run(f"source {override_env}", pty=True) 330 | print("****************************finalize env**********************************") 331 | ctx.run("env", pty=True) 332 | 333 | 334 | @task 335 | def migrations(ctx): 336 | print("**************************migrations*******************************") 337 | ctx.run( 338 | f"python manage.py migrate --noinput --settings={_localsettings()}", pty=True 339 | ) 340 | ctx.run( 341 | f"python manage.py migrate --noinput --settings={_localsettings()} --database=datastore", 342 | pty=True, 343 | ) 344 | try: 345 | ctx.run( 346 | f"python manage.py rebuild_index --noinput --settings={_localsettings()}", 347 | pty=True, 348 | ) 349 | except Exception: 350 | pass 351 | 352 | 353 | @task 354 | def statics(ctx): 355 | print("**************************statics*******************************") 356 | try: 357 | static_root = os.environ.get("STATIC_ROOT", "/mnt/volumes/statics/static/") 358 | media_root = os.environ.get("MEDIA_ROOT", "/mnt/volumes/statics/uploaded/") 359 | assets_root = os.environ.get("ASSETS_ROOT", "/mnt/volumes/statics/assets/") 360 | 361 | ctx.run(f"mkdir -pv {static_root} {media_root} {assets_root}") 362 | ctx.run( 363 | f"python manage.py collectstatic --noinput --settings={_localsettings()}", 364 | pty=True, 365 | ) 366 | except Exception: 367 | import traceback 368 | 369 | traceback.print_exc() 370 | 371 | 372 | @task 373 | def prepare(ctx): 374 | print("**********************prepare fixture***************************") 375 | ctx.run("rm -rf /tmp/default_oauth_apps_docker.json", pty=True) 376 | _prepare_oauth_fixture() 377 | ctx.run("rm -rf /tmp/default_site.json", pty=True) 378 | _prepare_site_fixture() 379 | 380 
| 381 | @task 382 | def fixtures(ctx): 383 | print("**************************fixtures********************************") 384 | ctx.run( 385 | f"python manage.py loaddata sample_admin \ 386 | --settings={_localsettings()}", 387 | pty=True, 388 | ) 389 | ctx.run( 390 | f"python manage.py loaddata /tmp/default_oauth_apps_docker.json \ 391 | --settings={_localsettings()}", 392 | pty=True, 393 | ) 394 | ctx.run( 395 | f"python manage.py loaddata /tmp/default_site.json \ 396 | --settings={_localsettings()}", 397 | pty=True, 398 | ) 399 | ctx.run( 400 | f"python manage.py loaddata initial_data.json \ 401 | --settings={_localsettings()}", 402 | pty=True, 403 | ) 404 | 405 | 406 | @task 407 | def collectstatic(ctx): 408 | print("************************static artifacts******************************") 409 | ctx.run( 410 | f"django-admin collectstatic --noinput \ 411 | --settings={_localsettings()}", 412 | pty=True, 413 | ) 414 | 415 | 416 | @task 417 | def monitoringfixture(ctx): 418 | if ast.literal_eval(os.environ.get("MONITORING_ENABLED", "False")): 419 | print("*******************monitoring fixture********************************") 420 | ctx.run("rm -rf /tmp/default_monitoring_apps_docker.json", pty=True) 421 | _prepare_monitoring_fixture() 422 | try: 423 | ctx.run( 424 | f"django-admin loaddata metric_data.json \ 425 | --settings={_localsettings()}", 426 | pty=True, 427 | ) 428 | ctx.run( 429 | f"django-admin loaddata notifications.json \ 430 | --settings={_localsettings()}", 431 | pty=True, 432 | ) 433 | ctx.run( 434 | f"django-admin loaddata /tmp/default_monitoring_apps_docker.json \ 435 | --settings={_localsettings()}", 436 | pty=True, 437 | ) 438 | except Exception as e: 439 | logger.error(f"ERROR installing monitoring fixture: {str(e)}") 440 | 441 | 442 | @task 443 | def updategeoip(ctx): 444 | print("**************************update geoip*******************************") 445 | if ast.literal_eval(os.environ.get("MONITORING_ENABLED", "False")): 446 | ctx.run(f"django-admin updategeoip --settings={_localsettings()}", pty=True) 447 | 448 | 449 | @task 450 | def updateadmin(ctx): 451 | print("***********************update admin details**************************") 452 | ctx.run("rm -rf /tmp/django_admin_docker.json", pty=True) 453 | _prepare_admin_fixture( 454 | os.environ.get("ADMIN_PASSWORD", "admin"), 455 | os.environ.get("ADMIN_EMAIL", "admin@example.org"), 456 | ) 457 | ctx.run( 458 | f"django-admin loaddata /tmp/django_admin_docker.json \ 459 | --settings={_localsettings()}", 460 | pty=True, 461 | ) 462 | 463 | 464 | @task 465 | def collectmetrics(ctx): 466 | print("************************collect metrics******************************") 467 | ctx.run( 468 | f"python -W ignore manage.py collect_metrics \ 469 | --settings={_localsettings()} -n -t xml", 470 | pty=True, 471 | ) 472 | 473 | 474 | @task 475 | def initialized(ctx): 476 | print("**************************init file********************************") 477 | static_root = os.environ.get("STATIC_ROOT", "/mnt/volumes/statics/static/") 478 | lockfile_dir = Path(static_root).parent # quite ugly, we're assuming such dir exists and is writable 479 | ctx.run(f"date > {lockfile_dir}/geonode_init.lock") 480 | 481 | 482 | def _docker_host_ip(): 483 | try: 484 | client = docker.from_env(version="1.24") 485 | ip_list = client.containers.run( 486 | BOOTSTRAP_IMAGE_CHEIP, network_mode="host" 487 | ).split("\n") 488 | except Exception: 489 | import traceback 490 | 491 | traceback.print_exc() 492 | ip_list = [ 493 | "127.0.0.1", 494 | ] 495 | if 
len(ip_list) > 1: 496 | print( 497 | f"Docker daemon is running on more than one \ 498 | address {ip_list}" 499 | ) 500 | print(f"Only the first address:{ip_list[0]} will be returned!") 501 | else: 502 | print( 503 | f"Docker daemon is running at the following \ 504 | address {ip_list[0]}" 505 | ) 506 | return ip_list[0] 507 | 508 | def _is_valid_ip(ip): 509 | try: 510 | ipaddress.IPv4Address(ip) 511 | return True 512 | except Exception as e: 513 | return False 514 | 515 | def _container_exposed_port(component, instname): 516 | port = "80" 517 | try: 518 | client = docker.from_env(version="1.24") 519 | ports_dict = json.dumps( 520 | [ 521 | c.attrs["Config"]["ExposedPorts"] 522 | for c in client.containers.list( 523 | filters={ 524 | "label": f"org.geonode.component={component}", 525 | "status": "running", 526 | } 527 | ) 528 | if str(instname) in c.name 529 | ][0] 530 | ) 531 | for key in json.loads(ports_dict): 532 | port = re.split("/tcp", key)[0] 533 | except Exception: 534 | import traceback 535 | 536 | traceback.print_exc() 537 | return port 538 | 539 | 540 | def _update_db_connstring(): 541 | connstr = os.getenv("DATABASE_URL", None) 542 | if not connstr: 543 | user = os.getenv("GEONODE_DATABASE_USER", "geonode") 544 | pwd = os.getenv("GEONODE_DATABASE_PASSWORD", "geonode") 545 | dbname = os.getenv("GEONODE_DATABASE", "geonode") 546 | dbhost = os.getenv("DATABASE_HOST", "db") 547 | dbport = os.getenv("DATABASE_PORT", 5432) 548 | connstr = f"postgis://{user}:{pwd}@{dbhost}:{dbport}/{dbname}" 549 | return connstr 550 | 551 | 552 | def _update_geodb_connstring(): 553 | geoconnstr = os.getenv("GEODATABASE_URL", None) 554 | if not geoconnstr: 555 | geouser = os.getenv("GEONODE_GEODATABASE_USER", "geonode_data") 556 | geopwd = os.getenv("GEONODE_GEODATABASE_PASSWORD", "geonode_data") 557 | geodbname = os.getenv("GEONODE_GEODATABASE", "geonode_data") 558 | dbhost = os.getenv("DATABASE_HOST", "db") 559 | dbport = os.getenv("DATABASE_PORT", 5432) 560 | geoconnstr = f"postgis://{geouser}:{geopwd}@{dbhost}:{dbport}/{geodbname}" 561 | return geoconnstr 562 | 563 | 564 | def _localsettings(): 565 | settings = os.getenv("DJANGO_SETTINGS_MODULE", "{{ project_name }}.settings") 566 | return settings 567 | 568 | 569 | def _gs_service_availability(url): 570 | import requests 571 | 572 | try: 573 | r = requests.request("get", url, verify=False) 574 | r.raise_for_status() # Raises an HTTPError if the status is 4xx or 5xx 575 | except (requests.exceptions.ConnectionError, requests.exceptions.Timeout) as e: 576 | logger.error(f"GeoServer connection error is {e}") 577 | return False 578 | except requests.exceptions.HTTPError as er: 579 | logger.error(f"GeoServer HTTP error is {er}") 580 | return False 581 | else: 582 | logger.info("GeoServer API is available!") 583 | return True 584 | 585 | 586 | def _geonode_public_host(): 587 | gn_pub_hostip = os.getenv("GEONODE_LB_HOST_IP", None) 588 | if not gn_pub_hostip: 589 | gn_pub_hostip = _docker_host_ip() 590 | return gn_pub_hostip 591 | 592 | 593 | def _geonode_public_host_ip(): 594 | gn_pub_hostip = os.getenv("GEONODE_LB_HOST_IP", None) 595 | if not gn_pub_hostip or not _is_valid_ip(gn_pub_hostip): 596 | gn_pub_hostip = _docker_host_ip() 597 | return gn_pub_hostip 598 | 599 | 600 | def _geonode_public_port(): 601 | gn_pub_port = os.getenv("GEONODE_LB_PORT", "") 602 | if not gn_pub_port: 603 | gn_pub_port = _container_exposed_port( 604 | "nginx", os.getenv("GEONODE_INSTANCE_NAME", "geonode") 605 | ) 606 | elif gn_pub_port in ("80", "443"): 607 | gn_pub_port = 
None 608 | return gn_pub_port 609 | 610 | 611 | def _prepare_oauth_fixture(): 612 | upurl = urlparse(os.environ["SITEURL"]) 613 | default_fixture = [ 614 | { 615 | "model": "oauth2_provider.application", 616 | "pk": 1001, 617 | "fields": { 618 | "skip_authorization": True, 619 | "created": "2018-05-31T10:00:31.661Z", 620 | "updated": "2018-05-31T11:30:31.245Z", 621 | "algorithm": "RS256", 622 | "redirect_uris": f"{urlunparse(upurl)}geoserver/index.html", 623 | "name": "GeoServer", 624 | "authorization_grant_type": "authorization-code", 625 | "client_type": "confidential", 626 | "client_id": str(os.environ["OAUTH2_CLIENT_ID"]), 627 | "client_secret": str(os.environ["OAUTH2_CLIENT_SECRET"]), 628 | "user": ["admin"], 629 | }, 630 | } 631 | ] 632 | with open("/tmp/default_oauth_apps_docker.json", "w") as fixturefile: 633 | json.dump(default_fixture, fixturefile) 634 | 635 | 636 | def _prepare_site_fixture(): 637 | upurl = urlparse(os.environ["SITEURL"]) 638 | default_fixture = [ 639 | { 640 | "model": "sites.site", 641 | "pk": 1, 642 | "fields": {"domain": str(upurl.hostname), "name": str(upurl.hostname)}, 643 | } 644 | ] 645 | with open("/tmp/default_site.json", "w") as fixturefile: 646 | json.dump(default_fixture, fixturefile) 647 | 648 | 649 | def _prepare_monitoring_fixture(): 650 | # upurl = urlparse(os.environ['SITEURL']) 651 | # net_scheme = upurl.scheme 652 | # net_loc = upurl.netloc 653 | pub_ip = _geonode_public_host() 654 | print(f"Public Hostname or IP is {pub_ip}") 655 | pub_port = _geonode_public_port() 656 | print(f"Public PORT is {pub_port}") 657 | try: 658 | geonode_ip = socket.gethostbyname("geonode") 659 | except Exception: 660 | geonode_ip = pub_ip 661 | try: 662 | geoserver_ip = socket.gethostbyname("geoserver") 663 | except Exception: 664 | geoserver_ip = pub_ip 665 | d = "1970-01-01 00:00:00" 666 | default_fixture = [ 667 | { 668 | "fields": { 669 | "active": True, 670 | "ip": str(geonode_ip), 671 | "name": str(os.environ["MONITORING_HOST_NAME"]), 672 | }, 673 | "model": "monitoring.host", 674 | "pk": 1, 675 | }, 676 | { 677 | "fields": {"active": True, "ip": str(geoserver_ip), "name": "geoserver"}, 678 | "model": "monitoring.host", 679 | "pk": 2, 680 | }, 681 | { 682 | "fields": { 683 | "name": str(os.environ["MONITORING_SERVICE_NAME"]), 684 | "url": str(os.environ["SITEURL"]), 685 | "notes": "", 686 | "last_check": d, 687 | "active": True, 688 | "host": 1, 689 | "check_interval": "00:01:00", 690 | "service_type": 1, 691 | }, 692 | "model": "monitoring.service", 693 | "pk": 1, 694 | }, 695 | { 696 | "fields": { 697 | "name": "geoserver-hostgeonode", 698 | "url": str(os.environ["SITEURL"]), 699 | "notes": "", 700 | "last_check": d, 701 | "active": True, 702 | "host": 1, 703 | "check_interval": "00:01:00", 704 | "service_type": 3, 705 | }, 706 | "model": "monitoring.service", 707 | "pk": 2, 708 | }, 709 | { 710 | "fields": { 711 | "name": "geoserver-hostgeoserver", 712 | "url": str(os.environ["GEOSERVER_PUBLIC_LOCATION"]), 713 | "notes": "", 714 | "last_check": d, 715 | "active": True, 716 | "host": 2, 717 | "check_interval": "00:01:00", 718 | "service_type": 4, 719 | }, 720 | "model": "monitoring.service", 721 | "pk": 3, 722 | }, 723 | { 724 | "fields": { 725 | "name": "default-geoserver", 726 | "url": "http://geoserver:8080/geoserver/", 727 | "notes": "", 728 | "last_check": d, 729 | "active": True, 730 | "host": 2, 731 | "check_interval": "00:01:00", 732 | "service_type": 2, 733 | }, 734 | "model": "monitoring.service", 735 | "pk": 4, 736 | }, 737 | ] 738 | with 
open("/tmp/default_monitoring_apps_docker.json", "w") as fixturefile: 739 | json.dump(default_fixture, fixturefile) 740 | 741 | 742 | def _prepare_admin_fixture(admin_password, admin_email): 743 | from django.contrib.auth.hashers import make_password 744 | 745 | d = datetime.datetime.now() 746 | mdext_date = f"{d.isoformat()[:23]}Z" 747 | default_fixture = [ 748 | { 749 | "fields": { 750 | "date_joined": mdext_date, 751 | "email": admin_email, 752 | "first_name": "", 753 | "groups": [], 754 | "is_active": True, 755 | "is_staff": True, 756 | "is_superuser": True, 757 | "last_login": mdext_date, 758 | "last_name": "", 759 | "password": make_password(admin_password), 760 | "user_permissions": [], 761 | "username": "admin", 762 | }, 763 | "model": "people.Profile", 764 | "pk": 1000, 765 | } 766 | ] 767 | with open("/tmp/django_admin_docker.json", "w") as fixturefile: 768 | json.dump(default_fixture, fixturefile) 769 | -------------------------------------------------------------------------------- /src/uwsgi.ini: -------------------------------------------------------------------------------- 1 | [uwsgi] 2 | # uwsgi-socket = 0.0.0.0:8000 3 | http-socket = 0.0.0.0:8000 4 | logto = /var/log/geonode.log 5 | # pidfile = /tmp/geonode.pid 6 | 7 | chdir = /usr/src/{{project_name}}/ 8 | module = {{project_name}}.wsgi:application 9 | 10 | strict = false 11 | master = true 12 | enable-threads = true 13 | vacuum = true ; Delete sockets during shutdown 14 | single-interpreter = true 15 | die-on-term = true ; Shutdown when receiving SIGTERM (default is respawn) 16 | need-app = true 17 | thunder-lock = true 18 | 19 | touch-reload = /usr/src/{{project_name}}/{{project_name}}/wsgi.py 20 | buffer-size = 32768 21 | 22 | harakiri = 600 ; forcefully kill workers after 600 seconds 23 | py-callos-afterfork = true ; allow workers to trap signals 24 | 25 | max-requests = 1000 ; Restart workers after this many requests 26 | max-worker-lifetime = 3600 ; Restart workers after this many seconds 27 | reload-on-rss = 2048 ; Restart workers after this much resident memory 28 | worker-reload-mercy = 60 ; How long to wait before forcefully killing workers 29 | 30 | cheaper-algo = busyness 31 | processes = 128 ; Maximum number of workers allowed 32 | cheaper = 8 ; Minimum number of workers allowed 33 | cheaper-initial = 16 ; Workers created at startup 34 | cheaper-overload = 1 ; Length of a cycle in seconds 35 | cheaper-step = 16 ; How many workers to spawn at a time 36 | 37 | cheaper-busyness-multiplier = 30 ; How many cycles to wait before killing workers 38 | cheaper-busyness-min = 20 ; Below this threshold, kill workers (if stable for multiplier cycles) 39 | cheaper-busyness-max = 70 ; Above this threshold, spawn new workers 40 | cheaper-busyness-backlog-alert = 16 ; Spawn emergency workers if more than this many requests are waiting in the queue 41 | cheaper-busyness-backlog-step = 2 ; How many emergency workers to create if there are too many requests in the queue 42 | 43 | # cron = -1 -1 -1 -1 -1 sh -c '/usr/src/{{project_name}}/manage.sh collect_metrics -n -t xml'; 44 | # cron = 0 0 -1 -1 -1 sh -c 'find /backup_restore/ -type f -mtime +30 -exec rm -f {} \;' 45 | # Remove backup files older than 30 days except the most recent 3 files (a backup is composed by 3 files) 46 | cron = 0 0 -1 -1 -1 sh -c 'find /backup_restore/ -maxdepth 1 -type f -mtime +30 -printf "%T@ %p\n" | sort -n | head -n -3 | awk "{ print $2 }" | xargs -r rm' 47 | cron = 0 0 -1 -1 -1 sh -c 'find /backup_restore/ -type d -ctime +30 -exec rm -rf {} \;' 
48 | -------------------------------------------------------------------------------- /src/wait-for-databases.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -e 4 | 5 | host="$1" 6 | shift 7 | 8 | until PGPASSWORD=${POSTGRES_PASSWORD} psql -h "$host" -U ${POSTGRES_USER} -P "pager=off" -c '\l'; do 9 | >&2 echo "Postgres is unavailable - sleeping" 10 | sleep 1 11 | done 12 | 13 | until PGPASSWORD=${GEONODE_DATABASE_PASSWORD} psql -h "$host" -U ${GEONODE_DATABASE} -d ${GEONODE_DATABASE} -P "pager=off" -c '\l'; do 14 | >&2 echo "${GEONODE_DATABASE} is unavailable - sleeping" 15 | sleep 1 16 | done 17 | 18 | until PGPASSWORD=${GEONODE_GEODATABASE_PASSWORD} psql -h "$host" -U ${GEONODE_GEODATABASE} -d ${GEONODE_GEODATABASE} -P "pager=off" -c '\l'; do 19 | >&2 echo "${GEONODE_GEODATABASE} is unavailable - sleeping" 20 | sleep 1 21 | done 22 | 23 | >&2 echo "GeoNode databases are up - executing command" 24 | --------------------------------------------------------------------------------