├── log └── .gitkeep ├── park_api ├── cities │ ├── __init__.py │ ├── Dresden.json.def │ ├── Hamburg.geojson │ ├── Frankfurt.geojson │ ├── Aalborg.py │ ├── Ingolstadt.py │ ├── Muenster.py │ ├── Sample_City.geojson │ ├── Konstanz.py │ ├── Rosenheim.py │ ├── Karlsruhe.py │ ├── Ulm.py │ ├── Koeln.py │ ├── Basel.py │ ├── Sample_City.py │ ├── Regensburg.py │ ├── Heidelberg.py │ ├── Zuerich.py │ ├── Freiburg.py │ ├── Dortmund.py │ ├── Mannheim.py │ ├── Aachen.py │ ├── Hanau.py │ ├── Luebeck.py │ ├── Heilbronn.py │ ├── Hamburg.py │ ├── Bonn.py │ ├── Limburg.py │ ├── Wiesbaden.py │ ├── Oldenburg.py │ ├── Frankfurt.py │ ├── Nuernberg.py │ ├── Aarhus.py │ ├── Heilbronn.geojson │ ├── Dresden.py │ ├── Limburg.geojson │ ├── Ulm.geojson │ ├── Magdeburg.py │ ├── Aarhus.geojson │ ├── Konstanz.geojson │ ├── Rosenheim.geojson │ ├── Muenster.geojson │ ├── Bonn.geojson │ ├── Oldenburg.geojson │ ├── Basel.geojson │ ├── Aalborg.geojson │ ├── Ingolstadt.geojson │ └── Hanau.geojson ├── structs.py ├── security.py ├── setupdb.py ├── server.py ├── db.py ├── crossdomain.py ├── timespan.py ├── dump.py ├── scraper.py ├── util.py ├── env.py └── geodata.py ├── image.jpg ├── tests ├── fixtures │ ├── aalborg.html │ ├── sample_city.html │ ├── rosenheim.json │ ├── ingolstadt.html │ ├── aarhus.json │ ├── heilbronn.html │ ├── luebeck.html │ └── oldenburg.html ├── helpers.py ├── test_scraper.py ├── validate-geojson.py └── test_cities.py ├── bin ├── parkapi-server ├── parkapi-scraper ├── parkapi-setupdb └── parkapi-dump ├── config ├── parkapi-scraper@.timer ├── parkapi-scraper@.service └── parkapi-server@.service ├── requirements.txt ├── .gitignore ├── .dockerignore ├── config_travis.ini ├── schema ├── db │ └── 0_init.py ├── meta_schema.json └── city_schema.json ├── Dockerfile ├── config_example.ini ├── .travis.yml ├── docker-compose.yml ├── entrypoint.sh ├── LICENSE ├── DEPLOYMENT.md ├── tools └── geojson_koeln.py ├── setup.py └── README.md /log/.gitkeep: 
-------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /park_api/cities/__init__.py: -------------------------------------------------------------------------------- 1 | __author__ = 'kilian' 2 | -------------------------------------------------------------------------------- /image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ubahnverleih/ParkAPI/master/image.jpg -------------------------------------------------------------------------------- /tests/fixtures/aalborg.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ubahnverleih/ParkAPI/master/tests/fixtures/aalborg.html -------------------------------------------------------------------------------- /park_api/cities/Dresden.json.def: -------------------------------------------------------------------------------- 1 | { 2 | "source":"", 3 | "public":"" 4 | } 5 | -------------------------------------------------------------------------------- /bin/parkapi-server: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from park_api.server import main 4 | 5 | if __name__ == "__main__": 6 | main() 7 | -------------------------------------------------------------------------------- /config/parkapi-scraper@.timer: -------------------------------------------------------------------------------- 1 | [Timer] 2 | OnBootSec=10min 3 | OnUnitActiveSec=5min 4 | 5 | [Install] 6 | WantedBy=multi-user.target 7 | -------------------------------------------------------------------------------- /bin/parkapi-scraper: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from park_api.scraper import main 4 | 5 | if __name__ == "__main__": 6 | main() 7 | 
-------------------------------------------------------------------------------- /bin/parkapi-setupdb: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from park_api.setupdb import main 4 | 5 | if __name__ == "__main__": 6 | main() 7 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4 2 | Flask 3 | requests 4 | feedparser 5 | pytz 6 | psycopg2 7 | yoyo-migrations 8 | requests-mock 9 | utm 10 | ddt 11 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea 2 | venv 3 | cities/__pycache__ 4 | config.ini 5 | __pycache__ 6 | cache 7 | *.pyc 8 | log/*.log 9 | ParkAPI.egg-info 10 | .tox 11 | .k8s 12 | -------------------------------------------------------------------------------- /bin/parkapi-dump: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | from sys import argv 4 | from park_api.dump import main, get_args 5 | 6 | if __name__ == "__main__": 7 | main(get_args()) 8 | -------------------------------------------------------------------------------- /park_api/structs.py: -------------------------------------------------------------------------------- 1 | from collections import namedtuple 2 | 3 | ServerConf = namedtuple('ServerConf', ['port', 'host', 'debug']) 4 | 5 | Coords = namedtuple('Coords', ['lng', 'lat']) 6 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | .idea 2 | venv 3 | cities/__pycache__ 4 | config.ini 5 | __pycache__ 6 | cache 7 | *.pyc 8 | log/*.log 9 | ParkAPI.egg-info 10 | .tox 11 | .dockerignore 12 | .gitignore 13 
| Dockerfile 14 | Jenkinsfile 15 | -------------------------------------------------------------------------------- /config_travis.ini: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | port = 5000 3 | host = ::1 4 | debug = true 5 | database_uri = postgresql://postgres@localhost:5432/park_api 6 | debug = false 7 | live_scrape = false 8 | 9 | [development] 10 | live_scrape = true 11 | 12 | [testing] 13 | 14 | [production] 15 | -------------------------------------------------------------------------------- /config/parkapi-scraper@.service: -------------------------------------------------------------------------------- 1 | [Service] 2 | User=%i 3 | WorkingDirectory=/home/%i/ParkAPI/current 4 | Environment=VIRTUAL_ENV="/home/%i/ParkAPI/current/venv" 5 | Environment=PATH="/home/%i/ParkAPI/current/venv/bin:$PATH" 6 | Environment=env=%i 7 | ExecStart=/usr/bin/env venv/bin/python bin/parkapi-scraper 8 | Restart=always 9 | -------------------------------------------------------------------------------- /park_api/security.py: -------------------------------------------------------------------------------- 1 | def file_is_allowed(file): 2 | t = file.endswith(".py") 3 | t &= "__Init__" not in file.title() 4 | t &= "Sample_City" not in file.title() 5 | t &= "Frankfurt" not in file.title() # See offenesdresden/ParkAPI#153 6 | t &= "Aalborg" not in file.title() # See offenesdresden/ParkAPI#212 7 | return t 8 | -------------------------------------------------------------------------------- /park_api/setupdb.py: -------------------------------------------------------------------------------- 1 | import os 2 | from yoyo import read_migrations, get_backend 3 | 4 | from park_api import env 5 | 6 | 7 | def main(): 8 | backend = get_backend(env.DATABASE_URI) 9 | migrations = read_migrations(os.path.join(env.APP_ROOT, "schema/db")) 10 | backend.apply_migrations(migrations) 11 | 12 | if __name__ == "__main__": 13 | main() 14 | 
-------------------------------------------------------------------------------- /config/parkapi-server@.service: -------------------------------------------------------------------------------- 1 | [Service] 2 | User=%i 3 | WorkingDirectory=/home/%i/ParkAPI/current 4 | Environment=VIRTUAL_ENV="/home/%i/ParkAPI/current/venv" 5 | Environment=PATH="/home/%i/ParkAPI/current/venv/bin:$PATH" 6 | Environment=env=%i 7 | ExecStart=/usr/bin/env venv/bin/python bin/parkapi-server 8 | Restart=Always 9 | 10 | [Install] 11 | WantedBy=multi-user.target 12 | -------------------------------------------------------------------------------- /tests/helpers.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | for m in sys.modules.keys(): 5 | if m.startswith("park_api"): 6 | raise Exception("Include helpers module before any park_api module") 7 | os.environ["env"] = "testing" 8 | 9 | TEST_ROOT = os.path.join(os.path.dirname(os.path.realpath(__file__))) 10 | 11 | # Add project to import path 12 | sys.path.append(str(os.path.join(TEST_ROOT, ".."))) 13 | -------------------------------------------------------------------------------- /schema/db/0_init.py: -------------------------------------------------------------------------------- 1 | from yoyo import step 2 | 3 | step(""" 4 | CREATE TABLE "public"."parkapi" ( 5 | "id" SERIAL, 6 | "timestamp_updated" TIMESTAMP NOT NULL, 7 | "timestamp_downloaded" TIMESTAMP NOT NULL, 8 | "city" TEXT NOT NULL,"data" JSON NOT NULL, 9 | PRIMARY KEY ("id")) 10 | TABLESPACE "pg_default"; 11 | """, 12 | "create index latest_scrape_index on parkapi (city, timestamp_downloaded DESC);" 13 | ) 14 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.4-alpine 2 | LABEL maintainer="Max Schorradt " 3 | 4 | ENV port=5000 5 | ENV host=0.0.0.0 6 | ENV 
database_uri=postgresql://parkapi:parkapi@postgres/parkapi 7 | ENV debug=false 8 | ENV live_scrape=false 9 | 10 | RUN apk add --no-cache postgresql-dev build-base git 11 | 12 | COPY . /app 13 | WORKDIR /app 14 | 15 | RUN pip install -e . 16 | 17 | EXPOSE 5000 18 | ENTRYPOINT ["sh", "/app/entrypoint.sh"] 19 | -------------------------------------------------------------------------------- /config_example.ini: -------------------------------------------------------------------------------- 1 | [DEFAULT] 2 | port = 5000 3 | host = ::1 4 | debug = false 5 | live_scrape = false 6 | database_uri = 7 | 8 | [development] 9 | database_uri = postgresql:///parkapi_development 10 | debug = true 11 | live_scrape = true 12 | 13 | [testing] 14 | database_uri = postgresql:///parkapi_testing 15 | 16 | [staging] 17 | database_uri = postgresql:///parkapi_staging 18 | 19 | [production] 20 | database_uri = postgresql://user:password@host/parkapi_production 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | language: python 3 | python: 4 | - "3.5" 5 | - "3.7" 6 | - "nightly" 7 | addons: 8 | postgresql: "9.4" 9 | matrix: 10 | allow_failures: 11 | - python: "nightly" 12 | cache: pip 13 | before_script: 14 | - psql -c 'create database park_api;' -U postgres 15 | - pip install -e . 
16 | - cp config_travis.ini config.ini 17 | - python bin/parkapi-setupdb 18 | script: 19 | - python tests/validate-geojson.py park_api/cities/*.geojson 20 | - python -m unittest discover tests 21 | -------------------------------------------------------------------------------- /schema/meta_schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema":"http://jkliemann.de/parkendd/media/meta_schema.json", 3 | "title":"ParkenDD JSON API", 4 | "description":"JSON meta information required by ParkenDD app", 5 | "type":"Object", 6 | "properties":{ 7 | "mail":{ 8 | "description":"Mail address of the server admin", 9 | "type":"string" 10 | }, 11 | "cities":{ 12 | "description":"list of available cities", 13 | "type":"array", 14 | "items":{ 15 | "description":"displayed city name", 16 | "type":"string" 17 | } 18 | } 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /tests/test_scraper.py: -------------------------------------------------------------------------------- 1 | import os 2 | import unittest 3 | import helpers 4 | import requests 5 | import requests_mock 6 | from park_api import env, scraper, db 7 | 8 | class ScraperTestCase(unittest.TestCase): 9 | def setUp(self): 10 | db.setup() 11 | @requests_mock.Mocker() 12 | def test_insert(self, mock): 13 | path = os.path.join(helpers.TEST_ROOT, "fixtures", "dresden.html") 14 | cities = env.supported_cities() 15 | module = cities["Dresden"] 16 | with open(path) as f: 17 | src = module.geodata.city.source 18 | mock.get(src, text=f.read()) 19 | scraper.scrape_city(module) 20 | -------------------------------------------------------------------------------- /park_api/cities/Hamburg.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 9.9957000, 9 
| 53.5558000 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Hamburg", 14 | "type": "city", 15 | "url": "http://www.geoportal-hamburg.de/Geoportal/geo-online/index.html", 16 | "source":"https://geodienste.hamburg.de/HH_WFS_Verkehr_opendata?service=WFS&request=GetFeature&VERSION=1.1.0&typename=verkehr_parkhaeuser", 17 | "active_support": true 18 | } 19 | }] 20 | } 21 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3' 2 | volumes: 3 | database_data: 4 | driver: local 5 | services: 6 | db: 7 | image: postgres:latest 8 | volumes: 9 | - database_data:/var/lib/postgresql/data 10 | environment: 11 | - POSTGRES_DB=parkapi 12 | - POSTGRES_USER=parkapi 13 | - POSTGRES_PASSWORD=parkapi 14 | api: 15 | build: 16 | context: . 17 | dockerfile: ./Dockerfile 18 | expose: 19 | - 5000 20 | ports: 21 | - 5000:5000 22 | volumes: 23 | - .:/usr/src/app/ 24 | links: 25 | - db 26 | environment: 27 | - PGHOST=db 28 | - PGDATABASE=parkapi 29 | - PGUSER=parkapi 30 | - PGPASS=parkapi 31 | -------------------------------------------------------------------------------- /entrypoint.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | if [ -z $port ]; then 4 | port="5000" 5 | fi 6 | 7 | if [ -z $host ]; then 8 | host="::1" 9 | fi 10 | 11 | if [ -z $debug ]; then 12 | debug="false" 13 | fi 14 | 15 | if [ -z $live_scrape ]; then 16 | live_scrape="false" 17 | fi 18 | 19 | if [ -z $database_uri ]; then 20 | database_uri="postgresql://$PGUSER:$PGPASS@$PGHOST/$PGDATABASE" 21 | fi 22 | 23 | echo " 24 | [DEFAULT] 25 | port = $port 26 | host = $host 27 | debug = $debug 28 | live_scrape = $live_scrape 29 | database_uri = $database_uri 30 | [development] 31 | port = $port 32 | host = $host 33 | debug = $debug 34 | live_scrape = $live_scrape 35 | database_uri = $database_uri 36 | " > /app/config.ini 37 | 
38 | python bin/parkapi-server 39 | -------------------------------------------------------------------------------- /park_api/cities/Frankfurt.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [8.683333, 50.116667] 8 | }, 9 | "properties": { 10 | "name": "Frankfurt am Main", 11 | "type": "city", 12 | "url": "http://offenedaten.frankfurt.de", 13 | "source": "http://offenedaten.frankfurt.de/dataset/912fe0ab-8976-4837-b591-57dbf163d6e5/resource/48378186-5732-41f3-9823-9d1938f2695e/download/parkdatendyn.xml", 14 | "active_support":true, 15 | "attribution":{ 16 | "contributor":"Stadt Frankfurt am Main", 17 | "url":"http://daten.frankfurt.de/organization/strassenverkehrsamt", 18 | "license":"dl-de/by-2-0" 19 | } 20 | } 21 | }] 22 | } 23 | -------------------------------------------------------------------------------- /park_api/server.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from logging.handlers import RotatingFileHandler 3 | import os 4 | from park_api import env, db 5 | from park_api.app import app 6 | 7 | 8 | def main(): 9 | log_path = os.path.join(env.APP_ROOT, "log", env.ENV + ".log") 10 | log_handler = RotatingFileHandler(log_path, 11 | maxBytes=1000000, 12 | backupCount=1) 13 | formatter = logging.Formatter("%(asctime)s %(levelname)s: %(message)s ") 14 | log_handler.setFormatter(formatter) 15 | 16 | app.logger.addHandler(log_handler) 17 | app.logger.addHandler(logging.StreamHandler()) 18 | 19 | if not env.is_development(): 20 | app.logger.setLevel(logging.INFO) 21 | log_handler.setLevel(logging.INFO) 22 | 23 | db.setup() 24 | 25 | app.run(**env.SERVER_CONF._asdict()) 26 | -------------------------------------------------------------------------------- /LICENSE: 
-------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Kilian Koeltzsch 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /tests/fixtures/sample_city.html: -------------------------------------------------------------------------------- 1 | 2 |

26.06.2015 14:19 Uhr

3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 |
Parking Lot 1100200Musterstraße 1openParkhaus
Parking Lot 21020Musterstraße 2openParkplatz
Parking Lot 3050Musterstraße 3closedTiefgarage
29 | 30 | -------------------------------------------------------------------------------- /park_api/db.py: -------------------------------------------------------------------------------- 1 | from urllib.parse import urlparse 2 | from contextlib import contextmanager 3 | import psycopg2, psycopg2.extras 4 | from psycopg2.pool import ThreadedConnectionPool 5 | from park_api import env 6 | 7 | POOL = None 8 | 9 | def setup(url=env.DATABASE_URI): 10 | global POOL 11 | u = urlparse(url) 12 | POOL = ThreadedConnectionPool(1, 20, 13 | database=u.path[1:], 14 | user=u.username, 15 | password=u.password, 16 | host=u.hostname, 17 | port=u.port) 18 | 19 | @contextmanager 20 | def cursor(commit=False): 21 | """ 22 | psycopg2 connection.cursor context manager. 23 | Creates a new cursor and closes it, commiting changes if specifie 24 | """ 25 | global POOL 26 | assert POOL != None, "use db.setup() before calling db.cursor()" 27 | try: 28 | connection = POOL.getconn() 29 | cursor = connection.cursor(cursor_factory=psycopg2.extras.RealDictCursor) 30 | try: 31 | yield cursor 32 | if commit: connection.commit() 33 | finally: 34 | cursor.close() 35 | finally: 36 | POOL.putconn(connection) 37 | -------------------------------------------------------------------------------- /park_api/cities/Aalborg.py: -------------------------------------------------------------------------------- 1 | from park_api.util import convert_date 2 | from park_api.geodata import GeoData 3 | from datetime import datetime 4 | import json 5 | import re 6 | 7 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 
8 | geodata = GeoData(__file__) 9 | 10 | 11 | def parse_html(text_content): 12 | 13 | elems = text_content.split("\r\n\r\n") 14 | 15 | data = { 16 | "last_updated": convert_date(elems[0], "%d-%m-%Y %H:%M:%S "), 17 | "lots": [] 18 | } 19 | 20 | state_mappings = { 21 | 1: "open", 22 | 0: "closed" 23 | } 24 | 25 | for elem in elems[1:]: 26 | e = {"name": elem.split("\r\n")[0].split("=")[1], 27 | "free": int(elem.split("\r\n")[1].split("=")[1])} 28 | lot = geodata.lot(e["name"]) 29 | data["lots"].append({ 30 | "name": e["name"], 31 | "free": e["free"], 32 | "total": lot.total, 33 | "address": lot.address, 34 | "coords": lot.coords, 35 | "state": "unknown", 36 | "lot_type": lot.type, 37 | "id": lot.id, 38 | "forecast": False, 39 | }) 40 | 41 | return data 42 | -------------------------------------------------------------------------------- /DEPLOYMENT.md: -------------------------------------------------------------------------------- 1 | # Deployment 2 | 3 | Currently 2 stages exist production and staging. 4 | We use [capistrano](http://capistranorb.com/) to deploy servers. 5 | 6 | ## Install Capistrano 7 | 8 | ``` 9 | # 1. install ruby (my recommendation is https://rvm.io) 10 | $ gem install --user bundler 11 | $ bundle install 12 | ``` 13 | 14 | ## How to deploy a new version to staging 15 | 16 | TODO: automatic pull from master on commit 17 | 18 | ## How to deploy a new version to production 19 | 20 | ```bash 21 | # Merge master into stable, run tests 22 | $ cap production release:prepare 23 | # Push release and switch back to master 24 | $ cap production release:push 25 | # Deploy new code from stable 26 | $ cap production deploy 27 | ``` 28 | 29 | ## Further things 30 | 31 | * To reset stable branch to offenesdresden/stable (abort release) 32 | 33 | ``` 34 | $ cap production release:reset 35 | ``` 36 | 37 | * Nothing works after deploy? 
Don't freak out use to step back 38 | 39 | ``` 40 | $ cap production deploy:rollback 41 | ``` 42 | 43 | * Download/upload configuration 44 | 45 | ``` 46 | $ cap production config:download 47 | $ cap production config:upload 48 | ``` 49 | 50 | * Follow server logs 51 | 52 | ``` 53 | $ cap production log:tail 54 | ``` 55 | -------------------------------------------------------------------------------- /park_api/cities/Ingolstadt.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.geodata import GeoData 3 | from park_api.util import convert_date 4 | 5 | # Additional information for single lots: 6 | # http://www2.ingolstadt.de/Wirtschaft/Parken/Parkeinrichtungen_der_IFG/ 7 | geodata = GeoData(__file__) 8 | 9 | 10 | def parse_html(html): 11 | soup = BeautifulSoup(html, "html.parser") 12 | 13 | data = { 14 | "last_updated": convert_date(soup.p.string, "(%d.%m.%Y, %H.%M Uhr)"), 15 | "lots": [] 16 | } 17 | 18 | # get all lots 19 | raw_lots = soup.find_all("tr") 20 | 21 | for raw_lot in raw_lots: 22 | elements = raw_lot.find_all("td") 23 | 24 | state = "open" 25 | if "class" in raw_lot.attrs and "strike" in raw_lot["class"]: 26 | state = "closed" 27 | 28 | lot_name = elements[0].text 29 | 30 | lot = geodata.lot(lot_name) 31 | data["lots"].append({ 32 | "name": lot.name, 33 | "free": int(elements[1].text), 34 | "total": lot.total, 35 | "lot_type": lot.type, 36 | "address": lot.address, 37 | "coords": lot.coords, 38 | "state": state, 39 | "id": lot.id, 40 | "forecast": False 41 | }) 42 | 43 | return data 44 | -------------------------------------------------------------------------------- /tests/fixtures/rosenheim.json: -------------------------------------------------------------------------------- 1 | 
[{"uid":4,"title":"Reserve","parkings":0,"occupied":0,"free":"---","isOpened":false,"link":0},{"uid":7,"title":"Reserve","parkings":0,"occupied":0,"free":"---","isOpened":false,"link":0},{"uid":13,"title":"Reserve","parkings":0,"occupied":0,"free":"---","isOpened":false,"link":0},{"uid":14,"title":"Reserve","parkings":0,"occupied":0,"free":"---","isOpened":false,"link":0},{"uid":0,"title":"P1 Zentrum","parkings":342,"occupied":86,"free":256,"isOpened":true,"link":224},{"uid":1,"title":"P2 KU'KO","parkings":138,"occupied":60,"free":78,"isOpened":true,"link":225},{"uid":2,"title":"P3 Rathaus","parkings":31,"occupied":1,"free":30,"isOpened":true,"link":226},{"uid":3,"title":"P4 Mitte","parkings":285,"occupied":57,"free":228,"isOpened":true,"link":227},{"uid":5,"title":"P6 Salinplatz","parkings":232,"occupied":60,"free":172,"isOpened":true,"link":228},{"uid":6,"title":"P7 Altstadt-Ost","parkings":97,"occupied":15,"free":82,"isOpened":true,"link":229},{"uid":10,"title":"P8 Beilhack-Citydome","parkings":155,"occupied":38,"free":117,"isOpened":true,"link":230},{"uid":8,"title":"P9 Am Klinikum","parkings":421,"occupied":363,"free":58,"isOpened":true,"link":1053},{"uid":9,"title":"P10 Stadtcenter","parkings":56,"occupied":34,"free":22,"isOpened":true,"link":231},{"uid":11,"title":"P11 Beilhack-Gie\u00dfereistr.","parkings":155,"occupied":155,"free":"---","isOpened":false,"link":1151},{"uid":12,"title":"P12 Bahnhof Nord","parkings":398,"occupied":33,"free":365,"isOpened":true,"link":1203}] 2 | -------------------------------------------------------------------------------- /park_api/cities/Muenster.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | 5 | state_map = { 6 | "frei": "open", 7 | "geschlossen": "closed", 8 | "besetzt": "open" 9 | } 10 | 11 | geodata = GeoData(__file__) 12 | 13 | 14 | def parse_html(html): 15 | 
soup = BeautifulSoup(html, "html.parser") 16 | 17 | lot_table_trs = soup.select("div#parkingList table")[0].find_all("tr") 18 | date_field = soup.find(id="lastRefresh").text.strip() 19 | 20 | data = { 21 | "last_updated": convert_date(date_field, "%d.%m.%Y %H:%M Uhr"), 22 | "lots": [] 23 | } 24 | 25 | for tr in lot_table_trs[1:-1]: 26 | tds = tr.find_all("td") 27 | type_and_name = process_name(tds[0].text.strip()) 28 | lot = geodata.lot(tds[0].text.strip()) 29 | data["lots"].append({ 30 | "name": type_and_name[1].strip("\n"), 31 | "lot_type": type_and_name[0], 32 | "free": int(tds[1].text), 33 | "total": lot.total, 34 | "state": state_map.get(tds[2].text, ""), 35 | "coords": lot.coords, 36 | "id": lot.id, 37 | "forecast": False 38 | }) 39 | 40 | return data 41 | 42 | 43 | def process_name(name): 44 | lot_type = name[:3].strip() 45 | lot_name = name[3:].strip() 46 | 47 | type_mapping = { 48 | "PP": "Parkplatz", 49 | "PH": "Parkhaus", 50 | } 51 | if lot_type in type_mapping.keys(): 52 | lot_type = type_mapping[lot_type] 53 | else: 54 | lot_type = "" 55 | lot_name = name 56 | 57 | return lot_type, lot_name 58 | -------------------------------------------------------------------------------- /park_api/cities/Sample_City.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 8.2168987, 9 | 53.1391840 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Sample City", 14 | "type": "city", 15 | "url": "http://example.com", 16 | "source": "http://example.com/parkingdata", 17 | "active_support":false 18 | } 19 | }, { 20 | "type": "Feature", 21 | "properties": { 22 | "name": "Parking Lot 1", 23 | "total": 200, 24 | "address": "Musterstraße 1", 25 | "type": "Parkhaus" 26 | }, 27 | "geometry": { 28 | "type": "Point", 29 | "coordinates": [ 30 | 8.2168988, 31 | 53.1391841 32 | ] 33 | } 34 | }, 35 | { 36 | 
"type": "Feature", 37 | "properties": { 38 | "name": "Parking Lot 2", 39 | "total": 20, 40 | "address": "Musterstraße 2", 41 | "type": "Parkplatz" 42 | }, 43 | "geometry": { 44 | "type": "Point", 45 | "coordinates": [ 46 | 8.21115, 47 | 53.14169 48 | ] 49 | } 50 | }, 51 | { 52 | "type": "Feature", 53 | "properties": { 54 | "name": "Parking Lot 3", 55 | "total": 50, 56 | "address": "Musterstraße 3", 57 | "type": "Tiefgarage" 58 | }, 59 | "geometry": { 60 | "type": "Point", 61 | "coordinates": [ 62 | 8.21514, 63 | 53.14161 64 | ] 65 | } 66 | } 67 | ] 68 | } 69 | -------------------------------------------------------------------------------- /park_api/cities/Konstanz.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date, utc_now 3 | from park_api.geodata import GeoData 4 | import datetime 5 | 6 | geodata = GeoData(__file__) 7 | 8 | 9 | def parse_html(html): 10 | soup = BeautifulSoup(html, "html.parser") 11 | 12 | # last update time (UTC) 13 | # Konstanz does not support the last_updated yet. 
I hope they will inform me when it's added 14 | # as the data seems accurate I will return the current time and date 15 | data = { 16 | "last_updated": utc_now(), 17 | "lots": [] 18 | } 19 | 20 | # get all tables with lots 21 | parken = soup.find_all( "table", class_="parken") 22 | 23 | # get all lots 24 | for park_lot in parken : 25 | td = park_lot.find_all("td") 26 | parking_name = td[1].text.strip() 27 | # work-around for the Umlaute-problem: ugly but working 28 | if ( 'Marktst' in parking_name) : parking_name = 'Marktstätte' 29 | elif ( 'bele' in parking_name) : parking_name = 'Döbele' 30 | # get the data 31 | lot = geodata.lot(parking_name) 32 | # look for free lots 33 | parking_state = 'open' 34 | parking_free = 0 35 | try: 36 | parking_free = int(td[2].text) 37 | except: 38 | parking_state = 'nodata' 39 | 40 | data["lots"].append({ 41 | "name": parking_name, 42 | "free": parking_free, 43 | "total": lot.total, 44 | "address": lot.address, 45 | "coords": lot.coords, 46 | "state": parking_state, 47 | "lot_type": lot.type, 48 | "id": lot.id, 49 | "forecast": False 50 | }) 51 | 52 | return data 53 | -------------------------------------------------------------------------------- /park_api/crossdomain.py: -------------------------------------------------------------------------------- 1 | from datetime import timedelta 2 | from flask import make_response, request, current_app 3 | from functools import update_wrapper 4 | 5 | 6 | def crossdomain(origin=None, methods=None, headers=None, 7 | max_age=21600, attach_to_all=True, 8 | automatic_options=True): 9 | if methods is not None: 10 | methods = ', '.join(sorted(x.upper() for x in methods)) 11 | if headers is not None and not isinstance(headers, str): 12 | headers = ', '.join(x.upper() for x in headers) 13 | if not isinstance(origin, str): 14 | origin = ', '.join(origin) 15 | if isinstance(max_age, timedelta): 16 | max_age = max_age.total_seconds() 17 | 18 | def get_methods(): 19 | if methods is not None: 20 | return 
methods 21 | 22 | options_resp = current_app.make_default_options_response() 23 | return options_resp.headers['allow'] 24 | 25 | def decorator(f): 26 | def wrapped_function(*args, **kwargs): 27 | if automatic_options and request.method == 'OPTIONS': 28 | resp = current_app.make_default_options_response() 29 | else: 30 | resp = make_response(f(*args, **kwargs)) 31 | if not attach_to_all and request.method != 'OPTIONS': 32 | return resp 33 | 34 | h = resp.headers 35 | 36 | h['Access-Control-Allow-Origin'] = origin 37 | h['Access-Control-Allow-Methods'] = get_methods() 38 | h['Access-Control-Max-Age'] = str(max_age) 39 | if headers is not None: 40 | h['Access-Control-Allow-Headers'] = headers 41 | return resp 42 | 43 | f.provide_automatic_options = False 44 | return update_wrapper(wrapped_function, f) 45 | return decorator 46 | -------------------------------------------------------------------------------- /tools/geojson_koeln.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import sys 4 | import json 5 | 6 | if __name__ == "__main__": 7 | with open(sys.argv[1], "r") as f: 8 | data = json.load(f) 9 | 10 | geo = {"type":"FeatureCollection", 11 | "features":[{ 12 | "type":"Feature", 13 | "geometry":{ 14 | "type":"Point", 15 | "coordinates":[ 16 | 6.958249, 17 | 50.941387 18 | ] 19 | }, 20 | "properties":{ 21 | "name":"Köln", 22 | "type":"city", 23 | "url":"https://offenedaten-koeln.de/dataset/parkhausbelegung", 24 | "source":"https://www.stadt-koeln.de/externe-dienste/open-data/parking-ts.php", 25 | "active_support":False 26 | } 27 | } 28 | ] 29 | } 30 | 31 | for k in data.keys(): 32 | aux = {"identifier":k, 33 | "open":data[k]["open"] == "durchgehend"} 34 | geo["features"].append({ 35 | "type":"Feature", 36 | "properties":{ 37 | "name":data[k]["title"].replace(" (*)", ""), 38 | "total":int(data[k]["capacity"]), 39 | "address":data[k]["street"] + " " + data[k]["housenumber"], 40 | "type":"Parkhaus", 
41 | "aux":json.dumps(aux) 42 | }, 43 | "geometry":{ 44 | "type":"Point", 45 | "coordinates":[ 46 | float(data[k]["lng"]), 47 | float(data[k]["lat"]) 48 | ] 49 | } 50 | }) 51 | 52 | with open("Koeln.geojson", "w") as geojson: 53 | json.dump(geo, geojson, indent=4, sort_keys=True, ensure_ascii=False) 54 | 55 | -------------------------------------------------------------------------------- /park_api/cities/Rosenheim.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.geodata import GeoData 3 | from park_api.util import utc_now 4 | from park_api import env 5 | import json 6 | 7 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 8 | # No need to remove this if there's no geodata (yet), everything will still work. 9 | geodata = GeoData(__file__) 10 | 11 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 12 | # It's supposed to return a dictionary containing everything the current spec expects. 
Tests will fail if it doesn't ;) 13 | def parse_html(html): 14 | 15 | data = { 16 | "last_updated": utc_now(), # not found on site, so we use something else 17 | # URL for the page where the scraper can gather the data 18 | "lots": [] 19 | } 20 | 21 | dataJSON=json.loads(html) 22 | # over all parking-lots 23 | for parking_lot in dataJSON : 24 | parking_name = parking_lot['title'] 25 | if ( parking_name != 'Reserve' ) : 26 | lot = geodata.lot(parking_name) 27 | try : 28 | parking_free = 0 29 | if ( parking_lot['isOpened'] == False) : 30 | parking_status = 'closed' 31 | else : 32 | parking_status = 'open' 33 | parking_free = int(parking_lot['free']) 34 | except : 35 | parking_status = 'nodata' 36 | data["lots"].append({ 37 | "name": parking_name, 38 | "free": parking_free, 39 | "total": parking_lot['parkings'], 40 | "address": lot.address, 41 | "coords": lot.coords, 42 | "state": parking_status, 43 | "lot_type": lot.type, 44 | "id": lot.id, 45 | "forecast": False 46 | }) 47 | 48 | return data 49 | 50 | -------------------------------------------------------------------------------- /park_api/cities/Karlsruhe.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.geodata import GeoData 3 | from park_api.util import utc_now 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(html): 12 | 13 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 
14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | # last update time (UTC) 17 | # Karlsruhe does not support the last_upted yet. 18 | # as the data seems accurate I will return the current time and date 19 | data = { 20 | "last_updated": utc_now(), 21 | "lots": [] 22 | } 23 | 24 | lots = soup.find_all( 'div', class_='parkhaus') 25 | for parking_lot in lots : 26 | parking_name = parking_lot.find('a').text 27 | lot = geodata.lot(parking_name) 28 | 29 | parking_state = 'open' 30 | parking_free = 0 31 | parking_fuellstand = parking_lot.find( 'div', class_='fuellstand') 32 | try : 33 | if ( parking_fuellstand == None ) : 34 | parking_state = 'nodata' 35 | else : 36 | temp= parking_fuellstand.text.split() 37 | parking_free = int(temp[0]) 38 | except : 39 | parking_state = 'nodata' 40 | 41 | data["lots"].append({ 42 | "name": parking_name, 43 | "free": parking_free, 44 | "total": lot.total, 45 | "address": lot.address, 46 | "coords": lot.coords, 47 | "state": parking_state, 48 | "lot_type": lot.type, 49 | "id": lot.id, 50 | "forecast": False, 51 | }) 52 | 53 | return data 54 | -------------------------------------------------------------------------------- /park_api/cities/Ulm.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.geodata import GeoData 3 | from park_api.util import utc_now 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(html): 12 | 13 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 
14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | # last_updated is the date when the data on the page was last updated, it should be listed on most pages 17 | # Uhrzeit like Konstanz 18 | data = { 19 | # last_updated like Konstanz 20 | "last_updated": utc_now(), 21 | # URL for the page where the scraper can gather the data 22 | "lots": [] 23 | } 24 | 25 | table = soup.find('table', id='haupttabelle') 26 | table2 = table.find('table', width='790') 27 | rows = table2.find_all('tr') 28 | for row in rows[3:12] : 29 | parking_data = row.find_all('td') 30 | parking_name = parking_data[0].text 31 | lot = geodata.lot(parking_name) 32 | try : 33 | parking_state = 'open' 34 | parking_free = int(parking_data[2].text) 35 | except : 36 | parking_free = 0 37 | parking_state = 'nodata' 38 | 39 | data["lots"].append({ 40 | "name": parking_name, 41 | "free": parking_free, 42 | "total": lot.total, 43 | "address": lot.address, 44 | "coords": lot.coords, 45 | "state": parking_state, 46 | "lot_type": lot.type, 47 | "id": lot.id, 48 | "forecast": False, 49 | }) 50 | 51 | return data 52 | -------------------------------------------------------------------------------- /park_api/cities/Koeln.py: -------------------------------------------------------------------------------- 1 | import json 2 | import datetime 3 | from park_api.util import convert_date 4 | from park_api.geodata import GeoData 5 | 6 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 7 | # No need to remove this if there's no geodata (yet), everything will still work. 8 | geodata = GeoData(__file__) 9 | 10 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 11 | # It's supposed to return a dictionary containing everything the current spec expects. 
Tests will fail if it doesn't ;) 12 | def parse_html(html): 13 | data = json.loads(html) 14 | lots = { 15 | "lots":[], 16 | "last_updated":None 17 | } 18 | id_lots = {} 19 | for l in geodata.lots: 20 | aux = json.loads(geodata.lots[l].aux) 21 | id_lots[aux["identifier"]] = {"lot":geodata.lots[l], 22 | "open":aux["open"]} 23 | timestamps = [] 24 | for feature in data["features"]: 25 | try: 26 | if id_lots[feature["attributes"]["IDENTIFIER"]]["open"]: 27 | state = "open" 28 | else: 29 | if feature["attributes"]["KAPAZITAET"] == -1: 30 | state = "nodata" 31 | else: 32 | state = "unknown" 33 | lot = id_lots[feature["attributes"]["IDENTIFIER"]]["lot"] 34 | lots["lots"].append({ 35 | "coords":lot.coords, 36 | "name":lot.name, 37 | "total":int(lot.total), 38 | "free":int(feature["attributes"]["KAPAZITAET"]), 39 | "state":state, 40 | "id":lot.id, 41 | "lot_type":lot.type, 42 | "address":lot.address, 43 | "forecast":False, 44 | "region":"" 45 | }) 46 | timestamps.append(convert_date(feature["attributes"]["TIMESTAMP"], "%Y-%m-%d %H:%M:%S")) 47 | except (KeyError, ValueError): 48 | pass 49 | timestamps.sort() 50 | timestamps.reverse() 51 | lots["last_updated"] = timestamps[0] 52 | return lots 53 | 54 | -------------------------------------------------------------------------------- /schema/city_schema.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema":"http://jkliemann.de/parkendd/media/city_schema.json", 3 | "title":"ParkenDD JSON API", 4 | "description":"JSON required by ParkenDD app to show parking lots", 5 | "type":"array", 6 | "items":{ 7 | "type":"object", 8 | "properties":{ 9 | "name":{ 10 | "description":"Name of parking lot category", 11 | "type":"string" 12 | }, 13 | "lots":{ 14 | "description":"Parking lots in this category", 15 | "type":"array", 16 | "items":{ 17 | "type":"object", 18 | "properties":{ 19 | "name":{ 20 | "description":"Name of parking lot", 21 | "type":"string" 22 | }, 23 | "count":{ 24 | 
"description":"Count of available parking lots", 25 | "type":"integer" 26 | }, 27 | "free":{ 28 | "description":"Count of free parking lots", 29 | "type":"integer" 30 | }, 31 | "state":{ 32 | "description":"State of the parking lot", 33 | "type":"string", 34 | "enum":["nodata","closed","many","few","full"] 35 | }, 36 | "lat":{ 37 | "description":"latitude of the parking lot", 38 | "type":"string" 39 | }, 40 | "lon":{ 41 | "description":"longitude of the parking lot", 42 | "type":"string" 43 | }, 44 | "forecast":{ 45 | "description":"shows if forecast is available for this spot", 46 | "type":"boolean" 47 | }, 48 | "url":{ 49 | "description":"A URL of a web resource where more information can be viewed", 50 | "type":"string" 51 | }, 52 | "opening_hours":{ 53 | "description":"Opening hours of this lot in OpenStreetMap format (https://wiki.openstreetmap.org/wiki/Key:opening_hours)", 54 | "type":"string" 55 | }, 56 | "fee_hours":{ 57 | "description":"Hours during which usage of this lot incurrs fees in OpenStreetMap format (https://wiki.openstreetmap.org/wiki/Key:opening_hours). 
It is implied that outside those hours usage is free.", 58 | "type":"string" 59 | } 60 | }, 61 | "required":["name", "count", "state", "forecast"] 62 | } 63 | } 64 | }, 65 | "required":["name", "lots"] 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /park_api/cities/Basel.py: -------------------------------------------------------------------------------- 1 | import feedparser 2 | import html 3 | from datetime import datetime 4 | from park_api.geodata import GeoData 5 | from park_api.util import utc_now 6 | 7 | geodata = GeoData(__file__) 8 | 9 | 10 | def parse_html(xml_data): 11 | feed = feedparser.parse(xml_data) 12 | 13 | try: 14 | last_updated = feed["entries"][0]["updated"] 15 | last_updated = datetime.strptime(last_updated[5:25], "%d %b %Y %H:%M:%S").isoformat() 16 | except KeyError: 17 | last_updated = utc_now() 18 | 19 | 20 | 21 | data = { 22 | "lots": [], 23 | "last_updated": last_updated 24 | } 25 | 26 | for entry in feed["entries"]: 27 | summary = parse_summary(entry["summary"]) 28 | title_elements = parse_title(entry["title"]) 29 | 30 | lot_identifier = html.unescape((title_elements[2] + " " + title_elements[0]).strip()) 31 | lot = geodata.lot(lot_identifier) 32 | 33 | data["lots"].append({ 34 | "name": html.unescape(title_elements[0]), 35 | "address": lot.address, 36 | "id": html.unescape(lot.id), 37 | "state": "open", 38 | "free": summary[1], 39 | "total": lot.total, 40 | "coords": lot.coords, 41 | "forecast": False, 42 | "lot_type": title_elements[2] 43 | }) 44 | return data 45 | 46 | 47 | def parse_summary(summary): 48 | """Parse a string from the format 'Anzahl freie Parkplätze: 179' into both its params""" 49 | summary = summary.split(":") 50 | 51 | summary[0] = summary[0].strip() 52 | if "?" 
in summary[0]: 53 | summary[0] = "nodata" 54 | 55 | try: 56 | summary[1] = int(summary[1]) 57 | except ValueError: 58 | summary[1] = 0 59 | return summary 60 | 61 | 62 | def parse_title(title): 63 | """ 64 | Parse a string from the format 'Parkhaus Bad. Bahnhof' 65 | """ 66 | types = ["Parkhaus", "Parkplatz"] 67 | 68 | name = title 69 | address = '' 70 | type = "" 71 | if name.split(" ")[0] in types: 72 | type = name.split(" ")[0] 73 | name = " ".join(name.split(" ")[1:]) 74 | 75 | return name, address, type 76 | -------------------------------------------------------------------------------- /park_api/cities/Sample_City.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(html): 12 | 13 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 
14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | # last_updated is the date when the data on the page was last updated, it should be listed on most pages 17 | last_updated = soup.select("p#last_updated")[0].text 18 | 19 | data = { 20 | # convert_date is a utility function you can use to turn this date into the correct string format 21 | "last_updated": convert_date(last_updated, "%d.%m.%Y %H:%M Uhr"), 22 | # URL for the page where the scraper can gather the data 23 | "lots": [] 24 | } 25 | 26 | for tr in soup.find_all("tr"): 27 | lot_name = tr.find("td", {"class": "lot_name"}).text 28 | lot_free = tr.find("td", {"class": "lot_free"}).text 29 | lot_total = tr.find("td", {"class": "lot_total"}).text 30 | 31 | # please be careful about the state only being allowed to contain either open, closed or nodata 32 | # should the page list other states, please map these into the three listed possibilities 33 | state = tr.find("td", {"class": "lot_state"}).text 34 | 35 | lot = geodata.lot(lot_name) 36 | data["lots"].append({ 37 | "name": lot.name, 38 | "free": lot_free, 39 | "total": lot_total, 40 | "address": lot.address, 41 | "coords": lot.coords, 42 | "state": state, 43 | "lot_type": lot.type, 44 | "id": lot.id, 45 | "forecast": False, 46 | }) 47 | 48 | return data 49 | -------------------------------------------------------------------------------- /park_api/cities/Regensburg.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 
10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(html): 12 | 13 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | # last_updated is the date when the data on the page was last updated, it should be listed on most pages 17 | # suche:

zuletzt aktualisiert: 28.05.2019 15.30 Uhr

18 | updated = soup.find( "p", class_="updateinfo") 19 | last_updated = convert_date(updated.text, 'zuletzt aktualisiert: %d.%m.%Y %H.%M Uhr') 20 | 21 | data = { 22 | "last_updated": last_updated, 23 | # URL for the page where the scraper can gather the data 24 | "lots": [] 25 | } 26 | 27 | parking_lots = soup.find_all("div", class_="accordeon parkmoeglichkeit") 28 | for one_lot in parking_lots : 29 | parking_name = one_lot.find("h3").text 30 | lot = geodata.lot(parking_name) 31 | 32 | parking_state = 'open' 33 | parking_free = 0 34 | parking_belegung = one_lot.find("div", class_="belegung") 35 | if (parking_belegung != None ) : 36 | parking_free=int(parking_belegung.find("strong").text) 37 | else: 38 | parking_state='nodata' 39 | 40 | data["lots"].append({ 41 | "name": lot.name, 42 | "free": parking_free, 43 | "total": lot.total, 44 | "address": lot.address, 45 | "coords": lot.coords, 46 | "state": parking_state, 47 | "lot_type": lot.type, 48 | "id": lot.id, 49 | "forecast": False, 50 | }) 51 | 52 | return data 53 | -------------------------------------------------------------------------------- /park_api/cities/Heidelberg.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | import json 5 | from park_api import env 6 | 7 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 8 | # No need to remove this if there's no geodata (yet), everything will still work. 9 | geodata = GeoData(__file__) 10 | 11 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 12 | # It's supposed to return a dictionary containing everything the current spec expects. 
Tests will fail if it doesn't ;) 13 | def parse_html(html): 14 | 15 | dataJSON=json.loads(html) 16 | 17 | data = { 18 | # convert_date is a utility function you can use to turn this date into the correct string format 19 | "last_updated": convert_date(dataJSON['data']['updated'].split("+")[0][:-1], '%a, %d %b %Y %H:%M:%S'), 20 | # URL for the page where the scraper can gather the data 21 | "lots": [] 22 | } 23 | 24 | # iteration over single parking_lots 25 | for parking_lot in dataJSON['data']['parkinglocations'] : 26 | # please keep the name in the geojson-file in the same form as delivered here (including spaces) 27 | parking_name = 'P'+str(parking_lot['uid'])+' '+parking_lot['name'] 28 | # get the data 29 | lot = geodata.lot(parking_name) 30 | 31 | parking_state = 'open' 32 | parking_free = 0 33 | try : 34 | if ( parking_lot['parkingupdate']['status'] == 'closed' ) : 35 | parking_state = 'closed' 36 | else : 37 | parking_free = int(parking_lot['parkingupdate']['total']) - int(parking_lot['parkingupdate']['current']) 38 | except : 39 | parking_state = 'nodata' 40 | 41 | data["lots"].append({ 42 | "name": parking_name, 43 | "free": parking_free, 44 | "total": lot.total, 45 | "address": lot.address, 46 | "coords": lot.coords, 47 | "state": parking_state, 48 | "lot_type": lot.type, 49 | "id": lot.id, 50 | "forecast": False, 51 | }) 52 | 53 | return data 54 | -------------------------------------------------------------------------------- /park_api/cities/Zuerich.py: -------------------------------------------------------------------------------- 1 | import feedparser 2 | from park_api.geodata import GeoData 3 | from park_api.util import utc_now 4 | 5 | 6 | # Falls das hier jemals einer von den Menschen 7 | # hinter OpenDataZürich lesen sollte: Ihr seid so toll <3 8 | geodata = GeoData(__file__) 9 | 10 | 11 | def parse_html(xml_data): 12 | feed = feedparser.parse(xml_data) 13 | 14 | try: 15 | last_updated = feed["entries"][0]["updated"] 16 | except KeyError: 17 | 
last_updated = utc_now() 18 | 19 | data = { 20 | "lots": [], 21 | # remove trailing timezone for consensistency 22 | "last_updated": last_updated.replace("Z", "") 23 | } 24 | 25 | for entry in feed["entries"]: 26 | summary = parse_summary(entry["summary"]) 27 | title_elements = parse_title(entry["title"]) 28 | 29 | lot_identifier = (title_elements[2] + " " + title_elements[0]).strip() 30 | lot = geodata.lot(lot_identifier) 31 | 32 | data["lots"].append({ 33 | "name": title_elements[0], 34 | "address": title_elements[1], 35 | "id": lot.id, 36 | "state": summary[0], 37 | "free": summary[1], 38 | "total": lot.total, 39 | "coords": lot.coords, 40 | "forecast": False, 41 | "lot_type": title_elements[2] 42 | }) 43 | 44 | return data 45 | 46 | 47 | def parse_summary(summary): 48 | """Parse a string from the format 'open / 41' into both its params""" 49 | summary = summary.split("/") 50 | 51 | summary[0] = summary[0].strip() 52 | if "?" in summary[0]: 53 | summary[0] = "nodata" 54 | 55 | try: 56 | summary[1] = int(summary[1]) 57 | except ValueError: 58 | summary[1] = 0 59 | return summary 60 | 61 | 62 | def parse_title(title): 63 | """ 64 | Parse a string from the format 'Parkgarage am Central / Seilergraben' 65 | into both its params 66 | """ 67 | types = ["Parkhaus", "Parkplatz"] 68 | 69 | name = title.split(" / ")[0] 70 | address = title.split(" / ")[1] 71 | type = "" 72 | if name.split(" ")[0] in types: 73 | type = name.split(" ")[0] 74 | name = " ".join(name.split(" ")[1:]) 75 | 76 | return name, address, type 77 | -------------------------------------------------------------------------------- /park_api/cities/Freiburg.py: -------------------------------------------------------------------------------- 1 | from park_api.util import convert_date 2 | from park_api.geodata import GeoData 3 | import json 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 
6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(source_json): 12 | 13 | parsed_json = json.loads(source_json) 14 | features = parsed_json['features'] 15 | 16 | # last_updated is the date when the data on the page was last updated, it should be listed on most pages 17 | last_updated = "" 18 | 19 | data = { 20 | # URL for the page where the scraper can gather the data 21 | "lots": [] 22 | } 23 | 24 | for feature in features: 25 | lot_name = feature['properties']['park_name'] 26 | lot_free = int(feature['properties']['obs_free']) 27 | lot_total = int(feature['properties']['obs_max']) 28 | 29 | if last_updated < feature['properties']['obs_ts']: 30 | last_updated = feature['properties']['obs_ts'] 31 | 32 | # please be careful about the state only being allowed to contain either open, closed or nodata 33 | # should the page list other states, please map these into the three listed possibilities 34 | state = "nodata" 35 | 36 | if feature['properties']['obs_state'] == "1": 37 | state = "open" 38 | elif feature['properties']['obs_state'] == "0": 39 | state = "closed" 40 | 41 | lot = geodata.lot(lot_name) 42 | data["lots"].append({ 43 | "name": lot.name, 44 | "free": lot_free, 45 | "total": lot_total, 46 | "address": lot.address, 47 | "coords": lot.coords, 48 | "state": state, 49 | "lot_type": lot.type, 50 | "id": lot.id, 51 | "forecast": False, 52 | }) 53 | 54 | data['last_updated'] = convert_date(last_updated, "%Y-%m-%d %H:%M:%S") 55 | 56 | return data 57 | -------------------------------------------------------------------------------- /park_api/cities/Dortmund.py: -------------------------------------------------------------------------------- 
from bs4 import BeautifulSoup
from park_api.util import convert_date
from park_api.geodata import GeoData

# This loads the geodata for this city if .geojson exists in the same directory as this file.
# No need to remove this if there's no geodata (yet), everything will still work.
geodata = GeoData(__file__)


def parse_html(html):
    """Parse the Dortmund parking page into the ParkAPI lot dictionary.

    The update time is taken from the page's <h2>, e.g.
    "Stand: 07.06.2019 15:46 Uhr"; each lot is a <dl> with the name in
    <dt> and the free count in <dd><strong>.
    """
    soup = BeautifulSoup(html, "html.parser")

    # last_updated is the date when the data on the page was last updated
    last_updated = soup.find('h2').text

    data = {
        # convert_date turns this date into the canonical string format
        "last_updated": convert_date(last_updated, "Stand: %d.%m.%Y %H:%M Uhr"),
        "lots": []
    }

    # every <dl> on the page is one parking lot entry
    for parking_lot in soup.find_all('dl'):
        parking_name = parking_lot.find('dt').text
        lot = geodata.lot(parking_name)

        try:
            parking_state = 'open'
            parking_free = int(parking_lot.find('dd').find('strong').text)
        except (AttributeError, ValueError):
            # missing <dd>/<strong> or non-numeric count
            parking_state = 'nodata'
            parking_free = 0

        data["lots"].append({
            "name": parking_name,
            "free": parking_free,
            "total": lot.total,
            "address": lot.address,
            "coords": lot.coords,
            "state": parking_state,
            "lot_type": lot.type,
            "id": lot.id,
            "forecast": False,
        })

    return data
-------------------------------------------------------------------------------- /park_api/cities/Mannheim.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(html): 12 | 13 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | data = { 17 | "last_updated": '', # will fill this later 18 | # URL for the page where the scraper can gather the data 19 | "lots": [] 20 | } 21 | 22 | # suche:
23 | div_level1 = soup.find_all('div', id='parkhausliste-ct')[-1] 24 | #

zuletzt aktualisiert am 19.06.2019, 15:27 Uhr

25 | date_time = div_level1.find('p') 26 | data['last_updated'] = convert_date(date_time.text, 'zuletzt aktualisiert am %d.%m.%Y, %H:%M Uhr') 27 | 28 | # find all entries: 29 | div_level2 = div_level1.find('div') 30 | div_level3 = div_level2.find_all('div') 31 | count = 0 32 | while (count < len(div_level3)-2) : 33 | parking_name = div_level3[count+1].text.strip() 34 | lot = geodata.lot(parking_name) 35 | parking_free = 0 36 | parking_state = 'open' 37 | try : 38 | parking_free = int(div_level3[count+2].text) 39 | except : 40 | parking_state = 'nodata' 41 | count += 3 42 | 43 | data["lots"].append({ 44 | "name": parking_name, 45 | "free": parking_free, 46 | "total": lot.total, 47 | "address": lot.address, 48 | "coords": lot.coords, 49 | "state": parking_state, 50 | "lot_type": lot.type, 51 | "id": lot.id, 52 | "forecast": False 53 | }) 54 | 55 | return data 56 | 57 | -------------------------------------------------------------------------------- /park_api/timespan.py: -------------------------------------------------------------------------------- 1 | import os 2 | import csv 3 | from datetime import datetime 4 | 5 | from park_api import db, env 6 | 7 | 8 | def timespan(city, lot_id, total, date_from, date_to, version): 9 | now = datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S") 10 | if date_from > now or version == 1.0: 11 | data = forecast(lot_id, total, date_from, date_to, version) 12 | elif date_to < now: 13 | data = known_timespan_data(city, lot_id, date_from, date_to, version) 14 | else: 15 | data = known_timespan_data(city, lot_id, date_from, now, version) 16 | data.extend(forecast(lot_id, total, now, date_to, version)) 17 | return data 18 | 19 | 20 | def known_timespan_data(city, lot_id, date_from, date_to, version): 21 | if version == 1: 22 | return {} 23 | elif version == "1.1": 24 | with db.cursor() as cur: 25 | sql = '''SELECT timestamp_downloaded, data \ 26 | FROM parkapi \ 27 | WHERE timestamp_downloaded > %s AND timestamp_downloaded < %s AND city = 
%s''' 28 | cur.execute(sql, (date_from, date_to, city,)) 29 | data = [] 30 | for row in cur.fetchall(): 31 | for lot in row['data']['lots']: 32 | if lot['id'] == lot_id: 33 | data.append({"timestamp": row["timestamp_downloaded"].strftime("%Y-%m-%dT%H:%M:%S"), 34 | "free": lot["free"]}) 35 | return data 36 | 37 | 38 | def forecast(lot_id, total, date_from, date_to, version): 39 | if version == 1.0: 40 | try: 41 | csv_path = os.path.join(env.APP_ROOT, "forecast_data", lot_id + ".csv") 42 | 43 | with open(csv_path) as csvfile: 44 | data = {} 45 | for row in csv.reader(csvfile): 46 | if date_from <= row[0] <= date_to: 47 | data[row[0]] = row[1] 48 | return data 49 | except FileNotFoundError: 50 | return {} 51 | elif version == "1.1": 52 | try: 53 | csv_path = os.path.join(env.APP_ROOT, "forecast_data", lot_id + ".csv") 54 | 55 | with open(csv_path) as csvfile: 56 | return [{"timestamp": row[0], "free": int(total * (1 - int(row[1]) / 100))} for row in csv.reader(csvfile) if date_from <= row[0] <= date_to] 57 | except FileNotFoundError: 58 | return [] 59 | -------------------------------------------------------------------------------- /park_api/cities/Aachen.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | # from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | from park_api.util import utc_now 5 | 6 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 7 | # No need to remove this if there's no geodata (yet), everything will still work. 8 | geodata = GeoData(__file__) 9 | 10 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 11 | # It's supposed to return a dictionary containing everything the current spec expects. 
Tests will fail if it doesn't ;) 12 | def parse_html(html): 13 | 14 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 15 | soup = BeautifulSoup(html, "html.parser") 16 | 17 | data = { 18 | "last_updated": utc_now(), # not found on site, so we use something else 19 | # URL for the page where the scraper can gather the data 20 | "lots": [] 21 | } 22 | # for handling duplicate entries 23 | dataUniqe = dict() 24 | 25 | # find all entries: 26 | # suche
"""open API serving parking lot data for multiple cities
"""

# Always prefer setuptools over distutils
from setuptools import setup, find_packages
# To use a consistent encoding
from codecs import open
from os import path

# NOTE(review): parse_requirements is pip-private API and has moved between
# releases; newer pip versions changed the item objects too, hence the
# .requirement / .req fallback further down. Consider reading
# requirements.txt directly instead of depending on pip internals.
try:  # for pip >= 10
    from pip._internal.req import parse_requirements
except ImportError:  # for pip <= 9.0.3
    from pip.req import parse_requirements

# Directory containing this setup.py; used to resolve sibling files.
here = path.abspath(path.dirname(__file__))

# Parse requirements.txt so install_requires stays in sync with it.
requirements = parse_requirements(path.join(here, "requirements.txt"), session=False)
try:
    # pip >= 20: items expose the raw requirement string as .requirement
    install_requires = [str(ir.requirement) for ir in requirements]
except AttributeError:
    # deprecated
    install_requires = [str(ir.req) for ir in requirements]

# Get the long description from the relevant file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name='ParkAPI',
    version='0.0.1',

    description='open API serving parking lot data for multiple cities',
    long_description=long_description,

    url='https://github.com/offenesdresden/ParkAPI',

    author='kilian',
    author_email='me@kilian.io',

    license='MIT',

    classifiers=[
        'Development Status :: 4 - Beta',

        'Framework :: Flask',

        'License :: OSI Approved :: MIT License',

        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
    ],

    keywords='webapp parkinglots scraping',

    packages=find_packages(exclude=['cache', 'server.log']),

    install_requires=install_requires,

    extras_require={
        'dev': ['pip'],
        'test': [],
    },

    # To provide executable scripts, use entry points in preference to the
    # "scripts" keyword. Entry points provide cross-platform support and allow
    # pip to create the appropriate form of executable for the target platform.
    entry_points={
        "console_scripts": [
            "parkapi-scraper=park_api.scraper:main",
            "parkapi-server=park_api.server:main",
            "parkapi-setupdb=park_api.setupdb:main",
        ],
    },
)
#!/usr/bin/env python
"""Dump stored parking data from the database, filtered by city and date."""

import argparse
import csv
from time import gmtime

from park_api import db


def get_args():
    """Parse the command line options for the dump tool."""
    parser = argparse.ArgumentParser()
    parser.add_argument("-c", "--city", help="city to dump")
    parser.add_argument("-y", "--year", help="year to dump")
    parser.add_argument("-m", "--month", help="month of year to dump")
    parser.add_argument("-w", "--week", help="week of year to dump")
    parser.add_argument("-o", "--outdir", help="output base directory")
    return parser.parse_args()


def create_query(city=None, year=None, month=None, week=None):
    """Build (data_query, count_query) SQL strings for the given filters.

    Month and week are mutually exclusive; year defaults to the current
    UTC year.

    :raises ValueError: if both month and week are given, or if a numeric
        filter is not an integer.
    """
    base = "select city, data from parkapi where"
    count = "select count(city) from parkapi where"
    conditions = " "
    if week and month:
        raise ValueError("Month and Week cannot be specified together.")
    if city:
        # escape single quotes so a city name cannot break out of the literal
        conditions += "city = '{0}' and".format(str(city).replace("'", "''"))
    if not year:
        year = gmtime()[0]
    # int() rejects non-numeric input that would otherwise be interpolated
    # into the SQL string verbatim
    conditions += " extract(year from timestamp_downloaded) = '{0}'".format(int(year))
    if week:
        conditions += " and extract(week from timestamp_downloaded) = '{0}'".format(int(week))
    elif month:
        conditions += " and extract(month from timestamp_downloaded) = '{0}'".format(int(month))
    return (base + conditions, count + conditions)


def get_data(cursor, query):
    """Fetch all matching rows and print a per-lot time series of free spots.

    :param cursor: an open database cursor
    :param query: the (data_query, count_query) tuple from create_query()
    """
    data_query, _count_query = query

    # fetchall() replaces the previous count-query + fetchone()-loop, which
    # could append None rows if the count and data queries ever disagreed
    cursor.execute(data_query)
    table = cursor.fetchall()

    # data maps city -> lot id -> [(download timestamp, free count), ...]
    data = {}
    for row in table:
        city_data = data.setdefault(row['city'], {})
        for lot in row['data']['lots']:
            city_data.setdefault(lot['id'], []).append(
                (row['data']['last_downloaded'], lot['free']))

    print(data)


def main(args):
    """Entry point: build the query from CLI args and dump matching data."""
    if args.month and args.week:
        print("Month and Week cannot be specified together.")
        exit(1)

    query = create_query(args.city, args.year, args.month, args.week)

    db.setup()

    with db.cursor() as cursor:
        get_data(cursor, query)
def process_name(name):
    """Split a raw lot label like "PH Markt" into (lot_type, lot_name).

    The first two characters are a type abbreviation ("PP" -> "Parkplatz",
    "PH" -> "Parkhaus", anything else -> ""); the lot name begins after the
    separating character.
    """
    abbreviation = name[:2]
    lot_name = name[3:]

    if abbreviation == "PP":
        lot_type = "Parkplatz"
    elif abbreviation == "PH":
        lot_type = "Parkhaus"
    else:
        # unknown prefix: no type information available
        lot_type = ""
    return lot_type, lot_name
from bs4 import BeautifulSoup
from park_api.util import convert_date
from park_api.geodata import GeoData

# This loads the geodata for this city if .geojson exists in the same
# directory as this file. No need to remove this if there's no geodata (yet),
# everything will still work.
geodata = GeoData(__file__)


def parse_html(html):
    """Parse the Heilbronn parking page into the ParkAPI dict format.

    Returns a dict with "last_updated" (ISO UTC string) and "lots"
    (one entry per car park found on the page).
    """
    soup = BeautifulSoup(html, "html.parser")

    data = {
        "last_updated": '',  # will fill this below
        "lots": []
    }

    # page header looks like: "Datum: 22.07.2019 - Uhrzeit: 16:57"
    data['last_updated'] = convert_date(
        soup.find('div', class_='col-sm-12').text,
        'Datum: %d.%m.%Y - Uhrzeit: %H:%M')

    parking_lots = soup.find_all('div', class_='row carparkContent')
    for one_parking_lot in parking_lots:
        location = one_parking_lot.find('div', class_='carparkLocation col-sm-9')
        link = location.find('a')
        # some lots are linked, others are plain text
        if link is not None:
            parking_name = link.text
        else:
            parking_name = location.text.strip()
        lot = geodata.lot(parking_name)

        parking_free = 0
        parking_state = 'open'
        try:
            # text: "Freie Parkplätze: 195" -> ['Freie', 'Parkplätze:', '195']
            free_text = one_parking_lot.find('div', class_='col-sm-5').text.split()
            parking_free = int(free_text[2])
        except (AttributeError, IndexError, ValueError):
            # missing element / missing column / non-numeric counter:
            # the lot currently publishes no usable occupancy data
            parking_state = 'nodata'

        data["lots"].append({
            "name": parking_name,
            "free": parking_free,
            "total": lot.total,
            "address": lot.address,
            "coords": lot.coords,
            "state": parking_state,
            "lot_type": lot.type,
            "id": lot.id,
            "forecast": False
        })

    return data
from bs4 import BeautifulSoup
import utm
from park_api.geodata import GeoData

geodata = GeoData(__file__)

def parse_html(xml):
    # Despite the name this parses a WFS XML feed, not HTML.
    soup = BeautifulSoup(xml, "html.parser")

    data = {
        "lots": [],
        # trailing character of the timestamp attribute (a 'Z' suffix,
        # presumably — TODO confirm) is stripped
        "last_updated": soup.find('wfs:featurecollection')["timestamp"][:-1]
    }

    # every lot in this feed belongs to the same region and has no forecast
    region = "Hamburg"
    forecast = False

    for member in soup.find('wfs:featurecollection').find_all('gml:featuremember'):
        name = member.find('app:name').string
        count = 0
        try:
            count = int(member.find('app:stellplaetze_gesamt').string)
        except AttributeError:
            # element missing entirely: total capacity unknown, keep 0
            pass
        free = 0
        state = "nodata"
        situation = member.find('app:situation')
        # occupancy figures are only meaningful when the feed does not flag
        # the lot as having no utilisation data
        if situation and situation.string != "keine Auslastungsdaten":
            free = int(member.find('app:frei').string)
            status = member.find('app:status').string
            # "frei"/"besetzt" (free/full) both mean the lot is operating
            if status == "frei" or status == "besetzt":
                state = "open"
            else:
                state = "closed"
        lot_type = member.find('app:art').string
        # "Straßenrand" (roadside) is normalised to the generic "Parkplatz"
        if lot_type == "Straßenrand":
            lot_type = "Parkplatz"
        lot_id = member.find('app:id').string
        address = ""
        # address fallback chain: entrance ("einfahrt"), else street
        # ("strasse") plus optional house number ("hausnr")
        try:
            address = member.find('app:einfahrt').string
        except AttributeError:
            try:
                address = member.find('app:strasse').string
                try:
                    address += " " + member.find('app:hausnr').string
                except (AttributeError, TypeError):
                    pass
            except AttributeError:
                pass

        coord_member = member.find('gml:pos')
        if coord_member:
            # coordinates come as UTM zone 32U easting/northing;
            # convert to lat/lng
            coord_string = coord_member.string.split()
            latlon = utm.to_latlon(float(coord_string[0]), float(coord_string[1]), 32, 'U')
            coords = {
                "lat": latlon[0],
                "lng": latlon[1]
            }
        else:
            coords = None
        data['lots'].append({
            "coords":coords,
            "name":name,
            "id": lot_id,
            "lot_type": lot_type,
            "total":count,
            "free":free,
            "state":state,
            "region":region,
            "forecast":forecast,
            "address":address
        })

    return data
from bs4 import BeautifulSoup
from park_api.geodata import GeoData
from park_api.util import utc_now

geodata = GeoData(__file__)

# column index -> lot name mapping kept for reference/documentation
lot_map = {
    0: "Münsterplatzgarage",
    1: "Stadthausgarage",
    2: "Beethoven-Parkhaus",
    3: "Bahnhofgarage",
    4: "Friedensplatzgarage",
    5: "Marktgarage",
}


def parse_html(html):
    """Parse the Bonn parking page into the ParkAPI dict format.

    The page has no global timestamp, so "last_updated" is set to the
    scrape time (utc_now).
    """
    soup = BeautifulSoup(html, "html.parser")

    lots = []

    for row in soup.find_all("div", class_='parking-lots'):
        entity_wrapper_class = 'wpb_column vc_column_container vc_col-sm-3'
        for column in row.find_all("div", class_=entity_wrapper_class):
            h3 = column.find_all("h3")
            # linked heading + extra headings: the lot publishes a free count
            if h3[0].a is not None and len(h3) > 1:
                name = h3[0].a.string
                free = 0
                for heading in h3:
                    for heading_element in heading.find_all("span"):
                        if heading_element.find("strong") is not None:
                            free = int(heading_element.strong.get_text())
                lot = geodata.lot(name)
                # NOTE(review): this branch lets "Parkplatz" win if both
                # words occur in a name, while the branch below prefers
                # "Parkhaus" — kept as-is since real names contain at most
                # one; confirm before unifying.
                ltype = None
                for p in [pt for pt in ["Parkhaus", "Parkplatz"] if pt in name]:
                    ltype = p
                lots.append({
                    "name": name,
                    "coords": lot.coords,
                    "free": free,
                    "address": lot.address,
                    "total": lot.total,
                    "state": "unknown",
                    "id": lot.id,
                    "lot_type": ltype,
                    "forecast": False
                })

            # plain heading: lot listed without live data
            elif h3[0].string:
                name = h3[0].string
                ltype = None
                if "Parkhaus" in name:
                    ltype = "Parkhaus"
                elif "Parkplatz" in name:
                    ltype = "Parkplatz"
                lot = geodata.lot(name)
                lots.append({
                    "name": name,
                    "coords": lot.coords,
                    "free": 0,
                    "address": lot.address,
                    "total": lot.total,
                    "state": "nodata",
                    "id": lot.id,
                    "lot_type": ltype,
                    "forecast": False
                })

    return {
        "last_updated": utc_now(),
        "lots": lots
    }
#!/usr/bin/env python
"""Validate ParkAPI city .geojson files against the expected structure.

Usage: validate-geojson.py GEOJSON_FILES...
Files that cannot be opened terminate the run with exit code 1;
structural problems raise AssertionError with a descriptive message.
"""
import sys, os
import json

def validate_geometry(geometry):
    """Assert that a geometry object is a GeoJSON Point with two float coords."""
    _type = geometry.get("type", None)
    assert _type == "Point", \
        "geometry should contain a key with the value 'Point', got: '%s'" % _type
    coords = geometry.get("coordinates", None)
    # json.load produces exact list/float types, so type() checks are safe here
    assert type(coords) is list and len(coords) == 2 and \
        type(coords[0]) == float and type(coords[1]) == float, \
        "invalid coordinates: got '%s' in '%s'" % (coords, geometry)


def validate_feature(feature):
    """Assert that one entry of the 'features' array is a valid Feature."""
    assert type(feature) == dict, \
        "each entry of array 'features' " \
        "should be an object, got: '%s'" % feature

    _type = feature.get("type", None)
    assert _type == "Feature", \
        "each entry of array 'features' should contain the key 'type' with value 'Feature', got: '%s'" % _type

    assert "geometry" in feature, \
        "each entry of array 'features' should contain the key 'geometry', got: '%s'" % feature

    geometry = feature.get("geometry", None)

    # geometry may legitimately be null (lot without coordinates)
    if type(geometry) is dict:
        validate_geometry(geometry)
    elif geometry is not None:
        assert False, "geometry must be null or object got: '%s' in '%s'" % (geometry, feature)

    prop = feature.get("properties", None)
    assert type(prop) is dict, \
        "properties must be an object, got: '%s' in '%s'" % (prop, feature)
    assert "name" in prop, \
        "properties must contain a 'name' key, got: '%s' in %s" % (prop, feature)


def validate_schema(geojson):
    """Assert that the toplevel object is a FeatureCollection of valid Features."""
    assert type(geojson) is dict, "Toplevel object should be an JSON object"
    assert geojson.get("type", None) == "FeatureCollection", \
        "Toplevel object should contain a key 'type' with value 'FeatureCollection'"
    assert type(geojson.get("features", None)) == list, \
        "Toplevel object should contain a key 'features', where the type is an array"
    for feature in geojson["features"]:
        validate_feature(feature)


def process_json(path):
    """Load and validate a single geojson file; exit(1) if it cannot be opened."""
    try:
        # BUGFIX: the file handle was previously never closed; the context
        # manager releases it even when validation raises
        with open(path) as file:
            geojson = json.load(file)
    except OSError as e:
        print("failed to open '%s': %s" % (path, e), file=sys.stderr)
        sys.exit(1)
    validate_schema(geojson)


def main():
    if len(sys.argv) <= 1:
        print("USAGE: %s GEOJSON_FILES..." % sys.argv[0], file=sys.stderr)
        sys.exit(1)
    for path in sys.argv[1:]:
        process_json(path)


if __name__ == '__main__':
    main()
import os
import unittest
import ddt
import helpers
import importlib
from datetime import datetime
from park_api import db, env, security


def scrape_city(city):
    # Locate a fixture for the city (first matching extension wins) and
    # feed it through the city module's parse_html.
    allowed_extensions = [".html", ".json", ".xml"]
    for extension in allowed_extensions:
        path = os.path.join(helpers.TEST_ROOT,
                            "fixtures",
                            city.lower() + extension)
        if os.path.isfile(path):
            break
    with open(path, 'rb') as f:
        city = importlib.import_module("park_api.cities." + city)
        return city.parse_html(f.read().decode('utf-8', 'replace'))

def get_tests():
    # Enumerate city module names (file names minus extension) from the
    # cities package; returns after the first os.walk iteration, i.e. only
    # the top-level directory is considered.
    modpath = os.path.join(env.APP_ROOT, "park_api", "cities")
    strip_py = lambda name: ".".join(name.split(".")[:-1])
    for (dirpath, dirnames, filenames) in os.walk(modpath):
        return tuple(map(strip_py, filter(security.file_is_allowed, filenames)))

@ddt.ddt
class CityTestCase(unittest.TestCase):
    def setUp(self):
        db.setup()

    def sanity_check(self, city_name, city):
        # Assert the parse_html output matches the ParkAPI spec.
        self.assertIn("lots", city)
        self.assertIn("last_updated", city)
        # last_updated must be an ISO timestamp without timezone suffix
        last_updated = datetime.strptime(city["last_updated"],
                                         "%Y-%m-%dT%H:%M:%S")
        self.assertIsInstance(last_updated, datetime)

        self.assertTrue(len(city["lots"]) > 0)

        for lot in city["lots"]:
            self.assertIn("name", lot)

            self.assertIn("coords", lot)

            self.assertIn("state", lot)
            self.assertIn(lot["state"],
                          ["open", "closed", "nodata", "unknown"])

            self.assertIn("id", lot)

            self.assertIn("forecast", lot)
            self.assertIs(type(lot["forecast"]), bool)

            self.assertIn("free", lot)
            self.assertIn("total", lot)
            # total < free is suspicious but tolerated: some sources only
            # publish a stale or estimated total, so just warn
            total, free = lot["total"], lot["free"]
            if total < free:
                msg = "\n[warn] total lots should be more than free lots:"\
                      " %d >= %d: %s => %s"
                print(msg % (total, free, city_name, lot))
            if "coords" in lot and lot["coords"] is not None:
                self.assertIn("lat", lot["coords"])
                self.assertIn("lng", lot["coords"])

    # ddt generates one test case per city module found on disk
    @ddt.data(*get_tests())
    def test_city(self, city_name):
        self.sanity_check(city_name, scrape_city(city_name))
from bs4 import BeautifulSoup
from park_api.util import convert_date
from park_api.geodata import GeoData
# from park_api.util import utc_now

# This loads the geodata for this city if .geojson exists in the same
# directory as this file. No need to remove this if there's no geodata (yet),
# everything will still work.
geodata = GeoData(__file__)


def parse_html(html):
    """Parse the Limburg parking page into the ParkAPI dict format."""
    soup = BeautifulSoup(html, "html.parser")

    data = {
        "last_updated": '',
        "lots": []
    }

    # header text: "Stand: 13.08.2019 16:40:00 Uhr"
    data['last_updated'] = convert_date(soup.find('b').text,
                                        'Stand: %d.%m.%Y %H:%M:%S Uhr')

    entries = soup.find('table', class_='tabellenformat')
    entries_rows = entries.find_all('tr')
    # first row is the table header
    for one_entry in entries_rows[1:]:
        one_entry_data = one_entry.find_all('td')
        parking_name = one_entry_data[0].text
        lot = geodata.lot(parking_name)

        parking_free = 0
        parking_total = 0
        try:
            parking_total = int(one_entry_data[1].text)
            # column 5 holds the textual state, column 3 the free count
            if one_entry_data[5].text == 'Offen':
                parking_status = 'open'
                parking_free = int(one_entry_data[3].text)
            elif one_entry_data[5].text == 'Geschlossen':
                parking_status = 'closed'
            else:
                parking_status = 'nodata'
        except (IndexError, ValueError):
            # missing cells or non-numeric counts: report no data
            parking_status = 'nodata'

        data["lots"].append({
            "name": parking_name,
            "free": parking_free,
            "total": parking_total,
            "address": lot.address,
            "coords": lot.coords,
            "state": parking_status,
            "lot_type": lot.type,
            "id": lot.id,
            "forecast": False
        })

    return data
from bs4 import BeautifulSoup
from park_api.util import convert_date
from park_api.geodata import GeoData

geodata = GeoData(__file__)


def parse_html(html):
    """Parse the Wiesbaden parking page into the ParkAPI dict format."""
    soup = BeautifulSoup(html, "html.parser")

    stand = soup.select('span')
    # stand[0] looks like: "Stand: 10.04.2019 15:09"
    # splitting gives u'10.04.2019', u'15:09'; rejoin as one timestamp
    last_updated_date = stand[0].text.strip().split()[1]
    last_updated_time = stand[0].text.strip().split()[2]
    last_updated = last_updated_date + " " + last_updated_time

    data = {
        "last_updated": convert_date(last_updated, "%d.%m.%Y %H:%M"),
        "lots": []
    }

    # everything is in table-objects:
    # table[0] is a big table-object around everything
    # table[1] contains some headers
    # table[2] contains column-headers and one row for each parking-lot
    table = soup.select('table')
    td = table[2].find_all('td')
    i = 0
    # each lot occupies 5 consecutive td cells:
    # td[i] image, td[i+1] name, td[i+2] 'geschlossen' or "free / total"
    while i < len(td) - 4:
        parking_name = td[i + 1].text.strip()
        # work-around for the sz-problem: Coulinstraße
        if 'Coulinstr' in parking_name:
            parking_name = 'Coulinstraße'
        lot = geodata.lot(parking_name)
        try:
            parking_state = 'open'
            parking_free = 0
            parking_total = 0
            if 'geschlossen' in td[i + 2].text:
                parking_state = 'closed'
            else:
                parking_free = int(td[i + 2].text.split()[0])
                parking_total = int(td[i + 2].text.split()[2])
        except (IndexError, ValueError):
            # malformed or missing "free / total" cell
            parking_state = 'nodata'

        data["lots"].append({
            "name": parking_name,
            "free": parking_free,
            "total": parking_total,
            "address": lot.address,
            "coords": lot.coords,
            "state": parking_state,
            "lot_type": lot.type,
            "id": lot.id,
            "forecast": False,
        })
        i += 5  # next parking-lot

    return data
from bs4 import BeautifulSoup
from park_api.util import convert_date
from park_api.geodata import GeoData

# This loads the geodata for this city if .geojson
# exists in the same directory as this file.
# No need to remove this if there's no geodata (yet),
# everything will still work.
geodata = GeoData(__file__)


def parse_html(html):
    """Parse the Oldenburg parking page into the ParkAPI dict format."""
    soup = BeautifulSoup(html, "html.parser")

    # last_updated is the date when the data on the page was last updated;
    # it is found by locating the "Letzte Aktualisierung:" label in the body
    last_updated = str(soup.select("body"))
    start = str.find(last_updated, "Letzte Aktualisierung:") + 23
    last_updated = last_updated[start:start + 16]

    data = {
        # convert_date turns this date into the correct ISO/UTC format
        "last_updated": convert_date(last_updated, "%d.%m.%Y %H:%M"),
        "lots": []
    }

    for tr in soup.find_all("tr"):
        if tr.td is None:
            continue
        td = tr.find_all('td')
        parking_name = td[0].string
        # work-around for the Umlaute-problem: ugly but working
        if 'Heiligengeist-' in parking_name:
            parking_name = 'Heiligengeist-Höfe'
        elif 'Schlossh' in parking_name:
            parking_name = 'Schlosshöfe'
        lot = geodata.lot(parking_name)
        try:
            parking_state = 'open'
            parking_free = 0
            # td[3] holds the textual state, td[1] the free count
            if 'Geschlossen' in td[3].text:
                parking_state = 'closed'
            else:
                parking_free = int(td[1].text)
        except (IndexError, ValueError):
            # malformed row: report no data
            parking_state = 'nodata'

        data["lots"].append({
            "name": parking_name,
            "free": parking_free,
            "total": lot.total,
            "address": lot.address,
            "coords": lot.coords,
            "state": parking_state,
            "lot_type": lot.type,
            "id": lot.id,
            "forecast": False
        })

    return data
#!/usr/bin/env python
"""Scrape all supported cities and store the results in the database."""
import json
import traceback

import requests
from bs4 import BeautifulSoup
from park_api import util, env, db

HEADERS = {
    "User-Agent": "ParkAPI v%s - Info: %s" %
                  (env.SERVER_VERSION, env.SOURCE_REPOSITORY),
}


def get_html(city):
    """Download html data for a given city"""
    r = requests.get(city.source, headers={**HEADERS, **city.headers})

    # Requests fails to correctly check the encoding for every site,
    # we're going to have to get that manually (in some cases). This sucks.
    soup = BeautifulSoup(r.text, "html.parser")
    meta_content = soup.find("meta", {"http-equiv": "content-type"})
    if meta_content is not None:
        encoding = meta_content["content"].split("=")[-1]
        r.encoding = encoding

    return r.text


def parse_html(city, html):
    """Use a city module to parse its html"""
    return city.parse_html(html)


def add_metadata(data):
    """Adds metadata to a scraped output dict"""
    data["last_downloaded"] = util.utc_now()
    return data


def save_data_to_db(cursor, parking_data, city):
    """Save the data given into the Postgres DB."""
    timestamp_updated = parking_data["last_updated"]
    timestamp_downloaded = util.utc_now()
    json_data = json.dumps(parking_data)
    # BUGFIX: RETURNING 'id' returned the literal string "id" for every row;
    # RETURNING id yields the generated key (unused today, but now correct
    # for any caller that fetches it).
    sql = """
    INSERT INTO parkapi(
        timestamp_updated,
        timestamp_downloaded,
        city,
        data)
    VALUES (%(updated)s, %(downloaded)s, %(city)s, %(data)s)
    RETURNING id;
    """
    cursor.execute(sql, {
        "updated": timestamp_updated,
        "downloaded": timestamp_downloaded,
        "city": city,
        "data": json_data
    })

    print("Saved " + city + " to DB.")


def _live(module):
    """
    Scrape data for a given city pulling all data now
    This function is only used in development mode
    for debugging the server without a database present.
    """
    return add_metadata(module.parse_html(get_html(module.geodata.city)))


def scrape_city(module):
    """Scrape one city module and persist the result."""
    city = module.geodata.city
    data = add_metadata(module.parse_html(get_html(city)))
    with db.cursor(commit=True) as cursor:
        save_data_to_db(cursor, data, city.id)


def main():
    """
    Iterate over all cities in ./cities,
    scrape and save their data to the database
    """
    # the catch-all enterprise loop
    db.setup()
    for module in env.supported_cities().values():
        try:
            scrape_city(module)
        except Exception as e:
            # one broken city must not stop the whole scrape run
            print("Failed to scrape '%s': %s" %
                  (module.geodata.city.name, e))
            print(traceback.format_exc())
import pytz
from datetime import datetime

from park_api import db

# Lazily built cache: {city: {lot name: highest free count seen}}
LOT_COUNTS_PER_CITY = {}


def get_most_lots_from_known_data(city, lot_name):
    """
    Get the total value from the highest known value in the last saved JSON.
    This is useful for cities that don't publish
    total number of spaces for a parking lot.

    Caveats:
    - Returns 0 if not found.
    - If a lot name exists twice only the last value is returned.

    :param city: city whose stored rows are scanned
    :param lot_name: name of the lot to look up
    :return: highest observed free count for the lot, or 0
    """
    global LOT_COUNTS_PER_CITY
    # FIXME ugly work around, this should be really fixed in a different way
    lot_counts = LOT_COUNTS_PER_CITY.get(city, {})
    if lot_counts == {}:
        with db.cursor() as cursor:
            sql = """
                SELECT data FROM parkapi
                WHERE city=%s
                ORDER BY timestamp_downloaded DESC LIMIT 600;
            """
            cursor.execute(sql, (city,))
            all_data = cursor.fetchall()
        for json_data in all_data:
            for lot in json_data[0]["lots"]:
                # BUGFIX: key the running maximum by each lot's own name.
                # Previously every count was stored under the single
                # requested lot_name, so the cache held the city-wide
                # maximum for that one key and 0 for every other lot.
                name = lot["name"]
                count = int(lot["free"])
                if count > lot_counts.get(name, 0):
                    lot_counts[name] = count
        LOT_COUNTS_PER_CITY[city] = lot_counts
    return lot_counts.get(lot_name, 0)


def utc_now():
    """
    Returns the current UTC time in ISO format.

    :return: ISO 8601 string without microseconds or timezone suffix
    """
    return datetime.utcnow().replace(microsecond=0).isoformat()


def remove_special_chars(string):
    """
    Remove any umlauts, spaces and punctuation from a string.

    :param string: input text
    :return: sanitized text (umlauts transliterated, separators removed)
    """
    replacements = {
        "ä": "ae",
        "ö": "oe",
        "ü": "ue",
        "ß": "ss",
        "-": "",
        " ": "",
        ".": "",
        ",": "",
        "'": "",
        "\"": "",
        "/": "",
        "\\": "",
        "\n": "",
        "\t": ""
    }
    for old, new in replacements.items():
        string = string.replace(old, new)
    return string


def convert_date(date_string, date_format, timezone="Europe/Berlin"):
    """
    Convert a date into a ISO formatted UTC date string.
    Timezone defaults to Europe/Berlin.

    :param date_string: the raw date text from a city page
    :param date_format: strptime format describing date_string
    :param timezone: IANA name of the timezone date_string is expressed in
    :return: ISO 8601 UTC string without microseconds or tz suffix
    """
    last_updated = datetime.strptime(date_string, date_format)
    local_timezone = pytz.timezone(timezone)
    # is_dst=None makes ambiguous/non-existent local times raise instead of
    # silently picking an offset
    last_updated = local_timezone.localize(last_updated, is_dst=None)
    last_updated = last_updated.astimezone(pytz.utc).replace(tzinfo=None)

    return last_updated.replace(microsecond=0).isoformat()
from bs4 import BeautifulSoup
from park_api.util import convert_date
from park_api.geodata import GeoData
import requests

# This loads the geodata for this city if .geojson exists in the same
# directory as this file. No need to remove this if there's no geodata (yet),
# everything will still work.
geodata = GeoData(__file__)


def parse_html(html):
    """Parse the Frankfurt DATEX-II status feed into the ParkAPI dict format.

    The status feed only carries facility ids, so the static facility table
    (names + coordinates) is fetched from the Frankfurt open-data portal on
    every call.
    """
    soup = BeautifulSoup(html, "html.parser")
    r = requests.get('http://offenedaten.frankfurt.de/dataset/e821f156-69cf-4dd0-9ffe-13d9d6218597/resource/eac5ca3d-4285-48f4-bfe3-d3116a262e5f/download/parkdatensta.xml')
    geo = BeautifulSoup(r.text, "html.parser")
    # publicationtime is ISO-ish; strip the fractional seconds part
    last_updated = soup.find_all("publicationtime")[0].text.split(".")[0]

    data = {
        "last_updated": last_updated,
        "lots": []
    }
    for tr in soup.select("parkingfacilitytablestatuspublication > parkingfacilitystatus"):
        lot_id = tr.find("parkingfacilityreference")["id"]
        lot_total = int(tr.find("totalparkingcapacityshorttermoverride").text)
        # occupied can momentarily exceed capacity; clamp free at 0
        lot_free = max(lot_total - int(tr.find("totalnumberofoccupiedparkingspaces").text), 0)

        # state may only be open, closed or nodata; anything else maps to nodata
        state = tr.find("parkingfacilitystatus")
        if state and state.text in ["open", "closed"]:
            state = state.text
        else:
            state = "nodata"

        # skip facilities the static table does not know about
        lotNode = geo.find(id=lot_id)
        if not lotNode:
            continue
        coordsNode = lotNode.find("pointcoordinates")

        lot = {
            "name": lotNode.find("parkingfacilitydescription").text,
            "address": "none",
            "coords": {"lng": float(coordsNode.find("longitude").text), "lat": float(coordsNode.find("latitude").text)},
            "type": "none",
            "id": lot_id
        }

        data["lots"].append({
            "name": lot["name"],
            "free": lot_free,
            "total": lot_total,
            "address": lot["address"],
            "coords": lot["coords"],
            "state": state,
            "lot_type": lot["type"],
            "id": lot["id"],
            "forecast": False,
        })
    return data
14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | # last_updated is the date when the data on the page was last updated, it should be listed on most pages 17 | # suche: 18 | date_time_text = soup.find('td', width='233').text.strip() 19 | 20 | data = { 21 | # convert_date is a utility function you can use to turn this date into the correct string format 22 | # 'Stand: 05.06.2019, 14:40:20' 23 | "last_updated": convert_date(date_time_text, 'Stand: %d.%m.%Y, %H:%M:%S'), 24 | # URL for the page where the scraper can gather the data 25 | "lots": [] 26 | } 27 | 28 | # everything is in table-objects 29 | # so we have to go down several levels of table-objects 30 | html_level0 = soup.find('table') 31 | html_level1 = html_level0.find_all( 'table') 32 | html_level2 = html_level1[1].find_all('table') 33 | html_level3 = html_level2[0].find_all('table') 34 | html_level4 = html_level3[2].find_all('table') 35 | # here we have the data of the tables 36 | # [0]: header 37 | # [1]: empty 38 | # all following: empty or Parkhaus 39 | for html_parkhaus in html_level4[2:] : 40 | if ( html_parkhaus.text.strip() == '' ) : continue # table is empty 41 | html_parkhaus_all_rows = html_parkhaus.find_all('tr') 42 | for html_parkhaus_row in html_parkhaus_all_rows : 43 | # one row: one parkhaus 44 | html_parkhaus_data = html_parkhaus_row.find_all('td') 45 | parking_name_list = html_parkhaus_data[1].text.split() 46 | parking_name = '' 47 | for parking_name_part in parking_name_list : 48 | if ( parking_name != '' ) : parking_name += ' ' 49 | parking_name += parking_name_part 50 | 51 | lot = geodata.lot(parking_name) 52 | parking_state = 'open' 53 | parking_free = 0 54 | try : 55 | parking_free = int(html_parkhaus_data[2].text) 56 | except: 57 | parking_state = 'nodata' 58 | 59 | data["lots"].append({ 60 | "name": parking_name, 61 | "free": parking_free, 62 | "total": lot.total, 63 | "address": lot.address, 64 | "coords": lot.coords, 65 | "state": parking_state, 66 | "lot_type": lot.type, 67 
| "id": lot.id, 68 | "forecast": False, 69 | }) 70 | 71 | return data 72 | -------------------------------------------------------------------------------- /park_api/env.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from park_api import structs, security 4 | import importlib 5 | import configparser 6 | import sys 7 | import subprocess 8 | import logging 9 | 10 | API_VERSION = '1.0' 11 | SERVER_VERSION = '0.3.0' 12 | SOURCE_REPOSITORY = 'https://github.com/offenesdresden/ParkAPI' 13 | 14 | APP_ROOT = os.path.realpath(os.path.join(os.path.dirname(__file__), "..")) 15 | 16 | SERVER_CONF = None 17 | ENV = None 18 | SUPPORTED_CITIES = None 19 | DATABASE = {} 20 | 21 | DEFAULT_CONFIGURATION = { 22 | "port": 5000, 23 | "host": "::1", 24 | "debug": False, 25 | "live_scrape": True, 26 | "database_uri": "postgres:///park_api", 27 | } 28 | 29 | 30 | def is_production(): 31 | return ENV == "production" 32 | 33 | 34 | def is_development(): 35 | return ENV == "development" 36 | 37 | 38 | def is_testing(): 39 | return ENV == "testing" 40 | 41 | 42 | def is_staging(): 43 | return ENV == "staging" 44 | 45 | 46 | def load_cities(): 47 | """ 48 | Iterate over files in park_api/cities to add them to list of available cities. 49 | This list is used to stop requests trying to access files and output them which are not cities. 50 | """ 51 | cities = {} 52 | path = os.path.join(APP_ROOT, "park_api", "cities") 53 | for file in filter(security.file_is_allowed, os.listdir(path)): 54 | city = importlib.import_module("park_api.cities." 
+ file.title()[:-3]) 55 | cities[file[:-3]] = city 56 | return cities 57 | 58 | 59 | def supported_cities(): 60 | global SUPPORTED_CITIES 61 | if SUPPORTED_CITIES is None: 62 | SUPPORTED_CITIES = load_cities() 63 | return SUPPORTED_CITIES 64 | 65 | 66 | def load_config(): 67 | global ENV 68 | ENV = os.getenv("env", "development") 69 | 70 | config_path = os.path.join(APP_ROOT, "config.ini") 71 | try: 72 | config_file = open(config_path) 73 | except (OSError, FileNotFoundError) as e: 74 | print("Failed load configuration: %s" % e) 75 | exit(1) 76 | config = configparser.ConfigParser(DEFAULT_CONFIGURATION, strict=False) 77 | config.read_file(config_file) 78 | 79 | try: 80 | raw_config = config[ENV] 81 | except KeyError: 82 | print("environment '%s' does not exists in config.ini" % ENV, 83 | file=sys.stderr) 84 | exit(1) 85 | 86 | global SERVER_CONF, DATABASE_URI, SUPPORTED_CITIES, LIVE_SCRAPE 87 | SERVER_CONF = structs.ServerConf(host=raw_config.get('host'), 88 | port=raw_config.getint("port"), 89 | debug=raw_config.getboolean("debug")) 90 | LIVE_SCRAPE = raw_config.getboolean("live_scrape") 91 | DATABASE_URI = raw_config.get("database_uri") 92 | 93 | def determine_server_version(): 94 | global SERVER_VERSION 95 | try: 96 | proc = subprocess.Popen(["git", "rev-list", "--all", "--count"], stdout=subprocess.PIPE) 97 | rev = proc.stdout.read().decode('utf-8').strip() 98 | SERVER_VERSION = '0.3.{0}'.format(rev) 99 | except (UnicodeDecodeError, OSError) as e: 100 | logging.warning("Could not determine server version correctly: {0}".format(str(e))) 101 | 102 | load_config() 103 | determine_server_version() 104 | -------------------------------------------------------------------------------- /tests/fixtures/ingolstadt.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Ingolstadt 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 |
Ansichten von Ingolstadt
25 |
26 | 27 |

Freie Parkplätze:

28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 |

(04.07.2015, 17.44 Uhr)

36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 |
Congressgarage55
Festplatz1141
Hallenbad17
Hauptbahnhof234
Hauptbahnhof Ost222
Münster162
Nordbahnhof178
Reduit Tilly242
Schloss260
Theater-Ost357
Theater-West361
102 | 103 | 104 | 105 | 106 |
107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | -------------------------------------------------------------------------------- /park_api/cities/Aarhus.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | import json 5 | 6 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 7 | geodata = GeoData(__file__) 8 | 9 | 10 | def parse_html(text_content): 11 | data_as_json = json.loads(text_content) 12 | 13 | # last_updated is the date when the data on the page was last updated, it should be listed on most pages 14 | last_updated = data_as_json["result"]["records"][0]["date"] 15 | data = { 16 | "last_updated": convert_date(last_updated, "%Y/%m/%d %H:%M:%S"), 17 | "lots": [] 18 | } 19 | 20 | # The page at https://www.odaa.dk/dataset/parkeringshuse-i-aarhus describes how the counts are made 21 | map_json_names = { 22 | "NORREPORT": "Nørreport", 23 | # "SKOLEBAKKEN": None, 24 | "SCANDCENTER": "Scandinavian Center", 25 | "BRUUNS": "Bruuns Galleri", 26 | "MAGASIN": "Magasin", 27 | "KALKVAERKSVEJ": "Kalkværksvej", 28 | "SALLING": "Salling", 29 | "Navitas": "Navitas", 30 | "NewBusgadehuset": "Busgadehuset" 31 | } 32 | 33 | cummulatives = { 34 | "Urban Level 1": "Dokk1", 35 | "Urban Level 2+3": "Dokk1" 36 | } 37 | 38 | cumulative_lots = {} 39 | 40 | for record in data_as_json["result"]["records"]: 41 | lot_code = record["garageCode"] 42 | total = int(record["totalSpaces"]) 43 | free = max(int(record["totalSpaces"]) - int(record["vehicleCount"]), 0) 44 | 45 | if lot_code not in map_json_names.keys() and lot_code not in cummulatives.keys(): 46 | continue 47 | elif lot_code in map_json_names.keys(): 48 | lot_name = map_json_names[lot_code] 49 | lot = geodata.lot(lot_name) 50 | data["lots"].append({ 51 | "name": lot_name, 52 | "free": free, 53 | "total": total, 54 | "address": lot.address, 
55 | "coords": lot.coords, 56 | "state": "unknown", 57 | "lot_type": lot.type, 58 | "id": lot.id, 59 | "forecast": False, 60 | }) 61 | elif lot_code not in cummulatives.keys(): 62 | lot_name = cummulatives[lot_code] 63 | if lot_name not in cumulative_lots.keys(): 64 | cumulative_lots[lot_name] = { 65 | "name": lot_name, 66 | "free": free, 67 | "total": total, 68 | "address": lot.address, 69 | "coords": lot.coords, 70 | "state": "unknown", 71 | "lot_type": lot.type, 72 | "id": lot.id, 73 | "forecast": False, 74 | } 75 | else: 76 | current_data = cumulative_lots[lot_name] 77 | cumulative_lots[lot_name] = { 78 | "name": lot_name, 79 | "free": current_data["free"] + free, 80 | "total": current_data["total"] + total, 81 | "address": lot.address, 82 | "coords": lot.coords, 83 | "state": "unknown", 84 | "lot_type": lot.type, 85 | "id": lot.id, 86 | "forecast": False, 87 | } 88 | 89 | for lot in cumulative_lots: 90 | data["lots"].append(lot) 91 | 92 | return data 93 | -------------------------------------------------------------------------------- /park_api/cities/Heilbronn.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 9.218716, 9 | 49.142525 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Heilbronn", 14 | "type": "city", 15 | "url": "https://www.heilbronn.de/startseite.html", 16 | "source": "https://www.heilbronn.de/allgemeine-inhalte/ajax-parkhausbelegung.html?type=1496993343", 17 | "active_support":false 18 | } 19 | }, 20 | { 21 | "type": "Feature", 22 | "properties": { 23 | "name": "Am Bollwerksturm", 24 | "total": 304, 25 | "address": "Mannheimer Str. 
25", 26 | "type": "Parkhaus" 27 | }, 28 | "geometry": { 29 | "type": "Point", 30 | "coordinates": [ 31 | 9.216159, 32 | 49.146106 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "properties": { 39 | "name": "City-Parkhaus Experimenta", 40 | "total": 500, 41 | "address": "Bahnhofstraße 6", 42 | "type": "Parkhaus" 43 | }, 44 | "geometry": { 45 | "type": "Point", 46 | "coordinates": [ 47 | 9.213191, 48 | 49.143647 49 | ] 50 | } 51 | }, 52 | { 53 | "type": "Feature", 54 | "properties": { 55 | "name": "Harmonie", 56 | "total": 435, 57 | "address": "Allee 28", 58 | "type": "Tiefgarage" 59 | }, 60 | "geometry": { 61 | "type": "Point", 62 | "coordinates": [ 63 | 9.225386, 64 | 49.142295 65 | ] 66 | } 67 | }, 68 | { 69 | "type": "Feature", 70 | "properties": { 71 | "name": "Kiliansplatz", 72 | "total": 230, 73 | "address": "Kiliansstr. 11", 74 | "type": "Parkhaus" 75 | }, 76 | "geometry": { 77 | "type": "Point", 78 | "coordinates": [ 79 | 9.220854, 80 | 49.14117 81 | ] 82 | } 83 | }, 84 | { 85 | "type": "Feature", 86 | "properties": { 87 | "name": "Käthchenhof", 88 | "total": 67, 89 | "address": "Gerberstraße", 90 | "type": "Tiefgarage" 91 | }, 92 | "geometry": { 93 | "type": "Point", 94 | "coordinates": [ 95 | 9.21755, 96 | 49.143207 97 | ] 98 | } 99 | }, 100 | { 101 | "type": "Feature", 102 | "properties": { 103 | "name": "Stadtgalerie", 104 | "total": 660, 105 | "address": "Allerheiligenstraße", 106 | "type": "Parkhaus" 107 | }, 108 | "geometry": { 109 | "type": "Point", 110 | "coordinates": [ 111 | 9.216804, 112 | 49.140119 113 | ] 114 | } 115 | }, 116 | { 117 | "type": "Feature", 118 | "properties": { 119 | "name": "Theaterforum K3", 120 | "total": 460, 121 | "address": "Berliner Platz 12", 122 | "type": "Tiefgarage" 123 | }, 124 | "geometry": { 125 | "type": "Point", 126 | "coordinates": [ 127 | 9.220796, 128 | 49.145997 129 | ] 130 | } 131 | }, 132 | { 133 | "type": "Feature", 134 | "properties": { 135 | "name": "Wollhaus", 136 | "total": 450, 137 | 
"address": "Am Wollhaus", 138 | "type": "Parkhaus" 139 | }, 140 | "geometry": { 141 | "type": "Point", 142 | "coordinates": [ 143 | 9.220889, 144 | 49.139792 145 | ] 146 | } 147 | }] 148 | } 149 | 150 | 151 | -------------------------------------------------------------------------------- /park_api/cities/Dresden.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import time 4 | import datetime 5 | from bs4 import BeautifulSoup 6 | from park_api.geodata import GeoData 7 | from park_api.util import convert_date, get_most_lots_from_known_data 8 | 9 | geodata = GeoData(__file__) 10 | 11 | 12 | def parse_html(html): 13 | if geodata.private_data: 14 | api_data = json.loads(html) 15 | dt = time.strptime(api_data[0]["timestamp"].split(".")[0], "%Y-%m-%dT%H:%M:%S") 16 | ts = time.gmtime(time.mktime(dt)) 17 | data = { 18 | "lots": [], 19 | "last_updated": time.strftime("%Y-%m-%dT%H:%M:%S", ts) 20 | } 21 | status = ['open', 'closed', 'unknown'] 22 | id_lots = {geodata.lots[n].aux: geodata.lots[n] for n in geodata.lots} 23 | for dataset in api_data: 24 | try: 25 | lot = id_lots[dataset['id']] 26 | forecast = os.path.isfile("forecast_data/" + lot.id + ".csv") 27 | data["lots"].append({ 28 | "coords": lot.coords, 29 | "name": lot.name, 30 | "total": lot.total, 31 | "free": max(lot.total - dataset["belegung"], 0), 32 | "state": status[dataset["status"] - 1], 33 | "id": lot.id, 34 | "lot_type": lot.type, 35 | "address": lot.address, 36 | "forecast": forecast, 37 | "region": "" 38 | }) 39 | except KeyError: 40 | pass 41 | else: 42 | #use website 43 | soup = BeautifulSoup(html, "html.parser") 44 | date_field = soup.find(id="P1_LAST_UPDATE").text 45 | last_updated = convert_date(date_field, "%d.%m.%Y %H:%M:%S") 46 | data = { 47 | "lots": [], 48 | "last_updated": last_updated 49 | } 50 | 51 | for table in soup.find_all("table"): 52 | if table["summary"] != "": 53 | region = table["summary"] 54 | if region == 
"Busparkplätze": 55 | continue 56 | 57 | for lot_row in table.find_all("tr"): 58 | if lot_row.find("th") is not None: 59 | continue 60 | 61 | cls = lot_row.find("div")["class"] 62 | state = "nodata" 63 | if "green" in cls or "yellow" in cls or "red" in cls: 64 | state = "open" 65 | elif "park-closed" in cls: 66 | state = "closed" 67 | 68 | lot_name = lot_row.find("td", {"headers": "BEZEICHNUNG"}).text 69 | 70 | try: 71 | col = lot_row.find("td", {"headers": "FREI"}) 72 | free = int(col.text) 73 | except ValueError: 74 | free = 0 75 | 76 | try: 77 | col = lot_row.find("td", {"headers": "KAPAZITAET"}) 78 | total = int(col.text) 79 | except ValueError: 80 | total = get_most_lots_from_known_data("Dresden", lot_name) 81 | 82 | lot = geodata.lot(lot_name) 83 | forecast = os.path.isfile("forecast_data/" + lot.id + ".csv") 84 | 85 | data["lots"].append({ 86 | "coords": lot.coords, 87 | "name": lot_name, 88 | "total": total, 89 | "free": free, 90 | "state": state, 91 | "id": lot.id, 92 | "lot_type": lot.type, 93 | "address": lot.address, 94 | "forecast": forecast, 95 | "region": region 96 | }) 97 | return data 98 | -------------------------------------------------------------------------------- /tests/fixtures/aarhus.json: -------------------------------------------------------------------------------- 1 | { 2 | "help":"http://www.odaa.dk/api/3/action/help_show?name=datastore_search", 3 | "success":true, 4 | "result":{ 5 | "resource_id":"2a82a145-0195-4081-a13c-b0e587e9b89c", 6 | "fields":[ 7 | { 8 | "type":"int4", 9 | "id":"_id" 10 | }, 11 | { 12 | "type":"text", 13 | "id":"date" 14 | }, 15 | { 16 | "type":"text", 17 | "id":"garageCode" 18 | }, 19 | { 20 | "type":"int4", 21 | "id":"totalSpaces" 22 | }, 23 | { 24 | "type":"int4", 25 | "id":"vehicleCount" 26 | } 27 | ], 28 | "records":[ 29 | { 30 | "date":"2016/10/29 17:30:04", 31 | "vehicleCount":56, 32 | "_id":1, 33 | "totalSpaces":65, 34 | "garageCode":"NORREPORT" 35 | }, 36 | { 37 | "date":"2016/10/29 17:30:04", 38 | 
"vehicleCount":512, 39 | "_id":2, 40 | "totalSpaces":512, 41 | "garageCode":"SKOLEBAKKEN" 42 | }, 43 | { 44 | "date":"2016/10/29 17:30:04", 45 | "vehicleCount":885, 46 | "_id":3, 47 | "totalSpaces":1240, 48 | "garageCode":"SCANDCENTER" 49 | }, 50 | { 51 | "date":"2016/10/29 17:30:04", 52 | "vehicleCount":352, 53 | "_id":4, 54 | "totalSpaces":967, 55 | "garageCode":"BRUUNS" 56 | }, 57 | { 58 | "date":"2016/10/29 17:30:04", 59 | "vehicleCount":6187, 60 | "_id":5, 61 | "totalSpaces":142, 62 | "garageCode":"BUSGADEHUSET" 63 | }, 64 | { 65 | "date":"2016/10/29 17:30:04", 66 | "vehicleCount":175, 67 | "_id":6, 68 | "totalSpaces":383, 69 | "garageCode":"MAGASIN" 70 | }, 71 | { 72 | "date":"2016/10/29 17:30:04", 73 | "vehicleCount":29, 74 | "_id":7, 75 | "totalSpaces":210, 76 | "garageCode":"KALKVAERKSVEJ" 77 | }, 78 | { 79 | "date":"2016/10/29 17:30:04", 80 | "vehicleCount":278, 81 | "_id":8, 82 | "totalSpaces":700, 83 | "garageCode":"SALLING" 84 | }, 85 | { 86 | "date":"2016/10/29 17:30:04", 87 | "vehicleCount":0, 88 | "_id":9, 89 | "totalSpaces":0, 90 | "garageCode":"DOKK1" 91 | }, 92 | { 93 | "date":"2016/10/29 17:30:04", 94 | "vehicleCount":73, 95 | "_id":10, 96 | "totalSpaces":449, 97 | "garageCode":"Navitas" 98 | }, 99 | { 100 | "date":"2016/10/29 17:30:04", 101 | "vehicleCount":102, 102 | "_id":11, 103 | "totalSpaces":105, 104 | "garageCode":"NewBusgadehuset" 105 | }, 106 | { 107 | "date":"2016/10/29 17:30:04", 108 | "vehicleCount":35, 109 | "_id":12, 110 | "totalSpaces":319, 111 | "garageCode":"Urban Level 1" 112 | }, 113 | { 114 | "date":"2016/10/29 17:30:04", 115 | "vehicleCount":66, 116 | "_id":13, 117 | "totalSpaces":654, 118 | "garageCode":"Urban Level 2+3" 119 | } 120 | ], 121 | "_links":{ 122 | "start":"/api/action/datastore_search?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c", 123 | "next":"/api/action/datastore_search?offset=100&resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c" 124 | }, 125 | "total":13 126 | } 127 | } 128 | 129 | 
-------------------------------------------------------------------------------- /park_api/cities/Limburg.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 8.063907, 9 | 50.388856 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Limburg", 14 | "type": "city", 15 | "url": "https://www.limburg.de/", 16 | "source": "http://p127393.mittwaldserver.info/LM/_pls/pls.php", 17 | "active_support":false 18 | } 19 | }, 20 | { 21 | "type": "Feature", 22 | "properties": { 23 | "name": "Altstadt", 24 | "total": 0, 25 | "address": "Sackgasse 11", 26 | "type": "Tiefgarage" 27 | }, 28 | "geometry": { 29 | "type": "Point", 30 | "coordinates": [ 31 | 8.063018, 32 | 50.388892 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "properties": { 39 | "name": "Bahnhof", 40 | "total": 0, 41 | "address": "Graupfortstraße 7", 42 | "type": "Parkhaus" 43 | }, 44 | "geometry": { 45 | "type": "Point", 46 | "coordinates": [ 47 | 8.064100, 48 | 50.385133 49 | ] 50 | } 51 | }, 52 | { 53 | "type": "Feature", 54 | "properties": { 55 | "name": "City", 56 | "total": 0, 57 | "address": "Frankfurter Straße 12", 58 | "type": "Parkhaus" 59 | }, 60 | "geometry": { 61 | "type": "Point", 62 | "coordinates": [ 63 | 8.066296, 64 | 50.386418 65 | ] 66 | } 67 | }, 68 | { 69 | "type": "Feature", 70 | "properties": { 71 | "name": "Karstadt", 72 | "total": 0, 73 | "address": "Werner-Senger-Straße 15", 74 | "type": "Parkhaus" 75 | }, 76 | "geometry": { 77 | "type": "Point", 78 | "coordinates": [ 79 | 8.060811, 80 | 50.386820 81 | ] 82 | } 83 | }, 84 | { 85 | "type": "Feature", 86 | "properties": { 87 | "name": "Neumarkt", 88 | "total": 0, 89 | "address": "Neumarkt", 90 | "type": "Parkplatz" 91 | }, 92 | "geometry": { 93 | "type": "Point", 94 | "coordinates": [ 95 | 8.063834, 96 | 50.385931 97 | ] 98 | } 99 | }, 100 | { 101 | 
"type": "Feature", 102 | "properties": { 103 | "name": "PH-Mitte", 104 | "total": 0, 105 | "address": "Grabenstraße 26", 106 | "type": "Parkhaus" 107 | }, 108 | "geometry": { 109 | "type": "Point", 110 | "coordinates": [ 111 | 8.062045, 112 | 50.388618 113 | ] 114 | } 115 | }, 116 | { 117 | "type": "Feature", 118 | "properties": { 119 | "name": "Sparkasse", 120 | "total": 0, 121 | "address": "Schiede 41", 122 | "type": "Parkhaus" 123 | }, 124 | "geometry": { 125 | "type": "Point", 126 | "coordinates": [ 127 | 8.059188, 128 | 50.387296 129 | ] 130 | } 131 | }, 132 | { 133 | "type": "Feature", 134 | "properties": { 135 | "name": "Stadthalle", 136 | "total": 0, 137 | "address": "Diezer Straße 17", 138 | "type": "Parkhaus" 139 | }, 140 | "geometry": { 141 | "type": "Point", 142 | "coordinates": [ 143 | 8.061152, 144 | 50.387457 145 | ] 146 | } 147 | }, 148 | { 149 | "type": "Feature", 150 | "properties": { 151 | "name": "WERKStadt", 152 | "total": 0, 153 | "address": "Joseph-Schneider-Straße 1", 154 | "type": "Parkplatz" 155 | }, 156 | "geometry": { 157 | "type": "Point", 158 | "coordinates": [ 159 | 8.056576, 160 | 50.384046 161 | ] 162 | } 163 | } 164 | ] 165 | } 166 | 167 | 168 | -------------------------------------------------------------------------------- /park_api/cities/Ulm.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 9.99109, 9 | 48.39851 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Ulm", 14 | "type": "city", 15 | "url": "https://neuulm.ulm.de/stadt-ulm", 16 | "source": "https://www.parken-in-ulm.de/", 17 | "active_support":true 18 | } 19 | }, 20 | { 21 | "type": "Feature", 22 | "properties": { 23 | "name": "Am Rathaus", 24 | "total": 558, 25 | "address": "Neue Str. 
113", 26 | "type": "Tiefgarage" 27 | }, 28 | "geometry": { 29 | "type": "Point", 30 | "coordinates": [ 31 | 9.993047, 32 | 48.397305 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "properties": { 39 | "name": "Deutschhaus", 40 | "total": 594, 41 | "address": "Friedrich-Ebert-Straße 8", 42 | "type": "Parkhaus" 43 | }, 44 | "geometry": { 45 | "type": "Point", 46 | "coordinates": [ 47 | 9.984545, 48 | 48.397917 49 | ] 50 | } 51 | }, 52 | { 53 | "type": "Feature", 54 | "properties": { 55 | "name": "Fischerviertel", 56 | "total": 395, 57 | "address": "Schwilmengasse", 58 | "type": "Tiefgarage" 59 | }, 60 | "geometry": { 61 | "type": "Point", 62 | "coordinates": [ 63 | 9.988355, 64 | 48.396583 65 | ] 66 | } 67 | }, 68 | { 69 | "type": "Feature", 70 | "properties": { 71 | "name": "Salzstadel", 72 | "total": 530, 73 | "address": "Salzstadelgasse 14", 74 | "type": "Tiefgarage" 75 | }, 76 | "geometry": { 77 | "type": "Point", 78 | "coordinates": [ 79 | 9.990002, 80 | 48.401160 81 | ] 82 | } 83 | }, 84 | { 85 | "type": "Feature", 86 | "properties": { 87 | "name": "Frauenstraße", 88 | "total": 770, 89 | "address": "Rosengasse 19", 90 | "type": "Tiefgarage" 91 | }, 92 | "geometry": { 93 | "type": "Point", 94 | "coordinates": [ 95 | 9.996022, 96 | 48.400979 97 | ] 98 | } 99 | }, 100 | { 101 | "type": "Feature", 102 | "properties": { 103 | "name": "Congress Centrum Nord / Basteicenter", 104 | "total": 420, 105 | "address": "Wichernstraße", 106 | "type": "Tiefgarage" 107 | }, 108 | "geometry": { 109 | "type": "Point", 110 | "coordinates": [ 111 | 10.001591, 112 | 48.401814 113 | ] 114 | } 115 | }, 116 | { 117 | "type": "Feature", 118 | "properties": { 119 | "name": "Congress Centrum Süd / Maritim Hotel", 120 | "total": 56, 121 | "address": "Basteistraße 46", 122 | "type": "Tiefgarage" 123 | }, 124 | "geometry": { 125 | "type": "Point", 126 | "coordinates": [ 127 | 10.004385, 128 | 48.401223 129 | ] 130 | } 131 | }, 132 | { 133 | "type": "Feature", 134 | "properties": { 
135 | "name": "Kornhaus", 136 | "total": 135, 137 | "address": "Rosengasse 9", 138 | "type": "Tiefgarage" 139 | }, 140 | "geometry": { 141 | "type": "Point", 142 | "coordinates": [ 143 | 9.99491, 144 | 48.40093 145 | ] 146 | } 147 | }, 148 | { 149 | "type": "Feature", 150 | "properties": { 151 | "name": "Theater", 152 | "total": 83, 153 | "address": "Olgastraße 63", 154 | "type": "Tiefgarage" 155 | }, 156 | "geometry": { 157 | "type": "Point", 158 | "coordinates": [ 159 | 9.98559, 160 | 48.40062 161 | ] 162 | } 163 | }] 164 | } 165 | -------------------------------------------------------------------------------- /park_api/cities/Magdeburg.py: -------------------------------------------------------------------------------- 1 | from bs4 import BeautifulSoup 2 | from park_api.util import convert_date 3 | from park_api.geodata import GeoData 4 | 5 | # This loads the geodata for this city if .geojson exists in the same directory as this file. 6 | # No need to remove this if there's no geodata (yet), everything will still work. 7 | geodata = GeoData(__file__) 8 | 9 | # This function is called by the scraper and given the data of the page specified as source in geojson above. 10 | # It's supposed to return a dictionary containing everything the current spec expects. Tests will fail if it doesn't ;) 11 | def parse_html(html): 12 | 13 | # BeautifulSoup is a great and easy way to parse the html and find the bits and pieces we're looking for. 
14 | soup = BeautifulSoup(html, "html.parser") 15 | 16 | data = { 17 | "last_updated": '', # will add this later 18 | # URL for the page where the scraper can gather the data 19 | "lots": [] 20 | } 21 | 22 | # find all entries 23 | outer_table = soup.find('table') 24 | # first group of lots 25 | inner_tables = outer_table.find_all('table') 26 | # inner_tables[0] ist Navi-Leiste, weiter mit first_part[1] 27 | rows = inner_tables[1].find_all('tr') 28 | for row in rows[6:] : 29 | one_row = row.find_all('td') 30 | if ( one_row[0].text == '' ) : continue 31 | # 32 | if ( len(one_row) <= 5 ) : 33 | startingPoint = 0 34 | else : 35 | startingPoint = 1 36 | parking_name = one_row[startingPoint+0].text.strip() 37 | lot = geodata.lot(parking_name) 38 | try : 39 | parking_free = 0 40 | if ( 'offline' == one_row[startingPoint+1].text.strip() ) : 41 | parking_status = 'nodata' 42 | else : 43 | parking_status = 'open' 44 | parking_free = int(one_row[startingPoint+1].text) 45 | except : 46 | parking_status = 'nodata' 47 | 48 | data["lots"].append({ 49 | "name": parking_name, 50 | "free": parking_free, 51 | "total": lot.total, 52 | "address": lot.address, 53 | "coords": lot.coords, 54 | "state": parking_status, 55 | "lot_type": lot.type, 56 | "id": lot.id, 57 | "forecast": False, 58 | }) 59 | 60 | # second group of lots 61 | rows = inner_tables[2].find_all('tr') 62 | for row in rows[4:9] : 63 | one_row = row.find_all('td') 64 | if ( one_row[0].text == '' ) : continue 65 | # 66 | if ( len(one_row) <= 2 ) : 67 | startingPoint = 0 68 | else : 69 | startingPoint = 1 70 | parking_name = one_row[startingPoint+0].text.strip() 71 | lot = geodata.lot(parking_name) 72 | if ( lot.address == None ) : print('not found: ', '/'+parking_name+'/') 73 | try : 74 | parking_free = 0 75 | if ( 'offline' == one_row[startingPoint+1].text.strip() ) : 76 | parking_status = 'nodata' 77 | else : 78 | parking_status = 'open' 79 | parking_free = int(one_row[startingPoint+1].text) 80 | except : 81 | 
parking_status = 'nodata' 82 | 83 | data["lots"].append({ 84 | "name": parking_name, 85 | "free": parking_free, 86 | "total": lot.total, 87 | "address": lot.address, 88 | "coords": lot.coords, 89 | "state": parking_status, 90 | "lot_type": lot.type, 91 | "id": lot.id, 92 | "forecast": False, 93 | }) 94 | 95 | # finaly we set the last_updated field 96 | # Letzte Aktualisierung vom 12.06.2019 11:22 Uhr 97 | current_date = rows[10].text.split() 98 | data["last_updated"] = convert_date(current_date[12]+' '+current_date[13], '%d.%m.%Y %H:%M') 99 | 100 | return data 101 | -------------------------------------------------------------------------------- /park_api/cities/Aarhus.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 10.203201, 9 | 56.153005 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Aarhus", 14 | "type": "city", 15 | "url": "https://www.aarhus.dk", 16 | "source": "http://www.odaa.dk/api/action/datastore_search?resource_id=2a82a145-0195-4081-a13c-b0e587e9b89c", 17 | "active_support": true, 18 | "attribution":{ 19 | "contributor":"Manuel R. 
Ciosici", 20 | "url":"https://www.odaa.dk/dataset/parkeringshuse-i-aarhus", 21 | "license":"Aarhus License – https://www.odaa.dk/base/image/Vilkår%20for%20brug%20af%20danske%20offentlige%20data%20-%20Aarhus%20Kommune.%20docx.pdf" 22 | } 23 | } 24 | }, { 25 | "type": "Feature", 26 | "properties": { 27 | "name": "Nørreport", 28 | "total": 65, 29 | "address": "Nørreport 22", 30 | "type": "Parkplatz" 31 | }, 32 | "geometry": { 33 | "type": "Point", 34 | "coordinates": [ 35 | 10.212737, 36 | 56.161859 37 | ] 38 | } 39 | }, 40 | { 41 | "type": "Feature", 42 | "properties": { 43 | "name": "Scandinavian Center", 44 | "total": 1240, 45 | "address": "Margrethepladsen 2", 46 | "type": "Parkhaus" 47 | }, 48 | "geometry": { 49 | "type": "Point", 50 | "coordinates": [ 51 | 10.198423, 52 | 56.151647 53 | ] 54 | } 55 | }, 56 | { 57 | "type": "Feature", 58 | "properties": { 59 | "name": "Bruuns Galleri", 60 | "total": 967, 61 | "address": "Værkmestergade 25", 62 | "type": "Tiefgarage" 63 | }, 64 | "geometry": { 65 | "type": "Point", 66 | "coordinates": [ 67 | 10.206229, 68 | 56.149843 69 | ] 70 | } 71 | }, 72 | { 73 | "type": "Feature", 74 | "properties": { 75 | "name": "Busgadehuset", 76 | "total": 105, 77 | "address": "Telefonsmøgen 18", 78 | "type": "Tiefgarage" 79 | }, 80 | "geometry": { 81 | "type": "Point", 82 | "coordinates": [ 83 | 10.206103, 84 | 56.155285 85 | ] 86 | } 87 | }, 88 | { 89 | "type": "Feature", 90 | "properties": { 91 | "name": "Magasin", 92 | "total": 383, 93 | "address": "Åboulevarden 80", 94 | "type": "Parkhaus" 95 | }, 96 | "geometry": { 97 | "type": "Point", 98 | "coordinates": [ 99 | 10.204983, 100 | 56.156690 101 | ] 102 | } 103 | }, 104 | { 105 | "type": "Feature", 106 | "properties": { 107 | "name": "Kalkværksvej", 108 | "total": 210, 109 | "address": "Kalkværksvej", 110 | "type": "Parkplatz" 111 | }, 112 | "geometry": { 113 | "type": "Point", 114 | "coordinates": [ 115 | 10.211539, 116 | 56.149350 117 | ] 118 | } 119 | }, 120 | { 121 | "type": 
"Feature", 122 | "properties": { 123 | "name": "Salling", 124 | "total": 700, 125 | "address": "Østergade 25", 126 | "type": "Parkhaus" 127 | }, 128 | "geometry": { 129 | "type": "Point", 130 | "coordinates": [ 131 | 10.207709, 132 | 56.153909 133 | ] 134 | } 135 | }, 136 | { 137 | "type": "Feature", 138 | "properties": { 139 | "name": "Navitas", 140 | "total": 449, 141 | "address": "Inge Lehmanns Gade", 142 | "type": "Tiefgarage" 143 | }, 144 | "geometry": { 145 | "type": "Point", 146 | "coordinates": [ 147 | 10.216824, 148 | 56.159878 149 | ] 150 | } 151 | }, 152 | { 153 | "type": "Feature", 154 | "properties": { 155 | "name": "Dokk1", 156 | "total": 973, 157 | "address": "Hack Kampmanns Plads 2", 158 | "type": "Tiefgarage" 159 | }, 160 | "geometry": { 161 | "type": "Point", 162 | "coordinates": [ 163 | 10.213976, 164 | 56.152698 165 | ] 166 | } 167 | } 168 | ] 169 | } 170 | -------------------------------------------------------------------------------- /park_api/cities/Konstanz.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 9.1758200, 9 | 47.6603300 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Konstanz", 14 | "type": "city", 15 | "url": "http://www.konstanz.de", 16 | "source": "https://www.konstanz.de/start/leben+in+konstanz/parkleitsystem.html", 17 | "active_support": false 18 | } 19 | }, { 20 | "type": "Feature", 21 | "properties": { 22 | "name": "Marktstätte", 23 | "total": 282, 24 | "address": "Dammgasse 3", 25 | "type": "Parkhaus" 26 | }, 27 | "geometry": { 28 | "type": "Point", 29 | "coordinates": [ 30 | 9.17631447315216, 31 | 47.659621142013435 32 | ] 33 | } 34 | },{ 35 | "type": "Feature", 36 | "properties": { 37 | "name": "Altstadt", 38 | "total": 359, 39 | "address": "Untere Laube 26", 40 | "type": "Parkhaus" 41 | }, 42 | "geometry": { 43 | "type": "Point", 44 | 
"coordinates": [ 45 | 9.172049760818481, 46 | 47.663019166809015 47 | ] 48 | } 49 | }, 50 | { 51 | "type": "Feature", 52 | "properties": { 53 | "name": "Lago", 54 | "total": 936, 55 | "address": "Hafenstraße 31", 56 | "type": "Parkhaus" 57 | }, 58 | "geometry": { 59 | "type": "Point", 60 | "coordinates": [ 61 | 9.176606833934784, 62 | 47.657483940292856 63 | ] 64 | } 65 | }, 66 | { 67 | "type": "Feature", 68 | "properties": { 69 | "name": "Augustiner / Karstadt", 70 | "total": 284, 71 | "address": "Bruderturmgasse", 72 | "type": "Parkhaus" 73 | }, 74 | "geometry": { 75 | "type": "Point", 76 | "coordinates": [ 77 | 9.173318445682526, 78 | 47.65950913530439 79 | ] 80 | } 81 | }, 82 | { 83 | "type": "Feature", 84 | "properties": { 85 | "name": "Fischmarkt", 86 | "total": 158, 87 | "address": "Salmannsweilergasse 1", 88 | "type": "Parkhaus" 89 | }, 90 | "geometry": { 91 | "type": "Point", 92 | "coordinates": [ 93 | 9.176258146762848, 94 | 47.66129940731421 95 | ] 96 | } 97 | }, 98 | { 99 | "type": "Feature", 100 | "properties": { 101 | "name": "Döbele", 102 | "total": 335, 103 | "address": "über Kreisverkehr Grenzbachstraße", 104 | "type": "Parkhaus" 105 | }, 106 | "geometry": { 107 | "type": "Point", 108 | "coordinates": [ 109 | 9.16858971118927, 110 | 47.65779106680178 111 | ] 112 | } 113 | }, 114 | { 115 | "type": "Feature", 116 | "properties": { 117 | "name": "Bodenseeforum", 118 | "total": 500, 119 | "type": "Parkplatz" 120 | }, 121 | "geometry": { 122 | "type": "Point", 123 | "coordinates": [ 124 | 9.164448380470276, 125 | 47.67286153200742 126 | ] 127 | } 128 | }, 129 | { 130 | "type": "Feature", 131 | "properties": { 132 | "name": "Byk Gulden Str.", 133 | "total": 0, 134 | "address": "Byk Gulden Str.", 135 | "type": "Parkhaus" 136 | }, 137 | "geometry": { 138 | "type": "Point", 139 | "coordinates": [ 140 | 9.140249490737915, 141 | 47.68337197354464 142 | ] 143 | } 144 | }, 145 | { 146 | "type": "Feature", 147 | "properties": { 148 | "name": "Benediktiner", 149 
| "total": 143, 150 | "address": "Benediktinerplatz 8", 151 | "type": "Parkhaus" 152 | }, 153 | "geometry": { 154 | "type": "Point", 155 | "coordinates": [ 156 | 9.178049862384796, 157 | 47.66877236211362 158 | ] 159 | } 160 | }, 161 | { 162 | "type": "Feature", 163 | "properties": { 164 | "name": "Seerheincenter", 165 | "total": 280, 166 | "address": "Steinstraße 2", 167 | "type": "Parkhaus" 168 | }, 169 | "geometry": { 170 | "type": "Point", 171 | "coordinates": [ 172 | 9.183433055877686, 173 | 47.67272065604162 174 | ] 175 | } 176 | } 177 | ] 178 | } 179 | -------------------------------------------------------------------------------- /tests/fixtures/heilbronn.html: -------------------------------------------------------------------------------- 1 | 2 |
Datum: 05.08.2019 - Uhrzeit: 14:51

Ort
Am Bollwerksturm
Zufahrt: Mannheimer Straße 25 3 | 4 |
Freie Parkplätze: 122

Ort
5 | City-Parkhaus Experimenta 6 | 7 | 8 |
Freie Parkplätze: 357

Ort
Harmonie
Zufahrt: Gymnasiumstraße 9 | 10 |
Freie Parkplätze: 206

Ort
Kiliansplatz
Zufahrt: Kilianstraße 11 | 12 |
Freie Parkplätze: 45

Ort
Käthchenhof
Zufahrt: Gerberstraße 13 | 14 |
Freie Parkplätze: 21

Ort
Stadtgalerie
Zufahrt: Allerheiligenstraße, Deutschhofstraße 15 | 16 |
Freie Parkplätze: 176

Ort
Theaterforum K3
Zufahrt: Mannheimer Straße, Gartenstraße 17 | 18 |
Freie Parkplätze: 213

Ort
Wollhaus
Zufahrt: Allee 19 | 20 |
Freie Parkplätze: 126
21 | 22 | -------------------------------------------------------------------------------- /park_api/cities/Rosenheim.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 12.128548, 9 | 47.856756 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Rosenheim", 14 | "type": "city", 15 | "url": "https://www.rosenheim.de/", 16 | "source": "https://www.rosenheim.de/index.php?eID=jwParkingGetParkings", 17 | "headers" : {"Accept" : "application/json; charset=utf-8"}, 18 | "active_support":false 19 | } 20 | }, 21 | { 22 | "type": "Feature", 23 | "properties": { 24 | "name": "P1 Zentrum", 25 | "address": "Hammerweg 1", 26 | "type": "Parkhaus" 27 | }, 28 | "geometry": { 29 | "type": "Point", 30 | "coordinates": [ 31 | 12.127189, 32 | 47.852263 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "properties": { 39 | "name": "P2 KU'KO", 40 | "address": "Kufsteiner Str. 
4", 41 | "type": "Parkhaus" 42 | }, 43 | "geometry": { 44 | "type": "Point", 45 | "coordinates": [ 46 | 12.125916, 47 | 47.853270 48 | ] 49 | } 50 | }, 51 | { 52 | "type": "Feature", 53 | "properties": { 54 | "name": "P3 Rathaus", 55 | "address": "Königstraße 20", 56 | "type": "Parkplatz" 57 | }, 58 | "geometry": { 59 | "type": "Point", 60 | "coordinates": [ 61 | 12.129512, 62 | 47.853999 63 | ] 64 | } 65 | }, 66 | { 67 | "type": "Feature", 68 | "properties": { 69 | "name": "P4 Mitte", 70 | "address": "Frühlingstraße 5", 71 | "type": "Parkhaus" 72 | }, 73 | "geometry": { 74 | "type": "Point", 75 | "coordinates": [ 76 | 12.124568, 77 | 47.856551 78 | ] 79 | } 80 | }, 81 | { 82 | "type": "Feature", 83 | "properties": { 84 | "name": "P6 Salinplatz", 85 | "address": "Salinstraße 8A", 86 | "type": "Parkplatz/Tiefgarage" 87 | }, 88 | "geometry": { 89 | "type": "Point", 90 | "coordinates": [ 91 | 12.123202, 92 | 47.852729 93 | ] 94 | } 95 | }, 96 | { 97 | "type": "Feature", 98 | "properties": { 99 | "name": "P7 Altstadt-Ost", 100 | "address": "Mühlbachbogen", 101 | "type": "Tiefgarage" 102 | }, 103 | "geometry": { 104 | "type": "Point", 105 | "coordinates": [ 106 | 12.131591, 107 | 47.857572 108 | ] 109 | } 110 | }, 111 | 112 | { 113 | "type": "Feature", 114 | "properties": { 115 | "name": "P8 Beilhack-Citydome", 116 | "address": "Am Hammer 7A", 117 | "type": "Parkplatz" 118 | }, 119 | "geometry": { 120 | "type": "Point", 121 | "coordinates": [ 122 | 12.126049, 123 | 47.850794 124 | ] 125 | } 126 | }, 127 | { 128 | "type": "Feature", 129 | "properties": { 130 | "name": "P9 Am Klinikum", 131 | "address": "In Der Schmucken 7", 132 | "type": "Parkhaus" 133 | }, 134 | "geometry": { 135 | "type": "Point", 136 | "coordinates": [ 137 | 12.132828, 138 | 47.858294 139 | ] 140 | } 141 | }, 142 | { 143 | "type": "Feature", 144 | "properties": { 145 | "name": "P10 Stadtcenter", 146 | "address": "Brixstraße 3", 147 | "type": "Parkhaus" 148 | }, 149 | "geometry": { 150 | "type": 
"Point", 151 | "coordinates": [ 152 | 12.127718, 153 | 47.853439 154 | ] 155 | } 156 | }, 157 | { 158 | "type": "Feature", 159 | "properties": { 160 | "name": "P11 Beilhack-Gießereistr.", 161 | "address": "Gießereistraße 2", 162 | "type": "Parkplatz" 163 | }, 164 | "geometry": { 165 | "type": "Point", 166 | "coordinates": [ 167 | 12.126827, 168 | 47.849683 169 | ] 170 | } 171 | }, 172 | { 173 | "type": "Feature", 174 | "properties": { 175 | "name": "P12 Bahnhof Nord", 176 | "address": "Eduard-Rüber-Straße", 177 | "type": "Parkhaus" 178 | }, 179 | "geometry": { 180 | "type": "Point", 181 | "coordinates": [ 182 | 12.113999, 183 | 47.852733 184 | ] 185 | } 186 | }] 187 | } 188 | 189 | 190 | -------------------------------------------------------------------------------- /park_api/cities/Muenster.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 7.6257100, 9 | 51.9623600 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Münster", 14 | "type": "city", 15 | "url": "http://www.stadt-muenster.de/tiefbauamt/parkleitsystem", 16 | "source": "http://www.stadt-muenster.de/tiefbauamt/parkleitsystem", 17 | "active_support": false 18 | } 19 | }, { 20 | "type": "Feature", 21 | "properties": { 22 | "name": "PH Theater", 23 | "total": 793 24 | }, 25 | "geometry": { 26 | "type":"Point", 27 | "coordinates":[7.626454, 51.965676] 28 | } 29 | }, 30 | { 31 | "type": "Feature", 32 | "properties": { 33 | "name": "PP Hörsterplatz", 34 | "total": 202 35 | }, 36 | "geometry": { 37 | "type":"Point", 38 | "coordinates":[7.632232, 51.964237] 39 | } 40 | }, 41 | { 42 | "type": "Feature", 43 | "properties": { 44 | "name": "PH Alter Steinweg", 45 | "total": 350 46 | }, 47 | "geometry": { 48 | "type":"Point", 49 | "coordinates":[7.632326, 51.962245] 50 | } 51 | }, 52 | { 53 | "type": "Feature", 54 | "properties": { 55 | 
"name": "Busparkplatz", 56 | "total": 63 57 | }, 58 | "geometry": { 59 | "type":"Point", 60 | "coordinates":[7.616792, 51.965429] 61 | } 62 | }, 63 | { 64 | "type": "Feature", 65 | "properties": { 66 | "name": "PP Schlossplatz Nord", 67 | "total": 450 68 | }, 69 | "geometry": { 70 | "type":"Point", 71 | "coordinates":[7.617017, 51.964472] 72 | } 73 | }, 74 | { 75 | "type": "Feature", 76 | "properties": { 77 | "name": "PP Schlossplatz Süd", 78 | "total": 460 79 | }, 80 | "geometry": { 81 | "type":"Point", 82 | "coordinates":[7.616700, 51.962386] 83 | } 84 | }, 85 | { 86 | "type": "Feature", 87 | "properties": { 88 | "name": "PH Aegidii", 89 | "total": 780 90 | }, 91 | "geometry": { 92 | "type":"Point", 93 | "coordinates":[7.622396, 51.960839] 94 | } 95 | }, 96 | { 97 | "type": "Feature", 98 | "properties": { 99 | "name": "PP Georgskommende", 100 | "total": 272 101 | }, 102 | "geometry": { 103 | "type":"Point", 104 | "coordinates":[7.619322, 51.959491] 105 | } 106 | }, 107 | { 108 | "type": "Feature", 109 | "properties": { 110 | "name": "PH Münster Arkaden", 111 | "total": 248 112 | }, 113 | "geometry": { 114 | "type":"Point", 115 | "coordinates":[7.626499, 51.959854] 116 | } 117 | }, 118 | { 119 | "type": "Feature", 120 | "properties": { 121 | "name": "PH Karstadt", 122 | "total": 183 123 | }, 124 | "geometry": { 125 | "type":"Point", 126 | "coordinates":[7.629907, 51.960783] 127 | } 128 | }, 129 | { 130 | "type": "Feature", 131 | "properties": { 132 | "name": "PH Stubengasse", 133 | "total": 318 134 | }, 135 | "geometry": { 136 | "type":"Point", 137 | "coordinates":[7.630287, 51.959884] 138 | } 139 | }, 140 | { 141 | "type": "Feature", 142 | "properties": { 143 | "name": "PH Bremer Platz", 144 | "total": 416 145 | }, 146 | "geometry": { 147 | "type":"Point", 148 | "coordinates":[7.637701, 51.957459] 149 | } 150 | }, 151 | { 152 | "type": "Feature", 153 | "properties": { 154 | "name": "PH Engelenschanze", 155 | "total": 480 156 | }, 157 | "geometry": { 158 | 
"type":"Point", 159 | "coordinates":[7.631493, 51.955674] 160 | } 161 | }, 162 | { 163 | "type": "Feature", 164 | "properties": { 165 | "name": "PH Bahnhofstraße", 166 | "total": 339 167 | }, 168 | "geometry": { 169 | "type":"Point", 170 | "coordinates":[7.632764, 51.955504] 171 | } 172 | }, 173 | { 174 | "type": "Feature", 175 | "properties": { 176 | "name": "PH Cineplex", 177 | "total": 590 178 | }, 179 | "geometry": { 180 | "type":"Point", 181 | "coordinates":[7.635975, 51.949828] 182 | } 183 | }, 184 | { 185 | "type": "Feature", 186 | "properties": { 187 | "name": "PH Stadthaus 3", 188 | "total": 372 189 | }, 190 | "geometry": { 191 | "type":"Point", 192 | "coordinates":[7.641026, 51.949385] 193 | } 194 | } 195 | ] 196 | } 197 | -------------------------------------------------------------------------------- /park_api/cities/Bonn.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 7.0954800, 9 | 50.7343800 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Bonn", 14 | "type": "city", 15 | "url": "http://www.bcp-bonn.de", 16 | "source": "http://www.bcp-bonn.de", 17 | "active_support": false 18 | } 19 | }, { 20 | "type": "Feature", 21 | "geometry": { 22 | "type": "Point", 23 | "coordinates": [ 24 | 7.101677, 25 | 50.734121 26 | ] 27 | }, 28 | "properties": { 29 | "name": "Bonner City Parkraum GmbH", 30 | "description": "Verwaltung, Am Hof 26a, 53113 Bonn, Tel. +49(0)228 96991-0, Fax +49(0)228 96991-21, Mail bcp@citypark-bonn.de, http://www.bcp-bonn.de" 31 | } 32 | }, { 33 | "type": "Feature", 34 | "geometry": { 35 | "type": "Point", 36 | "coordinates": [ 37 | 7.103549000000001, 38 | 50.734559 39 | ] 40 | }, 41 | "properties": { 42 | "name": "Marktgarage", 43 | "description": "Marktgarage Bonn, Einfahrt Stockenstraße, Tel. 
+49(0)228 88657580, Bonner City Parkraum GmbH, http://bcp-bonn.de", 44 | "total": 325, 45 | "address": "Stockenstraße" 46 | } 47 | }, { 48 | "type": "Feature", 49 | "geometry": { 50 | "type": "Point", 51 | "coordinates": [ 52 | 7.096412, 53 | 50.735764 54 | ] 55 | }, 56 | "properties": { 57 | "name": "Münsterplatzgarage", 58 | "description": "Münsterplatzgarage Bonn, Einfahrt Budapester Straße, Tel. +49(0)228 691348, Bonner City Parkraum GmbH, http://www.bcp-bonn.de", 59 | "total": 319, 60 | "address": "Budapester Straße" 61 | } 62 | }, { 63 | "type": "Feature", 64 | "geometry": { 65 | "type": "Point", 66 | "coordinates": [ 67 | 7.096701, 68 | 50.737056 69 | ] 70 | }, 71 | "properties": { 72 | "name": "Friedensplatzgarage", 73 | "description": "Friedensplatzgarage Bonn, Einfahrt Oxfordstraße, Tel. +49(0)228 655281, Bonner City Parkraum GmbH, http://www.bcp-bonn.de", 74 | "total": 822, 75 | "address": "Oxfordstraße" 76 | } 77 | }, { 78 | "type": "Feature", 79 | "geometry": { 80 | "type": "Point", 81 | "coordinates": [ 82 | 7.095905, 83 | 50.733897 84 | ] 85 | }, 86 | "properties": { 87 | "name": "Bahnhofgarage", 88 | "description": "Bahnhofgarage Bonn, Einfahrt Münsterstraße, Tel. +49(0)228 634328, Bonner City Parkraum GmbH, http://www.bcp-bonn.de", 89 | "total": 110, 90 | "address": "Münsterstraße" 91 | } 92 | }, { 93 | "type": "Feature", 94 | "geometry": { 95 | "type": "Point", 96 | "coordinates": [ 97 | 7.094, 98 | 50.737791 99 | ] 100 | }, 101 | "properties": { 102 | "name": "Stadthausgarage", 103 | "description": "Stadthausgarage Bonn, Einfahrt Weiherstraße, Tel. 
+49(0)228 656085, Bonner City Parkraum GmbH, http://www.bcp-bonn.de", 104 | "total": 300, 105 | "address": "Weiherstraße" 106 | } 107 | }, { 108 | "type": "Feature", 109 | "geometry": { 110 | "type": "Point", 111 | "coordinates": [ 112 | 7.103313, 113 | 50.739629 114 | ] 115 | }, 116 | "properties": { 117 | "name": "Beethoven-Parkhaus", 118 | "description": "Beethoven-Parkhaus Bonn, Einfahrt Engeltalstraße, Tel. +49(0)228 3695635, Bonner City Parkraum GmbH, http://www.bcp-bonn.de", 119 | "total": 426, 120 | "address": "Engeltalstraße" 121 | } 122 | }, { 123 | "type": "Feature", 124 | "geometry": { 125 | "type": "Point", 126 | "coordinates": [ 127 | 7.10139, 128 | 50.741206 129 | ] 130 | }, 131 | "properties": { 132 | "name": "Parkplatz Beethovenhalle", 133 | "description": "Parkplatz Beethovenhalle Bonn, Einfahrt Wachsbleiche, Tel. +49(0)228 3695635, Bonner City Parkraum GmbH, http://www.bcp-bonn.de" 134 | } 135 | }, { 136 | "type": "Feature", 137 | "geometry": { 138 | "type": "Point", 139 | "coordinates": [ 140 | 7.130245, 141 | 50.718237 142 | ] 143 | }, 144 | "properties": { 145 | "name": "Parkplatz Rheinaue", 146 | "description": "Parkplatz Rheinaue Bonn, Einfahrt Charles-De-Gaulle-Straße, Tel. 
import os
from datetime import date
import calendar

import json
from collections import namedtuple
from park_api import env
from park_api.util import remove_special_chars


lot_fields = ['name', 'id', 'type', 'lng', 'lat', 'address', 'total', 'aux']


class Lot(namedtuple('Lot', lot_fields)):
    """Static description of a single parking lot (id, position, capacity)."""

    @property
    def coords(self):
        """Return ``{'lng': ..., 'lat': ...}``, or None if either is missing."""
        if self.lng is not None and self.lat is not None:
            return {'lng': self.lng, 'lat': self.lat}
        return None

city_fields = ['name', 'id', 'lng', 'lat', 'url', 'source', 'headers', 'public_source', 'active_support', 'attribution']


class City(namedtuple('City', city_fields)):
    """Static description of a city: position, data source URLs, attribution."""

    @property
    def coords(self):
        """Return ``{'lng': ..., 'lat': ...}``, or None if either is missing."""
        if self.lng is not None and self.lat is not None:
            return {'lng': self.lng, 'lat': self.lat}
        return None


def generate_id(s):
    """Normalize *s* to a lowercase identifier without special characters."""
    return remove_special_chars(s.lower())


class GeoData:
    """Static geodata for one city scraper.

    Loads ``<City>.geojson`` (city metadata plus one feature per lot) and,
    when present, the private ``<City>.json`` (non-public source/public URLs,
    cf. ``Dresden.json.def``) from ``park_api/cities``.

    :param city: scraper file name including the ``.py`` suffix,
                 e.g. ``"Dresden.py"``.
    """

    def __init__(self, city):
        json_file = city[:-3] + ".geojson"  # strip the ".py" suffix
        self.city_name = os.path.basename(city[:-3])
        json_path = os.path.join(env.APP_ROOT, "park_api", "cities", json_file)
        try:
            with open(json_path) as f:
                self._process_json(json.load(f))
        except FileNotFoundError:
            # Bugfix: previously only self.lots was initialized here, leaving
            # self.city unset and crashing _process_private / later access
            # with AttributeError when no geojson exists for the city.
            self.lots = {}
            self.city = self._default_city()

        private_file = city[:-3] + ".json"
        private_path = os.path.join(env.APP_ROOT, "park_api", "cities", private_file)
        try:
            with open(private_path) as p:
                self.private_data = json.load(p)
            self._process_private(self.private_data)
        except FileNotFoundError:
            # Private overrides are optional; absence is the normal case.
            self.private_data = None

    def _default_city(self):
        """Minimal City record used when no ``"city"`` feature is available."""
        return City(self.city_name,
                    self.city_name,
                    None, None, None, None, None, None, None, None)

    def _process_json(self, json):
        """Index all lot features by name and pick up the city feature."""
        self.lots = {}
        self.city = None
        for f in json["features"]:
            self._process_feature(f)
        if self.city is None:
            # Bugfix: City has 10 fields but the old fallback passed only 8
            # positional arguments, raising TypeError for any geojson that
            # lacks a "city" feature.
            self.city = self._default_city()

    def _process_private(self, json):
        """Merge the private source/public URLs into the City record.

        Bugfix: the old code rebuilt City positionally with 9 values for 10
        fields, placing ``json["public"]`` on the ``headers`` slot and
        shifting every field after ``source``. ``_replace`` targets the two
        intended fields by name and keeps the rest untouched.
        """
        if self.city:
            self.city = self.city._replace(source=json["source"],
                                           public_source=json["public"])

    def _process_feature(self, feature):
        """Dispatch one geojson feature to the city or the lot index."""
        props = feature["properties"]
        _type = props.get("type", None)
        name = props["name"]
        lng, lat = self._coords(feature)
        if _type == "city":
            self.city = self._city_from_props(name, lng, lat, props)
        else:
            lot = self._lot_from_props(name, lng, lat, props)
            self.lots[name] = lot

    def _city_from_props(self, name, lng, lat, props):
        """Build a City record from a ``"city"`` feature's properties."""
        url = props.get("url", None)
        source = props.get("source", None)
        headers = props.get("headers", {})
        active_support = props.get("active_support", None)
        attribution = props.get("attribution", None)
        # public_source defaults to source; a private <City>.json may
        # override both later (see _process_private).
        return City(name,
                    self.city_name,
                    lng,
                    lat,
                    url,
                    source,
                    headers,
                    source,
                    active_support,
                    attribution)

    def _lot_from_props(self, name, lng, lat, props):
        """Build a Lot record from a lot feature's properties."""
        address = props.get("address", None)
        total = props.get("total", 0)
        # Some lots (e.g. Basel) have weekday-dependent capacity.
        if "total_by_weekday" in props:
            weekday = calendar.day_name[date.today().weekday()]
            total = props["total_by_weekday"].get(weekday, total)
        _type = props.get("type", None)
        _aux = props.get("aux", None)
        _id = generate_id(self.city_name + name)
        return Lot(name, _id, _type, lng, lat, address, total, _aux)

    def _coords(self, feature):
        """Return (lng, lat) of a Point feature, or (None, None) if absent."""
        geometry = feature.get("geometry", None)
        if geometry is None:
            return None, None
        lng, lat = geometry["coordinates"]
        return lng, lat

    def lot(self, name):
        """Return the known Lot for *name*, or a placeholder with total 0."""
        lot = self.lots.get(name, None)
        if lot is None:
            _id = generate_id(self.city_name + name)
            return Lot(name, _id, None, None, None, None, 0, None)
        return lot
"Point", 61 | "coordinates": [ 62 | 8.21514, 63 | 53.14161 64 | ] 65 | } 66 | }, 67 | { 68 | "type": "Feature", 69 | "properties": { 70 | "name": "Galeria Kaufhof", 71 | "total": 326, 72 | "address": "Ritterstraße", 73 | "type": "Parkhaus" 74 | }, 75 | "geometry": { 76 | "type": "Point", 77 | "coordinates": [ 78 | 8.2161275, 79 | 53.1396571 80 | ] 81 | } 82 | }, 83 | { 84 | "type": "Feature", 85 | "properties": { 86 | "name": "Pferdemarkt", 87 | "total": 401, 88 | "address": "Pferdemarkt 13", 89 | "type": "Parkplatz" 90 | }, 91 | "geometry": { 92 | "type": "Point", 93 | "coordinates": [ 94 | 8.21271, 95 | 53.14727 96 | ] 97 | } 98 | }, 99 | { 100 | "type": "Feature", 101 | "properties": { 102 | "name": "CCO Parkdeck 1", 103 | "total": 190, 104 | "address": "Heiligengeiststraße 4", 105 | "type": "Parkhaus" 106 | }, 107 | "geometry": { 108 | "type": "Point", 109 | "coordinates": [ 110 | 8.21322, 111 | 53.14406 112 | ] 113 | } 114 | }, 115 | { 116 | "type": "Feature", 117 | "properties": { 118 | "name": "CCO Parkdeck 2", 119 | "total": 230, 120 | "address": "Heiligengeiststraße 4", 121 | "type": "Parkhaus" 122 | }, 123 | "geometry": { 124 | "type": "Point", 125 | "coordinates": [ 126 | 8.21322, 127 | 53.14406 128 | ] 129 | } 130 | }, 131 | { 132 | "type": "Feature", 133 | "properties": { 134 | "name": "Hbf/ZOB", 135 | "total": 358, 136 | "address": "Karlstraße", 137 | "type": "Parkhaus" 138 | }, 139 | "geometry": { 140 | "type": "Point", 141 | "coordinates": [ 142 | 8.2225073, 143 | 53.145681 144 | ] 145 | } 146 | }, 147 | { 148 | "type": "Feature", 149 | "properties": { 150 | "name": "Theaterwall", 151 | "total": 125, 152 | "address": "Theaterwall 4", 153 | "type": "Parkplatz" 154 | }, 155 | "geometry": { 156 | "type": "Point", 157 | "coordinates": [ 158 | 8.2127383, 159 | 53.1382987 160 | ] 161 | } 162 | }, 163 | { 164 | "type": "Feature", 165 | "properties": { 166 | "name": "Theatergarage", 167 | "total": 107, 168 | "address": "Roonstraße", 169 | "type": "Parkhaus" 
170 | }, 171 | "geometry": { 172 | "type": "Point", 173 | "coordinates": [ 174 | 8.2096205, 175 | 53.1382523 176 | ] 177 | } 178 | }, 179 | { 180 | "type": "Feature", 181 | "properties": { 182 | "name": "Cinemaxx", 183 | "total": 0, 184 | "address": "Stau 79-85", 185 | "type": "Parkhaus" 186 | }, 187 | "geometry": { 188 | "type": "Point", 189 | "coordinates": [ 190 | 8.226060, 191 | 53.141114 192 | ] 193 | } 194 | }, 195 | { 196 | "type": "Feature", 197 | "properties": { 198 | "name": "Heiligengeist-Höfe", 199 | "total": 275, 200 | "address": "Georgstraße", 201 | "type": "Parkhaus" 202 | }, 203 | "geometry": { 204 | "type": "Point", 205 | "coordinates": [ 206 | 8.2168988, 207 | 53.1391841 208 | ] 209 | } 210 | } 211 | ] 212 | } 213 | -------------------------------------------------------------------------------- /park_api/cities/Basel.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 7.5885761, 9 | 47.5595986 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Basel", 14 | "type": "city", 15 | "url": "http://www.parkleitsystem-basel.ch/status.php", 16 | "source": "http://www.parkleitsystem-basel.ch/rss_feed.php", 17 | "active_support": true, 18 | "attribution":{ 19 | "contributor":"Immobilien Basel-Stadt", 20 | "url":"http://www.parkleitsystem-basel.ch/impressum.php", 21 | "license":"Creative-Commons-Null-Lizenz (CC-0)" 22 | } 23 | } 24 | }, { 25 | "type": "Feature", 26 | "geometry": { 27 | "type": "Point", 28 | "coordinates": [7.6089067, 47.5651794] 29 | }, 30 | "properties": { 31 | "name": "Parkhaus Bad. 
Bahnhof", 32 | "address": "Schwarzwaldstrasse 160", 33 | "total": 300, 34 | "total_by_weekday": { 35 | "Saturday": 750, 36 | "Sunday": 750 37 | } 38 | } 39 | }, { 40 | "type": "Feature", 41 | "geometry": { 42 | "type": "Point", 43 | "coordinates": [7.602175, 47.563241] 44 | }, 45 | "properties": { 46 | "name": "Parkhaus Messe", 47 | "address": "Riehenstrasse 101", 48 | "total": 752 49 | } 50 | }, { 51 | "type": "Feature", 52 | "geometry": { 53 | "type": "Point", 54 | "coordinates": [7.5967098, 47.5630411] 55 | }, 56 | "properties": { 57 | "name": "Parkhaus Europe", 58 | "address": "Hammerstrasse 68", 59 | "total": 120 60 | } 61 | }, { 62 | "type": "Feature", 63 | "geometry": { 64 | "type": "Point", 65 | "coordinates": [7.594263, 47.5607142] 66 | }, 67 | "properties": { 68 | "name": "Parkhaus Rebgasse", 69 | "address": "Rebgasse 20", 70 | "total": 250 71 | } 72 | }, { 73 | "type": "Feature", 74 | "geometry": { 75 | "type": "Point", 76 | "coordinates": [7.5946604, 47.5639644] 77 | }, 78 | "properties": { 79 | "name": "Parkhaus Claramatte", 80 | "address": "Klingentalstrasse 25", 81 | "total": 100, 82 | "total_by_weekday": { 83 | "Saturday": 170, 84 | "Sunday": 170 85 | } 86 | } 87 | }, { 88 | "type": "Feature", 89 | "geometry": { 90 | "type": "Point", 91 | "coordinates": [7.5917937, 47.5622725] 92 | }, 93 | "properties": { 94 | "name": "Parkhaus Clarahuus", 95 | "address": "Webergasse 34", 96 | "total": 52 97 | } 98 | }, { 99 | "type": "Feature", 100 | "geometry": { 101 | "type": "Point", 102 | "coordinates": [7.5874932, 47.5506254] 103 | }, 104 | "properties": { 105 | "name": "Parkhaus Elisabethen", 106 | "address": "Steinentorberg 5", 107 | "total": 840 108 | } 109 | }, { 110 | "type": "Feature", 111 | "geometry": { 112 | "type": "Point", 113 | "coordinates": [7.5858936, 47.5524554] 114 | }, 115 | "properties": { 116 | "name": "Parkhaus Steinen", 117 | "address": "Steinenschanze 5", 118 | "total": 526 119 | } 120 | }, { 121 | "type": "Feature", 122 | "geometry": { 
123 | "type": "Point", 124 | "coordinates": [7.5824076, 47.561101] 125 | }, 126 | "properties": { 127 | "name": "Parkhaus City", 128 | "address": "Schanzenstrasse 48", 129 | "total": 1114 130 | } 131 | }, { 132 | "type": "Feature", 133 | "geometry": { 134 | "type": "Point", 135 | "coordinates": [7.58658, 47.5592347] 136 | }, 137 | "properties": { 138 | "name": "Parkhaus Storchen", 139 | "address": "Fischmarkt 10", 140 | "total": 142 141 | } 142 | }, { 143 | "type": "Feature", 144 | "geometry": { 145 | "type": "Point", 146 | "coordinates": [7.5929374, 47.5468617] 147 | }, 148 | "properties": { 149 | "name": "Parkhaus Post Basel", 150 | "address": "Gartenstrasse 143", 151 | "total": 72 152 | } 153 | }, { 154 | "type": "Feature", 155 | "geometry": { 156 | "type": "Point", 157 | "coordinates": [7.5922975, 47.547299] 158 | }, 159 | "properties": { 160 | "name": "Parkhaus Centralbahnparking", 161 | "address": "Gartenstrasse 150", 162 | "total": 286 163 | } 164 | }, { 165 | "type": "Feature", 166 | "geometry": { 167 | "type": "Point", 168 | "coordinates": [7.5943046, 47.5504299] 169 | }, 170 | "properties": { 171 | "name": "Parkhaus Aeschen", 172 | "address": "Aeschengraben 9", 173 | "total": 97 174 | } 175 | }, { 176 | "type": "Feature", 177 | "geometry": { 178 | "type": "Point", 179 | "coordinates": [7.593512, 47.5515968] 180 | }, 181 | "properties": { 182 | "name": "Parkhaus Anfos", 183 | "address": "Henric Petri-Strasse 21", 184 | "total": 162 185 | } 186 | }, { 187 | "type": "Feature", 188 | "geometry": { 189 | "type": "Point", 190 | "coordinates": [7.5884556, 47.5458851] 191 | }, 192 | "properties": { 193 | "name": "Parkhaus Bahnhof Süd", 194 | "address": "Güterstrasse 115", 195 | "total": 100 196 | } 197 | }] 198 | } 199 | -------------------------------------------------------------------------------- /tests/fixtures/luebeck.html: -------------------------------------------------------------------------------- 1 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 
49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 | 134 | 135 | 136 | 137 | 138 | 139 | 140 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 | 153 | 154 | 155 | 156 | 157 | 158 | 159 | 160 | 161 | 162 | 163 | 164 | 165 | 166 | 167 | 168 | 169 | 170 | 171 | 172 | 173 | 174 | 175 | 176 | 177 | 178 | 179 | 180 | 181 | 182 | 183 | 184 | 185 | 186 | 187 | 188 | 189 | 190 | 191 | 192 | 193 | 194 | 195 | 196 | 197 | 198 | 199 | 200 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | 208 |
Stand: 19.06.2015, 11:20 Uhr
Parkplätze LübeckGesamtFreiBelegung
PH St. MarienGeöffnet
PP Lastadie P3349
PP Lastadie P4162
PP Lastadie P5Vorübergehend geschlossen.
PP MuK347317
PP Radisson Hotel574
PH Am Burgtor21375
PH Falkenstrasse12579
PH Huexstrasse10029
PP Kanalstrasse 1-5631503
PH Linden Arcaden400169
PH Haerder-Center19844
PH KarstadtGeöffnet
PH Mitte420169
PH Pferdemarkt6149
PH Am Holstentor390189
PH Radisson Hotel5757
Parkplätze Travemünde
PP Kowitzberg750700
PP Backbord135134
PP Godewind250241
PH Maritim300149
PP Am Leuchtturm184179
PP Leuchtenfeld750746
PP Hafenbahnhof5729
PP Baggersand 1342340
PP Baggersand 2124118
Gesamt65414331
209 | -------------------------------------------------------------------------------- /park_api/cities/Aalborg.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 9.924623, 9 | 57.050031 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Aalborg", 14 | "type": "city", 15 | "url": "http://www.aalborgcity.dk/live-parkering.aspx", 16 | "source": "http://plcinfo.mintrafik.dk/", 17 | "active_support": false, 18 | "attribution":{ 19 | "contributor":"Manuel R. Ciosici", 20 | "url":"http://www.aalborgcity.dk/live-parkering.aspx", 21 | "license":"" 22 | } 23 | } 24 | }, { 25 | "type": "Feature", 26 | "properties": { 27 | "name": "Parking Lot 1", 28 | "total": 200, 29 | "address": "Musterstraße 1", 30 | "type": "Parkhaus" 31 | }, 32 | "geometry": { 33 | "type": "Point", 34 | "coordinates": [ 35 | 8.2168988, 36 | 53.1391841 37 | ] 38 | } 39 | }, 40 | { 41 | "type": "Feature", 42 | "properties": { 43 | "name": "Sømandshjemmet", 44 | "total": 78, 45 | "address": "Østerbro 27", 46 | "type": "Parkplatz" 47 | }, 48 | "geometry": { 49 | "type": "Point", 50 | "coordinates": [ 51 | 9.93556, 52 | 57.04646 53 | ] 54 | } 55 | }, 56 | { 57 | "type": "Feature", 58 | "properties": { 59 | "name": "Sauers Plads", 60 | "total": 360, 61 | "address": "Sauers Plads", 62 | "type": "Parkplatz" 63 | }, 64 | "geometry": { 65 | "type": "Point", 66 | "coordinates": [ 67 | 9.92745, 68 | 57.04419 69 | ] 70 | } 71 | }, 72 | { 73 | "type": "Feature", 74 | "properties": { 75 | "name": "Salling", 76 | "total": 312, 77 | "address": "Braskensgade", 78 | "type": "Parkhaus" 79 | }, 80 | "geometry": { 81 | "type": "Point", 82 | "coordinates": [ 83 | 9.922735, 84 | 57.04758 85 | ] 86 | } 87 | }, 88 | { 89 | "type": "Feature", 90 | "properties": { 91 | "name": "Palads", 92 | "total": 475, 93 | "address": "Jomfru Anes Gård 5", 94 | 
"type": "Parkhaus" 95 | }, 96 | "geometry": { 97 | "type": "Point", 98 | "coordinates": [ 99 | 9.920601, 100 | 57.05014 101 | ] 102 | } 103 | }, 104 | { 105 | "type": "Feature", 106 | "properties": { 107 | "name": "Musikkens Hus", 108 | "total": 285, 109 | "address": "Nyhavnsgade 53", 110 | "type": "Tiefgarage" 111 | }, 112 | "geometry": { 113 | "type": "Point", 114 | "coordinates": [ 115 | 9.93311, 116 | 57.04737 117 | ] 118 | } 119 | }, 120 | { 121 | "type": "Feature", 122 | "properties": { 123 | "name": "Kongrescenter", 124 | "total": 700, 125 | "address": "Vesterbro 14", 126 | "type": "Parkplatz" 127 | }, 128 | "geometry": { 129 | "type": "Point", 130 | "coordinates": [ 131 | 9.91458, 132 | 57.04456 133 | ] 134 | } 135 | }, 136 | { 137 | "type": "Feature", 138 | "properties": { 139 | "name": "Kennedy Arkaden", 140 | "total": 350, 141 | "address": "John F. Kennedys Plads 1U", 142 | "type": "Parkhaus" 143 | }, 144 | "geometry": { 145 | "type": "Point", 146 | "coordinates": [ 147 | 9.918808, 148 | 57.04178 149 | ] 150 | } 151 | }, 152 | { 153 | "type": "Feature", 154 | "properties": { 155 | "name": "Gåsepigen", 156 | "total": 145, 157 | "address": "Stengade 1", 158 | "type": "Parkplatz" 159 | }, 160 | "geometry": { 161 | "type": "Point", 162 | "coordinates": [ 163 | 9.91376, 164 | 57.04751 165 | ] 166 | } 167 | }, 168 | { 169 | "type": "Feature", 170 | "properties": { 171 | "name": "Føtex", 172 | "total": 150, 173 | "address": "Fjordgade 6", 174 | "type": "Parkhaus" 175 | }, 176 | "geometry": { 177 | "type": "Point", 178 | "coordinates": [ 179 | 9.924956, 180 | 57.04754 181 | ] 182 | } 183 | }, 184 | { 185 | "type": "Feature", 186 | "properties": { 187 | "name": "Friis", 188 | "total": 731, 189 | "address": "Nytorv 27", 190 | "type": "Tiefgarage" 191 | }, 192 | "geometry": { 193 | "type": "Point", 194 | "coordinates": [ 195 | 9.927327, 196 | 57.04769 197 | ] 198 | } 199 | }, 200 | { 201 | "type": "Feature", 202 | "properties": { 203 | "name": "C W Obel", 204 | 
"total": 376, 205 | "address": "Badehusvej 14", 206 | "type": "Parkhaus" 207 | }, 208 | "geometry": { 209 | "type": "Point", 210 | "coordinates": [ 211 | 9.910837, 212 | 57.05243 213 | ] 214 | } 215 | }, 216 | { 217 | "type": "Feature", 218 | "properties": { 219 | "name": "Budolfi Plads", 220 | "total": 157, 221 | "address": "Vingårdsgade 10", 222 | "type": "Parkplatz" 223 | }, 224 | "geometry": { 225 | "type": "Point", 226 | "coordinates": [ 227 | 9.9183, 228 | 57.0475 229 | ] 230 | } 231 | } 232 | ] 233 | } 234 | -------------------------------------------------------------------------------- /park_api/cities/Ingolstadt.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 11.4237200, 9 | 48.7650800 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Ingolstadt", 14 | "type": "city", 15 | "url": "http://www.ingolstadt.mobi/parkplatzauskunft.cfm", 16 | "source": "http://www.ingolstadt.mobi/parkplatzauskunft.cfm", 17 | "active_support": true 18 | } 19 | }, { 20 | "type": "Feature", 21 | "properties": { 22 | "name": "Hauptbahnhof", 23 | "total": 812, 24 | "address": "Elisabethstraße 3", 25 | "type": "Parkhaus" 26 | }, 27 | "geometry": { 28 | "type": "Point", 29 | "coordinates": [ 30 | 11.437298655509947, 31 | 48.74315480852817 32 | ] 33 | } 34 | }, 35 | { 36 | "type": "Feature", 37 | "properties": { 38 | "name": "Hauptbahnhof Ost", 39 | "total": 240, 40 | "address": "Martin-Hemm-Straße 8", 41 | "type": "Parkhaus" 42 | }, 43 | "geometry": { 44 | "type": "Point", 45 | "coordinates": [ 46 | 11.439857482910156, 47 | 48.744905834783395 48 | ] 49 | } 50 | }, 51 | { 52 | "type": "Feature", 53 | "properties": { 54 | "name": "Hallenbad", 55 | "total": 836, 56 | "address": "Jahnstraße 9", 57 | "type": "Parkplatz" 58 | }, 59 | "geometry": { 60 | "type": "Point", 61 | "coordinates": [ 62 | 
11.42030954360962, 63 | 48.760856819513116 64 | ] 65 | } 66 | }, 67 | { 68 | "type": "Feature", 69 | "properties": { 70 | "name": "Festplatz", 71 | "total": 1437, 72 | "address": "Dreizehnerstraße", 73 | "type": "Parkplatz" 74 | }, 75 | "geometry": { 76 | "type": "Point", 77 | "coordinates": [ 78 | 11.423431634902954, 79 | 48.7706936821076 80 | ] 81 | } 82 | }, 83 | { 84 | "type": "Feature", 85 | "properties": { 86 | "name": "Klinikum" 87 | }, 88 | "geometry": { 89 | "type": "Point", 90 | "coordinates": [ 91 | 11.374958753585815, 92 | 48.771443213043426 93 | ] 94 | } 95 | }, 96 | { 97 | "type": "Feature", 98 | "properties": { 99 | "name": "Nordbahnhof", 100 | "total": 252, 101 | "address": "Am Nordbahnhof 3", 102 | "type": "Parkplatz" 103 | }, 104 | "geometry": { 105 | "type": "Point", 106 | "coordinates": [ 107 | 11.431505084037779, 108 | 48.77336648651627 109 | ] 110 | } 111 | }, 112 | { 113 | "type": "Feature", 114 | "properties": { 115 | "name": "Südl. Ringstraße", 116 | "total": 257, 117 | "address": "Südliche Ringstraße", 118 | "type": "Parkplatz" 119 | }, 120 | "geometry": { 121 | "type": "Point", 122 | "coordinates": [ 123 | 11.437261104583738, 124 | 48.75859357346676 125 | ] 126 | } 127 | }, 128 | { 129 | "type": "Feature", 130 | "properties": { 131 | "name": "Congressgarage", 132 | "total": 213, 133 | "address": "Schloßlände 25", 134 | "type": "Tiefgarage" 135 | }, 136 | "geometry": { 137 | "type": "Point", 138 | "coordinates": [ 139 | 11.433302164077759, 140 | 48.76454144880872 141 | ] 142 | } 143 | }, 144 | { 145 | "type": "Feature", 146 | "properties": { 147 | "name": "Münster", 148 | "total": 384, 149 | "address": "Bergbräustraße", 150 | "type": "Tiefgarage" 151 | }, 152 | "geometry": { 153 | "type": "Point", 154 | "coordinates": [ 155 | 11.419150829315186, 156 | 48.764354041367575 157 | ] 158 | } 159 | }, 160 | { 161 | "type": "Feature", 162 | "properties": { 163 | "name": "Reduit Tilly", 164 | "total": 436, 165 | "address": "Regimentstraße", 166 | 
"type": "Tiefgarage" 167 | }, 168 | "geometry": { 169 | "type": "Point", 170 | "coordinates": [ 171 | 11.433484554290771, 172 | 48.759930315491225 173 | ] 174 | } 175 | }, 176 | { 177 | "type": "Feature", 178 | "properties": { 179 | "name": "Schloss", 180 | "total": 435, 181 | "address": "Esplanade", 182 | "type": "Tiefgarage" 183 | }, 184 | "geometry": { 185 | "type": "Point", 186 | "coordinates": [ 187 | 11.43128514289856, 188 | 48.76627758638221 189 | ] 190 | } 191 | }, 192 | { 193 | "type": "Feature", 194 | "properties": { 195 | "name": "Theater-Ost", 196 | "total": 682, 197 | "address": "Schloßländle", 198 | "type": "Tiefgarage" 199 | }, 200 | "geometry": { 201 | "type": "Point", 202 | "coordinates": [ 203 | 11.430437564849852, 204 | 48.76376352655507 205 | ] 206 | } 207 | }, 208 | { 209 | "type": "Feature", 210 | "properties": { 211 | "name": "Theater-West", 212 | "total": 599, 213 | "address": "Schutterstraße", 214 | "type": "Tiefgarage" 215 | }, 216 | "geometry": { 217 | "type": "Point", 218 | "coordinates": [ 219 | 11.427175998687744, 220 | 48.76281232342508 221 | ] 222 | } 223 | } 224 | ] 225 | } 226 | -------------------------------------------------------------------------------- /tests/fixtures/oldenburg.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | Parkleit-Informationssystem der Stadt Oldenburg 9 | 21 | 22 | 23 | 24 | Letzte Aktualisierung: 08.05.2019 15:50:00 25 |
26 |
27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | 59 | 60 | 61 | 62 | 63 | 64 | 65 | 66 | 67 | 68 | 69 | 70 | 71 | 72 | 73 | 74 | 75 | 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | 95 | 96 | 97 | 98 | 99 | 100 | 101 | 102 | 103 | 104 | 105 | 106 | 107 | 108 | 109 | 110 |
NameFreie PlätzeTrendStatus
Waffenplatz
50
gleich bleibende Tendenz
Offen
City
111
gleich bleibende Tendenz
Offen
Galeria Kaufhof
162
gleich bleibende Tendenz
Offen
Pferdemarkt
328
gleich bleibende Tendenz
Offen
CCO Parkdeck 1
0
gleich bleibende Tendenz
Offen
CCO Parkdeck 2
24
gleich bleibende Tendenz
Offen
Hbf/ZOB
11
gleich bleibende Tendenz
Offen
Theaterwall
42
gleich bleibende Tendenz
Offen
Theatergarage
64
gleich bleibende Tendenz
Offen
Heiligengeist-Höfe
63
gleich bleibende Tendenz
Offen
Schlosshöfe
161
gleich bleibende Tendenz
Offen
Cinemaxx
0
gleich bleibende Tendenz
Offen
111 | 112 | 113 | -------------------------------------------------------------------------------- /park_api/cities/Hanau.geojson: -------------------------------------------------------------------------------- 1 | { 2 | "type": "FeatureCollection", 3 | "features": [{ 4 | "type": "Feature", 5 | "geometry": { 6 | "type": "Point", 7 | "coordinates": [ 8 | 8.917053, 9 | 50.133739 10 | ] 11 | }, 12 | "properties": { 13 | "name": "Hanau", 14 | "type": "city", 15 | "url": "https://www.hanau.de/", 16 | "source": "http://www.hanau-neu-erleben.de/reise/parken/072752/index.html", 17 | "active_support":false 18 | } 19 | }, 20 | { 21 | "type": "Feature", 22 | "properties": { 23 | "name": "Tiefgarage Am Markt", 24 | "total": 185, 25 | "address": "Am Markt", 26 | "type": "Tiefgarage" 27 | }, 28 | "geometry": { 29 | "type": "Point", 30 | "coordinates": [ 31 | 8.916543, 32 | 50.132449 33 | ] 34 | } 35 | }, 36 | { 37 | "type": "Feature", 38 | "properties": { 39 | "name": "Parkhaus Am Forum", 40 | "total": 560, 41 | "address": "Am Frankfurter Tor 10", 42 | "type": "Parkhaus" 43 | }, 44 | "geometry": { 45 | "type": "Point", 46 | "coordinates": [ 47 | 8.913654, 48 | 50.134264 49 | ] 50 | } 51 | }, 52 | { 53 | "type": "Feature", 54 | "properties": { 55 | "name": "Parkhaus Nürnberger Straße", 56 | "total": 348, 57 | "address": "Nürnberger Str. 
16", 58 | "type": "Parkhaus" 59 | }, 60 | "geometry": { 61 | "type": "Point", 62 | "coordinates": [ 63 | 8.920955, 64 | 50.132298 65 | ] 66 | } 67 | }, 68 | { 69 | "type": "Feature", 70 | "properties": { 71 | "name": "Parkhaus Kinopolis 2", 72 | "total": 430, 73 | "address": "Steinheimer Str.", 74 | "type": "Parkhaus" 75 | }, 76 | "geometry": { 77 | "type": "Point", 78 | "coordinates": [ 79 | 8.914273, 80 | 50.129825 81 | ] 82 | } 83 | }, 84 | { 85 | "type": "Feature", 86 | "properties": { 87 | "name": "Parkplatz Klinikum", 88 | "total": 68, 89 | "address": "Leimenstraße", 90 | "type": "Parkplatz" 91 | }, 92 | "geometry": { 93 | "type": "Point", 94 | "coordinates": [ 95 | 8.922359, 96 | 50.133315 97 | ] 98 | } 99 | }, 100 | { 101 | "type": "Feature", 102 | "properties": { 103 | "name": "Parkhaus Congress Park", 104 | "total": 331, 105 | "address": "Eugen-Kaiser-Straße", 106 | "type": "Parkhaus" 107 | }, 108 | "geometry": { 109 | "type": "Point", 110 | "coordinates": [ 111 | 8.916136, 112 | 50.139814 113 | ] 114 | } 115 | }, 116 | { 117 | "type": "Feature", 118 | "properties": { 119 | "name": "Tiefgarage Congress Park", 120 | "total": 63, 121 | "address": "Heinrich-Bott-Straße", 122 | "type": "Tiefgarage" 123 | }, 124 | "geometry": { 125 | "type": "Point", 126 | "coordinates": [ 127 | 8.918148, 128 | 50.138479 129 | ] 130 | } 131 | }, 132 | { 133 | "type": "Feature", 134 | "properties": { 135 | "name": "Tiefgarage Forum", 136 | "total": 471, 137 | "address": "Im Forum 5", 138 | "type": "Tiefgarage" 139 | }, 140 | "geometry": { 141 | "type": "Point", 142 | "coordinates": [ 143 | 8.916190, 144 | 50.135262 145 | ] 146 | } 147 | }, 148 | { 149 | "type": "Feature", 150 | "properties": { 151 | "name": "Parkhaus Kinopolis", 152 | "total": 186, 153 | "address": "Am Steinheimer Tor 17", 154 | "type": "Parkhaus" 155 | }, 156 | "geometry": { 157 | "type": "Point", 158 | "coordinates": [ 159 | 8.915746, 160 | 50.129269 161 | ] 162 | } 163 | }, 164 | { 165 | "type": "Feature", 
166 | "properties": { 167 | "name": "Parkhaus City Center", 168 | "total": 526, 169 | "address": "Kurt-Blaum-Platz 8", 170 | "type": "Parkhaus" 171 | }, 172 | "geometry": { 173 | "type": "Point", 174 | "coordinates": [ 175 | 8.922597, 176 | 50.131280 177 | ] 178 | } 179 | }, 180 | { 181 | "type": "Feature", 182 | "properties": { 183 | "name": "Tiefgarage Klinikum Süd", 184 | "total": 75, 185 | "address": "Röderstraße 1", 186 | "type": "Tiefgarage" 187 | }, 188 | "geometry": { 189 | "type": "Point", 190 | "coordinates": [ 191 | 8.923727, 192 | 50.131262 193 | ] 194 | } 195 | }, 196 | { 197 | "type": "Feature", 198 | "properties": { 199 | "name": "Parkhaus Gloria Palais", 200 | "total": 280, 201 | "address": "Am Steinheimer Tor 1a", 202 | "type": "Parkhaus" 203 | }, 204 | "geometry": { 205 | "type": "Point", 206 | "coordinates": [ 207 | 8.908575, 208 | 50.132539 209 | ] 210 | } 211 | }, 212 | { 213 | "type": "Feature", 214 | "properties": { 215 | "name": "Parkplatz Main-Kinzig-Halle", 216 | "total": 96, 217 | "address": "Eberhardstraße", 218 | "type": "Parkplatz" 219 | }, 220 | "geometry": { 221 | "type": "Point", 222 | "coordinates": [ 223 | 8.921375, 224 | 50.137234 225 | ] 226 | } 227 | } 228 | ] 229 | } 230 | 231 | 232 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## ParkAPI 2 | 3 | [![Build Status](https://travis-ci.org/offenesdresden/ParkAPI.svg?branch=master)](https://travis-ci.org/offenesdresden/ParkAPI) 4 | 5 | ParkAPI is a project trying to consolidate pages where cities publish the amount of empty spaces on their public parking lots (be it an HTML page, XML data or something else) into a simple to use JSON API. This then serves as a backend for the mobile app ParkenDD ([iOS](https://github.com/kiliankoe/ParkenDD) & [Android](https://github.com/jklmnn/ParkenDD)). 
6 | 7 | **View the current data directly in your browser [here](https://offenesdresden.github.io/ParkAPI/).** 8 | 9 | ![image](./image.jpg) 10 | 11 | The idea here is to fetch new data from the relevant pages regularly and serve that from the application so that the amount of stress on the original servers can be kept to a minimum. This data then also enables the calculation of forecast data (short- and longterm) that can be provided right alongside. 12 | 13 | This software is currently running at [api.parkendd.de](https://api.parkendd.de). It should always be at the most current version of this repo. 14 | 15 | ### Usage 16 | 17 | **GET /** 18 | 19 | Get metadata containing the list of supported cities and their IDs (usually just the same name with replaced umlauts and no spaces), a link to this repository and both the version of the JSON output (`api_version`) and of the server application (`server_version`). 20 | 21 | ```js 22 | { 23 | "api_version": "1.0", 24 | "server_version": "1.0.0", 25 | "reference": "https://github.com/offenesdresden/ParkAPI", 26 | "cities": 27 | { 28 | "City 1": "city1id", 29 | "City 2": "city2id", 30 | ... 31 | } 32 | } 33 | ``` 34 | 35 | **GET /city\_id** 36 | 37 | Get data for a single city that looks something like this. Times are in UTC and parameters marked as optional may not exist for each city or parking lot. Usually only when a city supplies this somehow and we can include it. 38 | 39 | Also please note that this is not valid JSON. Just an example for how the output can be expected. For a specific [JSON schema](http://json-schema.org) please have a look at the [wiki here](https://github.com/offenesdresden/ParkAPI/wiki/city.json). 
40 | 41 | 42 | ```js 43 | { 44 | "last_updated": "2015-06-15T12:31:00", 45 | "last_downloaded": "2015-06-15T12:31:25", 46 | "data_source": "http://examplecity.com", 47 | "lots": [ 48 | { 49 | "coords": { 50 | "lat": 51.05031, 51 | "lng": 13.73754 52 | }, 53 | "name": "Altmarkt", 54 | "total": 400, 55 | "free": 235, 56 | "state": "open|closed|nodata", 57 | "id": "lot_id", 58 | "forecast": true|false, 59 | "region": "Region X", // optional 60 | "address": "Musterstraße 5", // optional 61 | "lot_type": "Parkhaus" // optional 62 | }, 63 | ... 64 | } 65 | ``` 66 | 67 | 68 | ### Setup your own server 69 | 70 | - First you will need python (at least 3.3), pip and virtualenv installed. In the following it is assumed that python is python3 and virtualenv is virtualenv3. If this is not the case for your distribution please use the correct executables. If virtualenv3 is not available, use virtualenv -p /usr/bin/python3. 71 | 72 | - Install the following packages: postgresql libpq-dev 73 | 74 | - Clone the repo: 75 | 76 | $ git clone git@github.com:offenesdresden/ParkAPI.git 77 | $ cd ParkAPI 78 | 79 | - Create a new virtualenv: 80 | 81 | $ virtualenv venv 82 | $ source venv/bin/activate 83 | 84 | - Install dependencies: 85 | 86 | (venv) $ pip install -e . 87 | 88 | - Set up postgresql: 89 | 90 | $ sudo -u postgres createuser -P -d park_api 91 | $ sudo -u postgres createdb -O park_api park_api 92 | 93 | - Run the server: 94 | 95 | $ bin/parkapi-server 96 | 97 | - Run the tests: 98 | 99 | $ python -m unittest discover tests 100 | 101 | Throwing errors? Sure you installed the requirements and are using Python 3.x? Still nothing? Please [tell us](https://github.com/offenesdresden/ParkAPI/issues/new) about it. 102 | 103 | ### Adding support for a new city 104 | 105 | You know of a city that publishes their current parking data but isn’t yet supported by this project? 
Or you want to help out with one of the cities listed [here](https://github.com/offenesdresden/ParkAPI/issues?q=is%3Aopen+is%3Aissue+label%3Anew_data)? Awesome! Let’s get you started. 106 | 107 | Just fork this project and go ahead and duplicate `cities/Sample_City.py` as a place to get started. Also have a look at other city scrapers for reference. The basic idea is to include all code specific to gathering data for a city in its file. 108 | 109 | If you have the necessary geodata it'd be great if you could create a geojson file as well. Its name is the same as the city and in the same directory, just with `.geojson` at the end. 110 | [geojson.io](http://geojson.io) is definitely a recommended resource as well! 111 | 112 | When you're done don't forget to include your new city in the tests (`./tests/test_cities.py` - it's only three lines exactly identical to the other cities in there) and run them to see if it all works out. 113 | 114 | Now all that's left to do is to send us a pull request with your new stuff :) 115 | 116 | Very cool! Thanks for helping out! [You rock!](http://i.giphy.com/JVdF14CQQH7gs.gif) 117 | 118 | *Note*: Please don't include umlauts or other special characters in the name of the city file(s). The correct city name is specified inside the `city.py` file, but the filename should be ascii-compatible and with underscores instead of spaces. Should a city with the same name already exist you're going to have to find some way to make it unique, maybe by including a state or region? 119 | 120 | #### Credits 121 | 122 | Image header by [Mattes](https://commons.wikimedia.org/wiki/User:Mattes) (Own work) [CC BY 2.0 de](http://creativecommons.org/licenses/by/2.0/de/deed.en), via Wikimedia Commons 123 | --------------------------------------------------------------------------------