├── runtime.txt
├── Procfile
├── migrations
│   ├── README
│   ├── script.py.mako
│   ├── alembic.ini
│   ├── versions
│   │   └── 8906a52041a4_.py
│   └── env.py
├── static
│   ├── favicon.ico
│   ├── images
│   │   ├── markers
│   │   │   ├── red.png
│   │   │   ├── green.png
│   │   │   ├── green-dot.png
│   │   │   └── red-dot.png
│   │   ├── sl-cem-logo.png
│   │   ├── sl-cem-header.png
│   │   ├── samples
│   │   │   ├── hs-gross.jpg
│   │   │   ├── hs-oneil.jpg
│   │   │   ├── app-sample.png
│   │   │   └── hs-anderson.jpg
│   │   └── headstone
│   │       └── no-image.png
│   ├── css
│   │   └── map.css
│   └── js
│       └── map.js
├── errors.py
├── sample.env
├── manage.py
├── requirements.txt
├── config.py
├── LICENSE
├── .gitignore
├── templates
│   └── index.html
├── README.md
├── admin.py
├── models.py
├── tests.py
└── app.py
/runtime.txt:
--------------------------------------------------------------------------------
1 | python-3.5.1
2 |
--------------------------------------------------------------------------------
/Procfile:
--------------------------------------------------------------------------------
1 | web: gunicorn app:app
2 |
--------------------------------------------------------------------------------
/migrations/README:
--------------------------------------------------------------------------------
1 | Generic single-database configuration.
--------------------------------------------------------------------------------
/static/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/favicon.ico
--------------------------------------------------------------------------------
/static/images/markers/red.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/markers/red.png
--------------------------------------------------------------------------------
/static/images/sl-cem-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/sl-cem-logo.png
--------------------------------------------------------------------------------
/static/images/markers/green.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/markers/green.png
--------------------------------------------------------------------------------
/static/images/sl-cem-header.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/sl-cem-header.png
--------------------------------------------------------------------------------
/static/images/markers/green-dot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/markers/green-dot.png
--------------------------------------------------------------------------------
/static/images/markers/red-dot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/markers/red-dot.png
--------------------------------------------------------------------------------
/static/images/samples/hs-gross.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/samples/hs-gross.jpg
--------------------------------------------------------------------------------
/static/images/samples/hs-oneil.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/samples/hs-oneil.jpg
--------------------------------------------------------------------------------
/static/images/headstone/no-image.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/headstone/no-image.png
--------------------------------------------------------------------------------
/static/images/samples/app-sample.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/samples/app-sample.png
--------------------------------------------------------------------------------
/static/images/samples/hs-anderson.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fitoprincipe/cemetery-map/master/static/images/samples/hs-anderson.jpg
--------------------------------------------------------------------------------
/errors.py:
--------------------------------------------------------------------------------
1 | ERR_GENERAL = 'error'
2 | ERR_NO_FILE_SPECIFIED = 'error - no file specified'
3 | ERR_NO_SUCH_BURIAL = 'error - no such burial ID exists'
4 | ERR_NOT_IMAGE = 'error - file is not an image file'
5 |
--------------------------------------------------------------------------------
/sample.env:
--------------------------------------------------------------------------------
1 | source env/bin/activate
2 | export APP_SETTINGS="config.DevelopmentConfig"
3 | export DATABASE_URL="postgresql://localhost/cemdb"
4 | export GOOGLE_MAPS_KEY="MAPS_KEY_GOES_HERE"
5 | export GOOGLE_CLIENT_ID="CLIENT_ID_GOES_HERE"
6 | export GOOGLE_CLIENT_SECRET="CLIENT_SECRET_GOES_HERE"
7 | export GOOGLE_ADMIN_EMAIL_LIST="['slcemdev@gmail.com','jbshep@gmail.com']"
8 |
--------------------------------------------------------------------------------
/manage.py:
--------------------------------------------------------------------------------
1 | from flask_script import Manager
2 | from flask_migrate import Migrate, MigrateCommand
3 | import os
4 |
5 | from app import app, db
6 |
7 | app.config.from_object(os.environ['APP_SETTINGS'])
8 |
9 | migrate = Migrate(app, db)
10 | manager = Manager(app)
11 |
12 | manager.add_command('db', MigrateCommand)
13 |
14 | if __name__ == '__main__':
15 | manager.run()
16 |
--------------------------------------------------------------------------------
/migrations/script.py.mako:
--------------------------------------------------------------------------------
1 | """${message}
2 |
3 | Revision ID: ${up_revision}
4 | Revises: ${down_revision}
5 | Create Date: ${create_date}
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = ${repr(up_revision)}
11 | down_revision = ${repr(down_revision)}
12 |
13 | from alembic import op
14 | import sqlalchemy as sa
15 | ${imports if imports else ""}
16 |
17 | def upgrade():
18 | ${upgrades if upgrades else "pass"}
19 |
20 |
21 | def downgrade():
22 | ${downgrades if downgrades else "pass"}
23 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | alembic==0.8.6
2 | click==6.6
3 | Flask==0.11.1
4 | Flask-Admin==1.4.2
5 | Flask-Debug==0.4.3
6 | Flask-Migrate==1.8.1
7 | Flask-Script==2.0.5
8 | Flask-SQLAlchemy==2.1
9 | google-api-python-client==1.6.2
10 | gunicorn==19.6.0
11 | httplib2==0.10.3
12 | itsdangerous==0.24
13 | Jinja2==2.8
14 | Mako==1.0.4
15 | MarkupSafe==0.23
16 | oauth2client==4.0.0
17 | olefile==0.44
18 | pep8==1.7.0
19 | Pillow==4.0.0
20 | psycopg2==2.6.2
21 | pyasn1==0.2.3
22 | pyasn1-modules==0.0.8
23 | python-editor==1.0.1
24 | requests==2.10.0
25 | rsa==3.4.2
26 | six==1.10.0
27 | SQLAlchemy==1.0.14
28 | uritemplate==3.0.0
29 | Werkzeug==0.11.10
30 | WTForms==2.1
31 |
--------------------------------------------------------------------------------
/migrations/alembic.ini:
--------------------------------------------------------------------------------
1 | # A generic, single database configuration.
2 |
3 | [alembic]
4 | # template used to generate migration files
5 | # file_template = %%(rev)s_%%(slug)s
6 |
7 | # set to 'true' to run the environment during
8 | # the 'revision' command, regardless of autogenerate
9 | # revision_environment = false
10 |
11 |
12 | # Logging configuration
13 | [loggers]
14 | keys = root,sqlalchemy,alembic
15 |
16 | [handlers]
17 | keys = console
18 |
19 | [formatters]
20 | keys = generic
21 |
22 | [logger_root]
23 | level = WARN
24 | handlers = console
25 | qualname =
26 |
27 | [logger_sqlalchemy]
28 | level = WARN
29 | handlers =
30 | qualname = sqlalchemy.engine
31 |
32 | [logger_alembic]
33 | level = INFO
34 | handlers =
35 | qualname = alembic
36 |
37 | [handler_console]
38 | class = StreamHandler
39 | args = (sys.stderr,)
40 | level = NOTSET
41 | formatter = generic
42 |
43 | [formatter_generic]
44 | format = %(levelname)-5.5s [%(name)s] %(message)s
45 | datefmt = %H:%M:%S
46 |
--------------------------------------------------------------------------------
/config.py:
--------------------------------------------------------------------------------
1 | import os
2 | basedir = os.path.abspath(os.path.dirname(__file__))
3 |
4 |
5 | class Config(object):
6 | DEBUG = False
7 | TESTING = False
8 | CSRF_ENABLED = True
9 | SECRET_KEY = 'to-be-determined'
10 | SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
11 | GOOGLE_MAPS_KEY = os.environ['GOOGLE_MAPS_KEY']
12 | GOOGLE_CLIENT_ID = os.environ['GOOGLE_CLIENT_ID']
13 | GOOGLE_CLIENT_SECRET = os.environ['GOOGLE_CLIENT_SECRET']
14 | GOOGLE_ADMIN_EMAIL_LIST = []
15 | if 'GOOGLE_ADMIN_EMAIL_LIST' in os.environ:
16 | GOOGLE_ADMIN_EMAIL_LIST = eval(os.environ['GOOGLE_ADMIN_EMAIL_LIST'])
17 | # HS_IMAGE_TARGET can be set to either 'db' or 'file' depending on
18 | # where we wish to store images, either a LargeBinary in the database
19 | # or in the filesystem.
20 | HS_IMAGE_TARGET = 'db'
21 |
22 |
23 | class ProductionConfig(Config):
24 | DEBUG = False
25 |
26 |
27 | class DevelopmentConfig(Config):
28 | DEVELOPMENT = True
29 | DEBUG = True
30 |
31 |
32 | class TestingConfig(Config):
33 | TESTING = True
34 |
--------------------------------------------------------------------------------
/static/css/map.css:
--------------------------------------------------------------------------------
1 | body {
2 | background-image: url('/static/images/sl-cem-header.png');
3 | background-size: 2500px 200px;
4 | }
5 | #logo {
6 | height: 100px;
7 | padding-bottom: 20px;
8 | }
9 | .hdr-bar {
10 | background-color: rgb(175, 186, 126);
11 | }
12 | .ftr-bar {
13 | background-color: rgb(73, 80, 40);
14 | }
15 | .bld-text {
16 | color: rgb(97, 58, 40);
17 | }
18 | #hdr-row {
19 | margin-top: 8px;
20 | margin-bottom: 8px;
21 | }
22 | #search-btn {
23 | margin-top: 4px;
24 | }
25 | #reset-btn {
26 | margin-top: 4px;
27 | }
28 | .hdr-text {
29 | font-size: 22pt;
30 | color: black;
31 | }
32 | .hdr-text-1 {
33 | font-size: 22pt;
34 | color: grey;
35 | }
36 | .hdr-text-2 {
37 | font-size: 22pt;
38 | color: lightBlue;
39 | }
40 | .btn-toolbar {
41 | margin-left: 0px;
42 | padding: 6px;
43 | padding-top: 4px;
44 | background-color: lightGrey;
45 | background-color: rgb(175, 186, 126);
46 | }
47 | .dropdown-menu {
48 | padding: 8px;
49 | }
50 | #error-message {
51 | min-height: 1em;
52 | color: red;
53 | font-weight: bold;
54 | }
55 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License (MIT)
2 |
3 | Copyright (c) 2016 BVU Computer Science
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | env/
12 | build/
13 | develop-eggs/
14 | dist/
15 | downloads/
16 | eggs/
17 | .eggs/
18 | lib/
19 | lib64/
20 | parts/
21 | sdist/
22 | var/
23 | *.egg-info/
24 | .installed.cfg
25 | *.egg
26 |
27 | # PyInstaller
28 | # Usually these files are written by a python script from a template
29 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
30 | *.manifest
31 | *.spec
32 |
33 | # Installer logs
34 | pip-log.txt
35 | pip-delete-this-directory.txt
36 |
37 | # Unit test / coverage reports
38 | htmlcov/
39 | .tox/
40 | .coverage
41 | .coverage.*
42 | .cache
43 | nosetests.xml
44 | coverage.xml
45 | *,cover
46 | .hypothesis/
47 |
48 | # Translations
49 | *.mo
50 | *.pot
51 |
52 | # Django stuff:
53 | *.log
54 | local_settings.py
55 |
56 | # Flask stuff:
57 | instance/
58 | .webassets-cache
59 |
60 | # Scrapy stuff:
61 | .scrapy
62 |
63 | # Sphinx documentation
64 | docs/_build/
65 |
66 | # PyBuilder
67 | target/
68 |
69 | # IPython Notebook
70 | .ipynb_checkpoints
71 |
72 | # pyenv
73 | .python-version
74 |
75 | # celery beat schedule file
76 | celerybeat-schedule
77 |
78 | # dotenv
79 | .env
80 |
81 | # virtualenv
82 | venv/
83 | ENV/
84 |
85 | # Mac stuff
86 | .DS_Store
87 |
88 | # Spyder project settings
89 | .spyderproject
90 |
91 | # Rope project settings
92 | .ropeproject
93 |
94 | # headstone images
95 | static/images/headstone/hs-*.*
96 |
97 | # DB backups
98 | static/download/*.*
99 |
100 | # debugging data
101 | data/*
102 |
--------------------------------------------------------------------------------
/migrations/versions/8906a52041a4_.py:
--------------------------------------------------------------------------------
1 | """empty message
2 |
3 | Revision ID: 8906a52041a4
4 | Revises: None
5 | Create Date: 2017-03-17 15:46:02.839375
6 |
7 | """
8 |
9 | # revision identifiers, used by Alembic.
10 | revision = '8906a52041a4'
11 | down_revision = None
12 |
13 | from alembic import op
14 | import sqlalchemy as sa
15 |
16 |
17 | def upgrade():
18 | ### commands auto generated by Alembic - please adjust! ###
19 | op.create_table('burials',
20 | sa.Column('id', sa.Integer(), nullable=False),
21 | sa.Column('sd_type', sa.String(), nullable=True),
22 | sa.Column('sd', sa.String(), nullable=True),
23 | sa.Column('lot', sa.String(), nullable=True),
24 | sa.Column('space', sa.String(), nullable=True),
25 | sa.Column('lot_owner', sa.String(), nullable=True),
26 | sa.Column('year_purch', sa.String(), nullable=True),
27 | sa.Column('last_name', sa.String(), nullable=True),
28 | sa.Column('first_name', sa.String(), nullable=True),
29 | sa.Column('sex', sa.String(), nullable=True),
30 | sa.Column('birth_date', sa.String(), nullable=True),
31 | sa.Column('birth_place', sa.String(), nullable=True),
32 | sa.Column('death_date', sa.String(), nullable=True),
33 | sa.Column('age', sa.String(), nullable=True),
34 | sa.Column('death_place', sa.String(), nullable=True),
35 | sa.Column('death_cause', sa.String(), nullable=True),
36 | sa.Column('burial_date', sa.String(), nullable=True),
37 | sa.Column('notes', sa.String(), nullable=True),
38 | sa.Column('more_notes', sa.String(), nullable=True),
39 | sa.Column('hidden_notes', sa.String(), nullable=True),
40 | sa.Column('lat', sa.Float(), nullable=True),
41 | sa.Column('lng', sa.Float(), nullable=True),
42 | sa.PrimaryKeyConstraint('id')
43 | )
44 | op.create_table('burial_images',
45 | sa.Column('id', sa.Integer(), nullable=False),
46 | sa.Column('burial_id', sa.Integer(), nullable=True),
47 | sa.Column('filename', sa.String(), nullable=True),
48 | sa.Column('data', sa.LargeBinary(), nullable=True),
49 | sa.ForeignKeyConstraint(['burial_id'], ['burials.id'], ),
50 | sa.PrimaryKeyConstraint('id')
51 | )
52 | ### end Alembic commands ###
53 |
54 |
55 | def downgrade():
56 | ### commands auto generated by Alembic - please adjust! ###
57 | op.drop_table('burial_images')
58 | op.drop_table('burials')
59 | ### end Alembic commands ###
60 |
--------------------------------------------------------------------------------
/templates/index.html:
--------------------------------------------------------------------------------
<!-- The markup of templates/index.html was lost in this extraction; only fragments remain. -->
<!-- Recoverable details: page title "Search | Storm Lake Cemetery, Storm Lake, Iowa, U.S.A."; -->
<!-- the page pulls in static/css/map.css, static/js/map.js, and the Google Maps JavaScript API -->
<!-- keyed by the maps_key template variable passed from app.py's index() view. -->
--------------------------------------------------------------------------------
/migrations/env.py:
--------------------------------------------------------------------------------
1 | from __future__ import with_statement
2 | from alembic import context
3 | from sqlalchemy import engine_from_config, pool
4 | from logging.config import fileConfig
5 | import logging
6 |
7 | # this is the Alembic Config object, which provides
8 | # access to the values within the .ini file in use.
9 | config = context.config
10 |
11 | # Interpret the config file for Python logging.
12 | # This line sets up loggers basically.
13 | fileConfig(config.config_file_name)
14 | logger = logging.getLogger('alembic.env')
15 |
16 | # add your model's MetaData object here
17 | # for 'autogenerate' support
18 | # from myapp import mymodel
19 | # target_metadata = mymodel.Base.metadata
20 | from flask import current_app
21 | config.set_main_option('sqlalchemy.url',
22 | current_app.config.get('SQLALCHEMY_DATABASE_URI'))
23 | target_metadata = current_app.extensions['migrate'].db.metadata
24 |
25 | # other values from the config, defined by the needs of env.py,
26 | # can be acquired:
27 | # my_important_option = config.get_main_option("my_important_option")
28 | # ... etc.
29 |
30 |
31 | def run_migrations_offline():
32 | """Run migrations in 'offline' mode.
33 |
34 | This configures the context with just a URL
35 | and not an Engine, though an Engine is acceptable
36 | here as well. By skipping the Engine creation
37 | we don't even need a DBAPI to be available.
38 |
39 | Calls to context.execute() here emit the given string to the
40 | script output.
41 |
42 | """
43 | url = config.get_main_option("sqlalchemy.url")
44 | context.configure(url=url)
45 |
46 | with context.begin_transaction():
47 | context.run_migrations()
48 |
49 |
50 | def run_migrations_online():
51 | """Run migrations in 'online' mode.
52 |
53 | In this scenario we need to create an Engine
54 | and associate a connection with the context.
55 |
56 | """
57 |
58 | # this callback is used to prevent an auto-migration from being generated
59 | # when there are no changes to the schema
60 | # reference: http://alembic.readthedocs.org/en/latest/cookbook.html
61 | def process_revision_directives(context, revision, directives):
62 | if getattr(config.cmd_opts, 'autogenerate', False):
63 | script = directives[0]
64 | if script.upgrade_ops.is_empty():
65 | directives[:] = []
66 | logger.info('No changes in schema detected.')
67 |
68 | engine = engine_from_config(config.get_section(config.config_ini_section),
69 | prefix='sqlalchemy.',
70 | poolclass=pool.NullPool)
71 |
72 | connection = engine.connect()
73 | context.configure(connection=connection,
74 | target_metadata=target_metadata,
75 | process_revision_directives=process_revision_directives,
76 | **current_app.extensions['migrate'].configure_args)
77 |
78 | try:
79 | with context.begin_transaction():
80 | context.run_migrations()
81 | finally:
82 | connection.close()
83 |
84 | if context.is_offline_mode():
85 | run_migrations_offline()
86 | else:
87 | run_migrations_online()
88 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 | cemetery-map
4 | ============
5 | The cemetery-map Web application is a responsive Python+Flask+SQLAlchemy+Google
6 | Maps app that allows people to search for loved ones in a cemetery. This
7 | application is a Flask rewrite of a previous Node.js app written by
8 | [jbshep](http://github.com/jbshep) and his students, which was designed
9 | specifically for the cemetery in Storm Lake, Iowa, U.S.A.
10 |
11 | ## Installation
12 |
13 | Those interested in contributing code are encouraged to use virtualenv to
14 | manage their Python version, site-packages, and environment variables. After
15 | downloading and installing virtualenv, developers can create a virtual
16 | environment in a directory named `env` by typing something like
17 | `pyvenv-3.5 env`. To activate and configure the new virtual environment,
18 | developers should create their own `.env` file that can be sourced (i.e., by
19 | typing `source .env`) to populate the shell with the environment variables that
20 | hold API keys, database URLs, and other configuration values defined in
21 | `config.py`. A sample `.env` file named `sample.env` can be found in the
22 | repository root.
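
For example, a first-time setup might look like the following (the `pyvenv-3.5`
command and the paths are assumptions; adjust them for your system):
```console
pyvenv-3.5 env
cp sample.env .env   # then edit .env and fill in your own keys
source .env          # also activates env/ (see the first line of sample.env)
```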
23 |
24 | To begin installation of the app, you must first install PostgreSQL and add
25 | PostgreSQL's bin directory to your path. Run `psql` and type the command
26 | `create database cemdb;`. You can then build and run the app as follows:
27 | ```console
28 | pip3 install -r requirements.txt
29 | python3 manage.py db upgrade
30 | python3 manage.py runserver
31 | ```
32 |
33 | You can now browse to [http://localhost:5000/](http://localhost:5000/) to view the running app.
34 |
35 | If you did not add PostgreSQL's bin directory to your PATH prior to executing
36 | the above commands, you will likely see the error `Error: pg_config executable
37 | not found.`
38 |
39 | ## Deploying on Heroku
40 |
41 | New developers can develop locally on their own computers. However, at some point you may wish to deploy to the cloud. This section describes how to deploy cemetery-map to Heroku. You only need to complete the instructions in this section if you want to deploy to Heroku.
42 |
43 | First, obtain a Heroku user ID and install the Heroku toolbelt by visiting [Heroku.com](http://www.heroku.com/). When you are logged in to the Heroku Web site and the developer dashboard, create a new Python app named cemetery-map. If that name is currently in use, you may choose a slightly different name. Then, in your shell in your local repository directory, type the following commands; these assume your app is named cemetery-map.
44 |
45 | First, give the Heroku toolbelt your Heroku developer credentials by typing:
46 | ```console
47 | heroku login
48 | ```
49 |
50 | Then, add a new git remote for Heroku so that we can push code to it.
51 | ```console
52 | heroku git:remote -a cemetery-map
53 | git push heroku master
54 | ```
55 |
56 | Our Heroku instance has environment variables that hold configuration information, just as we store such values in environment variables within our local virtual environment. We can see which variables have been defined by executing:
57 | ```console
58 | heroku config --app cemetery-map
59 | ```
60 |
61 | Initially the above command will return nothing. To add config variables for our Heroku app, use the `config:set` command, like this:
62 | ```console
63 | heroku config:set APP_SETTINGS=config.ProductionConfig --remote heroku
64 | ```
65 |
66 | Consult `config.py` to see which environment variables will need to be defined.
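
For example, the Google-related values that `config.py` reads could be set as
follows (the values are placeholders; `DATABASE_URL` is typically provided
automatically by the Heroku Postgres add-on):
```console
heroku config:set GOOGLE_MAPS_KEY=MAPS_KEY_GOES_HERE --remote heroku
heroku config:set GOOGLE_CLIENT_ID=CLIENT_ID_GOES_HERE --remote heroku
heroku config:set GOOGLE_CLIENT_SECRET=CLIENT_SECRET_GOES_HERE --remote heroku
heroku config:set GOOGLE_ADMIN_EMAIL_LIST="['slcemdev@gmail.com']" --remote heroku
```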
67 |
68 | Finally, run the DB migrations remotely to create the necessary database tables.
69 |
70 | ```console
71 | heroku run python manage.py db upgrade --app cemetery-map
72 | ```
73 |
74 | ## Contributing
75 |
76 | BVU students interested in contributing should write code that conforms to the
77 | [PEP8](https://www.python.org/dev/peps/pep-0008/) coding standards. Individual files should be checked by typing `pep8 filename`. Developers are encouraged to use GitHub's fork/pull request mechanism for contributing to this repository.
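
For instance, before opening a pull request you might run (the file list below is
only an example):
```console
pep8 app.py models.py admin.py tests.py
```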
78 |
--------------------------------------------------------------------------------
/admin.py:
--------------------------------------------------------------------------------
1 | from models import Burial, BurialImage
2 | from flask_admin import Admin, form
3 | from flask_admin.contrib.sqla import ModelView
4 | from flask_admin.form.upload import FileUploadField
5 | from jinja2 import Markup
6 | from wtforms import ValidationError, fields
7 | # from wtforms.validators import required
8 | from wtforms.widgets import HTMLString, html_params, FileInput
9 |
10 |
11 | # LargeBinary upload code lifted from
12 | # http://stackoverflow.com/questions/33722132/storing-a-pdf-file-in-db-with-flask-admin
13 | class BlobUploadField(fields.StringField):
14 |
15 | widget = FileInput()
16 |
17 | def __init__(self, label=None, allowed_extensions=None, size_field=None,
18 | filename_field=None, mimetype_field=None, **kwargs):
19 |
20 | self.allowed_extensions = allowed_extensions
21 | self.size_field = size_field
22 | self.filename_field = filename_field
23 | self.mimetype_field = mimetype_field
24 | # validators = [required()]
25 | validators = []
26 |
27 | super(BlobUploadField, self).__init__(label, validators, **kwargs)
28 |
29 | def is_file_allowed(self, filename):
30 | """
31 | Check if file extension is allowed.
32 |
33 | :param filename:
34 | File name to check
35 | """
36 | if not self.allowed_extensions:
37 | return True
38 |
39 | return ('.' in filename and
40 | filename.rsplit('.', 1)[1].lower() in
41 | map(lambda x: x.lower(), self.allowed_extensions))
42 |
43 | def _is_uploaded_file(self, data):
44 | return data and data.filename
45 |
46 | def pre_validate(self, form):
47 | super(BlobUploadField, self).pre_validate(form)
48 | if (self._is_uploaded_file(self.data) and
49 | not self.is_file_allowed(self.data.filename)):
50 | raise ValidationError('Invalid file extension')
51 |
52 | def process_formdata(self, valuelist):
53 | if valuelist:
54 | data = valuelist[0]
55 | self.data = data
56 |
57 | def populate_obj(self, obj, name):
58 |
59 | if self._is_uploaded_file(self.data):
60 |
61 | _blob = self.data.read()
62 |
63 | setattr(obj, name, _blob)
64 |
65 | if self.size_field:
66 | setattr(obj, self.size_field, len(_blob))
67 |
68 | '''
69 | if self.filename_field:
70 | setattr(obj, self.filename_field, self.data.filename)
71 | '''
72 |
73 | if self.mimetype_field:
74 | setattr(obj, self.mimetype_field, self.data.content_type)
75 |
76 |
77 | class BurialModelView(ModelView):
78 | column_searchable_list = (Burial.last_name, Burial.first_name,
79 | Burial.lot_owner, Burial.sd_type,
80 | Burial.sd, Burial.lot, Burial.space)
81 |
82 |
83 | class BurialImageModelView(ModelView):
84 | form_extra_fields = {'data': BlobUploadField(
85 | label='File',
86 | allowed_extensions=['png', 'jpg', 'jpeg', 'gif'],
87 | size_field='size',
88 | filename_field='filename',
89 | mimetype_field='mimetype'
90 | )}
91 |
92 | def _photo_formatter(view, context, burialimage, attr_name):
93 | ''' This formatter displays the first ten bytes.
94 | '''
95 | attr_val = getattr(burialimage, attr_name)
96 | return 'NULL' if (attr_val is None or
97 | len(attr_val) == 0) else str(attr_val[0:10]) + '...'
98 |
99 | def _download_formatter(self, context, burialimage, attr_name):
100 | ''' This formatter displays a download link that can be used to
101 | download the image for viewing.
102 | '''
103 | attr_val = getattr(burialimage, attr_name)
104 |
105 | if attr_val is None or len(attr_val) == 0:
106 | return 'NULL'
107 |
108 | return Markup("<a href='{url}'>Download</a>"
109 | .format(url=self.get_url('download_image',
110 | burial_id=burialimage.burial_id,
111 | image_id=burialimage.id)))
112 |
113 | column_formatters = {
114 | 'data': _download_formatter,
115 | }
116 |
--------------------------------------------------------------------------------
/models.py:
--------------------------------------------------------------------------------
1 | from app import db
2 | from json import JSONEncoder
3 |
4 |
5 | class Burial(db.Model):
6 | __tablename__ = 'burials'
7 |
8 | # Most columns are strings--even dates--because a city's data
9 | # is often free-form in cases where dates are fuzzy,
10 | # e.g., '1960?' (note the question mark).
11 | id = db.Column(db.Integer, primary_key=True)
12 | sd_type = db.Column(db.String())
13 | sd = db.Column(db.String())
14 | lot = db.Column(db.String())
15 | space = db.Column(db.String())
16 | lot_owner = db.Column(db.String())
17 | year_purch = db.Column(db.String())
18 | last_name = db.Column(db.String())
19 | first_name = db.Column(db.String())
20 | sex = db.Column(db.String())
21 | birth_date = db.Column(db.String())
22 | birth_place = db.Column(db.String())
23 | death_date = db.Column(db.String())
24 | age = db.Column(db.String())
25 | death_place = db.Column(db.String())
26 | death_cause = db.Column(db.String())
27 | burial_date = db.Column(db.String())
28 | notes = db.Column(db.String())
29 | more_notes = db.Column(db.String())
30 | hidden_notes = db.Column(db.String())
31 | lat = db.Column(db.Float())
32 | lng = db.Column(db.Float())
33 |
34 | def __repr__(self):
35 | return '<Burial %r: %r, %r>' % \
36 | (self.id, self.last_name, self.first_name)
37 |
38 |
39 | class BurialImage(db.Model):
40 | __tablename__ = 'burial_images'
41 |
42 | id = db.Column(db.Integer, primary_key=True)
43 | burial_id = db.Column(db.Integer, db.ForeignKey('burials.id'))
44 | burial = db.relationship('Burial',
45 | backref=db.backref('burial_images',
46 | lazy='dynamic'))
47 |
48 | # Depending on deployment environment, we may choose to store images in the
49 | # filesystem or in the DB. We will maintain columns for both and then rely
50 | # on the Flask app config object to tell us which we should use.
51 | filename = db.Column(db.String())
52 | data = db.Column(db.LargeBinary)
53 |
54 |
55 | class BurialJSONEncoder(JSONEncoder):
56 | def default(self, o):
57 | d = o.__dict__
58 |
59 | # Remove values that are not JSON serializable.
60 | if '_sa_instance_state' in d:
61 | del d['_sa_instance_state']
62 |
63 | return d
64 |
65 |
66 | def get_burials(columns_dict={}):
67 | """Retrieves burials matching the given dictionary criteria.
68 | If no arguments or an empty dictionary is given, all burials will
69 | be returned.
70 | """
71 | q = Burial.query
72 | for attr, value in columns_dict.items():
73 | if value != "":
74 | q = q.filter(getattr(Burial, attr).like("%%%s%%" % value))
75 | return q.all()
76 |
77 |
78 | def get_burial(id):
79 | return Burial.query.filter(Burial.id == id).first()
80 |
81 |
82 | def add_burial(columns_dict):
83 | b = Burial(**columns_dict)
84 | db.session.add(b)
85 | db.session.commit()
86 |
87 |
88 | def remove_all_burials():
89 | Burial.query.delete()
90 | BurialImage.query.delete()
91 | db.session.commit()
92 | db.engine.execute('alter sequence burials_id_seq RESTART with 1')
93 | db.engine.execute('alter sequence burial_images_id_seq RESTART with 1')
94 |
95 |
96 | def get_burial_images(burial_id=None):
97 | if burial_id is None:
98 | return BurialImage.query.all()
99 |
100 | return BurialImage.query.filter(BurialImage.burial_id == burial_id).all()
101 |
102 |
103 | def get_burial_image(image_id):
104 | return BurialImage.query.filter(BurialImage.id == image_id).first()
105 |
106 |
107 | def add_burial_image(burial_id, filename, data):
108 | bi = BurialImage()
109 | bi.burial_id = burial_id
110 | bi.filename = filename
111 | bi.data = data
112 | db.session.add(bi)
113 | db.session.commit()
114 |
115 |
116 | def set_latlng(the_id, lat, lng):
117 | burial = get_burial(the_id)
118 | burial.lat = lat
119 | burial.lng = lng
120 | db.session.commit()
121 |
122 |
123 | def make_dummy_data():
124 | b = get_burial(1)
125 | b.lat = 42.634739
126 | b.lng = -95.173137
127 | db.session.commit()
128 |
129 | b = get_burial(2)
130 | b.lat = 42.634639
131 | b.lng = -95.173237
132 | db.session.commit()
133 |
134 | b = get_burial(3)
135 | b.lat = 42.633739
136 | b.lng = -95.175087
137 | db.session.commit()
138 |
139 | b = get_burial(1878)
140 | b.lat = 42.633839
141 | b.lng = -95.175187
142 | db.session.commit()
143 |
144 | b = get_burial(1879)
145 | b.lat = 42.633939
146 | b.lng = -95.175287
147 | db.session.commit()
148 |
--------------------------------------------------------------------------------
/tests.py:
--------------------------------------------------------------------------------
1 | import os
2 | import unittest
3 | import json
4 |
5 | from app import app, db
6 | from models import Burial, BurialImage, add_burial
7 |
8 | SQLITE_DB_PATH = 'data/cemdb-test.db'
9 | TEST_BURIAL_ID = 1
10 | TEST_LAT = 42.641333
11 | TEST_LNG = -95.211234
12 | TEST_IMAGE_PATHNAME = 'static/images/samples/hs-anderson.jpg'
13 | TEST_IMAGE_FILENAME = 'hs-anderson.jpg'
14 |
15 |
16 | class TestCase(unittest.TestCase):
17 | def setUp(self):
18 | app.config['TESTING'] = True
19 | app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + SQLITE_DB_PATH
20 | self.client = app.test_client()
21 | db.create_all()
22 | self.create_test_data()
23 |
24 |
25 | def tearDown(self):
26 | db.session.remove()
27 | db.drop_all()
28 | os.remove(SQLITE_DB_PATH)
29 |
30 |
31 | def create_test_data(self):
32 | '''Creates test burial objects, keeps them resident in
33 | self.test_burials, and adds them to the temporary test DB.
34 | '''
35 | self.test_burials = [
36 | dict(first_name='Stewey', last_name='Anderson'),
37 | dict(first_name='Linus', last_name='Anderson'),
38 | dict(first_name='Linus', last_name='Smith'),
39 | ]
40 |
41 | for b in self.test_burials:
42 | add_burial(b)
43 |
44 |
45 | def test_api_search(self):
46 | '''Tests the Search API both with and without parameters.
47 | Searching without parameters should return all burial records.
48 | '''
49 | res = self.client.post('/api/search', data=dict(last_name='Anderson'))
50 | list_from_api = json.loads(str(res.get_data(), encoding='utf-8'))
51 | list_from_test_burials = [b for b in self.test_burials
52 | if b['last_name'] == 'Anderson']
53 | assert len(list_from_api) == len(list_from_test_burials)
54 |
55 | res = self.client.post('/api/search', data={})
56 | list_all_from_api = json.loads(str(res.get_data(), encoding='utf-8'))
57 | assert len(list_all_from_api) == len(self.test_burials)
58 |
59 |
60 | def test_integ_camera_get(self):
61 | '''Ensures camera integration REST endpoint burial-summary
62 | returns all burials with the prescribed reduced subset
63 | of attributes.
64 | '''
65 | res = self.client.get('/api/burial-summary')
66 | list_from_api = json.loads(str(res.get_data(), encoding='utf-8'))
67 | assert len(list_from_api) == len(self.test_burials)
68 | assert len(list_from_api) > 0
69 | first_burial = list_from_api[0]
70 | assert 'id' in first_burial
71 | assert 'first_name' in first_burial
72 | assert 'last_name' in first_burial
73 | assert 'birth_date' in first_burial
74 | assert 'death_date' in first_burial
75 |
76 |
77 | def test_integ_camera_post(self):
78 | # Do a POST to /api/update-burial.
79 | with open(TEST_IMAGE_PATHNAME, 'rb') as test_image_file:
80 | res_update = self.client.post('/api/update-burial',
81 | data=dict(
82 | id=str(TEST_BURIAL_ID),
83 | lat=str(TEST_LAT),
84 | lng=str(TEST_LNG),
85 | file=(test_image_file, TEST_IMAGE_FILENAME),
86 | ))
87 |
88 | # Check the integrity of the POST. Determine whether it was
89 | # successful using other API calls.
90 |
91 | # First, check the lat/lng.
92 | res_search = self.client.get('/api/search',
93 | data=dict(id=TEST_BURIAL_ID))
94 | burial_list = json.loads(str(res_search.get_data(), encoding='utf-8'))
95 | assert len(burial_list) == 1
96 | single_burial = burial_list[0]
97 | assert single_burial['lat'] == TEST_LAT
98 | assert single_burial['lng'] == TEST_LNG
99 |
100 | # Then, check the headstone image to see if the image retrieved from
101 | # the API matches the image from the filesystem.
102 | res_image_ids = self.client.get('/api/headstones/{}'
103 | .format(TEST_BURIAL_ID))
104 | image_ids = json.loads(str(res_image_ids.get_data(), encoding='utf-8'))
105 | image_id = image_ids[0]
106 | res_image = self.client.get('/api/headstone/{}/{}'
107 | .format(TEST_BURIAL_ID, image_id))
108 | image_data_from_api = res_image.get_data()
109 | with open(TEST_IMAGE_PATHNAME, 'rb') as test_image_file:
110 | image_data_from_disk = test_image_file.read()
111 | assert image_data_from_api == image_data_from_disk
112 |
113 |
114 | if __name__ == '__main__':
115 | unittest.main()
116 |
--------------------------------------------------------------------------------
/static/js/map.js:
--------------------------------------------------------------------------------
1 | // The main map.
2 | var map;
3 | // This is only one infowindow. We will just move it around. Callbacks will
4 | // set the content.
5 | var infowindow;
6 | var markers;
7 | var missingMarker;
8 | var defaultMapCenter;
9 | var missingLatLng;
10 | function initialize() {
11 | //defaultCenter = new google.maps.LatLng(42.634739, -95.174137);
12 | defaultMapCenter = new google.maps.LatLng(42.634739, -95.173137);
13 | missingLatLng = new google.maps.LatLng(42.633739, -95.175087);
14 |
15 | // Map props can be hardcoded. The cemetery is unlikely to get up and move away.
16 | var mapProp = {
17 | center: defaultMapCenter,
18 | //zoom:18,
19 | zoom:17,
20 | mapTypeId:google.maps.MapTypeId.SATELLITE
21 | };
22 |
23 | map = new google.maps.Map(document.getElementById("googleMap"), mapProp);
24 | infowindow = new google.maps.InfoWindow(); // content prop to be set later
25 |
26 | markers = new Array();
27 |
28 | missingMarker = new google.maps.Marker({
29 | position: missingLatLng,
30 | icon: '/static/images/markers/red.png',
31 | map: null
32 | });
33 | google.maps.event.addListener(missingMarker, 'click', function(evt) {
34 | openInfowindow(this);
35 | });
36 | }
37 |
38 | function bitPitEgg() {
39 | var burial = {
40 | id : 0,
41 | sd_type : "",
42 | sd : "",
43 | lot : "",
44 | space : "",
45 | lot_owner : "",
46 | year_purch : "",
47 | first_name : "The Bit Pit, ",
48 | last_name : "Home of BVU Computer Science",
49 | sex : "",
50 | birth_date : "N/A",
51 | birth_place : "",
52 | death_date : "N/A",
53 | age : "",
54 | death_place : "",
55 | death_cause : "",
56 | burial_date : "",
57 | notes : "",
58 | more_notes : "",
59 | hidden_notes : "",
60 | lat : 42.641333,
61 | lng : -95.211234,
62 | img : ""
63 | };
64 | return burial;
65 | }
66 |
67 | /**
68 | * Clears burial markers and the missing marker.
69 | */
70 | function clearMarkers() {
71 | for (var i = 0; i < markers.length; i++) {
72 | markers[i].setMap(null);
73 | if (missingMarker != markers[i]) {
74 | markers[i] = null;
75 | }
76 | }
77 | while (markers.length > 0) {
78 | markers.pop();
79 | }
80 |
81 | //placeMarker( bitPitEgg() );
82 | }
83 |
84 | /**
85 | * Creates burial markers for any results with valid lat/lng.
86 | * Activates the missing marker for results without a lat/lng.
87 | */
88 | function updateMarkers(ary) {
89 | for (var i = 0; i < ary.length; i++) {
90 | if (ary[i].lat != 0 && ary[i].lng != 0) {
91 | // Place burial marker.
92 | marker = makeBurialMarker(ary[i]);
93 | markers.push(marker);
94 | } else {
95 | if (missingMarker.map != map) {
96 | // Activating missing marker.
97 | missingMarker.setMap(map);
98 | missingMarker.numberMissing = 0;
99 | markers.push(missingMarker);
100 | } else {
101 | missingMarker.numberMissing++;
102 | }
103 | }
104 | }
105 | }
106 |
107 | /**
108 | * san is short for 'sanitize'. Instead of showing blanks for field values,
109 | * if a field lacks a value we will show its value as 'Not listed'.
110 | */
111 | function san(txt) {
112 | if (txt == undefined || txt.trim() == "") {
113 | return "Not listed";
114 | } else {
115 | return txt;
116 | }
117 | }
118 |
119 | function openInfowindow(marker) {
120 | if (marker == missingMarker) {
121 | infowindow.setContent('There are ' + marker.numberMissing + ' additional people '
122 | + 'buried in the cemetery<br/>'
123 | + 'that match your criteria, but we don\'t have location<br/>'
124 | + 'information for them yet. Please bear with us while we<br/>'
125 | + 'update our site. Thank you.');
126 | } else {
127 | var content = "<b>" + marker.first_name + " " + marker.last_name + "</b><br/>"
128 | + "<iframe src='/headstones/" + marker.id + "'></iframe>"
129 | + "<br/>"
130 | + "Born Date: " + san(marker.birth_date)
131 | + "<br/>"
132 | + "Born Place: " + san(marker.birth_place)
133 | + "<br/>"
134 | + "Death Date: " + san(marker.death_date)
135 | + "<br/>"
136 | + "Death Place: " + san(marker.death_place)
137 | + "<br/>"
138 | + "Burial Date: " + san(marker.burial_date)
139 | + "<br/>"
140 | + "Lot Owner: " + san(marker.lot_owner);
141 | infowindow.setContent(content);
142 | }
143 | infowindow.open(map, marker);
144 | }
145 |
146 | function makeBurialMarker(burial) {
147 | var marker = new google.maps.Marker({
148 | position: new google.maps.LatLng(burial.lat, burial.lng),
149 | icon: '/static/images/markers/green-dot.png',
150 | map: map
151 | });
152 |
153 | // Add info to the marker so that we can display it later in the infowindow.
154 | marker.id = burial.id;
155 | marker.sd_type = burial.sd_type;
156 | marker.sd = burial.sd;
157 | marker.lot = burial.lot;
158 | marker.space = burial.space;
159 | marker.lot_owner = burial.lot_owner;
160 | marker.year_purch = burial.year_purch;
161 | marker.first_name = burial.first_name;
162 | marker.last_name = burial.last_name;
163 | marker.sex = burial.sex;
164 | marker.birth_date = burial.birth_date;
165 | marker.birth_place = burial.birth_place;
166 | marker.death_date = burial.death_date;
167 | marker.age = burial.age;
168 | marker.death_place = burial.death_place;
169 | marker.death_cause = burial.death_cause;
170 | marker.burial_date = burial.burial_date;
171 | marker.notes = burial.notes;
172 | marker.more_notes = burial.more_notes;
173 | marker.hidden_notes = burial.hidden_notes;
174 | marker.lat = burial.lat;
175 | marker.lng = burial.lng;
176 |
177 | google.maps.event.addListener(marker, 'click', function(evt) {
178 | openInfowindow(this);
179 | });
180 |
181 | return marker;
182 | }
183 |
184 |
185 | google.maps.event.addDomListener(window, 'load', initialize);
186 |
187 | function handleSearchResponse(res) {
188 | try {
189 | res = eval(res);
190 | console.log(res);
191 | console.log(res.length);
192 | clearMarkers();
193 | if (res.length == 0) {
194 | $('#error-message').html('No results found.');
195 | } else {
196 | updateMarkers(res);
197 | $('#search-btn').dropdown('toggle');
198 | $('#error-message').html('');
199 | }
200 | } catch (e) {
201 | $('#error-message').html('Search temporarily unavailable.');
202 | console.log(e);
203 | }
204 | }
205 |
206 | $(document).ready(function() {
207 | $('.dropdown-menu input, .dropdown-menu label').click(function(e) {
208 | // Prevent clicks on the dropdown from dismissing the dropdown.
209 | e.stopPropagation();
210 | });
211 |
212 | $('#do-search-btn').click(function(e) {
213 | // Make 'searching' feedback, probably next to do-search-btn.
214 | // Fire jQuery Ajax req
215 | $.post( '/api/search',
216 | {
217 | first_name: $('#first_name').get(0).value,
218 | last_name: $('#last_name').get(0).value,
219 | birth_place: $('#birth_place').get(0).value,
220 | birth_date: $('#birth_date').get(0).value,
221 | death_place: $('#death_place').get(0).value,
222 | death_date: $('#death_date').get(0).value,
223 | lot_owner: $('#lot_owner').get(0).value
224 | },
225 | handleSearchResponse );
226 | });
227 |
228 |
229 | $('#reset-btn').click(function(e) {
230 | clearMarkers();
231 | map.panTo(defaultMapCenter);
232 | });
233 |
234 | /*
235 | $('.btn-group').on('show.bs.dropdown', function () {
236 | console.log('got .dropdown-menu show.bs.dropdown');
237 | });
238 | $('.btn-group').on('hide.bs.dropdown', function () {
239 | console.log('got .dropdown-menu hide.bs.dropdown');
240 | });
241 | */
242 | });
243 |
--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
1 | from flask import abort, Flask, json, redirect, \
2 | render_template, request, Response, url_for, session
3 | from flask_sqlalchemy import SQLAlchemy
4 | from werkzeug.utils import secure_filename
5 | from errors import *
6 | from apiclient import discovery
7 | from oauth2client import client
8 | from time import gmtime, strftime
9 | import os
10 | import random
11 | import string
12 | import httplib2
13 | import json
14 | import shutil
15 | import zipfile
16 |
17 |
18 | app = Flask(__name__)
19 | app.config.from_object(os.environ['APP_SETTINGS'])
20 | app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
21 | db = SQLAlchemy(app)
22 |
23 |
24 | UPLOAD_FOLDER = 'static/images/headstone'
25 | ALLOWED_IMAGE_EXTENSIONS = set(['jpg', 'gif', 'png'])
26 | app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
27 | ALLOWED_DATA_EXTENSIONS = set(['csv', 'zip'])
28 |
29 | DOWNLOAD_FOLDER = 'static/download'
30 | app.config['DOWNLOAD_FOLDER'] = DOWNLOAD_FOLDER
31 |
32 |
33 | from models import Burial, BurialImage, BurialJSONEncoder, \
34 | get_burials, get_burial, add_burial, remove_all_burials, \
35 | get_burial_images, get_burial_image, \
36 | add_burial_image, set_latlng
37 |
38 |
39 | from admin import Admin, BurialModelView, BurialImageModelView
40 |
41 | admin = Admin(app, name='cemetery-map', template_mode='bootstrap3')
42 | admin.add_view(BurialModelView(Burial, db.session))
43 | admin.add_view(BurialImageModelView(BurialImage, db.session))
44 |
45 |
46 | def randstr():
47 | return ''.join(random.choice(string.ascii_uppercase + string.digits)
48 | for _ in range(30))
49 |
50 |
51 | def allowed_image_file(filename):
52 | return '.' in filename and \
53 | filename.rsplit('.', 1)[1] in ALLOWED_IMAGE_EXTENSIONS
54 |
55 |
56 | def allowed_data_file(filename):
57 | return '.' in filename and \
58 | filename.rsplit('.', 1)[1] in ALLOWED_DATA_EXTENSIONS
59 |
60 |
61 | def split_csv_line(line):
62 | buf = ''
63 | cols = []
64 | in_quotes = False
65 | for i in range(0, len(line)):
66 | if line[i] == ',' and not in_quotes:
67 | cols.append(buf)
68 | buf = ''
69 | elif line[i] == '"':
70 | in_quotes = not in_quotes
71 | else:
72 | buf = buf + str(line[i])
73 |
74 | if buf == '\r':
75 | cols.append('')
76 | elif buf != '':
77 | cols.append(buf)
78 |
79 | # The CSV file may not have lat/lng values.
80 | # If not, give it defaults of 0,0.
81 |
82 | while len(cols) < 22:
83 | cols.append(0)
84 |
85 | return cols
86 |
87 |
88 | def is_secure_path(request_path):
89 | '''Determines whether the requested URL path should require secure access
90 | through Google+ OAuth2.
91 | '''
92 | return request_path.startswith('/admin') or request_path == '/api/data' \
93 | or request_path == '/api/add-test-latlng'
94 |
95 |
96 | @app.before_request
97 | def before_request():
98 | '''Checks whether requested URL path requires the user to authenticate
99 | and authorize using OAuth2 through the Google+ API. If the Google
100 | user's email address belongs to a known admin, go ahead and allow
101 | access. Otherwise, emit a 403 Forbidden to the client.
102 |
103 | Prior to using the Google+ API, the Web developer must have used
104 | the Google Developer Console (https://console.developers.google.com)
105 | to 1.) enable the Google+ API, and 2.) create an OAuth 2 client
106 | ID & secret and make them available through app config.
107 | '''
108 | if is_secure_path(request.path):
109 | if 'credentials' not in session:
110 | return redirect(url_for('oauth2callback'))
111 | credentials = client.OAuth2Credentials.from_json(session['credentials'])
112 | if credentials.access_token_expired:
113 | return redirect(url_for('oauth2callback'))
114 | else:
115 | http_auth = credentials.authorize(httplib2.Http())
116 | plus_service = discovery.build('plus', 'v1', http=http_auth)
117 | person = plus_service.people().get(userId='me', fields='emails')\
118 | .execute()
119 | email = person['emails'][0]['value']
120 | if email not in app.config['GOOGLE_ADMIN_EMAIL_LIST']:
121 | abort(403)
122 |
123 |
124 | @app.route('/oauth2callback')
125 | def oauth2callback():
126 | flow = client.OAuth2WebServerFlow(
127 | client_id=app.config['GOOGLE_CLIENT_ID'],
128 | client_secret=app.config['GOOGLE_CLIENT_SECRET'],
129 | scope='https://www.googleapis.com/auth/plus.login ' +
130 | 'https://www.googleapis.com/auth/userinfo.email',
131 | redirect_uri=url_for('oauth2callback', _external=True))
132 | if 'code' not in request.args:
133 | auth_uri = flow.step1_get_authorize_url()
134 | return redirect(auth_uri)
135 | else:
136 | auth_code = request.args.get('code')
137 | credentials = flow.step2_exchange(auth_code)
138 | session['credentials'] = credentials.to_json()
139 | return redirect(url_for('admin.index'))
140 |
141 |
142 | @app.route('/')
143 | def index():
144 | '''Downloads the initial map page.
145 | '''
146 | return render_template('index.html',
147 | maps_key=app.config['GOOGLE_MAPS_KEY'])
148 |
149 |
150 | @app.route('/headstones/<int:burial_id>', methods=['GET'])
151 | def images_iframe_content(burial_id):
152 | '''Returns an HTML snippet containing all headstone images for the given
153 | burial_id, or a single 'no image' image if the burial_id has no
154 | associated headstone images. This URL is referenced in
155 | static/js/map.js.
156 | '''
157 | html = ''
158 | burial_images = get_burial_images(burial_id)
159 | if len(burial_images) == 0:
160 | return '<img src="' \
161 | + url_for('no_image') + '"><br/>'
162 | for bi in burial_images:
163 | html += '<img src="' \
164 | + url_for('download_image',
165 | burial_id=burial_id,
166 | image_id=bi.id) + '"><br/>'
167 | return html
168 |
169 |
170 | @app.route('/api/search', methods=['GET', 'POST'])
171 | def search():
172 | '''Returns a JSON list of matching burials or an error string on failure.
173 | If no form key/value pairs are specified, *ALL* burials are returned.
174 | This includes purchased plots that do not yet have a burial.
175 | '''
176 | try:
177 | js = json.dumps(get_burials(request.form), cls=BurialJSONEncoder)
178 | resp = Response(js, status=200, mimetype='application/json')
179 | return resp
180 | except Exception as e:
181 | print('Error: {}'.format(str(e)))
182 | return ERR_GENERAL
183 |
184 |
185 | @app.route('/api/headstone/<int:burial_id>/<int:image_id>', methods=['GET'])
186 | def download_image(burial_id, image_id):
187 | '''Retrieves image corresponding to the burial ID provided in the URL.
188 | This URL will most likely be specified in HTML as the 'src' attribute
189 | of an 'img' tag.
190 | '''
191 | target = app.config['HS_IMAGE_TARGET']
192 | bi = get_burial_image(image_id)
193 | if bi is None:
194 | abort(404)
195 | elif target == 'file':
196 | return redirect(
197 | os.path.join(app.config['UPLOAD_FOLDER'], bi.filename), code=302)
198 | elif target == 'db':
199 | return app.response_class(bi.data, mimetype='application/octet-stream')
200 |
201 |
202 | @app.route('/api/headstone/none', methods=['GET'])
203 | def no_image():
204 | '''URL referenced by images_iframe_content() if a burial has no
205 | headstone images associated with it.
206 | '''
207 | return redirect(
208 | os.path.join(app.config['UPLOAD_FOLDER'], 'no-image.png'),
209 | code=302)
210 |
211 |
212 | @app.route('/api/headstones/<int:burial_id>', methods=['GET'])
213 | def get_headstone_ids(burial_id):
214 | '''Returns a JSON list of BurialImage ID's for the specified Burial ID.
215 | '''
216 | try:
217 | bis = get_burial_images(burial_id)
218 | bids = [bi.id for bi in bis]
219 | js = json.dumps(bids)
220 | resp = Response(js, status=200, mimetype='application/json')
221 | return resp
222 | except Exception as e:
223 | print('Error: {}'.format(str(e)))
224 | return ERR_GENERAL
225 |
226 |
227 | @app.route('/api/headstone/<int:burial_id>', methods=['POST'])
228 | def upload_image(burial_id):
229 | '''Given an HTML form with enctype=multipart/form-data and an input
230 | type=file, this REST endpoint places a headstone image file into
231 | the upload folder UPLOAD_FOLDER and then updates the database
232 | with the new filename.
233 |
234 | This function is typically called directly by another route, such as
235 | POST /api/update-burial.
236 | '''
237 |
238 | if not get_burial(burial_id):
239 | return ERR_NO_SUCH_BURIAL
240 |
241 | try:
242 | if 'file' not in request.files:
243 | return ERR_NO_FILE_SPECIFIED
244 | infile = request.files['file']
245 | if infile.filename == '':
246 | return ERR_NO_FILE_SPECIFIED
247 |
248 | target = app.config['HS_IMAGE_TARGET']
249 | if infile and allowed_image_file(infile.filename):
250 | filename = secure_filename(infile.filename)
251 | suffix = filename[filename.rindex('.'):]
252 | filename = 'hs-' + randstr() + '-' + str(burial_id) + suffix
253 | filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
254 |
255 | add_burial_image(
256 | burial_id,
257 | filename if target == 'file' else None,
258 | infile.read() if target == 'db' else None)
259 |
260 | if target == 'file':
261 | infile.save(filepath)
262 | else:
263 | return ERR_NOT_IMAGE
264 | except Exception as e:
265 | print('Error: {}'.format(str(e)))
266 | return ERR_GENERAL
267 |
268 | return 'ok'
269 |
270 |
271 | @app.route('/api/data', methods=['GET'])
272 | def database_download():
273 | '''Retrieves a CSV file containing all database data. The filename
274 | will contain the date on which the request was completed.
275 |
276 | In a future version, this REST endpoint will retrieve a
277 | ZIP file containing a CSV of all database data
278 | and all headstone image files.
279 | '''
280 |
281 | if not os.path.isdir(app.config['DOWNLOAD_FOLDER']):
282 | os.mkdir(app.config['DOWNLOAD_FOLDER'])
283 |
284 | filename = 'cemdb-'+strftime('%Y%m%d-%H%M%S', gmtime())+'.csv'
285 | pathname = os.path.join(app.config['DOWNLOAD_FOLDER'], filename)
286 |
287 | with open(pathname, 'w') as csv_file:
288 | csv_file.write(
289 | 'id,sd_type,sd,lot,space,lot_owner,year_purch,first_name,' +
290 | 'last_name,sex,birth_date,birth_place,death_date,age,' +
291 | 'death_place,death_cause,burial_date,notes,more_notes,' +
292 | 'hidden_notes,lat,lng\n')
293 |
294 | burials = get_burials()
295 |
296 | for burial in burials:
297 | csv_file.write(str(burial.id)+',')
298 | csv_file.write('"'+burial.sd_type+'",')
299 | csv_file.write('"'+burial.sd+'",')
300 | csv_file.write('"'+burial.lot+'",')
301 | csv_file.write('"'+burial.space+'",')
302 | csv_file.write('"'+burial.lot_owner+'",')
303 | csv_file.write('"'+burial.year_purch+'",')
304 | csv_file.write('"'+burial.first_name+'",')
305 | csv_file.write('"'+burial.last_name+'",')
306 | csv_file.write('"'+burial.sex+'",')
307 | csv_file.write('"'+burial.birth_date+'",')
308 | csv_file.write('"'+burial.birth_place+'",')
309 | csv_file.write('"'+burial.death_date+'",')
310 | csv_file.write('"'+burial.age+'",')
311 | csv_file.write('"'+burial.death_place+'",')
312 | csv_file.write('"'+burial.death_cause+'",')
313 | csv_file.write('"'+burial.burial_date+'",')
314 | csv_file.write('"'+burial.notes+'",')
315 | csv_file.write('"'+burial.more_notes+'",')
316 | csv_file.write('"'+burial.hidden_notes+'",')
317 | csv_file.write(str(burial.lat)+',')
318 | csv_file.write(str(burial.lng)+'\n')
319 |
320 | return redirect(pathname, code=302)
321 |
322 |
323 | @app.route('/api/data', methods=['DELETE'])
324 | def database_nuke():
325 | '''Nukes all data in the DB. This route is only available in
326 | Development, not in Test or Production.
327 | '''
328 | if 'DEVELOPMENT' in app.config:
329 | remove_all_burials()
330 | return 'ok'
331 | else:
332 | abort(404)
333 |
334 |
335 | @app.route('/api/data', methods=['POST'])
336 | def database_upload():
337 | '''Reloads all application data from a CSV file. CSV file should be
338 | sent as form-data using the key 'file'.
339 |
340 | In a future version, this REST endpoint will reload all
341 | application data from a ZIP file containing
342 | a CSV of all database data and all headstone images.
343 | '''
344 |
345 | if 'file' not in request.files:
346 | return ERR_NO_FILE_SPECIFIED
347 |
348 | file = request.files['file']
349 | if file.filename == '':
350 | return ERR_NO_FILE_SPECIFIED
351 |
352 | if file and allowed_data_file(file.filename):
353 | filename = secure_filename(file.filename)
354 | file.save(filename)
355 |
356 | remove_all_burials()
357 |
358 | import codecs
359 | with codecs.open(filename, 'r', encoding='utf-8',
360 | errors='ignore') as csv_file:
361 |
362 | # Assume there's a header line and ignore it.
363 | lines = csv_file.readlines()[1:]
364 |
365 | # Add a burial DB row for each line in the CSV file.
366 | # ID columns in the CSV file are ignored.
367 | for line in lines:
368 | col_values = split_csv_line(line)
369 | add_burial({
370 | 'sd_type': col_values[1],
371 | 'sd': col_values[2],
372 | 'lot': col_values[3],
373 | 'space': col_values[4],
374 | 'lot_owner': col_values[5],
375 | 'year_purch': col_values[6],
376 | 'first_name': col_values[7],
377 | 'last_name': col_values[8],
378 | 'sex': col_values[9],
379 | 'birth_date': col_values[10],
380 | 'birth_place': col_values[11],
381 | 'death_date': col_values[12],
382 | 'age': col_values[13],
383 | 'death_place': col_values[14],
384 | 'death_cause': col_values[15],
385 | 'burial_date': col_values[16],
386 | 'notes': col_values[17],
387 | 'more_notes': col_values[18],
388 | 'hidden_notes': col_values[19],
389 | 'lat': col_values[20],
390 | 'lng': col_values[21],
391 | })
392 |
393 | return 'ok - %d burials loaded' % len(lines)
394 |
395 |
396 | @app.route('/api/data/headstones', methods=['GET'])
397 | def download_images():
398 | '''Retrieves a ZIP file containing all headstone images in the database.
399 | This REST endpoint accomplishes this by staging the image files into a
400 | directory, zipping the staging directory into a ZIP file, removing the
401 | staging directory, and then finally redirecting to the ZIP file.
402 | '''
403 | if not os.path.isdir(app.config['DOWNLOAD_FOLDER']):
404 | os.mkdir(app.config['DOWNLOAD_FOLDER'])
405 |
406 | imgdirname = 'headstones-' + strftime('%Y%m%d-%H%M%S', gmtime())
407 | imgdirpath = os.path.join(app.config['DOWNLOAD_FOLDER'], imgdirname)
408 | os.mkdir(imgdirpath)
409 |
410 | target = app.config['HS_IMAGE_TARGET']
411 | bis = get_burial_images()
412 | for bi in bis:
413 | if target == 'file':
414 | srcpath = os.path.join(app.config['UPLOAD_FOLDER'], bi.filename)
415 | destpath = os.path.join(imgdirpath, bi.filename)
416 | shutil.copyfile(srcpath, destpath)
417 | elif target == 'db':
418 | destpath = os.path.join(imgdirpath, str(bi.id))
419 | with open(destpath, 'wb') as imgf:
420 | imgf.write(bi.data)
421 |
422 | zippath = imgdirpath + '.zip'
423 | zipf = zipfile.ZipFile(zippath, 'w', zipfile.ZIP_DEFLATED)
424 | for root, dirs, files in os.walk(imgdirpath):
425 | for f in files:
426 | zipf.write(os.path.join(root, f))
427 |
428 | shutil.rmtree(imgdirpath)
429 | return redirect(zippath, code=302)
430 |
431 |
432 | @app.route('/api/burial-summary', methods=['GET'])
433 | def burial_summary():
434 | '''This REST endpoint is used by the Android camera app 'cemetery-cam'
435 | to retrieve a subset of burial information for all burials
436 | in the cemetery. This subset is represented by a JSON array of objects
437 |
438 | {
439 | id: ID,
440 | first_name: FNAME,
441 | last_name: LNAME,
442 | birth_date: BDATE,
443 | death_date: DDATE
444 | }
445 |
446 | where the CAPS strings represent the actual values returned. Only actual
447 | burials are returned. Plots without an actual burial are excluded from
448 | the returned list. Callers can expect the burials to be alphabetized by
449 | last_name.
450 |
451 | This information is used by the camera app to select a burial prior to
452 | filling in its headstone photo and latitude/longitude. The headstone photo
453 | and latitude/longitude get uploaded using the POST /api/update-burial REST
454 | endpoint.
455 | '''
456 | try:
457 | burials = get_burials()
458 | burials_less = []
459 | for burial in burials:
460 | burials_less.append({
461 | 'id': burial.id,
462 | 'first_name': burial.first_name,
463 | 'last_name': burial.last_name,
464 | 'birth_date': burial.birth_date,
465 | 'death_date': burial.death_date,
466 | })
467 |
468 | burials_less = sorted(
469 | list(filter(lambda b: b['last_name'] != "", burials_less)),
470 | key=lambda b: b['last_name'])
471 |
472 | js = json.dumps(burials_less, cls=BurialJSONEncoder)
473 | resp = Response(js, status=200, mimetype='application/json')
474 | return resp
475 | except Exception as e:
476 | return ERR_GENERAL
477 |
478 |
479 | @app.route('/api/update-burial', methods=['POST'])
480 | def update_burial():
481 | '''This REST endpoint is used by the Android camera app 'cemetery-cam'
482 | to update the latitude, longitude, and headstone image given a certain
483 | burial ID.
484 | '''
485 | set_latlng(request.form['id'], request.form['lat'], request.form['lng'])
486 | upload_image(request.form['id'])
487 | return 'ok'
488 |
489 |
490 | from models import make_dummy_data
491 |
492 |
493 | @app.route('/api/add-test-latlng', methods=['GET', 'POST'])
494 | def add_test_data():
495 | make_dummy_data()
496 | return 'ok'
497 |
498 |
499 | if __name__ == '__main__':
500 | print('Using environment', os.environ['APP_SETTINGS'])
501 | app.run()
502 |
--------------------------------------------------------------------------------