├── test ├── __init__.py ├── data │ ├── sample.csv │ ├── sample.bibtex │ ├── sampleutf8.bibtex │ ├── bulk_upload.json │ ├── sample.bibtex.bibjson │ └── sample.json ├── base.py ├── fixtures.json ├── test_importer.py ├── test_dao.py └── test_web.py ├── bibserver ├── __init__.py ├── view │ ├── __init__.py │ └── account.py ├── static │ ├── vendor │ │ ├── facetview │ │ │ ├── vendor │ │ │ │ ├── d3 │ │ │ │ │ ├── .gitmodules │ │ │ │ │ ├── .gitignore │ │ │ │ │ ├── package.json │ │ │ │ │ ├── d3.csv.min.js │ │ │ │ │ ├── LICENSE │ │ │ │ │ ├── d3.csv.js │ │ │ │ │ ├── Makefile │ │ │ │ │ └── d3.geom.min.js │ │ │ │ ├── bootstrap │ │ │ │ │ └── img │ │ │ │ │ │ ├── glyphicons-halflings.png │ │ │ │ │ │ └── glyphicons-halflings-white.png │ │ │ │ ├── jquery-ui-1.8.18.custom │ │ │ │ │ └── images │ │ │ │ │ │ ├── ui-icons_222222_256x240.png │ │ │ │ │ │ ├── ui-icons_2e83ff_256x240.png │ │ │ │ │ │ ├── ui-icons_454545_256x240.png │ │ │ │ │ │ ├── ui-icons_888888_256x240.png │ │ │ │ │ │ ├── ui-icons_cd0a0a_256x240.png │ │ │ │ │ │ ├── ui-bg_flat_0_aaaaaa_40x100.png │ │ │ │ │ │ ├── ui-bg_glass_55_fbf9ee_1x400.png │ │ │ │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png │ │ │ │ │ │ ├── ui-bg_glass_75_dadada_1x400.png │ │ │ │ │ │ ├── ui-bg_glass_75_e6e6e6_1x400.png │ │ │ │ │ │ ├── ui-bg_glass_75_ffffff_1x400.png │ │ │ │ │ │ ├── ui-bg_inset-soft_95_fef1ec_1x100.png │ │ │ │ │ │ └── ui-bg_highlight-soft_75_cccccc_1x100.png │ │ │ │ └── linkify │ │ │ │ │ └── 1.0 │ │ │ │ │ ├── plugins │ │ │ │ │ ├── jquery.linkify-1.0-twitter-min.js │ │ │ │ │ └── jquery.linkify-1.0-twitter.js │ │ │ │ │ ├── jquery.linkify-1.0-min.js │ │ │ │ │ └── jquery.linkify-1.0.js │ │ │ ├── css │ │ │ │ ├── style.css │ │ │ │ └── facetview.css │ │ │ ├── simple.html │ │ │ ├── local.html │ │ │ ├── index.html │ │ │ └── README.rst │ │ ├── bootstrap │ │ │ └── img │ │ │ │ ├── glyphicons-halflings.png │ │ │ │ └── glyphicons-halflings-white.png │ │ ├── jtedit │ │ │ ├── vendor │ │ │ │ ├── bootstrap │ │ │ │ │ └── img │ │ │ │ │ │ ├── glyphicons-halflings.png │ │ │ │ │ │ └── glyphicons-halflings-white.png │ │ │ │ ├── jquery-ui │ │ │ │ │ └── images │ │ │ │ │ │ ├── ui-icons_222222_256x240.png │ │ │ │ │ │ ├── ui-icons_228ef1_256x240.png │ │ │ │ │ │ ├── ui-icons_ef8c08_256x240.png │ │ │ │ │ │ ├── ui-icons_ffd27a_256x240.png │ │ │ │ │ │ ├── ui-icons_ffffff_256x240.png │ │ │ │ │ │ ├── ui-bg_flat_10_000000_40x100.png │ │ │ │ │ │ ├── ui-bg_glass_100_f6f6f6_1x400.png │ │ │ │ │ │ ├── ui-bg_glass_100_fdf5ce_1x400.png │ │ │ │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png │ │ │ │ │ │ ├── ui-bg_gloss-wave_35_f6a828_500x100.png │ │ │ │ │ │ ├── ui-bg_highlight-soft_75_ffe45c_1x100.png │ │ │ │ │ │ ├── ui-bg_diagonals-thick_18_b81900_40x40.png │ │ │ │ │ │ ├── ui-bg_diagonals-thick_20_666666_40x40.png │ │ │ │ │ │ └── ui-bg_highlight-soft_100_eeeeee_1x100.png │ │ │ │ └── jquery.autoResize.js │ │ │ ├── README │ │ │ ├── index.html │ │ │ └── jtedit.css │ │ └── jquery-ui-1.8.18.custom │ │ │ └── images │ │ │ ├── ui-icons_222222_256x240.png │ │ │ ├── ui-icons_2e83ff_256x240.png │ │ │ ├── ui-icons_454545_256x240.png │ │ │ ├── ui-icons_888888_256x240.png │ │ │ ├── ui-icons_cd0a0a_256x240.png │ │ │ ├── ui-bg_flat_0_aaaaaa_40x100.png │ │ │ ├── ui-bg_glass_55_fbf9ee_1x400.png │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png │ │ │ ├── ui-bg_glass_75_dadada_1x400.png │ │ │ ├── ui-bg_glass_75_e6e6e6_1x400.png │ │ │ ├── ui-bg_glass_75_ffffff_1x400.png │ │ │ ├── ui-bg_inset-soft_95_fef1ec_1x100.png │ │ │ └── ui-bg_highlight-soft_75_cccccc_1x100.png │ ├── opendata.png │ ├── openservice.png │ └── css │ │ └── bibserver.css ├── auth │ 
├── __init__.py │ ├── user.py │ └── collection.py ├── default_settings.py ├── templates │ ├── disabled.html │ ├── _formhelpers.html │ ├── collection │ │ └── index.html │ ├── account │ │ ├── users.html │ │ ├── login.html │ │ ├── register.html │ │ └── view.html │ ├── tickets │ │ └── view.html │ ├── create.html │ ├── home │ │ ├── faq.html │ │ └── index.html │ ├── upload.html │ ├── base.html │ └── record.html ├── core.py ├── config.py ├── util.py └── importer.py ├── requirements.test.txt ├── requirements.txt ├── .gitignore ├── doc ├── bibjson.rst ├── auth.rst ├── licenses.rst ├── frontend.rst ├── config.rst ├── index.rst ├── install.rst ├── upload.rst ├── parsers.rst ├── api.rst ├── deploy.rst └── Makefile ├── parserscrapers_plugins ├── bibjson.py ├── plugins.json ├── csvparser.py ├── JSONParser.py ├── RISParser.py ├── wikipedia.py └── NLMXMLParser.py ├── setup.py ├── LICENSE ├── tox.ini ├── .travis.yml ├── README.rst └── cli.py /test/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /bibserver/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /bibserver/view/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /requirements.test.txt: -------------------------------------------------------------------------------- 1 | nose 2 | nose-cov -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/.gitmodules: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /bibserver/auth/__init__.py: -------------------------------------------------------------------------------- 1 | import collection 2 | import user 3 | -------------------------------------------------------------------------------- /bibserver/default_settings.py: -------------------------------------------------------------------------------- 1 | SECRET_KEY = 'default-key' 2 | 3 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | --index-url https://pypi.python.org/simple/ 2 | 3 | -e . 
4 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/.gitignore: -------------------------------------------------------------------------------- 1 | _site 2 | node_modules 3 | -------------------------------------------------------------------------------- /bibserver/static/opendata.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/opendata.png -------------------------------------------------------------------------------- /bibserver/static/openservice.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/openservice.png -------------------------------------------------------------------------------- /bibserver/static/vendor/bootstrap/img/glyphicons-halflings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/bootstrap/img/glyphicons-halflings.png -------------------------------------------------------------------------------- /bibserver/static/vendor/bootstrap/img/glyphicons-halflings-white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/bootstrap/img/glyphicons-halflings-white.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/bootstrap/img/glyphicons-halflings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/bootstrap/img/glyphicons-halflings.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/bootstrap/img/glyphicons-halflings.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/bootstrap/img/glyphicons-halflings.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_222222_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_222222_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_2e83ff_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_2e83ff_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_454545_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_454545_256x240.png 
-------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_888888_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_888888_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_cd0a0a_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-icons_cd0a0a_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/README: -------------------------------------------------------------------------------- 1 | JTEDIT is a jquery plugin for prettily displaying JSON objects 2 | and allowing edit of them 3 | 4 | created by Mark MacGillivray - mark@cottagelabs.com 5 | 6 | Docs are on the way... 7 | -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/bootstrap/img/glyphicons-halflings-white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/bootstrap/img/glyphicons-halflings-white.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_222222_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_222222_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_228ef1_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_228ef1_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_ef8c08_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_ef8c08_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_ffd27a_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_ffd27a_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_ffffff_256x240.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-icons_ffffff_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/bootstrap/img/glyphicons-halflings-white.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/bootstrap/img/glyphicons-halflings-white.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_flat_0_aaaaaa_40x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_flat_0_aaaaaa_40x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_55_fbf9ee_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_55_fbf9ee_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_65_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_65_ffffff_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_dadada_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_dadada_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_e6e6e6_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_e6e6e6_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_ffffff_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_flat_10_000000_40x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_flat_10_000000_40x100.png -------------------------------------------------------------------------------- 
/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_glass_100_f6f6f6_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_glass_100_f6f6f6_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_glass_100_fdf5ce_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_glass_100_fdf5ce_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_glass_65_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_glass_65_ffffff_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_inset-soft_95_fef1ec_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_inset-soft_95_fef1ec_1x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_highlight-soft_75_cccccc_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jquery-ui-1.8.18.custom/images/ui-bg_highlight-soft_75_cccccc_1x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_gloss-wave_35_f6a828_500x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_gloss-wave_35_f6a828_500x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_highlight-soft_75_ffe45c_1x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_diagonals-thick_18_b81900_40x40.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_diagonals-thick_18_b81900_40x40.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_diagonals-thick_20_666666_40x40.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_diagonals-thick_20_666666_40x40.png -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/jtedit/vendor/jquery-ui/images/ui-bg_highlight-soft_100_eeeeee_1x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_222222_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_222222_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_2e83ff_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_2e83ff_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_454545_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_454545_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_888888_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_888888_256x240.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_cd0a0a_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-icons_cd0a0a_256x240.png -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .env 2 | *.pyc 3 | store/* 4 | *.egg-info/* 5 | .*.swp 6 | sandbox/* 7 | src* 8 | .DS_Store 9 | .Python 10 | doc/_build/* 11 | download_cache/* 12 | bibjson_cache/* 13 | local_config.json 14 | bin/* 15 | ingest.pid 16 | /.tox 17 | /.coverage 18 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_flat_0_aaaaaa_40x100.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_flat_0_aaaaaa_40x100.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_55_fbf9ee_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_55_fbf9ee_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_65_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_65_ffffff_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_dadada_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_dadada_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_e6e6e6_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_e6e6e6_1x400.png -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_glass_75_ffffff_1x400.png -------------------------------------------------------------------------------- /test/data/sample.csv: -------------------------------------------------------------------------------- 1 | "bibtype","citekey","title","author","year","eprint","subject" 2 | "misc","arXiv:0807.3308","Visibility to infinity in the hyperbolic plane, despite obstacles","Itai Benjamini,Johan Jonasson,Oded Schramm,Johan Tykesson","2008","arXiv:0807.3308","sle" 3 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_inset-soft_95_fef1ec_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_inset-soft_95_fef1ec_1x100.png -------------------------------------------------------------------------------- /doc/bibjson.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | About bibJSON 3 | ============= 4 | 5 | To Do: 6 | 7 | - Brief detail of bibjson and link to 
http://okfnlabs.org/bibjson/ for more info. 8 | - Also link to open biblio principles. 9 | - Also mention the licenses documentation page. 10 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_highlight-soft_75_cccccc_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/rufuspollock-okfn/bibserver/HEAD/bibserver/static/vendor/facetview/vendor/jquery-ui-1.8.18.custom/images/ui-bg_highlight-soft_75_cccccc_1x100.png -------------------------------------------------------------------------------- /test/data/sample.bibtex: -------------------------------------------------------------------------------- 1 | @misc{arXiv:0807.3308, 2 | title = {Visibility to infinity in the hyperbolic plane, despite obstacles}, 3 | author = {Itai Benjamini and Johan Jonasson and Oded Schramm and Johan Tykesson}, 4 | year = {2008}, 5 | eprint = {arXiv:0807.3308}, 6 | subject = {sle}, 7 | } 8 | -------------------------------------------------------------------------------- /bibserver/templates/disabled.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 | 6 | Not enabled 7 | Sorry, that functionality is not available. 8 | 9 |
10 | {% endblock %} 11 | 12 | -------------------------------------------------------------------------------- /test/data/sampleutf8.bibtex: -------------------------------------------------------------------------------- 1 | @article{pohl-2011-interview, 2 | author = {Matzat, Lorenz and Pohl, Adrian}, 3 | title = {“Bibliotheken für öffnen”}, 4 | publisher = {Zur Konzeption und Implementierung einer Infrastruktur für freie bibliographische Daten}, 5 | url = {http://blog.zeit.de/open-data/2011/11/08/open-bibliographic-data/}, 6 | year = 2011 7 | } 8 | -------------------------------------------------------------------------------- /test/data/bulk_upload.json: -------------------------------------------------------------------------------- 1 | { 2 | "collections" : [ 3 | { 4 | "source" : "data/sample.bibtex", 5 | "format" : "bibtex", 6 | "collection" : "coll1" 7 | }, 8 | { 9 | "source" : "data/sample.bibtex", 10 | "format" : "bibtex", 11 | "collection" : "coll2" 12 | } 13 | ] 14 | } 15 | -------------------------------------------------------------------------------- /bibserver/templates/_formhelpers.html: -------------------------------------------------------------------------------- 1 | {% macro render_field(field) %} 2 | {{ field.label }} 3 |
4 | {{ field(**kwargs) }} 5 | {% if field.errors %} 6 | 11 | {% endif %} 12 |
13 | {% endmacro %} 14 | 15 | -------------------------------------------------------------------------------- /bibserver/auth/user.py: -------------------------------------------------------------------------------- 1 | from bibserver.core import current_user 2 | from bibserver.config import config 3 | 4 | def update(account, user): 5 | allowed = not account.is_anonymous and user.id == account.id 6 | if not account.is_anonymous: 7 | if account.id in config['super_user']: 8 | allowed = True 9 | return allowed 10 | 11 | def is_super(account): 12 | return not account.is_anonymous and account.id in config['super_user'] 13 | -------------------------------------------------------------------------------- /bibserver/templates/collection/index.html: -------------------------------------------------------------------------------- 1 | {% extends "/base.html" %} 2 | 3 | {% block content %} 4 | 5 | 10 | 11 |
12 | All collections 13 | 14 | 15 | 16 | 17 |
18 | 19 | {% endblock %} 20 | 21 | 22 | -------------------------------------------------------------------------------- /test/data/sample.bibtex.bibjson: -------------------------------------------------------------------------------- 1 | {"records": [{"author": [{"name": "Benjamini, Itai", "id": "BenjaminiItai"}, {"name": "Jonasson, Johan", "id": "JonassonJohan"}, {"name": "Schramm, Oded", "id": "SchrammOded"}, {"name": "Tykesson, Johan", "id": "TykessonJohan"}], "title": "Visibility to infinity in the hyperbolic plane, despite obstacles", "eprint": "arXiv:0807.3308", "year": "2008", "type": "misc", "id": "arXiv:0807.3308", "subject": {"name": "sle", "id": "sle"}}], "metadata": {}} -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/linkify/1.0/plugins/jquery.linkify-1.0-twitter-min.js: -------------------------------------------------------------------------------- 1 | // Twitter plugin for jQuery.fn.linkify() 1.0 - MIT/GPL Licensed 2 | jQuery.extend(jQuery.fn.linkify.plugins,{twitterUser:{re:/(^|["'(]|<|\s)@([a-z0-9_-]+)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/gi,tmpl:'$1@$2$3'},twitterHashtag:{re:/(^|["'(]|<|\s)(#.+?)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/gi,tmpl:function(d,b,a,c){return b+''+a+''+c}}}); 3 | -------------------------------------------------------------------------------- /bibserver/templates/account/users.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 | 5 | {% for user in users %} 6 | 7 |
8 | {{ user._id }} 9 | {{ user.description}} 10 | created : {{ user._created }} collections:{{ user.collections }} 11 |
12 | 13 | {% endfor %} 14 | 15 |
16 | 17 | {% endblock %} 18 | 19 | -------------------------------------------------------------------------------- /bibserver/static/css/bibserver.css: -------------------------------------------------------------------------------- 1 | body { 2 | padding-top: 40px; 3 | } 4 | 5 | .content { 6 | margin-top: 20px; 7 | } 8 | 9 | form{ 10 | display:inline; 11 | } 12 | 13 | footer { 14 | border-top:2px solid #ccc; 15 | margin-top:50px; 16 | padding-top:30px; 17 | } 18 | 19 | div.alert-messages { 20 | margin-top: 15px; 21 | } 22 | 23 | span.upload_label{ 24 | width:150px; 25 | display:block; 26 | float:left; 27 | padding:5px 10px 0 0; 28 | text-align:right; 29 | } 30 | 31 | #adminarea{ 32 | display:none; 33 | } 34 | #shareembeddets{ 35 | display:none; 36 | } 37 | -------------------------------------------------------------------------------- /doc/auth.rst: -------------------------------------------------------------------------------- 1 | ============================ 2 | Authorisation and superusers 3 | ============================ 4 | 5 | 6 | Users 7 | ===== 8 | 9 | Explain how users create accounts and login, and how they get and can use their 10 | API key - this is just an overview as the details should be in the API pages and 11 | so on. 12 | 13 | Also point out that users can add other users to their collection as admins - 14 | giving them the same controls as the owner has. 15 | 16 | 17 | Superusers 18 | ========== 19 | 20 | Refer to config settings for how superusers can be listed, and how a superuser 21 | can go to any collection or user page and edit them. 22 | -------------------------------------------------------------------------------- /bibserver/auth/collection.py: -------------------------------------------------------------------------------- 1 | from bibserver.core import current_user 2 | from bibserver.config import config 3 | 4 | def read(account, collection): 5 | return True 6 | 7 | def update(account, collection): 8 | allowed = not account.is_anonymous and collection["owner"] == account.id 9 | if not account.is_anonymous: 10 | try: 11 | if account.id in collection['_admins']: 12 | allowed = True 13 | except: 14 | pass 15 | if account.id in config['super_user']: 16 | allowed = True 17 | return allowed 18 | 19 | def create(account, collection): 20 | return not account.is_anonymous 21 | 22 | -------------------------------------------------------------------------------- /doc/licenses.rst: -------------------------------------------------------------------------------- 1 | ================ 2 | Licensing issues 3 | ================ 4 | 5 | 6 | Software 7 | ======== 8 | 9 | Bibserver is open source MIT. It relies on other open source packages. 10 | 11 | 12 | 13 | Metadata 14 | ======== 15 | 16 | Bibserver is a tool that enables easy sharing of bibliographic metadata. It 17 | uses the bibJSON format, and converts from other typical formats into bibJSON. 18 | 19 | Mention open biblio principles, and refer to bibjson docs page. Point out that 20 | content uploaded should be freely available, but licenses can be attached to 21 | collections if desired. 22 | 23 | Typically, public domain / open access should be licensed as such. 
24 | 25 | 26 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "d3", 3 | "version": "2.3.2", 4 | "description": "A small, free JavaScript library for manipulating documents based on data.", 5 | "keywords": [ 6 | "dom", 7 | "w3c", 8 | "visualization", 9 | "svg", 10 | "animation", 11 | "canvas" 12 | ], 13 | "homepage": "http://mbostock.github.com/d3/", 14 | "author": { 15 | "name": "Mike Bostock", 16 | "url": "http://bost.ocks.org/mike" 17 | }, 18 | "repository": { 19 | "type": "git", 20 | "url": "http://github.com/mbostock/d3.git" 21 | }, 22 | "dependencies": { 23 | "uglify-js": "1.0.6", 24 | "jsdom": "0.2.3", 25 | "vows": "0.5.10" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /doc/frontend.rst: -------------------------------------------------------------------------------- 1 | ====================== 2 | The Bibserver frontend 3 | ====================== 4 | 5 | 6 | The standard pages 7 | ================== 8 | 9 | Explain the various standard pages, how they are built from the templates, 10 | how they are presented via flask to the end user, what they typically have 11 | on them 12 | 13 | 14 | Facetview 15 | ========= 16 | 17 | Explain in more detail the facetview javascript / jquery app, and how it 18 | embeds onto various pages and its functionality 19 | 20 | 21 | Embedding remotely 22 | ================== 23 | 24 | Explain how facetview can be embedded in a remote web page and still call back 25 | to a bibserver instance, enabling display of search results in any web page. 26 | -------------------------------------------------------------------------------- /bibserver/templates/account/login.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 | 5 |
6 | 7 | 8 | 9 | 10 | {% from "_formhelpers.html" import render_field %} 11 | 12 | {{ render_field(form.username, placeholder="your username") }} 13 | {{ render_field(form.password, placeholder="********") }} 14 | 15 | 16 | 17 | 18 | No account yet? Sign up now » 19 | 20 |
21 | {% endblock %} 22 | 23 | -------------------------------------------------------------------------------- /test/base.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | from bibserver import dao 5 | from bibserver.config import config 6 | 7 | TESTDB = 'bibserver-test' 8 | 9 | here = os.path.dirname(__file__) 10 | fixtures_path = os.path.join(here, 'fixtures.json') 11 | fixtures = json.load(open(fixtures_path)) 12 | 13 | config["ELASTIC_SEARCH_DB"] = TESTDB 14 | dao.init_db() 15 | 16 | 17 | class Fixtures(object): 18 | raw = fixtures 19 | 20 | @classmethod 21 | def create_account(cls): 22 | accountdict = dict(fixtures['accounts'][0]) 23 | pw = accountdict['password_raw'] 24 | del accountdict['password_raw'] 25 | cls.account = dao.Account(**accountdict) 26 | cls.account.set_password(pw) 27 | cls.account.save() 28 | 29 | __all__ = ['config', 'fixtures', 'Fixtures', 'dao', 'TESTDB', 'json'] 30 | 31 | -------------------------------------------------------------------------------- /parserscrapers_plugins/bibjson.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | BibJSON identity parser. 5 | Reads a valid BibJSON input on stdin, parses it as a JSON file. 6 | Performs some basic validation, and outputs the serialised BibJSON on stdout. 7 | ''' 8 | 9 | import sys 10 | import json 11 | 12 | 13 | def parse(): 14 | data = sys.stdin.read() 15 | data_json = json.loads(data) 16 | sys.stdout.write(json.dumps(data_json, indent=2)) 17 | 18 | 19 | def main(): 20 | conf = {"display_name": "BibJSON", 21 | "format": "jsoncheck", 22 | "contact": "openbiblio-dev@lists.okfn.org", 23 | "bibserver_plugin": True, 24 | "BibJSON_version": "0.81"} 25 | for x in sys.argv[1:]: 26 | if x == '-bibserver': 27 | sys.stdout.write(json.dumps(conf)) 28 | sys.exit() 29 | parse() 30 | 31 | 32 | if __name__ == '__main__': 33 | main() 34 | -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | jtedit 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | setup( 4 | name = 'bibserver', 5 | version = '0.5.1', 6 | packages = find_packages(), 7 | install_requires = [ 8 | "Flask", 9 | "Flask-Login", 10 | "Flask-WTF", 11 | "pyes", 12 | "chardet", 13 | "requests", 14 | ], 15 | url = 'http://bibserver.okfn.org/', 16 | author = 'Open Knowledge Foundation', 17 | author_email = 'openbiblio@okfn.org', 18 | description = 'BibServer is a RESTful bibliographic data server.', 19 | license = 'AGPL', 20 | classifiers = [ 21 | 'Development Status :: 3 - Alpha', 22 | 'Environment :: Console', 23 | 'Intended Audience :: Developers', 24 | 'License :: OSI Approved :: MIT License', 25 | 'Operating System :: OS Independent', 26 | 'Programming Language :: Python', 27 | 'Topic :: Software Development :: Libraries :: Python Modules' 28 | ], 29 | ) 30 | -------------------------------------------------------------------------------- /test/fixtures.json: -------------------------------------------------------------------------------- 1 | { 2 | "accounts": [ 3 
| { 4 | "_id": "tester", 5 | "fullname": "The Tester", 6 | "email": "tester@okfn.org", 7 | "password_raw": "pass" 8 | } 9 | ], 10 | "records": [ 11 | { 12 | "id":"someID", 13 | "type": "book", 14 | "title":"Great Unwritten Collaborations", 15 | "author": [{"name":"Leo Tolstoy"},{"name":"Robert Musil"}], 16 | "journal": {"name":"journal_name"}, 17 | "collection": "great_novels", 18 | "moreinfo":"some more information about this collection", 19 | "owner": "tester", 20 | "ns1:thing":"some thing" 21 | }, 22 | { 23 | "type": "article", 24 | "title":"A mathematical theory of communication", 25 | "author": [{"name":"Claude Shannon"}], 26 | "journal": {"name":"Bell System Technical Journal"}, 27 | "id": "otherid", 28 | "owner": "tester", 29 | "collection": "information_theory" 30 | } 31 | ] 32 | } 33 | 34 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/d3.csv.min.js: -------------------------------------------------------------------------------- 1 | (function(){function b(a){return/[",\n]/.test(a)?'"'+a.replace(/\"/g,'""')+'"':a}function a(a){return a.map(b).join(",")}d3.csv=function(a,b){d3.text(a,"text/csv",function(a){b(a&&d3.csv.parse(a))})},d3.csv.parse=function(a){var b;return d3.csv.parseRows(a,function(a,c){if(c){var d={},e=-1,f=b.length;while(++e=a.length)return d;if(i){i=!1;return c}var b=f.lastIndex;if(a.charCodeAt(b)===34){var e=b;while(e++@$2$3' 19 | }, 20 | twitterHashtag: { 21 | // /\B#\w*[a-zA-Z]+\w*/gi // start at word boundry - must include at least one a-z - ends at word boundry 22 | re: /(^|["'(]|<|\s)(#.+?)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/gi, 23 | tmpl: function (match, pre, hashTag, post) { 24 | return pre+''+hashTag+''+post; 25 | } 26 | } 27 | }); -------------------------------------------------------------------------------- /doc/config.rst: -------------------------------------------------------------------------------- 1 | ============= 2 | Configuration 3 | ============= 4 | 5 | Configuration is managed via some key files. To make changes, just use 6 | local_config.json. 7 | 8 | 9 | config.json 10 | =========== 11 | 12 | The main default config - does not need to be altered. Instead, it is updated 13 | by local_config.json 14 | 15 | 16 | local_config.json 17 | ================= 18 | 19 | This is where configurations should be set for you particular instances. These 20 | are the various values you can set: 21 | 22 | 23 | config.py 24 | ========= 25 | 26 | This is the config class - it loads config in from config.json, then overrides 27 | with updates from local_config.json. The config object is then made available 28 | and can be imported elsewhere in the app. Changes to config need only happen 29 | in local_config.json. 30 | 31 | 32 | core.py 33 | ======= 34 | 35 | This is where configure_app and create_app are definedm which prepare the flask 36 | settings for the app to run. This is imported by web.py when the app is created. 37 | Settings are read from config. 
38 | 39 | 40 | default_settings.py 41 | =================== 42 | 43 | Contains a "secure-key" setting for flask config 44 | -------------------------------------------------------------------------------- /bibserver/core.py: -------------------------------------------------------------------------------- 1 | import os 2 | from flask import Flask 3 | 4 | from bibserver import default_settings 5 | from flask_login import LoginManager, current_user 6 | login_manager = LoginManager() 7 | 8 | def create_app(): 9 | app = Flask(__name__) 10 | configure_app(app) 11 | setup_error_email(app) 12 | login_manager.setup_app(app) 13 | return app 14 | 15 | def configure_app(app): 16 | app.config.from_object(default_settings) 17 | # parent directory 18 | here = os.path.dirname(os.path.abspath( __file__ )) 19 | config_path = os.path.join(os.path.dirname(here), 'app.cfg') 20 | if os.path.exists(config_path): 21 | app.config.from_pyfile(config_path) 22 | 23 | def setup_error_email(app): 24 | ADMINS = app.config.get('ADMINS', '') 25 | if not app.debug and ADMINS: 26 | import logging 27 | from logging.handlers import SMTPHandler 28 | mail_handler = SMTPHandler('127.0.0.1', 29 | 'server-error@no-reply.com', 30 | ADMINS, 'error') 31 | mail_handler.setLevel(logging.ERROR) 32 | app.logger.addHandler(mail_handler) 33 | 34 | app = create_app() 35 | 36 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/css/style.css: -------------------------------------------------------------------------------- 1 | /********************************************** 2 | * Generic Mods to Bootstrap 3 | *********************************************/ 4 | 5 | html, body { 6 | background-color: #eee; 7 | } 8 | body { 9 | padding-top: 40px; /* 40px to make the container go all the way to the bottom of the topbar */ 10 | } 11 | 12 | .content { 13 | background-color: #fff; 14 | padding: 20px; 15 | margin: 0 -20px; /* negative indent the amount of the padding to maintain the grid system */ 16 | -webkit-border-radius: 0 0 6px 6px; 17 | -moz-border-radius: 0 0 6px 6px; 18 | border-radius: 0 0 6px 6px; 19 | -webkit-box-shadow: 0 1px 2px rgba(0,0,0,.15); 20 | -moz-box-shadow: 0 1px 2px rgba(0,0,0,.15); 21 | box-shadow: 0 1px 2px rgba(0,0,0,.15); 22 | } 23 | 24 | .page-header { 25 | background-color: #f5f5f5; 26 | padding: 20px 20px 10px; 27 | margin: -20px -20px 20px; 28 | } 29 | 30 | .container > footer p { 31 | text-align: center; 32 | } 33 | 34 | /* specific additions for the example index page */ 35 | .nav-logo img { 36 | margin-top: 4px; 37 | } 38 | 39 | h4 { 40 | line-height: 28px; 41 | margin-bottom: 10px; 42 | } 43 | 44 | -------------------------------------------------------------------------------- /parserscrapers_plugins/plugins.json: -------------------------------------------------------------------------------- 1 | {"wikipedia": {"display_name": "Wikipedia search to citations", "format": "wikipedia", "downloads": true, "bibserver_plugin": true, "BibJSON_version": "0.81", "contact": "openbiblio-dev@lists.okfn.org", "_path": "parserscrapers_plugins/wikipedia.py"}, "ris": {"display_name": "RIS", "format": "ris", "bibserver_plugin": true, "BibJSON_version": "0.81", "contact": "openbiblio-dev@lists.okfn.org", "_path": "parserscrapers_plugins/RISParser.py"}, "jsoncheck": {"display_name": "BibJSON", "format": "jsoncheck", "bibserver_plugin": true, "BibJSON_version": "0.81", "contact": "openbiblio-dev@lists.okfn.org", "_path": "parserscrapers_plugins/bibjson.py"}, "json": 
{"display_name": "JSON", "format": "json", "bibserver_plugin": true, "BibJSON_version": "0.81", "contact": "openbiblio-dev@lists.okfn.org", "_path": "parserscrapers_plugins/JSONParser.py"}, "bibtex": {"display_name": "BibTex", "format": "bibtex", "bibserver_plugin": true, "BibJSON_version": "0.81", "contact": "openbiblio-dev@lists.okfn.org", "_path": "parserscrapers_plugins/bibtex.py"}, "csv": {"display_name": "CSV", "format": "csv", "bibserver_plugin": true, "BibJSON_version": "0.81", "contact": "openbiblio-dev@lists.okfn.org", "_path": "parserscrapers_plugins/csvparser.py"}} -------------------------------------------------------------------------------- /bibserver/config.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | 4 | '''read the config.json file and make available as a config dict''' 5 | 6 | def load_config(path): 7 | fileobj = open(path) 8 | c = "" 9 | for line in fileobj: 10 | if line.strip().startswith("#"): 11 | continue 12 | else: 13 | c += line 14 | out = json.loads(c) 15 | 16 | # add some critical defaults if necessary 17 | if 'facet_field' not in out: 18 | out['facet_field'] = '' 19 | 20 | return out 21 | 22 | here = os.path.dirname(__file__) 23 | parent = os.path.dirname(here) 24 | config_path = os.path.join(parent, 'config.json') 25 | config = load_config(config_path) 26 | 27 | if os.path.exists(os.path.join(parent, 'local_config.json')): 28 | local_config = load_config(os.path.join(parent, 'local_config.json')) 29 | config.update(local_config) 30 | 31 | __all__ = ['config'] 32 | 33 | 34 | ''' wrap a config dict in a class if required''' 35 | 36 | class Config(object): 37 | def __init__(self,confdict=config): 38 | '''Create Configuration object from a configuration dictionary.''' 39 | self.cfg = confdict 40 | 41 | def __getattr__(self, attr): 42 | return self.cfg.get(attr, None) 43 | 44 | 45 | 46 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | envlist = py{27,34,36,37,py},pep8 3 | skip_missing_interpreters=true 4 | 5 | [testenv] 6 | # Most of these are defaults but if you specify any you can't fall back 7 | # to defaults for others. 
8 | basepython = 9 | py27: python2.7 10 | py34: python3.4 11 | py35: python3.5 12 | py36: python3.6 13 | py37: python3.7 14 | py38: python3.8 15 | pypy: pypy 16 | pypy3: pypy3 17 | py2: python2.7 18 | py3: python3.6 19 | 20 | passenv = PYTHONPATH 21 | deps = -r{toxinidir}/requirements.txt 22 | -r{toxinidir}/requirements.test.txt 23 | #TODO: Un-ignore test failures below once they pass 24 | whitelist_externals = {toxinidir}/parserscrapers_plugins/bibtex.py 25 | commands = {toxinidir}/parserscrapers_plugins/bibtex.py test/data/sample.bibtex 26 | {toxinidir}/parserscrapers_plugins/bibtex.py test/data/sampleutf8.bibtex 27 | - nosetests --with-coverage {posargs} 28 | 29 | [testenv:pep8] 30 | skip_install = true 31 | basepython = python2.7 32 | deps = flake8 33 | # TODO: Unignore flake8 errors for bibserver and test 34 | commands = - flake8 bibserver {posargs} 35 | flake8 parserscrapers_plugins {posargs} 36 | - flake8 test 37 | 38 | [flake8] 39 | exclude = 40 | .tox 41 | show-source = true 42 | 43 | [testenv:py27-osx-builtin] 44 | basepython = /usr/bin/python2.7 45 | -------------------------------------------------------------------------------- /doc/index.rst: -------------------------------------------------------------------------------- 1 | ==================================== 2 | Welcome to BibServer's documentation 3 | ==================================== 4 | 5 | BibServer_ is an open-source RESTful bibliographic data server. BibServer makes 6 | it easy to create and manage collections of bibliographic records such as 7 | reading lists, publication lists and even complete library catalogs. 8 | 9 | Main features: 10 | 11 | * Create and manage bibliographic collections simply and easily 12 | * Import (and export) your collection from bibtex, MARC, RIS, BibJSON, RDF or 13 | other bibliogrpaphic formats in a matter of seconds 14 | * Browse collection via an elegant faceted interface 15 | * Embed the collection browser in other websites 16 | * Full RESTful API 17 | * Open-source and free to use 18 | * ~~Hosted service available at http://bibsoup.net/~~ 19 | 20 | .. _BibServer: http://bibserver.org/~~ 21 | 22 | 23 | Quick Links 24 | =========== 25 | 26 | * Code: http://github.com/okfn/bibserver 27 | * Mailing list: http://lists.okfn.org/mailman/listinfo/openbiblio-dev 28 | * ~~Live demo: http://dev.bibsoup.net/ (sandbox) or http://bibsoup.net/~~ 29 | 30 | 31 | Installation 32 | ============ 33 | 34 | .. toctree:: 35 | :maxdepth: 2 36 | 37 | install 38 | deploy 39 | config 40 | upload 41 | frontend 42 | api 43 | parsers 44 | auth 45 | bibjson 46 | licenses 47 | 48 | Indices and tables 49 | ================== 50 | 51 | * :ref:`genindex` 52 | * :ref:`modindex` 53 | * :ref:`search` 54 | 55 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2010, Michael Bostock 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | * Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 10 | * Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 
13 | 14 | * The name Michael Bostock may not be used to endorse or promote products 15 | derived from this software without specific prior written permission. 16 | 17 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 18 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 19 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 20 | DISCLAIMED. IN NO EVENT SHALL MICHAEL BOSTOCK BE LIABLE FOR ANY DIRECT, 21 | INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, 22 | BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, 23 | DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY 24 | OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING 25 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, 26 | EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 27 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/css/facetview.css: -------------------------------------------------------------------------------- 1 | #facetview_freetext{ 2 | -moz-border-radius: 0px; 3 | -webkit-border-radius: 0px; 4 | border-radius: 0px; 5 | } 6 | #facetview_filters h3{ 7 | margin:40px 0 20px 0; 8 | } 9 | #facetview_advanced{ 10 | float:left; 11 | clear:left; 12 | margin:10px; 13 | padding:3px 5px 5px 5px; 14 | -moz-border-radius: 5px; 15 | -webkit-border-radius: 5px; 16 | border-radius: 5px; 17 | border:1px solid #a1a1a1; 18 | color:#a1a1a1; 19 | overflow:hidden; 20 | width:165px; 21 | } 22 | #facetview_advanced select{ 23 | width:160px; 24 | background:#fff; 25 | border:1px solid #a1a1a1; 26 | color:#a1a1a1; 27 | font-size:11px; 28 | -moz-border-radius: 2px; 29 | -webkit-border-radius: 2px; 30 | border-radius: 2px; 31 | } 32 | #facetview_visualisation{ 33 | border:1px solid #ccc; 34 | margin:10px 0 10px 0; 35 | -moz-border-radius: 5px; 36 | -webkit-border-radius: 5px; 37 | border-radius: 5px; 38 | } 39 | .node{ 40 | cursor:pointer; 41 | } 42 | .facetview_filterchoice{ 43 | text-decoration:none; 44 | color:#373737; 45 | } 46 | .facetview_filterselected{ 47 | margin:5px; 48 | } 49 | .facetview_freetext_filterdiv{ 50 | float:left; 51 | clear:both; 52 | background:#eee; 53 | padding:0; 54 | color:green; 55 | width:100%; 56 | } 57 | .facetview_advancedshow{ 58 | text-decoration:none; 59 | color:#a1a1a1; 60 | } 61 | 62 | .facetview_resultactions a{ 63 | color:#353535; 64 | font-weight:bold; 65 | text-decoration:none; 66 | margin:0 5px 0 5px; 67 | } 68 | 69 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: false 2 | 3 | language: python 4 | 5 | # Run each tox environment separately 6 | matrix: 7 | include: 8 | - os: osx 9 | language: generic 10 | env: # System-provided 11 | - TOX_ENV=py27-osx-builtin 12 | - BREW_PYTHON_PACKAGE= 13 | - os: osx 14 | language: generic 15 | env: # Homebrew-provided 16 | - TOX_ENV=py27 17 | - BREW_PYTHON_PACKAGE=python@2 18 | # - os: osx 19 | # language: generic 20 | # env: # Python 3 from Homebrew 21 | # - TOX_ENV=py37 22 | # - BREW_PYTHON_PACKAGE=python@3 23 | - os: linux 24 | python: 2.7 25 | env: TOX_ENV=py27 26 | # - os: linux 27 | # python: 3.7 28 | # env: TOX_ENV=py37 29 | # sudo: true # Required for python 3.7 30 | # dist: xenial 31 | 32 | services: 33 | - elasticsearch 34 | 35 | # Cache pip 
requirements for faster builds 36 | cache: pip 37 | 38 | install: 39 | # Python test requirements 40 | - | 41 | if [[ $TRAVIS_OS_NAME == 'osx' ]]; then 42 | if [[ -n "$BREW_PYTHON_PACKAGE" ]]; then 43 | brew update 44 | if ! brew list --versions "$BREW_PYTHON_PACKAGE" >/dev/null; then 45 | brew install "$BREW_PYTHON_PACKAGE" 46 | elif ! brew outdated "$BREW_PYTHON_PACKAGE"; then 47 | brew upgrade "$BREW_PYTHON_PACKAGE" 48 | fi 49 | fi 50 | sudo pip2 install tox 51 | else 52 | pip install tox 53 | fi 54 | 55 | # Wait for elasticsearch to start up 56 | - sleep 10 57 | 58 | script: 59 | - PATH="/usr/local/bin:$PATH" tox -e "$TOX_ENV" 60 | -------------------------------------------------------------------------------- /test/test_importer.py: -------------------------------------------------------------------------------- 1 | from base import * 2 | 3 | from bibserver.importer import Importer 4 | import bibserver.dao 5 | import os 6 | 7 | class TestImporter: 8 | @classmethod 9 | def setup_class(cls): 10 | pass 11 | 12 | @classmethod 13 | def teardown_class(cls): 14 | conn, db = dao.get_conn() 15 | conn.delete_index(TESTDB) 16 | 17 | def test_upload(self): 18 | owner = dao.Account(id='testaccount1') 19 | owner.save() 20 | i = Importer(owner=owner) 21 | data = open('test/data/sample.bibtex.bibjson') 22 | collection_in = { 23 | 'label': u'My Test Collection' 24 | } 25 | coll, records = i.upload(data, collection_in) 26 | assert coll.id 27 | assert owner.collections[0].id == coll.id, owner.collections 28 | 29 | assert len(records) == 1, records 30 | recid = records[0]['_id'] 31 | out = bibserver.dao.Record.get(recid) 32 | assert out["year"] == '2008', out 33 | assert out['collection'] == coll['collection'] 34 | 35 | # now try uploading exactly the same data again 36 | data = open('test/data/sample.bibtex.bibjson') 37 | newcoll, records = i.upload(data, collection_in) 38 | # still should have only one collection 39 | assert len(owner.collections) == 1 40 | assert newcoll.id == coll.id 41 | assert len(records) == 1 42 | assert records[0]['collection'] == coll['collection'] 43 | # still should have only one record in it 44 | recs_for_collection = dao.Record.query('collection:"' + coll['collection'] + '"') 45 | assert recs_for_collection.total == 1, recs_for_collection 46 | 47 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/linkify/1.0/jquery.linkify-1.0-min.js: -------------------------------------------------------------------------------- 1 | // encoding: utf-8 2 | // $.fn.linkify 1.0 - MIT/GPL Licensed - More info: http://github.com/maranomynet/linkify/ 3 | (function(b){var x=/(^|["'(\s]|<)(www\..+?\..+?)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/g,y=/(^|["'(\s]|<)((?:(?:https?|ftp):\/\/|mailto:).+?)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/g,z=function(h){return h.replace(x,'$1$2$3').replace(y,'$1$2$3').replace(/"<``>/g,'"http')},s=b.fn.linkify=function(c){if(!b.isPlainObject(c)){c={use:(typeof c=='string')?c:undefined,handleLinks:b.isFunction(c)?c:arguments[1]}}var d=c.use,k=s.plugins||{},l=[z],f,m=[],n=c.handleLinks;if(d==undefined||d=='*'){for(var i in k){l.push(k[i])}}else{d=b.isArray(d)?d:b.trim(d).split(/ *, */);var o,i;for(var p=0,A=d.length;p1&&/\S/.test(a)){var q,r;f=f||b('
')[0];f.innerHTML='';f.appendChild(e.cloneNode(false));var u=f.childNodes;for(var v=0,g;(g=l[v]);v++){var w=u.length,j;while(w--){j=u[w];if(j.nodeType==3){a=j.nodeValue;if(a.length>1&&/\S/.test(a)){r=a;a=a.replace(/&/g,'&').replace(//g,'>');a=b.isFunction(g)?g(a):a.replace(g.re,g.tmpl);q=q||r!=a;r!=a&&b(j).after(a).remove()}}}}a=f.innerHTML;if(n){a=b('
').html(a);m=m.concat(a.find('a').toArray().reverse());a=a.contents()}q&&b(e).after(a).remove()}}else if(e.nodeType==1&&!/^(a|button|textarea)$/i.test(e.tagName)){arguments.callee.call(e)}}});n&&n(b(m.reverse()));return this};s.plugins={mailto:{re:/(^|["'(\s]|<)([^"'(\s&]+?@.+\.[a-z]{2,7})(([:?]|\.+)?(\s|$)|>|[)"',])/gi,tmpl:'$1$2$3'}}})(jQuery); 4 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | .. image:: https://travis-ci.org/okfn/bibserver.svg?branch=master 2 | :target: https://travis-ci.org/okfn/bibserver 3 | 4 | BibServer is an open-source RESTful bibliographic data server. BibServer makes 5 | it easy to create and manage collections of bibliographic records such as 6 | reading lists, publication lists and even complete library catalogs. 7 | 8 | Main features: 9 | 10 | * Create and manage bibliographic collections simply and easily 11 | * Import (and export) your collection from bibtex, MARC, RIS, BibJSON, RDF or 12 | other bibliographic formats in a matter of seconds 13 | * Browse collection via an elegant faceted interface 14 | * Embed the collection browser in other websites 15 | * Full RESTful API 16 | * Open-source and free to use 17 | 18 | 19 | Quick Links 20 | =========== 21 | 22 | * Code: http://github.com/okfn/bibserver 23 | * Documentation: https://bibserver.readthedocs.io/ 24 | * Mailing list: http://lists.okfn.org/mailman/listinfo/openbiblio-dev 25 | 26 | 27 | Installation 28 | ============ 29 | 30 | See doc/install.rst or 31 | https://bibserver.readthedocs.io/en/latest/install.html 32 | 33 | 34 | Command Line Usage 35 | ================== 36 | 37 | Command link script in `cli.py`. To see commands do:: 38 | 39 | ./cli.py -h 40 | 41 | 42 | Developers 43 | ========== 44 | 45 | To run the tests: 46 | 47 | 1. Install nose (python-nose) 48 | 2. Run the following command:: 49 | 50 | nosetests -v test/ 51 | 52 | 53 | Copyright and License 54 | ===================== 55 | 56 | Copyright 2011-2012 Open Knowledge Foundation. 57 | 58 | Licensed under the MIT license 59 | 60 | 61 | 62 | Vendor packages 63 | =============== 64 | 65 | This BibServer repository also includes the following vendor packages, all of 66 | which are JavaScript plugins available under open source license: 67 | 68 | * http://jquery.com 69 | * http://jqueryui.com 70 | * http://twitter.github.com/bootstrap 71 | * http://github.com/okfn/facetview 72 | * http://d3js.org 73 | * http://code.google.com/p/jquery-linkify/ 74 | 75 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/simple.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | FacetView 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 45 | 46 | 53 | 54 | 55 | 56 | 57 |
58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /parserscrapers_plugins/csvparser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import csv 3 | import sys 4 | import json 5 | import chardet 6 | import cStringIO 7 | 8 | 9 | class CSVParser(object): 10 | 11 | def __init__(self, fileobj): 12 | 13 | data = fileobj.read() 14 | self.encoding = chardet.detect(data).get('encoding', 'ascii') 15 | 16 | # Some files have Byte-order marks inserted at the start 17 | if data[:3] == '\xef\xbb\xbf': 18 | data = data[3:] 19 | self.fileobj = cStringIO.StringIO(data) 20 | 21 | def parse(self): 22 | # dialect = csv.Sniffer().sniff(fileobj.read(1024)) 23 | d = csv.DictReader(self.fileobj) 24 | data = [] 25 | 26 | # do any required conversions 27 | for row in d: 28 | for k, v in row.items(): 29 | del row[k] 30 | row[k.lower()] = v 31 | if "author" in row: 32 | row["author"] = [{"name": i} for i in row["author"].split(",")] 33 | if "editor" in row: 34 | row["editor"] = [{"name": i} for i in row["editor"].split(",")] 35 | if "journal" in row: 36 | row["journal"] = {"name": row["journal"]} 37 | data.append(row) 38 | return data, {} 39 | 40 | 41 | def parse(): 42 | parser = CSVParser(sys.stdin) 43 | records, metadata = parser.parse() 44 | if len(records) > 0: 45 | sys.stdout.write(json.dumps({'records': records, 'metadata': metadata})) # noqa E501 46 | else: 47 | sys.stderr.write('Zero records were parsed from the data') 48 | 49 | 50 | def main(): 51 | conf = {"display_name": "CSV", 52 | "format": "csv", 53 | "contact": "openbiblio-dev@lists.okfn.org", 54 | "bibserver_plugin": True, 55 | "BibJSON_version": "0.81"} 56 | for x in sys.argv[1:]: 57 | if x == '-bibserver': 58 | sys.stdout.write(json.dumps(conf)) 59 | sys.exit() 60 | parse() 61 | 62 | 63 | if __name__ == '__main__': 64 | main() 65 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/local.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | FacetView 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 45 | 46 | 53 | 54 | 55 | 56 | 57 |
58 | 59 | 60 | 61 | -------------------------------------------------------------------------------- /doc/install.rst: -------------------------------------------------------------------------------- 1 | ============ 2 | Installation 3 | ============ 4 | 5 | Simple Setup 6 | ============ 7 | 8 | 1. Install pre-requisites: 9 | 10 | * Python (>= 2.7), pip and virtualenv. 11 | * git 12 | * ElasticSearch_ (> 0.17 series) 13 | 14 | 2. [optional] Create a virtualenv and enable it:: 15 | 16 | # in bash 17 | virtualenv {myenv} 18 | . {myenv}/bin/activate 19 | 20 | 3. Get the source:: 21 | 22 | # by convention we put it in the virtualenv but you can put anywhere 23 | # mkdir {myenv}/src 24 | # git clone https://github.com/okfn/bibserver {myenv}/src/ 25 | git clone https://github.com/okfn/bibserver 26 | 27 | 3. Install the app:: 28 | 29 | # move to your checkout of bibserver 30 | # cd {myenv}/src/bibserver 31 | cd bibserver 32 | # do a development install from current directory 33 | pip install -e . 34 | # alternatively if you do not want a development install 35 | # note there is an error with this at the moment - do dev install 36 | # python setup.py install 37 | 38 | 4. Run the webserver:: 39 | 40 | python bibserver/web.py 41 | 42 | .. _ElasticSearch: http://www.elasticsearch.org/ 43 | 44 | 45 | See doc/deploy.rst or https://bibserver.readthedocs.io/en/latest/deploy.html 46 | for more details on a full installation 47 | 48 | 49 | Install example 50 | =============== 51 | 52 | Install commands on a clean installation of Ubuntu_11.10_:: 53 | 54 | sudo apt-get install python-pip python-dev build-essential 55 | sudo pip install --upgrade pip 56 | sudo pip install --upgrade virtualenv 57 | sudo apt-get install git 58 | 59 | wget https://github.com/downloads/elasticsearch/elasticsearch/elasticsearch-0.18.2.tar.gz 60 | tar -xzvf elasticsearch-0.18.2.tar.gz 61 | ./elasticsearch-0.18.2/bin/elasticsearch start 62 | 63 | virtualenv . 64 | . ./bin/activate 65 | 66 | git clone https://github.com/okfn/bibserver 67 | cd bibserver 68 | pip install -e . 69 | 70 | python bibserver/web.py 71 | 72 | You will now find your bibserver running at localhost:5000. 73 | 74 | .. _Ubuntu_11.10: http:ubuntu.com 75 | 76 | 77 | -------------------------------------------------------------------------------- /bibserver/templates/account/register.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 | {% from "_formhelpers.html" import render_field %} 5 | 6 | 28 | 29 |
30 |
31 |
32 |
33 | {{ render_field(form.w) }} 34 | {{ render_field(form.n) }} 35 | {{ render_field(form.s, placeholder="********") }} 36 | {{ render_field(form.c, placeholder="********") }} 37 | {{ render_field(form.d) }} 38 |
39 | 40 |





41 |
42 |
43 |
44 |
45 |

It is necessary to register to use this service. This is only so that 46 | your collections can be allocated to you. There is no cost. You will only be emailed about service issues.

47 | 48 |

After registering and signing in, you can view your user information 49 | by clicking your username on the top right menu. This includes your api_key, 50 | which you will need if you want to send data via the API.

51 | 52 |

Don't get your username wrong! We can't change it! (If you do, just 53 | create a new account for yourself.)

54 |
55 |
56 |
57 | 58 | {% endblock %} 59 | 60 | -------------------------------------------------------------------------------- /bibserver/util.py: -------------------------------------------------------------------------------- 1 | from urllib import urlopen, urlencode 2 | import md5 3 | import re 4 | from unicodedata import normalize 5 | from functools import wraps 6 | from flask import request, current_app 7 | 8 | 9 | def jsonp(f): 10 | """Wraps JSONified output for JSONP""" 11 | @wraps(f) 12 | def decorated_function(*args, **kwargs): 13 | callback = request.args.get('callback', False) 14 | if callback: 15 | content = str(callback) + '(' + str(f(*args,**kwargs).data) + ')' 16 | return current_app.response_class(content, mimetype='application/javascript') 17 | else: 18 | return f(*args, **kwargs) 19 | return decorated_function 20 | 21 | 22 | # derived from http://flask.pocoo.org/snippets/45/ (pd) and customised 23 | def request_wants_json(): 24 | best = request.accept_mimetypes.best_match(['application/json', 'text/html']) 25 | if best == 'application/json' and request.accept_mimetypes[best] > request.accept_mimetypes['text/html']: 26 | best = True 27 | else: 28 | best = False 29 | if request.values.get('format','').lower() == 'json' or request.path.endswith(".json"): 30 | best = True 31 | return best 32 | 33 | 34 | # derived from http://flask.pocoo.org/snippets/5/ (public domain) 35 | # changed delimiter to _ instead of - due to ES search problem on the - 36 | _punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+') 37 | def slugify(text, delim=u'_'): 38 | """Generates an slightly worse ASCII-only slug.""" 39 | result = [] 40 | for word in _punct_re.split(text.lower()): 41 | word = normalize('NFKD', word).encode('ascii', 'ignore') 42 | if word: 43 | result.append(word) 44 | return unicode(delim.join(result)) 45 | 46 | 47 | # get gravatar for email address 48 | def get_gravatar(email, size=None, default=None, border=None): 49 | email = email.lower().strip() 50 | hash = md5.md5(email).hexdigest() 51 | args = {'gravatar_id':hash} 52 | if size and 1 <= int(size) <= 512: 53 | args['size'] = size 54 | if default: args['default'] = default 55 | if border: args['border'] = border 56 | 57 | url = 'http://www.gravatar.com/avatar.php?' + urlencode(args) 58 | 59 | response = urlopen(url) 60 | image = response.read() 61 | response.close() 62 | 63 | return image 64 | 65 | -------------------------------------------------------------------------------- /doc/upload.rst: -------------------------------------------------------------------------------- 1 | .. _upload: 2 | 3 | ===================== 4 | Uploading collections 5 | ===================== 6 | 7 | When a bibserver instance is configured to allow uploads, it is possible to 8 | upload from a source URL or file from PC into the instance via the /upload page. 9 | 10 | A typical bibserver will support upload from the parsers it has available to it 11 | - read more about the parsers and running them independently, or writing new ones, 12 | on the parsers documentation page - :ref:`parsers` 13 | 14 | 15 | The upload page 16 | =============== 17 | 18 | To upload, just go to the upload page. Provide a URL or file, a collection name 19 | and description, confirm the license and the format type. 20 | 21 | The Upload form can either be given a URL from which the Bibserver will retrieve the data to import, or a user can upload a file from her local machine to be imported. 
Bibserver tries to guess the format of the supplied URL by looking at the filename extension of the supplied URL. (this is unreliable and might be removed in future). 22 | If the fileformat can not be guessed, a list of supported fileformats for that install of Bibserver is shown. 23 | 24 | If a converter of the right format is not available, your source file can be 25 | converted elsewhere into JSON following the bibJSON standard, then the JSON 26 | file can be imported directly. 27 | 28 | 29 | Upload from other online services 30 | ================================= 31 | 32 | Examples of providing URLs for uploading directly from other online sources 33 | such as bibsonomy. 34 | 35 | 36 | Monitoring tickets 37 | ================== 38 | 39 | Explain the tickets page, and the info that can be got there. 40 | 41 | 42 | Viewing an uploaded collection 43 | ============================== 44 | 45 | On upload, a tidy version of collection name is made for URL. 46 | 47 | Once a collection has uploaded, it can be found at /username/collection. 48 | 49 | 50 | 51 | Multiple files to same collection 52 | ================================= 53 | 54 | Confirm what happens when uploading mutliple files of different content to the 55 | same collection name 56 | 57 | 58 | Overwriting records and internal IDs 59 | ==================================== 60 | 61 | Mention the method by which internal IDs are allocated, and how records can be 62 | overwritten if an upload is performed of records that are somewhat identical to 63 | current records. Point out what this means for local edits. 64 | 65 | 66 | -------------------------------------------------------------------------------- /cli.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import os 3 | import sys 4 | import optparse 5 | import inspect 6 | 7 | # does setup of cfg 8 | from bibserver import dao 9 | 10 | 11 | def rebuild_db(): 12 | '''Rebuild the db''' 13 | conn, db = dao.get_conn() 14 | conn.delete_index(db) 15 | conn.create_index(db) 16 | 17 | def fixtures(): 18 | import test.base 19 | for dict_ in test.base.fixtures['records']: 20 | dao.Record.upsert(dict_) 21 | 22 | def convert(inpath): 23 | '''Convert from bibtex to bibjson. One argument expected: path to bibtext 24 | file. 25 | ''' 26 | import bibserver.parsers.BibTexParser 27 | import json 28 | parser = parsers.BibTexParser.BibTexParser() 29 | bibtex = open(inpath).read() 30 | print json.dumps(parser.parse(bibtex), indent=2, sort_keys=True) 31 | 32 | def bulk_upload(colls_list): 33 | '''Take a collections list in a JSON file and use the bulk_upload importer. 
34 | colls_list described in importer.py 35 | ''' 36 | import bibserver.importer 37 | return bibserver.importer.bulk_upload(colls_list) 38 | 39 | 40 | ## ================================================== 41 | ## Misc stuff for setting up a command line interface 42 | 43 | def _module_functions(functions): 44 | local_functions = dict(functions) 45 | for k,v in local_functions.items(): 46 | if not inspect.isfunction(v) or k.startswith('_'): 47 | del local_functions[k] 48 | return local_functions 49 | 50 | def _main(functions_or_object): 51 | isobject = inspect.isclass(functions_or_object) 52 | if isobject: 53 | _methods = _object_methods(functions_or_object) 54 | else: 55 | _methods = _module_functions(functions_or_object) 56 | 57 | usage = '''%prog {action} 58 | 59 | Actions: 60 | ''' 61 | usage += '\n '.join( 62 | [ '%s: %s' % (name, m.__doc__.split('\n')[0] if m.__doc__ else '') for (name,m) 63 | in sorted(_methods.items()) ]) 64 | parser = optparse.OptionParser(usage) 65 | # Optional: for a config file 66 | # parser.add_option('-c', '--config', dest='config', 67 | # help='Config file to use.') 68 | options, args = parser.parse_args() 69 | 70 | if not args or not args[0] in _methods: 71 | parser.print_help() 72 | sys.exit(1) 73 | 74 | method = args[0] 75 | if isobject: 76 | getattr(functions_or_object(), method)(*args[1:]) 77 | else: 78 | _methods[method](*args[1:]) 79 | 80 | __all__ = [ '_main' ] 81 | 82 | if __name__ == '__main__': 83 | _main(locals()) 84 | 85 | 86 | -------------------------------------------------------------------------------- /bibserver/templates/tickets/view.html: -------------------------------------------------------------------------------- 1 | {% extends "/base.html" %} 2 | 3 | {% block content %} 4 |

Upload Tickets

5 | 6 | {% if ingest_tickets|length >= 1 %} 7 |
8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | {% for t in ingest_tickets %} 19 | 20 | 21 | 27 | 28 | 29 | 30 | 31 | 56 | 57 | 58 | 59 | 60 | 61 | {% endfor %} 62 |
StateOwnerCollectionTimestampFormatSource  Description
{{t.state}} 22 | {% if (t.id == ticket.id) and (t.state != 'new') and ((t.owner == current_user._id) or current_user.is_super) %} 23 |
Reset 24 |  Delete 25 | {% endif%} 26 |
{{t.owner}}{{t.collection}}{{t._last_modified}}{{t.format}}{% if t.source_url and t.source_url.startswith('http') %}Source URL 32 | {% else %} 33 | '{{t.source_url}}' 34 | {% endif %} 35 | {% if t.id == ticket.id %}
36 | {% if ticket.state == 'failed' %} 37 |
38 |

Has an error condition

39 | 40 | {% for e in ticket.exception %} 41 |
    {{e.0}}
    {{e.1}}
42 | {% endfor %} 43 | 44 |
45 | {% endif %} 46 | {% if ticket.failed_index %} 47 |
48 |

Failed imports

49 | {% for r in ticket.failed_index %} 50 |

{{r}}

51 | {% endfor %} 52 |
53 | {% endif %} 54 | {% endif %} 55 |
{% if ticket.data_md5 %}Raw data{% endif %}{% if ticket.data_json %}BibJSON{% endif %}{{t.description}}
63 |
64 | {% endif %} 65 | 66 | {% endblock %} 67 | -------------------------------------------------------------------------------- /parserscrapers_plugins/JSONParser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import chardet 4 | import cStringIO 5 | import json 6 | import sys 7 | 8 | 9 | class JSONParser(object): 10 | 11 | def __init__(self, fileobj): 12 | 13 | data = fileobj.read() 14 | self.encoding = chardet.detect(data).get('encoding', 'ascii') 15 | 16 | # Some files have Byte-order marks inserted at the start 17 | if data[:3] == '\xef\xbb\xbf': 18 | data = data[3:] 19 | self.fileobj = cStringIO.StringIO(data) 20 | 21 | def parse(self): 22 | incoming = json.load(self.fileobj) 23 | 24 | if 'records' in incoming: 25 | # if the incoming is bibjson, get records and metadata 26 | data = self.customisations(incoming['records']) 27 | metadata = incoming.get('metadata', {}) 28 | else: 29 | data = incoming 30 | metadata = {} 31 | 32 | return data, metadata 33 | 34 | def customisations(self, records): 35 | for record in records: 36 | # tidy any errant authors as strings 37 | if 'author' in record: 38 | if ' and ' in record['author']: 39 | record['author'] = record['author'].split(' and ') 40 | # do any conversions to objects 41 | for index, item in enumerate(record.get('author', [])): 42 | if not isinstance(item, dict): 43 | record['author'][index] = {"name": item} 44 | # copy an citekey to cid 45 | if 'citekey' in record: 46 | record['id'] = record['citekey'] 47 | if 'cid' in record: 48 | record['id'] = record['cid'] 49 | # copy keys to singular 50 | if 'links' in record: 51 | record['link'] = record['links'] 52 | del record['links'] 53 | return records 54 | 55 | 56 | def parse(): 57 | parser = JSONParser(sys.stdin) 58 | records, metadata = parser.parse() 59 | if len(records) > 0: 60 | sys.stdout.write( 61 | json.dumps({'records': records, 'metadata': metadata}) 62 | ) 63 | else: 64 | sys.stderr.write('Zero records were parsed from the data') 65 | 66 | 67 | def main(): 68 | conf = {"display_name": "JSON", 69 | "format": "json", 70 | "contact": "openbiblio-dev@lists.okfn.org", 71 | "bibserver_plugin": True, 72 | "BibJSON_version": "0.81"} 73 | for x in sys.argv[1:]: 74 | if x == '-bibserver': 75 | sys.stdout.write(json.dumps(conf)) 76 | sys.exit() 77 | parse() 78 | 79 | 80 | if __name__ == '__main__': 81 | main() 82 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/d3.csv.js: -------------------------------------------------------------------------------- 1 | (function(){d3.csv = function(url, callback) { 2 | d3.text(url, "text/csv", function(text) { 3 | callback(text && d3.csv.parse(text)); 4 | }); 5 | }; 6 | d3.csv.parse = function(text) { 7 | var header; 8 | return d3.csv.parseRows(text, function(row, i) { 9 | if (i) { 10 | var o = {}, j = -1, m = header.length; 11 | while (++j < m) o[header[j]] = row[j]; 12 | return o; 13 | } else { 14 | header = row; 15 | return null; 16 | } 17 | }); 18 | }; 19 | 20 | d3.csv.parseRows = function(text, f) { 21 | var EOL = {}, // sentinel value for end-of-line 22 | EOF = {}, // sentinel value for end-of-file 23 | rows = [], // output rows 24 | re = /\r\n|[,\r\n]/g, // field separator regex 25 | n = 0, // the current line number 26 | t, // the current token 27 | eol; // is the current token followed by EOL? 28 | 29 | re.lastIndex = 0; // work-around bug in FF 3.6 30 | 31 | /** @private Returns the next token. 
*/ 32 | function token() { 33 | if (re.lastIndex >= text.length) return EOF; // special case: end of file 34 | if (eol) { eol = false; return EOL; } // special case: end of line 35 | 36 | // special case: quotes 37 | var j = re.lastIndex; 38 | if (text.charCodeAt(j) === 34) { 39 | var i = j; 40 | while (i++ < text.length) { 41 | if (text.charCodeAt(i) === 34) { 42 | if (text.charCodeAt(i + 1) !== 34) break; 43 | i++; 44 | } 45 | } 46 | re.lastIndex = i + 2; 47 | var c = text.charCodeAt(i + 1); 48 | if (c === 13) { 49 | eol = true; 50 | if (text.charCodeAt(i + 2) === 10) re.lastIndex++; 51 | } else if (c === 10) { 52 | eol = true; 53 | } 54 | return text.substring(j + 1, i).replace(/""/g, "\""); 55 | } 56 | 57 | // common case 58 | var m = re.exec(text); 59 | if (m) { 60 | eol = m[0].charCodeAt(0) !== 44; 61 | return text.substring(j, m.index); 62 | } 63 | re.lastIndex = text.length; 64 | return text.substring(j); 65 | } 66 | 67 | while ((t = token()) !== EOF) { 68 | var a = []; 69 | while ((t !== EOL) && (t !== EOF)) { 70 | a.push(t); 71 | t = token(); 72 | } 73 | if (f && !(a = f(a, n++))) continue; 74 | rows.push(a); 75 | } 76 | 77 | return rows; 78 | }; 79 | d3.csv.format = function(rows) { 80 | return rows.map(d3_csv_formatRow).join("\n"); 81 | }; 82 | 83 | function d3_csv_formatRow(row) { 84 | return row.map(d3_csv_formatValue).join(","); 85 | } 86 | 87 | function d3_csv_formatValue(text) { 88 | return /[",\n]/.test(text) 89 | ? "\"" + text.replace(/\"/g, "\"\"") + "\"" 90 | : text; 91 | } 92 | })(); 93 | -------------------------------------------------------------------------------- /bibserver/templates/create.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 |
6 |
7 | 8 |

Create a new collection

9 | 10 |

Create a new collection then search for records to add to it. Alternatively, upload a collection from your own source.

11 | 12 |
name your collection: 13 | 14 |

provide a description: 15 | 16 |

collection license: 18 | 35 | 36 |

37 | 38 |
39 |
40 | 41 | 59 | 60 |
61 | {% endblock %} 62 | 63 | -------------------------------------------------------------------------------- /bibserver/view/account.py: -------------------------------------------------------------------------------- 1 | import uuid 2 | 3 | from flask import Blueprint, request, url_for, flash, redirect 4 | from flask import render_template 5 | from flask_login import login_user, logout_user 6 | from flask_wtf import Form 7 | from wtforms import Form, TextField, TextAreaField, PasswordField, validators, ValidationError 8 | 9 | from bibserver.config import config 10 | import bibserver.dao as dao 11 | 12 | blueprint = Blueprint('account', __name__) 13 | 14 | 15 | @blueprint.route('/') 16 | def index(): 17 | return 'Accounts' 18 | 19 | 20 | class LoginForm(Form): 21 | username = TextField('Username', [validators.Required()]) 22 | password = PasswordField('Password', [validators.Required()]) 23 | 24 | @blueprint.route('/login', methods=['GET', 'POST']) 25 | def login(): 26 | form = LoginForm(request.form, csrf_enabled=False) 27 | if request.method == 'POST' and form.validate(): 28 | password = form.password.data 29 | username = form.username.data 30 | user = dao.Account.get(username) 31 | if user and user.check_password(password): 32 | login_user(user, remember=True) 33 | flash('Welcome back', 'success') 34 | return redirect('/'+user.id) 35 | else: 36 | flash('Incorrect username/password', 'error') 37 | if request.method == 'POST' and not form.validate(): 38 | flash('Invalid form', 'error') 39 | return render_template('account/login.html', form=form, upload=config['allow_upload']) 40 | 41 | 42 | @blueprint.route('/logout') 43 | def logout(): 44 | logout_user() 45 | flash('You are now logged out', 'success') 46 | return redirect(url_for('home')) 47 | 48 | 49 | def existscheck(form, field): 50 | test = dao.Account.get(form.w.data) 51 | if test: 52 | raise ValidationError('Taken! 
Please try another.') 53 | 54 | class RegisterForm(Form): 55 | w = TextField('Username', [validators.Length(min=3, max=25),existscheck]) 56 | n = TextField('Email Address', [validators.Length(min=3, max=35), validators.Email(message='Must be a valid email address')]) 57 | s = PasswordField('Password', [ 58 | validators.Required(), 59 | validators.EqualTo('c', message='Passwords must match') 60 | ]) 61 | c = PasswordField('Repeat Password') 62 | d = TextAreaField('Describe yourself') 63 | 64 | @blueprint.route('/register', methods=['GET', 'POST']) 65 | def register(): 66 | # TODO: re-enable csrf 67 | form = RegisterForm(request.form, csrf_enabled=False) 68 | if request.method == 'POST' and form.validate(): 69 | api_key = str(uuid.uuid4()) 70 | account = dao.Account( 71 | _id=form.w.data, 72 | email=form.n.data, 73 | description = form.d.data, 74 | api_key=api_key 75 | ) 76 | account.set_password(form.s.data) 77 | account.save() 78 | login_user(account, remember=True) 79 | flash('Thanks for signing-up', 'success') 80 | return redirect('/'+account.id) 81 | if request.method == 'POST' and not form.validate(): 82 | flash('Please correct the errors', 'error') 83 | return render_template('account/register.html', form=form) 84 | 85 | -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/jtedit.css: -------------------------------------------------------------------------------- 1 | #jtedit{ 2 | font-size:16px; 3 | border:2px solid #ccc; 4 | float:left; 5 | clear:both; 6 | margin:20px; 7 | -moz-border-radius: 10px; 8 | -webkit-border-radius: 10px; 9 | border-radius: 10px; 10 | } 11 | #jtedit_json{ 12 | margin:10px; 13 | -moz-border-radius: 10px; 14 | -webkit-border-radius: 10px; 15 | border-radius: 10px; 16 | min-width:400px; 17 | min-height:400px; 18 | } 19 | .jtedit_actions{ 20 | width:100%; 21 | float:left; 22 | clear:both; 23 | background:#eee; 24 | padding:4px 0 4px 0; 25 | -moz-border-radius: 0 0 10px 10px; 26 | -webkit-border-radius: 0 0 10px 10px; 27 | border-radius: 0 0 10px 10px; 28 | } 29 | .jtedit_actions:first-child{ 30 | margin:0 0 5px 0; 31 | -moz-border-radius: 10px 10px 0 0; 32 | -webkit-border-radius: 10px 10px 0 0; 33 | border-radius: 10px 10px 0 0; 34 | } 35 | .jtedit_opts{ 36 | display:block; 37 | clear:both; 38 | -moz-border-radius: 0 0 10px 0; 39 | -webkit-border-radius: 0 0 10px 0; 40 | border-radius: 0 0 10px 0; 41 | border:none; 42 | border-top:1px solid #ccc; 43 | } 44 | 45 | div.jtedit_actions > div.btn-group{ 46 | margin:0 5px 0 5px; 47 | float:left; 48 | display:inline; 49 | } 50 | .jtedit_optionsgroup{ 51 | margin:7px 0 0 -10px; 52 | padding:0; 53 | float:left; 54 | display:inline; 55 | border:none; 56 | } 57 | .jtedit_optionswarn{ 58 | border-color: rgba(255, 0, 0, 0.8); 59 | -webkit-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(255, 0, 0, 0.6); 60 | -moz-box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(255, 0, 0, 0.6); 61 | box-shadow: inset 0 1px 1px rgba(0, 0, 0, 0.075), 0 0 8px rgba(255, 0, 0, 0.6); 62 | outline: 0; 63 | outline: thin dotted \9; 64 | /* IE6-9 */ 65 | } 66 | .jtedit_key{ 67 | font-weight:bold; 68 | color:#000; 69 | -moz-border-radius: 10px 0 0 10px; 70 | -webkit-border-radius: 10px 0 0 10px; 71 | border-radius: 10px 0 0 10px; 72 | float:left; 73 | border:none; 74 | padding:5px 30px 5px 10px; 75 | width:150px; 76 | height:20px; 77 | margin:0 -20px 0 -5px; 78 | font-size:16px; 79 | background:none; 80 | } 81 | .jtedit_value{ 82 | -moz-border-radius: 0 10px 10px 
0; 83 | -webkit-border-radius: 0 10px 10px 0; 84 | border-radius: 0 10px 10px 0; 85 | border:none; 86 | padding:5px 10px 5px 10px; 87 | margin:0; 88 | font-size:16px; 89 | background:none; 90 | } 91 | .jtedit_hidden{ 92 | display:none; 93 | } 94 | 95 | .jtedit_kvcontainer{ 96 | float:left; 97 | clear:both; 98 | margin:5px; 99 | background:#eee; 100 | border:1px solid #ccc; 101 | -moz-border-radius: 10px; 102 | -webkit-border-radius: 10px; 103 | border-radius: 10px; 104 | padding-left:5px; 105 | } 106 | 107 | .jtedit_vals{ 108 | -moz-border-radius: 0 10px 10px 0; 109 | -webkit-border-radius: 0 10px 10px 0; 110 | border-radius: 0 10px 10px 0; 111 | border-left:2px solid #ccc; 112 | float:left; 113 | height:100%; 114 | background:#fff; 115 | } 116 | -------------------------------------------------------------------------------- /bibserver/templates/account/view.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 | 5 | 10 | 11 | {% if admin %} 12 | 13 | 30 | 31 |
32 |
33 | {% if current_user.id != account.id %} 34 |

Hi {{ current_user.id }}, you are the superuser
35 | Viewing account: {{ account.id }}
36 | You have full edit permissions

37 | {% else %} 38 |

Hi {{ current_user.id }}

39 |

{{ current_user.description }}

40 | {% endif %} 41 |


Your api_key is:

42 |

43 |

You need to append this to your API calls if you want to make changes. 44 | Check out the docs to learn more.

45 | {% if superuser %} 46 |

You are the superuser! You can view and edit anything! 47 |
Be careful...

48 | {% endif %} 49 |
50 |
51 |

Edit your details

52 |

(Deleting your details irrevocably deletes your account.)

53 |
54 |
55 | 56 |
57 |
58 |

Your collections

59 |

You have {{ recordcount }} records across {{ collcount }} collections. Choose one to view - admin options are available in each collection.

60 | {% if account['collections']|length == 0 %} 61 |

You have no collections yet. Create one!

62 | {% else %} 63 | {% for coll in account['collections'] %} 64 |

{{coll['label']}} - 65 | {{coll['description']}} ({{coll|length}} records)

66 | {% endfor %} 67 | {% endif %} 68 |
69 |
70 |

All your records

71 |
72 |
73 | 74 | {% else %} 75 | 76 |
77 |
78 |

{{ account.id }}

79 |

{{ account.description }}

80 |
81 |
82 |

You are not logged in as this user. Use the login page if you want to change this

83 |
84 |
85 | 86 |
87 |
88 |
89 |
90 | 91 | {% endif %} 92 | 93 | {% endblock %} 94 | 95 | -------------------------------------------------------------------------------- /doc/parsers.rst: -------------------------------------------------------------------------------- 1 | .. _parsers: 2 | 3 | =============== 4 | Parsing sources 5 | =============== 6 | 7 | The most common way of importing bibliographic records into Bibserver is using the Upload form in the web interface. 8 | See :ref:`upload`. When you run the Bibserver application from the command line, an ingest system is started in a separate process, which handles the processing of uploads asynchronously. This is done to keep the web interface responsive as some data uploads may involved downloading and processing very large data files. 9 | 10 | To run the ingest process manually separate from the Bibserver application, start it up with a -d flag. 11 | For example: 12 | 13 | python bibserver/ingest.py -d 14 | 15 | Note that it is normally not required to run the ingest manually, the startup of ingest should be done by the main bibserver command line web script. See: :ref:`deploy` 16 | 17 | The parsers 18 | =========== 19 | 20 | For each kind of file that can be imported into a Bibserver, a 'parser' exists. 21 | A parser is an executable file that accepts a file format on standard input and always outputs Bibjson. 22 | The parsers are stored in a directory called 'parserscrapers_plugins' by default. 23 | 24 | When the importing subsystem of Bibserver (named 'ingest') is initialised, all the executable files 25 | found in the parserscrapers_plugins directory are executed with a -bibserver command line parameter. 26 | A parser **must** recognise this parameter and output a JSON config section in response, indicating if this is a valid Bibserver parser and the format that is supported. 27 | All the parsers found are stored in a json data snippet named 'plugins.json' which can be used to determine what the current list of supported data formats for a given instance are. (this is used for example in the Upload forms) 28 | 29 | The download cache 30 | ================== 31 | 32 | When a data import is submitted to Bibserver, a 'ticket' is made which tracks the progress of the upload. 33 | The ticket lists the format of the imported data, who requested it, the time it was made and the progress of the import. When an import is completed, it is also possible to see the raw data, plus the resulting Bibjson data. 34 | 35 | The ingest tickets, downloaded data plus resulting Bibjson are all stored in a directory named 'download_cache' by default. (this location can be changed in the config.json file) 36 | The list of tickets in a system can be viewed on the /ticket/ URL. Each ticket has an ID, and one could then view either the source data /ticket//data or the resulting Bibjson /ticket//bibjson 37 | 38 | All the data in a Bibserver instance can be listed by looking at a URL: /data.txt. This produces a text file with the URL for each Bibjson data file per line. This can be used for automated buld data downloads of Bibserver data. 39 | 40 | Making a new parser 41 | =================== 42 | 43 | Even though Bibserver is written in Python, it is not necessary to write a parser in Python - it can be written in any programming language. At the time of writing there is one example parser written in Perl to support the MARC format, which is commonly found in library automation systems. 
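As an illustration only, here is a minimal sketch of such a plugin in Python, modelled on the JSON and CSV plugins shipped in parserscrapers_plugins/. The "plaintext" format name and the one-title-per-line mapping are invented for this example; only the -bibserver handshake and the records/metadata JSON written to standard output reflect the protocol described above::

    #!/usr/bin/env python
    # Hypothetical example plugin: turns plain-text titles into BibJSON records.
    import sys
    import json

    CONF = {"display_name": "Plain text titles",
            "format": "plaintext",
            "contact": "openbiblio-dev@lists.okfn.org",
            "bibserver_plugin": True,
            "BibJSON_version": "0.81"}


    def parse():
        # one record per non-empty line of the uploaded data arriving on stdin
        records = [{"title": line.strip()} for line in sys.stdin if line.strip()]
        if records:
            # ingest expects a JSON object with 'records' and 'metadata' on stdout
            sys.stdout.write(json.dumps({"records": records, "metadata": {}}))
        else:
            sys.stderr.write("Zero records were parsed from the data")


    if __name__ == "__main__":
        if "-bibserver" in sys.argv[1:]:
            # answer the capability probe made when ingest scans the plugin directory
            sys.stdout.write(json.dumps(CONF))
        else:
            parse()

Dropping an executable file of this shape into parserscrapers_plugins/ should, in principle, be all that is needed for the new format to appear in plugins.json and hence on the upload form.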
44 | 45 | To make a new parser: 46 | 47 | - you should be able to make standalone executable in the parserscrapers_plugins that can be called from a shell 48 | 49 | - the parser must support the -bibserver command line paramater which gives details about the data format supported 50 | 51 | - read data from standard input, parse and convert the data to Bibjson, and print the resulting Bibjson to standard output. 52 | 53 | TODO: how to submit a pull request or email to include it in the repo. 54 | -------------------------------------------------------------------------------- /bibserver/importer.py: -------------------------------------------------------------------------------- 1 | # the data import manager 2 | # gets an uploaded file or retrieves a file from a URL 3 | # indexes the records found in the file by upserting via the DAO 4 | import urllib2 5 | import re 6 | from cStringIO import StringIO 7 | import unicodedata 8 | import uuid 9 | import json 10 | 11 | import bibserver.dao 12 | import bibserver.util as util 13 | from bibserver.config import config 14 | 15 | class Importer(object): 16 | def __init__(self, owner, requesturl=False): 17 | self.owner = owner 18 | self.requesturl = requesturl 19 | 20 | def upload(self, fileobj, collection=None): 21 | '''Import a bibjson collection into the database. 22 | 23 | :param fileobj: a fileobj pointing to file from which to import 24 | collection records (and possibly collection metadata) 25 | :param collection: collection dict for use when creating collection. If 26 | undefined collection must be extractable from the fileobj. 27 | 28 | :return: same as `index` method. 29 | ''' 30 | jsonin = json.load(fileobj) 31 | metadata = jsonin.get('metadata',False) 32 | record_dicts = jsonin.get('records', jsonin) 33 | 34 | # if metadata provided from file, roll it into the collection object 35 | if metadata: 36 | metadata.update(collection) 37 | collection = metadata 38 | 39 | return self.index(collection, record_dicts) 40 | 41 | def index(self, collection_dict, record_dicts): 42 | '''Add this collection and its records to the database index. 43 | :return: (collection, records) tuple of collection and associated 44 | record objects. 
45 | ''' 46 | col_label_slug = util.slugify(collection_dict['label']) 47 | collection = bibserver.dao.Collection.get_by_owner_coll(self.owner.id, col_label_slug) 48 | if not collection: 49 | collection = bibserver.dao.Collection(**collection_dict) 50 | assert 'label' in collection, 'Collection must have a label' 51 | if not 'collection' in collection: 52 | collection['collection'] = col_label_slug 53 | collection['owner'] = self.owner.id 54 | 55 | collection.save() 56 | 57 | for rec in record_dicts: 58 | if not type(rec) is dict: continue 59 | rec['owner'] = collection['owner'] 60 | if 'collection' in rec: 61 | if collection['collection'] != rec['collection']: 62 | rec['collection'] = collection['collection'] 63 | else: 64 | rec['collection'] = collection['collection'] 65 | if not self.requesturl and 'SITE_URL' in config: 66 | self.requesturl = str(config['SITE_URL']) 67 | if self.requesturl: 68 | if not self.requesturl.endswith('/'): 69 | self.requesturl += '/' 70 | if '_id' not in rec: 71 | rec['_id'] = bibserver.dao.make_id(rec) 72 | rec['url'] = self.requesturl + collection['owner'] + '/' + collection['collection'] + '/' 73 | if 'id' in rec: 74 | rec['url'] += rec['id'] 75 | elif '_id' in rec: 76 | rec['url'] += rec['_id'] 77 | bibserver.dao.Record.bulk_upsert(record_dicts) 78 | return collection, record_dicts 79 | 80 | def findformat(filename): 81 | if filename.endswith(".json"): return "json" 82 | if filename.endswith(".bibtex"): return "bibtex" 83 | if filename.endswith(".bib"): return "bibtex" 84 | if filename.endswith(".csv"): return "csv" 85 | return "bibtex" 86 | 87 | -------------------------------------------------------------------------------- /bibserver/templates/home/faq.html: -------------------------------------------------------------------------------- 1 | {% extends "/base.html" %} 2 | 3 | {% block content %} 4 | 5 | 49 | 50 | {% endblock %} 51 | -------------------------------------------------------------------------------- /bibserver/templates/upload.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 |
5 |
6 |
7 | 8 |

Upload your collection

9 | 10 |
11 | collection URL:
12 |
13 | 14 |
15 | upload from your PC:
16 |
17 | 18 | (or provide wikipedia search terms,
19 | or upload from your PC)

20 | 21 |
name your collection: 22 | 23 |

provide a description: 24 | 25 |

collection license: 26 | 28 | 45 | 46 |
47 |
please specify file format:

48 | {% for p in parser_plugins %} 49 | {{p.display_name}}
50 | {% endfor %} 51 |
52 | 53 |

54 | 55 | 56 |
57 |
58 | 59 | 60 |
61 |

If you don't want to upload to a collection yet, you can also just create a new empty collection.

62 |

BibSoup uses bibJSON to represent bibliographic records; so in order to upload a collection, 63 | it needs to be in bibJSON.

64 |

If your collection is available online, then you can upload directly from the URL of the online file - 65 | and this also enables you to one-click refresh your collection whenever you make changes to that source file.

66 |

If your collection is not already in bibJSON, then try using one of our parsers to convert it.

67 |

Visit http://bibjson.org to learn more about bibJSON.

68 | 69 |
70 |

Once you start an upload, a ticket will be created to track your upload request. You can 71 | then track your upload progress via the upload tickets info page.

72 | View Upload Tickets 73 |
74 | 75 |
76 |
77 | {% endblock %} 78 | 79 | -------------------------------------------------------------------------------- /test/test_dao.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import pprint 4 | from nose.tools import assert_equal 5 | 6 | from base import fixtures, Fixtures, TESTDB 7 | import bibserver.dao as dao 8 | import bibserver.util as util 9 | from datetime import datetime, timedelta 10 | 11 | class TestDAO: 12 | @classmethod 13 | def setup_class(cls): 14 | Fixtures.create_account() 15 | 16 | @classmethod 17 | def teardown_class(cls): 18 | conn, db = dao.get_conn() 19 | conn.delete_index(TESTDB) 20 | 21 | def test_get_None(self): 22 | r = dao.Record.get(None) 23 | assert r == None 24 | 25 | def test_01_record(self): 26 | # Note, adding a one second negative delay to the timestamp 27 | # otherwise the comparison overflows when subtracting timestamps 28 | t1 = datetime.now() - timedelta(seconds=1) 29 | recdict = fixtures['records'][0] 30 | record = dao.Record.upsert(recdict) 31 | outrecord = dao.Record.get(record.id) 32 | for attr in ['type', 'author']: 33 | assert record[attr] == recdict[attr], record 34 | assert outrecord[attr] == recdict[attr], outrecord 35 | 36 | print outrecord.keys() 37 | assert '_created' in outrecord 38 | assert '_last_modified' in outrecord 39 | last_modified_in_record = outrecord['_last_modified'] 40 | t2 = datetime.strptime(last_modified_in_record, r"%Y%m%d%H%M%S") 41 | difference = t2 - t1 42 | print last_modified_in_record, t1, t2, difference 43 | assert difference.seconds < 1 44 | 45 | def test_02_collection(self): 46 | label = u'My Collection' 47 | slug = util.slugify(label) 48 | colldict = { 49 | 'label': label, 50 | 'slug': slug, 51 | 'owner': Fixtures.account.id 52 | } 53 | coll = dao.Collection.upsert(colldict) 54 | assert coll.id, coll 55 | assert coll['label'] == label 56 | # should only be one collection for this account so this is ok 57 | account_colls = Fixtures.account.collections 58 | assert coll.id == account_colls[0].id, account_colls 59 | 60 | def test_making_ids(self): 61 | recdict1 = fixtures['records'][0].copy() 62 | del recdict1['_id'] 63 | recdict2 = recdict1.copy() 64 | recdict3 = recdict1.copy() 65 | recdict3['foobar'] = 'baz' 66 | a = dao.make_id(recdict1) 67 | b = dao.make_id(recdict3) 68 | print a 69 | print b 70 | assert a != b 71 | record1 = dao.Record.upsert(recdict1) 72 | record2 = dao.Record.upsert(recdict2) 73 | record3 = dao.Record.upsert(recdict3) 74 | print record1, '*'*5 75 | print record2, '*'*5 76 | print record3, '*'*5 77 | assert record1['_id'] == record2['_id'] 78 | assert record1['_id'] != record3['_id'] 79 | 80 | class TestDAOQuery: 81 | @classmethod 82 | def setup_class(cls): 83 | for rec in fixtures['records']: 84 | dao.Record.upsert(rec) 85 | 86 | @classmethod 87 | def teardown_class(cls): 88 | conn, db = dao.get_conn() 89 | conn.delete_index(TESTDB) 90 | 91 | def test_query(self): 92 | out = dao.Record.query() 93 | assert out.total == 2 94 | 95 | def test_query_size(self): 96 | out = dao.Record.query(size=1) 97 | assert out.total == 2 98 | assert_equal(len(out), 1) 99 | 100 | def test_query_facet(self): 101 | facet_fields = [{'key':'type'}] 102 | out = dao.Record.query(facet_fields=facet_fields) 103 | print pprint.pprint(out) 104 | facetterms = out['facets']['type']['terms'] 105 | assert len(facetterms) == 2 106 | assert facetterms[0]['term'] == 'book' 107 | assert facetterms[0]['count'] == 1 108 | 109 | def test_query_term(self): 110 | out = 
dao.Record.query(terms={'type': ['book']}) 111 | assert_equal(out.total, 1) 112 | 113 | -------------------------------------------------------------------------------- /test/test_web.py: -------------------------------------------------------------------------------- 1 | from nose.tools import assert_equal 2 | import urllib 3 | from base import * 4 | from bibserver import web, ingest 5 | import os 6 | 7 | 8 | class TestWeb(object): 9 | @classmethod 10 | def setup_class(cls): 11 | web.app.config['TESTING'] = True 12 | cls.app = web.app.test_client() 13 | # fixture data 14 | recdict = fixtures['records'][0] 15 | cls.record = dao.Record.upsert(recdict) 16 | Fixtures.create_account() 17 | config['download_cache_directory'] = 'test/data/downloads' 18 | ingest.init() 19 | 20 | @classmethod 21 | def teardown_class(cls): 22 | conn, db = dao.get_conn() 23 | conn.delete_index(TESTDB) 24 | for x in os.listdir('test/data/downloads'): 25 | os.unlink(os.path.join('test/data/downloads', x)) 26 | os.rmdir('test/data/downloads') 27 | 28 | def test_home(self): 29 | res = self.app.get('/') 30 | assert 'BibSoup' in res.data, res.data 31 | 32 | def test_faq(self): 33 | res = self.app.get('/faq') 34 | assert 'This service is an example' in res.data, res.data 35 | 36 | def test_record(self): 37 | res = self.app.get('/' + Fixtures.account.id + '/' + self.record["collection"] + '/' + self.record["_id"] + '.json') 38 | assert res.status == '200 OK', res.status 39 | out = json.loads(res.data) 40 | assert out["id"] == self.record["id"], out 41 | 42 | def test_upload(self): 43 | res = self.app.get('/upload') 44 | print res.status 45 | assert res.status == '302 FOUND', res.status 46 | res = self.app.get('/upload', 47 | headers={'REMOTE_USER': Fixtures.account.id} 48 | ) 49 | assert res.status == '200 OK', res.status 50 | assert 'upload' in res.data, res.data 51 | 52 | def test_upload_post(self): 53 | startnum = dao.Record.query().total 54 | res = self.app.post('/upload?format=bibtex&collection='+urllib.quote_plus('"My Test Collection"'), 55 | data = {'upfile': (open('test/data/sample.bibtex'), 'sample.bibtex')}, 56 | headers={'REMOTE_USER': Fixtures.account.id} 57 | ) 58 | assert res.status == '302 FOUND', res.status 59 | # Now we have to trigger the ingest handling of the ticket 60 | # which is normally done asynchronously 61 | for state in ('new', 'downloaded', 'parsed'): 62 | for t in ingest.get_tickets(state): 63 | ingest.determine_action(t) 64 | 65 | endnum = dao.Record.query().total 66 | assert_equal(endnum, startnum+1) 67 | 68 | # TODO: re-enable 69 | # This does not work because login in the previous method appears to 70 | # persist to this method. Not sure how to fix this ... 
71 | def _test_upload_post_401(self): 72 | bibtex_data = open('test/data/sample.bibtex').read() 73 | res = self.app.post('/upload', 74 | data=dict( 75 | format='bibtex', 76 | collection='My Test Collection', 77 | data=bibtex_data, 78 | ) 79 | ) 80 | assert res.status == '401 UNAUTHORIZED', res.status 81 | 82 | def test_query(self): 83 | res = self.app.get('/query') 84 | assert res.status == '200 OK', res.status 85 | 86 | res = self.app.get('/query?q=title:non-existent') 87 | assert res.status == '200 OK', res.status 88 | out = json.loads(res.data) 89 | assert out.total == 0, out 90 | 91 | def test_accounts_query_inaccessible(self): 92 | res = self.app.get('/query/account') 93 | assert res.status == '401 UNAUTHORIZED', res.status 94 | 95 | def test_search(self): 96 | res = self.app.get('/search?q=tolstoy&format=json') 97 | assert res.status == '200 OK', res.status 98 | out = json.loads(res.data) 99 | assert len(out) == 1, out 100 | assert "Tolstoy" in out[0]["author"][0]["name"], out 101 | 102 | 103 | 104 | -------------------------------------------------------------------------------- /bibserver/templates/home/index.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block content %} 4 | 5 | 59 | 60 | 61 | 62 |
63 |
64 |

Welcome to BibSoup!

65 |
66 |
67 |

BibSoup makes it easy to find, manage and share bibliographies.

68 | Learn more » 69 |
70 |
71 | 72 |
73 |
74 |
75 |

We have {{records}} records 76 |

across {{colls}} collections 77 |

shared by {{users}} users

78 |
79 |
80 |

Search all records

81 |

Search shared collections and records, find material relevant to your interests, make new collections.

82 |

Search everything »

83 |
84 |
85 |

Create collections

86 |

upload from your own source, or create a new collection.

87 |
88 |
89 | 90 |
91 |
92 |

Browse recent Collections

93 |
94 |

Browse all collections »

95 |
96 |
97 | 98 |
99 | {% endblock %} 100 | -------------------------------------------------------------------------------- /doc/api.rst: -------------------------------------------------------------------------------- 1 | =========== 2 | The web API 3 | =========== 4 | 5 | 6 | web.py and search.py 7 | ==================== 8 | 9 | Detail the workings of the web.py file. 10 | 11 | Explain the search.py file too, which is used by web.py extensively. 12 | 13 | 14 | URL routes 15 | ========== 16 | 17 | / 18 | - 19 | 20 | methods GET 21 | 22 | returns HTML 23 | 24 | The front page of the web service. 25 | 26 | /faq 27 | ---- 28 | 29 | methods GET 30 | 31 | returns HTML 32 | 33 | The frequently asked questions page of the web service. 34 | 35 | /account 36 | -------- 37 | 38 | * /account/register 39 | * /account/login 40 | * /account/logout 41 | 42 | returns HTML 43 | 44 | Esed for account creation via a web browser, although it would be 45 | possible to register a new account by POSTing to the registration endpoint. 46 | 47 | 48 | /query 49 | ------ 50 | 51 | * /query/record 52 | * /query/collection 53 | 54 | methods GET or POST 55 | 56 | returns JSON 57 | 58 | Exposes the query endpoints of the elasticsearch backend indices, so 59 | queries can be issued against it directly. 60 | 61 | For exmaple /query/record/_search?q=* will return all records in the standard 62 | ten results at a time. 63 | 64 | /users 65 | ------ 66 | 67 | requires authorisation 68 | 69 | methods GET 70 | 71 | returns HTML or JSON 72 | 73 | Provides a list of users 74 | 75 | /collections 76 | ------------ 77 | 78 | * /collections/ 79 | * /collections// 80 | 81 | methods GET 82 | 83 | returns HTML or JSON 84 | 85 | Provides a list of all collections, a list of collections for a user, or 86 | a particular collection for a user. 87 | 88 | /upload 89 | ------- 90 | 91 | methods GET (to return the browser form) or POST 92 | 93 | requires authorisation 94 | 95 | LIST THE PARAMS 96 | 97 | For uploading from source files into collections. 98 | 99 | /create 100 | ------- 101 | 102 | not implemented 103 | 104 | For creating new records. 105 | 106 | / 107 | ----------- 108 | 109 | * //collections 110 | * // 111 | * /// 112 | 113 | methods GET or POST or DELETE 114 | 115 | returns HTML or JSON 116 | 117 | requires authorisation for retrieval of user data via GET, and for POSTs 118 | 119 | List as JSON all collections for a given user (same as /collections/). 120 | 121 | Access information about a user, a collection, or a record. 122 | Update the records by POSTing updated versions. 123 | Delete the users, collections, records by DELETE. 124 | 125 | // 126 | -------------------------------- 127 | 128 | methods GET 129 | 130 | returns HTML or JSON 131 | 132 | Anything that is not matched to a known endpoint but that can be matched to a 133 | key in the record index will cause an attempt to interpret it as an implicit 134 | value. For example, /year/2012 will attempt to return all records in the index 135 | (therefore across all collections) that have a a key called "year" where the 136 | value equals "2012". 137 | 138 | /search 139 | ------- 140 | 141 | methods GET 142 | 143 | returns HTML or JSON 144 | 145 | The search endpoint allows for search across the full record index of the instance. 146 | 147 | / 148 | ----------- 149 | 150 | Any route that cannot be matched to any previous endpoint, including an implicit 151 | facet, performs the same as the /search endpoint. 
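By way of illustration, here is a hedged sketch of exercising these routes from Python. The host is assumed to be a local development instance, and the exact JSON returned by the /query passthrough is whatever the elasticsearch backend produces::

    import json
    import urllib2

    base = 'http://localhost:5000'  # assumption: a local development instance

    # full-text search across all records, asking for JSON rather than HTML
    response = urllib2.urlopen(base + '/search?q=tolstoy&format=json')
    for record in json.loads(response.read()):
        print record.get('title')

    # pass a query straight through to the elasticsearch record index;
    # the response keeps elasticsearch's usual hits/facets structure
    response = urllib2.urlopen(base + '/query/record/_search?q=*')
    print json.loads(response.read())['hits']['total']

Write operations (for example POSTs to /upload) additionally need the account api_key described in the next section.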
152 | 153 | 154 | Programmatic access 155 | =================== 156 | 157 | The API endpoints can be queried either via a web browser - which defaults to 158 | providing HTML output, of course - or programmatically by requests e.g. via cURL. 159 | 160 | Requests for access to data operate as usual - but requests to insert data 161 | require authentication; this is achieved via API keys. Every user account has 162 | an API key assigned to it, which can be retrieved from the /username page; it 163 | can then be provided as a parameter to any request that attempts to submit data 164 | into the system - e.g. a request to the /upload endpoint. 165 | 166 | Each endpoint can return HTML or JSON; JSON can be requested either by appending 167 | .json to the URL portion, or adding format=json to the URL parameters, or by 168 | setting the "accept" headers on your request to "application/json". 169 | 170 | Here is an example of retrieving some records from a collection via cURL: 171 | 172 | ADD EXAMPLE 173 | 174 | Here is an example of submitting a new collection via cURL: 175 | 176 | ADD EXAMPLE 177 | 178 | 179 | 180 | 181 | -------------------------------------------------------------------------------- /parserscrapers_plugins/RISParser.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | '''this file to be called from the command line, expects RIS input on stdin. 4 | Returns a list of record dicts, and metadata on stdout 5 | 6 | Details of the RIS format 7 | http://en.wikipedia.org/wiki/RIS_%28file_format%29 8 | ''' 9 | 10 | import chardet 11 | import cStringIO 12 | import sys 13 | import json 14 | 15 | FIELD_MAP = { 16 | "DO": "doi", 17 | "SP": "pages", 18 | "M2": "start page", 19 | "DB": "name of database", 20 | "DA": "date", 21 | "M1": "number", 22 | "M3": "type", 23 | "N1": "notes", 24 | "ST": "short title", 25 | "DP": "database provider", 26 | "CN": "call number", 27 | "IS": "number", 28 | "LB": "label", 29 | "TA": "translated author", 30 | "TY": "type ", 31 | "UR": "url", 32 | "TT": "translated title", 33 | "PY": "year", 34 | "PB": "publisher", 35 | "A3": "tertiary author", 36 | "C8": "custom 8", 37 | "A4": "subsidiary author", 38 | "TI": "title", 39 | "C3": "custom 3", 40 | "C2": "pmcid", 41 | "C1": "note", 42 | "C7": "custom 7", 43 | "C6": "nihmsid", 44 | "C5": "custom 5", 45 | "C4": "custom 4", 46 | "AB": "note", 47 | "AD": "institution", 48 | "VL": "volume", 49 | "CA": "caption", 50 | "T2": "secondary title", 51 | "T3": "tertiary title", 52 | "AN": "accession number", 53 | "L4": "figure", 54 | "NV": "number of volumes", 55 | "AU": "author", 56 | "RP": "reprint edition", 57 | "L1": "file attachments", 58 | "ET": "epub date", 59 | "A2": "author", 60 | "RN": "note", 61 | "LA": "language", 62 | "CY": "place published", 63 | "J2": "alternate title", 64 | "RI": "reviewed item", 65 | "KW": "keyword", 66 | "SN": "issn", 67 | "Y2": "access date", 68 | "SE": "section", 69 | "OP": "original publication", 70 | "JF": "journal", 71 | } 72 | 73 | VALUE_MAP = { 74 | 'AU': lambda v: [{u'name': vv.decode('utf8')} for vv in v] 75 | } 76 | 77 | 78 | def DEFAULT_VALUE_FUNC(v): 79 | return u' '.join(vv.decode('utf8') for vv in v) 80 | 81 | 82 | class RISParser(object): 83 | 84 | def __init__(self, fileobj): 85 | 86 | data = fileobj.read() 87 | self.encoding = chardet.detect(data).get('encoding', 'ascii') 88 | 89 | # Some files have Byte-order marks inserted at the start 90 | if data[:3] == '\xef\xbb\xbf': 91 | data = data[3:] 92 | self.fileobj 
= cStringIO.StringIO(data) 93 | self.data = [] 94 | 95 | def add_chunk(self, chunk): 96 | if not chunk: 97 | return 98 | tmp = {} 99 | for k, v in chunk.items(): 100 | tmp[FIELD_MAP.get(k, k)] = VALUE_MAP.get(k, DEFAULT_VALUE_FUNC)(v) 101 | self.data.append(tmp) 102 | 103 | def parse(self): 104 | self.data = [] 105 | chunk = {} 106 | last_field = None 107 | for line in self.fileobj: 108 | if line.startswith(' ') and last_field: 109 | chunk.setdefault(last_field, []).append(line.strip()) 110 | continue 111 | line = line.strip() 112 | if not line: 113 | continue 114 | parts = line.split(' - ') 115 | if len(parts) < 2: 116 | continue 117 | field = parts[0] 118 | last_field = field 119 | if field == 'TY': 120 | self.add_chunk(chunk) 121 | chunk = {} 122 | value = ' - '.join(parts[1:]) 123 | if value: 124 | chunk.setdefault(field, []).append(value) 125 | self.add_chunk(chunk) 126 | return self.data, {} 127 | 128 | 129 | def parse(): 130 | parser = RISParser(sys.stdin) 131 | records, metadata = parser.parse() 132 | if len(records) > 0: 133 | sys.stdout.write(json.dumps( 134 | {'records': records, 'metadata': metadata} 135 | )) 136 | else: 137 | sys.stderr.write('Zero records were parsed from the data') 138 | 139 | 140 | def main(): 141 | conf = {"display_name": "RIS", 142 | "format": "ris", 143 | "contact": "openbiblio-dev@lists.okfn.org", 144 | "bibserver_plugin": True, 145 | "BibJSON_version": "0.81"} 146 | for x in sys.argv[1:]: 147 | if x == '-bibserver': 148 | sys.stdout.write(json.dumps(conf)) 149 | sys.exit() 150 | parse() 151 | 152 | 153 | if __name__ == '__main__': 154 | main() 155 | -------------------------------------------------------------------------------- /parserscrapers_plugins/wikipedia.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | Wikipedia search to citations parser 5 | Reads a query term on stdin 6 | ''' 7 | 8 | import sys 9 | import re 10 | import json 11 | import urllib 12 | import urllib2 13 | import httplib 14 | import traceback 15 | 16 | 17 | def repl(matchobj): 18 | return matchobj.group(0) 19 | 20 | 21 | def wikitext_to_dict(txt): 22 | buf = [] 23 | for c in re.findall('{{Citation |cite journal(.*?)}}', txt): 24 | if c.strip().startswith('needed'): 25 | continue 26 | c = re.sub('{{.*?|.*?|(.*?)}}', repl, c) 27 | tmp = {} 28 | for cc in c.split('|'): 29 | ccc = cc.strip().split('=') 30 | if len(ccc) == 2: 31 | tmp[ccc[0].strip()] = ccc[1].strip() 32 | if tmp: 33 | if 'author' in tmp: 34 | auth_string = tmp['author'].split(',') 35 | tmp['author'] = [] 36 | for au in auth_string: 37 | au = au.strip() 38 | if au.startswith('and '): 39 | au = au[4:] 40 | tmp.setdefault('author', []).append({'name': au}) 41 | name = '%s %s' % (tmp.get('first', ''), tmp.get('last', '')) 42 | if name.strip(): 43 | tmp.setdefault('author', []).append({'name': name}) 44 | if 'journal' in tmp: 45 | tmp['journal'] = {'name': tmp['journal']} 46 | buf.append(tmp) 47 | return buf 48 | 49 | 50 | def parse(local_cache): 51 | q = sys.stdin.read() 52 | URL = 'http://en.wikipedia.org/w/api.php?action=query&list=search&srlimit=50&srprop=wordcount&format=json&srsearch=' # noqa: E501 (URL) 53 | URLraw = 'http://en.wikipedia.org/w/index.php?action=raw&title=' 54 | data_json = False 55 | if local_cache: 56 | try: 57 | cached_data = json.loads(open('wikipedia.py.data').read()) 58 | data_json = cached_data.get('data1', {}) 59 | except IOError: 60 | cached_data = {'data1': {}, 'data2': {}} 61 | if not data_json: 62 | data = 
urllib2.urlopen(URL+urllib.quote_plus(q)).read() 63 | data_json = json.loads(data) 64 | if local_cache: 65 | cached_data['data1'] = data_json 66 | records = [] 67 | 68 | try: 69 | search_result = data_json.get("query") 70 | if not search_result: 71 | search_result = data_json.get("query-continue", {"search": []}) 72 | for x in search_result["search"]: 73 | if x['wordcount'] > 20: 74 | quoted_title = urllib.quote_plus(x['title'].encode('utf8')) 75 | try: 76 | title_data = None 77 | if local_cache: 78 | title_data = cached_data.get('data2', {}).get(quoted_title) # noqa E501 79 | if title_data is None: 80 | title_data = urllib2.urlopen(URLraw+quoted_title).read() # noqa E501 81 | if local_cache: 82 | cached_data.setdefault('data2', {})[quoted_title] = title_data # noqa E501 83 | except httplib.BadStatusLine: 84 | sys.stderr.write('Problem reading %s\n' % (URLraw + quoted_title)) # noqa E501 85 | continue 86 | citations = wikitext_to_dict(title_data) 87 | if citations: 88 | for c in citations: 89 | c['link'] = [ 90 | {'url': 'http://en.wikipedia.org/wiki/' + quoted_title} # noqa E501 91 | ] 92 | records.extend(citations) 93 | except: # noqa E722 94 | sys.stderr.write(traceback.format_exc()) 95 | sys.stdout.write(json.dumps({'records': records, 'metadata': {}})) 96 | if local_cache: 97 | open('wikipedia.py.data', 'w').write(json.dumps(cached_data)) 98 | 99 | 100 | def main(): 101 | conf = {"display_name": "Wikipedia search to citations", 102 | "format": "wikipedia", 103 | "downloads": True, 104 | "contact": "openbiblio-dev@lists.okfn.org", 105 | "bibserver_plugin": True, 106 | "BibJSON_version": "0.81"} 107 | local_cache = False 108 | for x in sys.argv[1:]: 109 | if x == '-bibserver': 110 | sys.stdout.write(json.dumps(conf)) 111 | sys.exit() 112 | elif x == '-cache': 113 | local_cache = True 114 | parse(local_cache) 115 | 116 | 117 | if __name__ == '__main__': 118 | main() 119 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | FacetView 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 53 | 54 | 55 | 56 | 57 | 73 | 74 |
75 |
76 | 81 |

82 | FacetView is a pure javascript frontend for ElasticSearch or SOLR 83 | search indices. 84 |

85 |

86 | It lets you easily embed a faceted browser and search front end into any web page. It also provides a micro-framework you can build on when creating user interfaces to SOLR and ElasticSearch. 87 |

88 | 89 |

Demo

90 |
91 | 92 | 118 | 119 |

Use It Yourself

120 |

See the README on the GitHub repo.

121 | 122 |
123 | 124 |
125 |

© Open Knowledge Foundation and Cottage Labs 2011. Openly Licensed.

127 |
128 |
129 | 130 | 131 | -------------------------------------------------------------------------------- /bibserver/templates/base.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | BibSoup 6 | 7 | 8 | 9 | 10 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 80 | 81 | 82 |
83 | 84 |
85 | {% with messages = get_flashed_messages(with_categories=True) %} 86 | {%if messages %} 87 | {% for category, message in messages %} 88 |
89 | × 90 | {{ message }} 91 |
92 | {% endfor %} 93 | {% endif %} 94 | {% endwith %} 95 |
96 | 97 |
98 | {% block content %}{% endblock %} 99 |
100 | 101 | 123 | 124 |
125 | 126 | 127 | 128 | -------------------------------------------------------------------------------- /doc/deploy.rst: -------------------------------------------------------------------------------- 1 | .. _deploy: 2 | 3 | ========== 4 | Deployment 5 | ========== 6 | 7 | Pre-requisites 8 | ============== 9 | 10 | This example is for installing bibserver to run bibsoup.net, but applies to 11 | other instances - just change relevant parts e.g. domain name and so on. 12 | 13 | These instructions work on an ubuntu / debian machine, and explain how to get a 14 | stable deployment using: 15 | 16 | * git (to get latest copy of code) 17 | * nginx (the web server that proxies to the web app) 18 | * python2.7+, pip, virtualenv (required to run the app) 19 | * gunicorn (runs the web app that receives the proxy from nginx) 20 | * supervisord (keeps everything up and running) 21 | 22 | 23 | nginx config 24 | ============ 25 | 26 | Create an nginx site config named e.g. bibsoup.net 27 | default location is /etc/nginx/sites-available 28 | (for OKF machines should be in ~/etc/nginx/sites-available then symlinked) 29 | then symlink from /etc/nginx/sites-enabled 30 | 31 | upstream bibsoup_app_server { 32 | server 127.0.0.1:5050 fail_timeout=0; 33 | } 34 | 35 | server { 36 | server_name bibsoup.net; 37 | 38 | access_log /var/log/nginx/bibsoup.net.access.log; 39 | 40 | server_name_in_redirect off; 41 | 42 | client_max_body_size 20M; 43 | 44 | location / { 45 | ## straight-forward proxy 46 | proxy_redirect off; 47 | proxy_connect_timeout 75s; 48 | proxy_read_timeout 180s; 49 | proxy_set_header Host $host; 50 | proxy_set_header X-Real-IP $remote_addr; 51 | proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; 52 | 53 | proxy_pass http://bibsoup_app_server; 54 | } 55 | } 56 | 57 | 58 | supervisord config 59 | ================== 60 | 61 | Create a supervisord config named e.g. bibsoup.net.conf 62 | - the default location for this is /etc/supervisor/conf.d 63 | (for OKF machines, should be put in ~/etc/supervisor/conf.d then symlinked) 64 | 65 | [program:bibsoup.net] 66 | command=/home/okfn/var/srvc/%(program_name)s/bin/gunicorn -w 4 -b 127.0.0.1:5050 bibserver.web:app 67 | user=www-data 68 | directory=/home/okfn/var/srvc/%(program_name)s/src/bibserver 69 | stdout_logfile=/var/log/supervisor/%(program_name)s-access.log 70 | stderr_logfile=/var/log/supervisor/%(program_name)s-error.log 71 | autostart=true 72 | 73 | 74 | Install bibserver 75 | ================= 76 | 77 | Create a virtualenv and get the latest bibserver code installed. 78 | Bibserver requires python2.7+ so make sure that is available on your system, 79 | then start a virtualenv to run it in 80 | 81 | virtualenv -p python2.7 bibsoup.net --no-site-packages 82 | cd bibsoup.net 83 | mkdir src 84 | cd bin 85 | source activate 86 | cd ../src 87 | git clone https://github.com/okfn/bibserver 88 | cd bibserver 89 | python setup.py install 90 | 91 | 92 | Currently, setup.py install does not result in running system because 93 | config.json cannot be found. So, do dev install. This will be fixed asap 94 | 95 | pip install -e . 
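A quick way to confirm the editable install worked (a minimal check, assuming the
virtualenv created above is still active) is to import the package:

    python -c "import bibserver"

If this exits without error, the code and its dependencies are importable and you
can move on to the next step.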
96 | 97 | 98 | Then install gunicorn into the virtualenv 99 | 100 | pip install gunicorn 101 | 102 | 103 | Now create a local_config.json with details as necessary 104 | for example check the ES index you with to use for this instance (default is bibserver) 105 | 106 | { 107 | "debug": false, 108 | "port": 5050, 109 | "ELASTIC_SEARCH_DB" : "bibserver_something", 110 | "ELASTIC_SEARCH_HOST" : "localhost:9200" 111 | } 112 | 113 | 114 | Now run bibserver directly to check it is working 115 | - this requires elasticsearch to be up and running, as it attempts to create indices. 116 | 117 | If it works, you should see confirmation of creation of the index and the mappings; 118 | if all good, kill it and move on. If not, debug the issues. 119 | 120 | python bibserver/web.py 121 | 122 | 123 | If the above step failed to push the mappings, you can do so manually. 124 | A command such as the following, augmented for your ES index URL and your index name, 125 | should do the job for you (default mappings are in config.json) 126 | (remember to do record and collection) 127 | 128 | curl -X PUT localhost:9200/bibserver/record/_mapping -d '{ 129 | "record" : { 130 | "date_detection" : false, 131 | "dynamic_templates" : [ 132 | { 133 | "default" : { 134 | "match" : "*", 135 | "match_mapping_type": "string", 136 | "mapping" : { 137 | "type" : "multi_field", 138 | "fields" : { 139 | "{name}" : {"type" : "{dynamic_type}", "index" : "analyzed", "store" : "no"}, 140 | "exact" : {"type" : "{dynamic_type}", "index" : "not_analyzed", "store" : "yes"} 141 | } 142 | } 143 | } 144 | } 145 | ] 146 | } 147 | }' 148 | 149 | 150 | Enable everything 151 | ================= 152 | 153 | In the case of OKF service deployment, make symbolic links from the supervisor 154 | and nginx files which should be in the ~/etc folder into the /etc/nginx/sites-available 155 | and /etc/supervisor/conf.d folders, then make symbolic link from /etc/nginx/sites-available 156 | into /etc/nginx/sites-enabled - if you do not use this pattern, just put the config 157 | directly in /etc/nginx/sites-available and symlink from there into sites-enabled 158 | 159 | cd /etc/nginx/sites-available 160 | ln -s ~/etc/nginx/sites-available/bibsoup.net . 161 | cd /etc/supervisor/conf.d 162 | ln -s ~/etc/supervisor/conf.d/bibsoup.net.conf . 163 | 164 | 165 | Then enable the new nginx and supervisor settings 166 | 167 | cd /etc/nginx/sites-enabled 168 | ln -s ../sites-available/bibsoup.net . 169 | /etc/init.d/nginx reload 170 | supervisorctl reread 171 | supervisorctl update 172 | 173 | 174 | Configure your domain name to point at your server, and it should work. 175 | 176 | 177 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | BUILDDIR = _build 9 | 10 | # Internal variables. 11 | PAPEROPT_a4 = -D latex_paper_size=a4 12 | PAPEROPT_letter = -D latex_paper_size=letter 13 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 14 | # the i18n builder cannot share the environment and doctrees with the others 15 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
16 | 17 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 18 | 19 | help: 20 | @echo "Please use \`make ' where is one of" 21 | @echo " html to make standalone HTML files" 22 | @echo " dirhtml to make HTML files named index.html in directories" 23 | @echo " singlehtml to make a single large HTML file" 24 | @echo " pickle to make pickle files" 25 | @echo " json to make JSON files" 26 | @echo " htmlhelp to make HTML files and a HTML help project" 27 | @echo " qthelp to make HTML files and a qthelp project" 28 | @echo " devhelp to make HTML files and a Devhelp project" 29 | @echo " epub to make an epub" 30 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 31 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 32 | @echo " text to make text files" 33 | @echo " man to make manual pages" 34 | @echo " texinfo to make Texinfo files" 35 | @echo " info to make Texinfo files and run them through makeinfo" 36 | @echo " gettext to make PO message catalogs" 37 | @echo " changes to make an overview of all changed/added/deprecated items" 38 | @echo " linkcheck to check all external links for integrity" 39 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 40 | 41 | clean: 42 | -rm -rf $(BUILDDIR)/* 43 | 44 | html: 45 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 46 | @echo 47 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 48 | 49 | dirhtml: 50 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 51 | @echo 52 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 53 | 54 | singlehtml: 55 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 56 | @echo 57 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 58 | 59 | pickle: 60 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 61 | @echo 62 | @echo "Build finished; now you can process the pickle files." 63 | 64 | json: 65 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 66 | @echo 67 | @echo "Build finished; now you can process the JSON files." 68 | 69 | htmlhelp: 70 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 71 | @echo 72 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 73 | ".hhp project file in $(BUILDDIR)/htmlhelp." 74 | 75 | qthelp: 76 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 77 | @echo 78 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 79 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 80 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/BibServer.qhcp" 81 | @echo "To view the help file:" 82 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/BibServer.qhc" 83 | 84 | devhelp: 85 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 86 | @echo 87 | @echo "Build finished." 88 | @echo "To view the help file:" 89 | @echo "# mkdir -p $$HOME/.local/share/devhelp/BibServer" 90 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/BibServer" 91 | @echo "# devhelp" 92 | 93 | epub: 94 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 95 | @echo 96 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 97 | 98 | latex: 99 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 100 | @echo 101 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 
102 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 103 | "(use \`make latexpdf' here to do that automatically)." 104 | 105 | latexpdf: 106 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 107 | @echo "Running LaTeX files through pdflatex..." 108 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 109 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 110 | 111 | text: 112 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 113 | @echo 114 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 115 | 116 | man: 117 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 118 | @echo 119 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 120 | 121 | texinfo: 122 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 123 | @echo 124 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 125 | @echo "Run \`make' in that directory to run these through makeinfo" \ 126 | "(use \`make info' here to do that automatically)." 127 | 128 | info: 129 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 130 | @echo "Running Texinfo files through makeinfo..." 131 | make -C $(BUILDDIR)/texinfo info 132 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 133 | 134 | gettext: 135 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 136 | @echo 137 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 138 | 139 | changes: 140 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 141 | @echo 142 | @echo "The overview file is in $(BUILDDIR)/changes." 143 | 144 | linkcheck: 145 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 146 | @echo 147 | @echo "Link check complete; look for any errors in the above output " \ 148 | "or in $(BUILDDIR)/linkcheck/output.txt." 149 | 150 | doctest: 151 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 152 | @echo "Testing of doctests in the sources finished, look at the " \ 153 | "results in $(BUILDDIR)/doctest/output.txt." 154 | -------------------------------------------------------------------------------- /bibserver/templates/record.html: -------------------------------------------------------------------------------- 1 | {% extends "/base.html" %} 2 | 3 | {% block content %} 4 | 5 | {% if multiple %} 6 |
7 |
8 |

You have requested a record based on the identifier it has in its collection. 9 | Unfortunately, there are multiple records in the collection with the same identifier. 10 | If you are the collection owner, you should fix this (every record should have a 11 | unique identifier within your collection).

12 |

However, we allocate unique identifiers to each record ourselves, too. So, you 13 | can choose which record you would like to view and access it via the unique 14 | identifier we assigned it:

15 |
    16 | {% for item in multiple %} 17 |
  • {{item['_id']}}
  • 18 | {% endfor %} 19 |
20 |
21 |
22 | 23 | {% else %} 24 | 25 | 91 | 92 |
93 | 99 |
100 | 101 |
102 |
103 | {% if prettyrecord %} 104 | {{ prettyrecord | safe }} 105 | {% endif %} 106 |
107 |
108 |
109 |
110 | 111 |
112 | 113 |
114 | {% if admin %} 115 |

Edit this record

116 | {% endif %} 117 |
118 | 119 |
120 | {{ servicecore | safe }} 121 | {% if notes|length != 0 %} 122 | {% if admin %}

Notes

{% endif %} 123 | {% for item in notes %} 124 | {% if item['public'] %} 125 | 126 | {% else %} 127 | {% if admin %} 128 | 129 | {% endif %} 130 | {% endif %} 131 | {% endfor %} 132 | {% endif %} 133 | {% if admin %}
{{ item['content'] }}
{{ item['content'] }}

add a note



{% endif %} 134 | 135 | {% if mlt|length != 0 %} 136 |

More like this

137 | {% for item in mlt %} 138 | 139 | {% endfor %} 140 |
{{ item['title'] }}
141 | {% endif %} 142 |
143 | 144 |
145 | 146 | {% endif %} 147 | 148 | {% endblock %} 149 | 150 | -------------------------------------------------------------------------------- /bibserver/static/vendor/jtedit/vendor/jquery.autoResize.js: -------------------------------------------------------------------------------- 1 | /* 2 | * jQuery.fn.autoResize 1.13 3 | * -- 4 | * https://github.com/jamespadolsey/jQuery.fn.autoResize 5 | * -- 6 | * This program is free software. It comes without any warranty, to 7 | * the extent permitted by applicable law. You can redistribute it 8 | * and/or modify it under the terms of the Do What The Fuck You Want 9 | * To Public License, Version 2, as published by Sam Hocevar. See 10 | * http://sam.zoy.org/wtfpl/COPYING for more details. */ 11 | 12 | (function($){ 13 | 14 | var uid = 'ar' + +new Date, 15 | 16 | defaults = autoResize.defaults = { 17 | onResize: function(){}, 18 | animate: { 19 | duration: 200, 20 | complete: function(){} 21 | }, 22 | extraSpace: 50, 23 | minHeight: 'original', 24 | maxHeight: 500, 25 | minWidth: 'original', 26 | maxWidth: 500 27 | }; 28 | 29 | autoResize.cloneCSSProperties = [ 30 | 'lineHeight', 'textDecoration', 'letterSpacing', 31 | 'fontSize', 'fontFamily', 'fontStyle', 'fontWeight', 32 | 'textTransform', 'textAlign', 'direction', 'wordSpacing', 'fontSizeAdjust', 33 | 'padding', 'width' 34 | ]; 35 | 36 | autoResize.cloneCSSValues = { 37 | position: 'absolute', 38 | top: -9999, 39 | left: -9999, 40 | opacity: 0, 41 | overflow: 'hidden' 42 | }; 43 | 44 | autoResize.resizableFilterSelector = [ 45 | 'textarea:not(textarea.' + uid + ')', 46 | 'input:not(input[type])', 47 | 'input[type=text]', 48 | 'input[type=password]' 49 | ].join(','); 50 | 51 | autoResize.AutoResizer = AutoResizer; 52 | 53 | $.fn.autoResize = autoResize; 54 | 55 | function autoResize(config) { 56 | this.filter(autoResize.resizableFilterSelector).each(function(){ 57 | new AutoResizer( $(this), config ); 58 | }); 59 | return this; 60 | } 61 | 62 | function AutoResizer(el, config) { 63 | 64 | if (el.data('AutoResizer')) { 65 | el.data('AutoResizer').destroy(); 66 | } 67 | 68 | config = this.config = $.extend({}, autoResize.defaults, config); 69 | this.el = el; 70 | 71 | this.nodeName = el[0].nodeName.toLowerCase(); 72 | 73 | this.originalHeight = el.height(); 74 | this.previousScrollTop = null; 75 | 76 | this.value = el.val(); 77 | 78 | if (config.maxWidth === 'original') config.maxWidth = el.width(); 79 | if (config.minWidth === 'original') config.minWidth = el.width(); 80 | if (config.maxHeight === 'original') config.maxHeight = el.height(); 81 | if (config.minHeight === 'original') config.minHeight = el.height(); 82 | 83 | if (this.nodeName === 'textarea') { 84 | el.css({ 85 | resize: 'none', 86 | overflowY: 'hidden' 87 | }); 88 | } 89 | 90 | el.data('AutoResizer', this); 91 | 92 | this.createClone(); 93 | this.injectClone(); 94 | this.bind(); 95 | 96 | } 97 | 98 | AutoResizer.prototype = { 99 | 100 | bind: function() { 101 | 102 | var check = $.proxy(function(){ 103 | this.check(); 104 | return true; 105 | }, this); 106 | 107 | this.unbind(); 108 | 109 | this.el 110 | .bind('keyup.autoResize', check) 111 | //.bind('keydown.autoResize', check) 112 | .bind('change.autoResize', check); 113 | 114 | this.check(null, true); 115 | 116 | }, 117 | 118 | unbind: function() { 119 | this.el.unbind('.autoResize'); 120 | }, 121 | 122 | createClone: function() { 123 | 124 | var el = this.el, 125 | clone = this.nodeName === 'textarea' ? 
el.clone() : $(''); 126 | 127 | this.clone = clone; 128 | 129 | $.each(autoResize.cloneCSSProperties, function(i, p){ 130 | clone[0].style[p] = el.css(p); 131 | }); 132 | 133 | clone 134 | .removeAttr('name') 135 | .removeAttr('id') 136 | .addClass(uid) 137 | .attr('tabIndex', -1) 138 | .css(autoResize.cloneCSSValues); 139 | 140 | if (this.nodeName === 'textarea') { 141 | clone.height('auto'); 142 | } else { 143 | clone.width('auto').css({ 144 | whiteSpace: 'nowrap' 145 | }); 146 | } 147 | 148 | }, 149 | 150 | check: function(e, immediate) { 151 | 152 | var config = this.config, 153 | clone = this.clone, 154 | el = this.el, 155 | value = el.val(); 156 | 157 | if (this.nodeName === 'input') { 158 | 159 | clone.text(value); 160 | 161 | // Calculate new width + whether to change 162 | var cloneWidth = clone.width(), 163 | newWidth = (cloneWidth + config.extraSpace) >= config.minWidth ? 164 | cloneWidth + config.extraSpace : config.minWidth, 165 | currentWidth = el.width(); 166 | 167 | newWidth = Math.min(newWidth, config.maxWidth); 168 | 169 | if ( 170 | (newWidth < currentWidth && newWidth >= config.minWidth) || 171 | (newWidth >= config.minWidth && newWidth <= config.maxWidth) 172 | ) { 173 | 174 | config.onResize.call(el); 175 | 176 | el.scrollLeft(0); 177 | 178 | config.animate && !immediate ? 179 | el.stop(1,1).animate({ 180 | width: newWidth 181 | }, config.animate) 182 | : el.width(newWidth); 183 | 184 | } 185 | 186 | return; 187 | 188 | } 189 | 190 | // TEXTAREA 191 | 192 | clone.width(el.width()).height(0).val(value).scrollTop(10000); 193 | 194 | var scrollTop = clone[0].scrollTop + config.extraSpace; 195 | 196 | // Don't do anything if scrollTop hasen't changed: 197 | if (this.previousScrollTop === scrollTop) { 198 | return; 199 | } 200 | 201 | this.previousScrollTop = scrollTop; 202 | 203 | if (scrollTop >= config.maxHeight) { 204 | el.css('overflowY', ''); 205 | scrollTop = config.maxHeight; 206 | } else { 207 | el.css('overflowY', 'hidden'); 208 | } 209 | 210 | if (scrollTop < config.minHeight) { 211 | scrollTop = config.minHeight; 212 | } 213 | 214 | config.onResize.call(el); 215 | 216 | // Either animate or directly apply height: 217 | config.animate && !immediate ? 218 | el.stop(1,1).animate({ 219 | height: scrollTop 220 | }, config.animate) 221 | : el.height(scrollTop); 222 | 223 | }, 224 | 225 | destroy: function() { 226 | this.unbind(); 227 | this.el.removeData('AutoResizer'); 228 | this.clone.remove(); 229 | delete this.el; 230 | delete this.clone; 231 | }, 232 | 233 | injectClone: function() { 234 | ( 235 | autoResize.cloneContainer || 236 | (autoResize.cloneContainer = $('').appendTo('body')) 237 | ).append(this.clone); 238 | } 239 | 240 | }; 241 | 242 | })(jQuery); 243 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/Makefile: -------------------------------------------------------------------------------- 1 | # See the README for installation instructions. 2 | 3 | NODE_PATH ?= ./node_modules 4 | JS_COMPILER = $(NODE_PATH)/uglify-js/bin/uglifyjs 5 | JS_TESTER = $(NODE_PATH)/vows/bin/vows 6 | 7 | all: \ 8 | d3.js \ 9 | d3.min.js \ 10 | d3.chart.js \ 11 | d3.chart.min.js \ 12 | d3.layout.js \ 13 | d3.layout.min.js \ 14 | d3.csv.js \ 15 | d3.csv.min.js \ 16 | d3.geo.js \ 17 | d3.geo.min.js \ 18 | d3.geom.js \ 19 | d3.geom.min.js \ 20 | d3.time.js \ 21 | d3.time.min.js \ 22 | package.json 23 | 24 | # Modify this rule to build your own custom release. 
25 | # Run `make d3.custom.min.js` to produce the minified version. 26 | 27 | d3.custom.js: \ 28 | d3.js \ 29 | d3.geom.js \ 30 | d3.layout.js 31 | 32 | .INTERMEDIATE d3.js: \ 33 | src/start.js \ 34 | d3.core.js \ 35 | d3.scale.js \ 36 | d3.svg.js \ 37 | d3.behavior.js \ 38 | src/end.js 39 | 40 | d3.core.js: \ 41 | src/compat/date.js \ 42 | src/compat/style.js \ 43 | src/core/core.js \ 44 | src/core/array.js \ 45 | src/core/this.js \ 46 | src/core/functor.js \ 47 | src/core/rebind.js \ 48 | src/core/ascending.js \ 49 | src/core/descending.js \ 50 | src/core/min.js \ 51 | src/core/max.js \ 52 | src/core/sum.js \ 53 | src/core/quantile.js \ 54 | src/core/zip.js \ 55 | src/core/bisect.js \ 56 | src/core/first.js \ 57 | src/core/last.js \ 58 | src/core/nest.js \ 59 | src/core/keys.js \ 60 | src/core/values.js \ 61 | src/core/entries.js \ 62 | src/core/permute.js \ 63 | src/core/merge.js \ 64 | src/core/split.js \ 65 | src/core/collapse.js \ 66 | src/core/range.js \ 67 | src/core/requote.js \ 68 | src/core/round.js \ 69 | src/core/xhr.js \ 70 | src/core/text.js \ 71 | src/core/json.js \ 72 | src/core/html.js \ 73 | src/core/xml.js \ 74 | src/core/ns.js \ 75 | src/core/dispatch.js \ 76 | src/core/format.js \ 77 | src/core/ease.js \ 78 | src/core/event.js \ 79 | src/core/interpolate.js \ 80 | src/core/uninterpolate.js \ 81 | src/core/rgb.js \ 82 | src/core/hsl.js \ 83 | src/core/selection.js \ 84 | src/core/selection-select.js \ 85 | src/core/selection-selectAll.js \ 86 | src/core/selection-attr.js \ 87 | src/core/selection-classed.js \ 88 | src/core/selection-style.js \ 89 | src/core/selection-property.js \ 90 | src/core/selection-text.js \ 91 | src/core/selection-html.js \ 92 | src/core/selection-append.js \ 93 | src/core/selection-insert.js \ 94 | src/core/selection-remove.js \ 95 | src/core/selection-data.js \ 96 | src/core/selection-enter.js \ 97 | src/core/selection-enter-select.js \ 98 | src/core/selection-filter.js \ 99 | src/core/selection-map.js \ 100 | src/core/selection-sort.js \ 101 | src/core/selection-on.js \ 102 | src/core/selection-each.js \ 103 | src/core/selection-call.js \ 104 | src/core/selection-empty.js \ 105 | src/core/selection-node.js \ 106 | src/core/selection-transition.js \ 107 | src/core/selection-root.js \ 108 | src/core/transition.js \ 109 | src/core/transition-select.js \ 110 | src/core/transition-selectAll.js \ 111 | src/core/transition-attr.js \ 112 | src/core/transition-style.js \ 113 | src/core/transition-text.js \ 114 | src/core/transition-remove.js \ 115 | src/core/transition-delay.js \ 116 | src/core/transition-duration.js \ 117 | src/core/transition-each.js \ 118 | src/core/transition-transition.js \ 119 | src/core/timer.js \ 120 | src/core/noop.js 121 | 122 | d3.scale.js: \ 123 | src/scale/scale.js \ 124 | src/scale/nice.js \ 125 | src/scale/linear.js \ 126 | src/scale/bilinear.js \ 127 | src/scale/polylinear.js \ 128 | src/scale/log.js \ 129 | src/scale/pow.js \ 130 | src/scale/sqrt.js \ 131 | src/scale/ordinal.js \ 132 | src/scale/category.js \ 133 | src/scale/quantile.js \ 134 | src/scale/quantize.js 135 | 136 | d3.svg.js: \ 137 | src/svg/svg.js \ 138 | src/svg/arc.js \ 139 | src/svg/line.js \ 140 | src/svg/line-radial.js \ 141 | src/svg/area.js \ 142 | src/svg/area-radial.js \ 143 | src/svg/chord.js \ 144 | src/svg/diagonal.js \ 145 | src/svg/diagonal-radial.js \ 146 | src/svg/mouse.js \ 147 | src/svg/touches.js \ 148 | src/svg/symbol.js \ 149 | src/svg/axis.js 150 | 151 | d3.behavior.js: \ 152 | src/behavior/behavior.js \ 153 | src/behavior/drag.js \ 
154 | src/behavior/zoom.js 155 | 156 | d3.chart.js: \ 157 | src/start.js \ 158 | src/chart/chart.js \ 159 | src/chart/box.js \ 160 | src/chart/bullet.js \ 161 | src/chart/horizon.js \ 162 | src/chart/qq.js \ 163 | src/end.js 164 | 165 | d3.layout.js: \ 166 | src/start.js \ 167 | src/layout/layout.js \ 168 | src/layout/bundle.js \ 169 | src/layout/chord.js \ 170 | src/layout/force.js \ 171 | src/layout/partition.js \ 172 | src/layout/pie.js \ 173 | src/layout/stack.js \ 174 | src/layout/histogram.js \ 175 | src/layout/hierarchy.js \ 176 | src/layout/pack.js \ 177 | src/layout/cluster.js \ 178 | src/layout/tree.js \ 179 | src/layout/treemap.js \ 180 | src/end.js 181 | 182 | d3.geo.js: \ 183 | src/start.js \ 184 | src/geo/geo.js \ 185 | src/geo/azimuthal.js \ 186 | src/geo/albers.js \ 187 | src/geo/bonne.js \ 188 | src/geo/equirectangular.js \ 189 | src/geo/mercator.js \ 190 | src/geo/type.js \ 191 | src/geo/path.js \ 192 | src/geo/bounds.js \ 193 | src/geo/circle.js \ 194 | src/geo/greatArc.js \ 195 | src/geo/greatCircle.js \ 196 | src/end.js 197 | 198 | d3.csv.js: \ 199 | src/start.js \ 200 | src/csv/csv.js \ 201 | src/csv/parse.js \ 202 | src/csv/format.js \ 203 | src/end.js 204 | 205 | d3.time.js: \ 206 | src/start.js \ 207 | src/time/time.js \ 208 | src/time/format.js \ 209 | src/time/format-utc.js \ 210 | src/time/format-iso.js \ 211 | src/time/range.js \ 212 | src/time/second.js \ 213 | src/time/seconds.js \ 214 | src/time/minute.js \ 215 | src/time/minutes.js \ 216 | src/time/hour.js \ 217 | src/time/hours.js \ 218 | src/time/day.js \ 219 | src/time/days.js \ 220 | src/time/week.js \ 221 | src/time/weeks.js \ 222 | src/time/month.js \ 223 | src/time/months.js \ 224 | src/time/year.js \ 225 | src/time/years.js \ 226 | src/time/scale.js \ 227 | src/time/scale-utc.js \ 228 | src/end.js 229 | 230 | d3.geom.js: \ 231 | src/start.js \ 232 | src/geom/geom.js \ 233 | src/geom/contour.js \ 234 | src/geom/hull.js \ 235 | src/geom/polygon.js \ 236 | src/geom/voronoi.js \ 237 | src/geom/delaunay.js \ 238 | src/geom/quadtree.js \ 239 | src/end.js 240 | 241 | test: all 242 | @$(JS_TESTER) 243 | 244 | %.min.js: %.js Makefile 245 | @rm -f $@ 246 | $(JS_COMPILER) < $< > $@ 247 | 248 | d3.js d3%.js: Makefile 249 | @rm -f $@ 250 | cat $(filter %.js,$^) > $@ 251 | @chmod a-w $@ 252 | 253 | install: 254 | mkdir -p node_modules 255 | npm install 256 | 257 | package.json: d3.js 258 | node src/package.js > $@ 259 | 260 | clean: 261 | rm -f d3*.js 262 | -------------------------------------------------------------------------------- /parserscrapers_plugins/NLMXMLParser.py: -------------------------------------------------------------------------------- 1 | from xml.etree.ElementTree import ElementTree 2 | from bibserver.parsers import BaseParser 3 | 4 | '''this file can be called as a module or called directly from the 5 | command line like so: 6 | 7 | python NLMXMLParser.py /path/to/file.xml 8 | 9 | Returns a list of record dicts 10 | Or just parse a record directly like so: 11 | 12 | python NLMXMLParser.py '...' 
13 | 14 | Returns a record dict 15 | ''' 16 | 17 | 18 | class NLMXMLParser(BaseParser): 19 | 20 | def __init__(self, fileobj): 21 | super(NLMXMLParser, self).__init__(fileobj) 22 | 23 | # set which bibjson schema this parser parses to 24 | self.schema = "v0.82" 25 | self.has_metadata = False 26 | self.persons = [] 27 | 28 | self.identifier_types = ["doi", "isbn", "issn"] 29 | 30 | def parse(self): 31 | '''given a fileobject, parse it for NLM XML records, 32 | and pass them to the record parser''' 33 | records = [] 34 | 35 | et = ElementTree() 36 | et.parse(self.fileobj) 37 | 38 | records.append(self.parse_front_matter(et.find('front'))) 39 | 40 | records.extend(self.parse_references(et.findall('back/ref-list/ref'))) 41 | 42 | return records, {"schema": self.schema} 43 | 44 | def parse_front_matter(self, front): 45 | 46 | article_meta = front.find('article-meta') 47 | journal_meta = front.find('journal-meta') 48 | 49 | record = { 50 | 'title': self.get_article_title(article_meta), 51 | 'author': self.get_article_authors(article_meta), 52 | 'year': article_meta.findtext('pub-date/year'), 53 | 'volume': article_meta.findtext('volume'), 54 | 'number': article_meta.findtext('issue'), 55 | 'pages': self.get_page_numbers(article_meta), 56 | 57 | 'journal': self.get_journal_name(journal_meta), 58 | 'publisher': self.get_journal_publisher_name(journal_meta) 59 | } 60 | 61 | doi = front.findtext('article-meta/article-id[@pub-id-type="doi"]') 62 | record['identifiers'] = [ 63 | {'id': doi, 'type': 'doi'} 64 | ] 65 | 66 | return record 67 | 68 | def parse_references(self, ref_list): 69 | records = [] 70 | for ref in ref_list: 71 | records.append(self.parse_reference(ref)) 72 | return records 73 | 74 | def parse_reference(self, reference): 75 | 76 | citation = reference.find('citation') 77 | 78 | if citation.attrib['citation-type'] == 'journal': 79 | record = self.parse_journal(citation) 80 | 81 | elif citation.attrib['citation-type'] == 'other': 82 | record = self.parse_other(citation) 83 | 84 | else: 85 | raise Exception('Unsupported citation type: ' + citation.attrib['citation-type']) # noqa E501 86 | 87 | record['id'] = reference.attrib['id'] 88 | 89 | return record 90 | 91 | def parse_journal(self, citation): 92 | return self.filter_empty_fields({ 93 | 'title': self.get_journal_citation_title(citation), 94 | 'author': self.get_citation_authors(citation), 95 | 'year': citation.findtext('year'), 96 | 'journal': citation.findtext('source'), 97 | 'volume': citation.findtext('volume'), 98 | 'pages': self.get_page_numbers(citation) 99 | }) 100 | 101 | def parse_other(self, citation): 102 | return self.filter_empty_fields({ 103 | 'title': self.get_other_citation_title(citation), 104 | 'booktitle': self.get_other_citation_booktitle(citation), 105 | 'author': self.get_citation_authors(citation), 106 | 'editor': self.get_citation_editors(citation), 107 | 'year': citation.findtext('year'), 108 | 'publisher': self.get_citation_publisher(citation), 109 | 'volume': citation.findtext('volume'), 110 | 'pages': self.get_page_numbers(citation) 111 | }) 112 | 113 | def get_article_title(self, article_meta): 114 | return "".join( 115 | article_meta.find('title-group/article-title').itertext() 116 | ) 117 | 118 | def get_article_authors(self, article_meta): 119 | return self.get_names(article_meta.findall('contrib-group/contrib[@contrib-type="author"]/name')) # noqa E501 120 | 121 | def get_page_numbers(self, context): 122 | if context.find('fpage') is None: 123 | return context.findtext('elocation-id') 124 | elif 
context.find('lpage') is None: 125 | return context.findtext('fpage') 126 | else: 127 | return '%s--%s' % (context.findtext('fpage'), context.findtext('lpage')) # noqa E501 128 | 129 | def get_journal_citation_title(self, citation): 130 | if citation.find('article-title') is None: 131 | return None 132 | else: 133 | return "".join(citation.find('article-title').itertext()) 134 | 135 | def get_other_citation_title(self, citation): 136 | if citation.find('article-title') is None: 137 | return self.get_citation_source(citation) 138 | else: 139 | return "".join(citation.find('article-title').itertext()) 140 | 141 | def get_other_citation_booktitle(self, citation): 142 | if citation.find('article-title') is None: 143 | return None 144 | else: 145 | return self.get_citation_source(citation) 146 | 147 | def get_citation_source(self, citation): 148 | if citation.find('source') is None: 149 | return None 150 | else: 151 | return "".join(citation.find('source').itertext()) 152 | 153 | def get_citation_publisher(self, context): 154 | if context.find('publisher-name') is None: 155 | return None 156 | elif context.find('publisher-loc') is None: 157 | return context.findtext('publisher-name') 158 | else: 159 | return context.findtext('publisher-name') + ', ' + context.findtext('publisher-loc') # noqa E501 160 | 161 | def get_citation_authors(self, citation): 162 | return self.get_names(citation.findall('person-group[@person-group-type="author"]/name')) # noqa E501 163 | 164 | def get_citation_editors(self, citation): 165 | return self.get_names(citation.findall('person-group[@person-group-type="editor"]/name')) # noqa E501 166 | 167 | def get_journal_name(self, journal_meta): 168 | return journal_meta.findtext('.//journal-title') 169 | 170 | def get_journal_publisher_name(self, journal_meta): 171 | return journal_meta.findtext('.//publisher-name') 172 | 173 | def get_names(self, names): 174 | return ['%s, %s' % (name.findtext('surname'), name.findtext('given-names')) for name in names] # noqa E501 175 | 176 | def filter_empty_fields(self, dict): 177 | record = {} 178 | for k, v in dict.iteritems(): 179 | if v is not None: 180 | record[k] = v 181 | return record 182 | 183 | 184 | # in case file is run directly 185 | if __name__ == "__main__": 186 | import sys 187 | parser = NLMXMLParser(open(sys.argv[1])) 188 | print parser.parse() 189 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/d3/d3.geom.min.js: -------------------------------------------------------------------------------- 1 | (function(){function k(a){return{x:a[0],y:a[1]}}function j(a,b,c,d,e,f){if(!a(b,c,d,e,f)){var g=(c+e)*.5,h=(d+f)*.5,i=b.nodes;i[0]&&j(a,i[0],c,d,g,h),i[1]&&j(a,i[1],g,d,e,h),i[2]&&j(a,i[2],c,h,g,f),i[3]&&j(a,i[3],g,h,e,f)}}function i(){return{leaf:!0,nodes:[],point:null}}function h(a,b){var c={list:a.map(function(a,b){return{index:b,x:a[0],y:a[1]}}).sort(function(a,b){return a.yb.y?1:a.xb.x?1:0}),bottomSite:null},d={list:[],leftEnd:null,rightEnd:null,init:function(){d.leftEnd=d.createHalfEdge(null,"l"),d.rightEnd=d.createHalfEdge(null,"l"),d.leftEnd.r=d.rightEnd,d.rightEnd.l=d.leftEnd,d.list.unshift(d.leftEnd,d.rightEnd)},createHalfEdge:function(a,b){return{edge:a,side:b,vertex:null,l:null,r:null}},insert:function(a,b){b.l=a,b.r=a.r,a.r.l=b,a.r=b},leftBound:function(a){var b=d.leftEnd;do b=b.r;while(b!=d.rightEnd&&e.rightOf(b,a));b=b.l;return b},del:function(a){a.l.r=a.r,a.r.l=a.l,a.edge=null},right:function(a){return a.r},left:function(a){return 
a.l},leftRegion:function(a){return a.edge==null?c.bottomSite:a.edge.region[a.side]},rightRegion:function(a){return a.edge==null?c.bottomSite:a.edge.region[g[a.side]]}},e={bisect:function(a,b){var c={region:{l:a,r:b},ep:{l:null,r:null}},d=b.x-a.x,e=b.y-a.y,f=d>0?d:-d,g=e>0?e:-e;c.c=a.x*d+a.y*e+(d*d+e*e)*.5,f>g?(c.a=1,c.b=e/d,c.c/=d):(c.b=1,c.a=d/e,c.c/=e);return c},intersect:function(a,b){var c=a.edge,d=b.edge;if(!c||!d||c.region.r==d.region.r)return null;var e=c.a*d.b-c.b*d.a;if(Math.abs(e)<1e-10)return null;var f=(c.c*d.b-d.c*c.b)/e,g=(d.c*c.a-c.c*d.a)/e,h=c.region.r,i=d.region.r,j,k;h.y=k.region.r.x;return l&&j.side==="l"||!l&&j.side==="r"?null:{x:f,y:g}},rightOf:function(a,b){var c=a.edge,d=c.region.r,e=b.x>d.x;if(e&&a.side==="l")return 1;if(!e&&a.side==="r")return 0;if(c.a===1){var f=b.y-d.y,g=b.x-d.x,h=0,i=0;!e&&c.b<0||e&&c.b>=0?i=h=f>=c.b*g:(i=b.x+b.y*c.b>c.c,c.b<0&&(i=!i),i||(h=1));if(!h){var j=d.x-c.region.l.x;i=c.b*(g*g-f*f)m*m+n*n}return a.side==="l"?i:!i},endPoint:function(a,c,d){a.ep[c]=d;!a.ep[g[c]]||b(a)},distance:function(a,b){var c=a.x-b.x,d=a.y-b.y;return Math.sqrt(c*c+d*d)}},f={list:[],insert:function(a,b,c){a.vertex=b,a.ystar=b.y+c;for(var d=0,e=f.list,g=e.length;dh.ystar||a.ystar==h.ystar&&b.x>h.vertex.x)continue;break}e.splice(d,0,a)},del:function(a){for(var b=0,c=f.list,d=c.length;bp.y&&(q=o,o=p,p=q,u="r"),t=e.bisect(o,p),n=d.createHalfEdge(t,u),d.insert(l,n),e.endPoint(t,g[u],s),r=e.intersect(l,n),r&&(f.del(l),f.insert(l,r,e.distance(r,o))),r=e.intersect(n,m),r&&f.insert(n,r,e.distance(r,o));else break}for(j=d.right(d.leftEnd);j!=d.rightEnd;j=d.right(j))b(j.edge)}function f(a,b,c,d){var e=a[0],f=b[0],g=c[0],h=d[0],i=a[1],j=b[1],k=c[1],l=d[1],m=e-g,n=f-e,o=h-g,p=i-k,q=j-i,r=l-k,s=(o*p-r*m)/(r*n-o*q);return[e+s*n,i+s*q]}function e(a,b,c){return(c[0]-b[0])*(a[1]-b[1])<(c[1]-b[1])*(a[0]-b[0])}function d(a,b,c,d){var e,f,g,h,i,j,k;e=d[a],f=e[0],g=e[1],e=d[b],h=e[0],i=e[1],e=d[c],j=e[0],k=e[1];return(k-g)*(h-f)-(i-g)*(j-f)>0}function c(a){var b=0,c=0;for(;;){if(a(b,c))return[b,c];b===0?(b=c+1,c=0):(b=b-1,c=c+1)}}d3.geom={},d3.geom.contour=function(d,e){var f=e||c(d),g=[],h=f[0],i=f[1],j=0,k=0,l=NaN,m=NaN,n=0;do n=0,d(h-1,i-1)&&(n+=1),d(h,i-1)&&(n+=2),d(h-1,i)&&(n+=4),d(h,i)&&(n+=8),n===6?(j=m===-1?-1:1,k=0):n===9?(j=0,k=l===1?-1:1):(j=a[n],k=b[n]),j!=l&&k!=m&&(g.push([h,i]),l=j,m=k),h+=j,i+=k;while(f[0]!=h||f[1]!=i);return g};var a=[1,0,1,1,-1,0,-1,1,0,0,0,0,-1,0,-1,NaN],b=[0,-1,0,0,0,-1,0,0,1,-1,1,1,0,-1,0,NaN];d3.geom.hull=function(a){if(a.length<3)return[];var b=a.length,c=b-1,e=[],f=[],g,h,i=0,j,k,l,m,n,o,p,q;for(g=1;g=l*l+m*m?e[g].index=-1:(e[n].index=-1,p=e[g].angle,n=g,o=h)):(p=e[g].angle,n=g,o=h);f.push(i);for(g=0,h=0;g<2;++h)e[h].index!==-1&&(f.push(e[h].index),g++);q=f.length;for(;h=0?(c=a.ep.r,d=a.ep.l):(c=a.ep.l,d=a.ep.r),a.a===1?(g=c?c.y:-1e6,e=a.c-a.b*g,h=d?d.y:1e6,f=a.c-a.b*h):(e=c?c.x:-1e6,g=a.c-a.a*e,f=d?d.x:1e6,h=a.c-a.a*f);var i=[e,g],j=[f,h];b[a.region.l.index].push(i,j),b[a.region.r.index].push(i,j)});return b.map(function(b,c){var d=a[c][0],e=a[c][1];b.forEach(function(a){a.angle=Math.atan2(a[0]-d,a[1]-e)});return b.sort(function(a,b){return a.angle-b.angle}).filter(function(a,c){return!c||a.angle-b[c-1].angle>1e-10})})};var g={l:"r",r:"l"};d3.geom.delaunay=function(a){var b=a.map(function(){return[]}),c=[];h(a,function(c){b[c.region.l.index].push(a[c.region.r.index])}),b.forEach(function(b,d){var e=a[d],f=e[0],g=e[1];b.forEach(function(a){a.angle=Math.atan2(a[0]-f,a[1]-g)}),b.sort(function(a,b){return a.angle-b.angle});for(var 
h=0,i=b.length-1;h=g,k=b.y>=h,l=(k<<1)+j;a.leaf=!1,a=a.nodes[l]||(a.nodes[l]=i()),j?c=g:e=g,k?d=h:f=h,n(a,b,c,d,e,f)}function n(a,b,c,d,e,f){if(!isNaN(b.x)&&!isNaN(b.y))if(a.leaf){var g=a.point;g?Math.abs(g.x-b.x)+Math.abs(g.y-b.y)<.01?o(a,b,c,d,e,f):(a.point=null,o(a,g,c,d,e,f),o(a,b,c,d,e,f)):a.point=b}else o(a,b,c,d,e,f)}var f,g=-1,h=a.length;h&&isNaN(a[0].x)&&(a=a.map(k));if(arguments.length<5)if(arguments.length===3)e=d=c,c=b;else{b=c=Infinity,d=e=-Infinity;while(++gd&&(d=f.x),f.y>e&&(e=f.y);var l=d-b,m=e-c;l>m?e=c+l:d=b+m}var p=i();p.add=function(a){n(p,a,b,c,d,e)},p.visit=function(a){j(a,p,b,c,d,e)},a.forEach(p.add);return p}})() -------------------------------------------------------------------------------- /test/data/sample.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "bibnumber": "3.1", 4 | "school": "Dept. Prob. and Stat., University of Sheffield", 5 | "title": "Stopping time identities and limit theorems for Markov chains", 6 | "author": [ 7 | "Pitman, J W" 8 | ], 9 | "collection": "pitnoid", 10 | "id": "p74t", 11 | "year": "1974", 12 | "keywords": [ 13 | "Stopping time", 14 | "Identities", 15 | "Markov chain", 16 | "Occupation time", 17 | "Rate of convergence", 18 | "Transition probabilities", 19 | "Coupling" 20 | ], 21 | "type": "phdthesis" 22 | }, 23 | { 24 | "bibnumber": "9", 25 | "title": "Birth, death and conditioning of Markov chains", 26 | "journal": "Annals of Probability", 27 | "author": [ 28 | "Jacobsen, M", 29 | "Pitman, J W" 30 | ], 31 | "mrclass": "60J10", 32 | "collection": "pitnoid", 33 | "volume": "5", 34 | "id": "jp77", 35 | "mrnumber": "MR0445613", 36 | "year": "1977", 37 | "keywords": [ 38 | "Path decomposition", 39 | "Conditioned process", 40 | "Conditional independence", 41 | "Markov chain", 42 | "Birth time", 43 | "Death time" 44 | ], 45 | "type": "article", 46 | "pages": "430 to 450", 47 | "znumber": "0363.60052" 48 | }, 49 | { 50 | "bibnumber": "10", 51 | "title": "An extension of de Finetti's theorem", 52 | "journal": "Advances in Applied Probability", 53 | "author": [ 54 | "Pitman, J" 55 | ], 56 | "collection": "pitnoid", 57 | "volume": "10", 58 | "id": "p78", 59 | "year": "1978", 60 | "type": "article", 61 | "pages": "268 to 270" 62 | }, 63 | { 64 | "bibnumber": "11", 65 | "title": "A pointwise ergodic theorem for the group of rational rotations", 66 | "journal": "Trans. Amer. Math. Soc.", 67 | "author": [ 68 | "Dubins, Lester E", 69 | "Pitman, Jim" 70 | ], 71 | "mrclass": "60G42 (28D99)", 72 | "collection": "pitnoid", 73 | "volume": "251", 74 | "id": "dp79", 75 | "mrnumber": "MR531981", 76 | "year": "1980", 77 | "keywords": [ 78 | "Ergodic theory", 79 | "Rational rotation" 80 | ], 81 | "type": "article", 82 | "pages": "299 to 308" 83 | }, 84 | { 85 | "bibnumber": "23", 86 | "title": "A decomposition of Bessel bridges", 87 | "journal": "Z. Wahrsch. Verw. 
Gebiete", 88 | "author": [ 89 | "Pitman, Jim", 90 | "Yor, Marc" 91 | ], 92 | "mrclass": "60J60 (60J55)", 93 | "collection": "pitnoid", 94 | "volume": "59", 95 | "id": "py82", 96 | "mrnumber": "MR656509", 97 | "year": "1982", 98 | "keywords": [ 99 | "Bessel bridge", 100 | "Levy Khintchine representation", 101 | "Local time", 102 | "Ray Knight theorem", 103 | "Markov excursion", 104 | "Occupation time" 105 | ], 106 | "type": "article", 107 | "pages": "425 to 457", 108 | "znumber": "0484.60062" 109 | }, 110 | { 111 | "bibnumber": "32", 112 | "title": "Comple\u0301ments a\u0300 l'e\u0301tude asymptotique des nombres de tours du mouvement brownien complexe autour d'un nombre fini de points", 113 | "journal": "C.R. Acad. Sc. Paris, Se\u0301rie I", 114 | "author": [ 115 | "Pitman, Jim", 116 | "Yor, Marc" 117 | ], 118 | "mrclass": "60J65", 119 | "collection": "pitnoid", 120 | "volume": "305", 121 | "id": "py87", 122 | "mrnumber": "MR921145", 123 | "year": "1987", 124 | "keywords": [ 125 | "Planar Brownian motion", 126 | "Winding", 127 | "Asymptotic law" 128 | ], 129 | "type": "article", 130 | "pages": "757 to 760" 131 | }, 132 | { 133 | "bibnumber": "35", 134 | "links": [ 135 | "http://stat.berkeley.edu/users/pitman/further.asym.pdf" 136 | ], 137 | "title": "Further asymptotic laws of planar Brownian motion", 138 | "journal": "Annals of Probability", 139 | "author": [ 140 | "Pitman, Jim", 141 | "Yor, Marc" 142 | ], 143 | "mrclass": "60J65 (60F05 60G44)", 144 | "collection": "pitnoid", 145 | "volume": "17", 146 | "id": "py89", 147 | "mrnumber": "MR1009441", 148 | "year": "1989", 149 | "keywords": [ 150 | "Planar Brownian motion", 151 | "Winding", 152 | "Asymptotic law" 153 | ], 154 | "type": "article", 155 | "pages": "965 to 1011", 156 | "znumber": "0686.60085" 157 | }, 158 | { 159 | "bibnumber": "34", 160 | "links": [ 161 | "http://stat.berkeley.edu/users/pitman/arc.pdf" 162 | ], 163 | "title": "The shortest planar arc of width one", 164 | "journal": "Amer. Math. Monthly", 165 | "author": [ 166 | "Adhikari, Ani", 167 | "Pitman, Jim" 168 | ], 169 | "mrclass": "52A40", 170 | "collection": "pitnoid", 171 | "volume": "96, No 4", 172 | "id": "ap89", 173 | "mrnumber": "MR992078", 174 | "year": "1989", 175 | "keywords": [ 176 | "Planar arc", 177 | "Worm problem" 178 | ], 179 | "type": "article", 180 | "pages": "309 to 327", 181 | "znumber": "0692.52001" 182 | }, 183 | { 184 | "bibnumber": "46.1", 185 | "title": "The two-parameter generalization of Ewens' random partition structure", 186 | "author": [ 187 | "Pitman, J" 188 | ], 189 | "number": "345", 190 | "collection": "pitnoid", 191 | "id": "jp.ew", 192 | "year": "1992", 193 | "keywords": [ 194 | "Ewens sampling formula", 195 | "Two parameter family of partition structures", 196 | "Partition structure" 197 | ], 198 | "type": "techreport", 199 | "institution": "Dept. Statistics, U.C. Berkeley" 200 | }, 201 | { 202 | "bibnumber": "48", 203 | "title": "Dilatations d'espace-temps, re\u0301arrangements des trajectoires browniennes, et quelques extensions d'une identite\u0301 de Knight", 204 | "journal": "C.R. Acad. Sci. Paris", 205 | "author": [ 206 | "Pitman, James W", 207 | "Yor, Marc" 208 | ], 209 | "mrclass": "60J65", 210 | "collection": "pitnoid", 211 | "volume": "t. 
316, Se\u0301rie I", 212 | "id": "py93c", 213 | "mrnumber": "MR1214423", 214 | "year": "1993", 215 | "keywords": [ 216 | "Random scaling", 217 | "Path rearrangement", 218 | "Knights identity" 219 | ], 220 | "type": "article", 221 | "pages": "723 to 726" 222 | } 223 | ] 224 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/vendor/linkify/1.0/jquery.linkify-1.0.js: -------------------------------------------------------------------------------- 1 | /* encoding: utf-8 2 | 3 | **** linkify plugin for jQuery - automatically finds and changes URLs in text content into proper hyperlinks **** 4 | 5 | Version: 1.0 6 | 7 | Copyright (c) 2009 8 | Már Örlygsson (http://mar.anomy.net/) & 9 | Hugsmiðjan ehf. (http://www.hugsmidjan.is) 10 | 11 | Dual licensed under a MIT licence (http://en.wikipedia.org/wiki/MIT_License) 12 | and GPL 2.0 or above (http://www.gnu.org/licenses/old-licenses/gpl-2.0.html). 13 | 14 | ----------------------------------------------------------------------------- 15 | 16 | Demo and Qunit-tests: 17 | * <./jquery.linkify-1.0-demo.html> 18 | * <./jquery.linkify-1.0-test.html> 19 | 20 | Documentation: 21 | * ... 22 | 23 | Get updates from: 24 | * 25 | * 26 | 27 | ----------------------------------------------------------------------------- 28 | 29 | Requires: 30 | * jQuery (1.2.6 or later) 31 | 32 | Usage: 33 | 34 | jQuery('.articlebody').linkify(); 35 | 36 | // adding plugins: 37 | jQuery.extend( jQuery.fn.linkify.plugins, { 38 | name1: { 39 | re: RegExp 40 | tmpl: String/Function 41 | }, 42 | name2: function(html){ return html; } 43 | }); 44 | 45 | // Uses all plugins by default: 46 | jQuery('.articlebody').linkify(); 47 | 48 | // Use only certain plugins: 49 | jQuery('.articlebody').linkify( 'name1,name2' ); 50 | jQuery('.articlebody').linkify({ use: 'name1,name2' }); 51 | jQuery('.articlebody').linkify({ use: ['name1','name2'] }); 52 | 53 | // Explicitly use all plugins: 54 | jQuery('.articlebody').linkify('*'); 55 | jQuery('.articlebody').linkify({ use: '*' }); 56 | jQuery('.articlebody').linkify({ use: ['*'] }); 57 | 58 | // Use no plugins: 59 | jQuery('.articlebody').linkify(''); 60 | jQuery('.articlebody').linkify({ use: '' }); 61 | jQuery('.articlebody').linkify({ use: [] }); 62 | jQuery('.articlebody').linkify({ use: [''] }); 63 | 64 | // Perfmorm actions on all newly created links: 65 | jQuery('.articlebody').linkify( function (links){ links.addClass('linkified'); } ); 66 | jQuery('.articlebody').linkify({ handleLinks: function (links){ links.addClass('linkified'); } }); 67 | 68 | */ 69 | 70 | (function($){ 71 | 72 | var noProtocolUrl = /(^|["'(\s]|<)(www\..+?\..+?)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/g, 73 | httpOrMailtoUrl = /(^|["'(\s]|<)((?:(?:https?|ftp):\/\/|mailto:).+?)((?:[:?]|\.+)?(?:\s|$)|>|[)"',])/g, 74 | linkifier = function ( html ) { 75 | return html 76 | .replace( noProtocolUrl, '$1$2$3' ) // NOTE: we escape `"http` as `"<``>` to make sure `httpOrMailtoUrl` below doesn't find it as a false-positive 77 | .replace( httpOrMailtoUrl, '$1$2$3' ) 78 | .replace( /"<``>/g, '"http' ); // reinsert `"http` 79 | }, 80 | 81 | 82 | linkify = $.fn.linkify = function ( cfg ) { 83 | if ( !$.isPlainObject( cfg ) ) 84 | { 85 | cfg = { 86 | use: (typeof cfg == 'string') ? cfg : undefined, 87 | handleLinks: $.isFunction(cfg) ? 
cfg : arguments[1] 88 | }; 89 | } 90 | var use = cfg.use, 91 | allPlugins = linkify.plugins || {}, 92 | plugins = [linkifier], 93 | tmpCont, 94 | newLinks = [], 95 | callback = cfg.handleLinks; 96 | if ( use == undefined || use == '*' ) // use === undefined || use === null 97 | { 98 | for ( var name in allPlugins ) 99 | { 100 | plugins.push( allPlugins[name] ); 101 | } 102 | } 103 | else 104 | { 105 | use = $.isArray( use ) ? use : $.trim(use).split( / *, */ ); 106 | var plugin, 107 | name; 108 | for ( var i=0, l=use.length; i1 && /\S/.test(html) ) 129 | { 130 | var htmlChanged, 131 | preHtml; 132 | tmpCont = tmpCont || $('
')[0]; 133 | tmpCont.innerHTML = ''; 134 | tmpCont.appendChild( n.cloneNode(false) ); 135 | var tmpContNodes = tmpCont.childNodes; 136 | 137 | for (var j=0, plugin; (plugin = plugins[j]); j++) 138 | { 139 | var k = tmpContNodes.length, 140 | tmpNode; 141 | while ( k-- ) 142 | { 143 | tmpNode = tmpContNodes[k]; 144 | if ( tmpNode.nodeType == 3 ) 145 | { 146 | html = tmpNode.nodeValue; 147 | if ( html.length>1 && /\S/.test(html) ) 148 | { 149 | preHtml = html; 150 | html = html 151 | .replace( /&/g, '&' ) 152 | .replace( //g, '>' ); 154 | html = $.isFunction( plugin ) ? 155 | plugin( html ): 156 | html.replace( plugin.re, plugin.tmpl ); 157 | htmlChanged = htmlChanged || preHtml!=html; 158 | preHtml!=html && $(tmpNode).after(html).remove(); 159 | } 160 | } 161 | } 162 | } 163 | html = tmpCont.innerHTML; 164 | if ( callback ) 165 | { 166 | html = $('
').html(html); 167 | //newLinks.push.apply( newLinks, html.find('a').toArray() ); 168 | newLinks = newLinks.concat( html.find('a').toArray().reverse() ); 169 | html = html.contents(); 170 | } 171 | htmlChanged && $(n).after(html).remove(); 172 | } 173 | } 174 | else if ( n.nodeType == 1 && !/^(a|button|textarea)$/i.test(n.tagName) ) 175 | { 176 | arguments.callee.call( n ); 177 | } 178 | }; 179 | }); 180 | callback && callback( $(newLinks.reverse()) ); 181 | return this; 182 | }; 183 | 184 | linkify.plugins = { 185 | // default mailto: plugin 186 | mailto: { 187 | re: /(^|["'(\s]|<)([^"'(\s&]+?@.+\.[a-z]{2,7})(([:?]|\.+)?(\s|$)|>|[)"',])/gi, 188 | tmpl: '$1$2$3' 189 | } 190 | }; 191 | 192 | })(jQuery); 193 | -------------------------------------------------------------------------------- /bibserver/static/vendor/facetview/README.rst: -------------------------------------------------------------------------------- 1 | FacetView_ is a pure javascript frontend for ElasticSearch or SOLR search 2 | indices (although there have been recent changes that may not work for SOLR yet 3 | - work in progress). 4 | 5 | (NOTE: recent changes have not been built against SOLR... it probably does not currently work against a SOLR index. May fix soon.) 6 | 7 | It's been developed as a jQuery plugin and lets you easily embed a faceted 8 | browse front end into any web page. 9 | 10 | .. _FacetView: http://okfnlabs.org/facetview/ 11 | 12 | Development is now taking place in this repo: http://github.com/okfn/facetview 13 | 14 | 15 | Demo 16 | ==== 17 | 18 | See http://okfnlabs.org/facetview/ or if you have the source just take a look 19 | at index.html or simple.html 20 | 21 | 22 | Status 23 | ====== 24 | 25 | FacetView is pretty new, and still under active development but is already 26 | pretty stable. If you have suggestions or want to make a contribution please 27 | check out the github repo. 28 | 29 | 30 | Using FacetView 31 | =============== 32 | 33 | Add the following code to your web page:: 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | * BUT change the src URLs to something sensible depending on where you install 49 | the files; or something different if you have the files available already. 50 | If using your own, NOTE the versions; particularly bootstrap - we are on the 2.x 51 | * d3 scripts can be dropped if you intend to disable filter visualisations. 52 | 53 | 54 | Then add a script somewhere to your page that actually calls and sets up the 55 | facetview on a particular page element: 56 | 57 | 70 | 71 | 72 | Now that you have everything ready, you will probably want to customize to 73 | get it looking the way you want it. 74 | 75 | 76 | Customization 77 | ============= 78 | 79 | Once you have FacetView all ready to go, you should probably do some 80 | customisation. There are a few ways to do this: 81 | 82 | Edit the config in jquery.facetview.js 83 | -------------------------------------- 84 | 85 | View the config options near the top of the file to learn more. Some 86 | important points: 87 | 88 | * search_url – you need this. Should be an elasticsearch or SOLR query endpoint 89 | * search_index – your index type, solr or elasticsearch 90 | * result_display - there is a long example of result display. 


Pass in config settings when calling FacetView
----------------------------------------------

All of the settings can also be defined when calling FacetView, and they will
overwrite the values set in the file itself. So you can do something like
this::

    <script type="text/javascript">
    jQuery(document).ready(function($) {
        $('.facet-view-simple').facetview({
            search_url: 'http://localhost:9200/myrecords/record/_search',
            paging: {'size': 20, 'from': 0}
        });
    });
    </script>

Passing config parameters in the URL
------------------------------------

Configs can also be passed on the URL as query parameters. For example,
?q=blah will set the starting search to "blah". You can add complex queries
as JSON objects, such as ?paging={"size":20,"from":10}. Nice...

Providing the location of an external config file
--------------------------------------------------

A config file can be made available anywhere on the web (with some caveats -
keep reading) containing any of the above listed settings, written in the
usual way as a JSON object. Then just pass the URL of your config file as a
parameter called "config_file" when you call FacetView, and it will attempt
to read that config file for you.

The first attempt is a JSONP request to the URL you specify, so if your file
is served in a way that allows the server to respond to such a request, you
can make these calls to any address on the internet.

If the JSONP call fails, a normal GET is executed instead. So if the file is
on the same domain, it should still be retrievable. In this case the file must
be readable by a plain GET request - e.g. it should have a .html extension, or
be otherwise set up to return your config as a string. The JSON config object
is then parsed and read in.
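
For example (the URL below is only a placeholder - host the file wherever
suits you), the external file contains nothing but a JSON object of settings,
and you point FacetView at it with the "config_file" parameter::

    // contents of the hypothetical http://example.com/facetview_config.html:
    //   {"search_url": "http://localhost:9200/myrecords/record/_search",
    //    "paging": {"size": 20}}

    $('.facet-view-simple').facetview({
        config_file: 'http://example.com/facetview_config.html'
    });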

Config precedence
-----------------

Each new config object is merged into the earlier ones, overwriting where keys
collide. So any config you specify in jquery.facetview.js is overwritten and
appended with newer info from any config passed in when calling facetview;
that in turn is overwritten by config parameters passed in the URL; and a
remote config file similarly overwrites and appends to all of the previous
ones.

Change the layout by making and using a custom CSS file
--------------------------------------------------------

Facetview uses the latest `twitter bootstrap`_. When you embed facetview in a
page you need to include the calls to the bootstrap js and css files (see the
example index.html here for more info). You could restyle facetview any way
you want, with or without bootstrap - although it would be a hassle to strip
bootstrap out, so we recommend working with it or around it.


Copyright and License
=====================

Copyright 2011 Open Knowledge Foundation and Cottage Labs.

Licensed under the `MIT License`_.

.. _twitter bootstrap: http://twitter.github.com/bootstrap/
.. _MIT License: http://www.opensource.org/licenses/mit-license.php

--------------------------------------------------------------------------------