├── .babelrc ├── .coveralls.yml ├── .eslintrc.json ├── .gitignore ├── .npmignore ├── .travis.yml ├── README.md ├── docs └── index.md ├── hyper-readings.es.js ├── lib ├── README.md ├── array-to-tree.js ├── constants.js ├── definitions │ ├── ZoteroMappingNotes.md │ ├── activity-streams │ │ ├── collection-behavior.js │ │ ├── collection-page-behavior.js │ │ ├── index.js │ │ ├── object-behavior.js │ │ └── page-stream.js │ ├── annotation-node.js │ ├── bibframe │ │ ├── contribution-behavior.js │ │ ├── index.js │ │ ├── instance-behavior.js │ │ ├── item-behavior.js │ │ ├── shared-behavior.js │ │ ├── work-behavior.js │ │ ├── work-instance-or-item-behavior.js │ │ └── work-or-instance-behavior.js │ ├── container-behavior.js │ ├── foaf │ │ ├── agent-behavior.js │ │ └── person-behavior.js │ ├── helpers.js │ ├── index.js │ ├── list-item-behavior.js │ ├── standard-behavior.js │ └── utils.js ├── hyperreadings.js ├── importers │ ├── html.js │ ├── index.js │ ├── markdown.js │ ├── parse5Helpers.js │ └── zoteroApi.js └── utils.js ├── package-lock.json ├── package.json ├── presentations └── SLNSW Fellowship Presentation │ ├── HyperReadings Presentation.pdf │ └── SLNSW_presentation_notes.pdf ├── reading-lists └── hyper-graph-db-research.md ├── test ├── array-to-tree.test.js ├── behaviors │ ├── annotation-node.test.js │ ├── collection-node.test.js │ ├── container-node.test.js │ ├── helpers.test.js │ └── standard-node.test.js ├── helpers │ └── general.js ├── hyperreadings.test.js ├── importers │ ├── markdown.test.js │ ├── parse5Helpers.test.js │ └── zoteroApi │ │ ├── basic.test.js │ │ ├── book.test.js │ │ ├── bookSection.test.js │ │ └── journalArticle.test.js ├── mocha.opts └── util.test.js └── webpack.config.js /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "development": { 4 | "presets": [ 5 | [ 6 | "env", 7 | { 8 | "targets": { "node": "8" } 9 | } 10 | ] 11 | ] 12 | }, 13 | "test": { 14 | "presets": [ 15 | [ 16 | "env", 
17 | { 18 | "targets": { "node": "8" } 19 | } 20 | ] 21 | ] 22 | } 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /.coveralls.yml: -------------------------------------------------------------------------------- 1 | repo_token: s3ptG5RRDldPIWGLrrscu4kaVI6vRh47a 2 | -------------------------------------------------------------------------------- /.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": ["standard"] 3 | } 4 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | coverage/ 3 | .nyc_output/ 4 | dist/ 5 | *.log 6 | .DS_Store 7 | # ignore locally created hyper-reading dbs 8 | *.db/ 9 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | coverage/ 3 | .nyc_output/ 4 | *.log 5 | .DS_Store 6 | 7 | # ignore locally created hyper-reading dbs 8 | *.db/ 9 | 10 | # documentation and test folders not to be packaged with npm 11 | presentations/ 12 | docs/ 13 | test/ 14 | reading-lists/ 15 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | sudo: false 3 | node_js: 4 | # - 4 temporarily disabled while transpiling with babel is not setup 5 | # - 6 6 | - 8 7 | - 9 8 | - 10 9 | script: 10 | - npm run travis 11 | after_script: 12 | - npm run report-coverage 13 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HyperReadings 2 | 3 | [![Coverage 
Status](https://coveralls.io/repos/github/samiz-dat/hyperreadings/badge.svg?branch=master)](https://coveralls.io/github/samiz-dat/hyperreadings?branch=master) [![Build Status](https://travis-ci.org/samiz-dat/hyperreadings.svg?branch=master)](https://travis-ci.org/samiz-dat/hyperreadings) 4 | 5 | This project is a work in progress. 6 | 7 | See https://samiz-dat.github.io/hyperreadings/ 8 | 9 | ## Related repositories 10 | 11 | This project is developed as part of 12 | 13 | - [dat-library](https://github.com/samiz-dat/dat-library) 14 | - [dat-cardcat](https://github.com/samiz-dat/dat-cardcat) 15 | -------------------------------------------------------------------------------- /hyper-readings.es.js: -------------------------------------------------------------------------------- 1 | export { default as HyperReadings } from './lib/hyperreadings' 2 | -------------------------------------------------------------------------------- /lib/README.md: -------------------------------------------------------------------------------- 1 | # HyperReadings - Working API Document 2 | 3 | HyperReadings is just a collection of standardised operations built on top of a [hyper-graph-db](https://github.com/e-e-e/hyper-graph-db). 4 | 5 | # Node Operations 6 | 7 | ## predicate determined operations 8 | 9 | container is an object with 10 | predicate: co:containsAsHeader (optional) 11 | predicate: co:contains 12 | predicate: co:firstItem 13 | 14 | container has operations: 15 | iterate: 16 | 1. get the co:firstItem, 17 | 2. execute cb, 18 | 3. then if it has co:nextItem follow it, 19 | else finished 20 | 4. repeat 2... 21 | 22 | ## type determined operations 23 | 24 | ### po:Block 25 | ### – doco:Section 26 | ### —– doco:Title 27 | ### —– doco:Paragraph 28 | 29 | All can be containers.
30 | 31 | ### po:Inline 32 | ### – doco:TextChunk 33 | ### – datacite:AlternateResourceIdentifier 34 | 35 | # HyperReading level operations 36 | 37 | This is basically type operations on `hr:root` 38 | 39 | ## reading / viewing / navigating operations 40 | 41 | #### `hr.node(name)` 42 | 43 | Return node object from name. If no name exists, return null? 44 | Node object includes all operations which can be performed on the node. 45 | 46 | #### `hr.disconnected()` 47 | 48 | Return all nodes that are not connected to another node. This could be super expensive operation on large graphs, as you have to check every node. 49 | 50 | eg: 51 | ```sparql 52 | SELECT ?item 53 | WHERE { 54 | ?item ?a ?b 55 | NOT EXISTS { 56 | ?c ?d ?item 57 | } 58 | } 59 | ``` 60 | 61 | #### `hr.iterate(fn)` 62 | 63 | Calls fn for each contained element. 64 | > passes the itemContent node to fn. Perhaps pre-wrapped with hr.node(x) to allow access to all the operations of the itemContent. 65 | 66 | #### `hr.references()` 67 | 68 | Returns a stream of reference objects. 69 | *? What are these? How do we deconstruct them. ?* 70 | 71 | #### `hr.skeleton()` 72 | 73 | Return a hierarchical representation of the hr object. 74 | Iterating over each container, returning the nodes and types. 75 | I am imagining this will be useful for rendering / displaying quick index view of the hyperreading object. 76 | 77 | #### `hr.display(engine)` 78 | 79 | *What would an engine look like?* 80 | Perhaps a series of functions that are called in response to data. 81 | ```js 82 | { 83 | 'po:Inline': (element) => { /* do something */ } 84 | } 85 | ``` 86 | or functions to determine type? 87 | ```js 88 | function (node) { 89 | /* perform some analysis on the graph to determine type */ 90 | return type 91 | } 92 | ``` 93 | 94 | ## writing 95 | 96 | These operations will always be based on element context. Use hr.node(name) to create node object. 
97 | 98 | #### `hr.createNode(type, [properties])` 99 | 100 | Creates a blankNode. 101 | 102 | *Needs to be attached as the object of another triple statement to make it discoverable.* 103 | 104 | Returns a reference to the newly created Node. 105 | 106 | #### `node.insertItem(newNode, [index])` 107 | 108 | Adds a new item to a container-like node. 109 | 110 | 1. inserts `< node co:contains newNode >` 111 | 2. make newItem `< newItem co:hasContent newNode >` 112 | 3. if `< node co:firstItem ? >` does not exist, insert `< node co:firstItem newItem >` 113 | else find the lastItem of node, or the item at index-1, and insert `< lastItem co:nextItem newItem >` 114 | 4. return existing node 115 | 116 | #### `node.removeItem(nodeToDelete || index)` 117 | 118 | Only for container-like nodes 119 | 120 | 1. find node or node at index 121 | 2. get nodeToDelete's nextItem 122 | 3. delete key `< node co:contains nodeToDelete >` 123 | 4. find all references `< ? co:nextItem nodeToDelete >` 124 | 5. update all to `< ? co:nextItem nextItemOfNodeToDelete >` 125 | 6. if `< node co:firstItem nodeToDelete >` exists 126 | update it to `< node co:firstItem nextItemOfNodeToDelete >` 127 | 128 | *ToDo: Factor in removing a number of items at a time* 129 | 130 | #### `node.disconnect()` 131 | 132 | delete all references to this node. 133 | 134 | 1. delete all keys matching `< ?a ?b node >` 135 | 2. stitch up all lists that are broken by this action 136 | 3. eg - if ?b is `firstItem` or `nextItem` 137 | 4. return node 138 | 139 | #### `node.destroy()` 140 | 141 | Disconnect node and then delete it and all of its children 142 | 1. node.disconnect() 143 | 2. find all `< node ?a ?b >` 144 | 3. for each check if it has other nodes connecting to it eg `< ? ? ?b >` 145 | if not call .destroy() on it too. 146 | 147 | *? what happens to circular references with such an operation! ?* 148 | 149 | #### `node.addNode(type)` 150 | 151 | eg: 152 | 1. create newNode = `< newNode a doco:Section >` 153 | 2. node.insertItem(newNode) 154 | 3.
return newNode 155 | 156 | #### `node.attr(name, value)` 157 | 158 | Value can be a named node or an rdf value. 159 | 160 | eg: 161 | 1. delete all `< node name ? >` triples 162 | 2. create new key `< node name value >` 163 | 3. return node 164 | 165 | *? Do we add validation ?* 166 | 167 | *? What about updating an attribute if it already exists, sometimes we want to allow multiple values, other times singular. Should singular be the default - with removing previous values if they exist ?* 168 | 169 | #### `node.addSection()` 170 | 171 | Shorthand for `addNode('doco:Section')` 172 | 173 | #### `node.addTitle()` 174 | 175 | Shorthand for `addNode('doco:Title')` 176 | 177 | #### `node.addParagraph()` 178 | 179 | Shorthand for `addNode('doco:Paragraph')` 180 | 181 | *note: the different types, although allowing the same operations, allow renderers to display them differently.* 182 | 183 | #### `node.addUrlLink(href)` 184 | 185 | Perhaps: 186 | 187 | 1. add inline node eg: `node.addNode('po:Inline')` 188 | 2. create a reference node eg: 189 | ```js 190 | { 191 | a: 'datacite:AlternateResourceIdentifier', 192 | 'cito:usesIdentifierScheme': 'datacite:url', 193 | value: href 194 | } 195 | ``` 196 | 3. then attach reference to inlineNode eg: `inlineNode.attr('cito:hasIdentifier', referenceNode)` 197 | 198 | Or if not using inline nodes to represent the text, perhaps it would be better to simply specify a { to: from: } type relationship between the reference and the parent node's content.
199 | 200 | #### `node.addMetadata()` 201 | -------------------------------------------------------------------------------- /lib/array-to-tree.js: -------------------------------------------------------------------------------- 1 | const identity = a => a /* default `transform`: returns each new sub-array unchanged */ 2 | 3 | export default function arrayToTree (array, splitter, transform) { /* Folds a flat `array` into nested arrays. For each child, splitter(child) returns null (append the child to the current branch) or a numeric depth (start a new sub-array at that depth). `transform` (optional) wraps each newly created sub-array before it is pushed into its parent; it receives the array reference before the children are appended, so its result may observe later mutations. Returns the root array. NOTE(review): a child whose split value is neither null nor orderable against `depth` (e.g. undefined) matches no branch and is silently dropped — confirm this is intended. */ 4 | if (!transform) transform = identity 5 | var depth = 0 6 | var nested = [] /* root of the resulting tree */ 7 | var layers = [] /* stack of { depth, array } ancestors of `current` */ 8 | var current = nested 9 | var newSection 10 | array.forEach((child) => { 11 | var splitValue = splitter(child) 12 | if (splitValue === null) { 13 | // add to current array 14 | current.push(child) 15 | } else if (splitValue > depth) { 16 | // add current array to top of layers 17 | layers.push({ depth: depth, array: current }) 18 | // update depth to latest 19 | depth = splitValue 20 | newSection = [child] 21 | // go one layer deeper 22 | current.push(transform(newSection)) 23 | current = newSection 24 | } else if (splitValue <= depth) { /* climb back to the nearest ancestor shallower than splitValue, then open a sibling sub-array there */ 25 | var previousLayer = layers.pop() 26 | // if depth < layer depth - we continue down the tree until we find the depth we want 27 | while (previousLayer && previousLayer.depth >= splitValue) { 28 | previousLayer = layers.pop() 29 | } 30 | // if depth >= layer depth - we add to this one 31 | if (!previousLayer) { 32 | // current array is root 33 | current = nested 34 | } else { 35 | // get the array associated with depth 36 | layers.push(previousLayer) 37 | current = previousLayer.array 38 | } 39 | // add 40 | depth = splitValue 41 | newSection = [child] 42 | current.push(transform(newSection)) 43 | current = newSection 44 | } 45 | }) 46 | return nested 47 | } 48 | -------------------------------------------------------------------------------- /lib/constants.js: -------------------------------------------------------------------------------- 1 | export const PREFIXES = { 2 | rdf: 'http://www.w3.org/1999/02/22-rdf-syntax-ns#', 3 | rdfs: 'http://www.w3.org/2000/01/rdf-schema#', 4 | co:
'http://purl.org/co/', 5 | c4o: 'http://purl.org/spar/c4o', 6 | po: 'http://www.essepuntato.it/2008/12/pattern#', 7 | doco: 'http://purl.org/spar/doco/', 8 | cito: 'http://purl.org/spar/cito/', 9 | datacite: 'http://purl.org/spar/datacite/', 10 | oa: 'http://www.w3.org/ns/oa#', 11 | as: 'http://www.w3.org/ns/activitystreams#', 12 | dc: 'http://purl.org/dc/elements/1.1/', 13 | dcterms: 'http://purl.org/dc/terms/', 14 | dctypes: 'http://purl.org/dc/dcmitype/', 15 | foaf: 'http://xmlns.com/foaf/0.1/', 16 | schema: 'http://schema.org/', 17 | marcrel: 'http://id.loc.gov/vocabulary/relators/', 18 | bf: 'http://id.loc.gov/ontologies/bibframe/' 19 | } 20 | 21 | export const ZOTERO_TO_MARC_ROLES = { 22 | artist: 'art', // - Artist // 'Artist', // 23 | contributor: 'ctb', // - Contributor // 'Contributor' 24 | performer: 'prf', // - Performer // 'Performer' 25 | composer: 'cmp', // - Composer // 'Composer' 26 | wordsBy: 'lyr', // - Lyricist // 'Words By' 27 | sponsor: 'spn', // - Sponsor // 'Sponsor' 28 | cosponsor: 'spn', // - Sponsor // 'Cosponsor' 29 | author: 'aut', // - Author // 'Author' 30 | // commenter: ? // 'Commenter' 31 | editor: 'edt', // - Editor // 'Editor' 32 | seriesEditor: 'edt', // - Editor // 'Series Editor' 33 | translator: 'trl', // - Translator // 'Translator' 34 | // book author in chapter is special as points to author of containing book. // 35 | bookAuthor: 'aut', // - Author // 'Book Author', 36 | // counsel: n/a? 
// 'Counsel' 37 | programmer: 'prg', // - Programmer // 'Programmer' 38 | // reviewedAuthor: 'Reviewed Author', // special - aut of the book which is subject 39 | recipient: 'rcp', // - Addressee // 'Recipient', // rcp - Addressee 40 | director: 'drt', // - Director // 'Director' 41 | producer: 'pro', // - Producer // 'Producer' 42 | scriptwriter: 'aus', // - Screenwriter // 'Scriptwriter' 43 | interviewee: 'ive', // - Interviewee // 'Interview With' 44 | interviewer: 'ivr', // - Interviewer // 'Interviewer' 45 | cartographer: 'ctg', // - Cartographer // 'Cartographer' 46 | inventor: 'inv', // - Inventor // 'Inventor' 47 | // attorneyAgent: ? // 'Attorney/Agent' 48 | podcaster: 'hst', // - Host // 'Podcaster' 49 | // guest: ? // 'Guest' 50 | presenter: 'pre', // - Presenter // 'Presenter' 51 | castMember: 'prf' // - Performer // 'Cast Member' 52 | } 53 | 54 | export const MARC_RELATORS = { 55 | 'abr': 'Abridger', 56 | 'acp': 'Art copyist', 57 | 'act': 'Actor', 58 | 'adi': 'Art director', 59 | 'adp': 'Adapter', 60 | 'aft': 'Author of afterword, colophon, etc.', 61 | 'anl': 'Analyst', 62 | 'anm': 'Animator', 63 | 'ann': 'Annotator', 64 | 'ant': 'Bibliographic antecedent', 65 | 'ape': 'Appellee', 66 | 'apl': 'Appellant', 67 | 'app': 'Applicant', 68 | 'aqt': 'Author in quotations or text abstracts', 69 | 'arc': 'Architect', 70 | 'ard': 'Artistic director', 71 | 'arr': 'Arranger', 72 | 'art': 'Artist', 73 | 'asg': 'Assignee', 74 | 'asn': 'Associated name', 75 | 'ato': 'Autographer', 76 | 'att': 'Attributed name', 77 | 'auc': 'Auctioneer', 78 | 'aud': 'Author of dialog', 79 | 'aui': 'Author of introduction, etc.', 80 | 'aus': 'Screenwriter', 81 | 'aut': 'Author', 82 | 'bdd': 'Binding designer', 83 | 'bjd': 'Bookjacket designer', 84 | 'bkd': 'Book designer', 85 | 'bkp': 'Book producer', 86 | 'blw': 'Blurb writer', 87 | 'bnd': 'Binder', 88 | 'bpd': 'Bookplate designer', 89 | 'brd': 'Broadcaster', 90 | 'brl': 'Braille embosser', 91 | 'bsl': 'Bookseller', 92 | 'cas': 
'Caster', 93 | 'ccp': 'Conceptor', 94 | 'chr': 'Choreographer', 95 | // '-clb': ' Collaborator', 96 | 'cli': 'Client', 97 | 'cll': 'Calligrapher', 98 | 'clr': 'Colorist', 99 | 'clt': 'Collotyper', 100 | 'cmm': 'Commentator', 101 | 'cmp': 'Composer', 102 | 'cmt': 'Compositor', 103 | 'cnd': 'Conductor', 104 | 'cng': 'Cinematographer', 105 | 'cns': 'Censor', 106 | 'coe': 'Contestant-appellee', 107 | 'col': 'Collector', 108 | 'com': 'Compiler', 109 | 'con': 'Conservator', 110 | 'cor': 'Collection registrar', 111 | 'cos': 'Contestant', 112 | 'cot': 'Contestant-appellant', 113 | 'cou': 'Court governed', 114 | 'cov': 'Cover designer', 115 | 'cpc': 'Copyright claimant', 116 | 'cpe': 'Complainant-appellee', 117 | 'cph': 'Copyright holder', 118 | 'cpl': 'Complainant', 119 | 'cpt': 'Complainant-appellant', 120 | 'cre': 'Creator', 121 | 'crp': 'Correspondent', 122 | 'crr': 'Corrector', 123 | 'crt': 'Court reporter', 124 | 'csl': 'Consultant', 125 | 'csp': 'Consultant to a project', 126 | 'cst': 'Costume designer', 127 | 'ctb': 'Contributor', 128 | 'cte': 'Contestee-appellee', 129 | 'ctg': 'Cartographer', 130 | 'ctr': 'Contractor', 131 | 'cts': 'Contestee', 132 | 'ctt': 'Contestee-appellant', 133 | 'cur': 'Curator', 134 | 'cwt': 'Commentator for written text', 135 | 'dbp': 'Distribution place', 136 | 'dfd': 'Defendant', 137 | 'dfe': 'Defendant-appellee', 138 | 'dft': 'Defendant-appellant', 139 | 'dgg': 'Degree granting institution', 140 | 'dgs': 'Degree supervisor', 141 | 'dis': 'Dissertant', 142 | 'dln': 'Delineator', 143 | 'dnc': 'Dancer', 144 | 'dnr': 'Donor', 145 | 'dpc': 'Depicted', 146 | 'dpt': 'Depositor', 147 | 'drm': 'Draftsman', 148 | 'drt': 'Director', 149 | 'dsr': 'Designer', 150 | 'dst': 'Distributor', 151 | 'dtc': 'Data contributor', 152 | 'dte': 'Dedicatee', 153 | 'dtm': 'Data manager', 154 | 'dto': 'Dedicator', 155 | 'dub': 'Dubious author', 156 | 'edc': 'Editor of compilation', 157 | 'edm': 'Editor of moving image work', 158 | 'edt': 'Editor', 159 | 'egr': 
'Engraver', 160 | 'elg': 'Electrician', 161 | 'elt': 'Electrotyper', 162 | 'eng': 'Engineer', 163 | 'enj': 'Enacting jurisdiction', 164 | 'etr': 'Etcher', 165 | 'evp': 'Event place', 166 | 'exp': 'Expert', 167 | 'fac': 'Facsimilist', 168 | 'fds': 'Film distributor', 169 | 'fld': 'Field director', 170 | 'flm': 'Film editor', 171 | 'fmd': 'Film director', 172 | 'fmk': 'Filmmaker', 173 | 'fmo': 'Former owner', 174 | 'fmp': 'Film producer', 175 | 'fnd': 'Funder', 176 | 'fpy': 'First party', 177 | 'frg': 'Forger', 178 | 'gis': 'Geographic information specialist', 179 | // '-grt': ' Graphic technician', 180 | 'his': 'Host institution', 181 | 'hnr': 'Honoree', 182 | 'hst': 'Host', 183 | 'ill': 'Illustrator', 184 | 'ilu': 'Illuminator', 185 | 'ins': 'Inscriber', 186 | 'inv': 'Inventor', 187 | 'isb': 'Issuing body', 188 | 'itr': 'Instrumentalist', 189 | 'ive': 'Interviewee', 190 | 'ivr': 'Interviewer', 191 | 'jud': 'Judge', 192 | 'jug': 'Jurisdiction governed', 193 | 'lbr': 'Laboratory', 194 | 'lbt': 'Librettist', 195 | 'ldr': 'Laboratory director', 196 | 'led': 'Lead', 197 | 'lee': 'Libelee-appellee', 198 | 'lel': 'Libelee', 199 | 'len': 'Lender', 200 | 'let': 'Libelee-appellant', 201 | 'lgd': 'Lighting designer', 202 | 'lie': 'Libelant-appellee', 203 | 'lil': 'Libelant', 204 | 'lit': 'Libelant-appellant', 205 | 'lsa': 'Landscape architect', 206 | 'lse': 'Licensee', 207 | 'lso': 'Licensor', 208 | 'ltg': 'Lithographer', 209 | 'lyr': 'Lyricist', 210 | 'mcp': 'Music copyist', 211 | 'mdc': 'Metadata contact', 212 | 'med': 'Medium', 213 | 'mfp': 'Manufacture place', 214 | 'mfr': 'Manufacturer', 215 | 'mod': 'Moderator', 216 | 'mon': 'Monitor', 217 | 'mrb': 'Marbler', 218 | 'mrk': 'Markup editor', 219 | 'msd': 'Musical director', 220 | 'mte': 'Metal-engraver', 221 | 'mtk': 'Minute taker', 222 | 'mus': 'Musician', 223 | 'nrt': 'Narrator', 224 | 'opn': 'Opponent', 225 | 'org': 'Originator', 226 | 'orm': 'Organizer', 227 | 'osp': 'Onscreen presenter', 228 | 'oth': 'Other', 229 | 
'own': 'Owner', 230 | 'pan': 'Panelist', 231 | 'pat': 'Patron', 232 | 'pbd': 'Publishing director', 233 | 'pbl': 'Publisher', 234 | 'pdr': 'Project director', 235 | 'pfr': 'Proofreader', 236 | 'pht': 'Photographer', 237 | 'plt': 'Platemaker', 238 | 'pma': 'Permitting agency', 239 | 'pmn': 'Production manager', 240 | 'pop': 'Printer of plates', 241 | 'ppm': 'Papermaker', 242 | 'ppt': 'Puppeteer', 243 | 'pra': 'Praeses', 244 | 'prc': 'Process contact', 245 | 'prd': 'Production personnel', 246 | 'pre': 'Presenter', 247 | 'prf': 'Performer', 248 | 'prg': 'Programmer', 249 | 'prm': 'Printmaker', 250 | 'prn': 'Production company', 251 | 'pro': 'Producer', 252 | 'prp': 'Production place', 253 | 'prs': 'Production designer', 254 | 'prt': 'Printer', 255 | 'prv': 'Provider', 256 | 'pta': 'Patent applicant', 257 | 'pte': 'Plaintiff-appellee', 258 | 'ptf': 'Plaintiff', 259 | 'pth': 'Patent holder', 260 | 'ptt': 'Plaintiff-appellant', 261 | 'pup': 'Publication place', 262 | 'rbr': 'Rubricator', 263 | 'rcd': 'Recordist', 264 | 'rce': 'Recording engineer', 265 | 'rcp': 'Addressee', 266 | 'rdd': 'Radio director', 267 | 'red': 'Redaktor', 268 | 'ren': 'Renderer', 269 | 'res': 'Researcher', 270 | 'rev': 'Reviewer', 271 | 'rpc': 'Radio producer', 272 | 'rps': 'Repository', 273 | 'rpt': 'Reporter', 274 | 'rpy': 'Responsible party', 275 | 'rse': 'Respondent-appellee', 276 | 'rsg': 'Restager', 277 | 'rsp': 'Respondent', 278 | 'rsr': 'Restorationist', 279 | 'rst': 'Respondent-appellant', 280 | 'rth': 'Research team head', 281 | 'rtm': 'Research team member', 282 | 'sad': 'Scientific advisor', 283 | 'sce': 'Scenarist', 284 | 'scl': 'Sculptor', 285 | 'scr': 'Scribe', 286 | 'sds': 'Sound designer', 287 | 'sec': 'Secretary', 288 | 'sgd': 'Stage director', 289 | 'sgn': 'Signer', 290 | 'sht': 'Supporting host', 291 | 'sll': 'Seller', 292 | 'sng': 'Singer', 293 | 'spk': 'Speaker', 294 | 'spn': 'Sponsor', 295 | 'spy': 'Second party', 296 | 'srv': 'Surveyor', 297 | 'std': 'Set designer', 298 | 
'stg': 'Setting', 299 | 'stl': 'Storyteller', 300 | 'stm': 'Stage manager', 301 | 'stn': 'Standards body', 302 | 'str': 'Stereotyper', 303 | 'tcd': 'Technical director', 304 | 'tch': 'Teacher', 305 | 'ths': 'Thesis advisor', 306 | 'tld': 'Television director', 307 | 'tlp': 'Television producer', 308 | 'trc': 'Transcriber', 309 | 'trl': 'Translator', 310 | 'tyd': 'Type designer', 311 | 'tyg': 'Typographer', 312 | 'uvp': 'University place', 313 | 'vac': 'Voice actor', 314 | 'vdg': 'Videographer', 315 | // '-voc': ' Vocalist', 316 | 'wac': 'Writer of added commentary', 317 | 'wal': 'Writer of added lyrics', 318 | 'wam': 'Writer of accompanying material', 319 | 'wat': 'Writer of added text', 320 | 'wdc': 'Woodcutter', 321 | 'wde': 'Wood engraver', 322 | 'win': 'Writer of introduction', 323 | 'wit': 'Witness', 324 | 'wpr': 'Writer of preface', 325 | 'wst': 'Writer of supplementary textual content' 326 | } 327 | -------------------------------------------------------------------------------- /lib/definitions/ZoteroMappingNotes.md: -------------------------------------------------------------------------------- 1 | ## Zotero Types: 2 | 3 | - **Artwork**: `artwork` 4 | http://purl.org/spar/fabio/ArtisticWork 5 | - **Audio Recording**: `audioRecording` 6 | http://purl.org/spar/fabio/SoundRecording 7 | - **Blog Post**: `blogPost` 8 | http://purl.org/spar/fabio/BlogPost 9 | - **Book**: `book` 10 | http://purl.org/spar/fabio/Book 11 | - **Book Section**: `bookSection` 12 | http://purl.org/spar/fabio/BookChapter 13 | - **Computer Program**: `computerProgram` 14 | http://purl.org/spar/fabio/ComputerProgram 15 | - **Conference Paper**: `conferencePaper` 16 | http://purl.org/spar/fabio/ConferencePaper 17 | - **Dictionary Entry**: `dictionaryEntry` 18 | http://purl.org/spar/fabio/Entry 19 | - **Document**: `document` 20 | http://purl.org/spar/fabio/Expression 21 | - **E-mail**: `email` 22 | http://purl.org/spar/fabio/Email 23 | - **Encyclopedia Article**: `encyclopediaArticle` ? 
24 | - **Film**: `film` 25 | http://purl.org/spar/fabio/Film 26 | - **Forum Post**: `forumPost` 27 | http://purl.org/spar/fabio/WebContent 28 | - **Journal Article**: `journalArticle` 29 | http://purl.org/spar/fabio/JournalArticle 30 | - **Letter**: `letter` 31 | http://purl.org/spar/fabio/Letter 32 | - **Magazine Article**: `magazineArticle` 33 | http://purl.org/spar/fabio/MagazineArticle 34 | - **Manuscript**: `manuscript` 35 | http://purl.org/spar/fabio/Manuscript 36 | - **Map**: `map` 37 | http://purl.org/spar/fabio/StillImage 38 | - **Newspaper Article**: `newspaperArticle` 39 | http://purl.org/spar/fabio/NewspaperArticle 40 | - **Patent**: `patent` 41 | http://purl.org/spar/fabio/Patent 42 | - **Podcast**: `podcast` 43 | http://purl.org/spar/fabio/AudioDocument 44 | - **Presentation**: `presentation` 45 | http://purl.org/spar/fabio/Presentation 46 | - **Report**: `report` 47 | http://purl.org/spar/fabio/ReportDocument 48 | - **Thesis**: `thesis` 49 | http://purl.org/spar/fabio/Thesis 50 | - **Video Recording**: `videoRecording` 51 | http://purl.org/spar/fabio/MovingImage 52 | - **Web Page**: `webpage` 53 | http://purl.org/spar/fabio/WebPage 54 | 55 | **These types are ignored for now:** 56 | 57 | - **Bill**: `bill` 58 | - **Case**: `case` 59 | - **Hearing**: `hearing` 60 | - **Instant Message**: `instantMessage` 61 | - **Interview**: `interview` 62 | - **Note**: `note` 63 | - **Radio Broadcast**: `radioBroadcast` 64 | - **Statute**: `statute` 65 | - **TV Broadcast**: `tvBroadcast` 66 | 67 | There is no Type in BibFrame2.0. Instead we should translate these into Content, Media, and Carrier fields . 
For example: https://rdabasics.com/2012/09/10/content-media-and-carrier-fields/ 68 | 69 | `Work -> content -> [ Content -> rdf:value -> x ]` 70 | 71 | `[Work or Instance] -> media -> [ Media -> rdf:value -> x ]` 72 | 73 | `Instance -> carrier -> [ Carrier -> rdf:value -> x ]` 74 | 75 | `[Work or Instance] -> issuance -> ` 76 | 77 | Issuance will likely be one of the following: 78 | 79 | - integrating resource 80 | > A resource that is added to or changed by means of updates that do not remain discrete and are integrated into the whole. Examples include updating loose-leafs and updating Web sites. Integrating resources may be finite or continuing. 81 | - multipart monograph 82 | >A resource issued in two or more parts, either simultaneously or successively, that is complete or intended to be completed within a finite number of parts 83 | - serial 84 | > A resource issued in successive parts, usually bearing numbering, that has no predetermined conclusion. 85 | - single unit 86 | > A resource that is complete in one part or intended to be completed within a finite number of parts. 87 | 88 | BibFrame2.0 basic structure is: 89 | 90 | `Work -> hasInstance -> [Instance -> hasItem -> Item ]` 91 | 92 | 93 | ## Zotero Type Fields 94 | 95 | 96 | #### `itemType` 97 | 98 | This is a standard zotero type as above 99 | 100 | #### `relations` 101 | 102 | This is an array of ids pointing to other zotero items 103 | 104 | #### `collections` 105 | 106 | This is an array of ids pointing to other zotero items 107 | 108 | `[Work, Instance or Item] -> partOf -> [Work, Instance or Item]` 109 | 110 | #### `tags` 111 | 112 | ???? - This corresponds with subject generally 113 | Can be translated into `[Work, Instance or Item] -> subject -> Literal` 114 | 115 | #### `shortTitle` 116 | 117 | http://purl.org/spar/fabio/hasShortTitle 118 | 119 | Short form of the title, often without the subtitle. 
Used mostly in footnote styles for subsequent citations 120 | 121 | `[Work, Instance or Item] -> title -> [ AbbreviatedTitle -> rdfs:label title ]` 122 | 123 | #### `creators` 124 | 125 | - http://purl.org/dc/terms/creator 126 | 127 | `creators` is an array of: 128 | 129 | ```js 130 | { 131 | firstName: "First", 132 | lastName: "Last", 133 | name: "Name" 134 | } 135 | ``` 136 | 137 | Creators have has roles: 138 | 139 | Mapping to Marc Roles in comments. 140 | Look at https://www.loc.gov/marc/relators/relacode.html and http://memory.loc.gov/diglib/loc.terms/relators/dc-contributor.html for details. 141 | 142 | ```js 143 | { 144 | artist: 'Artist', // art - Artist 145 | contributor: 'Contributor', // ctb - Contributor 146 | performer: 'Performer', // prf - Performer 147 | composer: 'Composer', // cmp - Composer 148 | wordsBy: 'Words By', // lyr - Lyricist 149 | sponsor: 'Sponsor', // spn - Sponsor 150 | cosponsor: 'Cosponsor', // spn - Sponsor 151 | author: 'Author', // aut- Author 152 | commenter: 'Commenter', // for blog post only ? 153 | editor: 'Editor', // edt - Editor 154 | seriesEditor: 'Series Editor', // edt - Editor 155 | translator: 'Translator', // trl - Translator 156 | bookAuthor: 'Book Author', // in chapter -> special as points to author of containing book. // aut - Author 157 | counsel: 'Counsel', // n/a? 158 | programmer: 'Programmer', // prg - Programmer 159 | reviewedAuthor: 'Reviewed Author', // special - aut of the book which is subject 160 | recipient: 'Recipient', // rcp - Addressee 161 | director: 'Director', // drt - Director 162 | producer: 'Producer', // pro - Producer 163 | scriptwriter: 'Scriptwriter', // aus - Screenwriter 164 | interviewee: 'Interview With', // ive - Interviewee 165 | interviewer: 'Interviewer', // ivr - Interviewer 166 | cartographer: 'Cartographer', // ctg - Cartographer 167 | inventor: 'Inventor', // inv - Inventor 168 | attorneyAgent: 'Attorney/Agent', // ? 
169 | podcaster: 'Podcaster', // hst - Host 170 | guest: 'Guest', // ? 171 | presenter: 'Presenter', // pre - Presenter 172 | castMember: 'Cast Member' // prf - Performer 173 | } 174 | ``` 175 | 176 | `[Work, Instance or Item] -> contribution -> [ Contribution -> role -> a-marc-relator, -> agent -> Agent ]` 177 | 178 | Where `Agent` may be a `Person` which is a subclass of FOAF. 179 | See: http://xmlns.com/foaf/spec/#term_Person 180 | 181 | #### `extra` 182 | 183 | This field will often contain other data eg: https://www.zotero.org/support/kb/item_types_and_fields#citing_fields_from_extra 184 | 185 | #### `abstractNote` 186 | 187 | - http://purl.org/dc/terms/abstract 188 | 189 | `[Work or Instance] -> summary -> Summary` 190 | 191 | #### `rights` 192 | 193 | - http://purl.org/dc/terms/rights 194 | 195 | `[Work or Instance] -> copyrightRegistration -> CopyrightRegistration` 196 | 197 | #### `url` 198 | 199 | - http://purl.org/spar/fabio/hasURL 200 | 201 | `Item -> electronicLocator -> url` 202 | 203 | #### `accessDate` 204 | 205 | - http://purl.org/spar/fabio/hasAccessDate 206 | 207 | This is really only related to citation data. 
208 | 209 | #### `title` 210 | 211 | This is not within types: `case` `email` `statute` 212 | 213 | - http://purl.org/dc/terms/title 214 | 215 | `[Work, Instance or Item] -> title -> [ Title -> rdfs:label -> x ]` 216 | 217 | #### `language` 218 | 219 | This is not within types: `computerProgram` `note` 220 | 221 | - http://purl.org/dc/terms/language 222 | 223 | `[ unspecified ] -> bf:language ` 224 | 225 | Should probably use controlled data from here: http://id.loc.gov/vocabulary/languages.html 226 | 227 | 228 | #### `date` 229 | 230 | This is not within types: `case` `note` `patent` `podcast` `statute` 231 | 232 | - http://purl.org/dc/terms/date 233 | 234 | `Instance -> provisionActivity -> [ Publication -> date -> value ]` 235 | 236 | #### `callNumber` 237 | 238 | This is only within the types: `artwork` `audioRecording` `book` `bookSection` `computerProgram` `conferencePaper` `dictionaryEntry` `document` `encyclopediaArticle` `film` `interview` `journalArticle` `letter` `magazineArticle` `manuscript` `map` `newspaperArticle` `radioBroadcast` `report` `thesis` `tvBroadcast` `videoRecording` 239 | 240 | 241 | `Item -> shelfMark -> [ ShelfMark -> rdfs:label -> callNumber ]` 242 | 243 | `ShelfMark` should also have bf:source for where the number has come from. 
244 | 245 | #### `archiveLocation` 246 | 247 | This is only within the types: `artwork` `audioRecording` `book` `bookSection` `computerProgram` `conferencePaper` `dictionaryEntry` `document` `encyclopediaArticle` `film` `interview` `journalArticle` `letter` `magazineArticle` `manuscript` `map` `newspaperArticle` `radioBroadcast` `report` `thesis` `tvBroadcast` `videoRecording` 248 | 249 | `Item -> physicalLocation -> value` 250 | 251 | #### `archive` 252 | 253 | This is only within the types: `artwork` `audioRecording` `book` `bookSection` `computerProgram` `conferencePaper` `dictionaryEntry` `document` `encyclopediaArticle` `film` `interview` `journalArticle` `letter` `magazineArticle` `manuscript` `map` `newspaperArticle` `radioBroadcast` `report` `thesis` `tvBroadcast` `videoRecording` 254 | 255 | `Item -> heldBy -> [ Agent ]` 256 | 257 | Where `Agent` is the archive which holds this item 258 | 259 | #### `libraryCatalog` 260 | 261 | This is only within the types: `artwork` `audioRecording` `book` `bookSection` `computerProgram` `conferencePaper` `dictionaryEntry` `document` `encyclopediaArticle` `film` `interview` `journalArticle` `letter` `magazineArticle` `manuscript` `map` `newspaperArticle` `radioBroadcast` `report` `thesis` `tvBroadcast` `videoRecording` 262 | 263 | 264 | `Instance -> source -> [ Source ]` 265 | 266 | #### `place` 267 | 268 | This is only within the types: `audioRecording` `book` `bookSection` `computerProgram` `conferencePaper` `dictionaryEntry` `encyclopediaArticle` `hearing` `manuscript` `map` `newspaperArticle` `patent` `presentation` `radioBroadcast` `report` `thesis` `tvBroadcast` `videoRecording` 269 | 270 | `Instance -> provisionActivity -> [ProvisionActivity -> place [ Place -> rdfs:label -> "Edinburgh, Scotland" ]]` 271 | 272 | ProvisionActivity would include date and place info. 
273 | 274 | 275 | #### `pages` 276 | 277 | This is only within the types: `bookSection` `conferencePaper` `dictionaryEntry` `encyclopediaArticle` `hearing` `journalArticle` `magazineArticle` `newspaperArticle` `patent` `report` `statute` 278 | 279 | http://prismstandard.org/namespaces/basic/2.0/pageRange 280 | 281 | Difficult to figure this one out. 282 | 283 | Page numbers are stored in mark here under enumeration - https://www.loc.gov/marc/bibliographic/bd76x78x.html 284 | It uses SICI as a standard for encoding the page range - [view the standard here](https://groups.niso.org/apps/group_public/download.php/6514/Serial%20Item%20and%20Contribution%20Identifier%20(SICI).pdf) 285 | 286 | Maybe should be stored in Enumeration and Chronology 287 | 288 | #### `volume` 289 | 290 | This is only within the types: `audioRecording` `book` `bookSection` `conferencePaper` `dictionaryEntry` `encyclopediaArticle` `journalArticle` `magazineArticle` `videoRecording` 291 | 292 | http://purl.org/spar/fabio/hasSequenceIdentifier 293 | 294 | TODO: 295 | probably map to Extent 296 | 297 | #### `ISBN` 298 | 299 | This is only within the types: `audioRecording` `book` `bookSection` `computerProgram` `conferencePaper` `dictionaryEntry` `encyclopediaArticle` `map` `videoRecording` 300 | 301 | http://prismstandard.org/namespaces/basic/2.0/isbn 302 | 303 | `[unspecified] -> identifiedBy -> [Isbn -> rdf:value -> number]` 304 | 305 | #### `publisher` 306 | 307 | This is only within the types: `book` `bookSection` `conferencePaper` `dictionaryEntry` `document` `encyclopediaArticle` `hearing` `map` 308 | 309 | `[Work or Instance] -> provisionActivity -> [Publication -> agent -> [Agent -> rdfs:label "Oxford University Press"]] ` 310 | 311 | #### `numberOfVolumes` 312 | 313 | This is only within the types: `audioRecording` `book` `bookSection` `dictionaryEntry` `encyclopediaArticle` `hearing` `videoRecording` 314 | - http://purl.org/spar/fabio/hasVolumeCount 315 | 316 | `[Work or Instance] -> 
hasSeries -> [[Work or Instance] -> seriesEnumeration -> Literal ]` 317 | 318 | #### `seriesTitle` 319 | 320 | This is only within the types: `audioRecording` `computerProgram` `journalArticle` `map` `podcast` `report` `videoRecording` 321 | 322 | - http://purl.org/vocab/frbr/core#partOf - http://purl.org/spar/fabio/Series / http://purl.org/spar/fabio/JournalIssue etc with title 323 | 324 | 325 | Zotero docs: 326 | > Title of a series of articles within one issue of a journal (e.g., a special section or “From the Cover”). “Section Title” would likely be a more appropriate label. See [here](http://dtd.nlm.nih.gov/publishing/tag-library/2.2/n-ihv0.html) for an explanation. For citation purposes, this field is currently equivalent to “Series” and is erroneously used instead of series in some item types (e.g., Audio Recording, Map). 327 | 328 | `[Work or Instance] -> partOf -> [[Work or Instance] -> title -> Title ]` 329 | 330 | 331 | #### `runningTime` 332 | 333 | This is only within the types: `audioRecording` `film` `podcast` `radioBroadcast` `tvBroadcast` `videoRecording` 334 | 335 | - http://purl.org/dc/terms/format 336 | 337 | `[Work or Instance] -> duration -> Literal` 338 | 339 | #### `edition` 340 | 341 | This is only within the types: `book` `bookSection` `dictionaryEntry` `encyclopediaArticle` `map` `newspaperArticle` 342 | 343 | 344 | 345 | #### `series` 346 | 347 | This is only within the types: `book` `bookSection` `conferencePaper` `dictionaryEntry` `encyclopediaArticle` `journalArticle` 348 | 349 | `[Work or Instance] -> hasSeries -> [[Work or Instance] -> title -> Title ]` 350 | 351 | #### `history` 352 | 353 | This is only within the types: `bill` `case` `hearing` `statute` 354 | 355 | - ignored 356 | 357 | #### `seriesNumber` 358 | 359 | This is only within the types: `book` `bookSection` `dictionaryEntry` `encyclopediaArticle` 360 | 361 | `[Work or Instance] -> seriesEnumeration -> Literal` 362 | 363 | #### `session` 364 | 365 | This is only
within the types: `bill` `hearing` `statute` 366 | 367 | - ignored 368 | 369 | #### `section` 370 | 371 | This is only within the types: `bill` `newspaperArticle` `statute` 372 | 373 | - http://prismstandard.org/namespaces/basic/2.0/section 374 | 375 | - ignore for now 376 | 377 | #### `numPages` 378 | 379 | This is only within the types: `book` `manuscript` `thesis` 380 | 381 | - http://purl.org/spar/fabio/hasPageCount 382 | 383 | `Instance -> extent -> [ Extent -> rdfs:label -> "viii, 235 p." ]` 384 | 385 | #### `videoRecordingFormat` 386 | 387 | This is only within the types: `film` `tvBroadcast` `videoRecording` 388 | 389 | - http://purl.org/dc/terms/format 390 | 391 | `Instance -> videoCharacteristic -> [VideoFormat -> rdfs:label -> '8mm']` 392 | 393 | #### `publicationTitle` 394 | 395 | This is only within the types: `journalArticle` `magazineArticle` `newspaperArticle` 396 | 397 | `[Work, Instance or Item] -> partOf -> [ [Work, Instance or Item] -> title -> [ Title -> mainTitle -> x ] ]` 398 | 399 | #### `ISSN` 400 | 401 | This is only within the types: `journalArticle` `magazineArticle` `newspaperArticle` 402 | 403 | - http://prismstandard.org/namespaces/basic/2.0/issn 404 | 405 | `[unspecified] -> identifiedBy -> [Issn -> rdf:value -> number]` 406 | 407 | #### `episodeNumber` 408 | 409 | This is only within the types: `podcast` `radioBroadcast` `tvBroadcast` 410 | 411 | -- ignore for now - but likely Enumeration and Chronology 412 | 413 | #### `DOI` 414 | 415 | This is only within the types: `conferencePaper` `journalArticle` 416 | // or as `extra` field DOI: 417 | 418 | http://prismstandard.org/namespaces/basic/2.0/doi 419 | 420 | `[unspecified] -> identifiedBy -> [Doi -> rdf:value -> number]` 421 | 422 | #### `network` 423 | 424 | This is only within the types: `radioBroadcast` `tvBroadcast` 425 | 426 | - ignored 427 | 428 | #### `legislativeBody` 429 | 430 | This is only within the types: `bill` `hearing` 431 | 432 | - ignored 433 | 434 | #### `issue` 
435 | 436 | This is only within the types: `journalArticle` `magazineArticle` 437 | 438 | `Item -> enumerationAndChronology -> [ Enumeration -> rdfs:label -> 2]` 439 | 440 | #### `code` 441 | 442 | This is only within the types: `bill` `statute` 443 | 444 | - ignored 445 | 446 | #### `websiteType` 447 | 448 | This is only within the types: `blogPost` `webpage` 449 | 450 | -- ignored 451 | 452 | > Rarely used. Describes the genre of a webpage such as “personal blog” or “intranet”. 453 | 454 | #### `programTitle` 455 | 456 | This is only within the types: `radioBroadcast` `tvBroadcast` 457 | 458 | `[Work, Instance or Item] -> partOf -> [ [Work, Instance or Item] -> title -> [ Title -> mainTitle -> x ] ]` 459 | 460 | #### `audioRecordingFormat` 461 | 462 | This is only within the types: `audioRecording` `radioBroadcast` 463 | 464 | - ignore for now 465 | 466 | #### `caseName` 467 | 468 | This is only within the types: `case` 469 | 470 | - ignored 471 | 472 | #### `reporter` 473 | 474 | This is only within the types: `case` 475 | 476 | - ignored 477 | 478 | #### `reporterVolume` 479 | 480 | This is only within the types: `case` 481 | 482 | - ignored 483 | 484 | #### `court` 485 | 486 | This is only within the types: `case` 487 | 488 | - ignored 489 | 490 | #### `docketNumber` 491 | 492 | This is only within the types: `case` 493 | 494 | - ignored 495 | 496 | #### `firstPage` 497 | 498 | This is only within the types: `case` 499 | 500 | - ignored 501 | 502 | #### `dateDecided` 503 | 504 | This is only within the types: `case` 505 | 506 | - ignored 507 | 508 | #### `versionNumber` 509 | 510 | This is only within the types: `computerProgram` 511 | 512 | - http://prismstandard.org/namespaces/basic/2.0/versionIdentifier 513 | 514 | 515 | `[Instance] -> editionStatement -> Literal` 516 | 517 | #### `blogTitle` 518 | 519 | This is only within the types: `blogPost` - http://purl.org/dc/terms/title 520 | 521 | `[Work, Instance or Item] -> partOf -> [ [Work, Instance or Item]
-> title -> [ Title -> mainTitle -> x ] ]` 522 | 523 | #### `company` 524 | 525 | This is only within the types: `computerProgram` 526 | 527 | `Unspecified -> agent -> [Organization -> rdfs:label -> x ]` 528 | 529 | #### `programmingLanguage` 530 | 531 | This is only within the types: `computerProgram` 532 | 533 | #### `proceedingsTitle` 534 | 535 | This is only within the types: `conferencePaper`', 536 | 537 | #### `conferenceName` 538 | 539 | This is only within the types: `conferencePaper`', 540 | 541 | #### `websiteTitle` 542 | 543 | This is only within the types: `webpage`, 544 | 545 | `[Work, Instance or Item] -> partOf -> [ [Work, Instance or Item] -> title -> [ Title -> mainTitle -> x ] ]` 546 | 547 | #### `dictionaryTitle` 548 | 549 | This is only within the types: `dictionaryEntry`, 550 | 551 | `[Work, Instance or Item] -> partOf -> [ [Work, Instance or Item] -> title -> [ Title -> mainTitle -> x ] ]` 552 | 553 | #### `subject` 554 | 555 | This is only within the types: `email`, 556 | 557 | 558 | 559 | #### `encyclopediaTitle` 560 | 561 | This is only within the types: encyclopediaArticle', 562 | 563 | #### `distributor` 564 | 565 | This is only within the types: film', 566 | 567 | #### `genre` 568 | 569 | This is only within the types: film', 570 | 571 | #### `codeVolume` 572 | 573 | This is only within the types: bill', - ignored 574 | 575 | #### `forumTitle` 576 | 577 | This is only within the types: forumPost', http://purl.org/dc/terms/title 578 | 579 | #### `postType` 580 | 581 | This is only within the types: forumPost', 582 | 583 | #### `committee` 584 | 585 | This is only within the types: hearing', - ignored 586 | 587 | #### `documentNumber` 588 | 589 | This is only within the types: hearing', - ignored 590 | 591 | #### `interviewMedium` 592 | 593 | This is only within the types: interview', - ignored 594 | 595 | #### `label` 596 | 597 | This is only within the types: audioRecording', 598 | 599 | #### `codePages` 600 | 601 | This is only within 
the types: bill', - ignored 602 | 603 | #### `seriesText` 604 | 605 | This is only within the types: journalArticle', 606 | 607 | #### `journalAbbreviation` 608 | 609 | This is only within the types: journalArticle', 610 | 611 | #### `artworkMedium` 612 | 613 | This is only within the types: artwork', 614 | 615 | #### `letterType` 616 | 617 | This is only within the types: letter', 618 | 619 | #### `manuscriptType` 620 | 621 | This is only within the types: manuscript', 622 | 623 | #### `mapType` 624 | 625 | This is only within the types: map', 626 | 627 | #### `scale` 628 | 629 | This is only within the types: map', 630 | 631 | #### // `note` 632 | 633 | This is only within the types: note', 634 | 635 | #### `country` 636 | 637 | This is only within the types: patent', 638 | 639 | #### `assignee` 640 | 641 | This is only within the types: patent', 642 | 643 | #### `issuingAuthority` 644 | 645 | This is only within the types: patent', 646 | 647 | #### `patentNumber` 648 | 649 | This is only within the types: patent', 650 | 651 | #### `filingDate` 652 | 653 | This is only within the types: patent', 654 | 655 | #### `applicationNumber` 656 | 657 | This is only within the types: patent', 658 | 659 | #### `priorityNumbers` 660 | 661 | This is only within the types: patent', 662 | 663 | #### `issueDate` 664 | 665 | This is only within the types: patent', 666 | 667 | #### `references` 668 | 669 | This is only within the types: patent', 670 | 671 | #### `legalStatus` 672 | 673 | This is only within the types: patent', 674 | 675 | #### `artworkSize` 676 | 677 | This is only within the types: artwork', 678 | 679 | #### `audioFileType` 680 | 681 | This is only within the types: `podcast` 682 | 683 | #### `presentationType` 684 | 685 | This is only within the types: `presentation` 686 | 687 | #### `meetingName` 688 | 689 | This is only within the types: `presentation` 690 | 691 | #### `bookTitle` 692 | 693 | This is only within the types: `bookSection` 694 | 695 | #### 
`billNumber` 696 | 697 | This is only within the types: bill - ignored 698 | 699 | #### `reportNumber` 700 | 701 | This is only within the types: report 702 | 703 | #### `reportType` 704 | 705 | This is only within the types: report 706 | 707 | #### `institution` 708 | 709 | This is only within the types: report 710 | 711 | #### `nameOfAct` 712 | 713 | This is only within the types: statute - ignore 714 | 715 | #### `codeNumber` 716 | 717 | This is only within the types: statute - ignored 718 | 719 | #### `publicLawNumber` 720 | 721 | This is only within the types: statute - ignored 722 | 723 | #### `dateEnacted` 724 | 725 | This is only within the types: statute - ignored 726 | 727 | #### `thesisType` 728 | 729 | This is only within the types: thesis 730 | 731 | #### `university` 732 | 733 | This is only within the types: thesis 734 | 735 | #### `studio` 736 | 737 | This is only within the types: videoRecording 738 | 739 | #### `system` 740 | 741 | This is only within the types: computerProgram' 742 | 743 | -------------------------------------------------------------------------------- /lib/definitions/activity-streams/collection-behavior.js: -------------------------------------------------------------------------------- 1 | import { Transform } from 'stream' 2 | import { 3 | makeGet, 4 | makeAll, 5 | makeSet 6 | // makeAdd, 7 | // makeRemove 8 | } from '../helpers' 9 | import { spo } from '../../utils' 10 | import PageStream from './page-stream' 11 | 12 | const CollectionBehavior = {} 13 | 14 | CollectionBehavior.itemStream = function () { 15 | const stream = this.hr.graph.getStream(spo(this.name, 'as:items')) 16 | const cast = this._castToNodeOrValue.bind(this) 17 | const tripleToNode = new Transform({ 18 | objectMode: true, 19 | transform (data, encoding, cb) { 20 | if (this.destroyed) { 21 | return cb(null, null) 22 | } 23 | cast(data.object) 24 | .then(v => !this.destroyed && cb(null, v)) 25 | .catch(cb) 26 | }, 27 | destroy () { 28 | stream.destroy() 29 | 
} 30 | }) 31 | return stream.pipe(tripleToNode) 32 | } 33 | 34 | CollectionBehavior.stream = function (opts) { 35 | return new PageStream(this, opts) 36 | } 37 | 38 | CollectionBehavior.addNewPage = async function () { 39 | const last = await this.lastPage() 40 | const page = await this.hr.createNode('as:CollectionPage', { 'as:partOf': this }) 41 | if (last) { 42 | await page.setPrev(last) 43 | await last.setNext(page) 44 | } else { 45 | await this.setFirstPage(page) 46 | } 47 | await this.setLastPage(page) 48 | return page 49 | } 50 | // TODO: add ability to remove pages 51 | 52 | // as:totalItems 53 | /* A non-negative integer specifying the total number of objects contained by the logical view of the collection. This number might not reflect the actual number of items serialized within the Collection object instance. */ 54 | CollectionBehavior.totalItems = async function () { 55 | const total = await this.get('as:totalItems') 56 | if (!total) return 0 57 | return total 58 | } 59 | CollectionBehavior.incrementItemCount = async function (inc = 1) { 60 | const count = await this.totalItems() 61 | return this.set('as:totalItems', Number(count) + inc) 62 | } 63 | 64 | // as:current 65 | /* 66 | In a paged Collection, indicates the page that contains the most recently updated member items. 67 | Range: CollectionPage | Link 68 | */ 69 | CollectionBehavior.currentPage = makeGet('as:current') 70 | CollectionBehavior.setCurrentPage = makeSet('as:current') 71 | 72 | // as:first 73 | /* 74 | In a paged Collection, indicates the furthest preceeding page of items in the collection. 
75 | Range: CollectionPage | Link 76 | */ 77 | CollectionBehavior.firstPage = makeGet('as:first') 78 | CollectionBehavior.setFirstPage = makeSet('as:first') 79 | 80 | CollectionBehavior.getPageAt = async function (i) { 81 | if (i < 0) return undefined 82 | let next = await this.firstPage() 83 | if (!next) return 84 | let offset = 0 85 | while (offset < i) { 86 | next = await next.next() 87 | if (!next) return 88 | offset++ 89 | } 90 | return next 91 | } 92 | 93 | // as:last 94 | /* 95 | In a paged Collection, indicates the furthest proceeding page of the collection. 96 | Range: CollectionPage | Link 97 | */ 98 | CollectionBehavior.lastPage = makeGet('as:last') 99 | CollectionBehavior.setLastPage = makeSet('as:last') 100 | 101 | // as:items 102 | /* 103 | Identifies the items contained in a collection. The items might be ordered or unordered. 104 | */ 105 | CollectionBehavior.addItem = async function (item) { 106 | if (this.type === 'as:CollectionPage') { 107 | // increment item of parent 108 | const parent = await this.getParentCollection() 109 | await parent.incrementItemCount() 110 | await parent.setCurrentPage(this) 111 | } 112 | await this.incrementItemCount() 113 | this.add('as:items', item) 114 | } 115 | CollectionBehavior.items = makeAll('as:items') 116 | // TODO: add ability to remove items 117 | 118 | export default CollectionBehavior 119 | -------------------------------------------------------------------------------- /lib/definitions/activity-streams/collection-page-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeGet, 3 | makeSet 4 | // makeRemove 5 | } from '../helpers' 6 | 7 | const CollectionPageBehavior = {} 8 | 9 | // as:partOf 10 | /* Identifies the Collection to which a CollectionPage objects items belong. */ 11 | CollectionPageBehavior.getParentCollection = makeGet('as:partOf') 12 | // as:next 13 | /* In a paged Collection, indicates the next page of items. 
*/ 14 | CollectionPageBehavior.next = makeGet('as:next') 15 | CollectionPageBehavior.setNext = makeSet('as:next') 16 | 17 | // as:prev 18 | /* In a paged Collection, identifies the previous page of items. */ 19 | CollectionPageBehavior.prev = makeGet('as:prev') 20 | CollectionPageBehavior.setPrev = makeSet('as:prev') 21 | 22 | // TODO: potentially override adding items to automatically increment the parent collections totalItems 23 | 24 | export default CollectionPageBehavior 25 | -------------------------------------------------------------------------------- /lib/definitions/activity-streams/index.js: -------------------------------------------------------------------------------- 1 | import { compose, createFactory } from '../utils' 2 | import StandardBehavior from '../standard-behavior' 3 | import ObjectBehavior from './object-behavior' 4 | import CollectionBehavior from './collection-behavior' 5 | import CollectionPageBehavior from './collection-page-behavior' 6 | 7 | const ComposedObjectBehavior = compose(StandardBehavior, ObjectBehavior) 8 | const ComposedCollectionBehavior = compose(ComposedObjectBehavior, CollectionBehavior) 9 | 10 | const createObjectNode = createFactory(ComposedObjectBehavior) 11 | const createCollectionNode = createFactory(ComposedCollectionBehavior) 12 | const createCollectionPageNode = createFactory(compose(ComposedCollectionBehavior, CollectionPageBehavior)) 13 | 14 | export default { 15 | 'as:Object': createObjectNode, 16 | 'as:Collection': createCollectionNode, 17 | 'as:CollectionPage': createCollectionPageNode, 18 | 'as:OrderedCollection': createCollectionNode, 19 | 'as:OrderedCollectionPage': createCollectionPageNode 20 | } 21 | -------------------------------------------------------------------------------- /lib/definitions/activity-streams/object-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeGet, 3 | makeSet, 4 | makeRemove 5 | } from '../helpers' 6 | 7 | 
const ObjectBehavior = {} 8 | 9 | // as:attachment 10 | // as:attributedTo 11 | // as:audience 12 | // as:content 13 | /* 14 | The content or textual representation of the Object encoded as a JSON string. By default, the value of content is HTML. The mediaType property can be used in the object to indicate a different content type. 15 | */ 16 | ObjectBehavior.content = makeGet('as:content') 17 | ObjectBehavior.setContent = makeSet('as:content') 18 | ObjectBehavior.removeContent = makeRemove('as:content') 19 | 20 | // as:context 21 | /* 22 | Identifies the context within which the object exists or an activity was performed. 23 | The notion of "context" used is intentionally vague. The intended function is to serve as a means of grouping objects and activities that share a common originating context or purpose. An example could be all activities relating to a common project or event. 24 | Range: Object | Link 25 | */ 26 | ObjectBehavior.context = makeGet('as:context') 27 | ObjectBehavior.setContext = makeSet('as:context') 28 | ObjectBehavior.removeContext = makeRemove('as:context') 29 | 30 | // as:name 31 | /* A simple, human-readable, plain-text name for the object. HTML markup must not be included. */ 32 | ObjectBehavior.getName = makeGet('as:name') 33 | ObjectBehavior.setName = makeSet('as:name') 34 | ObjectBehavior.removeName = makeRemove('as:name') 35 | 36 | // as:endTime 37 | // as:generator 38 | // as:icon 39 | // as:image 40 | // as:inReplyTo 41 | // as:location 42 | // as:preview 43 | // as:published 44 | // as:replies 45 | // as:startTime 46 | 47 | // as:summary 48 | /* A natural language summarization of the object encoded as HTML. Multiple language tagged summaries may be provided. 
*/ 49 | ObjectBehavior.summary = makeGet('as:summary') 50 | ObjectBehavior.setSummary = makeSet('as:summary') 51 | ObjectBehavior.removeSummary = makeRemove('as:summary') 52 | 53 | // as:tag 54 | 55 | // as:updated 56 | /* The date and time at which the object was updated */ 57 | // TODO: should override the default set - to include an update to this field automatically 58 | 59 | // as:url 60 | // as:to 61 | // as:bto 62 | // as:cc 63 | // as:bcc 64 | 65 | // as:mediaType 66 | /* 67 | When used on a Link, identifies the MIME media type of the referenced resource. 68 | When used on an Object, identifies the MIME media type of the value of the content property. If not specified, the content property is assumed to contain text/html content. 69 | */ 70 | ObjectBehavior.mediaType = makeGet('as:mediaType') 71 | ObjectBehavior.setMediaType = makeSet('as:mediaType') 72 | ObjectBehavior.removeMediaType = makeRemove('as:mediaType') 73 | 74 | // as:duration 75 | 76 | export default ObjectBehavior 77 | -------------------------------------------------------------------------------- /lib/definitions/activity-streams/page-stream.js: -------------------------------------------------------------------------------- 1 | import { Readable } from 'stream' 2 | 3 | class PageStream extends Readable { 4 | constructor (collection, opts) { 5 | super({ objectMode: true }) 6 | this.collection = collection 7 | this.next = null 8 | this.itemStream = null 9 | this.count = 0 10 | this.currentPage = opts && opts.page !== undefined ? opts.page : 0 11 | this.startPage = this.currentPage 12 | this.endPage = (opts && opts.page !== undefined) ? this.currentPage : Infinity 13 | this.limit = (opts && opts.limit) ? 
opts.limit : Infinity 14 | this.proxyStreamData = this._proxyStreamData.bind(this) 15 | this.nextStream = this._nextStream.bind(this) 16 | } 17 | 18 | _proxyStreamData (data) { 19 | if (this.count >= this.limit) { 20 | this.push(null) 21 | // this is required for node 10 22 | process.nextTick(() => this.destroy()) 23 | return 24 | } 25 | const ok = this.push(data) 26 | this.count++ 27 | if (!ok) this.itemStream.pause() 28 | if (this.count === this.limit) { 29 | } 30 | } 31 | 32 | _nextStream () { 33 | if (this.next && (this.currentPage < this.endPage)) { 34 | this.next.next() 35 | .then(next => { 36 | this.next = next 37 | if (!next) { 38 | this.push(null) 39 | return 40 | } 41 | this.newItemStream(this.next.itemStream()) 42 | }) 43 | } else { 44 | this.push(null) 45 | } 46 | } 47 | 48 | newItemStream (stream) { 49 | this.itemStream = stream 50 | this.itemStream.on('data', this.proxyStreamData) 51 | this.itemStream.on('end', this.nextStream) 52 | this.itemStream.on('error', (err) => { 53 | this.emit('error', err) 54 | }) 55 | } 56 | 57 | _destroy () { 58 | if (this.itemStream) { 59 | this.itemStream.removeListener('end', this.nextStream) 60 | this.itemStream.destroy() 61 | } 62 | } 63 | 64 | _read () { 65 | if (this.count >= this.limit) return 66 | if (!this.next && !this.itemStream) { 67 | this.collection.getPageAt(this.startPage) 68 | .then(next => { 69 | if (!next) { 70 | // no first page this is an unpaginated collection 71 | this.itemStream = this.newItemStream(this.collection.itemStream()) 72 | } else { 73 | this.next = next 74 | this.newItemStream(next.itemStream()) 75 | } 76 | }) 77 | return 78 | } 79 | if (this.itemStream && this.itemStream.isPaused()) { 80 | this.itemStream.once('readable', (data) => { 81 | if (data !== null) { 82 | this.push(data) 83 | } 84 | this.itemStream.resume() 85 | }) 86 | } 87 | } 88 | } 89 | 90 | export default PageStream 91 | -------------------------------------------------------------------------------- 
/lib/definitions/annotation-node.js: -------------------------------------------------------------------------------- 1 | import inherits from 'inherits' 2 | import StandardNode from './standard-node' 3 | import { spo } from '../utils' 4 | 5 | const MOTIVATIONS = { 6 | assessing: 'oa:assessing', 7 | bookmarking: 'oa:bookmarking', 8 | classifying: 'oa:classifying', 9 | commenting: 'oa:commenting', 10 | describing: 'oa:describing', 11 | editing: 'oa:editing', 12 | highlighting: 'oa:highlighting', 13 | identifying: 'oa:identifying', 14 | linking: 'oa:linking', 15 | moderating: 'oa:moderating', 16 | questioning: 'oa:questioning', 17 | replying: 'oa:replying', 18 | tagging: 'oa:tagging' 19 | } 20 | 21 | function AnnotationNode (hr, name, type, ctx) { 22 | StandardNode.call(this, hr, name, type, ctx) 23 | } 24 | 25 | inherits(AnnotationNode, StandardNode) 26 | 27 | AnnotationNode.getTargets = async function () { 28 | return this.all('oa:hasTarget') 29 | } 30 | 31 | AnnotationNode.getBodies = async function () { 32 | return this.all('oa:hasBody') 33 | } 34 | 35 | AnnotationNode.setMotivation = async function (motivation) { 36 | const motive = MOTIVATIONS[motivation] 37 | if (motive) return this.set('oa:hasMotivation', motive) 38 | } 39 | 40 | AnnotationNode.setTarget = async function (id, opts) { 41 | // TODO: check if target is already assigned 42 | const target = await this.hr.newBlankNodeName() 43 | await this.hr._put(spo(target, 'oa:hasSource', id)) 44 | // todo: add support for other selectors 45 | const selector = await this.hr.createNode('oa:TextPositionSelector') 46 | await selector.set('oa:start', opts.start) 47 | await selector.set('oa:end', opts.end) 48 | await this.hr._put(spo(target, 'oa:hasSelector', selector)) 49 | await this.set('oa:hasTarget', { name: target }) 50 | } 51 | 52 | AnnotationNode.setBody = async function (node) { 53 | const bodies = await this.getBodies() 54 | if (bodies) { 55 | // should destroy if not connected to another annotation 56 | // 
for now just remove link 57 | // TODO: dont remove if it is node 58 | await Promise.all(bodies.map(body => { 59 | this.remove('oa:hasBody', body) 60 | })) 61 | } 62 | if (typeof node === 'string') { 63 | // add as an embeded text node text node 64 | const body = await this.hr.createNode('oa:TextualBody', { 65 | 'rdf:value': node, 66 | 'dc:format': 'text/plain' 67 | }) 68 | this.set('oa:hasBody', body) 69 | return 70 | } 71 | // else simply point to the node 72 | this.set('oa:hasBody', node) 73 | } 74 | 75 | export default AnnotationNode 76 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/contribution-behavior.js: -------------------------------------------------------------------------------- 1 | import { makeGet, makeSet } from '../helpers' 2 | 3 | const ContributionBehavior = {} 4 | 5 | // set/get agent is inherited from SharedBehavior 6 | ContributionBehavior.getRole = makeGet('bf:role') 7 | ContributionBehavior.setRole = makeSet('bf:role') 8 | 9 | export default ContributionBehavior 10 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/index.js: -------------------------------------------------------------------------------- 1 | import { compose, createFactory } from '../utils' 2 | import StandardBehavior from '../standard-behavior' 3 | import AgentBehavior from '../foaf/agent-behavior' 4 | import PersonBehavior from '../foaf/person-behavior' 5 | import WorkBehavior from './work-behavior' 6 | import InstanceBehavior from './instance-behavior' 7 | import ItemBehavior from './item-behavior' 8 | import SharedBehavior from './shared-behavior' 9 | import WorkOrInstanceBehavior from './work-or-instance-behavior' 10 | import WorkInstanceOrItemBehavior from './work-instance-or-item-behavior' 11 | import ContributionBehavior from './contribution-behavior' 12 | 13 | const commonBehaviors = compose( 14 | StandardBehavior, 15 | SharedBehavior 16 | ) 17 | 18 
| const commonBehaviorsWorkInstanceItem = compose( 19 | commonBehaviors, 20 | WorkInstanceOrItemBehavior 21 | ) 22 | 23 | const WorkNode = createFactory(compose( 24 | commonBehaviorsWorkInstanceItem, 25 | WorkOrInstanceBehavior, 26 | WorkBehavior 27 | )) 28 | const InstanceNode = createFactory(compose( 29 | commonBehaviorsWorkInstanceItem, 30 | WorkOrInstanceBehavior, 31 | InstanceBehavior 32 | )) 33 | const ItemNode = createFactory(compose( 34 | commonBehaviorsWorkInstanceItem, 35 | ItemBehavior 36 | )) 37 | 38 | const AgentNode = createFactory(compose(commonBehaviors, AgentBehavior)) 39 | const PersonNode = createFactory(compose(commonBehaviors, AgentBehavior, PersonBehavior)) 40 | const ContributionNode = createFactory(compose(commonBehaviors, ContributionBehavior)) 41 | 42 | export default { 43 | 'bf:Work': WorkNode, 44 | // subclasses of Work 45 | 'bf:Text': WorkNode, 46 | 'bf:Cartography': WorkNode, 47 | 'bf:Audio': WorkNode, 48 | 'bf:NotatedMusic': WorkNode, 49 | 'bf:NotatedMovement': WorkNode, 50 | 'bf:Dataset': WorkNode, 51 | 'bf:StillImage': WorkNode, 52 | 'bf:MovingImage': WorkNode, 53 | 'bf:Object': WorkNode, 54 | 'bf:Multimedia': WorkNode, 55 | 'bf:MixedMaterial': WorkNode, 56 | 57 | 'bf:Instance': InstanceNode, 58 | // subclasses of Instances 59 | 'bf:Print': InstanceNode, 60 | 'bf:Manuscript': InstanceNode, 61 | 'bf:Archival': InstanceNode, 62 | 'bf:Tactile': InstanceNode, 63 | 'bf:Electronic': InstanceNode, 64 | 65 | 'bf:Item': ItemNode, 66 | 'bf:Contribution': ContributionNode, 67 | 'bf:Agent': AgentNode, 68 | 'foaf:Agent': AgentNode, 69 | 'bf:Person': PersonNode, 70 | 'foaf:Person': PersonNode 71 | } 72 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/instance-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeRecipricalBinding, 3 | makeGet, 4 | makeSet, 5 | makeRemove, 6 | makeAdd, 7 | makeAll, 8 | allWithType 9 | } from 
'../helpers' 10 | 11 | const InstanceBehavior = {} 12 | // has subclass 13 | // Print 14 | // Manuscript 15 | // Archival 16 | // Tactile 17 | // Electronic 18 | 19 | // provisionActivityStatement - literal 20 | // // Statement relating to providers of a resource; usually transcribed. 21 | // responsibilityStatement -> literal 22 | // // Statement relating to any persons, families, or corporate bodies responsible for the creation of, or contributing to the content of a resource; usually transcribed. 23 | 24 | // editionStatement -> literal 25 | InstanceBehavior.editionStatement = makeGet('bf:editionStatement') 26 | InstanceBehavior.setEditionStatement = makeSet('bf:editionStatement') 27 | // editionEnumeration -> literal 28 | InstanceBehavior.editionEnumeration = makeGet('bf:editionEnumeration') 29 | InstanceBehavior.setEditionEnumeration = makeSet('bf:editionEnumeration') 30 | // seriesStatement -> Literal 31 | InstanceBehavior.seriesStatement = makeGet('bf:seriesStatement') 32 | InstanceBehavior.setSeriesStatement = makeSet('bf:seriesStatement') 33 | // seriesEnumeration -> Literal 34 | InstanceBehavior.seriesEnumeration = makeGet('bf:seriesEnumeration') 35 | InstanceBehavior.setSeriesEnumeration = makeSet('bf:seriesEnumeration') 36 | // subseriesEnumeration -> Literal 37 | // subseriesStatement -> Literal 38 | 39 | // provisionActivity 40 | InstanceBehavior.addPublication = async function (data) { 41 | const publication = await this.hr.createNode('bf:Publication', { 'bf:date': data.date }) 42 | // add place 43 | if (data.place) { 44 | let places = await this.hr.findPlaces({ label: data.place }) 45 | let place 46 | if (places.length === 0) { 47 | place = await this.hr.createNode('bf:Place', { 'rdfs:label': data.place }) 48 | } else { 49 | place = places[0] 50 | } 51 | await publication.set('bf:place', place) 52 | } 53 | if (data.publisher) { 54 | let agents = await this.hr.findAgents({ label: data.publisher }) 55 | let publisher 56 | if (agents.length === 0) { 57 
| publisher = await this.hr.createNode('bf:Agent', { 'rdfs:label': data.publisher }) 58 | } else { 59 | publisher = agents[0] 60 | } 61 | await publication.set('bf:agent', publisher) 62 | } 63 | return this.add('bf:provisionActivity', publication) 64 | } 65 | 66 | InstanceBehavior.publications = async function () { 67 | const publications = await allWithType.call(this, 'bf:provisionActivity', 'bf:Publication') 68 | return Promise.all(publications.map(async (pub) => { 69 | return { 70 | date: await pub.get('bf:date'), 71 | place: await pub.get('bf:place'), 72 | agent: await pub.get('bf:agent') 73 | } 74 | })) 75 | } 76 | 77 | // baseMaterial -> BaseMaterial 78 | // bookFormat -> BookFormat 79 | // carrier -> Carrier 80 | // dimensions -> Literal 81 | 82 | // extent -> Extent 83 | 84 | // // Number and type of units and/or subunits making up a resource. 85 | // hasItem -> Item 86 | InstanceBehavior.addItem = makeRecipricalBinding('bf:hasItem', 'bf:itemOf', 'many-to-one') 87 | InstanceBehavior.removeItem = makeRemove('bf:hasItem') 88 | InstanceBehavior.items = makeAll('bf:hasItem') 89 | // hasReproduction -> Instance 90 | // instanceOf -> Work 91 | InstanceBehavior.instanceOf = makeRecipricalBinding('bf:instanceOf', 'bf:hasInstance', 'many-to-many') 92 | // issuedWith -> Instance 93 | 94 | export default InstanceBehavior 95 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/item-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeGet, 3 | makeSet, 4 | makeRemove, 5 | makeSetAsLabelOnType, 6 | makeGetLabelFromNode 7 | // makeAdd, 8 | // makeAll 9 | } from '../helpers' 10 | import { isNode } from '../../utils' 11 | 12 | const ItemBehavior = {} 13 | 14 | // enumerationAndChronology -> EnumerationAndChronology / Enumeration or Chronology 15 | // // Enumeration: Numbering or other enumeration associated with issues or items held. 
16 | // // Chronology: Dates associated with issues or items held. 17 | // sublocation -> Sublocation 18 | // heldBy -> Agent 19 | ItemBehavior.setHeldBy = async function (name) { 20 | if (isNode(name)) return this.set('bf:heldBy', name) 21 | const agents = await this.hr.findAgents({ label: name }) 22 | let agent 23 | if (agents.length === 0) { 24 | agent = await this.hr.createNode('bf:Agent', { 'rdfs:label': name }) 25 | } else { 26 | agent = agents[0] 27 | } 28 | return this.set('bf:heldBy', agent) 29 | } 30 | ItemBehavior.heldBy = makeGet('bf:heldBy') 31 | 32 | // immediateAcquisition -> ImmediateAcquisition 33 | // // Information about the circumstances, e.g., source, date, method, under which the resource was directly acquired. 34 | // itemOf -> Instance 35 | ItemBehavior.itemOf = makeGet('bf:itemOf') 36 | // physicalLocation -> Literal 37 | ItemBehavior.getPhysicalLocation = makeGet('bf:physicalLocation') 38 | ItemBehavior.setPhysicalLocation = makeSet('bf:physicalLocation') 39 | ItemBehavior.removePhysicalLocation = makeRemove('bf:physicalLocation') 40 | 41 | // shelfMark -> ShelfMark 42 | // // Piece identifier, such as a call or other type of number. 43 | ItemBehavior.setShelfMark = makeSetAsLabelOnType('bf:shelfMark', 'bf:ShelfMark') 44 | ItemBehavior.getShelfMark = makeGetLabelFromNode('bf:shelfMark', 'bf:ShelfMark') 45 | // electronicLocator -> 46 | // // Electronic location from which the resource is available. 
47 | ItemBehavior.getElectronicLocator = makeGet('bf:electronicLocator') 48 | ItemBehavior.setElectronicLocator = makeSet('bf:electronicLocator') 49 | ItemBehavior.removeElectronicLocator = makeRemove('bf:electronicLocator') 50 | export default ItemBehavior 51 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/shared-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeGet, 3 | makeSet, 4 | makeRemove, 5 | makeAdd, 6 | // makeAll, 7 | makeSetAsLabelOnType, 8 | makeGetLabelFromNode 9 | } from '../helpers' 10 | 11 | const SharedBehavior = {} 12 | 13 | // adminMetadata -> AdminMetadata 14 | 15 | // agent -> Agent 16 | SharedBehavior.getAgent = makeGet('bf:agent') 17 | SharedBehavior.setAgent = makeSet('bf:agent') 18 | 19 | // code -> Literal 20 | SharedBehavior.getCode = makeGet('bf:code') 21 | SharedBehavior.setCode = makeSet('bf:code') 22 | SharedBehavior.removeCode = makeRemove('bf:code') 23 | 24 | // count -> Literal 25 | SharedBehavior.getCount = makeGet('bf:count') 26 | SharedBehavior.setCount = makeSet('bf:count') 27 | SharedBehavior.removeCount = makeRemove('bf:count') 28 | 29 | // date -> Literal 30 | SharedBehavior.getDate = makeGet('bf:date') 31 | SharedBehavior.setDate = makeSet('bf:date') 32 | SharedBehavior.removeDate = makeRemove('bf:date') 33 | 34 | // identifiedBy -> Identifier 35 | SharedBehavior.identifiers = async function () { 36 | const ids = await this.all('bf:identifiedBy') 37 | return Promise.all(ids.map(async id => ({ 38 | type: id.type, 39 | value: await id.get('rdf:value') 40 | }))) 41 | } 42 | SharedBehavior.addIdentifier = makeAdd('bf:identifiedBy') 43 | SharedBehavior.removeIdentifier = makeRemove('bf:identifiedBy') 44 | 45 | SharedBehavior.addDoi = async function (value) { 46 | const node = await this.hr.createNode('bf:Doi', { 'rdf:value': value }) 47 | return this.add('bf:identifiedBy', node) 48 | } 49 | 
// Attach an ISBN identifier node (`bf:Isbn` with an rdf:value) to this resource.
// NOTE(fix): the node type was previously the typo 'bf:Isnb', so ISBNs were
// written under a misspelled type that identifier lookups would never match.
SharedBehavior.addIsbn = async function (value) {
  const node = await this.hr.createNode('bf:Isbn', { 'rdf:value': value })
  return this.add('bf:identifiedBy', node)
}
// Attach an ISSN identifier node to this resource.
SharedBehavior.addIssn = async function (value) {
  const node = await this.hr.createNode('bf:Issn', { 'rdf:value': value })
  return this.add('bf:identifiedBy', node)
}
// Attach an ISSN-L (linking ISSN) identifier node to this resource.
SharedBehavior.addIssnL = async function (value) {
  const node = await this.hr.createNode('bf:IssnL', { 'rdf:value': value })
  return this.add('bf:identifiedBy', node)
}

// language -> Language
SharedBehavior.language = makeGetLabelFromNode('bf:language', 'bf:Language')
SharedBehavior.setLanguage = makeSetAsLabelOnType('bf:language', 'bf:Language')
SharedBehavior.removeLanguage = makeRemove('bf:language')

// note -> Note
SharedBehavior.note = makeGetLabelFromNode('bf:note', 'bf:Note')
SharedBehavior.setNote = makeSetAsLabelOnType('bf:note', 'bf:Note')
SharedBehavior.removeNote = makeRemove('bf:note')

// part -> Literal
// // Part of a resource to which information applies.
SharedBehavior.getPart = makeGet('bf:part')
SharedBehavior.setPart = makeSet('bf:part')
SharedBehavior.removePart = makeRemove('bf:part')

// place -> Place
// // Geographic location or place entity associated with a resource or element of description, such as the place associated with the publication, printing, distribution, issue, release or production of a resource, place of an event.

// qualifier -> Literal
// // Qualifier of information, such as an addition to a title to make it unique or qualifying information associated with an identifier.
83 | SharedBehavior.getQualifier = makeGet('bf:qualifier') 84 | SharedBehavior.setQualifier = makeSet('bf:qualifier') 85 | SharedBehavior.removeQualifier = makeRemove('bf:qualifier') 86 | 87 | // source -> Source 88 | // // Resource from which value or label came or was derived, such as the formal source/scheme from which a classification number is taken or derived, list from which an agent name is taken or derived, source within which an identifier is unique. 89 | SharedBehavior.setSource = makeSetAsLabelOnType('bf:source', 'bf:Source') 90 | SharedBehavior.source = makeGetLabelFromNode('bf:source', 'bf:Source') 91 | 92 | // status -> Status 93 | // // Designation of the validity or position of something, such as indication that the classification number is canceled or invalid, circulation availability of an item, indication of whether the identifier is canceled or invalid. 94 | SharedBehavior.setStatus = makeSetAsLabelOnType('bf:status', 'bf:Status') 95 | SharedBehavior.status = makeGetLabelFromNode('bf:status', 'bf:Status') 96 | 97 | // unit -> Unit 98 | // // Units in which a value is expressed, such as the physical or logical constituent of a resource (e.g., a volume, audiocassette, film reel, a map, a digital file). 
99 | 100 | export default SharedBehavior 101 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/work-behavior.js: -------------------------------------------------------------------------------- 1 | const WorkBehavior = {} 2 | // Subclasses: 3 | // Text 4 | // Cartography 5 | // Audio 6 | // NotatedMusic 7 | // NotatedMovement 8 | // Dataset 9 | // StillImage 10 | // MovingImage 11 | // Object 12 | // Multimedia 13 | // MixedMaterial 14 | 15 | // eventContentOf -> Event 16 | // expressionOf -> Work 17 | // hasExpression -> Work 18 | // hasInstance -> Instance 19 | 20 | // geographicCoverage -> GeographicCoverage 21 | 22 | // historyOfWork -> Literal 23 | 24 | // legalDate -> date 25 | // // Date of legal work, or promulgation of a law, or signing of a treaty. 26 | 27 | // originDate -> Literal 28 | // // Date or date range associated with the creation of a Work. 29 | 30 | // originPlace -> Place 31 | // // Place from which the creation of the Work originated. 32 | 33 | // temporalCoverage -> Literal 34 | // // Time period coverage of the content of the resource. 35 | 36 | // version -> Literal 37 | // // Term or terms that identify works such as arranged for music, vulgate for religious work, etc. 38 | 39 | // content -> Content 40 | // // Categorization reflecting the fundamental form of communication in which the content is expressed and the human sense through which it is intended to be perceived. 
41 | 42 | export default WorkBehavior 43 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/work-instance-or-item-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeGet, 3 | makeSet, 4 | makeRemove, 5 | makeAll, 6 | makeAdd, 7 | makeGetLabelFromNode, 8 | makeSetAsLabelOnType, 9 | makeRemoveOfType 10 | } from '../helpers' 11 | 12 | const WorkInstanceOrItemBehavior = {} 13 | 14 | // title -> Title 15 | WorkInstanceOrItemBehavior.setTitle = makeSetAsLabelOnType('bf:title', 'bf:Title') 16 | WorkInstanceOrItemBehavior.getTitle = makeGetLabelFromNode('bf:title', 'bf:Title') 17 | WorkInstanceOrItemBehavior.removeTitle = makeRemoveOfType('bf:title', 'bf:Title') 18 | 19 | WorkInstanceOrItemBehavior.setAbbreviatedTitle = makeSetAsLabelOnType('bf:title', 'bf:AbbreviatedTitle') 20 | WorkInstanceOrItemBehavior.getAbbreviatedTitle = makeGetLabelFromNode('bf:title', 'bf:AbbreviatedTitle') 21 | WorkInstanceOrItemBehavior.removeAbbreviatedTitle = makeRemoveOfType('bf:title', 'bf:AbbreviatedTitle') 22 | 23 | // subject 24 | WorkInstanceOrItemBehavior.addSubject = makeAdd('bf:subject') 25 | WorkInstanceOrItemBehavior.removeSubject = makeRemove('bf:subject') 26 | WorkInstanceOrItemBehavior.subjects = makeAll('bf:subject') 27 | 28 | // classification -> Classification 29 | // contribution -> Contribution 30 | WorkInstanceOrItemBehavior.addContributionNode = makeAdd('bf:contribution') 31 | WorkInstanceOrItemBehavior.addContribution = async function (agent, role) { 32 | const contribution = await this.hr.createNode('bf:Contribution', { 'bf:agent': agent, 'bf:role': role }) 33 | return this.add('bf:contribution', contribution) 34 | } 35 | WorkInstanceOrItemBehavior.contributions = async function (agent, role) { 36 | const contributions = await this.all('bf:contribution') 37 | return Promise.all(contributions.map(async (c) => { 38 | const agent = await c.getAgent() 39 | 
const role = await c.getRole() 40 | return { 41 | name: await agent.getName(), 42 | firstName: await agent.getFirstName(), 43 | lastName: await agent.getLastName(), 44 | role: role.name 45 | } 46 | })) 47 | } 48 | 49 | // custodialHistory -> Literal 50 | WorkInstanceOrItemBehavior.setCustodialHistory = makeSet('bf:custodialHistory') 51 | WorkInstanceOrItemBehavior.removeCustodialHistory = makeRemove('bf:custodialHistory') 52 | WorkInstanceOrItemBehavior.custodialHistory = makeGet('bf:custodialHistory') 53 | 54 | // genreForm -> GenreForm 55 | // usageAndAccessPolicy -> UsageAndAccessPolicy 56 | 57 | // RELATIONSHIPS 58 | // accompaniedBy -> Work, Instance or Item 59 | // accompanies -> Work, Instance or Item 60 | // hasEquivalent -> Work, Instance or Item 61 | // hasPart -> Work, Instance or Item 62 | // partOf -> Work, Instance or Item 63 | // referencedBy -> Work, Instance or Item 64 | // references -> Work, Instance or Item 65 | 66 | export default WorkInstanceOrItemBehavior 67 | -------------------------------------------------------------------------------- /lib/definitions/bibframe/work-or-instance-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | makeRecipricalBinding, 3 | makeGet, 4 | makeSet, 5 | makeRemove, 6 | makeAll, 7 | // makeAdd, 8 | makeSetAsLabelOnType, 9 | makeGetLabelFromNode 10 | } from '../helpers' 11 | 12 | const WorkOrInstanceBehavior = {} 13 | 14 | // // Relationships 15 | // absorbed -> Work or Instance 16 | // absorbedBy -> Work or Instance 17 | // continuedBy -> Work or Instance 18 | // continues -> Work or Instance 19 | // continuedInPartBy -> Work or Instance 20 | // continuesInPart -> Work or Instance 21 | // dataSource -> Work or Instance 22 | // derivativeOf -> Work or Instance 23 | // derivedFrom -> Work or Instance 24 | // findingAid -> Work or Instance 25 | // findingAidOf -> Work or Instance 26 | // firstIssue -> Work or Instance 27 | // lastIssue -> Work or Instance 
28 | // hasDerivative -> Work or Instance 29 | // index -> Work or Instance 30 | // indexOf -> Work or Instance 31 | // mergedToForm -> Work or Instance 32 | // mergerOf -> Work or Instance 33 | // originalVersion -> Work or Instance 34 | // originalVersionOf -> Work or Instance 35 | // otherEdition -> Work or Instance 36 | // precededBy -> Work or Instance 37 | // succeededBy -> Work or Instance 38 | // replacedBy -> Work or Instance 39 | // replacementOf -> Work or Instance 40 | // separatedFrom -> Work or Instance 41 | // SplitInto -> Work or Instance 42 | // supplement -> Work or Instance 43 | // supplementTo -> Work or Instance 44 | // translation -> Work or Instance 45 | // translationOf -> Work or Instance 46 | 47 | // seriesOf -> Work or Instance 48 | // hasSeries -> Work or Instance 49 | WorkOrInstanceBehavior.setSeriesOf = makeRecipricalBinding('bf:hasSeries', 'bf:seriesOf', 'one-to-many') 50 | WorkOrInstanceBehavior.hasSeries = makeGet('bf:hasSeries') 51 | WorkOrInstanceBehavior.seriesOf = makeAll('bf:seriesOf') 52 | // subseriesOf -> Work or Instance 53 | // hasSubseries -> Work or Instance 54 | 55 | // acquisitionSource -> AcquisitionSource 56 | // // Information about an organization, person, etc., from which a resource may be obtained. 57 | // acquisitionTerms -> Literal 58 | // // Conditions under which the publisher, distributor, etc., will normally supply a resource, e.g., price of a resource. 
59 | 60 | // awards -> Literal 61 | // // Information on awards associated with the described resource 62 | WorkOrInstanceBehavior.setAwards = makeSet('bf:awards') 63 | WorkOrInstanceBehavior.removeAwards = makeRemove('bf:awards') 64 | WorkOrInstanceBehavior.awards = makeGet('bf:awards') 65 | 66 | // copyrightDate -> Literal 67 | WorkOrInstanceBehavior.setCopyrightDate = makeSet('bf:copyrightDate') 68 | WorkOrInstanceBehavior.removeCopyrightDate = makeRemove('bf:copyrightDate') 69 | WorkOrInstanceBehavior.copyrightDate = makeGet('bf:copyrightDate') 70 | 71 | // coverArt -> CoverArt 72 | 73 | // credits -> Literal 74 | WorkOrInstanceBehavior.setCredits = makeSet('bf:credits') 75 | WorkOrInstanceBehavior.removeCredits = makeRemove('bf:credits') 76 | WorkOrInstanceBehavior.credits = makeGet('bf:credits') 77 | 78 | // duration -> Literal 79 | WorkOrInstanceBehavior.setDuration = makeSet('bf:duration') 80 | WorkOrInstanceBehavior.removeDuration = makeRemove('bf:duration') 81 | WorkOrInstanceBehavior.duration = makeGet('bf:duration') 82 | 83 | // frequency -> Frequency 84 | 85 | // intendedAudience -> IntendedAudience 86 | 87 | // issuance -> Issuance 88 | // TODO: look at LOC controlled vocabulary 89 | // http://id.loc.gov/search/?q=memberOf:http://id.loc.gov/vocabulary/issuance/collection_issuance 90 | // serial: serl 91 | // integrating resource: intg 92 | // single unit/monograph: mono 93 | // multipart monograph/multivolume monograph: mulm 94 | 95 | // media -> Media 96 | 97 | // musicFormat -> MusicFormat 98 | 99 | // natureOfContent -> Literal 100 | // // Characterization that epitomizes the primary content of a resource, e.g., field recording of birdsong; combined time series analysis and graph plotting system. 
101 | WorkOrInstanceBehavior.setNatureOfContent = makeSet('bf:natureOfContent') 102 | WorkOrInstanceBehavior.removePreferredCitation = makeRemove('bf:natureOfContent') 103 | WorkOrInstanceBehavior.natureOfContent = makeGet('bf:natureOfContent') 104 | 105 | // preferredCitation -> Literal 106 | WorkOrInstanceBehavior.setPreferredCitation = makeSet('bf:preferredCitation') 107 | WorkOrInstanceBehavior.removePreferredCitation = makeRemove('bf:preferredCitation') 108 | WorkOrInstanceBehavior.preferredCitation = makeGet('bf:preferredCitation') 109 | 110 | // review -> Review 111 | 112 | // soundContent -> SoundContent 113 | 114 | // summary -> Summary 115 | WorkOrInstanceBehavior.setSummary = makeSetAsLabelOnType('bf:summary', 'bf:Summary') 116 | WorkOrInstanceBehavior.getSummary = makeGetLabelFromNode('bf:summary', 'bf:Summary') 117 | 118 | // Work or instance -> bf:copyrightRegistration 119 | WorkOrInstanceBehavior.setRights = makeSetAsLabelOnType('bf:copyrightRegistration', 'bf:CopyrightRegistration') 120 | WorkOrInstanceBehavior.rights = makeGetLabelFromNode('bf:copyrightRegistration', 'bf:CopyrightRegistration') 121 | 122 | // supplementaryContent -> SupplementaryContent 123 | // tableOfContents -> TableOfContents 124 | 125 | export default WorkOrInstanceBehavior 126 | -------------------------------------------------------------------------------- /lib/definitions/container-behavior.js: -------------------------------------------------------------------------------- 1 | import nanoiterator from 'nanoiterator' 2 | import toStream from 'nanoiterator/to-stream' 3 | 4 | // TODO: add smart caching to avoid a lot of unnecessary iteration 5 | 6 | const ContainerBehavior = {} 7 | 8 | ContainerBehavior.firstItem = function () { 9 | return this.get('co:firstItem') 10 | } 11 | 12 | ContainerBehavior.itemContent = function () { 13 | return this.get('co:itemContent') 14 | } 15 | 16 | ContainerBehavior.contains = function () { 17 | return this.all('po:contains') 18 | } 19 | 20 | 
ContainerBehavior.next = function () { 21 | return this.get('co:nextItem') 22 | } 23 | 24 | ContainerBehavior.iterator = function (opts) { 25 | var next = null 26 | var returnContent = !(opts && opts.listItems) 27 | var iterator = nanoiterator({ 28 | next: (cb) => { 29 | var promised = (!next) ? this.firstItem() : next.next() 30 | promised 31 | .then(node => { 32 | next = node 33 | return returnContent ? next && next.itemContent() : next 34 | }) 35 | .then(node => cb(null, node)) 36 | .catch(cb) 37 | } 38 | }) 39 | return iterator 40 | } 41 | 42 | ContainerBehavior.stream = function (opts) { 43 | return toStream(this.iterator(opts)) 44 | } 45 | 46 | ContainerBehavior.iterate = async function (fn, opts) { 47 | // this could be replace with stream as so, but is much slower 48 | // return new Promise((resolve, reject) => { 49 | // const stream = this.stream(opts) 50 | // stream.on('data', fn) 51 | // stream.on('end', resolve) 52 | // stream.on('error', reject) 53 | // }) 54 | var next = await this.firstItem() 55 | while (next) { 56 | // these should probably be called in parallel to speed things up 57 | if (fn) { 58 | if (opts && opts.listItems) { 59 | await fn(next) 60 | } else { 61 | var contents = await next.itemContent() 62 | await fn(contents) 63 | } 64 | } 65 | next = await next.next() 66 | } 67 | } 68 | 69 | ContainerBehavior.lastItem = async function () { 70 | // first check if a last item is explicitly set 71 | var last = await this.get('co:lastItem') 72 | if (last) return last 73 | // else iterate over next items until there are none left 74 | await this.iterate((node) => { last = node }, { listItems: true }) 75 | return last 76 | } 77 | 78 | /* TODO: implement index insertion */ 79 | ContainerBehavior.insertNode = async function (node, index) { 80 | // validate newNode 81 | // 1. inserts`< node co:contains newNode >` 82 | await this.add('po:contains', node) 83 | // 2. 
make newItem `< newItem co:hasContent newNode >` 84 | var newItem = await this.hr.createNode('co:ListItem') 85 | await newItem.set('co:itemContent', node) 86 | // 3. if `< node co:firstItem ? >` does not exist, insert`< node co:firstItem newItem >` 87 | var alreadyHasFirst = await this.has('co:firstItem') 88 | // console.log('alreadyHasFirst', alreadyHasFirst) 89 | if (!alreadyHasFirst) { 90 | await this.set('co:firstItem', newItem) 91 | } else { 92 | // else find lastItem of node, or not at index-1, and insert ` newItem >` 93 | var lastItem = await this.lastItem() 94 | // console.log('lastItem', lastItem && lastItem.name) 95 | if (lastItem) { 96 | await lastItem.set('co:nextItem', newItem) 97 | } 98 | } 99 | // 4. return existing node 100 | return this 101 | } 102 | 103 | ContainerBehavior.updateList = async function (nodeIds) { 104 | // this is one of many possible implementations. 105 | // it could be smarter - determine programmatically whether 106 | // procedurally updating itemContent is more effective 107 | // than adjusting co:firstItem and co:nextItem pointers. 108 | // This probably would have to factor in if the nodeIds are new 109 | // or if they are just a reordering of existing content. 110 | let count = 0 111 | // 1. iterate over existing list 112 | // TODO: allow the iterator to stop early 113 | await this.iterate(async (child) => { 114 | // 1.2 check if current child is within range of array 115 | // 1.2.1 remove all nodes that are greater than nodeIds length 116 | if (count > nodeIds.length) return 117 | if (count === nodeIds.length) { 118 | // destroying this list node cascades down all the remaining nodes in the list 119 | // and in effect deletes all remaining nodes and their content. 120 | // We add this.name to .destroy's ignore list, so that the connection to 121 | // this.name -> po:contains does not prevent the removal of itemContent. 
122 | await child.parent.destroy([this.name]) 123 | count += 1 124 | return 125 | } 126 | const newNodeName = nodeIds[count] 127 | // 1.3 check if content is the same 128 | // if so do nothing 129 | if (child.name === newNodeName) { 130 | count += 1 131 | return 132 | } 133 | // if not then we want to update content pointer 134 | await child.parent.setContent({ name: newNodeName }) 135 | // and remove the current node from container 136 | if (!nodeIds.includes(child.name)) { 137 | await this.remove('po:contains', child) 138 | } 139 | await this.add('po:contains', { name: newNodeName }) 140 | count += 1 141 | }) 142 | // 2. insert remaining nodes 143 | while (count < nodeIds.length) { 144 | await this.insertNode({ name: nodeIds[count] }) 145 | count += 1 146 | } 147 | } 148 | 149 | ContainerBehavior.at = async function (index, opts) { 150 | let count = 0 151 | const iterator = this.iterator(opts) 152 | return new Promise((resolve, reject) => { 153 | iterator.next(onNext) 154 | function onNext (err, val) { 155 | if (err) { 156 | iterator.destroy() 157 | return reject(err) 158 | } 159 | if (val === null || count > index) { 160 | iterator.destroy() 161 | return resolve(null) 162 | } 163 | if (count === index) { 164 | iterator.destroy(() => resolve(val)) 165 | } 166 | count++ 167 | iterator.next(onNext) 168 | } 169 | }) 170 | } 171 | 172 | ContainerBehavior.removeNodeAt = async function (index) { 173 | // const node = await this.at(index, { listItems: true }) 174 | const node = await this.at(index) 175 | if (!node) return 176 | return node.destroy() 177 | } 178 | 179 | ContainerBehavior.removeNodesFrom = async function (index) { 180 | const node = await this.at(index, { listItems: true }) 181 | if (!node) return 182 | return node.destroy() 183 | } 184 | 185 | ContainerBehavior.removeNode = async function (node) { 186 | // 1. find node or node at index 187 | // 2. get nodeToDeletes nextItem 188 | // 3. delete key `< node po:contains nodeToDelete >` 189 | // 4. 
file all references `< ? co:nextItem nodeToDelete >` 190 | // 5. update all to `< ? co:nextItem nextItemOfNoteToDelete >` 191 | // 6. if `< node co:firstItem nodeToDelete >` exist 192 | // update it to `< node co:firstItem nextItemOfNoteToDelete >` 193 | } 194 | 195 | export default ContainerBehavior 196 | -------------------------------------------------------------------------------- /lib/definitions/foaf/agent-behavior.js: -------------------------------------------------------------------------------- 1 | import { makeGet, makeSet, makeRemove, makeAll, makeAdd } from '../helpers' 2 | 3 | const AgentBehavior = {} 4 | 5 | AgentBehavior.getName = makeGet('foaf:name') 6 | AgentBehavior.setName = makeSet('foaf:name') 7 | AgentBehavior.removeName = makeRemove('foaf:name') 8 | 9 | AgentBehavior.allInterests = makeAll('foaf:interest') 10 | AgentBehavior.addInterest = makeAdd('foaf:interest') 11 | AgentBehavior.removeInterest = makeRemove('foaf:interest') 12 | 13 | AgentBehavior.getAge = makeGet('foaf:age') 14 | AgentBehavior.setAge = makeSet('foaf:age') 15 | AgentBehavior.removeAge = makeRemove('foaf:age') 16 | 17 | AgentBehavior.getTitle = makeGet('foaf:title') 18 | AgentBehavior.setTitle = makeSet('foaf:title') 19 | AgentBehavior.removeTitle = makeRemove('foaf:title') 20 | 21 | AgentBehavior.allMadeRelations = makeAll('foaf:made') 22 | AgentBehavior.addMadeRelation = makeAdd('foaf:made') 23 | AgentBehavior.removeMadeRelation = makeRemove('foaf:made') 24 | 25 | export default AgentBehavior 26 | -------------------------------------------------------------------------------- /lib/definitions/foaf/person-behavior.js: -------------------------------------------------------------------------------- 1 | import { makeGet, makeSet, makeRemove, makeAll, makeAdd } from '../helpers' 2 | 3 | const PersonBehavior = {} 4 | 5 | PersonBehavior.getPlan = makeGet('foaf:plan') 6 | PersonBehavior.setPlan = makeSet('foaf:plan') 7 | PersonBehavior.removePlan = makeRemove('foaf:plan') 8 | 
9 | PersonBehavior.getFirstName = makeGet('foaf:firstName') 10 | PersonBehavior.setFirstName = makeSet('foaf:firstName') 11 | PersonBehavior.removeFirstName = makeRemove('foaf:firstName') 12 | 13 | PersonBehavior.getGivenName = makeGet('foaf:givenName') 14 | PersonBehavior.setGivenName = makeSet('foaf:givenName') 15 | PersonBehavior.removeGivenName = makeRemove('foaf:givenName') 16 | 17 | PersonBehavior.getSurname = makeGet('foaf:surname') 18 | PersonBehavior.setSurname = makeSet('foaf:surname') 19 | PersonBehavior.removeSurname = makeRemove('foaf:surname') 20 | 21 | PersonBehavior.getLastName = makeGet('foaf:lastName') 22 | PersonBehavior.setLastName = makeSet('foaf:lastName') 23 | PersonBehavior.removeLastName = makeRemove('foaf:lastName') 24 | 25 | PersonBehavior.getFamilyName = makeGet('foaf:familyName') 26 | PersonBehavior.setFamilyName = makeSet('foaf:familyName') 27 | PersonBehavior.removeFamilyName = makeRemove('foaf:familyName') 28 | 29 | PersonBehavior.getGeekcode = makeGet('foaf:geekcode') 30 | PersonBehavior.setGeekcode = makeSet('foaf:geekcode') 31 | PersonBehavior.removeGeekcode = makeRemove('foaf:geekcode') 32 | 33 | PersonBehavior.getCurrentProjects = makeGet('foaf:currentProject') 34 | PersonBehavior.allCurrentProjects = makeAll('foaf:currentProject') 35 | PersonBehavior.addCurrentProject = makeAdd('foaf:currentProject') 36 | PersonBehavior.getCurrentProject = makeRemove('foaf:currentProject') 37 | 38 | PersonBehavior.allPastProjects = makeAll('foaf:pastProject') 39 | PersonBehavior.addPastProject = makeAdd('foaf:pastProject') 40 | PersonBehavior.removePastProject = makeRemove('foaf:pastProject') 41 | 42 | PersonBehavior.allPublications = makeAll('foaf:publications') 43 | PersonBehavior.addPublication = makeAdd('foaf:publications') 44 | PersonBehavior.removePublication = makeRemove('foaf:publications') 45 | 46 | PersonBehavior.getWorkplaceHomepage = makeGet('foaf:workplaceHomepage') 47 | PersonBehavior.setWorkplaceHomepage = 
makeSet('foaf:workplaceHomepage') 48 | PersonBehavior.removeWorkplaceHomepage = makeRemove('foaf:workplaceHomepage') 49 | 50 | PersonBehavior.getWorkInfoHomepage = makeGet('foaf:workInfoHomepage') 51 | PersonBehavior.setWorkInfoHomepage = makeSet('foaf:workInfoHomepage') 52 | PersonBehavior.removeWorkInfoHomepage = makeRemove('foaf:workInfoHomepage') 53 | 54 | PersonBehavior.getSchoolHomepage = makeGet('foaf:schoolHomepage') 55 | PersonBehavior.setSchoolHomepage = makeSet('foaf:schoolHomepage') 56 | PersonBehavior.removeSchoolHomepage = makeRemove('foaf:schoolHomepage') 57 | 58 | PersonBehavior.getMyersBriggs = makeGet('foaf:myersBriggs') 59 | PersonBehavior.setMyersBriggs = makeSet('foaf:myersBriggs') 60 | PersonBehavior.removeMyersBriggs = makeRemove('foaf:myersBriggs') 61 | 62 | PersonBehavior.getImage = makeGet('foaf:img') 63 | PersonBehavior.setImage = makeSet('foaf:img') 64 | PersonBehavior.removeImage = makeRemove('foaf:img') 65 | 66 | PersonBehavior.allKnowRelations = makeAll('foaf:knows') 67 | PersonBehavior.addKnowRelation = makeAdd('foaf:knows') 68 | PersonBehavior.removeKnowRelation = makeRemove('foaf:knows') 69 | 70 | export default PersonBehavior 71 | -------------------------------------------------------------------------------- /lib/definitions/helpers.js: -------------------------------------------------------------------------------- 1 | import StandardBehavior from './standard-behavior' 2 | import { spo, isNode, toRdfValue } from '../utils' 3 | 4 | function makeShortcutFactory (fn, ...defaults) { 5 | return function (...args) { return fn.apply(this, [...defaults, ...args]) } 6 | } 7 | 8 | export const makeGet = makeShortcutFactory.bind(null, StandardBehavior.get) 9 | export const makeSet = makeShortcutFactory.bind(null, StandardBehavior.set) 10 | export const makeRemove = makeShortcutFactory.bind(null, StandardBehavior.remove) 11 | export const makeAll = makeShortcutFactory.bind(null, StandardBehavior.all) 12 | export const makeAdd = 
makeShortcutFactory.bind(null, StandardBehavior.add) 13 | 14 | async function getWithType (value, type) { 15 | const v = this.hr.graph.v 16 | const res = await this.hr._search([ 17 | spo(this.name, value, v('id')), 18 | spo(v('id'), 'rdf:type', type) 19 | ], { limit: 1 }) 20 | if (!res.length) return null 21 | return this.hr.node({ name: res[0].id, type }) 22 | } 23 | 24 | export async function allWithType (value, type) { 25 | const v = this.hr.graph.v 26 | const res = await this.hr._search([ 27 | spo(this.name, value, v('id')), 28 | spo(v('id'), 'rdf:type', type) 29 | ], { limit: 1 }) 30 | if (!res.length) return [] 31 | return Promise.all(res.map(r => this.hr.node({ name: r.id, type }))) 32 | } 33 | 34 | // function makeSetAsNodeWithProperties (property, type) { 35 | // return async function (properties) { 36 | // let node = await getWithType.call(this, property, type) 37 | // if (!node) { 38 | // const node = await this.hr.createNode(type, properties) 39 | // return this.add(property, node) 40 | // } 41 | // return node.merge(properties) 42 | // } 43 | // } 44 | 45 | export function makeRecipricalBinding (hasPredicate, ofPredicate, relation = 'many-to-one') { 46 | // do we enforce type checking - only setting relations between specific types? 
47 | // or perhaps this is not needed, as program should be written in such a way as to 48 | // avoid assigning wrong cases 49 | if (relation === 'many-to-one') { 50 | return async function (item) { 51 | await this.add(hasPredicate, item) 52 | await item.set(ofPredicate, this) 53 | } 54 | } 55 | if (relation === 'one-to-many') { 56 | return async function (item) { 57 | await this.set(hasPredicate, item) 58 | await item.add(ofPredicate, this) 59 | } 60 | } 61 | if (relation === 'many-to-many') { 62 | return async function (item) { 63 | await this.add(hasPredicate, item) 64 | await item.add(ofPredicate, this) 65 | } 66 | } 67 | if (relation === 'one-to-one') { 68 | return async function (item) { 69 | await this.set(hasPredicate, item) 70 | await item.set(ofPredicate, this) 71 | } 72 | } 73 | throw new Error('The makeRecipricalBinding function expects a valid relation type') 74 | } 75 | 76 | async function findSimilarNodes (profile) { 77 | const v = this.hr.graph.v 78 | const query = Object.keys(profile).map(predicate => { 79 | if (predicate === 'type') { 80 | return spo(v('id'), `rdf:${predicate}`, profile[predicate]) 81 | } 82 | return spo(v('id'), predicate, toRdfValue(profile[predicate])) 83 | }) 84 | const results = await this.hr._search(query) 85 | return results.map(data => ({ name: data.id, type: profile.type })) 86 | } 87 | 88 | async function getMatchingNodeOrCreateNew (value, predicate, type) { 89 | const matches = await findSimilarNodes.call(this, { [predicate]: value, type }) 90 | if (matches.length === 0) { 91 | // no matches so we creating a new node 92 | return this.hr.createNode(type, { [predicate]: value }) 93 | } else { 94 | // should we do further analysis, just because it matches this predicate 95 | // do we really want to assume that a node is a match 96 | // how do we choose between multiple matches 97 | return matches[0] 98 | } 99 | } 100 | 101 | function makeSetNodeOrAsLiteralPredicateWithType (predicate, property, type) { 102 | return async 
function (value) { 103 | if (isNode(value)) { 104 | return this.set(property, value) 105 | } 106 | // check if property already holds a node with this type 107 | let node = await getWithType.call(this, property, type) 108 | // if not we will add it 109 | if (!node) { 110 | // check if node of type with label already exists 111 | // if so we will reuse it rather than duplicating entry 112 | node = await getMatchingNodeOrCreateNew.call(this, value, predicate, type) 113 | return this.add(property, node) 114 | } 115 | // if node already exists 116 | // check if existing predicate->value is shared with others 117 | const parents = await node.parents(undefined, { limit: 2 }) 118 | if (parents && parents.length > 1) { 119 | // this node is used elsewhere - so modifing it will also modify its other uses 120 | // in which case we want to create a new node 121 | node = await getMatchingNodeOrCreateNew.call(this, value, predicate, type) 122 | return this.set(property, node) 123 | } 124 | // edit the existing node as its not used elsewhere 125 | // but still should check if node with value already exists 126 | const matches = await findSimilarNodes.call(this, { [predicate]: value, type }) 127 | if (matches.length === 0) return node.set(predicate, value) 128 | return this.set(property, matches[0]) 129 | } 130 | } 131 | 132 | function makeGetLiteralOrPredicateFromNodeWithType (predicate, property, type) { 133 | return async function () { 134 | let node = await this.get(property) 135 | if (!node || !node.name) return node 136 | if (node.type !== type) { 137 | node = await getWithType.call(this, property, type) 138 | } 139 | if (node) return node.get(predicate) 140 | return null 141 | } 142 | } 143 | 144 | export const makeSetAsLabelOnType = makeSetNodeOrAsLiteralPredicateWithType.bind(null, 'rdfs:label') 145 | // export const makeSetAsValueOnType = makeSetAsPredicateOnType.bind(null, 'rdf:value') 146 | 147 | export const makeGetLabelFromNode = 
makeGetLiteralOrPredicateFromNodeWithType.bind(null, 'rdfs:label') 148 | // export const makeGetValueFromNode = makeGetPredicateFromNodeWithType.bind(null, 'rdf:value') 149 | 150 | export function makeRemoveOfType (property, type) { 151 | return async function () { 152 | let node = await getWithType.call(this, property, type) 153 | if (node) { 154 | await this.remove(property, node) 155 | await node.destroy() 156 | } 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /lib/definitions/index.js: -------------------------------------------------------------------------------- 1 | import { compose, createFactory } from './utils' 2 | import activityStreamFactories from './activity-streams' 3 | import bibframeFactories from './bibframe' 4 | import StandardBehavior from './standard-behavior' 5 | import ContainerBehavior from './container-behavior' 6 | import ListItemBehavior from './list-item-behavior' 7 | 8 | const createStandardNode = createFactory(StandardBehavior) 9 | const createContainerNode = createFactory(compose(StandardBehavior, ContainerBehavior)) 10 | const createListItemNode = createFactory(compose(StandardBehavior, ListItemBehavior)) 11 | 12 | const FACTORY_BY_TYPE = Object.assign( 13 | { 14 | // basic types for hyper-reader document 15 | // TODO: move away from storing documents in the db 16 | // in future we should use store files, and db should be 17 | // just bibliographic, annotations, and relationship data 18 | 'hr:root': createContainerNode, 19 | 'hr:head': createContainerNode, 20 | 'hr:body': createContainerNode, 21 | 'doco:List': createContainerNode, 22 | 'doco:Section': createContainerNode, 23 | 'doco:Paragraph': createContainerNode, 24 | 'doco:Title': createContainerNode, 25 | 'po:Block': createContainerNode, 26 | 'po:Inline': createContainerNode, 27 | 'co:ListItem': createListItemNode 28 | }, 29 | bibframeFactories, 30 | activityStreamFactories 31 | ) 32 | 33 | export default function 
createNodeInterface (hr, name, type, context) { 34 | const props = { 35 | hr, 36 | name, 37 | type, 38 | rel: context && context.rel, 39 | parent: context && context.parent, 40 | child: context && context.child 41 | } 42 | return (FACTORY_BY_TYPE[type] || createStandardNode)(props) 43 | } 44 | -------------------------------------------------------------------------------- /lib/definitions/list-item-behavior.js: -------------------------------------------------------------------------------- 1 | 2 | import StandardBehavior from './standard-behavior' 3 | import { ops, spo } from '../utils' 4 | 5 | const ListItemBehavior = {} 6 | 7 | ListItemBehavior.setContent = function (value) { 8 | return this.set('co:itemContent', value) 9 | } 10 | 11 | ListItemBehavior.previous = async function () { 12 | var triples = await this.hr._get(ops(this.name, 'co:nextItem'), { limit: 1 }) 13 | if (!triples.length) triples = await this.hr._get(ops(this.name, 'co:firstItem'), { limit: 1 }) 14 | // no need to check if value, as if this is not a named node we have bigger problems 15 | return triples.length ? this.hr.node(triples[0].subject) : undefined 16 | } 17 | 18 | ListItemBehavior.next = function () { 19 | return this.get('co:nextItem') 20 | } 21 | 22 | ListItemBehavior.itemContent = async function () { 23 | const nodes = await this.hr._get(spo(this.name, 'co:itemContent'), { limit: 1 }) 24 | if (!nodes || nodes.length === 0) return null 25 | return this.hr.node(nodes[0].object, { rel: 'co:itemContent', parent: this }) 26 | } 27 | 28 | ListItemBehavior.remove = async function (attr, value) { 29 | if (attr === 'co:itemContent') { 30 | // stitch up list so that iteration still works 31 | const prev = await this.previous() 32 | const next = await this.next() 33 | if (prev) { 34 | const relation = prev.type === this.type ? 
'co:nextItem' : 'co:firstItem' 35 | if (next) { 36 | // set removes previous relationship 37 | // this is a total hack to stop prev.set() triggering a cascading destroy 38 | // which wipes out the entire chain of the container. 39 | // this is because next has only one parent - which is the element getting removed 40 | await this.hr._del(spo(prev.name, relation, this.name)) 41 | await prev.set(relation, next) 42 | } else { 43 | await prev.remove(relation, this) 44 | } 45 | } 46 | } 47 | return StandardBehavior.remove.call(this, attr, value) 48 | } 49 | 50 | export default ListItemBehavior 51 | -------------------------------------------------------------------------------- /lib/definitions/standard-behavior.js: -------------------------------------------------------------------------------- 1 | import { 2 | spo, 3 | ops, 4 | isRdfLiteral, 5 | fromRdfValue, 6 | toRdfValue, 7 | isNode 8 | } from '../utils' 9 | 10 | const StandardBehavior = {} 11 | 12 | // STANDARD FUNCTIONALITY 13 | StandardBehavior._castToNodeOrValue = async function (value) { 14 | if (isRdfLiteral(value)) { 15 | return fromRdfValue(value) 16 | } 17 | return this.hr.node(value) 18 | } 19 | 20 | StandardBehavior.get = async function (attr, value) { 21 | const nodes = await this.hr._get(spo(this.name, attr, value), { limit: 1 }) 22 | // console.log('get', spo(this.name, attr, value), nodes) 23 | if (!nodes || nodes.length === 0) return null 24 | return this._castToNodeOrValue(nodes[0].object) 25 | } 26 | 27 | StandardBehavior.all = async function (attr, value) { 28 | const triples = await this.hr._get(spo(this.name, attr, value)) 29 | return Promise.all(triples.map(node => this._castToNodeOrValue(node.object))) 30 | } 31 | 32 | StandardBehavior.add = function (attr, value) { 33 | return this.set(attr, value, true) 34 | } 35 | 36 | StandardBehavior.set = async function (attr, value, allowMultiple) { 37 | if (!attr) throw new Error('node.set() requires attribute') 38 | if (typeof attr !== 'string') { 39 
| // TODO: set multiple 40 | } 41 | // 1. delete all `< node name ? >` triples 42 | if (!allowMultiple) { 43 | // replace with stream 44 | // const triples = await this.hr._get(spo(this.name, attr)) 45 | // if object is a node this will potentially leave floating nodes. 46 | // so we call destroy if the node has more than one parent 47 | const oldValues = await this.all(attr) 48 | for (var i = 0; i < oldValues.length; i++) { 49 | const v = oldValues[i] 50 | if (isNode(v)) { 51 | const parents = await v.parents(null, { limit: 2 }) 52 | if (parents.length === 1) { 53 | await v.destroy() 54 | } else { 55 | await this.hr._del(spo(this.name, attr, v.name)) 56 | } 57 | } else { 58 | await this.remove(attr, v) 59 | } 60 | } 61 | // await Promise.all(triples.map(triple => this.hr._del(triple))) 62 | } 63 | // 2. create new key `< node name value >` 64 | await this.hr._put(spo(this.name, attr, toRdfValue(value))) 65 | // 3. return node 66 | return this 67 | } 68 | 69 | function equalityCheck (a, b) { 70 | if (typeof a === 'object' && typeof b === 'object') { 71 | if (Array.isArray(a) && Array.isArray(b)) { 72 | if (a.length !== b.length) return false 73 | for (let i = 0; i < a.length; i++) { 74 | if (!equalityCheck(a, b)) return false 75 | } 76 | return true 77 | } 78 | return a.name === b.name 79 | } 80 | return a === b 81 | } 82 | 83 | function deepIncludes (array, value) { 84 | return !!array.find(v => equalityCheck(v, value)) 85 | } 86 | 87 | function shouldDeleteForUpdate (newPredicates, v, properties, currentProps) { 88 | return !newPredicates.includes(v) || !equalityCheck(properties[v], currentProps[v]) 89 | } 90 | 91 | function shouldDeleteForMerge (newPredicates, v, properties, currentProps) { 92 | return (properties[v] && !equalityCheck(properties[v], currentProps[v])) 93 | } 94 | 95 | function _update (shouldDelete) { 96 | return async function (properties) { 97 | // get all existing properties 98 | const currentProps = await this.properties() 99 | const 
oldPredicates = Object.keys(currentProps) 100 | const newPredicates = Object.keys(properties) 101 | const del = oldPredicates.reduce((p, v) => { 102 | if (v === 'rdf:type') return p 103 | if (shouldDelete(newPredicates, v, properties, currentProps)) p.push(v) 104 | return p 105 | }, []) 106 | 107 | await Promise.all(del.map(v => { 108 | const value = currentProps[v] 109 | if (Array.isArray(value)) { 110 | const newValueIsAnArray = Array.isArray(properties[v]) 111 | return Promise.all(value.map(val => { 112 | if (newValueIsAnArray && deepIncludes(properties[v], val)) return 113 | return this.remove(v, val) 114 | })) 115 | } 116 | return this.remove(v, value) 117 | })) 118 | await Promise.all(newPredicates.map(v => { 119 | const value = properties[v] 120 | const oldValue = currentProps[v] 121 | if (equalityCheck(value, oldValue)) return 122 | if (Array.isArray(value)) { 123 | const oldValueIsArray = Array.isArray(oldValue) 124 | return Promise.all(value.map(val => { 125 | if (oldValueIsArray && deepIncludes(oldValue, val)) return 126 | return this.add(v, val) 127 | })) 128 | } 129 | return this.set(v, value) 130 | })) 131 | } 132 | } 133 | 134 | StandardBehavior.update = _update(shouldDeleteForUpdate) 135 | StandardBehavior.merge = _update(shouldDeleteForMerge) 136 | 137 | StandardBehavior.has = async function (attr, value) { 138 | var nodes = await this.hr._get(spo(this.name, attr, value && toRdfValue(value)), { limit: 1 }) 139 | if (!nodes) return false 140 | if (nodes.length === 0) return false 141 | return true 142 | } 143 | 144 | StandardBehavior.remove = async function (attr, value) { 145 | // get the nodes 146 | // if this is a parent we already know that these exist so should not check. 
147 | // console.log('remove', this.name, attr, value && value.name) 148 | // console.log(spo(this.name, attr, value && toRdfValue(value))) 149 | const triples = await this.hr._get(spo(this.name, attr, value && toRdfValue(value))) 150 | if (!triples) return this 151 | await Promise.all(triples.map(triple => this.hr._del(triple))) 152 | return this 153 | } 154 | 155 | /** return array of parents elements */ 156 | StandardBehavior.parents = async function (relation, opts) { 157 | const triples = await this.hr._get(ops(this.name, relation), opts) 158 | // console.log('parents', triples.map(t => t.subject)) 159 | // all parents will be named nodes as they refer to this child node 160 | return Promise.all(triples.map(node => this.hr.node( 161 | node.subject, 162 | { child: this, rel: node.predicate } 163 | ))) 164 | } 165 | 166 | function filterChildNodes (triple) { 167 | if (isRdfLiteral(triple.object)) return false 168 | return triple.predicate && triple.predicate !== 'rdf:type' 169 | } 170 | 171 | StandardBehavior.properties = async function () { 172 | const triples = await this.hr._get(spo(this.name)) 173 | return triples.reduce((p, triple) => { 174 | const value = isRdfLiteral(triple.object) ? 
fromRdfValue(triple.object) : { name: triple.object } 175 | if (p[triple.predicate] === undefined) p[triple.predicate] = value 176 | else if (Array.isArray(p[triple.predicate])) p[triple.predicate].push(value) 177 | else p[triple.predicate] = [p[triple.predicate], value] 178 | return p 179 | }, {}) 180 | } 181 | 182 | /** return array of child elements */ 183 | StandardBehavior.children = async function (includeLiterals) { 184 | let triples = await this.hr._get(spo(this.name)) 185 | // also filter literals out - as they will note be nodes; 186 | if (!includeLiterals) { 187 | const promises = triples 188 | .filter(filterChildNodes) 189 | .map(node => this.hr.node( 190 | node.object, 191 | { parent: this, rel: node.predicate } 192 | )) 193 | return Promise.all(promises) 194 | } 195 | return Promise.all(triples.map(async (node) => { 196 | // console.log('obj', node.object) 197 | if (isRdfLiteral(node.object)) { 198 | return { 199 | attr: node.predicate, 200 | value: node.object 201 | } 202 | } 203 | return this.hr.node(node.object, { parent: this, rel: node.predicate }) 204 | })) 205 | } 206 | 207 | StandardBehavior.disconnect = async function () { 208 | const parents = await this.parents() 209 | // parents.map(parent => console.log('diconnect', parent.name, parent.rel, this.name)) 210 | for (var i = 0; i < parents.length; i++) { 211 | const parent = parents[i] 212 | // console.log('diconnect', parent.name, parent.rel, this.name) 213 | await parent.remove(parent.rel, this) 214 | } 215 | // await Promise.all(parents.map(parent => parent.remove(parent.rel, this))) 216 | return this 217 | } 218 | 219 | StandardBehavior.destroy = async function (ignore) { 220 | if (!ignore) ignore = [this.name] 221 | else ignore.push(this.name) 222 | // 1. node.disconnect() 223 | await this.disconnect() 224 | // 2. 
find all `< node ?a ?b >` 225 | const children = await this.children(true) 226 | for (var i = 0; i < children.length; i++) { 227 | const child = children[i] 228 | if (isNode(child)) { 229 | // 3. for each check if it has other nodes connecting to it eg `< ? ? ?b >` 230 | let itsParents = await child.parents() 231 | itsParents = itsParents.filter(cp => !ignore.includes(cp.name)) 232 | if (!itsParents.length) { 233 | // console.log('DESTROY child ->', child.name, child.rel) 234 | await child.destroy(ignore) 235 | } else { 236 | // console.log('DELETE relation to child ->', this.name, child.rel, child.name) 237 | await this.hr._del(spo(this.name, child.rel, child.name)) 238 | } 239 | } 240 | await this.hr._del(spo(this.name, child.attr, child.value)) 241 | } 242 | } 243 | 244 | export default StandardBehavior 245 | -------------------------------------------------------------------------------- /lib/definitions/utils.js: -------------------------------------------------------------------------------- 1 | function assign (target, ...sources) { 2 | sources.forEach(source => { 3 | let descriptors = Object.keys(source).reduce((descriptors, key) => { 4 | descriptors[key] = Object.getOwnPropertyDescriptor(source, key) 5 | return descriptors 6 | }, {}) 7 | Object.getOwnPropertySymbols(source).forEach(sym => { 8 | let descriptor = Object.getOwnPropertyDescriptor(source, sym) 9 | if (descriptor.enumerable) { 10 | descriptors[sym] = descriptor 11 | } 12 | }) 13 | Object.defineProperties(target, descriptors) 14 | }) 15 | return target 16 | } 17 | 18 | export function compose (...behaviors) { 19 | return assign({}, ...behaviors) 20 | } 21 | 22 | export function createFactory (prototype, defaults) { 23 | return function (props) { 24 | const instance = Object.create(prototype) 25 | if (defaults) Object.assign(instance, defaults) 26 | return Object.assign(instance, props) 27 | } 28 | } 29 | -------------------------------------------------------------------------------- 
/lib/hyperreadings.js: -------------------------------------------------------------------------------- 1 | import events from 'events' 2 | import fs from 'fs' 3 | import hypergraph from 'hyper-graph-db' 4 | import inherits from 'inherits' 5 | import uuid from 'uuid/v4' 6 | 7 | import importers from './importers/index' 8 | import createNodeInterface from './definitions/index' 9 | import { PREFIXES } from './constants' 10 | import { spo, ops, toRdfValue } from './utils' 11 | 12 | function resolveCallback (resolve, reject) { 13 | return function (err, result) { 14 | if (err) return reject(err) 15 | resolve(result) 16 | } 17 | } 18 | 19 | function HyperReadings (storage, key, opts) { 20 | if (!(this instanceof HyperReadings)) return new HyperReadings(storage, key, opts) 21 | if (typeof key !== 'string' && !opts) { 22 | opts = key 23 | key = null 24 | } 25 | events.EventEmitter.call(this) 26 | opts = opts || {} 27 | if (!opts.prefixes) opts.prefixes = PREFIXES 28 | if (!opts.name) opts.name = 'hr://' 29 | this.swarm = opts.swarm 30 | this.network = null 31 | this._nodeCount = 0 32 | this.graph = hypergraph(storage, key, opts) 33 | this.graph.on('ready', (e) => { 34 | this.emit('ready', e) 35 | }) 36 | } 37 | 38 | inherits(HyperReadings, events.EventEmitter) 39 | 40 | /** Get underlying hyperdb key */ 41 | HyperReadings.prototype.key = function () { 42 | return this.graph.db.key.toString('hex') 43 | } 44 | 45 | HyperReadings.prototype.joinNetwork = function (opts) { 46 | if (!this.swarm) { 47 | console.warn('Cannot join network - no swarm set.') 48 | return 49 | } 50 | if (this.network) { 51 | console.warn('Already joined network') 52 | return this.network 53 | } 54 | this.network = this.swarm(this.graph.db, opts) 55 | return this.network 56 | } 57 | 58 | HyperReadings.prototype.leaveNetwork = async function () { 59 | if (!this.network) return 60 | return new Promise((resolve, reject) => { 61 | this.network.leave(this.graph.db.discoveryKey) 62 | 
this.network.destroy(resolveCallback(resolve, reject)) 63 | }).then(() => { this.network = null }) 64 | } 65 | 66 | HyperReadings.prototype.importZoteroReference = function (data) { 67 | return this.import(data, { type: 'zoteroApi' }) 68 | } 69 | 70 | HyperReadings.prototype.importFile = function (filename, opts) { 71 | if (!filename || typeof filename !== 'string') throw new Error('HyperReadings.importFile requires filename to be a string') 72 | // infer import type from filename 73 | let type = opts && opts.type 74 | if (!type) { 75 | const match = filename.match(/\.(w+)$/) 76 | if ((!match || importers[match[1]])) throw new Error(`HyperReadings.import does not recognize file extension`) 77 | type = match[1] 78 | } else if (!importers[type]) { 79 | throw new Error(`HyperReadings.import does not recognize type ${type}`) 80 | } 81 | return new Promise((resolve, reject) => fs.readFile(filename, resolveCallback)) 82 | .then(data => this.import(data, type)) 83 | } 84 | 85 | HyperReadings.prototype.import = function (data, opts) { 86 | if (!data) return Promise.resolve() 87 | if (!opts || !opts.type) throw new Error('HyperReadings.import requires opts.type to be set') 88 | if (!importers[opts.type]) throw new Error(`HyperReadings.import does not recognize type ${opts.type}`) 89 | return importers[opts.type](this, data) 90 | } 91 | 92 | HyperReadings.prototype.setTitle = function (title) { 93 | if (!title || typeof title !== 'string') throw new Error('Title must be a string') 94 | return new Promise((resolve, reject) => this.graph.db.put('@title', title, (e) => { 95 | if (e) return reject(e) 96 | resolve() 97 | })) 98 | } 99 | 100 | HyperReadings.prototype.title = function () { 101 | return new Promise((resolve, reject) => this.graph.db.get('@title', (e, nodes) => { 102 | if (e) return reject(e) 103 | resolve(nodes) 104 | })).then((nodes) => { 105 | if (!nodes || nodes.length === 0) { 106 | return 'untitled' 107 | } 108 | return nodes[0].value.toString() 109 | }) 110 | 
} 111 | 112 | /** Get hyperreading root node */ 113 | HyperReadings.prototype.root = async function () { 114 | return this.nodeByType('hr:root') 115 | } 116 | 117 | HyperReadings.prototype.head = async function () { 118 | return this.nodeByType('hr:head') 119 | } 120 | 121 | HyperReadings.prototype.body = async function () { 122 | return this.nodeByType('hr:body') 123 | } 124 | 125 | HyperReadings.prototype.iterate = async function (...args) { 126 | const r = await this.root() 127 | return r.iterate(...args) 128 | } 129 | 130 | HyperReadings.prototype.createCollection = async function (name, description) { 131 | // TODO: check if collection with name already exists 132 | return this.createNode('as:Collection', { 'as:name': name, 'as:content': description }) 133 | } 134 | 135 | HyperReadings.prototype.collections = async function () { 136 | return this.nodesByType('as:Collection') 137 | } 138 | 139 | HyperReadings.prototype.getCollection = async function (name) { 140 | const v = this.graph.v 141 | const query = [ spo(v('id'), 'rdf:type', 'as:Collection') ] 142 | if (name) query.push(spo(v('id'), 'as:name', toRdfValue(name))) 143 | const collections = await this._search(query, { limit: 1 }) 144 | if (collections.length === 0) return null 145 | return this.node({ name: collections[0].id, type: 'as:Collection' }) 146 | } 147 | 148 | HyperReadings.prototype.exists = async function (id, type) { 149 | const node = await this._get(spo(id, type ? 
'rdf:type' : undefined, type), { limit: 1 }) 150 | return !!(node && node.length > 0) 151 | } 152 | 153 | HyperReadings.prototype.findPeople = async function (opts) { 154 | const v = this.graph.v 155 | const query = [ spo(v('id'), 'rdf:type', 'bf:Person') ] 156 | if (opts && opts.name) query.push(spo(v('id'), 'foaf:name', toRdfValue(opts.name))) 157 | if (opts && opts.firstName) query.push(spo(v('id'), 'foaf:firstName', toRdfValue(opts.firstName))) 158 | if (opts && opts.lastName) query.push(spo(v('id'), 'foaf:lastName', toRdfValue(opts.lastName))) 159 | const people = await this._search(query) 160 | return Promise.all(people.map(p => this.node({ name: p.id, type: 'bf:Person' }))) 161 | } 162 | 163 | HyperReadings.prototype.findAgents = async function (opts) { 164 | const v = this.graph.v 165 | const query = [ spo(v('id'), 'rdf:type', 'bf:Agent') ] 166 | if (opts && opts.label) query.push(spo(v('id'), 'rdfs:label', opts.label)) 167 | const agents = await this._search(query) 168 | return Promise.all(agents.map(p => this.node({ name: p.id, type: 'bf:Agent' }))) 169 | } 170 | 171 | HyperReadings.prototype.findPlaces = async function (opts) { 172 | const v = this.graph.v 173 | const query = [ spo(v('id'), 'rdf:type', 'bf:Place') ] 174 | if (opts && opts.label) query.push(spo(v('id'), 'rdfs:label', opts.label)) 175 | const places = await this._search(query) 176 | return Promise.all(places.map(p => this.node({ name: p.id, type: 'bf:Place' }))) 177 | } 178 | 179 | HyperReadings.prototype.findSeriesWithTitle = async function (title) { 180 | // todo: Not sure if this is the most efficient way to get a series 181 | const found = await this._query(` 182 | PREFIX rdf: 183 | PREFIX rdfs: 184 | PREFIX bf: 185 | select * where { 186 | { 187 | ?title rdf:type bf:AbbreviatedTitle . 188 | } UNION { 189 | ?title rdf:type bf:Title . 190 | } 191 | ?title rdfs:label ${toRdfValue(title)} . 192 | ?id rdf:type bf:Instance . 193 | ?id bf:title ?title . 
194 | }`) 195 | return Promise.all(found.map(p => this.node({ name: p['?id'], type: 'bf:Instance' }))) 196 | } 197 | 198 | /** Get all nodes by type */ 199 | HyperReadings.prototype.nodesByType = async function (type, opts) { 200 | // big question - how do we maintain new node names not clashing with old. 201 | const triples = await this._get(ops(type, 'rdf:type'), opts) 202 | return Promise.all(triples.map(triple => this.node(triple))) 203 | } 204 | 205 | /** Get all nodes by type */ 206 | HyperReadings.prototype.nodeByType = async function (type, opts) { 207 | // big question - how do we maintain new node names not clashing with old. 208 | const nodes = await this.nodesByType(type, { limit: 1 }) 209 | return nodes.length > 0 ? nodes[0] : null 210 | } 211 | 212 | /** Cast to specific node type */ 213 | HyperReadings.prototype.node = async function (data, context) { 214 | let name 215 | let type 216 | if (typeof data === 'string') { // from named node 217 | name = data 218 | } else if (data.subject) { // from triple 219 | name = data.subject 220 | if (data.predicate === 'rdf:type' && data.object) type = data.object 221 | } else if (data.name) { // from other node like object 222 | name = data.name 223 | type = data.type 224 | } 225 | if (!type) { 226 | type = await this._getType(name) 227 | } 228 | return createNodeInterface(this, name, type, context) 229 | } 230 | 231 | /** Create a new blank node of type */ 232 | HyperReadings.prototype.createNode = async function (type, properties) { 233 | if (!type) { 234 | throw new Error('Cannot create a node without type') 235 | } 236 | let nodeId = properties && properties.id 237 | if (!nodeId) { 238 | // big question - how do we maintain new node names not clashing with old. 
239 | // const name = await this._name() 240 | nodeId = await this.newBlankNodeName() 241 | } 242 | const triple = spo(nodeId, 'rdf:type', type) 243 | await this._put(triple) 244 | // BUG triple is mutated by put, change this once fixed 245 | const node = await this.node(spo(nodeId, 'rdf:type', type)) 246 | if (properties) { 247 | // TODO: enable arrays to be added too. 248 | await Promise.all(Object.keys(properties).map((key) => { 249 | if (key === 'id' || typeof properties[key] === 'undefined') return 250 | return node.set(key, properties[key]) 251 | })) 252 | } 253 | return node 254 | } 255 | 256 | HyperReadings.prototype.newBlankNodeName = async function () { 257 | const name = await this._name() 258 | // should probably use hashes so as to avoid collisions 259 | return name + 'n' + uuid() 260 | } 261 | 262 | /** Returns node type */ 263 | HyperReadings.prototype._getType = async function (subject) { 264 | var triples = await this._get(spo(subject, 'rdf:type'), { limit: 1 }) 265 | return triples.length ? 
triples[0].object : undefined 266 | } 267 | HyperReadings.prototype._name = function (triple, opts) { 268 | return new Promise((resolve, reject) => this.graph.name(resolveCallback(resolve, reject))) 269 | } 270 | 271 | HyperReadings.prototype._get = function (triple, opts) { 272 | return new Promise((resolve, reject) => this.graph.get(triple, opts, resolveCallback(resolve, reject))) 273 | } 274 | 275 | HyperReadings.prototype._put = function (triple) { 276 | return new Promise((resolve, reject) => this.graph.put(triple, resolveCallback(resolve, reject))) 277 | } 278 | 279 | HyperReadings.prototype._del = function (triple) { 280 | return new Promise((resolve, reject) => this.graph.del(triple, resolveCallback(resolve, reject))) 281 | } 282 | 283 | HyperReadings.prototype._search = function (patterns, opts) { 284 | return new Promise((resolve, reject) => this.graph.search(patterns, opts, resolveCallback(resolve, reject))) 285 | } 286 | 287 | HyperReadings.prototype._query = function (query) { 288 | return new Promise((resolve, reject) => this.graph.query(query, resolveCallback(resolve, reject))) 289 | } 290 | 291 | export default HyperReadings 292 | -------------------------------------------------------------------------------- /lib/importers/html.js: -------------------------------------------------------------------------------- 1 | import parse5 from 'parse5' 2 | import { /* attr, */ textContent } from './parse5Helpers' 3 | import arrayToTree from '../array-to-tree' 4 | 5 | function createTypeDescriptionStatement (type) { 6 | return async (hr, node) => { 7 | const n = await hr.createNode(type) 8 | return { node: n } 9 | } 10 | } 11 | 12 | function createTypeDescriptionStatementWithTextContent (type) { 13 | return async (hr, node) => { 14 | const n = await hr.createNode(type, { 15 | 'c4o:hasContent': textContent(node) 16 | }) 17 | return { node: n, terminal: true } 18 | } 19 | } 20 | 21 | var describeParagraph = 
createTypeDescriptionStatementWithTextContent('doco:Paragraph') 22 | var describeTitle = createTypeDescriptionStatementWithTextContent('doco:Title') 23 | var describeSection = createTypeDescriptionStatement('doco:Section') 24 | // var describeInlineElement = createTypeDescriptionStatement('po:Inline') 25 | var describeHTML = createTypeDescriptionStatement('hr:root') 26 | var describeBody = createTypeDescriptionStatement('hr:body') 27 | var describeHead = createTypeDescriptionStatement('hr:head') 28 | // TODO: bring this back, but as annotations on text sections 29 | // async function describeLink (hr, node) { 30 | // const href = attr(node, 'href') 31 | // if (!href) return 32 | // const identifer = await hr.createNode('datacite:AlternateResourceIdentifier') 33 | // await identifer.set('cito:usesIdentifierScheme', 'datacite:url') 34 | // await identifer.set('rdf:value', href.value) 35 | // const linkNode = await hr.createNode('po:Inline') 36 | // await linkNode.set('cito:hasIdentifier', identifer) 37 | // return linkNode 38 | // } 39 | 40 | // async function describeText (hr, node) { 41 | // const text = await hr.createNode('doco:TextChunk') 42 | // await text.set('rdf:value', node.value) 43 | // return text 44 | // } 45 | 46 | // async function describeSpan (hr, node) { 47 | // switch (attr(node, 'data-type')) { 48 | // case 'comment': { 49 | // const text = attr(node, 'data-comment') 50 | // const comment = await hr.createNode('fabio:Comment') 51 | // await comment.set('c4o:hasContent', text) 52 | // return comment 53 | // } 54 | // default: { 55 | // return hr.createNode('po:Inline') 56 | // } 57 | // } 58 | // } 59 | 60 | function isEmptyNode (node) { 61 | return !node.childNodes && node.value && /^\s*$/.test(node.value) 62 | } 63 | 64 | // TYPES 65 | var htmlNodeMappings = { 66 | 'html': describeHTML, 67 | 'head': describeHead, 68 | 'body': describeBody, 69 | 'section': describeSection, 70 | 'p': describeParagraph, 71 | 'h1': describeTitle, 72 | 'h2': 
// Fallback mapper for element types without an explicit mapping above.
var defaultMapping = function (hr, node) {
  // make this inherited from parent / inline or block
  return hr.createNode('po:Block')
}

const headerRegex = /^h(\d+)$/

// Returns the heading level ('1'..'6') for <hN> elements and null otherwise,
// so arrayToTree can nest the content that follows each heading.
const headingSplitter = (value) => {
  const match = value.nodeName.match(headerRegex)
  return match && match[1]
}

// Wraps a run of sibling nodes in a synthetic <section>; `inferred` marks it
// so walk() does not try to re-nest its children a second time.
const sectionTransform = (array) => ({
  nodeName: 'section',
  tagName: 'section',
  childNodes: array,
  inferred: true
})

function addInferredSections (nodes) {
  return arrayToTree(nodes, headingSplitter, sectionTransform)
}

/**
 * Imports an HTML string into a hyperreadings graph.
 * Walks the parse5 tree breadth-first (via an explicit queue), mapping each
 * element to a graph node through htmlNodeMappings and inserting it under its
 * parent's node when the parent supports insertNode.
 * @param {Object} hr - hyperreadings instance
 * @param {string} html - raw HTML source
 * @returns {Promise<Object>} the same hyperreadings instance
 */
export default async function htmlImporter (hr, html) {
  var doc = parse5.parse(html)
  var stack = []
  await walk(doc.childNodes[0])
  while (stack.length) {
    var data = stack.shift()
    await walk(data.node, data.context)
  }
  return hr
  // end

  // Builds queue entries for the non-empty children of `parent`.
  function nextNodes (parent, nodes) {
    const filteredNodes = nodes
      .filter(node => !isEmptyNode(node))
    if (!filteredNodes.length) return []
    // return context for next nodes
    return filteredNodes.map((node) => {
      return {
        node,
        context: { parent }
      }
    })
  }

  async function walk (node, context) {
    if (isEmptyNode(node)) {
      return
    }
    // process individual node
    var mapper = htmlNodeMappings[node.nodeName] || defaultMapping
    var { node: hrNode, terminal } = await mapper(hr, node)
    if (context && context.parent && context.parent.insertNode) await context.parent.insertNode(hrNode)
    if (terminal) return
    // FIX: was `!node.childNodes && node.childNodes.length === 0`, which threw
    // a TypeError on leaf nodes (text/comments have no childNodes) and never
    // returned early for empty elements. `||` is the intended guard.
    if (!node.childNodes || node.childNodes.length === 0) return
    if (!node.inferred) node.childNodes = addInferredSections(node.childNodes)
    // add all children to the queue to be processed next
    const next = nextNodes(hrNode, node.childNodes)
    Array.prototype.push.apply(stack, next)
  }
}
/**
 * Maps a Zotero creator role onto a MARC relator node name.
 * Unknown roles fall back to 'aut' (author).
 * @param {string} role - Zotero creator role, e.g. 'editor'
 * @returns {{name: string}}
 */
function convertRoleToMarcRelator (role) {
  const relator = ZOTERO_TO_MARC_ROLES[role] || 'aut'
  return { name: `marcrel:${relator}` }
}

/**
 * Adds Zotero creators to `node` as bf:Contribution entries.
 * Reuses an existing bf:Person (matched by full name) and, when one exists,
 * an existing contribution with the same agent and role.
 * @param {Object} hr - hyperreadings instance
 * @param {Object} node - node supporting addContribution / addContributionNode
 * @param {Array<Object>} creators - Zotero creator records
 */
async function importCreators (hr, node, creators) {
  for (let i = 0; i < creators.length; i++) {
    const creator = creators[i]
    if (!creator) continue
    const marcRole = convertRoleToMarcRelator(creator.role)
    // FIX: drop missing name parts so single-field creators never yield
    // names like "undefined Lastname".
    const name = [creator.firstName, creator.lastName].filter(Boolean).join(' ')
    // first find an existing person with this name
    const people = await hr.findPeople({ name })
    let person
    if (people.length === 0) {
      person = await hr.createNode('bf:Person', {
        'foaf:name': name,
        'foaf:firstName': creator.firstName,
        'foaf:lastName': creator.lastName
      })
    } else {
      // just select the first; however we should probably allow for some disambiguation
      person = people[0]
      // NOTE(review): the PREFIX IRIs are not visible in this view of the
      // source — confirm they are intact in the original file.
      const contribution = await hr._query(`
        PREFIX rdf:
        PREFIX rdfs:
        PREFIX bf:
        PREFIX hr:
        PREFIX marcrel:

        select * where {
          ?contribution rdf:type bf:Contribution .
          ?contribution bf:agent <${person.name}> .
          ?contribution bf:role ${marcRole.name} .
        }
      `)
      if (contribution.length) {
        // FIX: this previously `return`ed out of the whole function, silently
        // skipping every remaining creator once one reused contribution was
        // found. Link the existing contribution and continue the loop.
        await node.addContributionNode({ name: contribution[0]['?contribution'], type: 'bf:Contribution' })
        continue
      }
    }
    await node.addContribution(person, marcRole)
  }
}
/**
 * Imports a single Zotero API item into the hyperreadings graph.
 * Creates a bf:Instance, attaches it to the current collection page, and maps
 * Zotero fields onto bibframe properties (titles, identifiers, provision
 * activity, held item, series, edition).
 * @param {Object} hr - hyperreadings instance
 * @param {Object} data - a Zotero item record (must include `itemType`)
 * @returns {Promise<Object>} the created bf:Instance node
 * @throws {Error} when data is missing or has no itemType
 */
async function zotero (hr, data) {
  if (!data || !data.itemType) throw new Error('ZoteroApi translator expects data with itemType')
  // check if collection exists
  const collection = await getCollectionToAddTo(hr)
  const instance = await hr.createNode('bf:Instance')
  // add instance to collection
  await collection.addItem(instance)
  if (data.creators) await importCreators(hr, instance, data.creators)
  if (data.title) await instance.setTitle(data.title)
  if (data.shortTitle) await instance.setAbbreviatedTitle(data.shortTitle)
  if (data.abstractNote) await instance.setSummary(data.abstractNote)
  if (data.language) await instance.setLanguage(data.language)
  if (data.rights) await instance.setRights(data.rights)
  if (data.tags) await importTagsAsSubjects(instance, data.tags)
  if (data.ISBN) await importIdentifier(instance, data.ISBN, 'isbn')
  if (data.ISSN) await importIdentifier(instance, data.ISSN, 'issn')
  if (data.DOI) await importIdentifier(instance, data.DOI, 'doi')
  if (data.extra) {
    await importIdentifiersFromExtraField(instance, data.extra)
    await instance.setNote(data.extra)
  }
  if (data.libraryCatalog) await instance.setSource(data.libraryCatalog)
  if (hasProvisionFields(data)) {
    await instance.addPublication({
      place: data.place,
      date: data.date,
      publisher: data.publisher
    })
  }
  if (hasItemLevelFields(data)) {
    const item = await hr.createNode('bf:Item')
    // FIX: this promise was previously not awaited, so failures were
    // swallowed and the item link could land after the importer resolved.
    await instance.addItem(item)
    if (data.callNumber) await item.setShelfMark(data.callNumber)
    if (data.url) await item.setElectronicLocator(data.url)
    if (data.archive) {
      await item.setHeldBy(data.archive)
    }
    if (data.archiveLocation) {
      // do we add or set ?
      await item.setShelfMark(data.archiveLocation)
    }
  }
  // TODO: hasParentLevelFields currently returns undefined, so this branch
  // never runs — importing the parent container (e.g. for bookSection /
  // journalArticle items) is still unimplemented.
  if (hasParentLevelFields(data)) {

  }
  if (hasSeriesLevelFields(data)) {
    // check if the series already exists
    // then create series and link to instance
    const existing = await hr.findSeriesWithTitle(data.series)
    let series
    if (existing.length === 0) {
      series = await hr.createNode('bf:Instance')
      await series.setTitle(data.series)
    } else {
      series = existing[0]
    }
    await instance.setSeriesOf(series)
  }
  if (data.seriesNumber) await instance.setSeriesEnumeration(data.seriesNumber)
  if (data.edition) await instance.setEditionStatement(data.edition)
  return instance
}
/**
 * True when `value` looks like a graph node (carries a non-empty `name`).
 * @param {*} value
 * @returns {boolean}
 */
export function isNode (value) {
  // FIX: guard against null/undefined, which previously threw a TypeError
  // (`!!null.name`); all other primitives already returned false.
  return !!(value && value.name)
}

/**
 * Serialises a JS value into its RDF object representation:
 * nodes -> their name, numbers -> xsd:decimal-typed literals,
 * everything else -> a JSON-quoted literal string.
 * @param {*} value
 * @returns {string}
 */
export function toRdfValue (value) {
  if (isNode(value)) {
    return value.name
  } else if (typeof value === 'number') {
    return `"${value}"^^xsd:decimal`
  }
  return JSON.stringify(value)
}
{ 45 | "babel-cli": "^6.26.0", 46 | "babel-loader": "^7.1.4", 47 | "babel-preset-env": "^1.6.1", 48 | "chai": "^4.1.2", 49 | "coveralls": "^3.0.0", 50 | "cross-env": "^5.1.4", 51 | "eslint": "^4.19.1", 52 | "eslint-plugin-import": "^2.10.0", 53 | "eslint-plugin-node": "^6.0.1", 54 | "eslint-plugin-promise": "^3.7.0", 55 | "mocha": "^5.0.1", 56 | "nyc": "^11.6.0", 57 | "random-access-memory": "^3.0.0", 58 | "standard": "^12.0.0", 59 | "webpack": "^4.6.0", 60 | "webpack-cli": "^2.0.15", 61 | "webpack-node-externals": "^1.7.2" 62 | } 63 | } 64 | -------------------------------------------------------------------------------- /presentations/SLNSW Fellowship Presentation/HyperReadings Presentation.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/samiz-dat/hyperreadings/314584dcbacc01c142789a335824067ec9127f78/presentations/SLNSW Fellowship Presentation/HyperReadings Presentation.pdf -------------------------------------------------------------------------------- /presentations/SLNSW Fellowship Presentation/SLNSW_presentation_notes.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/samiz-dat/hyperreadings/314584dcbacc01c142789a335824067ec9127f78/presentations/SLNSW Fellowship Presentation/SLNSW_presentation_notes.pdf -------------------------------------------------------------------------------- /reading-lists/hyper-graph-db-research.md: -------------------------------------------------------------------------------- 1 | # Research for hyper graph db 2 | 3 | This is a loose collection of links that have been collated through the development of [hyper-graph-db](https://www.github.com/e-e-e/hyper-graph-db). 
4 | 5 | ## Metadata Standards and Ontologies 6 | 7 | - [PRISM](http://www.prismstandard.org/resources/mod_prism.html) 8 | - [Functional Requirements for Bibliographic Records](https://www.ifla.org/publications/functional-requirements-for-bibliographic-records) 9 | - [Expression of Extended FRBR Concepts in RDF](http://vocab.org/frbr/extended) 10 | - [FaBiO, the FRBR-aligned Bibliographic Ontology](https://sparontologies.github.io/fabio/current/fabio.html) 11 | - [The Discourse Elements Ontology (DEO)](https://sparontologies.github.io/deo//current/deo.html) 12 | - [DoCO, the Document Components Ontology](https://sparontologies.github.io/doco/current/doco.html) 13 | - [Dublin Core: DCMI Metadata Terms](http://dublincore.org/documents/dcmi-terms/) 14 | - [Using Dublin Core](http://dublincore.org/documents/2000/07/16/usageguide/qualified-html.shtml) 15 | - [The Citation Counting and Context Characterisation Ontology (C4O)](https://sparontologies.github.io/c4o/current/c4o.html) 16 | - [The RDA Registry](http://www.rdaregistry.info/) 17 | - [Open Metadata Registry - FRBRer](http://metadataregistry.org/schema/show/id/5.html) 18 | 19 | ## RDF data 20 | 21 | ### Large repositories 22 | 23 | - [Open Citations Corpus](http://opencitations.net/download) 24 | - [Wikipedia Dumps](http://wiki.dbpedia.org/news/dbpedia-based-rdf-dumps-wikidata) 25 | - [WorldCat Most Highly Held 2012](https://archive.org/services/purl/purl/dataset/WorldCat/datadumps/WorldCatMostHighlyHeld-2012-05-15.nt.gz) 26 | - [Open Library Dump 2016](https://old.datahub.io/dataset/open-library) 27 | - [Open Library Dumps](https://openlibrary.org/developers/dumps) 28 | - [Linked data Laundromat](http://lodlaundromat.org/) 29 | - The European Library (dead 2016) 30 | - [Research Libraries UK Linked Open Data](http://www.theeuropeanlibrary.org/tel4/access/data/lod) 31 | - [Research Libraries Bulk data](http://www.theeuropeanlibrary.org/tel4/access/data/opendata/details) 32 | - 
[Freebase](https://developers.google.com/freebase/) (dead 2016) 33 | 34 | ### Single entry access points 35 | 36 | - [doi rdf data via crossref](https://www.crossref.org/blog/content-negotiation-for-crossref-dois/) 37 | - [WorldCat data](https://www.oclc.org/developer/develop/data-sets.en.html) 38 | - eg. [Harper Lee, Go set a watchman (ttl)](http://experiment.worldcat.org/entity/work/data/2283978583.ttl) 39 | - [Open Library Restful API](https://openlibrary.org/dev/docs/restful_api) 40 | - eg. [Judith Butler, Gender Trouble (rdf)](https://openlibrary.org/works/OL3292666W.rdf) 41 | 42 | ## Sparql 43 | 44 | - [SPARQL By Example](https://www.w3.org/2009/Talks/0615-qbe/) 45 | - [How to SPARQL](http://rdf.myexperiment.org/howtosparql?page=Using+the+SPARQL+Endpoint#) 46 | - [SPARQL Examples](https://codyburleson.com/display/ldn/SPARQL+examples) 47 | 48 | ## Projects 49 | 50 | - [Initiative for Open Citations](https://i4oc.org/) 51 | - [Public Knowledge Project](https://pkp.sfu.ca) 52 | - [Open Harvester Systems](https://pkp.sfu.ca/ohs/) 53 | - [Open Journal Systems](https://pkp.sfu.ca/ojs/) 54 | - [Open Archives Initiative](http://www.openarchives.org) 55 | - [Code{4}Lib](code4lib.org) 56 | - [Citation Style Language](http://citationstyles.org/developers/) 57 | - [The Cost of Knowledge Petition](http://thecostofknowledge.com/) 58 | - [RDF Balls](http://rballs.info/) 59 | - [Linked Open Vocabularies](http://lov.okfn.org/dataset/lov) 60 | 61 | ## Open Source Projects 62 | 63 | - [Terrier](https://github.com/Authorea/terrier) 64 | - [RDA Registry](https://github.com/RDARegistry) 65 | 66 | ## OA Repositories + Journals 67 | 68 | - [African Journals Online (AJOL)](https://www.ajol.info) 69 | - [Performance Paradigm](http://www.performanceparadigm.net/) 70 | - [Radical America](https://repository.library.brown.edu/studio/collections/id_594/) 71 | - [Archiving Student Activism at Princeton (ASAP) Collection](https://findingaids.princeton.edu/collections/AC437/c1) 72 | - 
[Documenting Ferguson](http://digital.wustl.edu/ferguson/) 73 | - [Taipei Biennial 2012 Journal](http://www.taipeibiennial.org/2012/en/journal.html) 74 | - [Repositório Institucional UNESP](https://repositorio.unesp.br/?locale-attribute=en) 75 | - [SUPERVALENT THOUGHT](https://supervalentthought.com) 76 | - [arXiv.org](https://arxiv.org/) 77 | - [Public Library of Science (PLOS)](https://www.plos.org/) 78 | - [Directory of Open Access Journals (DOAJ)](https://doaj.org/) 79 | 80 | ## Reading lists 81 | 82 | - [A Distributed Systems Reading List](https://dancres.github.io/Pages/) 83 | - [Humane Technology reading lists](https://community.humanetech.com/t/humane-technology-reading-lists/21/21) 84 | - [The Only Humane-Tech Reading List Ever](https://technological-simplicity.com/the-only-human-tech-reading-list-ever-9ccb6298890d?gi=3a2d84a9ca75) 85 | - [Decolonising Science Reading List](https://medium.com/@chanda/decolonising-science-reading-list-339fb773d51f) 86 | 87 | ## Articles 88 | 89 | - Crystal Fraser, Zoe Todd, [Decolonial Sensibilities: Indigenous Research and Engaging with Archives in Contemporary Colonial Canada](http://www.internationaleonline.org/research/decolonising_practices/54_decolonial_sensibilities_indigenous_research_and_engaging_with_archives_in_contemporary_colonial_canada) 90 | - [Library of Alexandria v2.0](https://medium.com/on-archivy/library-of-alexandria-v2-0-697fc0f590f0) 91 | - [The Hubris of Neutrality in Archives](https://medium.com/on-archivy/the-hubris-of-neutrality-in-archives-8df6b523fe9f) 92 | - [Exploring #WomensMarch](https://medium.com/on-archivy/exploring-womensmarch-dcc30221101c) 93 | - [Trove and the case for radical openness](https://overland.org.au/2016/03/trove-and-the-case-for-radical-openness/) 94 | - Judith Butler, [What is Critique? 
An Essay on Foucault’s Virtue](http://eipcp.net/transversal/0806/butler/en) 95 | - Kenneth Korstad Langås, [Library & Experimental Arts](http://eejournal.no/home/2017/5/11/kenneth-korstad-langs-library-experimental-arts) 96 | - Nika Mavrody, [The Dangers of Reading in Bed](https://www.theatlantic.com/technology/archive/2017/05/reading-in-bed/527388/?utm_source=atlfb) 97 | - Bryan Newbold, [IA Papers Manifest](https://archive.org/details/ia_papers_manifest_20170919) 98 | - Michael Dudley, [Algorithms Don’t Think About Race. So Tech Giants Need To.](https://decolonizedlibrarian.wordpress.com/2017/02/07/algorithms-dont-think-about-race-so-tech-giants-need-to/) 99 | - Daniel MARTÍNEZ AVILA, José Augusto CHAVES GUIMARÃES, [Library classifications criticisms: universality, poststructuralism and ethics](https://repositorio.unesp.br/bitstream/handle/11449/114737/ISSN11353716-2013-19-02-21-26.pdf?sequence=1&isAllowed=y) 100 | - Sanford Berman, [Prejudices and Antipathies](http://www.sanfordberman.org/prejant.htm) 101 | - Mike Jones, [Metadata and power: toward relational agency in archival description](http://www.mikejonesonline.com/contextjunky/2017/09/27/metadata-and-power-toward-relational-agency-in-archival-description/#_edn24) 102 | - Alexander R. 
Galloway, [An Interview with McKenzie Wark](http://www.boundary2.org/2017/04/alexander-r-galloway-an-interview-with-mckenzie-wark/) 103 | - Lindsay McKenzie, [Sci-Hub’s cache of pirated papers is so big, subscription journals are doomed, data analyst suggests](http://www.sciencemag.org/news/2017/07/sci-hub-s-cache-pirated-papers-so-big-subscription-journals-are-doomed-data-analyst) 104 | - Bastian Greshake, [Looking into Pandora's Box: The Content of Sci-Hub and its Usage](https://f1000research.com/articles/6-541/v1) 105 | - Lawrence Liang, [Shadow Libraries](http://www.e-flux.com/journal/37/61228/shadow-libraries/) 106 | - Stephen Schwartz, [Libraries Burning: From Sarajevo to Mosul](http://www.islamicpluralism.org/2462/libraries-burning-from-sarajevo-to-mosul) 107 | - Mohsen Biparva, [Book-Burning or Genocide](http://en.izhamburg.de/index.aspx?pid=99&articleid=67937) 108 | - Jarrett M. Drake, [RadTech Meets RadArch: Towards A New Principle for Archives and Archival Description](https://medium.com/on-archivy/radtech-meets-radarch-towards-a-new-principle-for-archives-and-archival-description-568f133e4325) 109 | - Miriam Rasch, [Reflect and Act! 
Introduction to the Society of the Query Reader](http://networkcultures.org/query/2014/04/23/reflect-and-act-introduction-to-the-society-of-the-query-reader/) 110 | - Colleen Flaherty, [References and Citations for All](https://www.insidehighered.com/news/2017/12/06/scholars-push-free-access-online-citation-data-saying-they-need-and-deserve-access#.WigwHdRyMdA.twitter) 111 | - Manu Sporny, [JSON-LD and Why I Hate the Semantic Web](http://manu.sporny.org/2014/json-ld-origins-2/) 112 | 113 | ## unordered 114 | 115 | http://dublincore.org/documents/dc-rdf/ 116 | http://milicicvuk.com/blog/2011/09/24/the-rdf-graph-uri-pattern/ 117 | http://patterns.dataincubator.org/book/proxy-uris.html 118 | https://www.w3.org/standards/techs/rdf#w3c_all 119 | https://www.w3.org/ns/org# 120 | http://dublincore.org/2012/06/14/dcterms 121 | https://www.w3.org/2000/01/rdf-schema# 122 | https://www.w3.org/2002/07/owl# 123 | https://www.d3web.de/Wiki.jsp?page=Demo%20-%20Simpsons 124 | https://www.w3.org/TR/2012/REC-rdf-plain-literal-20121211/ 125 | http://book.freeyourmetadata.org/ 126 | https://pro.europeana.eu/resources/apis/record 127 | http://schematron.com/front-page/the-schematron-skeleton-implementation/ 128 | https://www.w3.org/TR/skos-primer/ 129 | http://www.willpowerinfo.co.uk/glossary.htm 130 | https://www.w3.org/1999/02/22-rdf-syntax-ns# 131 | https://old.datahub.io/dataset/doi 132 | https://rdfa.info/play/ 133 | http://www.worldcat.org/title/fr-david/oclc/849686432&referer=brief_results 134 | http://digital-libraries.saic.edu/cdm/singleitem/collection/jfabc/id/569/rec/1 135 | http://experiment.worldcat.org/oclc/940766981.ttl 136 | http://www.publicbooks.org/trump-syllabus-3-0/ 137 | http://www.publicbooks.org/wp-json/wp/v2/posts?search=syllabus 138 | http://rdf-translator.appspot.com/ 139 | https://alistapart.com/article/introduction-to-rdfa 140 | -------------------------------------------------------------------------------- /test/array-to-tree.test.js: 
// Verifies arrayToTree's nesting behavior: the splitter assigns a "depth"
// to marker values ('#' strings here, heading levels in the HTML importer)
// and null to ordinary items; deeper markers open nested sub-arrays.
describe('arrayToTree', () => {
  it('takes a flat array and makes it into a nested array', () => {
    var input = ['.', '#', '.', '.', '##', '.', '####', '.', '##', '.', '##', '.', '#', '.']
    var expected = [
      '.',
      ['#', '.', '.',
        ['##', '.',
          ['####', '.']
        ],
        ['##', '.'],
        ['##', '.']
      ],
      ['#', '.']
    ]
    // depth = number of '#' characters; non-marker values return null
    var hashSplitter = (value) => {
      if (value && value[0] === '#') return value.length
      return null
    }
    expect(arrayToTree(input, hashSplitter)).to.deep.eql(expected)
  })

  it('takes optional transform function, allowing customisation of nesting', () => {
    var input = ['.', '#', '.', '.', '##', '.', '####', '.', '##', '.', '##', '.', '#', '.']
    // with a transform, each nested run is wrapped rather than left as a bare
    // array — the HTML importer uses this to build synthetic <section> nodes
    var expected = [
      '.',
      { children: ['#', '.', '.',
        { children: ['##', '.',
          { children: ['####', '.'] }
        ] },
        { children: ['##', '.'] },
        { children: ['##', '.'] }
      ] },
      { children: ['#', '.'] }
    ]
    var hashSplitter = (value) => {
      if (value && value[0] === '#') return value.length
      return null
    }
    var transform = (array) => {
      return { children: array }
    }
    expect(arrayToTree(input, hashSplitter, transform)).to.deep.eql(expected)
  })
})
// Exercises the collection behavior: totalItems() counting (flat and
// paginated) and stream() iteration (ordering, limit, and page options).
// Bodiless it() calls are mocha "pending" specs — documented but unimplemented.
describe('Collection Node', () => {
  let hr
  let collection

  describe('.totalItems()', () => {
    context('with no items', () => {
      beforeEach(async () => {
        hr = ramHyperReadings()
        collection = await hr.createCollection('wonderful')
      })
      it('returns zero', async () => {
        const total = await collection.totalItems()
        expect(total).to.eql(0)
      })
    })
    context('with no pagination', () => {
      beforeEach(async () => {
        hr = ramHyperReadings()
        collection = await hr.createCollection('wonderful')
        for (var i = 0; i < 4; i++) {
          await collection.addItem(`item${i}`)
        }
      })
      it('return the number of items within the collection', async () => {
        const total = await collection.totalItems()
        expect(total).to.eql(4)
      })
      it('increases when items are added', async () => {
        await collection.addItem('new item')
        const total = await collection.totalItems()
        expect(total).to.eql(5)
      })
      it('decreases when items are removed')
    })
    context('with pagination', () => {
      beforeEach(async () => {
        hr = ramHyperReadings()
        collection = await hr.createCollection('wonderful')
        // 4 pages x 6 items = 24 items total
        for (var i = 0; i < 4; i++) {
          const page = await collection.addNewPage()
          for (var j = 0; j < 6; j++) {
            await page.addItem(`page${i}-item${j}`)
          }
        }
      })
      it('return the number of items within the collection, not the number of pages', async () => {
        const total = await collection.totalItems()
        expect(total).to.eql(24)
      })
      it('increases when items are added', async () => {
        // adds 2 more pages of 6 (item names intentionally reuse page0/page1
        // labels; only the count is asserted here)
        for (var i = 0; i < 2; i++) {
          const page = await collection.addNewPage()
          for (var j = 0; j < 6; j++) {
            await page.addItem(`page${i}-item${j}`)
          }
        }
        const total = await collection.totalItems()
        expect(total).to.eql(36)
      })
      it('decreases when items are removed')
    })
  })

  describe('.stream()', () => {
    context('with no pagination', () => {
      before(async () => {
        hr = ramHyperReadings()
        collection = await hr.createCollection('wonderful')
        for (var i = 0; i < 4; i++) {
          await collection.addItem(`item${i}`)
        }
      })
      context('with default options', () => {
        it('returns a stream of all items in the collection', async () => {
          const items = await collect(collection.stream())
          expect(items).to.have.length(4)
          const expected = Array.from({ length: 4 }, (v, i) => `item${i}`)
          expect(items).to.have.members(expected)
        })
      })
      context('with opts.limit', () => {
        it('returns items up to the limit', async () => {
          const limit = 2
          const items = await collect(collection.stream({ limit }))
          expect(items).to.have.length(2)
        })
      })
      // NOTE(review): the option actually passed is `page` (singular) —
      // this context title ('with opts.pages') looks like a typo.
      context('with opts.pages', () => {
        it('has no effect', async () => {
          const page = 1
          const items = await collect(collection.stream({ page }))
          expect(items).to.have.length(4)
        })
      })
    })
    context('no items', () => {
      before(async () => {
        hr = ramHyperReadings()
        collection = await hr.createCollection('wonderful')
      })
      context('with default options', () => {
        it('returns a stream of all items in the collection', async () => {
          const items = await collect(collection.stream())
          expect(items).to.have.length(0)
        })
      })
      context('with opts.limit', () => {
        it('returns items up to the limit', async () => {
          const limit = 2
          const items = await collect(collection.stream({ limit }))
          expect(items).to.have.length(0)
        })
      })
    })
    context('with 4 pages and 6 items per page (Literals)', () => {
      before(async () => {
        hr = ramHyperReadings()
        collection = await hr.createCollection('wonderful')
        for (var i = 0; i < 4; i++) {
          const page = await collection.addNewPage()
          for (var j = 0; j < 6; j++) {
            await page.addItem(`page${i}-item${j}`)
          }
        }
      })
      context('with default options', () => {
        it('returns a stream of all items in the collection', async () => {
          const items = await collect(collection.stream())
          expect(items).to.have.length(4 * 6)
          const expected = Array.from({ length: 4 * 6 }, (v, i) => `page${Math.floor(i / 6)}-item${i % 6}`)
          expect(items).to.have.members(expected)
        })

        it('returns values in page order', async () => {
          const items = await collect(collection.stream())
          expect(items).to.have.length(4 * 6)
          items.forEach((v, i) => {
            expect(v).to.match(new RegExp(`^page${Math.floor(i / 6)}-`))
          })
        })
      })
      context('with opts.limit', () => {
        it('returns items up to the limit', async () => {
          const limit = 1
          const items = await collect(collection.stream({ limit }))
          expect(items).to.have.length(limit)
          const expected = Array.from({ length: limit }, (v, i) => `page${Math.floor(i / 6)}-item${i % 6}`)
          expect(items).to.have.members(expected)
        })

        it('returns values in page order', async () => {
          const limit = 10
          const items = await collect(collection.stream({ limit }))
          expect(items).to.have.length(limit)
          items.forEach((v, i) => {
            expect(v).to.match(new RegExp(`^page${Math.floor(i / 6)}-`))
          })
        })
      })
      context('with opts.page', () => {
        it('returns values from a specific page', async () => {
          const items = await collect(collection.stream({ page: 2 }))
          expect(items).to.have.length(6)
          items.forEach((v, i) => {
            expect(v).to.match(new RegExp(`^page2-`))
          })
        })
        it('returns values from the first page when page = 0', async () => {
          const items = await collect(collection.stream({ page: 0 }))
          expect(items).to.have.length(6)
          items.forEach((v, i) => {
            expect(v).to.match(new RegExp(`^page0-`))
          })
        })
        it('returns nothing when page is out of range (positive)', async () => {
          const items = await collect(collection.stream({ page: 5 }))
          expect(items).to.have.length(0)
        })
        it('returns nothing when page is out of range (negative)', async () => {
          const items = await collect(collection.stream({ page: -5 }))
          expect(items).to.have.length(0)
        })
      })
    })
  })
})
/**
 * Iterates `container` and asserts that each visited child is a node of
 * `type` whose name matches the next entry of `expected`, and that the
 * total number of children equals `expected.length`.
 *
 * Generalized: `type` defaults to 'doco:Paragraph' (the only type the
 * original helper could check), so existing two-argument callers are
 * unaffected while suites for other node types can reuse the helper.
 *
 * @param {object} container - node exposing iterate(fn)
 * @param {string[]} expected - child names in expected iteration order
 * @param {string} [type='doco:Paragraph'] - expected rdf type of every child
 */
async function confirmIteration (container, expected, type = 'doco:Paragraph') {
  let count = 0
  await container.iterate(async (node) => {
    expect(node.type).to.eql(type)
    expect(node.name).to.eql(expected[count])
    count++
  })
  // iterate must visit exactly the expected number of children
  expect(count).to.eql(expected.length)
}

/**
 * Asserts that container.contains() returns exactly the nodes named in
 * `expected` (membership check — order-insensitive).
 *
 * @param {object} container - node exposing contains()
 * @param {string[]} expected - names the container must hold
 */
async function confirmContent (container, expected) {
  const contents = await container.contains()
  expect(contents).to.have.length(expected.length)
  expect(contents.map(v => v.name)).to.include.all.members(expected)
}

/**
 * Convenience wrapper: checks both iteration order and membership.
 *
 * @param {object} container - node exposing iterate(fn) and contains()
 * @param {string[]} expected - child names in expected order
 */
async function confirmIterationAndContent (container, expected) {
  // check iteration order
  await confirmIteration(container, expected)
  // check membership via contains()
  await confirmContent(container, expected)
}
await hr.createNode('doco:Paragraph') 61 | await c.insertNode(p) 62 | let count = 0 63 | await c.iterate(() => { count++ }) 64 | expect(count).to.eql(1) 65 | await c.removeNodesFrom(0) 66 | await c.iterate(async () => { 67 | throw new Error('Should not invoke function') 68 | }) 69 | }) 70 | }) 71 | }) 72 | 73 | describe('insert()', () => { 74 | it('inserts new nodes', async () => { 75 | const c = await hr.createNode('doco:Section') 76 | for (var i = 0; i < 4; i++) { 77 | const p = await hr.createNode('doco:Paragraph') 78 | await p.set('rdf:value', i) 79 | await c.insertNode(p) 80 | } 81 | 82 | let count = 0 83 | await c.iterate(async (node) => { 84 | const v = await node.get('rdf:value') 85 | expect(node.type).to.eql('doco:Paragraph') 86 | expect(v).to.eql(count) 87 | count++ 88 | }) 89 | expect(count).to.eql(4) 90 | }) 91 | }) 92 | 93 | describe('.at(index, [opts])', () => { 94 | let c 95 | context('without opts', () => { 96 | context('with empty container', () => { 97 | beforeEach(async () => { 98 | c = await hr.createNode('doco:Section') 99 | }) 100 | it('returns null if no content at that index', async () => { 101 | const node = await c.at(0) 102 | expect(node).to.eql(null) 103 | }) 104 | it('returns null if no content at that index (again)', async () => { 105 | const node = await c.at(2000) 106 | expect(node).to.eql(null) 107 | }) 108 | }) 109 | context('with content in container', () => { 110 | let initialData = [] 111 | beforeEach(async () => { 112 | c = await hr.createNode('doco:Section') 113 | initialData = [] 114 | /* insert nodes */ 115 | for (var i = 0; i < 4; i++) { 116 | const p = await hr.createNode('doco:Paragraph') 117 | initialData.push(p.name) 118 | await c.insertNode(p) 119 | } 120 | }) 121 | it('returns content at index', async () => { 122 | let node = await c.at(0) 123 | expect(node.name).to.eql(initialData[0]) 124 | node = await c.at(1) 125 | expect(node.name).to.eql(initialData[1]) 126 | node = await c.at(3) 127 | 
expect(node.name).to.eql(initialData[3]) 128 | }) 129 | it('returns null if no content at that index', async () => { 130 | const node = await c.at(2000) 131 | expect(node).to.eql(null) 132 | }) 133 | }) 134 | }) 135 | 136 | context('with opts.listItems = true', () => { 137 | const opts = { listItems: true } 138 | context('with empty container', () => { 139 | beforeEach(async () => { 140 | c = await hr.createNode('doco:Section') 141 | }) 142 | it('returns null if no content at that index', async () => { 143 | const node = await c.at(0, opts) 144 | expect(node).to.eql(null) 145 | }) 146 | }) 147 | context('with content in container', () => { 148 | let listItems = [] 149 | beforeEach(async () => { 150 | c = await hr.createNode('doco:Section') 151 | listItems = [] 152 | for (var i = 0; i < 4; i++) { 153 | const p = await hr.createNode('doco:Paragraph') 154 | await c.insertNode(p) 155 | } 156 | await c.iterate(async (child) => { 157 | listItems.push(child.name) 158 | }, opts) 159 | }) 160 | it('returns content at index', async () => { 161 | let node = await c.at(0, opts) 162 | expect(node.name).to.eql(listItems[0]) 163 | node = await c.at(1, opts) 164 | expect(node.name).to.eql(listItems[1]) 165 | node = await c.at(3, opts) 166 | expect(node.name).to.eql(listItems[3]) 167 | }) 168 | it('returns null if no content at that index', async () => { 169 | const node = await c.at(2000, opts) 170 | expect(node).to.eql(null) 171 | }) 172 | }) 173 | }) 174 | }) 175 | 176 | describe('.removeNodeAt(index)', () => { 177 | let c 178 | let initialData = [] 179 | beforeEach(async () => { 180 | c = await hr.createNode('doco:Section') 181 | initialData = [] 182 | /* insert nodes */ 183 | for (var i = 0; i < 5; i++) { 184 | const p = await hr.createNode('doco:Paragraph') 185 | initialData.push(p.name) 186 | await c.insertNode(p) 187 | } 188 | }) 189 | it('removes node at the start', async () => { 190 | initialData.splice(0, 1) 191 | await c.removeNodeAt(0) 192 | await confirmIteration(c, 
initialData) 193 | }) 194 | it('removes node in the middle', async () => { 195 | initialData.splice(3, 1) 196 | await c.removeNodeAt(3) 197 | await confirmIteration(c, initialData) 198 | }) 199 | it('removes node at the end', async () => { 200 | const end = initialData.length - 1 201 | initialData.splice(end, 1) 202 | await c.removeNodeAt(end) 203 | await confirmIteration(c, initialData) 204 | }) 205 | }) 206 | 207 | describe('.removeNodesFrom(index)', () => { 208 | let c 209 | let initialData = [] 210 | beforeEach(async () => { 211 | c = await hr.createNode('doco:Section') 212 | initialData = [] 213 | /* insert nodes */ 214 | for (var i = 0; i < 5; i++) { 215 | const p = await hr.createNode('doco:Paragraph') 216 | initialData.push(p.name) 217 | await c.insertNode(p) 218 | } 219 | }) 220 | it('removes node starting from the start', async () => { 221 | await c.removeNodesFrom(0) 222 | await confirmIteration(c, []) 223 | }) 224 | it('removes node starting from somewhere in the middle', async () => { 225 | const after = initialData.slice(0, 3) 226 | await c.removeNodesFrom(3) 227 | await confirmIteration(c, after) 228 | }) 229 | it('removes node starting from the end', async () => { 230 | const after = initialData.slice(0, initialData.length - 1) 231 | await c.removeNodesFrom(initialData.length - 1) 232 | await confirmIteration(c, after) 233 | }) 234 | }) 235 | 236 | describe('.updateList(nodeIds)', async () => { 237 | context('with empty container', () => { 238 | it('inserts adds all node ids to the container in the order of the array', async () => { 239 | const c = await hr.createNode('doco:Section') 240 | const newData = [] 241 | /* insert nodes */ 242 | for (var i = 0; i < 4; i++) { 243 | const p = await hr.createNode('doco:Paragraph') 244 | newData.push(p.name) 245 | } 246 | await c.updateList(newData) 247 | // check iteration 248 | await confirmIterationAndContent(c, newData) 249 | }) 250 | }) 251 | context('with pre-existing container with nodes already 
inserted', () => { 252 | let c = null 253 | let initialData = [] 254 | beforeEach(async () => { 255 | c = await hr.createNode('doco:Section') 256 | initialData = [] 257 | /* insert nodes */ 258 | for (var i = 0; i < 4; i++) { 259 | const p = await hr.createNode('doco:Paragraph') 260 | initialData.push(p.name) 261 | await c.insertNode(p) 262 | } 263 | }) 264 | it('updates the list to match array (prepend only)', async () => { 265 | const newData = [...initialData] 266 | for (var j = 0; j < 2; j++) { 267 | const p = await hr.createNode('doco:Paragraph') 268 | newData.push(p.name) 269 | } 270 | await c.updateList(newData) 271 | await confirmIterationAndContent(c, newData) 272 | }) 273 | it('updates the list to match array (append only)', async () => { 274 | const newData = [] 275 | for (var j = 0; j < 2; j++) { 276 | const p = await hr.createNode('doco:Paragraph') 277 | newData.push(p.name) 278 | } 279 | newData.push(...initialData) 280 | await c.updateList(newData) 281 | await confirmIterationAndContent(c, newData) 282 | }) 283 | it('updates the list to match array (inserted in the middle)', async () => { 284 | const newData = initialData.slice(0, 2) 285 | for (var j = 0; j < 2; j++) { 286 | const p = await hr.createNode('doco:Paragraph') 287 | newData.push(p.name) 288 | } 289 | newData.push(...initialData.slice(2)) 290 | await c.updateList(newData) 291 | await confirmIterationAndContent(c, newData) 292 | }) 293 | it('updates the list to match array (zipped insertion)', async () => { 294 | const data = [] 295 | for (var j = 0; j < initialData.length; j++) { 296 | const p = await hr.createNode('doco:Paragraph') 297 | data.push(p.name) 298 | } 299 | const newData = [] 300 | data.forEach((v, i) => { 301 | newData.push(v, initialData[i]) 302 | }) 303 | await c.updateList(newData) 304 | await confirmIterationAndContent(c, newData) 305 | }) 306 | it('updates the list to match array (append and prepend)', async () => { 307 | const newData = [] 308 | for (var j = 0; j < 2; 
j++) { 309 | const p = await hr.createNode('doco:Paragraph') 310 | newData.push(p.name) 311 | } 312 | newData.push(...initialData) 313 | for (j = 0; j < 2; j++) { 314 | const p = await hr.createNode('doco:Paragraph') 315 | newData.push(p.name) 316 | } 317 | await c.updateList(newData) 318 | await confirmIterationAndContent(c, newData) 319 | }) 320 | it('updates the list to match array (empty)', async () => { 321 | const newData = [] 322 | await c.updateList(newData) 323 | await confirmIterationAndContent(c, newData) 324 | }) 325 | it('updates the list to match array (deletions)', async () => { 326 | const newData = initialData.slice(1, 2) 327 | await c.updateList(newData) 328 | await confirmIterationAndContent(c, newData) 329 | }) 330 | it('updates the list to match array (deletions + preppended)', async () => { 331 | const newData = [] 332 | for (var j = 0; j < 2; j++) { 333 | const p = await hr.createNode('doco:Paragraph') 334 | newData.push(p.name) 335 | } 336 | newData.push(...initialData.slice(1, 2)) 337 | await c.updateList(newData) 338 | await confirmIterationAndContent(c, newData) 339 | }) 340 | it('updates the list to match array (deletions + appended)', async () => { 341 | const newData = initialData.slice(1, 2) 342 | for (var j = 0; j < 2; j++) { 343 | const p = await hr.createNode('doco:Paragraph') 344 | newData.push(p.name) 345 | } 346 | await c.updateList(newData) 347 | await confirmIterationAndContent(c, newData) 348 | }) 349 | it('updates the list to match array (deletions + appended)', async () => { 350 | const newData = initialData.slice(1, 2) 351 | for (var j = 0; j < 2; j++) { 352 | const p = await hr.createNode('doco:Paragraph') 353 | newData.push(p.name) 354 | } 355 | await c.updateList(newData) 356 | await confirmIterationAndContent(c, newData) 357 | }) 358 | it('updates the list to match array (deletions + appended + preppended)', async () => { 359 | const newData = [] 360 | for (var j = 0; j < 2; j++) { 361 | const p = await 
hr.createNode('doco:Paragraph') 362 | newData.push(p.name) 363 | } 364 | newData.push(...initialData.slice(1, 2)) 365 | for (j = 0; j < 2; j++) { 366 | const p = await hr.createNode('doco:Paragraph') 367 | newData.push(p.name) 368 | } 369 | await c.updateList(newData) 370 | await confirmIterationAndContent(c, newData) 371 | }) 372 | }) 373 | }) 374 | 375 | xdescribe('remove()', async () => { 376 | it('removes existing node', async () => { 377 | const c = await hr.createNode('doco:Section') 378 | const inserted = [] 379 | for (var i = 0; i < 4; i++) { 380 | const p = await hr.createNode('doco:Paragraph') 381 | inserted.push(p) 382 | await p.set('rdf:value', i) 383 | await c.insertNode(p) 384 | } 385 | 386 | inserted[1].disconnect() 387 | 388 | // did this actually work? 389 | let count = 0 390 | await c.iterate(async (node) => { 391 | // const v = await node.get('rdf:value', 1) 392 | expect(node.type).to.eql('doco:Paragraph') 393 | // expect(v).to.eql(count) 394 | count++ 395 | }) 396 | expect(count).to.eql(3) 397 | }) 398 | }) 399 | }) 400 | -------------------------------------------------------------------------------- /test/behaviors/helpers.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { expect } from 'chai' 3 | import { ramHyperReadings } from '../helpers/general' 4 | 5 | import { makeSetAsLabelOnType } from '../../lib/definitions/helpers' 6 | import { createFactory, compose } from '../../lib/definitions/utils' 7 | import StandardBehavior from '../../lib/definitions/standard-behavior' 8 | import { spo } from '../../lib/utils' 9 | 10 | describe('Behaviour Construction Helpers', () => { 11 | let hr 12 | 13 | beforeEach(done => { 14 | hr = ramHyperReadings() 15 | hr.on('ready', done) 16 | }) 17 | 18 | describe('.makeSetAsLabelOnType()', () => { 19 | let factory 20 | let node 21 | let otherNode 22 | const PREDICATE = 'hr:predicate' 23 | const CLASS = 'hr:TestClass' 24 | before(() => 
{ 25 | // create factory to generate nodes with the behavours we want to test 26 | const testBehavior = { 27 | setTest: makeSetAsLabelOnType(PREDICATE, CLASS) 28 | } 29 | factory = createFactory(compose(StandardBehavior, testBehavior)) 30 | }) 31 | 32 | beforeEach(async () => { 33 | let triple = spo('_:1', 'rdf:type', 'hr:test') 34 | await hr._put(triple) 35 | node = factory({ name: triple.subject, type: triple.object, hr }) 36 | triple = spo('_:2', 'rdf:type', 'hr:test') 37 | await hr._put(triple) 38 | otherNode = factory({ name: triple.subject, type: triple.object, hr }) 39 | }) 40 | 41 | context('when predicate has not already been set', () => { 42 | context('when setting literal value', () => { 43 | it('sets literal as a label with type on specified predicate', async () => { 44 | await node.setTest('value') 45 | const value = await node.get(PREDICATE) 46 | expect(value.type).to.equal(CLASS) 47 | expect(await value.get('rdfs:label')).to.equal('value') 48 | }) 49 | }) 50 | context('when setting node value', () => { 51 | it('sets node directly on specified predicate', async () => { 52 | const newNode = await hr.createNode('as:Note') 53 | await node.setTest(newNode) 54 | const value = await node.get(PREDICATE) 55 | expect(value.name).to.equal(newNode.name) 56 | }) 57 | }) 58 | context('when setting literal value which already exists on another node', () => { 59 | it('does not create a new node unnecessarily but uses existing node', async () => { 60 | await otherNode.setTest(2018) 61 | await node.setTest(2018) 62 | const otherValue = await otherNode.get(PREDICATE) 63 | const value = await node.get(PREDICATE) 64 | expect(value.name).to.equal(otherValue.name) 65 | }) 66 | }) 67 | }) 68 | 69 | context('when predicate already exists', () => { 70 | context('when setting literal value', () => { 71 | it('overwrites literal as a label with type on specified predicate', async () => { 72 | await node.setTest('oldValue') 73 | await node.setTest('value') 74 | const value = 
await node.get(PREDICATE) 75 | expect(value.type).to.equal(CLASS) 76 | expect(await value.get('rdfs:label')).to.equal('value') 77 | }) 78 | }) 79 | context('when setting node value', () => { 80 | it('overwrites node directly on specified predicate', async () => { 81 | await node.setTest('oldValue') 82 | const newNode = await hr.createNode('as:Note') 83 | await node.setTest(newNode) 84 | const value = await node.get(PREDICATE) 85 | expect(value.name).to.equal(newNode.name) 86 | }) 87 | it('deletes old node', async () => { 88 | await node.setTest('oldValue') 89 | const oldNode = await node.get(PREDICATE) 90 | const newNode = await hr.createNode('as:Note') 91 | await node.setTest(newNode) 92 | expect(await oldNode.get('rdfs:label')).to.eql(null) 93 | }) 94 | }) 95 | context('when setting a literal value which already exists on another node', () => { 96 | it('does not create a new node unnecessarily but uses existing node', async () => { 97 | await node.setTest('oldValue') 98 | await otherNode.setTest(2018) 99 | await node.setTest(2018) 100 | const otherValue = await otherNode.get(PREDICATE) 101 | const value = await node.get(PREDICATE) 102 | expect(value.name).to.equal(otherValue.name) 103 | }) 104 | }) 105 | context('when overwriting a literal value which already exists on another node', () => { 106 | it('creates a new node and does not edit the existing one', async () => { 107 | await node.setTest(2018) 108 | await otherNode.setTest(2018) 109 | await node.setTest('new value') 110 | const otherValue = await otherNode.get(PREDICATE) 111 | const value = await node.get(PREDICATE) 112 | expect(await otherValue.get('rdfs:label')).to.equal(2018) 113 | expect(await value.get('rdfs:label')).to.equal('new value') 114 | }) 115 | }) 116 | }) 117 | }) 118 | describe('.makeGetLiteralOrPredicateFromNodeWithType()', () => { 119 | let factory 120 | let node 121 | let otherNode 122 | const PREDICATE = 'hr:predicate' 123 | const CLASS = 'hr:TestClass' 124 | before(() => { 125 | // 
/**
 * Drains a readable stream into an array.
 *
 * Buffers every 'data' chunk (object-mode values or Buffers alike) and
 * resolves with the collected array once the stream emits 'end'.
 * Rejects with the stream's error if 'error' is emitted first.
 *
 * @param {import('stream').Readable} stream - source stream to drain
 * @returns {Promise<Array>} all chunks, in emission order
 */
export function collect (stream) {
  return new Promise((resolve, reject) => {
    const chunks = []
    stream.on('data', (chunk) => { chunks.push(chunk) })
    stream.on('end', () => { resolve(chunks) })
    stream.on('error', reject)
  })
}
it('throws error if title is empty', () => { 24 | expect(() => hr.setTitle('')).to.throw(/Title must be a string/) 25 | }) 26 | it('throws error if title is not a string', () => { 27 | expect(() => hr.setTitle(222)).to.throw(/Title must be a string/) 28 | }) 29 | it('sets the title of the reading list', async () => { 30 | await hr.setTitle('hello') 31 | const title = await hr.title() 32 | expect(title).to.eql('hello') 33 | }) 34 | }) 35 | 36 | describe('hr.title()', () => { 37 | it('gets the title of the reading list', async () => { 38 | await hr.setTitle('radical librarians') 39 | const title = await hr.title() 40 | expect(title).to.eql('radical librarians') 41 | }) 42 | 43 | it('returns untitled if title has not been set', async () => { 44 | const title = await hr.title() 45 | expect(title).to.eql('untitled') 46 | }) 47 | }) 48 | 49 | describe('initial state', () => { 50 | it('starts without a root node', async () => { 51 | var r = await hr.root() 52 | expect(r).to.eql(null) 53 | }) 54 | it('has private _nodeCount === 0', () => { 55 | expect(hr._nodeCount).to.eql(0) 56 | }) 57 | }) 58 | 59 | describe('hr.importFile(filename, opts)', () => { 60 | it('throws error if filename is not given', async () => { 61 | expect(() => hr.importFile()).to.throw(/requires filename to be a string/) 62 | }) 63 | it('throws error if filename is not string', async () => { 64 | expect(() => hr.importFile(23)).to.throw(/requires filename to be a string/) 65 | }) 66 | it('throws error if filetype is not recognized', async () => { 67 | expect(() => hr.importFile('test.pdf')).to.throw(/does not recognize file extension/) 68 | }) 69 | }) 70 | 71 | describe('hr.import(data, opts)', () => { 72 | it('resolves to undefined when no data is provided', async () => { 73 | var data = await hr.import() 74 | expect(data).to.eql(undefined) 75 | }) 76 | it('throws error if opts.type is not set', async () => { 77 | expect(() => hr.import('# test')).to.throw(/requires opts.type to be set/) 78 | expect(() 
=> hr.import('# test', {})).to.throw(/requires opts.type to be set/) 79 | }) 80 | it('throws error if opts.type is not supported', async () => { 81 | expect(() => hr.import('# test', { type: 'pdf' })).to.throw(/does not recognize type/) 82 | }) 83 | }) 84 | 85 | describe('hr.root()', () => { 86 | it('returns a node object for hr root', async () => { 87 | const node = await hr.createNode('hr:root') 88 | const r = await hr.root() 89 | // expect(r).to.be.instanceOf() 90 | expect(r).to.include.keys('name', 'type') 91 | // these variables should not be hard coded as each are defined in code 92 | expect(r.name).to.eql(node.name) 93 | expect(r.type).to.eql('hr:root') 94 | }) 95 | }) 96 | 97 | describe('hr.nodesByType(type)', () => { 98 | it('adds a new blank node to the graph', async () => { 99 | const type = 'http://example.com/namespace/' 100 | await hr.createNode(type) 101 | await hr.createNode(type) 102 | await hr.createNode('not-this-one') 103 | const nodes = await hr.nodesByType(type) 104 | expect(nodes).to.have.length(2) 105 | nodes.forEach(node => expect(node.type).to.eql(type)) 106 | }) 107 | }) 108 | 109 | describe('hr.createNode(type, [properties])', () => { 110 | it('adds a new blank node to the graph of type', async () => { 111 | const type = 'http://example.com/namespace/' 112 | const n = await hr.createNode(type) 113 | const nodes = await hr.nodesByType(type) 114 | expect(nodes).to.have.length(1) 115 | expect(nodes[0].name).to.eql(n.name) 116 | }) 117 | it('returns rejected promise if no type is provided', () => { 118 | return hr.createNode() 119 | .then(() => expect.fail()) 120 | .catch(err => { 121 | expect(err).to.be.an('error') 122 | expect(err.message).to.be.string('Cannot create a node without type') 123 | }) 124 | }) 125 | it('allows you to set properties when creating new node', async () => { 126 | const type = 'http://example.com/namespace/' 127 | const data = { 'rdf:value': 23, 'c4o:hasContent': 'very import contents' } 128 | const n = await 
describe('hr.exists(id, [type])', () => {
  let name
  beforeEach(async () => {
    const n = await hr.createNode('hr:test')
    name = n.name
  })
  // FIX: the two context labels were swapped — the group labelled
  // 'with type' never passed a type to hr.exists(), and vice versa.
  context('without type', () => {
    it('returns false if node does not exist', async () => {
      expect(await hr.exists('hr:not')).to.eql(false)
    })
    it('returns true if node exists', async () => {
      expect(await hr.exists(name)).to.eql(true)
    })
  })
  context('with type', () => {
    it('returns false if node does not exist', async () => {
      // pass a type so this case actually exercises the typed lookup
      expect(await hr.exists('hr:not', 'hr:test')).to.eql(false)
    })
    it('returns false if node names exists but not with type', async () => {
      expect(await hr.exists(name, 'hr:root')).to.eql(false)
    })
    it('returns true if node exists', async () => {
      expect(await hr.exists(name, 'hr:test')).to.eql(true)
    })
  })
})
eslint-env mocha */ 2 | 3 | import { expect } from 'chai' 4 | import markdownImporter from '../../lib/importers/markdown' 5 | import { ramHyperReadings } from '../helpers/general' 6 | 7 | describe('markdownImporter', () => { 8 | context('with very simple markdown', () => { 9 | let hr 10 | before(async () => { 11 | const test = '# yes\n\nno\nmaybe' 12 | hr = ramHyperReadings() 13 | return markdownImporter(hr, test) 14 | }) 15 | it('contains head and body nodes', async () => { 16 | const expected = ['hr:head', 'hr:body'] 17 | await hr.iterate((node) => { 18 | expect(node.type).to.eql(expected.shift()) 19 | }) 20 | }) 21 | it('has an empty head', async () => { 22 | const head = await hr.head() 23 | await head.iterate((node) => { 24 | expect.fail() 25 | }) 26 | }) 27 | it('has body with single section node', async () => { 28 | const bodys = await hr.nodesByType('hr:body', { limit: 1 }) 29 | const body = bodys[0] 30 | const expected = ['doco:Section'] 31 | await body.iterate((node) => { 32 | expect(node.type).to.eql(expected.shift()) 33 | }) 34 | }) 35 | it('has section to have Title and Paragraph', async () => { 36 | const body = await hr.body() 37 | const section = await body.get('po:contains') 38 | const expected = [ 39 | { type: 'doco:Title', 'c4o:hasContent': 'yes' }, 40 | { type: 'doco:Paragraph', 'c4o:hasContent': 'no\nmaybe' }] 41 | await section.iterate(async (node) => { 42 | const x = expected.shift() 43 | expect(node.type).to.eql(x.type) 44 | expect(await node.get('c4o:hasContent')).to.eql(x['c4o:hasContent']) 45 | }) 46 | }) 47 | }).timeout(5000) 48 | }) 49 | -------------------------------------------------------------------------------- /test/importers/parse5Helpers.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import parse5 from 'parse5' 3 | import { expect } from 'chai' 4 | import { attr, textContent } from '../../lib/importers/parse5Helpers' 5 | 6 | describe('attr', () => { 7 | 
let node 8 | before(() => { 9 | node = parse5.parseFragment('
<div id="test" data-node="something" class="class names">something</div>
').childNodes[0] 10 | }) 11 | it('returns an attribute value from node (id)', () => { 12 | console.log(node) 13 | const output = attr(node, 'id') 14 | expect(output).to.eql('test') 15 | }) 16 | it('returns an attribute value from node (data-attr)', () => { 17 | const output = attr(node, 'data-node') 18 | expect(output).to.eql('something') 19 | }) 20 | it('returns an attribute value from node (class)', () => { 21 | const output = attr(node, 'class') 22 | expect(output).to.eql('class names') 23 | }) 24 | it('returns an attribute value from node (not set)', () => { 25 | const output = attr(node, 'data-not-set') 26 | expect(output).to.eql(undefined) 27 | }) 28 | it('returns undefined if node is falsy', () => { 29 | expect(attr()).to.eql(undefined) 30 | }) 31 | }) 32 | 33 | describe('textContent', () => { 34 | it('returns text content from node (simple)', () => { 35 | const fragment = '
<div>so what</div>
' 36 | const node = parse5.parseFragment(fragment).childNodes[0] 37 | expect(textContent(node)).to.eql('so what') 38 | }) 39 | it('returns text content from node (nested)', () => { 40 | const fragment = '
<div>deeply <span>nested node also works</span></div>
' 41 | const node = parse5.parseFragment(fragment).childNodes[0] 42 | expect(textContent(node)).to.eql('deeply nested node also works') 43 | }) 44 | it('returns text content from node (preserves white spaces over lines)', () => { 45 | const fragment = `
<div> 46 | over 47 | lines 48 | </div>
` 49 | const node = parse5.parseFragment(fragment).childNodes[0] 50 | expect(textContent(node)).to.eql('\n over\n lines\n ') 51 | }) 52 | it('returns text content from node (preserves white spaces)', () => { 53 | const fragment = '
<div> over lines </div>
' 54 | const node = parse5.parseFragment(fragment).childNodes[0] 55 | expect(textContent(node)).to.eql(' over lines ') 56 | }) 57 | it('returns text content from node (html breaks)', () => { 58 | const fragment = '
<div>over<br/>lines</div>
' 59 | const node = parse5.parseFragment(fragment).childNodes[0] 60 | expect(textContent(node)).to.eql('over\nlines') 61 | }) 62 | it('returns text content from node (html breaks unclosed)', () => { 63 | const fragment = '
<div>over<br>lines</div>
' 64 | const node = parse5.parseFragment(fragment).childNodes[0] 65 | expect(textContent(node)).to.eql('over\nlines') 66 | }) 67 | }) 68 | -------------------------------------------------------------------------------- /test/importers/zoteroApi/basic.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { expect } from 'chai' 3 | import { ramHyperReadings, collect } from '../../helpers/general' 4 | 5 | describe('basic zotero import behaviors', () => { 6 | let hr 7 | beforeEach(async () => { 8 | hr = ramHyperReadings() 9 | }) 10 | 11 | describe('reuse behaviors', () => { 12 | context('when there is a pre-existing contribution', () => { 13 | it('does not unnecessarily create duplicate contributions', async () => { 14 | const first = await hr.importZoteroReference({ 15 | itemType: 'The Promise of Happiness', 16 | creators: [{ 17 | firstName: 'Sara', 18 | lastName: 'Ahmed', 19 | creatorType: 'author' 20 | }] 21 | }) 22 | const second = await hr.importZoteroReference({ 23 | itemType: 'book', 24 | title: 'Queer phenomenology', 25 | creators: [{ 26 | firstName: 'Sara', 27 | lastName: 'Ahmed', 28 | creatorType: 'author' 29 | }] 30 | }) 31 | expect((await first.get('bf:contribution')).name).to.eql((await second.get('bf:contribution')).name) 32 | }) 33 | }) 34 | 35 | context('when there is a pre-existing person', () => { 36 | it('does not unnecessarily create a new Person, but links to existing matching person', async () => { 37 | await hr.importZoteroReference({ 38 | itemType: 'book', 39 | title: 'The Promise of Happiness', 40 | creators: [{ 41 | firstName: 'Sara', 42 | lastName: 'Ahmed', 43 | creatorType: 'author' 44 | }] 45 | }) 46 | await hr.importZoteroReference({ 47 | itemType: 'book', 48 | title: 'When Species Meet', 49 | creators: [{ 50 | creatorType: 'author', 51 | firstName: 'Donna', 52 | lastName: 'Haraway' 53 | }] 54 | }) 55 | await hr.importZoteroReference({ 56 | itemType: 'book', 57 | 
title: 'Thinking Through the Skin', 58 | creators: [ 59 | { 60 | firstName: 'Sara', 61 | lastName: 'Ahmed', 62 | creatorType: 'editor' 63 | } 64 | ] 65 | }) 66 | const people = await hr.findPeople() 67 | expect(people).to.have.length(2) 68 | }) 69 | }) 70 | 71 | context('when there is a pre-existing series', () => { 72 | it('does not unnecessarily create a new series, but links to existing matching series', async () => { 73 | const first = await hr.importZoteroReference({ 74 | itemType: 'book', 75 | title: 'Critique of cynical reason', 76 | series: 'Ideology' 77 | }) 78 | const second = await hr.importZoteroReference({ 79 | itemType: 'book', 80 | title: 'Postmodernism, or, The Cultural Logic of Late Capitalism', 81 | series: 'Ideology' 82 | }) 83 | const third = await hr.importZoteroReference({ 84 | itemType: 'book', 85 | title: 'Dance', 86 | series: 'Documents of Contemporary Art' 87 | }) 88 | const collection = await hr.getCollection('default') 89 | const references = await collect(collection.stream()) 90 | expect(references).to.have.length(3) 91 | expect((await first.hasSeries()).name).is.eql((await second.hasSeries()).name) 92 | expect((await first.hasSeries()).name).is.not.eql((await third.hasSeries()).name) 93 | }) 94 | }) 95 | 96 | context('when there is a pre-existing subject', () => { 97 | // TODO: fix subjects to not use strings, but types 98 | // Maybe madsrdf:SimpleType - although that then opens the big question of supporting ComplexTypes 99 | it.skip('does not unnecessarily create duplicate subjects', async () => { 100 | await hr.importZoteroReference({ 101 | itemType: 'book', 102 | title: 'The Promise of Happiness', 103 | tags: [ 104 | { tag: 'Philosophy', type: 1 }, 105 | { tag: 'Women\'s Studies', type: 1 }, 106 | { tag: 'LGBT Studies', type: 1 } 107 | ] 108 | }) 109 | await hr.importZoteroReference({ 110 | itemType: 'book', 111 | title: 'Queer phenomenology', 112 | tags: [ 113 | { tag: 'Social Science', type: 1 }, 114 | { tag: 'Women\'s Studies', 
type: 1 }, 115 | { tag: 'LGBT Studies', type: 1 } 116 | ] 117 | }) 118 | expect(await hr.subjects()).to.have.length(3) 119 | }) 120 | }) 121 | 122 | context('when there is a pre-existing publication', () => { 123 | it('does not unnecessarily create new publication data') 124 | }) 125 | }) 126 | }) 127 | -------------------------------------------------------------------------------- /test/importers/zoteroApi/book.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { expect } from 'chai' 3 | import { ramHyperReadings, collect } from '../../helpers/general' 4 | 5 | const data = { 6 | itemType: 'book', 7 | title: 'Martha Rosler: The Bowery in two inadequate descriptive systems', 8 | creators: [ 9 | { 10 | creatorType: 'author', 11 | firstName: 'Steve', 12 | lastName: 'Edwards' 13 | } 14 | ], 15 | abstractNote: 'a note describing the work', 16 | series: 'One work', 17 | seriesNumber: '4', 18 | volume: '1', 19 | numberOfVolumes: '2', 20 | edition: 'first edition', 21 | place: 'London', 22 | publisher: 'Afterall', 23 | date: '2012', 24 | numPages: '147', 25 | language: 'english', 26 | ISBN: '978-1-84638-083-9 978-1-84638-084-6', 27 | shortTitle: 'Martha Rosler', 28 | url: 'a url', 29 | accessDate: '', // we will ignore this for now 30 | archive: 'A fake archive', 31 | archiveLocation: 'Box 2', 32 | libraryCatalog: 'Library of Congress ISBN', 33 | callNumber: 'N6537.R582 A62 2012', 34 | rights: 'a rights statement', 35 | extra: 'this is a bit of extra info', 36 | tags: [ 37 | { 38 | tag: 'Bowery in two inadequate descriptive systems', 39 | type: 1 40 | }, 41 | { 42 | tag: 'Conceptual art', 43 | type: 1 44 | }, 45 | { 46 | tag: 'Criticism and interpretation', 47 | type: 1 48 | }, 49 | { 50 | tag: 'Rosler, Martha', 51 | type: 1 52 | }, 53 | { 54 | tag: 'United States', 55 | type: 1 56 | } 57 | ] 58 | } 59 | 60 | describe('importing a book from zotero api', () => { 61 | let hr 62 | let reference 63 | 
before(async () => { 64 | hr = ramHyperReadings() 65 | await hr.importZoteroReference(data) 66 | const collection = await hr.getCollection('default') 67 | const references = await collect(collection.stream()) 68 | if (references.length) reference = references[0] 69 | }) 70 | 71 | it('creates a new bf:Instance as a reference', async () => { 72 | expect(reference.type).to.eql('bf:Instance') 73 | }) 74 | 75 | it('sets title on the instance', async () => { 76 | const title = await reference.getTitle() 77 | expect(title).to.eql('Martha Rosler: The Bowery in two inadequate descriptive systems') 78 | }) 79 | 80 | it('sets abbreviated title on the instance', async () => { 81 | const abbreviatedTitle = await reference.getAbbreviatedTitle() 82 | expect(abbreviatedTitle).to.eql('Martha Rosler') 83 | }) 84 | 85 | it('adds creators as contributions to the instance', async () => { 86 | const contributions = await reference.contributions() 87 | expect(contributions).to.have.length(1) 88 | expect(contributions).to.deep.eql([{ 89 | name: 'Steve Edwards', 90 | firstName: 'Steve', 91 | lastName: 'Edwards', 92 | role: 'marcrel:aut' 93 | }]) 94 | }) 95 | 96 | it('sets associated tags as subjects on the instance', async () => { 97 | const subjects = await reference.subjects() 98 | expect(subjects).to.have.length(5) 99 | expect(subjects).to.include.members([ 100 | 'Bowery in two inadequate descriptive systems', 101 | 'Conceptual art', 102 | 'Criticism and interpretation', 103 | 'Rosler, Martha', 104 | 'United States' 105 | ]) 106 | }) 107 | 108 | it('sets abstractNote to summary on the instance', async () => { 109 | const summary = await reference.getSummary() 110 | expect(summary).to.eql('a note describing the work') 111 | }) 112 | 113 | it('sets ISBN field as identifiers on the instance', async () => { 114 | const identifiers = await reference.identifiers() 115 | expect(identifiers).to.deep.include.members([ 116 | { type: 'bf:Isnb', value: '978-1-84638-083-9' }, 117 | { type: 
'bf:Isnb', value: '978-1-84638-084-6' } 118 | ]) 119 | }) 120 | 121 | it('sets place, date and publisher fields as provisionActivity on the instance', async () => { 122 | const publications = await reference.publications() 123 | expect(publications).to.have.length(1) 124 | expect(publications[0].date).to.eql('2012') 125 | expect(await publications[0].agent.get('rdfs:label')).to.eql('Afterall') 126 | expect(await publications[0].place.get('rdfs:label')).to.eql('London') 127 | }) 128 | 129 | context('with item level information present', () => { 130 | let item 131 | before(async () => { 132 | const items = await reference.items() 133 | item = items[0] 134 | }) 135 | 136 | it('creates an item', async () => { 137 | expect(item.type).to.eql('bf:Item') 138 | }) 139 | 140 | it('sets relations between the instance and item', async () => { 141 | const itemsInstance = await item.itemOf() 142 | const instancesItems = await reference.items() 143 | expect(itemsInstance.name).to.eql(reference.name) 144 | expect(instancesItems[0].name).to.eql(item.name) 145 | }) 146 | 147 | it('sets url as electronicLocator on item', async () => { 148 | const locator = await item.getElectronicLocator() 149 | expect(locator).to.eql('a url') 150 | }) 151 | 152 | // this value is overridden by the archive location if present 153 | // need to conditionally test, and decide on default behavior 154 | it.skip('sets callNumber as shelfMark on item', async () => { 155 | const shelfMark = await item.getShelfMark() 156 | expect(shelfMark).to.eql('N6537.R582 A62 2012') 157 | }) 158 | it('creates an agent with name of archive field and assigns it to heldBy field', async () => { 159 | const agent = await item.heldBy() 160 | expect(await agent.get('rdfs:label')).to.eql(data.archive) 161 | }) 162 | it('sets archiveLocation as shelfMark on item', async () => { 163 | const shelfMark = await item.getShelfMark() 164 | expect(shelfMark).to.eql('Box 2') 165 | }) 166 | }) 167 | 168 | context('with series level 
information present', () => { 169 | it('creates a new series when series fields have values', async () => { 170 | const series = await reference.hasSeries() 171 | const seriesTitle = await series.getTitle() 172 | expect(seriesTitle).to.eql('One work') 173 | }) 174 | 175 | it('creates series with reciprocal relationship', async () => { 176 | const series = await reference.hasSeries() 177 | const seriesParts = await series.seriesOf() 178 | expect(seriesParts[0].name).to.eql(reference.name) 179 | }) 180 | }) 181 | 182 | it('sets seriesNumber to seriesEnumeration on instance', async () => { 183 | const seriesEnumeration = await reference.seriesEnumeration() 184 | expect(seriesEnumeration).to.eql(data.seriesNumber) 185 | }) 186 | 187 | it('sets edition to editionStatement on instance', async () => { 188 | const editionStatement = await reference.editionStatement() 189 | expect(editionStatement).to.eql(data.edition) 190 | }) 191 | 192 | // Enumeration and chronology are not very clear in BIBFRAME yet - not sure how we should implement it 193 | it.skip('sets pages to extent on instance', async () => { 194 | const extents = await reference.getExtents() 195 | expect(extents).length.to.eql(2) 196 | expect(extents).to.contain(data.pages) 197 | }) 198 | // Enumeration and chronology are not very clear in BIBFRAME yet - not sure how we should implement it 199 | it.skip('sets volume to extent on instance', async () => { 200 | const extents = await reference.getExtents() 201 | expect(extents).length.to.eql(2) 202 | expect(extents).to.contain(data.volume) 203 | }) 204 | 205 | it('needs to address numberOfVolumes') 206 | it('needs to address accessDate') 207 | 208 | it('sets language on the instance', async () => { 209 | const language = await reference.language() 210 | expect(language).to.equal(data.language) 211 | }) 212 | 213 | it('sets libraryCatalog as source', async () => { 214 | const source = await reference.source() 215 | expect(source).to.equal(data.libraryCatalog) 216 | 
}) 217 | 218 | it('set rights as copyrightRegistration', async () => { 219 | const rights = await reference.rights() 220 | expect(rights).to.equal(data.rights) 221 | }) 222 | 223 | it('sets extra as a note', async () => { 224 | const source = await reference.note() 225 | expect(source).to.equal(data.extra) 226 | }) 227 | }) 228 | -------------------------------------------------------------------------------- /test/importers/zoteroApi/bookSection.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { expect } from 'chai' 3 | import { ramHyperReadings, collect } from '../../helpers/general' 4 | 5 | const data = { 6 | itemType: 'bookSection', 7 | title: 'POETRY', 8 | creators: [ 9 | { 10 | creatorType: 'editor', 11 | firstName: 'Joanne', 12 | lastName: 'Shattock' 13 | } 14 | ], 15 | abstractNote: 'a note describing the work', 16 | bookTitle: 'The Cambridge Bibliography of English Literature', 17 | series: 'Not a real series', 18 | seriesNumber: '', 19 | volume: '4', 20 | numberOfVolumes: '', 21 | edition: '3', 22 | place: 'Cambridge', 23 | publisher: 'Cambridge University Press', 24 | date: '2000', 25 | pages: '207-858', 26 | language: '', 27 | ISBN: '978-0-511-51868-3', 28 | shortTitle: '', 29 | url: 'http://www.crossref.org/deleted_DOI.html', 30 | accessDate: '2018-07-03 06:57:08', 31 | archive: '', 32 | archiveLocation: '', 33 | libraryCatalog: 'Crossref', 34 | callNumber: '', 35 | rights: '', 36 | extra: 'DOI: 10.1017/CBO9780511518683.005', 37 | tags: [] 38 | } 39 | 40 | describe('importing a book section from zotero api', () => { 41 | let hr 42 | let reference 43 | before(async () => { 44 | hr = ramHyperReadings() 45 | await hr.importZoteroReference(data) 46 | const collection = await hr.getCollection('default') 47 | const references = await collect(collection.stream()) 48 | if (references.length) reference = references[0] 49 | }) 50 | 51 | it('creates a new bf:Instance', async () => { 52 
| expect(reference.type).to.eql('bf:Instance') 53 | }) 54 | 55 | it('sets title on the instance', async () => { 56 | const title = await reference.getTitle() 57 | expect(title).to.eql('POETRY') 58 | }) 59 | 60 | it('sets abbreviated title on the instance', async () => { 61 | const abbreviatedTitle = await reference.getAbbreviatedTitle() 62 | expect(abbreviatedTitle).to.eql(null) 63 | }) 64 | 65 | it('adds creators as contributions to the instance', async () => { 66 | const contributions = await reference.contributions() 67 | expect(contributions).to.have.length(1) 68 | expect(contributions).to.deep.eql([{ 69 | firstName: 'Joanne', 70 | lastName: 'Shattock', 71 | name: 'Joanne Shattock', 72 | role: 'marcrel:aut' 73 | }]) 74 | }) 75 | 76 | it('sets associated tags as subjects on the instance', async () => { 77 | const subjects = await reference.subjects() 78 | expect(subjects).to.have.length(0) 79 | }) 80 | 81 | it('sets abstractNote to summary on the instance', async () => { 82 | const summary = await reference.getSummary() 83 | expect(summary).to.eql('a note describing the work') 84 | }) 85 | 86 | it('adds ISBN field and DOI from extra field as an identifier on the instance', async () => { 87 | const identifiers = await reference.identifiers() 88 | expect(identifiers).to.deep.include.members([ 89 | { type: 'bf:Isnb', value: '978-0-511-51868-3' }, 90 | { type: 'bf:Doi', value: '10.1017/CBO9780511518683.005' } 91 | ]) 92 | }) 93 | 94 | it('sets place, date and publisher fields as provisionActivity on the instance', async () => { 95 | const publications = await reference.publications() 96 | expect(publications).to.have.length(1) 97 | expect(publications[0].date).to.eql('2000') 98 | expect(await publications[0].agent.get('rdfs:label')).to.eql('Cambridge University Press') 99 | expect(await publications[0].place.get('rdfs:label')).to.eql('Cambridge') 100 | }) 101 | 102 | context('with item level information present', () => { 103 | let item 104 | before(async () => { 105 
| const items = await reference.items() 106 | item = items[0] 107 | }) 108 | 109 | it('creates an item', async () => { 110 | expect(item.type).to.eql('bf:Item') 111 | }) 112 | 113 | it('sets relations between the instance and item', async () => { 114 | const itemsInstance = await item.itemOf() 115 | const instancesItems = await reference.items() 116 | expect(itemsInstance.name).to.eql(reference.name) 117 | expect(instancesItems[0].name).to.eql(item.name) 118 | }) 119 | 120 | it('sets url as electronicLocator on item', async () => { 121 | const locator = await item.getElectronicLocator() 122 | expect(locator).to.eql('http://www.crossref.org/deleted_DOI.html') 123 | }) 124 | 125 | it('sets callNumber as shelfMark on item', async () => { 126 | const shelfMark = await item.getShelfMark() 127 | expect(shelfMark).to.eql(null) 128 | }) 129 | }) 130 | 131 | context('with series level information present', () => { 132 | it('creates a new series when series fields have values', async () => { 133 | const series = await reference.hasSeries() 134 | const seriesTitle = await series.getTitle() 135 | expect(seriesTitle).to.eql('Not a real series') 136 | }) 137 | 138 | it('creates series with reciprocal relationship', async () => { 139 | const series = await reference.hasSeries() 140 | const seriesParts = await series.seriesOf() 141 | expect(seriesParts[0].name).to.eql(reference.name) 142 | }) 143 | }) 144 | 145 | it('needs to address bookTitle') 146 | it('needs to address seriesNumber') 147 | it('needs to address volume') 148 | it('needs to address numberOfVolumes') 149 | it('needs to address edition') 150 | it('needs to address pages') 151 | it('needs to address language') 152 | it('needs to address accessDate') 153 | it('needs to address archive') 154 | it('needs to address archiveLocation') 155 | it('needs to address libraryCatalog') 156 | it('needs to address rights') 157 | it('needs to address extra') 158 | }) 159 | 
-------------------------------------------------------------------------------- /test/importers/zoteroApi/journalArticle.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | import { expect } from 'chai' 3 | import { ramHyperReadings, collect } from '../../helpers/general' 4 | 5 | const data = { 6 | itemType: 'journalArticle', 7 | title: 'On Libraries : Introduction', 8 | creators: [ 9 | { 10 | creatorType: 'author', 11 | firstName: 'Misha', 12 | lastName: 'Myers' 13 | }, 14 | { 15 | creatorType: 'author', 16 | firstName: 'Deirdre', 17 | lastName: 'Heddon' 18 | } 19 | ], 20 | abstractNote: 'a note describing the work', 21 | publicationTitle: 'Performance Research', 22 | volume: '22', 23 | issue: '1', 24 | pages: '1-8', 25 | date: 'January 2, 2017', 26 | series: '', 27 | seriesTitle: '', 28 | seriesText: '', 29 | journalAbbreviation: '', 30 | language: '', 31 | DOI: '10.1080/13528165.2017.1285554', 32 | ISSN: '1352-8165', 33 | shortTitle: 'On Libraries', 34 | url: 'https://doi.org/10.1080/13528165.2017.1285554', 35 | accessDate: '2018-07-03 08:44:41', 36 | archive: '', 37 | archiveLocation: '', 38 | libraryCatalog: 'Taylor and Francis+NEJM', 39 | callNumber: '', 40 | rights: '', 41 | extra: '', 42 | tags: [] 43 | } 44 | 45 | describe('importing a journal article from zotero api', () => { 46 | let hr 47 | let reference 48 | before(async () => { 49 | hr = ramHyperReadings() 50 | await hr.importZoteroReference(data) 51 | const collection = await hr.getCollection('default') 52 | const references = await collect(collection.stream()) 53 | if (references.length) reference = references[0] 54 | }) 55 | 56 | it('needs tests for title') 57 | it('needs tests for creators') 58 | it('needs tests for abstractNote') 59 | it('needs tests for publicationTitle') 60 | it('needs tests for volume') 61 | it('needs tests for issue') 62 | it('needs tests for pages') 63 | it('needs tests for date') 64 | it('needs tests for 
series') 65 | it('needs tests for seriesTitle') 66 | it('needs tests for seriesText') 67 | it('needs tests for journalAbbreviation') 68 | it('needs tests for language') 69 | it('needs tests for DOI') 70 | it('needs tests for ISSN') 71 | it('needs tests for shortTitle') 72 | it('needs tests for url') 73 | it('needs tests for accessDate') 74 | it('needs tests for archive') 75 | it('needs tests for archiveLocation') 76 | it('needs tests for libraryCatalog') 77 | it('needs tests for callNumber') 78 | it('needs tests for rights') 79 | it('needs tests for extra') 80 | it('needs tests for tags') 81 | }) 82 | -------------------------------------------------------------------------------- /test/mocha.opts: -------------------------------------------------------------------------------- 1 | --reporter spec 2 | --recursive 3 | --require babel-core/register 4 | -------------------------------------------------------------------------------- /test/util.test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 3 | // import { expect } from 'chai' 4 | // import hyperreadings from '../lib/utils' 5 | 6 | describe('isRdfLiteral', () => { 7 | it('needs to be tested') 8 | }) 9 | describe('fromRdfValue', () => { 10 | it('needs to be tested') 11 | }) 12 | describe('isNode', () => { 13 | it('needs to be tested') 14 | }) 15 | describe('toRdfValue', () => { 16 | it('needs to be tested') 17 | }) 18 | -------------------------------------------------------------------------------- /webpack.config.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var nodeExternals = require('webpack-node-externals') 3 | 4 | module.exports = { 5 | mode: process.env.NODE_ENV, 6 | target: 'node', 7 | entry: './hyper-readings.es.js', 8 | output: { 9 | path: path.resolve(__dirname, 'dist'), 10 | filename: 'hyper-readings.js', 11 | library: 'HyperReadings', 12 | libraryTarget: 'umd', 
13 | umdNamedDefine: true, 14 | globalObject: `typeof self !== 'undefined' ? self : this` 15 | }, 16 | externals: [ 17 | 'fs', 18 | 'crypto', 19 | nodeExternals() 20 | ], 21 | resolve: { 22 | extensions: ['.js'], 23 | modules: [path.resolve(__dirname, 'lib'), 'node_modules'] 24 | }, 25 | module: { 26 | rules: [ 27 | { 28 | test: /\.js$/, 29 | exclude: /node_modules/, 30 | use: { 31 | loader: 'babel-loader' 32 | } 33 | } 34 | ] 35 | } 36 | } 37 | --------------------------------------------------------------------------------