├── .github └── workflow │ └── issue.yml ├── .gitignore ├── LICENSE ├── README.md ├── diff_patch ├── README.md ├── diff_demo.py ├── diff_patch.js └── mongo_demo.py ├── getting_started ├── javascript-client │ ├── Contact.csv │ ├── Employees.csv │ ├── README.md │ ├── add_contractors.js │ ├── insert_data.js │ ├── lesson_1.md │ ├── lesson_2.md │ ├── lesson_3.md │ ├── lesson_4.md │ ├── lesson_5.md │ ├── query_data.js │ ├── schema.js │ └── update_data.js └── python-client │ ├── .TDB │ ├── Contact.csv │ ├── Employees.csv │ ├── Makefile │ ├── README.md │ ├── add_contractors.py │ ├── config.json │ ├── exported_employees.csv │ ├── exported_it_team.csv │ ├── insert_data.py │ ├── lesson_1.md │ ├── lesson_2.md │ ├── lesson_3.md │ ├── lesson_4.md │ ├── lesson_5.md │ ├── lesson_6.md │ ├── lesson_7.md │ ├── query_data.py │ ├── schema.py │ ├── update_data.py │ └── woql_query.py ├── netflix ├── Netflix Tutorial.ipynb ├── netflix.csv └── netflix.py ├── nobel_prize ├── README.md ├── config.json ├── nobel_prize.csv ├── nobel_prize.py └── schema.py ├── nuclear ├── Makefile ├── README.md ├── elements.csv ├── enrichment.md ├── geo_schema.json ├── nuclear.csv ├── nuclear.md ├── nuclear_schema.json ├── scraping.md ├── source.json ├── unit_schema.json └── units.json ├── python-ast ├── python-ast.py └── python-schema.json ├── react-example ├── .gitignore ├── README.md ├── package-lock.json ├── package.json ├── public │ ├── favicon.ico │ ├── index.html │ ├── logo192.png │ ├── logo512.png │ ├── manifest.json │ └── robots.txt └── src │ ├── index-0.js │ ├── index-1.js │ ├── index-2.js │ ├── index-3.js │ ├── index-4.js │ ├── index.css │ └── index.js ├── star-wars ├── readme.md ├── star-wars-dataset.ttl └── star-wars.json ├── stock_index ├── README.md ├── indexData.csv.zip ├── other.csv └── script.py └── terminusBlog ├── LICENSE ├── Makefile ├── README.md ├── blog ├── package-lock.json ├── package.json ├── public │ ├── favicon.ico │ ├── index.html │ ├── logo192.png │ ├── logo512.png │ ├── manifest.json │ └── robots.txt └── src │ ├── App.css │ ├── App.js │ ├── App.test.js │ ├── index.css │ ├── index.js │ ├── reportWebVitals.js │ └── setupTests.js ├── create_from_files.py ├── data.json ├── entries ├── data_layout.md ├── excel_as_code.md ├── graph_representation.md ├── graphql_in_fourteen_days.md ├── iri_generation.md ├── json_as_rdf.md ├── json_diff_and_patch.md ├── many_worlds.md ├── mergeable_records.md ├── mongo_neo4j_terminus.md ├── mutable_graphs.md ├── semantic_future.md ├── star_wars.md ├── syntactic_versioning.md ├── table_diff_is_np_hard.md ├── terminus_blog.md ├── terminusdb_cli.md ├── terminusdb_v1.1.md └── why_terminusx.md └── schema.json /.github/workflow/issue.yml: -------------------------------------------------------------------------------- 1 | name: Labeling new issue 2 | on: 3 | issues: 4 | types: ['opened'] 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: Renato66/auto-label@v2 10 | with: 11 | repo-token: ${{ secrets.GITHUB_TOKEN }} 12 | ignore-comments: true 13 | default-labels: '["triage"]' 14 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .ipynb_checkpoints/ 2 | 3 | .pytest_cache/ 4 | woqlClientP.sublime-project 5 | woqlClientP.sublime-workspace 6 | 7 | tests/__pycache__/ 8 | 9 | build/ 10 | dist/ 11 | .eggs/ 12 | 13 | *.egg-info/ 14 | *.egg 15 | *.py[cod] 16 | __pycache__/ 17 | woqlclient/__pycache__/ 18 | woqlclient/*.egg-info/ 19 | 20 | .eggs/ 
21 | 22 | coverage.xml 23 | report.xml 24 | .coverage 25 | 26 | *.so 27 | *~ 28 | 29 | venv/ 30 | 31 | # due to using tox and pytest 32 | .tox 33 | 34 | *.pyc 35 | *.swp 36 | *.egg 37 | 38 | .DS_Store 39 | 40 | Pipfile.lock 41 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Tutorials for using TerminusDB 2 | 3 | ## Installation 4 | 5 | #### TerminusDB 6 | 7 | Docker image available at https://github.com/terminusdb/terminusdb-bootstrap 8 | 9 | #### Python Client 10 | 11 | Latest version: [![PyPI version shields.io](https://img.shields.io/pypi/v/terminusdb-client.svg?logo=pypi)](https://pypi.python.org/pypi/terminusdb-client/) 12 | 13 | Create new environment (optional but recommended): 14 | 15 | ``` 16 | $ python3 -m venv ~/.virtualenvs/terminusdb 17 | $ source ~/.virtualenvs/terminusdb/bin/activate 18 | ``` 19 | 20 | Install using pip: 21 | 22 | `$ python3 -m pip install terminusdb-client` 23 | 24 | If you are new to TerminusDB/ TerminusCMS and will use Python client, you are recommended to check out the [Getting Started tutorial series](./getting_started/python-client). 25 | 26 | 27 | #### JavaScript Client 28 | 29 | Install using npm following: 30 | https://github.com/terminusdb/terminus-client 31 | 32 | --- 33 | 34 | ## GraphQL Tutorials 35 | 36 | For more details about how to use GraphQL with TerminusDB, please refer to the [GraphQL section of our documentation](https://terminusdb.com/docs/guides/reference-guides/graphql_query). 37 | 38 | ### Building a Blog-Focused CMS with TerminusDB 39 | 40 | This project is designed to show how you can build a custom web-app using TerminusDB from scratch with little effort. 41 | 42 | Details: [README](./terminusBlog) 43 | 44 | ### Playing with Star Wars RDF Data Set Using GraphQL 45 | 46 | Taking you through the process of loading RDF into TerminusDB and then using GraphQL to query the Star Wars data. 47 | 48 | Details: [README](./star-wars) 49 | 50 | ## Getting Started using TerminusDB/ TerminusCMS with the Python client 51 | 52 | A tutorial series to help anyone who's new to TerminusDB/ TerminusCMS to start working using the Python client. 53 | 54 | Details: [README](./getting_started/python-client/README.md) 55 | 56 | ## Getting Started using TerminusDB/ TerminusCMS with the JavaScript client 57 | 58 | A tutorial series to help anyone who's new to TerminusDB/ TerminusCMS to start working using the JavaScript client. 59 | 60 | Details: [README](./getting_started/javascript-client) 61 | 62 | ## Stock Index Data 63 | 64 | An example showing how to load stock index data from CSV. 65 | 66 | Details: [index](./stock_index) 67 | 68 | ## Nuclear Power Plant Example 69 | 70 | An example data product which holds information about all operating nuclear power reactors. 71 | 72 | Details: [index](./nuclear) 73 | 74 | ## Using TerminusCMS in React App 75 | 76 | Use TerminusCMS to build a React app that serves as a simple content management app. 
77 | 78 | Details: [README](./react-example/README.md) 79 | -------------------------------------------------------------------------------- /diff_patch/diff_demo.py: -------------------------------------------------------------------------------- 1 | from pprint import pprint 2 | from terminusdb_client import WOQLClient, DocumentTemplate, WOQLSchema 3 | 4 | # Using TerminusDB Open API for JSON Diff and Patch 5 | 6 | tdb_diff = WOQLClient("https://cloud.terminusdb.com/jsondiff") 7 | tdb_patch = WOQLClient("https://cloud.terminusdb.com/jsonpatch") 8 | 9 | # Or you can use the local version as below 10 | 11 | # client = WOQLClient("http://localhost:6363/") 12 | # client.connect() 13 | 14 | ### Comparing a document object ### 15 | 16 | class Person(DocumentTemplate): 17 | name: str 18 | age: int 19 | 20 | jane = Person(name="Jane", age=18) 21 | janine = Person(name="Janine", age=18) 22 | 23 | result_patch = tdb_diff.diff(jane, janine) 24 | 25 | pprint(result_patch.content) 26 | 27 | # apply result patch to get back final document 28 | 29 | after_patch = tdb_patch.patch(jane, result_patch) 30 | 31 | pprint(after_patch) 32 | assert after_patch == janine._obj_to_dict() 33 | 34 | ### Comapring document objects in json (dict) formats 35 | 36 | jane = { "@id" : "Person/Jane", "@type" : "Person", "name" : "Jane"} 37 | janine = { "@id" : "Person/Jane", "@type" : "Person", "name" : "Janine"} 38 | 39 | result_patch = tdb_diff.diff(jane, janine) 40 | 41 | pprint(result_patch.content) 42 | 43 | # apply result patch to get back final document 44 | 45 | after_patch = tdb_patch.patch(jane, result_patch) 46 | 47 | pprint(after_patch) 48 | assert after_patch == janine 49 | 50 | 51 | ### Comparing TerminusDB schemas (Not working at the moment) 52 | # 53 | # class Company(DocumentTemplate): 54 | # name: str 55 | # director: Person 56 | # 57 | # schema1 = WOQLSchema() 58 | # schema1.add_obj("Person", Person) 59 | # schema2 = WOQLSchema() 60 | # schema2.add_obj("Person", Person) 61 | # schema2.add_obj("Company", Company) 62 | # 63 | # result_patch = tdb_diff.diff(schema1, schema2) 64 | # 65 | # pprint(result_patch.content) 66 | 67 | # apply result patch to get back final document 68 | 69 | # after_patch = tdb_patch.patch(schema1, result_patch) 70 | # 71 | # pprint(after_patch) 72 | # assert after_patch == schema2.to_dict() 73 | 74 | ### Comparing JSON schemas (http://json-schema.org/understanding-json-schema/about.html) (Not working at the moment) 75 | 76 | # schema1 = { 77 | # "type": "object", 78 | # "properties": { 79 | # "name": { "type": "string" }, 80 | # "birthday": { "type": "string", "format": "date" }, 81 | # "address": { "type": "string" }, 82 | # } 83 | # } 84 | # 85 | # schema2 = { 86 | # "type": "object", 87 | # "properties": { 88 | # "first_name": { "type": "string" }, 89 | # "last_name": { "type": "string" }, 90 | # "birthday": { "type": "string", "format": "date" }, 91 | # "address": { 92 | # "type": "object", 93 | # "properties": { 94 | # "street_address": { "type": "string" }, 95 | # "city": { "type": "string" }, 96 | # "state": { "type": "string" }, 97 | # "country": { "type" : "string" } 98 | # } 99 | # } 100 | # } 101 | # } 102 | # 103 | # result_patch = tdb_diff.diff(schema1, schema2) 104 | # 105 | # pprint(result_patch.content) 106 | 107 | # apply result patch to get back final document 108 | 109 | # after_patch = tdb_patch.patch(schema1, result_patch) 110 | # 111 | # pprint(after_patch) 112 | # assert after_patch == schema2 113 | 
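
# Illustrative note (shape inferred from the SwapValue patches that
# diff_patch.js consumes, not from official documentation): for the
# Jane -> Janine changes above, result_patch.content is expected to contain
# something roughly like
#
#     {"name": {"@op": "SwapValue", "@before": "Jane", "@after": "Janine"}}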
--------------------------------------------------------------------------------
/diff_patch/diff_patch.js:
--------------------------------------------------------------------------------
const TerminusClient = require("@terminusdb/terminusdb-client");

// Note: `db` and `ObjectId` are not defined in this file; they are the MongoDB
// shell globals, so run this in a mongosh session (or adapt the `db` calls to
// your own MongoDB Node.js driver connection).
db.inventory.deleteMany({});

const mongoPatch = function(patch){
    let query = {};
    let set = {};

    if('object' === typeof patch){
        for(var key in patch){
            const entry = patch[key];

            if( entry['@op'] == 'SwapValue'){
                query[key] = entry['@before'];
                set[key] = entry['@after'];
            }else if(key === '_id'){
                query[key] = ObjectId(entry);
            }else{
                let [sub_query,sub_set] = mongoPatch(entry);
                query[key] = sub_query;
                if(sub_set !== null){
                    set[key] = sub_set;
                }
            }
        }
        return [query,set]
    }else{
        return [patch,null]
    }
}

var client = new TerminusClient.WOQLClient("http://127.0.0.1:6363")
db.inventory.insertMany([
    { name: "Jane", age:18 },
    { name: "Janine", age:18 }
]);

const jane = db.inventory.findOne( {name : "Jane" });
const janine = JSON.parse(JSON.stringify(jane));
janine.name = "Janine";

let patchPromise = client.getDiff(jane,janine,{});
patchPromise.then( patch => {
    let [q,s] = mongoPatch(patch)
    console.log([q,s]);

    const res = db.inventory.updateOne(q, { $set : s});
    console.log(res);
    if (res.modifiedCount == 1){
        console.log("yay!")
    }else{
        console.log("boo!")
    }
    console.log(patch);
});

--------------------------------------------------------------------------------
/diff_patch/mongo_demo.py:
--------------------------------------------------------------------------------
import datetime as dt
import os
from pprint import pprint
from terminusdb_client import WOQLClient

from pymongo import MongoClient
import pymongo

# Provide the MongoDB Atlas URL in an environment variable to connect Python to MongoDB using pymongo.
# Create a connection using MongoClient.
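# For example (placeholder values only, not a real cluster):
#   export MONGO_CONNECTION_STRING="mongodb+srv://<user>:<password>@<cluster>.mongodb.net/"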
11 | 12 | client = MongoClient(os.environ["MONGO_CONNECTION_STRING"]) 13 | 14 | # Create the database for our example (we will use the same database throughout the tutorial 15 | connection = client['user_shopping_list'] 16 | 17 | collection_name = connection["user_1_items"] 18 | 19 | item_1 = { 20 | "_id" : "U1IT00001", 21 | "item_name" : "Blender", 22 | "max_discount" : "10%", 23 | "batch_number" : "RR450020FRG", 24 | "price" : 340, 25 | "category" : "kitchen appliance" 26 | } 27 | 28 | item_2 = { 29 | "_id" : "U1IT00002", 30 | "item_name" : "Egg", 31 | "category" : "food", 32 | "quantity" : 12, 33 | "price" : 36, 34 | "item_description" : "brown country eggs" 35 | } 36 | collection_name.insert_many([item_1,item_2]) 37 | 38 | expiry_date = '2021-07-13T00:00:00.000' 39 | expiry = dt.datetime.fromisoformat(expiry_date) 40 | item_3 = { 41 | "item_name" : "Bread", 42 | "quantity" : 2, 43 | "ingredients" : "all-purpose flour", 44 | "expiry_date" : expiry 45 | } 46 | collection_name.insert_one(item_3) 47 | 48 | # Now I want to change up item 1 49 | 50 | new_item_1 = { 51 | "_id" : "U1IT00001", 52 | "item_name" : "Blender", 53 | "max_discount" : "50%", 54 | "batch_number" : "RR450020FRG", 55 | "price" : 450, 56 | "category" : "kitchen appliance" 57 | } 58 | 59 | # But before we update it in Mongo, I want to review the changes first 60 | 61 | # Using TerminusDB Open API for JSON Diff 62 | tdb_endpoint = WOQLClient("https://cloud.terminusdb.com/jsondiff") 63 | 64 | # Find the item back from database in case someone already changed it 65 | item_1 = collection_name.find_one({"item_name" : "Blender"}) 66 | patch = tdb_endpoint.diff(item_1, new_item_1) 67 | 68 | pprint(patch.content) 69 | 70 | # If we apprive, then proceed 71 | collection_name.update_one(patch.before, {"$set": patch.update}) 72 | 73 | # Working on more complicated objects 74 | 75 | expiry_date = '2021-07-15T00:00:00.000' 76 | expiry = dt.datetime.fromisoformat(expiry_date) 77 | new_item_3 = { 78 | "item_name" : "Bread", 79 | "quantity" : 5, 80 | "ingredients" : "all-purpose flour", 81 | "expiry_date" : expiry 82 | } 83 | 84 | item_3 = collection_name.find_one({"item_name" : "Bread"}) 85 | item_id = item_3.pop('_id') # We wnat to pop it out and optionally we can add it back 86 | patch = tdb_endpoint.diff(item_3, new_item_3) 87 | 88 | pprint(patch.content) 89 | 90 | # Add _id back, though it still works without 91 | before = patch.before 92 | before['_id'] = item_id 93 | collection_name.update_one(before, {"$set": patch.update}) 94 | -------------------------------------------------------------------------------- /getting_started/javascript-client/Contact.csv: -------------------------------------------------------------------------------- 1 | Employee id,Contact number,Home address,Postcode 2 | 001,(01986) 113367,"1 Market Place, Bungay",NR35 1AP 3 | 002,(01925) 682388,"200 Manchester Road, Woolston",WA1 4HJ 4 | 003,(01274) 708080,"139 Otley Road, Shipley",BD18 2PT 5 | 004,(0161) 532 7302,"2 Ansdell Road, Stockport",SK5 6SY -------------------------------------------------------------------------------- /getting_started/javascript-client/Employees.csv: -------------------------------------------------------------------------------- 1 | Employee id,Name,Title,Team,Manager 2 | 001,Destiny Norris,Marketing Manager,Marketing, 3 | 002,Darci Prosser,Creative Writer,Marketing,001 4 | 003,Alanah Bloggs,Frontend Developer,IT,004 5 | 004,Fabian Dalby,Web Service Manager,IT, 
-------------------------------------------------------------------------------- /getting_started/javascript-client/README.md: -------------------------------------------------------------------------------- 1 | # Getting started using TerminusDB Javascript Client 2 | 3 | This is a step by step tutorial to get you started using TerminusDB/ TerminusCMS for the first time. This will cover all the basics that you can work with TerminusDB/ TerminusCMS using the Javascript client. 4 | 5 | - [Lesson 1 - Installing, start project and create an empty database with schema](lesson_1.md) 6 | - [Lesson 2 - Importing data form CSV using Javascript script](lesson_2.md) 7 | - [Lesson 3 - Update and import new data that links to old data](lesson_3.md) 8 | - [Lesson 4 - Query on the database and get result back as json](lesson_4.md) 9 | - [Lesson 5 - Version control: time travel, branching and rebase](lesson_5.md) -------------------------------------------------------------------------------- /getting_started/javascript-client/add_contractors.js: -------------------------------------------------------------------------------- 1 | const TerminusClient = require("@terminusdb/terminusdb-client"); 2 | 3 | // TODO: Change teamname and username 4 | const teamName = "yourTeam" 5 | const username = "yourUser" 6 | 7 | const client = new TerminusClient.WOQLClient( 8 | `https://cloud.terminusdb.com/${teamName}/`, 9 | { user: username, organization: teamName , db:"GettingStartedDB" } 10 | ); 11 | 12 | //Assign your key to environment variable TERMINUSDB_ACCESS_TOKEN 13 | client.setApiKey(process.env.TERMINUSDB_ACCESS_TOKEN); 14 | 15 | const getCommitHistory = async (branch) => { 16 | 17 | const woqlLib = TerminusClient.WOQL; 18 | const commitQuery = woqlLib.lib().commits(branch); 19 | 20 | const res = await client.query(commitQuery); 21 | console.log(res.bindings); 22 | return res.bindings 23 | 24 | }; 25 | 26 | const addContractors = async () => { 27 | 28 | const rhys = { 29 | "@type": "Employee", 30 | employee_id: "006", 31 | name: "Rhys Arnold", 32 | title: "UX Designer", 33 | team: "IT", 34 | contact_number: "078 3951 7569", 35 | address: { 36 | "@type": "Address", 37 | postcode: "DG4 2ZQ", 38 | street: "Helland Bridge", 39 | street_num: 1, 40 | town: "Ulzieside", 41 | }, 42 | }; 43 | 44 | const maya = { 45 | "@type": "Employee", 46 | employee_id: "007", 47 | name: "Maya O'Brien", 48 | title: "Creative Content Creator", 49 | team: "Marketing", 50 | contact_number: "078 1788 9177", 51 | address: { 52 | "@type": "Address", 53 | postcode: "GU3 3AF", 54 | street: "Tadcaster Rd", 55 | street_num: 24, 56 | town: "Pitch Place", 57 | }, 58 | }; 59 | await client.addDocument([rhys, maya],{},"","Adding contractors"); 60 | }; 61 | 62 | 63 | const runScript = async () => { 64 | try{ 65 | 66 | const defaultBranches = await client.getBranches(); 67 | console.log("Default Branches: ", defaultBranches); 68 | 69 | // Create new contractor branch 70 | await client.branch("contractors"); 71 | console.log("Branch created successfully!") 72 | 73 | const newBranches = await client.getBranches(); 74 | console.log("New Branches: ", newBranches); 75 | 76 | // checkout to new branch contractors 77 | client.checkout("contractors"); 78 | 79 | await addContractors(); 80 | console.log("Added Contractors successfully!") 81 | 82 | 83 | console.log("Main Commit History: ") 84 | await getCommitHistory("main"); 85 | 86 | 87 | console.log("Contractors Commit History: ") 88 | await getCommitHistory("contractors"); 89 | 90 | client.checkout("main"); 
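    // Back on main, rebase it from the contractors branch. Since main has not
    // moved in this script since the branch was created, this simply brings the
    // "Adding contractors" commit onto main.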
91 | 92 | await client.rebase({rebase_from: `${teamName}/GettingStartedDB/local/branch/contractors/`, message: "Merging from contractors" , author: "USer"}); 93 | console.log("Rebase done successfully!"); 94 | 95 | const mainCommits = await getCommitHistory("main"); 96 | 97 | //We would like to keep the commits up to the `Adding Ethan` one 98 | const mainCommitObj = mainCommits.find(item=>item["Message"]["@value"] === 'Adding ethan') 99 | const oldMainCommitID = mainCommitObj['Commit ID']['@value'] 100 | 101 | console.log('Main Commit ID',oldMainCommitID ) 102 | 103 | await client.resetBranch("main", oldMainCommitID); 104 | console.log("Reset done successfully!"); 105 | 106 | console.log("Main Commit History: ") 107 | await getCommitHistory("main"); 108 | }catch(err){ 109 | console.log(err.message) 110 | } 111 | } 112 | runScript(); -------------------------------------------------------------------------------- /getting_started/javascript-client/insert_data.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | const path = require("path"); 3 | const csv = require("fast-csv"); 4 | 5 | const TerminusClient = require("@terminusdb/terminusdb-client"); 6 | 7 | 8 | // TODO: Change teamname and username 9 | const teamName = "yourTeam" 10 | const username = "yourUser" 11 | 12 | const client = new TerminusClient.WOQLClient( 13 | `https://cloud.terminusdb.com/${teamName}/`, 14 | { user: username, organization: teamName , db:"GettingStartedDB" } 15 | ); 16 | 17 | //Assign your key to environment variable TERMINUSDB_ACCESS_TOKEN 18 | client.setApiKey(process.env.TERMINUSDB_ACCESS_TOKEN); 19 | 20 | const contact_numbers = {}; 21 | const addresses = {}; 22 | const employees = []; 23 | 24 | // function to load and parse huge CSV files 25 | const readCsv = (fileName) => { 26 | // TODO: change the directoryPath to point were the csv are stored 27 | return new Promise((resolve, reject) => { 28 | const data = []; 29 | 30 | fs.createReadStream( 31 | path.resolve( 32 | __dirname, 33 | fileName 34 | ) 35 | ) 36 | .pipe(csv.parse({ headers: true, ignoreEmpty: true })) 37 | .on("error", reject) 38 | .on("data", (row) => data.push(row)) 39 | .on("end", () => { 40 | console.log(`Parsed ${data.length} rows`); 41 | resolve(data); 42 | }); 43 | }); 44 | }; 45 | 46 | const insertData = async () => { 47 | // read Contact.csv 48 | let resultContacts = await readCsv("Contact.csv"); 49 | resultContacts.forEach((element) => { 50 | contact_numbers[element["Employee id"]] = element["Contact number"]; 51 | 52 | let street = element["Home address"].split(",")[0]; 53 | let street_num = Number(street.split(" ")[0]); 54 | let street_name = street.split(" ").slice(1).join(" "); 55 | let town = element["Home address"].split(",")[1].substr(1); 56 | 57 | addresses[element["Employee id"]] = { 58 | "@type": "Address", 59 | street: street_name, 60 | street_num, 61 | town, 62 | postcode: element["Postcode"], 63 | }; 64 | }); 65 | 66 | let resultEmployees = await readCsv("Employees.csv"); 67 | resultEmployees.forEach((element) => { 68 | let employee = { 69 | "@type": "Employee", 70 | name: element["Name"], 71 | title: element["Title"], 72 | team: element["Team"], 73 | address: addresses[element["Employee id"]], 74 | contact_number: contact_numbers[element["Employee id"]], 75 | employee_id: element["Employee id"], 76 | }; 77 | 78 | if (element["Manager"] !== "") 79 | employee.manager = "Employee/" + element["Manager"]; 80 | 81 | employees.push(employee); 82 | }); 83 | 
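  // At this point `employees` holds one object per row of Employees.csv, with
  // managers stored as document id strings such as "Employee/001" (when the
  // CSV row has one).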
console.log("Inserting Employees ", employees); 84 | 85 | client 86 | .addDocument(employees) 87 | .then((res) => { 88 | console.log("Employees inserted successfully", res); 89 | }) 90 | .catch((error) => { 91 | console.log(error); 92 | }); 93 | 94 | const result = await client.getDocument({"as_list":true}); 95 | console.log(result); 96 | }; 97 | 98 | insertData(); 99 | -------------------------------------------------------------------------------- /getting_started/javascript-client/lesson_2.md: -------------------------------------------------------------------------------- 1 | # Lesson 2 - Importing data from CSV using JavaScript script 2 | 3 | ## Creating the JavaScript script 4 | 5 | Let's start a new `.js` file called [insert_data.js](insert_data.js). You can copy and paste the one we have in this repo or build one yourself. We will explain the one we have so you know what it does. 6 | 7 | In the first half of the script we have to manage and import the data from the CSV. In Node.JS there is the [`fast-csv` package](https://www.npmjs.com/package/fast-csv) that helps reading of CSV files. 8 | 9 | First, install the package: 10 | 11 | ```bash 12 | npm i fast-csv 13 | ``` 14 | 15 | The script will import this package with some others: 16 | 17 | ```javascript 18 | const fs = require("fs"); 19 | const path = require("path"); 20 | const csv = require("fast-csv"); 21 | ``` 22 | 23 | It will also import `WOQLClient` which is the client that communitcates with the TerminusDB/TerminusCMS: 24 | 25 | ```javascript 26 | const TerminusClient = require("@terminusdb/terminusdb-client"); 27 | ``` 28 | 29 | At the top of the script, we prepare a few empty objects and list to hold the data, we use objects as the keys can be the `Employees id` for easy mapping: 30 | 31 | ```javascript 32 | const contact_numbers = {}; 33 | const addresses = {}; 34 | const employees = []; 35 | ``` 36 | 37 | The goal is to populate the `employees` list with the `Employee` objects. To help, we also need `contact_numbers` to hold the contact numbers while reading the `Contact.csv`. The rest of the information in `Contact.csv` will be used to construct `Address` objects and stored in `addresses`. We store the id at first and make the linking later because the manager of that employee may have not been "created" yet. 
38 | 39 | Then we go ahead and read the CSVs and do the corresponding data managing: 40 | 41 | ```javascript 42 | // function to load and parse huge CSV files 43 | const readCsv = (fileName) => { 44 | // TODO: change the directoryPath to point were the csv are stored 45 | return new Promise((resolve, reject) => { 46 | const data = []; 47 | 48 | fs.createReadStream( 49 | path.resolve( 50 | __dirname, 51 | fileName 52 | ) 53 | ) 54 | .pipe(csv.parse({ headers: true, ignoreEmpty: true })) 55 | .on("error", reject) 56 | .on("data", (row) => data.push(row)) 57 | .on("end", () => { 58 | console.log(`Parsed ${data.length} rows`); 59 | resolve(data); 60 | }); 61 | }); 62 | }; 63 | 64 | const insertData = async () => { 65 | 66 | // read Contact.csv 67 | let resultContacts = await readCsv("Contact.csv"); 68 | resultContacts.forEach((element) => { 69 | contact_numbers[element["Employee id"]] = element["Contact number"]; 70 | 71 | let street = element["Home address"].split(",")[0]; 72 | let street_num = Number(street.split(" ")[0]); 73 | let street_name = street.split(" ").slice(1).join(" "); 74 | let town = element["Home address"].split(",")[1].substr(1); 75 | 76 | addresses[element["Employee id"]] = { 77 | "@type": "Address", 78 | street: street_name, 79 | street_num, 80 | town, 81 | postcode: element["Postcode"], 82 | }; 83 | }); 84 | 85 | // read Employees.csv 86 | let resultEmployees = await readCsv("Employees.csv"); 87 | resultEmployees.forEach((element) => { 88 | let employee = { 89 | "@type": "Employee", 90 | name: element["Name"], 91 | title: element["Title"], 92 | team: element["Team"], 93 | address: addresses[element["Employee id"]], 94 | contact_number: contact_numbers[element["Employee id"]], 95 | employee_id: element["Employee id"], 96 | }; 97 | 98 | if (element["Manager"] !== "") 99 | employee.manager = "Employee/" + element["Manager"]; 100 | 101 | employees.push(employee); 102 | }); 103 | }; 104 | ``` 105 | 106 | The `employees` list should now be populated with the `Employee` objects, ready to be inserted into the database. 107 | 108 | ## Using the TerminusDB JavaScript Client 109 | 110 | The script inserts all `Employees` into the database using the TerminusDB JavaScript Client. To do this, we need to create a client with our cloud endpoint: 111 | 112 | ```javascript 113 | // TODO: Change teamname and username 114 | const teamName = "teamname" 115 | const username = "username" 116 | 117 | const client = new TerminusClient.WOQLClient( 118 | `https://cloud.terminusdb.com/${teamName}/`, 119 | { user: username, organization: teamName , db:"GettingStartedDB" } 120 | ); 121 | 122 | // If you are using TerminusCMS 123 | client.setApiKey(process.env.TERMINUSDB_ACCESS_TOKEN); 124 | ``` 125 | 126 | If you are using TerminusCMS, you can find the your endpoint, team, and API token in the [TerminusCMS dashboard](https://dashboard.terminusdb.com/) under profile. 127 | 128 | The last thing to do is to insert the documents: 129 | 130 | ```javascript 131 | 132 | client.addDocument(employees).then((res)=>{ 133 | console.log("Employees inserted successfully",res); 134 | }).catch((error) => { 135 | console.log(error); 136 | }); 137 | 138 | ``` 139 | 140 | ## Running the script 141 | 142 | Run the script in the terminal. Make sure you are in the Node.JS environment that has `terminusdb-client` installed. 
143 | 144 | ``` 145 | $ node insert_data.js 146 | ``` 147 | 148 | To check the data was insert correctly, use the `getDocument` function: 149 | 150 | ```javascript 151 | const result = await client.getDocument({"as_list":true}); 152 | console.log(result); 153 | ``` 154 | 155 | Or if using TerminusCMS, you can check it in the [TerminusCMS dashboard](https://dashboard.terminusdb.com/) 156 | 157 | --- 158 | 159 | [Lesson 3 - Update and import new data that links to old data](lesson_3.md) 160 | -------------------------------------------------------------------------------- /getting_started/javascript-client/lesson_3.md: -------------------------------------------------------------------------------- 1 | # Lesson 3 - Update and import new data that links to old data 2 | 3 | Remember the company phonebook stored in TerminusDB? It has been a few months and there is a new recruit: 4 | 5 | | Employee id | Name | Title | Team | Manager | 6 | | ----------- | -------------- | ------------------- | ----------- | ----------- | 7 | | 005 | Ethan Abbott | Backend Developer | IT | 004 | 8 | 9 | | Employee id | Contact number | Home address | Postcode | 10 | | ----------- | --------------- | ----------------------------- | -------- | 11 | | 005 | 070 7796 8035 | 84 Shore Street, Stoer | IV27 2TG | 12 | 13 | Also, the Marketing Manager Destiny has moved to a new address: 14 | 15 | | Employee id | Contact number | Home address | Postcode | 16 | | ----------- | --------------- | ----------------------------- | -------- | 17 | | 001 | (01986) 113367 | 73 Lairg Road, Newbigging | PH12 3RP | 18 | 19 | How are we going to update the records? 20 | 21 | ## Getting data objects back from TerminusDB/ TerminusCMS 22 | 23 | Let us first look at how to update Destiny's Address. We will use a script called [update_data.js](update_data.js). 24 | 25 | Let's examine the parts of the script. We import the Employee document that represents Destiny. Since we know the id, we will use getDocument: 26 | 27 | ```javascript 28 | const destiny = await client.getDocument({"id":"Employee/001"}); 29 | ``` 30 | 31 | ## Update a document 32 | 33 | We know `destiny` is an `Employee` object so we can go ahead and update the details: 34 | 35 | ```javascript 36 | // will have to delete "@id" because the database will create a new one 37 | delete destiny.address['@id']; 38 | 39 | destiny.address.postcode = "PH12 3RP"; 40 | destiny.address.street = "Lairg Road"; 41 | destiny.address.street_num = 73; 42 | destiny.address.town = "Newbigging"; 43 | 44 | ``` 45 | 46 | The script then sends `destiny` back to the database with `updateDocument`. The difference between `addDocument` and `updateDocument` is that if an object already exists `updateDocument` will replace the old with the new. 47 | 48 | ```javascript 49 | await client.updateDocument(destiny); 50 | ``` 51 | 52 | ## Linking a new document to an old document 53 | 54 | Now let's work on our new recruit. We now create `ethan` and link Ethan's manager as `Employee/004`: 55 | 56 | ```javascript 57 | const ethan = { 58 | "@type": "Employee", 59 | "employee_id": "005", 60 | name: "Ethan Abbott", 61 | title: "Backend Developer", 62 | team: "IT", 63 | contact_number: "070 7796 8035", 64 | address: { 65 | "@type": "Address", 66 | postcode: "IV27 2TG", 67 | street: "Shore Street", 68 | street_num: 84, 69 | town: "Stoer" 70 | }, 71 | manager: "Employee/004", 72 | } 73 | ``` 74 | 75 | All is ready so we'll put `ethan` into the database. 
Use `addDocument` to insert `ethan`: 76 | 77 | ```javascript 78 | await client.addDocument(ethan); 79 | ``` 80 | 81 | Before running the script ensure to set the end point, team and user credentials. 82 | 83 | Run the scripts: 84 | 85 | `$ node update_data.js` 86 | 87 | Use the terminal to check if the database is up-to-date: 88 | 89 | ```javascript 90 | const result = await client.getDocument({"as_list":true}); 91 | console.log(result); 92 | ``` 93 | 94 | If you are using TerminusCMS, you can also check it in the dashboard. 95 | 96 | --- 97 | 98 | [Lesson 4 - Query the database and get result back as JSON](lesson_4.md) 99 | -------------------------------------------------------------------------------- /getting_started/javascript-client/lesson_4.md: -------------------------------------------------------------------------------- 1 | # Lesson 4 - Query the database and get result back as JSON 2 | 3 | In previous lessons, we have learnt how to build a schema and import data. Now the database has all the data we need, we will learn how to query the database to get information out. 4 | 5 | 6 | ## Query data with a JavaScript script 7 | 8 | Let's have a look at the [query_data.js](query_data.js) script. 9 | 10 | Querying is achieved with `queryDocument`, you need to provide a JSON template that has `type` and the specific requirement(s). In our case we're looking for `"team": "it"` or `"team": "marketing"`. 11 | 12 | ```javascript 13 | const team_it = await client.queryDocument( 14 | { type: "Employee", query: { team: "IT" } }, 15 | { as_list: true } 16 | ); 17 | 18 | const team_marketing = await client.queryDocument( 19 | { type: "Employee", query: { team: "Marketing" } }, 20 | { as_list: true } 21 | ); 22 | ``` 23 | 24 | This query shows how to find an employee who lives in `Stockport`. 
25 | 26 | ```javascript 27 | const result = await client.queryDocument({ type: "Employee", query: { address: { town: "Stockport" } }}); 28 | ``` 29 | 30 | We won't spoil the results for you, you have to find it out yourself :-) 31 | 32 | `$ node query_data.js` 33 | 34 | --- 35 | 36 | [Lesson 5 - Version control: time travel, branching and rebase](lesson_5.md) 37 | -------------------------------------------------------------------------------- /getting_started/javascript-client/query_data.js: -------------------------------------------------------------------------------- 1 | const TerminusClient = require("@terminusdb/terminusdb-client"); 2 | 3 | // TODO: Change teamname and username 4 | const teamName = "yourTeam" 5 | const username = "yourUser" 6 | 7 | const client = new TerminusClient.WOQLClient( 8 | `https://cloud.terminusdb.com/${teamName}/`, 9 | { user: username, organization: teamName , db:"GettingStartedDB" } 10 | ); 11 | 12 | //Assign your key to environment variable TERMINUSDB_ACCESS_TOKEN 13 | client.setApiKey(process.env.TERMINUSDB_ACCESS_TOKEN); 14 | 15 | const query_data = async () => { 16 | const team_it = await client.getDocument( 17 | { type: "Employee", query: { team: "IT" }, as_list:true} 18 | ); 19 | const team_marketing = await client.getDocument( 20 | { type: "Employee", query: { team: "Marketing" },as_list: true} 21 | ); 22 | 23 | console.log("There are " + team_it.length + " members in team it"); 24 | console.log("There are " + team_marketing.length + " members in team marketing"); 25 | 26 | // Get to know who lives in Stockport town 27 | const result = await client.getDocument({ type: "Employee", query: { address: { town: "Stockport" } }}); 28 | console.log(result); 29 | }; 30 | 31 | query_data(); 32 | -------------------------------------------------------------------------------- /getting_started/javascript-client/schema.js: -------------------------------------------------------------------------------- 1 | const TerminusClient = require("@terminusdb/terminusdb-client"); 2 | 3 | // TODO: Change teamname and username 4 | const teamName = "yourTeam" 5 | const username = "yourUser" 6 | 7 | const client = new TerminusClient.WOQLClient( 8 | `https://cloud.terminusdb.com/${teamName}/`, 9 | { user: username, organization: teamName } 10 | ); 11 | 12 | // If you are using TerminusCMS you need to generate you api key 13 | // https://terminusdb.com/docs/terminuscms/get-api-key here the documentation 14 | client.setApiKey(process.env.TERMINUSDB_ACCESS_TOKEN); 15 | 16 | const address_schema = { 17 | "@id": "Address", 18 | "@key": { 19 | "@type": "ValueHash" 20 | }, 21 | "@subdocument": [], 22 | "@type": "Class", 23 | "postcode": "xsd:string", 24 | "street": "xsd:string", 25 | "street_num": "xsd:integer", 26 | "town": "xsd:string" 27 | }; 28 | 29 | const employee_schema = { 30 | "@id": "Employee", 31 | "@key": { 32 | "@type": "Lexical", 33 | "@fields": ["employee_id"] 34 | }, 35 | "@type": "Class", 36 | "employee_id": "xsd:string", 37 | "address": "Address", 38 | "contact_number": "xsd:string", 39 | "manager": { 40 | "@class": "Employee", 41 | "@type": "Optional" 42 | }, 43 | "name": "xsd:string", 44 | "team": "Team", 45 | "title": "xsd:string" 46 | }; 47 | 48 | const team_schema = { 49 | "@id": "Team", 50 | "@type": "Enum", 51 | "@value": [ 52 | "Marketing", 53 | "IT" 54 | ] 55 | }; 56 | 57 | const createDatabaseAndSchema = async () => { 58 | 59 | await client.createDatabase("GettingStartedDB", { 60 | label: "GettingStartedDB", 61 | comment: "Created new GettingStartedDB", 
62 | }); 63 | console.log("Database created successfully!"); 64 | 65 | client.db("GettingStartedDB"); 66 | 67 | // insert all the schema documents 68 | const schemas = [address_schema, team_schema, employee_schema]; 69 | 70 | await client.addDocument(schemas, { graph_type: "schema" },"","Inserting schema"); 71 | console.log("Schema inserted successfully!"); 72 | 73 | // Get commit history 74 | const woqlLib = TerminusClient.WOQL; 75 | const commitQuery = woqlLib.lib().commits(); 76 | 77 | const response= await client.query(commitQuery); 78 | console.log(response.bindings); 79 | 80 | // Get all schema documents 81 | const result = await client.getDocument({"graph_type":"schema","as_list":true}); 82 | console.log(result); 83 | }; 84 | 85 | createDatabaseAndSchema(); 86 | -------------------------------------------------------------------------------- /getting_started/javascript-client/update_data.js: -------------------------------------------------------------------------------- 1 | const TerminusClient = require("@terminusdb/terminusdb-client"); 2 | 3 | 4 | // TODO: Change teamname and username 5 | const teamName = "yourTeam" 6 | const username = "yourUser" 7 | 8 | const client = new TerminusClient.WOQLClient( 9 | `https://cloud.terminusdb.com/${teamName}/`, 10 | { user: username, organization: teamName , db:"GettingStartedDB" } 11 | ); 12 | //Assign your key to environment variable TERMINUSDB_ACCESS_TOKEN 13 | client.setApiKey(process.env.TERMINUSDB_ACCESS_TOKEN); 14 | 15 | const updateAndLinkData = async () => { 16 | const destiny = await client.getDocument({"id":"Employee/001"}); 17 | 18 | // have to delete "@id" because database will create a new one 19 | delete destiny.address['@id']; 20 | 21 | destiny.address.postcode = "PH12 3RP"; 22 | destiny.address.street = "Lairg Road"; 23 | destiny.address.street_num = 73; 24 | destiny.address.town = "Newbigging"; 25 | 26 | await client.updateDocument(destiny,{},"","updating 001"); 27 | 28 | const ethan = { 29 | "@type": "Employee", 30 | "employee_id": "005", 31 | name: "Ethan Abbott", 32 | title: "Backend Developer", 33 | team: "IT", 34 | contact_number: "070 7796 8035", 35 | address: { 36 | "@type": "Address", 37 | postcode: "IV27 2TG", 38 | street: "Shore Street", 39 | street_num: 84, 40 | town: "Stoer" 41 | }, 42 | manager: "Employee/004", 43 | } 44 | await client.addDocument(ethan,{},"","Adding ethan"); 45 | 46 | const result = await client.getDocument({"as_list":true}); 47 | console.log(result); 48 | 49 | } 50 | 51 | updateAndLinkData(); -------------------------------------------------------------------------------- /getting_started/python-client/.TDB: -------------------------------------------------------------------------------- 1 | {"branch": "main", "ref": null} -------------------------------------------------------------------------------- /getting_started/python-client/Contact.csv: -------------------------------------------------------------------------------- 1 | Employee id,Contact number,Home address,Postcode 2 | 001,(01986) 113367,"1 Market Place, Bungay",NR35 1AP 3 | 002,(01925) 682388,"200 Manchester Road, Woolston",WA1 4HJ 4 | 003,(01274) 708080,"139 Otley Road, Shipley",BD18 2PT 5 | 004,(0161) 532 7302,"2 Ansdell Road, Stockport",SK5 6SY -------------------------------------------------------------------------------- /getting_started/python-client/Employees.csv: -------------------------------------------------------------------------------- 1 | Employee id,Name,Title,Team,Manager 2 | 001,Destiny Norris,Marketing 
Manager,Marketing, 3 | 002,Darci Prosser,Creative Writer,Marketing,001 4 | 003,Alanah Bloggs,Frontend Developer,IT,004 5 | 004,Fabian Dalby,Web Service Manager,IT, -------------------------------------------------------------------------------- /getting_started/python-client/Makefile: -------------------------------------------------------------------------------- 1 | runproject: 2 | tdbpy commit 3 | tdbpy importcsv Employees.csv --classname EmployeesFromCSV --id "Employee id" -e Manager -m "Import Employees from CSV" 4 | python insert_data.py 5 | python update_data.py 6 | tdbpy branch contractors 7 | python add_contractors.py 8 | -------------------------------------------------------------------------------- /getting_started/python-client/README.md: -------------------------------------------------------------------------------- 1 | # Getting started using TerminusDB Python Client 2 | 3 | > **_NOTE:_** from version 10.1.0 the cli command is `tdbpy` instead of `terminusdb` 4 | 5 | This is a step by step tutorial to get you started using TerminusDB/ TerminusCMS for the first time. This will cover all the basics that you can work with TerminusDB/ TerminusCMS using the Python client. 6 | 7 | - [Lesson 1 - Installing, start project and create an empty database with schema](lesson_1.md) 8 | - [Lesson 2 - Importing a CSV into the database](lesson_2.md) 9 | - [Lesson 3 - Importing data form Python script](lesson_3.md) 10 | - [Lesson 4 - Update and import new data that links to old data](lesson_4.md) 11 | - [Lesson 5 - Query on the database and get result back as CSV or DataFrame](lesson_5.md) 12 | - [Lesson 6 - Version control: time travel, branching and rebase](lesson_6.md) 13 | - [Lesson 7 - Logical query using triple and WOQL](lesson_7.md) 14 | -------------------------------------------------------------------------------- /getting_started/python-client/add_contractors.py: -------------------------------------------------------------------------------- 1 | from terminusdb_client import Client 2 | from terminusdb_client.woqlschema import WOQLSchema 3 | 4 | # For Terminus X, use the following 5 | # client = Client("https://cloud.terminusdb.com//") 6 | # client.connect(db="demo_workshop", team="", use_token=True) 7 | 8 | client = Client("http://127.0.0.1:6363/") 9 | client.connect(db="getting_started", branch="contractors") 10 | 11 | data_schema = WOQLSchema() 12 | data_schema.from_db(client) 13 | 14 | Employee = data_schema.object.get("Employee") 15 | Address = data_schema.object.get("Address") 16 | Team = data_schema.object.get("Team") 17 | 18 | # Contractor 1 19 | 20 | rhys_address = Address( 21 | postcode="DG4 2ZQ", street="Helland Bridge", street_num=1, town="Ulzieside" 22 | ) 23 | 24 | rhys = Employee( 25 | _id="Employee/006", 26 | name="Rhys Arnold", 27 | title="UX Designer", 28 | team=Team.it, 29 | contact_number="078 3951 7569", 30 | address=rhys_address, 31 | ) 32 | 33 | # Contractor 2 34 | 35 | maya_address = Address( 36 | postcode="GU3 3AF", street="Tadcaster Rd", street_num=24, town="Pitch Place" 37 | ) 38 | 39 | maya = Employee( 40 | _id="Employee/007", 41 | name="Maya O'Brien", 42 | title="Creative Content Creator", 43 | team=Team.marketing, 44 | contact_number="078 1788 9177", 45 | address=maya_address, 46 | ) 47 | 48 | client.update_document([rhys, maya], commit_msg="Adding contractors") 49 | -------------------------------------------------------------------------------- /getting_started/python-client/config.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "database": "getting_started", 3 | "endpoint": "http://127.0.0.1:6363/", 4 | "team": "admin" 5 | } 6 | -------------------------------------------------------------------------------- /getting_started/python-client/exported_employees.csv: -------------------------------------------------------------------------------- 1 | Document id,contact_number,name,team,title,manager,address.postcode,address.street,address.street_num,address.town 2 | Employee/001,(01986) 113367,Destiny Norris,marketing,Marketing Manager,,PH12 3RP,Lairg Road,73,Newbigging 3 | Employee/002,(01925) 682388,Darci Prosser,marketing,Creative Writer,,WA1 4HJ,Manchester Road,200, Woolston 4 | Employee/003,(01274) 708080,Alanah Bloggs,it,Frontend Developer,Employee/004,BD18 2PT,Otley Road,139, Shipley 5 | Employee/004,(0161) 532 7302,Fabian Dalby,it,Web Service Manager,,SK5 6SY,Ansdell Road,2, Stockport 6 | Employee/005,070 7796 8035,Ethan Abbott,it,Backend Developer,Employee/004,IV27 2TG,Shore Street,84,Stoer 7 | -------------------------------------------------------------------------------- /getting_started/python-client/exported_it_team.csv: -------------------------------------------------------------------------------- 1 | Document id,contact_number,manager,name,team,title,address.postcode,address.street,address.street_num,address.town 2 | Employee/003,(01274) 708080,Employee/004,Alanah Bloggs,it,Frontend Developer,BD18 2PT,Otley Road,139, Shipley 3 | Employee/004,(0161) 532 7302,,Fabian Dalby,it,Web Service Manager,SK5 6SY,Ansdell Road,2, Stockport 4 | Employee/005,070 7796 8035,Employee/004,Ethan Abbott,it,Backend Developer,IV27 2TG,Shore Street,84,Stoer 5 | -------------------------------------------------------------------------------- /getting_started/python-client/insert_data.py: -------------------------------------------------------------------------------- 1 | import csv 2 | 3 | from schema import Address, Employee, Team 4 | from terminusdb_client import Client 5 | 6 | # we keep all the information in dictionaries with Employee id as keys 7 | employees = {} 8 | contact_numbers = {} 9 | addresses = {} 10 | managers = {} 11 | 12 | with open("Contact.csv") as file: 13 | csv_file = csv.reader(file) 14 | next(csv_file) # skiping header 15 | for row in csv_file: 16 | contact_numbers[row[0]] = row[1] 17 | street = row[2].split(",")[0] 18 | street_num = int(street.split(" ")[0]) 19 | street_name = " ".join(street.split(" ")[1:]) 20 | town = row[2].split(",")[1] 21 | addresses[row[0]] = Address( 22 | street_num=street_num, street=street_name, town=town, postcode=row[3] 23 | ) 24 | 25 | with open("Employees.csv") as file: 26 | csv_file = csv.reader(file) 27 | next(csv_file) # skiping header 28 | for row in csv_file: 29 | team = eval(f"Team.{row[3].lower()}") 30 | employees[row[0]] = Employee( 31 | _id="Employee/" + row[0], 32 | name=row[1], 33 | title=row[2], 34 | address=addresses[row[0]], 35 | contact_number=contact_numbers[row[0]], 36 | team = team 37 | ) 38 | managers[row[0]] = row[4] 39 | 40 | for emp_id, man_id in managers.items(): 41 | if man_id: 42 | employees[emp_id].manager = employees[man_id] 43 | 44 | # For Terminus X, use the following 45 | # client = Client("https://cloud.terminusdb.com//") 46 | # client.connect(db="demo_workshop", team="", use_token=True) 47 | 48 | client = Client("http://127.0.0.1:6363/") 49 | client.connect(db="getting_started") 50 | 51 | client.insert_document(list(employees.values()), 
commit_msg="Adding 4 Employees") 52 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_1.md: -------------------------------------------------------------------------------- 1 | # Lesson 1 - Install, start a project, and create an empty database with a schema 2 | 3 | ## Installing 4 | 5 | You can download the TerminusDB Docker image to work locally ([TerminusDB Bootstrap](https://github.com/terminusdb/terminusdb-bootstrap)) or you can connect to TerminusCMS. If you are using the Docker image, make sure that your TerminusDB container is running at localhost (https://127.0.0.1). If you are using TerminusCMS, you will need the endpoint, team, and API token available from the [TerminusCMS dashboard](https://dashboard.terminusdb.com/) under profile. 6 | 7 | It is recommended that you install the TerminusDB Python client (which works with 8 | [Python >= 3.7](https://www.python.org/downloads)) in a [separate 9 | Python environment](https://docs.python.org/3/tutorial/venv.html). In the example below we use `venv` which comes with the standard installation of 10 | Python 3. 11 | 12 | We create the new environment: 13 | 14 | ``` 15 | $ python3 -m venv ~/.virtualenvs/terminusdb 16 | $ source ~/.virtualenvs/terminusdb/bin/activate 17 | ``` 18 | 19 | Then we can install the TerminusDB Python client using pip: 20 | 21 | `$ python3 -m pip install terminusdb-client` 22 | 23 | ## Start Project 24 | 25 | > **_NOTE:_** from version 10.1.0 the CLI command is `tdbpy` instead of `terminusdb` 26 | 27 | Go to the project directory (or start a new one): 28 | 29 | `$ cd terminusdb-tutorials/getting_started/python_client` 30 | 31 | In the project directory, start a TerminusDB project: 32 | 33 | `$ tdbpy startproject` 34 | 35 | You will be asked a few questions. Pick a project name (or the database name if you already have a working database). If you are running the localhost server with default port you can just press Enter. If you're using TerminusCMS you need to provide the endpoint and other login information. 36 | 37 | This is what I did: 38 | 39 | ``` 40 | Please enter a project name (this is also the database name): getting_started 41 | Please enter an endpoint location (press enter to use the localhost default) [http://127.0.0.1:6363/]: 42 | config.json and schema.py created, please customize them to start your project. 43 | ``` 44 | 45 | ## Create an Empty Database with Schema 46 | 47 | Now with `schema.py` you can build a schema for the new database. If you open the `schema.py` you will see an example. You can commit the example as it is. 48 | 49 | In this tutorial series, we will use a company phonebook database as an example. 
It consists of only 2 tables, the first is the structure of the company ([Employees.csv](Employees.csv)): 50 | 51 | | Employee id | Name | Title | Team | Manager | 52 | | ----------- | -------------- | ------------------- | ----------- | ----------- | 53 | | 001 | Destiny Norris | Marketing Manager | Marketing | | 54 | | 002 | Darci Prosser | Creative Writer | Marketing | 001 | 55 | | 003 | Alanah Bloggs | Frontend Developer | IT | 004 | 56 | | 004 | Fabian Dalby | Web Service Manager | IT | | 57 | 58 | And the second is the contact details of the employees ([Contact.csv](Contact.csv)): 59 | 60 | | Employee id | Contact number | Home address | Postcode | 61 | | ----------- | --------------- | ----------------------------- | -------- | 62 | | 001 | (01986) 113367 | 1 Market Place, Bungay | NR35 1AP | 63 | | 002 | (01925) 682388 | 200 Manchester Road, Woolston | WA1 4HJ | 64 | | 003 | (01274) 708080 | 139 Otley Road, Shipley | BD18 2PT | 65 | | 004 | (0161) 532 7302 | 2 Ansdell Road, Stockport | SK5 6SY | 66 | 67 | The schema for looks like this: 68 | 69 | ```python 70 | """ 71 | Title: Phonebook for Awesome Startup 72 | Description: Database storing all the contact details of all employees in Awesome Startup 73 | Authors: Destiny Norris, Fabian Dalby 74 | """ 75 | from typing import Optional 76 | 77 | from terminusdb_client.woqlschema import DocumentTemplate, EnumTemplate 78 | 79 | 80 | class Address(DocumentTemplate): 81 | """Home address of Employee 82 | 83 | Attributes 84 | ---------- 85 | postcode : str 86 | Postal Code 87 | street : str 88 | Street name. 89 | street_num : int 90 | Street number. 91 | town : str 92 | Town name. 93 | """ 94 | 95 | _subdocument = [] 96 | postcode: str 97 | street: str 98 | street_num: int 99 | town: str 100 | 101 | 102 | class Employee(DocumentTemplate): 103 | """Employee of the Company""" 104 | 105 | address: "Address" 106 | contact_number: str 107 | manager: Optional["Employee"] 108 | name: str 109 | team: "Team" 110 | title: str 111 | 112 | 113 | class Team(EnumTemplate): 114 | marketing = () 115 | it = () 116 | ``` 117 | 118 | Now we have the right schema plan, let's commit it to be database: 119 | 120 | ``` 121 | $ tdbpy commit -m "update phonebook schema" 122 | getting_started created. 123 | getting_started schema updated. 124 | ``` 125 | 126 | To verify the schema has been committed we can look at the logs using the example below. 
If you are using TerminusCMS, you can see the changes in the dashboard: 127 | 128 | ``` 129 | $ tdbpy log 130 | ======== 131 | Connecting to 'getting_started' at 'http://127.0.0.1:6363/' 132 | on branch 'main' 133 | with team 'admin' 134 | ======== 135 | 136 | commit c3b0nqwl87z92suvpobqtpzr552vzqs 137 | Author: admin 138 | Date: 2021-10-01 11:38:49 139 | 140 | update phonebook schema 141 | 142 | 143 | ``` 144 | 145 | You can also look at the objects in the schema graph like this: 146 | 147 | ``` 148 | $ tdbpy alldocs --schema 149 | [{'@base': 'terminusdb:///data/', '@documentation': {'@authors': ['Destiny Norris', 'Fabian Dalby'], '@description': 'Database storing all the contact details of all employees in Awesome Startup', '@title': 'Phonebook for Awesome Startup'}, '@schema': 'terminusdb:///schema#', '@type': '@context'}, {'@documentation': {'@comment': 'Home address of Employee', '@properties': {'postcode': 'Postal Code', 'street': 'Street name.', 'street_num': 'Street number.', 'town': 'Town name.'}}, '@id': 'Address', '@key': {'@type': 'Random'}, '@subdocument': [], '@type': 'Class', 'postcode': 'xsd:string', 'street': 'xsd:string', 'street_num': 'xsd:integer', 'town': 'xsd:string'}, {'@documentation': {'@comment': 'Employee of the Company'}, '@id': 'Employee', '@type': 'Class', 'address': 'Address', 'contact_number': 'xsd:string', 'manager': {'@class': 'Employee', '@type': 'Optional'}, 'name': 'xsd:string', 'title': 'xsd:string'}, {'@id': 'Team', '@type': 'Enum', '@value': ['Marketing', 'Information Technology']}] 150 | ``` 151 | 152 | --- 153 | 154 | [Lesson 2 - Importing a CSV into the database](lesson_2.md) 155 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_2.md: -------------------------------------------------------------------------------- 1 | # Lesson 2 - Importing a CSV file into the database 2 | 3 | > **_NOTE:_** from version 10.1.0 the CLI command is `tdbpy` instead of `terminusdb` 4 | 5 | ## The `tdbpy importcsv` Command 6 | 7 | In this lesson, we will import a CSV file with the `tdbpy importcsv` command. It provides a very simple way to import a CSV. There are a few things that you can control with the `tdbpy importcsv` command, such as setting the separator character, how to handle NAs, and linking columns using a key columns. For more complicated data handling, a Python script is needed and we will demonstrate that in lesson 5: [Importing data form Python script](lesson_3.md) 8 | 9 | The example below enables you to see all the available options in `tdbpy importcsv`: 10 | 11 | ``` 12 | $ tdbpy importcsv --help 13 | Usage: tdbpy importcsv [OPTIONS] CSV_FILE [KEYS]... 14 | 15 | Import CSV file into pandas DataFrame then into TerminusDB, with 16 | read_csv() options. 
Options like chunksize, sep etc 17 | 18 | Options: 19 | --classname TEXT Customize the class name that data from the 20 | CSV will be imported as 21 | 22 | --chunksize INTEGER Large files will load into the database in 23 | chunks, size of the chunks [default: 1000] 24 | 25 | --schema Specify if the schema is to be updated if it exists, 26 | default False 27 | 28 | --na [skip|optional|error] Specify how to handle NAs: 'skip' will skip 29 | entries with NAs, 'optional' will make all 30 | properties optional in the database, 'error' 31 | will just throw an error if there are NAs 32 | 33 | --id TEXT Specify the column to be used as ids instead of 34 | generated ids 35 | 36 | -e, --embedded TEXT Specify embedded columns 37 | -m, --message TEXT Commit message for the import 38 | --sep TEXT Specify separator character in the CSV 39 | [default: ,] 40 | 41 | --help Show this message and exit. 42 | ``` 43 | 44 | ## Importing CSV 45 | 46 | We will continue working with the phonebook example. We will import the [Employees.csv](Employees.csv) file. Which looks like this: 47 | 48 | | Employee id | Name | Title | Team | Manager | 49 | | ----------- | -------------- | ------------------- | ----------- | ----------- | 50 | | 001 | Destiny Norris | Marketing Manager | Marketing | | 51 | | 002 | Darci Prosser | Creative Writer | Marketing | 001 | 52 | | 003 | Alanah Bloggs | Frontend Developer | IT | 004 | 53 | | 004 | Fabian Dalby | Web Service Manager | IT | | 54 | 55 | As you see there is `Employee id` used as a key to link to the `Manager` field showing who the employee's manager is. 56 | 57 | To link them, we must first install the `pandas` library. 58 | ```sh 59 | $ pip install pandas 60 | ``` 61 | We can then import the CSV file with the following command: 62 | 63 | ``` 64 | $ tdbpy importcsv Employees.csv --classname EmployeesFromCSV --id "Employee id" -e Manager -m "Import Employees from CSV" 65 | 0it [00:00, ?it/s] 66 | Schema object EmployeesFromCSV created with Employees.csv being imported into database. 67 | 1it [00:00, 1.27it/s] 68 | Records in Employees.csv inserted as type EmployeesFromCSV into database with specified ids. 69 | ``` 70 | 71 | We have imported the CSV file with the class as `EmployeesFromCSV`. There is a new class object in `schema.py` that was created along with the import: 72 | 73 | ```python 74 | class EmployeesFromCSV(DocumentTemplate): 75 | employee_id: str 76 | manager: Optional["EmployeesFromCSV"] 77 | name: Optional[str] 78 | team: Optional[str] 79 | title: Optional[str] 80 | ``` 81 | 82 | Now we can verify our data: 83 | 84 | ``` 85 | $ tdbpy alldocs 86 | [{'@id': 'EmployeesFromCSV/001', '@type': 'EmployeesFromCSV', 'employee_id': '001', 'name': 'Destiny Norris', 'team': 'Marketing', 'title': 'Marketing Manager'}, {'@id': 'EmployeesFromCSV/002', '@type': 'EmployeesFromCSV', 'employee_id': '002', 'manager': 'EmployeesFromCSV/001', 'name': 'Darci Prosser', 'team': 'Marketing', 'title': 'Creative Writer'}, {'@id': 'EmployeesFromCSV/003', '@type': 'EmployeesFromCSV', 'employee_id': '003', 'manager': 'EmployeesFromCSV/004', 'name': 'Alanah Bloggs', 'team': 'IT', 'title': 'Frontend Developer'}, {'@id': 'EmployeesFromCSV/004', '@type': 'EmployeesFromCSV', 'employee_id': '004', 'name': 'Fabian Dalby', 'team': 'IT', 'title': 'Web Service Manager'}] 87 | ``` 88 | 89 | In [chapter 5](lesson_5.md) we will learn how to query this data and/ or export data into CSV. 
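
In the meantime, you can already narrow the listing down to the newly imported class with the same `--type` flag that Lesson 3 uses:

```
$ tdbpy alldocs --type EmployeesFromCSV
```
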
90 | 91 | --- 92 | 93 | [Move on to Lesson 3 - Importing data from a Python script](lesson_3.md) 94 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_3.md: -------------------------------------------------------------------------------- 1 | # Lesson 3 - Importing Data From a Python Script 2 | 3 | > **_NOTE:_** from version 10.1.0 the CLI command is `tdbpy` instead of `terminusdb` 4 | 5 | In the last lesson we imported [Employees.csv](Employees.csv) using the `tdbpy importcsv` command. It autogenerated the schema and piped in the data from the CSV. If we check the [schema.py](schema.py) we can see the schema that was generated from the CSV: 6 | 7 | ```python 8 | class EmployeesFromCSV(DocumentTemplate): 9 | employee_id: str 10 | manager: Optional["EmployeesFromCSV"] 11 | name: Optional[str] 12 | team: Optional[str] 13 | title: Optional[str] 14 | ``` 15 | 16 | You may have noticed that the schema is not the same as the one we talked about in [Lesson 1](lesson_1.md): 17 | 18 | ```python 19 | class Employee(DocumentTemplate): 20 | """Employee of the Company""" 21 | 22 | address: "Address" 23 | contact_number: str 24 | manager: Optional["Employee"] 25 | name: str 26 | team: "Team" 27 | title: str 28 | ``` 29 | 30 | This is because we also had data in [Contact.csv](Contact.csv). Fetching data from different CSVs and matching them to our schema requires a little more customization. This can be done by creating a Python script using the [TerminusDB Python Client](https://github.com/terminusdb/terminusdb-client-python). 31 | 32 | ## Creating the Python script 33 | 34 | Let's start a new `.py` file [insert_data.py](insert_data.py). You can copy and paste the one in this repo or build one yourself. We'll explain the example script so you understand what it does. 35 | 36 | In the first half of the script, we have to manage and import the data from the CSVs. In Python there is the [`csv` standard library](https://docs.python.org/3/library/csv.html) that helps with reading CSV files. Go ahead and import that: 37 | 38 | ```python 39 | import csv 40 | ``` 41 | 42 | We also need to import `WOQLClient`, which is the client that communicates with TerminusDB/ TerminusCMS, and `schema.py`: 43 | 44 | ```python 45 | from terminusdb_client import WOQLClient 46 | from schema import * 47 | ``` 48 | 49 | At the top of the script, we prepare a few empty dictionaries to hold the data. We use dictionaries because the keys can be the `Employee id` for easy mapping: 50 | 51 | ```python 52 | employees = {} 53 | contact_numbers = {} 54 | addresses = {} 55 | managers = {} 56 | ``` 57 | 58 | The goal is to populate the `employees` dictionary with `Employee` objects. To help, we also need `contact_numbers` to hold the contact numbers while reading `Contact.csv`. The rest of the information in `Contact.csv` will be used to construct `Address` objects and stored in `addresses`. `managers` is used to store the employee id found in the `Manager` column of `Employees.csv`. We store the id at first and make the linking later because the manager of that employee may not have been "created" yet.
59 | 60 | Then we go ahead and read the CSVs and do the corresponding data handling: 61 | 62 | ```python 63 | with open("Contact.csv") as file: 64 | csv_file = csv.reader(file) 65 | next(csv_file) # skipping header 66 | for row in csv_file: 67 | contact_numbers[row[0]] = row[1] 68 | street = row[2].split(",")[0] 69 | street_num = int(street.split(" ")[0]) 70 | street_name = " ".join(street.split(" ")[1:]) 71 | town = row[2].split(",")[1] 72 | addresses[row[0]] = Address( 73 | street_num=street_num, street=street_name, town=town, postcode=row[3] 74 | ) 75 | 76 | with open("Employees.csv") as file: 77 | csv_file = csv.reader(file) 78 | next(csv_file) # skipping header 79 | for row in csv_file: 80 | team = eval(f"Team.{row[3].lower()}") 81 | employees[row[0]] = Employee( 82 | _id="Employee/" + row[0], 83 | name=row[1], 84 | title=row[2], 85 | address=addresses[row[0]], 86 | contact_number=contact_numbers[row[0]], 87 | team=team 88 | ) 89 | managers[row[0]] = row[4] 90 | ``` 91 | 92 | Finally, we have to make the manager links: 93 | 94 | ```python 95 | for emp_id, man_id in managers.items(): 96 | if man_id: 97 | employees[emp_id].manager = employees[man_id] 98 | ``` 99 | 100 | Now the `employees` dictionary should be populated with the `Employee` objects, ready to be inserted into the database. 101 | 102 | ## Using the Python client 103 | 104 | The next step is to insert all the `Employee` objects into the database. But before that, we need to create a client with our endpoint: 105 | 106 | ```python 107 | client = WOQLClient("http://127.0.0.1:6363/") 108 | ``` 109 | 110 | Then we will connect the client to our database. If you are connecting locally and using the default settings, just provide the database you are connecting to: 111 | 112 | ```python 113 | client.connect(db="getting_started") 114 | ``` 115 | 116 | If you are using TerminusCMS, you can find the information of your endpoint, team, and API token in the [TerminusCMS dashboard](https://dashboard.terminusdb.com/) under profile. 117 | 118 | Now we are all ready; the last thing to do is to insert the documents: 119 | 120 | ```python 121 | client.insert_document(list(employees.values()), commit_msg="Adding 4 Employees") 122 | ``` 123 | 124 | ## Running the script 125 | 126 | Go back to the terminal and run the script. Make sure you are in a Python environment that has `terminusdb-client` installed. 127 | 128 | ``` 129 | $ python insert_data.py 130 | ``` 131 | 132 | To check the data has been inserted correctly, use the `tdbpy alldocs` command: 133 | 134 | ``` 135 | $ tdbpy alldocs --type Employee 136 | ``` 137 | 138 | If you used TerminusCMS, check it's there in the [TerminusCMS dashboard](https://dashboard.terminusdb.com/). 139 | 140 | --- 141 | 142 | [Lesson 4 - Update and import new data that links to old data](lesson_4.md) 143 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_4.md: -------------------------------------------------------------------------------- 1 | # Lesson 4 - Update and import new data that links to old data 2 | 3 | > **_NOTE:_** from version 10.1.0 the CLI command is `tdbpy` instead of `terminusdb` 4 | 5 | Remember our imaginary Awesome Startup that has their Phonebook stored in TerminusDB?
It has been a few months and they have a new recruit: 6 | 7 | | Employee id | Name | Title | Team | Manager | 8 | | ----------- | -------------- | ------------------- | ----------- | ----------- | 9 | | 005 | Ethan Abbott | Backend Developer | IT | 004 | 10 | 11 | | Employee id | Contact number | Home address | Postcode | 12 | | ----------- | --------------- | ----------------------------- | -------- | 13 | | 005 | 070 7796 8035 | 84 Shore Street, Stoer | IV27 2TG | 14 | 15 | Also, our Marketing Manager Destiny has moved to a new address: 16 | 17 | | Employee id | Contact number | Home address | Postcode | 18 | | ----------- | --------------- | ----------------------------- | -------- | 19 | | 001 | (01986) 113367 | 73 Lairg Road, Newbigging | PH12 3RP | 20 | 21 | How are we going to update the records? 22 | 23 | ## Getting data objects back from TerminusDB/ TerminusCMS 24 | 25 | Let's look at how to update Destiny's Address. We will make our changes in the file [update_data.py](update_data.py). The first step after connecting with the client is to get back the schema of the database. In the terminal we can use: 26 | 27 | `$ tdbpy sync` 28 | 29 | This will update `schema.py` with the latest schema from TerminusDB for easy inspection. However, since we want to work with a script, we will create a `WOQLSchema` object and sync that object with the database's schema: 30 | 31 | ```python 32 | data_schema = WOQLSchema() 33 | data_schema.from_db(client) 34 | ``` 35 | 36 | Now we can import the Employee document that represents Destiny. Since we know the id, we will just use `get_document` to do so: 37 | 38 | ```python 39 | destiny_raw = client.get_document("Employee/001") 40 | ``` 41 | 42 | `destiny_raw` is a dictionary. We could update it directly; however, in many cases, converting it back to an `Employee` object makes updating it a bit easier: 43 | 44 | ```python 45 | destiny = data_schema.import_objects(destiny_raw) 46 | ``` 47 | 48 | Notice that `import_objects` will also take a list of dictionaries and return a list of objects. 49 | 50 | ## Update a document 51 | 52 | Now that `destiny` is an `Employee` object, we can go ahead and update the details: 53 | 54 | ```python 55 | destiny.address.postcode = "PH12 3RP" 56 | destiny.address.street = "Lairg Road" 57 | destiny.address.street_num = 73 58 | destiny.address.town = "Newbigging" 59 | ``` 60 | 61 | Let's send `destiny` back to the database with `update_document`. The difference between `insert_document` and `update_document` is that if an object already exists, `update_document` will replace the old with the new. It will also insert the document if it does not exist: 62 | 63 | ```python 64 | client.update_document(destiny, commit_msg="Update Destiny") 65 | ``` 66 | 67 | ## Linking a new document to an old document 68 | 69 | Now let's work on our new recruit. First we need to get the schema objects. Instead of importing them from `schema.py` like we did in [lesson 3](lesson_3.md), since the objects are already in `data_schema`, we will get them out from there: 70 | 71 | ```python 72 | Employee = data_schema.object.get('Employee') 73 | Address = data_schema.object.get('Address') 74 | Team = data_schema.object.get('Team') 75 | ``` 76 | 77 | Next, we know that all the properties of `Employee` are datatypes (e.g. str, int) except `address` and `manager`.
For `address`, we will create a new object: 78 | 79 | ```python 80 | ethan_address = Address( 81 | postcode="IV27 2TG", street="Shore Street", street_num=84, town="Stoer" 82 | ) 83 | ``` 84 | 85 | And for the manager, we will get that person from the database just like we did with Destiny: 86 | 87 | ```python 88 | manager_raw = client.get_document("Employee/004") 89 | ethan_manager = data_schema.import_objects(manager_raw) 90 | ``` 91 | 92 | We can now create `ethan`: 93 | 94 | ```python 95 | ethan = Employee( 96 | _id="Employee/005", 97 | name="Ethan Abbott", 98 | title="Backend Developer", 99 | team=Team.it, 100 | contact_number="070 7796 8035", 101 | address=ethan_address, 102 | manager=ethan_manager, 103 | ) 104 | ``` 105 | 106 | We're ready, let's put `ethan` into the database. To prove `update_document` works, we will use it to insert `ethan`: 107 | 108 | ```python 109 | client.update_document(ethan, commit_msg="Adding Ethan") 110 | ``` 111 | 112 | Or if you like, you can update `destiny` and insert `ethan` all at once like this: 113 | 114 | ```python 115 | client.update_document([destiny, ethan], commit_msg="Update Destiny and adding Ethan") 116 | ``` 117 | 118 | Run the scripts: 119 | 120 | `$ python update_data.py` 121 | 122 | Check the database is up-to-date in the terminal like we did before: 123 | 124 | `$ tdbpy alldocs` 125 | 126 | Or if you are using TerminusCMS, check it in the dashboard. 127 | 128 | --- 129 | 130 | [Lesson 5 - Query the database and get results back as a CSV or DataFrame](lesson_5.md) 131 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_5.md: -------------------------------------------------------------------------------- 1 | # Lesson 5 - Query the database and get results back as a CSV or DataFrame 2 | 3 | > **_NOTE:_** from version 10.1.0 the CLI command is `tdbpy` instead of `terminusdb` 4 | 5 | In previous lessons we learnt how to build schema and import data. Now the database has all the data we wanted. Now we want to get information out of the database. 6 | 7 | In this lesson we will learn how to query the database, get the information we need, and export either to a CSV or as a Pandas DataFrames. 8 | 9 | ## Query Data with `tdbpy` Command 10 | 11 | The most direct way to query data and export it as CSV is to use the `tdbpy` command. 12 | 13 | If you are planning to export all documents of a particular type. You can simply use the `tdbpy exportcsv` command. Let's have a look at the command: 14 | 15 | ``` 16 | $ tdbpy exportcsv --help 17 | Usage: tdbpy exportcsv [OPTIONS] CLASS_OBJ 18 | 19 | Export all documents in a TerminusDB class into a flatten CSV file. 20 | 21 | Options: 22 | --keepid If used, the id of the object and the other meta (@) is to 23 | be kept as is in the CSV 24 | 25 | --maxdep INTEGER Specify the depth of the embedding operation. When maximum 26 | is hit, the values will be kept as object ids [default: 27 | 2] 28 | 29 | --filename TEXT File name if the exported file, if not specify it will use 30 | the name of the class e.g. 'ClassName.csv' 31 | 32 | --help Show this message and exit. 33 | ``` 34 | 35 | Now let's try to export all `Employee` to a file named `exported_employees.csv` 36 | 37 | `$ tdbpy exportcsv --filename exported_employees.csv Employee` 38 | 39 | We'll quickly inspect [exported_employees.csv](exported_employees.csv) and can see it looks good. Information for all 5 employees is there. 
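If you want to look at the exported file programmatically rather than opening it by hand, you can load it with `pandas` (already installed in lesson 2); this is just a quick sanity check, so adapt it to whatever columns the export actually produces:

```python
import pandas as pd

# Load the CSV written by `tdbpy exportcsv` and take a quick look
df = pd.read_csv("exported_employees.csv")
print(df.shape)     # expect 5 rows, one per employee
print(df.columns)   # the flattened column names
print(df.head())
```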
40 | 41 | Say we want to export only members of the IT team to a CSV; for that we have to do a bit of querying. Let's try using the `-q` option with `tdbpy alldocs`: 42 | 43 | ``` 44 | $ tdbpy alldocs --type Employee -q team=it 45 | [{'@id': 'Employee/003', '@type': 'Employee', 'address': {'@id': 'Address/543050aaa73c4590b38f9aed129b17ff', '@type': 'Address', 'postcode': 'BD18 2PT', 'street': 'Otley Road', 'street_num': 139, 'town': ' Shipley'}, 'contact_number': '(01274) 708080', 'manager': 'Employee/004', 'name': 'Alanah Bloggs', 'team': 'it', 'title': 'Frontend Developer'}, {'@id': 'Employee/004', '@type': 'Employee', 'address': {'@id': 'Address/6665e689224d412aa3a882fcfd287676', '@type': 'Address', 'postcode': 'SK5 6SY', 'street': 'Ansdell Road', 'street_num': 2, 'town': ' Stockport'}, 'contact_number': '(0161) 532 7302', 'name': 'Fabian Dalby', 'team': 'it', 'title': 'Web Service Manager'}, {'@id': 'Employee/005', '@type': 'Employee', 'address': {'@id': 'Address/358ac353adbf494f97100330b504e818', '@type': 'Address', 'postcode': 'IV27 2TG', 'street': 'Shore Street', 'street_num': 84, 'town': 'Stoer'}, 'contact_number': '070 7796 8035', 'manager': 'Employee/004', 'name': 'Ethan Abbott', 'team': 'it', 'title': 'Backend Developer'}] 46 | ``` 47 | 48 | It's a bit hard to read, so we are going to export it to [a CSV](exported_it_team.csv): 49 | 50 | `$ tdbpy alldocs --type Employee -q team=it -e --filename exported_it_team.csv` 51 | 52 | ## Query data in Python script 53 | 54 | If we want to do something more complicated, for example see which team has longer names on average, we can export the results to a Pandas DataFrame and investigate further. Let's have a look at [query_data.py](query_data.py). 55 | 56 | We can make use of the magic function `result_to_df` to convert the JSON results to a Pandas DataFrame: 57 | 58 | ```python 59 | from terminusdb_client.woqldataframe import result_to_df 60 | ``` 61 | 62 | Querying is done with `query_document`; you will have to provide a template JSON that has `@type` and the specific requirement(s) (in our case, `"team": "it"` or `"team": "marketing"`). 63 | 64 | ```python 65 | team_it_raw = client.query_document({"@type": "Employee", "team": "it"}) 66 | team_marketing_raw = client.query_document({"@type": "Employee", "team": "marketing"}) 67 | ``` 68 | 69 | We can use `result_to_df` to get the DataFrames: 70 | 71 | ```python 72 | team_it = result_to_df(team_it_raw) 73 | team_marketing = result_to_df(team_marketing_raw) 74 | ``` 75 | 76 | Then, we can do all the data manipulation we love using Pandas: 77 | 78 | ```python 79 | team_it_avg = team_it["name"].apply(len).sum() / len(team_it) 80 | team_marketing_avg = team_marketing["name"].apply(len).sum() / len(team_marketing) 81 | ``` 82 | 83 | Print out the results:
84 | 85 | ```python 86 | print(f"Average name length of IT team is {team_it_avg}") 87 | print(f"Average name length of Marketing team is {team_marketing_avg}") 88 | ``` 89 | 90 | I won't spoil the results for you; you have to find them out yourself :-) 91 | 92 | `$ python query_data.py` 93 | 94 | --- 95 | 96 | [Lesson 6 - Version control: time travel, branching and rebase](lesson_6.md) 97 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_6.md: -------------------------------------------------------------------------------- 1 | # Lesson 6 - Version control: Time travel, branching, and rebase 2 | 3 | > **_NOTE:_** from version 10.1.0 the CLI command is `tdbpy` instead of `terminusdb` 4 | 5 | In this lesson about version control, we will be doing some Git-like operations that enable collaboration and time travel. 6 | 7 | ## Branch, create a copy and jump between versions 8 | 9 | We'll be using the Awesome Startup example again. Now the company is super busy and decided to hire a few contractors to help out for a few months. They would like to get the contractors' details into the database, so they want to make a branch of the database and get the contractors to "fill in the details" in this branch. 10 | 11 | Making a branch is like making a copy of the database. The good thing about this operation is that, if a contractor does something wrong and accidentally modifies data that they shouldn't, the changes will only apply in the branched version. It allows managers to review changes before adopting them in the main database. 12 | 13 | To make a branch, we use the `tdbpy branch` or `tdbpy checkout -b` command. The difference is that `tdbpy branch` will create a new branch WITHOUT going (checking out) to that branch, while `tdbpy checkout` is used to go to another branch. With the option `-b` you will create that new branch before going there. 14 | 15 | Before creating the branch, let us first see what we have: 16 | 17 | ``` 18 | $ tdbpy branch 19 | main 20 | ``` 21 | 22 | We only have the `main` branch. The `main` branch is the default branch that is created when you create a database. Let's create the new branch: 23 | 24 | ``` 25 | $ tdbpy branch contractors 26 | Branch 'contractors' created. Remain on 'main' branch. 27 | ``` 28 | 29 | We have created the `contractors` branch. Let's verify: 30 | 31 | ``` 32 | $ tdbpy branch 33 | main 34 | contractors 35 | ``` 36 | 37 | Now the contractors can add their details with a script [add_contractors.py](add_contractors.py). We add the two contractors in a similar way to how we added Ethan in [lesson 4](lesson_4.md), but notice one thing: 38 | 39 | ```python 40 | client.connect(db="getting_started", branch="contractors") 41 | ``` 42 | 43 | When we connect to the database, we have to specify the branch as `contractors`.
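Conceptually, the rest of [add_contractors.py](add_contractors.py) looks much like the scripts from lessons 3 and 4. Here is a rough sketch with placeholder contractor details (the real names and addresses are in the actual script):

```python
from terminusdb_client import Client
from terminusdb_client.woqlschema import WOQLSchema

client = Client("http://127.0.0.1:6363/")
# Connect to the branched version of the database, not to main
client.connect(db="getting_started", branch="contractors")

# Pull the schema objects from the database, as in lesson 4
data_schema = WOQLSchema()
data_schema.from_db(client)
Employee = data_schema.object.get("Employee")
Address = data_schema.object.get("Address")
Team = data_schema.object.get("Team")

# Placeholder contractor, illustrative only
contractor = Employee(
    _id="Employee/006",
    name="Example Contractor",
    title="Contractor",
    team=Team.it,
    contact_number="000 0000 0000",
    address=Address(postcode="AB1 2CD", street="Example Street", street_num=1, town="Exampletown"),
)
client.insert_document(contractor, commit_msg="Adding contractors")
```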
44 | 45 | Now run the script: 46 | 47 | `$ python add_contractors.py` 48 | 49 | To verify we did things right, let's see if there are any changes in the current `main` branch, you can see all the logs with: 50 | 51 | ``` 52 | $ tdbpy log 53 | ======== 54 | Connecting to 'getting_started' at 'http://127.0.0.1:6363/' 55 | on branch 'main' 56 | with team 'admin' 57 | ======== 58 | 59 | commit 8o4vkomwryjogg37u3abojflpzrt0r4 60 | Author: admin 61 | Date: 2021-10-15 11:48:32 62 | 63 | Adding Ethan (inserts) 64 | 65 | commit 680q7y1wxouy9ltni344s821jvm7zn2 66 | Author: admin 67 | Date: 2021-10-15 11:48:32 68 | 69 | Update Destiny (repleces) 70 | 71 | commit 8ebr9nm9pwgd05mlxv28g85kulrq00r 72 | Author: admin 73 | Date: 2021-10-15 11:48:31 74 | 75 | Adding 4 Employees 76 | 77 | commit thb4axo7pi08946jhuxo68rry42vdzd 78 | Author: admin 79 | Date: 2021-10-15 11:48:31 80 | 81 | Import Employees from CSV (inserts) 82 | 83 | commit l5a3jolxydc39khktgljppqi3a7ed3b 84 | Author: admin 85 | Date: 2021-10-15 11:48:29 86 | 87 | Schema updated by Python client. 88 | 89 | ``` 90 | 91 | So the last time we made changes was when we added Ethan. Contractors have not been added. 92 | 93 | Now let's go to the `contractors` branch: 94 | 95 | ``` 96 | $ tdbpy checkout contractors 97 | Checked out 'contractors' branch. 98 | ``` 99 | 100 | And check the log again: 101 | 102 | ``` 103 | $ tdbpy log 104 | ======== 105 | Connecting to 'getting_started' at 'http://127.0.0.1:6363/' 106 | on branch 'contractors' 107 | with team 'admin' 108 | ======== 109 | 110 | commit kb4nilq4qt9b2va4l0mekv19ov8wey1 111 | Author: admin 112 | Date: 2021-10-15 11:54:21 113 | 114 | Adding contractors (inserts) 115 | 116 | ... 117 | 118 | ``` 119 | 120 | We have a new entry for the log. 121 | 122 | ## Rebase, what is it about? 123 | 124 | After the `contractors` branch is created and "filled in". Our managers approve the change. Now, we would like to incorporate the changes back to the main branch. For those who are familiar with Git workflow, you will know that we need to perform a merge from the `contractors` branch to the `main` branch. But we are going to do something a bit difference here, using rebase instead of merge. 125 | 126 | Rebase means that we take the changes we made since the branching and will continue from another branch. For example, if we rebase `main` from `contractors` we will continue from what `contractors` is now, i.e. after adding the contractors. This means that we have incorporated the change in `contractors` into `main`. For more information about rebase, see the [documentation with git](https://git-scm.com/docs/git-rebase). 127 | 128 | To do it, let's go back to `main` and rebase from `contractors`: 129 | 130 | ``` 131 | $ tdbpy checkout main 132 | Checked out 'main' branch. 133 | $ tdbpy rebase contractors 134 | Rebased contractors branch. 135 | ``` 136 | 137 | Now when we do `tdbpy log` we see that we have the `Adding contractors` commit in it. 138 | 139 | ``` 140 | $ tdbpy log 141 | ======== 142 | Connecting to 'getting_started' at 'http://127.0.0.1:6363/' 143 | on branch 'main' 144 | with team 'admin' 145 | ======== 146 | 147 | commit kb4nilq4qt9b2va4l0mekv19ov8wey1 148 | Author: admin 149 | Date: 2021-10-15 11:54:21 150 | 151 | Adding contractors (inserts) 152 | 153 | ... 154 | 155 | ``` 156 | 157 | ## Reset, time traveling machine 158 | 159 | Time flies, now the project is done and the contractors have done their jobs and left the company. We have to time travel to the state of the company before the project. 
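Before we do, note that the branching, switching, and rebasing we just did with the CLI are also available from the Python client. Here is a rough sketch, based on the client methods used in the Netflix example elsewhere in this repository (we will stick with the CLI for the reset below):

```python
from terminusdb_client import Client

client = Client("http://127.0.0.1:6363/")
client.connect(db="getting_started")

client.create_branch("contractors", empty=False)  # like `tdbpy branch contractors`
client.branch = "contractors"                     # like `tdbpy checkout contractors`
print(client.get_all_branches())                  # list the branches

client.branch = "main"
client.rebase("contractors")                      # like `tdbpy rebase contractors`
```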
160 | 161 | Let's verify our log again: 162 | 163 | ``` 164 | $ tdbpy log 165 | ======== 166 | Connecting to 'getting_started' at 'http://127.0.0.1:6363/' 167 | on branch 'main' 168 | with team 'admin' 169 | ======== 170 | 171 | commit kb4nilq4qt9b2va4l0mekv19ov8wey1 172 | Author: admin 173 | Date: 2021-10-15 11:54:21 174 | 175 | Adding contractors (inserts) 176 | 177 | commit 8o4vkomwryjogg37u3abojflpzrt0r4 178 | Author: admin 179 | Date: 2021-10-15 11:48:32 180 | 181 | Adding Ethan (inserts) 182 | 183 | ... 184 | 185 | ``` 186 | 187 | We would like to keep the commits up to the `Adding Ethan` one, take note of the commit id for that commit. Mine is `8o4vkomwryjogg37u3abojflpzrt0r4`, yours will be different. 188 | 189 | To reset, we use the `tdbpy reset` command: 190 | 191 | ``` 192 | $ tdbpy reset 8o4vkomwryjogg37u3abojflpzrt0r4 193 | Hard reset to commit 8o4vkomwryjogg37u3abojflpzrt0r4 194 | ``` 195 | 196 | Notice that it is a hard reset, meaning that the changes after the commit `Adding Ethan` is gone forever! If you are not sure or just want to temporary reset to a previous time, make sure to use `--soft` option. Now let's have a look at the log again: 197 | 198 | ``` 199 | $ tdbpy log 200 | ======== 201 | Connecting to 'getting_started' at 'http://127.0.0.1:6363/' 202 | on branch 'main' 203 | with team 'admin' 204 | ======== 205 | 206 | commit 8o4vkomwryjogg37u3abojflpzrt0r4 207 | Author: admin 208 | Date: 2021-10-15 11:48:32 209 | 210 | Adding Ethan (inserts) 211 | 212 | ... 213 | 214 | ``` 215 | 216 | We are back to where we were once more. 217 | 218 | --- 219 | 220 | [Lesson 7 - Logical query using triple and WOQL](lesson_7.md) 221 | -------------------------------------------------------------------------------- /getting_started/python-client/lesson_7.md: -------------------------------------------------------------------------------- 1 | # Lesson 7 - Logical query using triple and WOQL 2 | 3 | In this lesson we are exploring more advanced territories. We will do logical queries with WOQL query. In many cases we only need to make document queries with the document interface that we covered in lessons 1 to 5. But on rare occasions, logical queries maybe the more straight forward and easy way to find information from the data we stored in our database 4 | 5 | ## Triples - Subject, Predicate and Object 6 | 7 | In TerminusDB things are stored as triples, each consists of 'Subject', 'Predicate' and 'Object'. We can inspect all of the triples in a graph (in the following example, the instance graph) by using `star()` in `WOQLQuery`: 8 | 9 | ``` 10 | import pprint as pp 11 | 12 | from terminusdb_client import WOQLClient 13 | from terminusdb_client import WOQLQuery as wq 14 | 15 | client = WOQLClient("http://127.0.0.1:6363/") 16 | client.connect(db="getting_started") 17 | 18 | pp.pprint(wq().star().execute(client)) 19 | ``` 20 | 21 | By inspecting the output of the above code, we see all the triples within our instance graph. 22 | 23 | With a bit of understanding about how triples are related to each other, we can link triples, leaving some "variables" that we want to find the answer to. We can make WOQL queries that harness the power of logical programming in Prolog. We will explain it all with examples. 24 | 25 | ## WOQLQuery - Making logical queries with triples 26 | 27 | Let's imagine you are working with our example company, Awesome Startup. You would like to find the contact number of Darci, the creative writer, to discuss the next article publication. 
Here is how it is done: 28 | 29 | ``` 30 | from terminusdb_client import WOQLClient 31 | from terminusdb_client import WOQLQuery as wq 32 | 33 | client = WOQLClient("http://127.0.0.1:6363/") 34 | client.connect(db="getting_started") 35 | 36 | darci = wq().string("Darci Prosser") 37 | 38 | query = wq().triple("v:person", "@schema:name", darci) + wq().triple("v:person", "@schema:contact_number", "v:phone_num") 39 | 40 | result = query.execute(client) 41 | 42 | if result["bindings"]: 43 | print("Darci's contact number:") 44 | print(result["bindings"][0]["phone_num"]["@value"]) 45 | else: 46 | print("Cannot find result.") 47 | 48 | ``` 49 | 50 | A few things to note here. First, we have to create a WOQLQuery object with the string `"Darci Prosser"`, as it needs to be treated as a literal string in the database, not as a string used to construct the query like `"v:person"` or `"@schema:name"`. 51 | 52 | Secondly, the prefix before the `:` tells TerminusDB how to treat the strings in the query. For example, a `v:` denotes a variable, so `v:person` is a variable whose value we don't know and would like to find with the query. On the other hand, `@schema:` denotes a property that is defined in the schema, so `@schema:name` says that `name` is the property that we stated for the `Employee` documents in the schema. 53 | 54 | Finally, we can link the triples we created with [`wq().triple`](https://terminusdb.github.io/terminusdb-client-python/woqlQuery.html#terminusdb_client.WOQLQuery.triple) with either [`wq().woql_and`](https://terminusdb.github.io/terminusdb-client-python/woqlQuery.html#terminusdb_client.WOQLQuery.woql_and) or a simple `+` like we did above. 55 | 56 | So the query above can be interpreted as: 57 | 58 | `There is a person whose name is "Darci Prosser" and I would like to know that person's contact number.` 59 | 60 | As you can see, making WOQL queries is quite logical; you just need to think about what question you are asking and how to link the parts of the question together with triples. With some practice you will get used to it. Let's try another example. 61 | 62 | Let's say you have called Darci and unfortunately she is on holiday and you cannot wait for her to get back. You decided to contact her manager instead. However, you do not know who her manager is or their contact number either. But fear not! With WOQL it is just a logical query similar to the example above. 63 | 64 | ``` 65 | query = wq().triple("v:person", "@schema:name", darci) + wq().triple("v:person", "@schema:manager", "v:manager") + wq().triple("v:manager", "@schema:contact_number", "v:phone_num") + wq().triple("v:manager", "@schema:name", "v:manager_name") 66 | 67 | result = query.execute(client) 68 | 69 | if result["bindings"]: 70 | print("Manager's name:") 71 | print(result["bindings"][0]["manager_name"]["@value"]) 72 | print("Manager's contact number:") 73 | print(result["bindings"][0]["phone_num"]["@value"]) 74 | else: 75 | print("Cannot find result.") 76 | ``` 77 | 78 | This time, instead of asking for Darci's contact number, you ask `who is the manager` and set the manager as a variable `v:manager`. Then with `v:manager` you can find out the `name` and `contact_number` of the manager. 79 | 80 | As you can see, when the questions get more complicated, more triples are added to link the extra information. The structure of the query remains the same and is much easier and more efficient than joining tables many times in SQL queries to get the same answer.
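If you prefer, the same chain of triples can be passed to `wq().woql_and` instead of being joined with `+`; this reads a little more clearly once a query grows to several triples. The sketch below is just a rewording of the manager query, using the same variables as above:

```
query = wq().woql_and(
    wq().triple("v:person", "@schema:name", darci),
    wq().triple("v:person", "@schema:manager", "v:manager"),
    wq().triple("v:manager", "@schema:contact_number", "v:phone_num"),
    wq().triple("v:manager", "@schema:name", "v:manager_name"),
)

result = query.execute(client)
```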
81 | 82 | Feel free to practice and play with the WOQL query. The code we showed in this lesson can be found in the file [woql_query.py](woql_query.py) 83 | 84 | --- 85 | 86 | [Check out other tutorials](README.md) 87 | -------------------------------------------------------------------------------- /getting_started/python-client/query_data.py: -------------------------------------------------------------------------------- 1 | from terminusdb_client import Client 2 | from terminusdb_client.woqlschema import WOQLSchema 3 | from terminusdb_client.woqldataframe import result_to_df 4 | 5 | # For Terminus X, use the following 6 | # client = Client("https://cloud.terminusdb.com//") 7 | # client.connect(db="demo_workshop", team="", use_token=True) 8 | 9 | client = Client("http://127.0.0.1:6363/") 10 | client.connect(db="getting_started") 11 | 12 | team_it_raw = client.query_document({"@type": "Employee", "team": "it"}) 13 | team_marketing_raw = client.query_document({"@type": "Employee", "team": "marketing"}) 14 | 15 | team_it = result_to_df(team_it_raw) 16 | team_marketing = result_to_df(team_marketing_raw) 17 | 18 | team_it_avg = team_it["name"].apply(len).sum() / len(team_it) 19 | team_marketing_avg = team_marketing["name"].apply(len).sum() / len(team_marketing) 20 | 21 | print(f"Average name length of IT team is {team_it_avg}") 22 | print(f"Average name length of Marketing team is {team_marketing_avg}") 23 | -------------------------------------------------------------------------------- /getting_started/python-client/schema.py: -------------------------------------------------------------------------------- 1 | #### 2 | # This is the script for storing the schema of your TerminusDB 3 | # database for your project. 4 | # Use 'terminusdb commit' to commit changes to the database and 5 | # use 'terminusdb sync' to change this file according to 6 | # the exsisting database schema 7 | #### 8 | """ 9 | Title: Phonebook for Awesome Startup 10 | Description: Database storing all the contact details of all employees in Awesome Startup 11 | Authors: Destiny Norris, Fabian Dalby 12 | """ 13 | from typing import Optional 14 | 15 | from terminusdb_client.woqlschema import DocumentTemplate, EnumTemplate 16 | 17 | 18 | class Address(DocumentTemplate): 19 | """Home address of Employee 20 | 21 | Attributes 22 | ---------- 23 | postcode : str 24 | Postal Code 25 | street : str 26 | Street name. 27 | street_num : int 28 | Street number. 29 | town : str 30 | Town name. 
31 | """ 32 | 33 | _subdocument = [] 34 | postcode: str 35 | street: str 36 | street_num: int 37 | town: str 38 | 39 | 40 | class Employee(DocumentTemplate): 41 | """Employee of the Company""" 42 | 43 | address: "Address" 44 | contact_number: str 45 | manager: Optional["Employee"] 46 | name: str 47 | team: "Team" 48 | title: str 49 | 50 | 51 | class Team(EnumTemplate): 52 | marketing = () 53 | it = () 54 | -------------------------------------------------------------------------------- /getting_started/python-client/update_data.py: -------------------------------------------------------------------------------- 1 | from terminusdb_client import Client 2 | from terminusdb_client.woqlschema import WOQLSchema 3 | 4 | # For Terminus X, use the following 5 | # client = Client("https://cloud.terminusdb.com//") 6 | # client.connect(db="demo_workshop", team="", use_token=True) 7 | 8 | client = Client("http://127.0.0.1:6363/") 9 | client.connect(db="getting_started") 10 | 11 | data_schema = WOQLSchema() 12 | data_schema.from_db(client) 13 | 14 | # Update a document 15 | 16 | destiny_raw = client.get_document("Employee/001") 17 | destiny = data_schema.import_objects(destiny_raw) 18 | 19 | destiny.address.postcode = "PH12 3RP" 20 | destiny.address.street = "Lairg Road" 21 | destiny.address.street_num = 73 22 | destiny.address.town = "Newbigging" 23 | 24 | client.update_document(destiny, commit_msg="Update Destiny") 25 | 26 | # Linking a new document to an old document 27 | 28 | Employee = data_schema.object.get("Employee") 29 | Address = data_schema.object.get("Address") 30 | Team = data_schema.object.get("Team") 31 | 32 | ethan_address = Address( 33 | postcode="IV27 2TG", street="Shore Street", street_num=84, town="Stoer" 34 | ) 35 | 36 | manager_raw = client.get_document("Employee/004") 37 | ethan_manager = data_schema.import_objects(manager_raw) 38 | 39 | ethan = Employee( 40 | _id="Employee/005", 41 | name="Ethan Abbott", 42 | title="Backend Developer", 43 | team=Team.it, 44 | contact_number="070 7796 8035", 45 | address=ethan_address, 46 | manager=ethan_manager, 47 | ) 48 | 49 | client.update_document(ethan, commit_msg="Adding Ethan") 50 | -------------------------------------------------------------------------------- /getting_started/python-client/woql_query.py: -------------------------------------------------------------------------------- 1 | import pprint as pp 2 | 3 | from terminusdb_client import Client 4 | from terminusdb_client import WOQLQuery as wq 5 | 6 | # For Terminus X, use the following 7 | # client = Client("https://cloud.terminusdb.com//") 8 | # client.connect(db="demo_workshop", team="", use_token=True) 9 | 10 | client = Client("http://127.0.0.1:6363/") 11 | client.connect(db="getting_started") 12 | 13 | ### Uncomment to see all triples ## 14 | # pp.pprint(wq().star().execute(client)) 15 | 16 | darci = wq().string("Darci Prosser") 17 | 18 | query = wq().triple("v:person", "@schema:name", darci) + wq().triple( 19 | "v:person", "@schema:contact_number", "v:phone_num" 20 | ) 21 | 22 | result = query.execute(client) 23 | 24 | if result["bindings"]: 25 | print("Darci's contact number:") 26 | print(result["bindings"][0]["phone_num"]["@value"]) 27 | else: 28 | print("Cannot find result.") 29 | 30 | print("=== Darci is on holiday ===") 31 | 32 | query = ( 33 | wq().triple("v:person", "@schema:name", darci) 34 | + wq().triple("v:person", "@schema:manager", "v:manager") 35 | + wq().triple("v:manager", "@schema:contact_number", "v:phone_num") 36 | + wq().triple("v:manager", 
"@schema:name", "v:manager_name") 37 | ) 38 | 39 | result = query.execute(client) 40 | 41 | if result["bindings"]: 42 | print("Manager's name:") 43 | print(result["bindings"][0]["manager_name"]["@value"]) 44 | print("Manager's contact number:") 45 | print(result["bindings"][0]["phone_num"]["@value"]) 46 | else: 47 | print("Cannot find result.") 48 | -------------------------------------------------------------------------------- /netflix/netflix.py: -------------------------------------------------------------------------------- 1 | from typing import Set, Optional 2 | from terminusdb_client import Client 3 | from terminusdb_client.woqlschema.woql_schema import ( 4 | DocumentTemplate, 5 | EnumTemplate, 6 | WOQLSchema, 7 | LexicalKey, 8 | ) 9 | 10 | import pandas as pd 11 | from tqdm import tqdm 12 | import tempfile 13 | import random 14 | 15 | schema = WOQLSchema() 16 | 17 | class Content(DocumentTemplate): 18 | _schema = schema 19 | title: str 20 | type_of: "Content_Type" 21 | director: Optional[str] 22 | cast: Optional[str] 23 | country_of_origin: Optional[str] 24 | release_year: int 25 | rating: "Rating" 26 | duration: str 27 | listed_in: str 28 | description: str 29 | date_added: Optional[str] 30 | 31 | class User(DocumentTemplate): 32 | _schema = schema 33 | _key = LexicalKey(keys="id") 34 | _base = "User" 35 | id : str 36 | watched_contents: Set["Content"] 37 | 38 | class Content_Type(EnumTemplate): 39 | _schema = schema 40 | TV_Show = "TV Show" 41 | Movie = "Movie" 42 | 43 | class Rating(EnumTemplate): 44 | _schema = schema 45 | TV_MA = "TV-MA" 46 | R = () 47 | PG_13 = "PG-13" 48 | TV_14 = "TV-14" 49 | TV_PG = "TV-PG" 50 | NR = () 51 | TV_G = "TV-G" 52 | TV_Y = "TV-Y" 53 | TV_Y7 = "TV-Y7" 54 | TY = () 55 | TY_7 = "TY-7" 56 | PG = () 57 | G = () 58 | NC_17 = "NC-17" 59 | TV_Y7_FV = "TV-Y7-FV" 60 | UR = () 61 | 62 | def insert_content_data(client, url): 63 | df = pd.read_csv(url, chunksize=1000) 64 | for chunk in tqdm(df, desc='Transfering data'): 65 | csv = tempfile.NamedTemporaryFile() 66 | chunk.to_csv(csv) 67 | netflix_content = read_data(csv.name) 68 | client.insert_document(netflix_content, commit_msg="Adding all Netflix content") 69 | 70 | # We will generate and insert random 50 users using following function 71 | def insert_user_data(contents): 72 | users = [] 73 | for i in range(0,50): 74 | randomlist = random.sample(range(1, 50), i%10) 75 | watched_contents = set() 76 | for index in randomlist: 77 | watched_contents.add(schema.import_objects(contents[index])) 78 | 79 | users.append(User(id=str(i), watched_contents = watched_contents)) 80 | 81 | client.insert_document(users, commit_msg="Adding users") 82 | 83 | def read_data(csv): 84 | records = [] 85 | df = pd.read_csv(csv) 86 | for index, row in df.iterrows(): 87 | 88 | type_of = row['type'].replace(" ", "_") 89 | rating = "NR" if pd.isna(row['rating']) else row['rating'].replace("-", "_") 90 | 91 | records.append(Content(title=row['title'], type_of=Content_Type[type_of], director=str(row['director']), cast=str(row['cast']), country=str(row['country']), release_year=row['release_year'], rating=Rating[rating], duration=row['duration'], listed_in=row['listed_in'], description=row['description'], date_added=str(row['date_added']))) 92 | 93 | return records 94 | 95 | def query_documents(client): 96 | documents = client.get_all_documents() 97 | 98 | # documents comes back as a iterable that can be convert into a list 99 | print("\nAll documents\n") 100 | print(list(documents)) 101 | 102 | matches = client.query_document({"@type" 
: "Content", 103 | "type_of": "Movie", 104 | "release_year": "2020"}) 105 | 106 | # matches comes back as a iterable that can be convert into a list 107 | print("\nDocuments matches\n") 108 | print(list(matches)) 109 | 110 | # If you want to get a specific number of records, just add count=number when calling both functions: 111 | documents = client.get_all_documents(count=5) 112 | matches = client.query_document({"@type" : "Content", 113 | "type_of": "Movie", 114 | "release_year": "2020"}, count=5) 115 | 116 | def branches(client): 117 | #You can create a new branch by calling the create_branch method 118 | client.create_branch("some_branch", empty=False) 119 | 120 | # When empty is set to False, a new branch will be created, 121 | # containing the schema and data inserted into the database previously. 122 | # If set to True, an empty branch will be created. 123 | client.create_branch("some_branch1", empty=True) 124 | 125 | # You can delete a branch by calling the delete and passing the name of the branch as parameter. 126 | client.delete_branch("some_branch") 127 | 128 | # You can switch to a different branch by setting the branch variable 129 | client.branch = "some_branch1" 130 | 131 | # List all branches 132 | branches = client.get_all_branches() 133 | 134 | print(branches) 135 | 136 | def time_travel(client): 137 | # Reset the current branch HEAD to the specified commit path. 138 | # eg: 139 | # client.reset('hvatquoq9531k1u223v4azcdr1bfyde') 140 | 141 | # Squash the current branch HEAD into a commit 142 | commit_res = client.squash('This is a squash commit message!',"username") 143 | # reset to the squash commit 144 | client.reset(commit_res, use_path=True) 145 | 146 | # Rebase the current branch onto the specified remote branch 147 | client.rebase("main") 148 | 149 | 150 | if __name__ == "__main__": 151 | db_id = "Netflix" 152 | url = "netflix.csv" 153 | 154 | # TODO: change the team name 155 | team = "" 156 | client = Client("https://cloud.terminusdb.com/"+team) 157 | 158 | try: 159 | client.connect(team=team, use_token=True) 160 | client.create_database(db_id, label = "Netflix Graph", description = "Create a graph with Netflix data") 161 | except Exception: 162 | client.connect(db=db_id, team=team, use_token=True) 163 | 164 | schema.commit(client, commit_msg = "Adding Netflix Schema") 165 | 166 | insert_content_data(client, url) 167 | 168 | contents = client.query_document({"@type" : "Content"}, count=50) 169 | 170 | insert_user_data(list(contents)) 171 | 172 | print("\nQUERING DOCUMENTS\n") 173 | query_documents(client) 174 | 175 | print("\nBranches\n") 176 | branches(client) 177 | 178 | # Get the whole commit history: 179 | commit_history = client.get_commit_history() 180 | print("\nCOMMIT HISTORY\n",commit_history) 181 | 182 | # Manipulate the commit history 183 | print("\nTime Travel\n") 184 | time_travel(client) 185 | -------------------------------------------------------------------------------- /nobel_prize/README.md: -------------------------------------------------------------------------------- 1 | # Nobel Prize Winners (1900 - 2020) 2 | 3 | In this tutorial, we will pull the Nobel Prize winners data from a [CSV file](https://www.kaggle.com/rishidamarla/nobel-prize-winners-19002020) and put it in TerminusDB/TerminusCMS. 4 | 5 | ## Check your endpoint is running 6 | 7 | You can download the TerminusDB docker image to work locally (recommended to use [Bootstrap here](https://github.com/terminusdb/terminusdb-bootstrap)) or you can connect to TerminusCMS. 
If you are using docker image, make sure that your TerminusDB container is running at localhost (https://127.0.0.1). If you are using TerminusCMS, get the information of the endpoint, team, and API token ready (it should be accessible in the [TerminusCMS dashboard](https://dashboard.terminusdb.com/) under profile. 8 | 9 | ## Clone this repository 10 | 11 | Clone this repository 12 | 13 | ``` 14 | $ git clone git@github.com:terminusdb/terminusdb-tutorials.git 15 | ``` 16 | 17 | Go into the `nobel_prize` directory 18 | 19 | ``` 20 | $ cd nobel_prize/ 21 | ``` 22 | 23 | ## Install TerminusDB target 24 | 25 | It is highly recommended to install different singer.io tap and targets in different python environments. Install `target-terminusdb` from PyPI in a venv environment: 26 | 27 | ``` 28 | $ python3 -m venv ~/.virtualenvs/target-terminusdb 29 | $ source ~/.virtualenvs/target-terminusdb/bin/activate 30 | $ python3 -m pip install target-terminusdb pandas tqdm tempfile 31 | ``` 32 | 33 | `pandas`, `tqdm` and `tempfile` are also required. 34 | 35 | ## Start the project 36 | 37 | In the project directory start a TerminusDB project: 38 | 39 | ``` 40 | $ tdbpy startproject 41 | ``` 42 | 43 | You will be prompt with a few questions. Pick a project name and if you are running the localhost server with default port you can just press Enter. You have to provide the endpoint and other login information if you are using TerminusCMS or otherwise. 44 | 45 | This is what I did: 46 | 47 | ```bash 48 | Please enter a project name (this will also be the database name): nobel_prize 49 | Please enter a endpoint location (press enter to use localhost default) [http://127.0.0.1:6363/]: 50 | config.json and schema.py created, please customize them to start your project. 51 | ``` 52 | 53 | ## Reading CSV 54 | 55 | ```python 56 | import singer 57 | import pandas as pd 58 | from tqdm import tqdm 59 | import tempfile 60 | 61 | schema = { 62 | 'properties': { 63 | 'firstname': {'type': 'string'}, 64 | 'surname': {'type': 'string'}, 65 | 'born': {'type': 'string'}, 66 | 'died': {'type': 'string'}, 67 | 'bornCountry': {'type': 'string'}, 68 | 'bornCountryCode': {'type': 'string'}, 69 | 'bornCity': {'type': 'string'}, 70 | 'diedCountry': {'type': 'string'}, 71 | 'diedCountryCode': {'type': 'string'}, 72 | 'diedCity': {'type': 'string'}, 73 | 'gender': {'type': 'string'}, 74 | 'year': {'type': 'string'}, 75 | 'category': {'type': 'string'}, 76 | 'overallMotivation': {'type': 'string'}, 77 | 'share': {'type': 'string'}, 78 | 'motivation': {'type': 'string'}, 79 | 'name': {'type': 'string'}, 80 | 'city': {'type': 'string'}, 81 | 'country': {'type': 'string'}, 82 | } 83 | } 84 | 85 | def read_data(url): 86 | df = pd.read_csv(url, encoding='latin1', chunksize=500) 87 | singer.write_schema('nobel_prize', schema, 'firstname') 88 | for chunk in tqdm(df, desc='Transfering data'): 89 | csv = tempfile.NamedTemporaryFile() 90 | chunk.to_csv(csv) 91 | write_data(csv.name) 92 | 93 | def write_data(csv): 94 | df = pd.read_csv(csv) 95 | selection = df.fillna('') 96 | for index, row in selection.iterrows(): 97 | singer.write_records('nobel_prize', [{'firstname': row['firstname'], 'surname': row['surname'], 'born': row['born'], 'died': row['died'], 'bornCountry': row['bornCountry'], 'bornCountryCode': row['bornCountryCode'], 'bornCity': row['bornCity'], 'diedCountry': row['diedCountry'], 'diedCountryCode': row['diedCountryCode'], 'diedCity': row['diedCity'], 'gender': row['gender'], 'year': str(row['year']), 'category': row['category'], 
'overallMotivation': row['overallMotivation'], 'share': str(row['share']), 'motivation': row['motivation'], 'name': row['name'], 'city': row['city'], 'country': row['country']}]) 98 | 99 | url='nobel_prize.csv' 100 | read_data(url) 101 | ``` 102 | 103 | We'll use `pandas` to read the CSV file, `tqdm` to put a progress bar and `tempfile` to create temporary CSV files, required as we're reading data chunks. This is a small dataset and reading data in chunks is optional. 104 | 105 | We create the schema of the data we'll be writing to the stream formatted as a JSON Schema. 106 | 107 | Records in the dataset are read by calling `read_data` function and written to the stream by calling `write_data`. 108 | 109 | ## Import the Nobel Prize winners data into TerminusDB/ TerminusCMS 110 | 111 | ``` 112 | $ python nobel_prize.py | target-terminusdb -c config.json 113 | ``` 114 | 115 | ## Verify the data is in TerminusDB/TerminusCMS 116 | 117 | Check if the documents are looking good by using the following command to get 10 documents to inspect: 118 | 119 | ``` 120 | $ tdbpy alldocs -h 10 121 | ``` 122 | -------------------------------------------------------------------------------- /nobel_prize/config.json: -------------------------------------------------------------------------------- 1 | { 2 | "database": "nobel_prize", 3 | "endpoint": "http://127.0.0.1:6363/", 4 | "team": "admin" 5 | } -------------------------------------------------------------------------------- /nobel_prize/nobel_prize.csv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/terminusdb/terminusdb-tutorials/4de64c04cbfb42d31c7a4c3c509f85384c1a6a28/nobel_prize/nobel_prize.csv -------------------------------------------------------------------------------- /nobel_prize/nobel_prize.py: -------------------------------------------------------------------------------- 1 | import singer 2 | import pandas as pd 3 | from tqdm import tqdm 4 | import tempfile 5 | 6 | schema = { 7 | 'properties': { 8 | 'firstname': {'type': 'string'}, 9 | 'surname': {'type': 'string'}, 10 | 'born': {'type': 'string'}, 11 | 'died': {'type': 'string'}, 12 | 'bornCountry': {'type': 'string'}, 13 | 'bornCountryCode': {'type': 'string'}, 14 | 'bornCity': {'type': 'string'}, 15 | 'diedCountry': {'type': 'string'}, 16 | 'diedCountryCode': {'type': 'string'}, 17 | 'diedCity': {'type': 'string'}, 18 | 'gender': {'type': 'string'}, 19 | 'year': {'type': 'string'}, 20 | 'category': {'type': 'string'}, 21 | 'overallMotivation': {'type': 'string'}, 22 | 'share': {'type': 'string'}, 23 | 'motivation': {'type': 'string'}, 24 | 'name': {'type': 'string'}, 25 | 'city': {'type': 'string'}, 26 | 'country': {'type': 'string'}, 27 | } 28 | } 29 | 30 | def read_data(url): 31 | df = pd.read_csv(url, encoding='latin1', chunksize=500) 32 | singer.write_schema('nobel_prize', schema, 'firstname') 33 | for chunk in tqdm(df, desc='Transfering data'): 34 | csv = tempfile.NamedTemporaryFile() 35 | chunk.to_csv(csv) 36 | write_data(csv.name) 37 | 38 | def write_data(csv): 39 | df = pd.read_csv(csv) 40 | selection = df.fillna('') 41 | for index, row in selection.iterrows(): 42 | singer.write_records('nobel_prize', [{'firstname': row['firstname'], 'surname': row['surname'], 'born': row['born'], 'died': row['died'], 'bornCountry': row['bornCountry'], 'bornCountryCode': row['bornCountryCode'], 'bornCity': row['bornCity'], 'diedCountry': row['diedCountry'], 'diedCountryCode': row['diedCountryCode'], 'diedCity': row['diedCity'], 
'gender': row['gender'], 'year': str(row['year']), 'category': row['category'], 'overallMotivation': row['overallMotivation'], 'share': str(row['share']), 'motivation': row['motivation'], 'name': row['name'], 'city': row['city'], 'country': row['country']}]) 43 | 44 | url='nobel_prize.csv' 45 | read_data(url) -------------------------------------------------------------------------------- /nobel_prize/schema.py: -------------------------------------------------------------------------------- 1 | #### 2 | # This is the script for storing the schema of your TerminusDB 3 | # database for your project. 4 | # Use 'terminusdb commit' to commit changes to the database and 5 | # use 'terminusdb sync' to change this file according to 6 | # the exsisting database schema 7 | #### 8 | from terminusdb_client.woqlschema import DocumentTemplate 9 | 10 | 11 | class nobel_prize(DocumentTemplate): 12 | born: str 13 | bornCity: str 14 | bornCountry: str 15 | bornCountryCode: str 16 | category: str 17 | city: str 18 | country: str 19 | died: str 20 | diedCity: str 21 | diedCountry: str 22 | diedCountryCode: str 23 | firstname: str 24 | gender: str 25 | motivation: str 26 | name: str 27 | overallMotivation: str 28 | share: str 29 | surname: str 30 | year: str 31 | -------------------------------------------------------------------------------- /nuclear/Makefile: -------------------------------------------------------------------------------- 1 | 2 | .PHONY: nuclear 3 | nuclear: 4 | cat nuclear.md | sed -n '/^```python/,/^```/ p' | sed '/^```python/ d' | sed '/^```/ d' > nuclear.py 5 | TERMINUSDB_TEAM=${TERMINUSDB_TEAM} TERMINUSDB_ACCESS_TOKEN=${TERMINUSDB_ACCESS_TOKEN} python3 nuclear.py 6 | 7 | .PHONY: enrichment 8 | enrichment: 9 | cat enrichment.md | sed -n '/^```python/,/^```/ p' | sed '/^```python/ d' | sed '/^```/ d' > enrichment.py 10 | TERMINUSDB_TEAM=${TERMINUSDB_TEAM} TERMINUSDB_ACCESS_TOKEN=${TERMINUSDB_ACCESS_TOKEN} python3 enrichment.py 11 | 12 | 13 | .PHONY: scraping 14 | scraping: 15 | cat scraping.md | sed -n '/^```python/,/^```/ p' | sed '/^```python/ d' | sed '/^```/ d' > scraping.py 16 | TERMINUSDB_TEAM=${TERMINUSDB_TEAM} TERMINUSDB_ACCESS_TOKEN=${TERMINUSDB_ACCESS_TOKEN} python3 scraping.py 17 | -------------------------------------------------------------------------------- /nuclear/README.md: -------------------------------------------------------------------------------- 1 | # Nuclear Reactor Data Product 2 | 3 | This is a multipart tutorial which will help to teach you the chops 4 | you need to build real world data products. 5 | 6 | To run the full code for the tutorial, you need to follow the instructions below. 7 | 8 | ## Getting the Python Client 9 | 10 | First you should go and see how to get installed with the [Python Client](https://terminusdb.com/docs/terminusdb/install-client/install-python-client) in our documentation. 11 | 12 | ## Running the Tutorial 13 | 14 | This tutorial series is structured as literate programming 15 | tutorials. This means you can either copy-paste the snippets and run 16 | them yourself or you can clone the repository and run the files 17 | directly using our short script in our `make`-file. 18 | 19 | To clone the repository and run it, however, first you'll need a [key from 20 | TerminusCMS](https://terminusdb.com/docs/terminuscms/get-api-key). 
21 | 22 | You will also need to copy your *team name* into the environment variable 23 | 24 | ```shell 25 | $ git clone https://github.com/terminusdb/terminusdb-tutorials/ 26 | $ cd terminusdb-tutorials/nuclear 27 | $ export TERMINUSDB_TEAM="TerminatorsX" 28 | $ export TERMINUSDB_ACCESS_TOKEN="eyJhbG..." 29 | $ make nuclear 30 | $ make enrichment 31 | $ make scraping 32 | ``` 33 | 34 | # Nuclear Reactor Data Product (Part 1) 35 | 36 | [Nuclear](./nuclear.md) 37 | 38 | # Data Product Enrichment (Part 2) 39 | 40 | [Enrichment](./enrichment.md) 41 | 42 | # Scraping Data (Part 3) 43 | 44 | [Scraping](./scraping.md) 45 | -------------------------------------------------------------------------------- /nuclear/enrichment.md: -------------------------------------------------------------------------------- 1 | # Data Product Enrichment 2 | 3 | In this tutorial we will look at data product enrichment - adding new 4 | or derived information to an existing data product. 5 | 6 | ## Preliminaries 7 | 8 | Of course we need the client so we can talk to the server... 9 | 10 | ```python 11 | #!/usr/bin/python3 12 | from terminusdb_client import Client 13 | import os 14 | import json 15 | import urllib.parse 16 | 17 | team = os.environ['TERMINUSDB_TEAM'] 18 | team_quoted = urllib.parse.quote(team) 19 | client = Client(f"https://cloud.terminusdb.com/{team_quoted}/") 20 | client.connect(db="nuclear", team=team, use_token=True) 21 | ``` 22 | 23 | ## Calculated Values 24 | 25 | Our power plants have information about their capacity, and they have 26 | information about their annual production, but they don't give us 27 | their capacity factor. 28 | 29 | We can actually calculate this with a little work! 30 | 31 | First we'll add a bit of data verification code - just so we know that 32 | our calculations are working with the right units when we make manipulations. 33 | 34 | ```python 35 | def quantity_of(quantity,unit): 36 | if quantity['unit'] == unit: 37 | return quantity['quantity'] 38 | else: 39 | raise Exception(f"Need quantity in {unit}") 40 | ``` 41 | 42 | With this we can extract the values safely, knowing they are what we 43 | think they are. 44 | 45 | ## Safety first 46 | 47 | But we are modifying the core database with enriched data. This might 48 | not be a good idea. It's very possible that we mess something up in 49 | our calculations, and people might be using the production database. 50 | 51 | We are also deriving data - and sometimes when you derive data its a 52 | good idea to keep it segregated from the underived data. This way you 53 | know what is "ground truth" and you can re-run your processes to 54 | obtain the derived facts later. 55 | 56 | Luckily, we have an easy way of doing this: *branching*. 57 | 58 | ```python 59 | branch = "capacity_factors" 60 | try: 61 | client.delete_branch(branch) 62 | except Exception as E: 63 | print(E.error_obj) 64 | print("Branch did not yet exist") 65 | 66 | client.create_branch(branch) 67 | client.branch = branch 68 | ``` 69 | 70 | We have created a safe space in which to do our experiments. If we 71 | mess it up, we can just delete the branch. 72 | 73 | Next, we will actually do the calculation. We loop over every power 74 | plant in the database, we loop over each year for which we have 75 | calculated data, then we enrich the document with the calculated 76 | capacity factor for a given year, and return it to the database! 
77 | 78 | ```python 79 | 80 | plants = client.query_document({ '@type' : 'NuclearPowerPlant' }) 81 | for plant in plants: 82 | capacity = quantity_of(plant['capacity'],'Unit/MWe') 83 | print(f"capacity {capacity}") 84 | if 'output' in plant: 85 | output = plant['output'] 86 | capacity_factors = [] 87 | for output_year in output: 88 | year = output_year['year'] 89 | GWh = quantity_of(output_year['output'],'Unit/GWh') 90 | if not(GWh == 0): 91 | capacity_factor = GWh * 1000 / (capacity * 24 * 365) 92 | capacity_factors.append({ '@type' : 'AnnualCapacityFactor', 93 | 'year' : year, 94 | 'capacity_factor' : capacity_factor }) 95 | plant['capacity_factor'] = capacity_factors 96 | try: 97 | result = client.update_document(plant) 98 | plant_id = plant['@id'] 99 | print(f"Updated... {plant_id}") 100 | except Exception as E: 101 | if hasattr(E,'error_obj'): 102 | print(json.dumps(E.error_obj, indent=4, sort_keys=True)) 103 | else: 104 | print("Unknown error") 105 | print(E) 106 | 107 | ``` 108 | 109 | Now that we've seen some enrichment based on inference from data which 110 | we already have, you might be interested in how to further improve our 111 | data in [Part 3: Scraping Data](./scraping.md). 112 | -------------------------------------------------------------------------------- /nuclear/geo_schema.json: -------------------------------------------------------------------------------- 1 | [ 2 | { "@type" : "Class", 3 | "@id" : "GeoJSON", 4 | "@abstract" : [], 5 | "bbox" : { "@class" : "xsd:double", 6 | "@dimensions" : 1, 7 | "@type" : "Array" } 8 | }, 9 | { "@type" : "Class", 10 | "@id" : "Geometry", 11 | "@inherits": "GeoJSON", 12 | "@abstract" : [], 13 | "@unfoldable" : [] 14 | }, 15 | { 16 | "@id": "Point", 17 | "@inherits": "Geometry", 18 | "@type": "Class", 19 | "coordinates": { 20 | "@class": "xsd:double", 21 | "@dimensions": 1, 22 | "@type": "Array" 23 | }, 24 | "type": "Point_Type" 25 | }, 26 | { 27 | "@id": "Point_Type", 28 | "@type": "Enum", 29 | "@value": [ 30 | "Point" 31 | ] 32 | }, 33 | { "@type" : "Class", 34 | "@id" : "Country", 35 | "@key" : { "@type" : "Lexical", 36 | "@fields" : ["name"] }, 37 | "name" : "xsd:string" 38 | } 39 | 40 | ] 41 | -------------------------------------------------------------------------------- /nuclear/nuclear_schema.json: -------------------------------------------------------------------------------- 1 | [ 2 | { "@type" : "Class", 3 | "@id" : "AnnualOutput", 4 | "@documentation" : { 5 | "@comment" : "Annual Total Output of a Nuclear Power Plant", 6 | "@properties" : { "year" : "Year of annual recorded data.", 7 | "output" : "Total energy output." 8 | } 9 | }, 10 | 11 | "@key" : { "@type" : "Lexical", 12 | "@fields" : ["year"] }, 13 | "@subdocument" : [], 14 | "year" : "xsd:gYear", 15 | "output" : "Quantity" }, 16 | 17 | { "@type" : "Class", 18 | "@id" : "AnnualCapacityFactor", 19 | "@documentation" : { 20 | "@comment" : "Annual Capacity Factor of a Nuclear Power Plant", 21 | "@properties" : { "year" : "Year of annual recorded data.", 22 | "capacity_factor" : "Fraction of maximum output." 
23 | } 24 | }, 25 | "@key" : { "@type" : "Lexical", 26 | "@fields" : ["year"] }, 27 | "@subdocument" : [], 28 | "year" : "xsd:gYear", 29 | "capacity_factor" : "xsd:decimal" }, 30 | 31 | { "@type" : "Class", 32 | "@id" : "NuclearPowerPlant", 33 | "@documentation" : { 34 | "@comment" : "A Nuclear Power Plant", 35 | "@properties" : { "name" : "The name of the plant.", 36 | "location" : "The geo-location of the plant.", 37 | "country" : "A link to the country in which the plant exists.", 38 | "capacity" : "Maximum power capacity.", 39 | "capacity_factor" : "Fraction of total capacity in a given year.", 40 | "url" : "URL of the power plant.", 41 | "owner" : "Owner of the power plant.", 42 | "gppd_idnr" : "Global Power Plant Database ID Number.", 43 | "commissioning_year" : "Year of commissioning of the plant.", 44 | "reactors" : "Reactors that are present at the power plant." 45 | } 46 | }, 47 | "@metadata" : { 48 | "order_by" : [ 49 | "name", 50 | "country", 51 | "capacity", 52 | "owner", 53 | "commissioning_year", 54 | "location", 55 | "capacity_factor", 56 | "output", 57 | "url", 58 | "gppd_idnr", 59 | "reactors" 60 | ] 61 | }, 62 | "@key" : { "@type" : "Lexical", 63 | "@fields" : ["name"] }, 64 | "name" : "xsd:string", 65 | "location" : "Point", 66 | "country" : "Country", 67 | "capacity" : "Quantity", 68 | "capacity_factor" : { "@type" : "Set", 69 | "@class" : "AnnualCapacityFactor" }, 70 | "reactors" : { "@type" : "Set", 71 | "@class" : "Reactor" }, 72 | "gppd_idnr" : "xsd:string", 73 | "commissioning_year" : { "@type" : "Optional", 74 | "@class" : "xsd:gYear"}, 75 | "owner" : { "@type" : "Optional", 76 | "@class" : "xsd:string" }, 77 | "url" : "xsd:string", 78 | "output" : { "@type" : "Set", 79 | "@class" : "AnnualOutput" } 80 | }, 81 | 82 | { "@type" : "Enum", 83 | "@id" : "ReactorType", 84 | "@value" : [ 85 | "BWR", 86 | "PWR", 87 | "HTGR", 88 | "AGR", 89 | "CANDU", 90 | "MSR", 91 | "SFR", 92 | "LFR" 93 | ] 94 | }, 95 | 96 | { "@type" : "Class", 97 | "@id" : "Reactor", 98 | "@documentation" : { 99 | "@comment" : "A Nuclear Reactor", 100 | "@properties" : { "name" : "The name of the reactor.", 101 | "type" : "The type of the reactor.", 102 | "moderator" : "What substance is used as a moderator.", 103 | "coolant" : "What substance is used as a coolant." 104 | } 105 | }, 106 | "@abstract" : [], 107 | "name" : "xsd:string", 108 | "type" : { "@type" : "Optional", 109 | "@class" : "ReactorType"}, 110 | "capacity" : { "@type" : "Optional", 111 | "@class" : "Quantity" }, 112 | "moderator" : { "@type" : "Optional", 113 | "@class" : "Substance" }, 114 | "coolant" : { "@type" : "Optional", 115 | "@class" : "Substance" } 116 | }, 117 | 118 | { "@type" : "Class", 119 | "@id" : "PowerReactor", 120 | "@inherits" : ["Reactor"], 121 | "@key" : { "@type" : "Lexical", 122 | "@fields" : ["name"] } 123 | }, 124 | 125 | { "@type" : "Class", 126 | "@id" : "ResearchReactor", 127 | "@inherits" : ["Reactor"], 128 | "@key" : { "@type" : "Lexical", 129 | "@fields" : ["name"] } 130 | }, 131 | 132 | { "@type" : "Class", 133 | "@id" : "ExperimentalReactor", 134 | "@inherits" : ["Reactor"], 135 | "@key" : { "@type" : "Lexical", 136 | "@fields" : ["name"] } 137 | } 138 | 139 | ] 140 | -------------------------------------------------------------------------------- /nuclear/source.json: -------------------------------------------------------------------------------- 1 | [ 2 | { "@type" : "Class", 3 | "@id" : "SourcedQuantity", 4 | "@documentation" : { 5 | "@comment" : "A data point which has a recorded source", 6 |
"@properties" : { "source" : "The source from which the quantity was found."} 7 | }, 8 | "@inherits" : "Quantity", 9 | "@key" : { "@type" : "Lexical", 10 | "@fields" : ["unit", "quantity"] }, 11 | "@subdocument" : [], 12 | "source" : "Source" }, 13 | 14 | { "@type" : "Class", 15 | "@id" : "Source", 16 | "@documentation" : { 17 | "@comment" : "The Source of some data." 18 | } 19 | }, 20 | 21 | { "@type" : "Class", 22 | "@id" : "ScrapedSource", 23 | "@documentation" : { 24 | "@comment" : "Source of data scraped from a URL on the internet.", 25 | "@properties" : { "name" : "Name of the scraped source.", 26 | "url" : "The URL of the scraped resource.", 27 | "scraped_at" : "Date time at which the resource was scraped." 28 | } 29 | }, 30 | "@key" : { "@type" : "Lexical", 31 | "@fields" : ["url", "scraped_at"] }, 32 | "@inherits" : "Source", 33 | "name" : "xsd:string", 34 | "url" : "xsd:string", 35 | "scraped_at" : "xsd:dateTime" } 36 | ] 37 | -------------------------------------------------------------------------------- /nuclear/unit_schema.json: -------------------------------------------------------------------------------- 1 | [ 2 | { "@type" : "Class", 3 | "@id" : "Quantity", 4 | "@subdocument" : [], 5 | "@key" : { "@type" : "Lexical", 6 | "@fields" : ["unit", "quantity"] }, 7 | "unit" : "Unit", 8 | "quantity" : "xsd:decimal" }, 9 | 10 | { "@type" : "Class", 11 | "@id" : "Unit", 12 | "@key" : { "@type" : "Lexical", 13 | "@fields" : ["symbol"] }, 14 | "symbol" : "xsd:string", 15 | "dimension" : "Dimension", 16 | "name" : "xsd:string", 17 | "plural" : { "@type" : "Optional", 18 | "@class" : "xsd:string" }, 19 | "alternative_name" : { "@type" : "Set", 20 | "@class" : "xsd:string"}, 21 | "derived_from" : { "@type" : "Optional", 22 | "@class" : "xsd:string" }}, 23 | 24 | { "@type" : "Enum", 25 | "@id" : "Dimension", 26 | "@value" : [ 27 | "currency", 28 | "time", 29 | "length", 30 | "mass", 31 | "area", 32 | "space", 33 | "temperature", 34 | "energy", 35 | "power", 36 | "force", 37 | "torque", 38 | "speed", 39 | "rotational_speed", 40 | "acceleration", 41 | "charge", 42 | "electric_potential", 43 | "electric_current", 44 | "electric_resistance", 45 | "momentum", 46 | "angular_momentum", 47 | "dimensionless" 48 | ] 49 | } 50 | 51 | ] 52 | -------------------------------------------------------------------------------- /nuclear/units.json: -------------------------------------------------------------------------------- 1 | [ 2 | { "@type" : "Unit", 3 | "symbol" : "m", 4 | "dimension" : "length", 5 | "name" : "meter" 6 | }, 7 | 8 | { "@type" : "Unit", 9 | "symbol" : "appm", 10 | "dimension" : "dimensionless", 11 | "name" : "Atom fraction in parts per million" 12 | }, 13 | 14 | { "@type" : "Unit", 15 | "symbol" : "W", 16 | "dimension" : "power", 17 | "derived_from" : "kg⋅m²⋅s⁻³", 18 | "name" : "Watts" 19 | }, 20 | 21 | { "@type" : "Unit", 22 | "symbol" : "We", 23 | "dimension" : "power", 24 | "derived_from" : "W", 25 | "name" : "Watt electric" 26 | }, 27 | 28 | { "@type" : "Unit", 29 | "symbol" : "Wt", 30 | "dimension" : "power", 31 | "derived_from" : "W", 32 | "name" : "Watt thermal" 33 | }, 34 | 35 | { "@type" : "Unit", 36 | "symbol" : "MWe", 37 | "dimension" : "power", 38 | "derived_from" : "10⁶⋅We", 39 | "name" : "Megawatts electric" 40 | }, 41 | 42 | { "@type" : "Unit", 43 | "symbol" : "MWt", 44 | "dimension" : "power", 45 | "derived_from" : "10⁶⋅Wt", 46 | "name" : "Megawatts thermal" 47 | }, 48 | 49 | { "@type" : "Unit", 50 | "symbol" : "GWh", 51 | "dimension" : "energy", 52 | "derived_from" 
: "3.6⋅10¹²⋅J", 53 | "name" : "Gigawatt hours" 54 | }, 55 | 56 | { "@type" : "Unit", 57 | "symbol" : "J", 58 | "dimension" : "energy", 59 | "derived_from" : "kg⋅m²⋅s⁻²", 60 | "name" : "Joule" 61 | }, 62 | 63 | { "@type" : "Unit", 64 | "symbol" : "$", 65 | "dimension" : "currency", 66 | "name" : "US Dollar" 67 | }, 68 | 69 | { "@type" : "Unit", 70 | "symbol" : "£", 71 | "dimension" : "currency", 72 | "name" : "Pound Sterling" 73 | }, 74 | 75 | { "@type" : "Unit", 76 | "symbol" : "€", 77 | "dimension" : "currency", 78 | "name" : "Euro" 79 | }, 80 | 81 | { "@type" : "Unit", 82 | "symbol" : "m²", 83 | "dimension" : "area", 84 | "derived_from" : "m²", 85 | "name" : "Meters Squared" 86 | }, 87 | 88 | { "@type" : "Unit", 89 | "symbol" : "kg", 90 | "dimension" : "mass", 91 | "name" : "Kilogram" 92 | }, 93 | 94 | { "@type" : "Unit", 95 | "symbol" : "u", 96 | "dimension" : "mass", 97 | "name" : "Unified mass", 98 | "alternative_name" : ["Atomic Mass", "Dalton"] 99 | }, 100 | 101 | { "@type" : "Unit", 102 | "symbol" : "%", 103 | "dimension" : "dimensionless", 104 | "name" : "Percentage" 105 | }, 106 | 107 | { "@type" : "Unit", 108 | "symbol" : "V", 109 | "dimension" : "electric_potential", 110 | "derived_from" : "kg·m²·s⁻³·A⁻¹", 111 | "name" : "Volt" 112 | }, 113 | 114 | { "@type" : "Unit", 115 | "symbol" : "A", 116 | "dimension" : "electric_current", 117 | "name" : "Ampere" 118 | }, 119 | 120 | { "@type" : "Unit", 121 | "symbol" : "ohm", 122 | "dimension" : "electric_resistance", 123 | "alternative_name" : ["Ω"], 124 | "derived_from" : "V·A⁻¹", 125 | "name" : "Volt" 126 | }, 127 | 128 | { "@type" : "Unit", 129 | "symbol" : "rpm", 130 | "dimension" : "rotational_speed", 131 | "derived_from" : "min⁻¹", 132 | "name" : "Revolutions per Minute" 133 | }, 134 | 135 | { "@type" : "Unit", 136 | "symbol" : "s", 137 | "dimension" : "time", 138 | "name" : "Second" 139 | }, 140 | 141 | { "@type" : "Unit", 142 | "symbol" : "min", 143 | "dimension" : "time", 144 | "derived_from" : "60·s", 145 | "name" : "Minute" 146 | }, 147 | 148 | { "@type" : "Unit", 149 | "symbol" : "hr", 150 | "dimension" : "time", 151 | "derived_from" : "3600·s", 152 | "name" : "Hour" 153 | } 154 | 155 | ] 156 | -------------------------------------------------------------------------------- /python-ast/python-ast.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | from terminusdb_client import WOQLClient 4 | import json 5 | import ast 6 | import astunparse 7 | import urllib 8 | import sys 9 | import os 10 | 11 | def code_to_json(code): 12 | result = ast.parse(code) 13 | return ast_to_json(result) 14 | 15 | def json_to_code(json): 16 | result = json_to_ast(json) 17 | return ast.unparse(result) 18 | 19 | def ast_to_json(tree): 20 | if tree == None: 21 | return None 22 | elif isinstance(tree, list): 23 | res = [] 24 | for tr in tree: 25 | res.append(ast_to_json(tr)) 26 | return res 27 | elif isinstance(tree, str): 28 | return { '@type' : 'Value', 29 | 'string' : tree } 30 | elif isinstance(tree, int): 31 | return { '@type' : 'Value', 32 | 'integer' : tree } 33 | elif isinstance(tree, bool): 34 | return { '@type' : 'Value', 35 | 'boolean' : tree } 36 | elif isinstance(tree, float): 37 | return { '@type' : 'Value', 38 | 'float' : tree } 39 | else: 40 | cls = tree.__class__.__name__ 41 | if cls in ['Load', 'Store', 'Del']: 42 | return cls 43 | elif cls == 'Constant': 44 | const_obj = { '@type' : 'Constant', 45 | 'kind' : tree.kind } 46 | if tree.value == None: 47 | 
const_obj['value'] = { '@type' : 'Value', 48 | 'none' : [] } 49 | else: 50 | const_obj['value'] = ast_to_json(tree.value) 51 | return const_obj 52 | else: 53 | obj = {'@type' : cls} 54 | for field in tree._fields: 55 | obj[field] = ast_to_json(getattr(tree,field)) 56 | return obj 57 | 58 | def json_to_ast(tree): 59 | if isinstance(tree, dict): 60 | ty = tree['@type'] 61 | if ty == 'Value': 62 | # print(tree) 63 | if 'none' in tree: 64 | return None 65 | elif 'integer' in tree: 66 | return tree['integer'] 67 | elif 'boolean' in tree: 68 | return tree['boolean'] 69 | elif 'float' in tree: 70 | return tree['float'] 71 | elif 'string' in tree: 72 | return tree['string'] 73 | else: 74 | # Dubious! 75 | return None 76 | else: 77 | Cls = getattr(ast, ty) 78 | obj = Cls() 79 | for field in tree.keys(): 80 | if field in ['@id', '@type']: 81 | pass 82 | else: 83 | setattr(obj, field, json_to_ast(tree[field])) 84 | 85 | return obj 86 | elif isinstance(tree, list): 87 | asts = [] 88 | for elt in tree: 89 | asts.append(json_to_ast(elt)) 90 | asts.reverse() 91 | return asts 92 | 93 | def import_schema(client): 94 | with open('python-schema.json', 'r') as f: 95 | python_schema = json.load(f) 96 | results = client.insert_document(python_schema, 97 | graph_type="schema") 98 | print(f"Added Classes: {results}") 99 | 100 | def import_program(client, code): 101 | result = ast.parse(code) 102 | #print(ast.dump(result, indent=4)) 103 | js = code_to_json(result) 104 | #print("-----------------------------------") 105 | #print(json.dumps(js, indent=4)) 106 | #print("-----------------------------------") 107 | results = client.insert_document(js) 108 | print(f"Added Program: {results}") 109 | 110 | if __name__ == "__main__": 111 | dbid = "python-ast" 112 | label = "python-ast" 113 | description = "AST schema for python" 114 | base = 'iri://terminusdb.com/python/' 115 | schema = 'iri://terminusdb.com/python#' 116 | prefixes = {'@base' : base, 117 | '@schema' : schema } 118 | 119 | # team = os.environ['TERMINUSDB_TEAM'] 120 | team = "admin" 121 | team_quoted = urllib.parse.quote(team) 122 | client = WOQLClient('http://localhost:6363') 123 | # client = WOQLClient(f"https://cloud.terminusdb.com/{team_quoted}/") 124 | # make sure you have put the token in environment variable 125 | # https://docs.terminusdb.com/beta/#/terminusx/get-your-api-key 126 | client.connect(team=team) # , use_token=True) 127 | 128 | exists = client.has_database(dbid) 129 | 130 | if exists: 131 | print(f"Recreating {dbid}") 132 | client.delete_database(dbid, team=team, force=True) 133 | client.create_database(dbid, 134 | team, 135 | label=label, 136 | description=description, 137 | prefixes=prefixes) 138 | import_schema(client) 139 | else: 140 | print(f"Connecting to {dbid}") 141 | client.create_database(dbid, 142 | team, 143 | label=label, 144 | description=description, 145 | prefixes=prefixes) 146 | import_schema(client) 147 | client.connect(db=dbid) # ,team=team) # ,use_token=True) 148 | 149 | with open('hello-world.py', 'r') as f: 150 | hello_world = f.read() 151 | 152 | import_program(client,hello_world) 153 | [my_module] = list(client.query_document({'@type' : 'Module'})) 154 | res = json_to_ast(my_module) 155 | print(astunparse.unparse(res)) 156 | -------------------------------------------------------------------------------- /python-ast/python-schema.json: -------------------------------------------------------------------------------- 1 | [ 2 | 3 | { "@type" : "Class", 4 | "@id" : "AST", 5 | "lineno" : { "@type" : "Optional", 6 | 
"@class" : "xsd:integer"}, 7 | "col_offset" : { "@type" : "Optional", 8 | "@class" : "xsd:integer"}, 9 | "end_lineno" : { "@type" : "Optional", 10 | "@class" : "xsd:integer"}, 11 | "end_col_offset" : { "@type" : "Optional", 12 | "@class" : "xsd:integer"} 13 | }, 14 | 15 | { "@type" : "Class", 16 | "@id" : "Module", 17 | "body" : { "@type" : "Set", 18 | "@class" : "AST" }, 19 | "type_ignores" : {"@type" : "Set", 20 | "@class" : "Expr" } 21 | }, 22 | 23 | { "@type" : "Class", 24 | "@id" : "Expr", 25 | "@subdocument" : [], 26 | "@key" : { "@type" : "ValueHash" }, 27 | "@inherits" : ["AST"], 28 | "value" : { "@type" : "Optional", 29 | "@class" : "AST" } 30 | }, 31 | 32 | { "@type" : "Enum", 33 | "@id" : "Context", 34 | "@value" : [ 35 | "Load", 36 | "Store", 37 | "Del" 38 | ] 39 | }, 40 | 41 | { "@type" : "Class", 42 | "@id" : "Name_or_Attribute", 43 | "@inherits" : ["AST"], 44 | "@abstract" : [] 45 | }, 46 | 47 | { "@type" : "Class", 48 | "@id" : "Name", 49 | "@subdocument" : [], 50 | "@key" : { "@type" : "ValueHash" }, 51 | "@inherits" : ["Name_or_Attribute"], 52 | "id" : "xsd:string", 53 | "ctx" : "Context" 54 | }, 55 | 56 | { "@type" : "Class", 57 | "@id" : "Attribute", 58 | "@subdocument" : [], 59 | "@key" : { "@type" : "ValueHash" }, 60 | "@inherits" : ["Name_or_Attribute"], 61 | "value" : "Name_or_Attribute", 62 | "attr" : "xsd:string", 63 | "ctx" : "Context" 64 | }, 65 | 66 | { "@type" : "Class", 67 | "@id" : "Call", 68 | "@subdocument" : [], 69 | "@key" : { "@type" : "ValueHash" }, 70 | "@inherits" : ["AST"], 71 | "func" : "Name_or_Attribute", 72 | "args" : { "@type" : "List", 73 | "@class" : "Expr" }, 74 | "keywords" : { "@type" : "List", 75 | "@class" : "Expr" } 76 | }, 77 | 78 | { "@type" : "Class", 79 | "@id" : "Import", 80 | "@subdocument" : [], 81 | "@key" : { "@type" : "ValueHash" }, 82 | "@inherits" : ["AST"], 83 | "names" : { "@type" : "List", 84 | "@class" : "alias" } 85 | }, 86 | 87 | { "@type" : "Class", 88 | "@id" : "alias", 89 | "@subdocument" : [], 90 | "@key" : { "@type" : "ValueHash" }, 91 | "@inherits" : ["AST"], 92 | "name" : "xsd:string", 93 | "asname" : { "@type" : "Optional", 94 | "@class" : "xsd:string" } 95 | }, 96 | 97 | { "@type" : "Class", 98 | "@id" : "FunctionDef", 99 | "@subdocument" : [], 100 | "@key" : { "@type" : "ValueHash" }, 101 | "@inherits" : ["AST"], 102 | "name" : "xsd:string", 103 | "args" : "arguments", 104 | "body" : { "@type" : "List", 105 | "@class" : "Expr"}, 106 | "decorator_list" : { "@type" : "List", 107 | "@class" : "Expr"}, 108 | "returns" : { "@type" : "Optional", 109 | "@class" : "Expr"}, 110 | "type_comment" : { "@type" : "Optional", 111 | "@class" : "Expr"}, 112 | "asname" : { "@type" : "Optional", 113 | "@class" : "xsd:string" } 114 | }, 115 | 116 | { "@type" : "Class", 117 | "@id" : "arguments", 118 | "@subdocument" : [], 119 | "@key" : { "@type" : "ValueHash" }, 120 | "@inherits" : ["AST"], 121 | "args" : { "@type" : "List", 122 | "@class" : "arg" }, 123 | "defaults" : { "@type" : "List", 124 | "@class" : "Constant"}, 125 | "kw_defaults" : { "@type" : "List", 126 | "@class" : "Constant"}, 127 | "kwarg" : { "@type" : "Optional", 128 | "@class" : "arg"}, 129 | "kwonlyargs" : { "@type" : "List", 130 | "@class" : "Constant"}, 131 | "posonlyargs" : { "@type" : "List", 132 | "@class" : "alias"}, 133 | "vararg" : { "@type" : "Optional", 134 | "@class" : "arg"} 135 | }, 136 | 137 | { "@type" : "Class", 138 | "@id" : "arg", 139 | "@subdocument" : [], 140 | "@key" : { "@type" : "ValueHash" }, 141 | "@inherits" : ["AST"], 142 | "arg" : 
"xsd:string", 143 | "annotation" : { "@type" : "Optional", 144 | "@class" : "Name" }, 145 | "type_comment" : { "@type" : "Optional", 146 | "@class" : "xsd:string" } 147 | }, 148 | 149 | { "@type" : "Class", 150 | "@id" : "Constant", 151 | "@subdocument" : [], 152 | "@key" : { "@type" : "ValueHash" }, 153 | "@inherits" : ["Expr"], 154 | "kind" : { "@type" : "Optional", 155 | "@class" : "xsd:string" } 156 | }, 157 | 158 | { "@type" : "TaggedUnion", 159 | "@id" : "Value", 160 | "@key" : { "@type" : "ValueHash" }, 161 | "@inherits" : ["AST"], 162 | "@subdocument" : [], 163 | "integer" : "xsd:integer", 164 | "boolean" : "xsd:boolean", 165 | "float" : "xsd:float", 166 | "string" : "xsd:string", 167 | "none" : "sys:Unit" 168 | } 169 | ] 170 | 171 | 172 | -------------------------------------------------------------------------------- /react-example/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /react-example/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "react-example", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@terminusdb/terminusdb-client": "^10.0.3", 7 | "@testing-library/jest-dom": "^5.16.1", 8 | "@testing-library/react": "^12.1.2", 9 | "@testing-library/user-event": "^13.5.0", 10 | "react": "^17.0.2", 11 | "react-dom": "^17.0.2", 12 | "react-scripts": "5.0.0", 13 | "web-vitals": "^2.1.3" 14 | }, 15 | "scripts": { 16 | "start": "react-scripts start", 17 | "build": "react-scripts build", 18 | "test": "react-scripts test", 19 | "eject": "react-scripts eject" 20 | }, 21 | "eslintConfig": { 22 | "extends": [ 23 | "react-app", 24 | "react-app/jest" 25 | ] 26 | }, 27 | "browserslist": { 28 | "production": [ 29 | ">0.2%", 30 | "not dead", 31 | "not op_mini all" 32 | ], 33 | "development": [ 34 | "last 1 chrome version", 35 | "last 1 firefox version", 36 | "last 1 safari version" 37 | ] 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /react-example/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/terminusdb/terminusdb-tutorials/4de64c04cbfb42d31c7a4c3c509f85384c1a6a28/react-example/public/favicon.ico -------------------------------------------------------------------------------- /react-example/public/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 12 | 13 | 17 | 18 | 27 | React App 28 | 29 | 30 | 31 |
32 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /react-example/public/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/terminusdb/terminusdb-tutorials/4de64c04cbfb42d31c7a4c3c509f85384c1a6a28/react-example/public/logo192.png -------------------------------------------------------------------------------- /react-example/public/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/terminusdb/terminusdb-tutorials/4de64c04cbfb42d31c7a4c3c509f85384c1a6a28/react-example/public/logo512.png -------------------------------------------------------------------------------- /react-example/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /react-example/public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /react-example/src/index-0.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import TerminusClient from "@terminusdb/terminusdb-client"; 4 | import './index.css'; 5 | 6 | class Content extends React.Component { 7 | constructor(props) { 8 | super(props); 9 | this.state = { 10 | status: "Connected", 11 | }; 12 | } 13 | 14 | render() { 15 | if (false) { 16 | return (

{this.state.status}

) 17 | } else { 18 | return (

Not connected

) 19 | } 20 | } 21 | } 22 | 23 | class Page extends React.Component { 24 | constructor(props) { 25 | super(props); 26 | this.state = { 27 | endpoint: "", 28 | user: "", 29 | team: "", 30 | apiKey: "", 31 | client: null, 32 | schema: null, 33 | entries: [], 34 | }; 35 | this.handleLoginChange = this.handleLoginChange.bind(this); 36 | this.handleSubmit = this.handleSubmit.bind(this); 37 | } 38 | 39 | async makeConnection(){ 40 | try{ 41 | // make connection here 42 | }catch(err){ 43 | console.error(err.message) 44 | } 45 | } 46 | 47 | handleLoginChange(event) { 48 | const target = event.target; 49 | const value = target.value; 50 | const name = target.name; 51 | 52 | this.setState({ 53 | [name]: value 54 | }); 55 | } 56 | 57 | handleSubmit(event) { 58 | if (this.state.endpoint && this.state.user && this.state.team && this.state.apiKey) { 59 | this.makeConnection(); 60 | } else { 61 | alert('Missing one or more input.' ) 62 | } 63 | event.preventDefault(); 64 | } 65 | 66 | 67 | render() { 68 | return ( 69 |
70 |
71 | 75 | 79 | 83 | 87 | 88 |
89 |
90 | < Content /> 91 |
92 | ); 93 | } 94 | } 95 | 96 | // ======================================== 97 | 98 | ReactDOM.render( 99 | , 100 | document.getElementById('root') 101 | ); 102 | -------------------------------------------------------------------------------- /react-example/src/index-1.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import TerminusClient from "@terminusdb/terminusdb-client"; 4 | import './index.css'; 5 | 6 | class Content extends React.Component { 7 | constructor(props) { 8 | super(props); 9 | this.state = { 10 | status: "Connected", 11 | }; 12 | } 13 | 14 | render() { 15 | if (this.props.schema) { 16 | return (

{this.state.status}

) 17 | } else { 18 | return (

Not connected

) 19 | } 20 | } 21 | } 22 | 23 | class Page extends React.Component { 24 | constructor(props) { 25 | super(props); 26 | this.state = { 27 | endpoint: "", 28 | user: "", 29 | team: "", 30 | apiKey: "", 31 | client: null, 32 | schema: null, 33 | entries: [], 34 | }; 35 | this.handleLoginChange = this.handleLoginChange.bind(this); 36 | this.handleSubmit = this.handleSubmit.bind(this); 37 | } 38 | 39 | async makeConnection(){ 40 | try{ 41 | const serverUrl = this.state.endpoint.concat("/", this.state.team, "/"); 42 | const client = new TerminusClient.WOQLClient( 43 | serverUrl,{ 44 | user:this.state.user, 45 | organization:this.state.team, 46 | token: this.state.apiKey 47 | } 48 | ); 49 | await client.connect() 50 | const schema = await client.getSchema("blog_app", "main") 51 | console.log("Schema"); 52 | console.log(schema); 53 | const entries = await client.getDocument({"graph_type":"instance","as_list":true,"type":"Entry"}) 54 | console.log("Entries"); 55 | console.log(entries); 56 | this.setState({ 57 | client: client, 58 | schema: schema, 59 | entries: entries, 60 | }); 61 | }catch(err){ 62 | console.error(err.message) 63 | } 64 | } 65 | 66 | handleLoginChange(event) { 67 | const target = event.target; 68 | const value = target.value; 69 | const name = target.name; 70 | 71 | this.setState({ 72 | [name]: value 73 | }); 74 | } 75 | 76 | handleSubmit(event) { 77 | if (this.state.endpoint && this.state.user && this.state.team && this.state.apiKey) { 78 | this.makeConnection(); 79 | } else { 80 | alert('Missing one or more input.' ) 81 | } 82 | event.preventDefault(); 83 | } 84 | 85 | 86 | render() { 87 | return ( 88 |
89 |
90 | 94 | 98 | 102 | 106 | 107 |
108 |
109 | < Content schema={this.state.schema} 110 | entries={this.state.entries}/> 111 |
112 | ); 113 | } 114 | } 115 | 116 | // ======================================== 117 | 118 | ReactDOM.render( 119 | , 120 | document.getElementById('root') 121 | ); 122 | -------------------------------------------------------------------------------- /react-example/src/index-2.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom'; 3 | import TerminusClient from "@terminusdb/terminusdb-client"; 4 | import './index.css'; 5 | 6 | class Content extends React.Component { 7 | constructor(props) { 8 | super(props); 9 | this.state = { 10 | status: "Connected", 11 | title: "", 12 | contentText: "", 13 | }; 14 | this.handleContentChange = this.handleContentChange.bind(this); 15 | this.handleSubmit = this.handleSubmit.bind(this); 16 | } 17 | 18 | handleContentChange(event) { 19 | const target = event.target; 20 | const value = target.value; 21 | const name = target.name; 22 | 23 | this.setState({ 24 | [name]: value 25 | }); 26 | } 27 | 28 | handleSubmit(event) { 29 | if (this.state.title && this.state.contentText) { 30 | const lastUpdate = new Date(); 31 | const entryObj = {"@type": "Entry", 32 | "title": this.state.title, 33 | "content": this.state.contentText, 34 | "created_date": lastUpdate, 35 | "last_update": lastUpdate, 36 | }; 37 | this.props.handleEntry(entryObj).then(() => { 38 | this.setState({ 39 | status: "Entry Added", 40 | });} 41 | ) 42 | } else { 43 | alert('Missing one or more input.' ); 44 | } 45 | event.preventDefault(); 46 | } 47 | 48 | render() { 49 | if (this.props.schema) { 50 | return ( 51 |
52 |

{this.state.status}

53 |
54 | 58 |
59 |