├── test ├── unit │ ├── widgets │ │ ├── __init__.py │ │ └── test_package_version.py │ ├── graph_magic │ │ ├── parsing │ │ │ ├── __init__.py │ │ │ └── test_str_to_namespace_var.py │ │ ├── __init__.py │ │ ├── gremlin_profile_large_results_predicates.txt │ │ ├── GraphNotebookTest.py │ │ ├── test_graph_magic.py │ │ └── test_opencypher_metadata.py │ ├── seed │ │ ├── local_seed_test_propertygraph │ │ │ ├── 1_test_edges.txt │ │ │ ├── 0_test_nodes.txt │ │ │ └── __init__.py │ │ ├── local_seed_test_cypher │ │ │ ├── 0_test_data.txt │ │ │ └── __init__.py │ │ ├── local_seed_test_rdf │ │ │ ├── 0_test_data.txt │ │ │ └── __init__.py │ │ └── __init__.py │ ├── __init__.py │ ├── network │ │ ├── __init__.py │ │ ├── sparql │ │ │ ├── __init__.py │ │ │ ├── data │ │ │ │ ├── __init__.py │ │ │ │ ├── get_sparql_result.py │ │ │ │ ├── 005_incorrect_bindings.json │ │ │ │ ├── 008_duplicate_s_and_p_bindings.json │ │ │ │ └── 010_airroutes_no_literals.json │ │ │ ├── sparql_network_to_json.py │ │ │ └── test_sparql_network_processing.py │ │ ├── gremlin │ │ │ ├── __init__.py │ │ │ ├── test_pattern_list_parser.py │ │ │ ├── test_generate_id_from_dict.py │ │ │ └── test_add_results.py │ │ ├── opencypher │ │ │ └── __init__.py │ │ └── test_network.py │ ├── notebooks │ │ └── __init__.py │ ├── options │ │ └── __init__.py │ ├── sparql │ │ ├── __init__.py │ │ └── test_sparql.py │ ├── configuration │ │ └── __init__.py │ └── visualization │ │ └── __init__.py ├── integration │ ├── iam │ │ ├── load │ │ │ ├── __init__.py │ │ │ └── test_load_with_iam.py │ │ ├── notebook │ │ │ └── __init__.py │ │ ├── opencypher │ │ │ ├── __init__.py │ │ │ └── test_opencypher_query_with_iam.py │ │ ├── sparql │ │ │ ├── __init__.py │ │ │ └── test_sparql_query_with_iam.py │ │ ├── statistics │ │ │ ├── __init__.py │ │ │ ├── test_statistics_with_iam.py │ │ │ └── test_summary_with_iam.py │ │ ├── status │ │ │ ├── __init__.py │ │ │ └── test_status_with_iam.py │ │ ├── system │ │ │ ├── __init__.py │ │ │ └── test_system_with_iam.py │ │ ├── 
__init__.py │ │ ├── gremlin │ │ │ ├── __init__.py │ │ │ └── test_gremlin_with_iam.py │ │ └── ml │ │ │ ├── test_neptune_client_with_iam.py │ │ │ └── __init__.py │ ├── without_iam │ │ ├── __init__.py │ │ ├── gremlin │ │ │ ├── __init__.py │ │ │ ├── test_gremlin_query.py │ │ │ └── test_gremlin_metadata.py │ │ ├── system │ │ │ ├── __init__.py │ │ │ └── test_system_without_iam.py │ │ ├── opencypher │ │ │ ├── __init__.py │ │ │ └── test_opencypher_query_without_iam.py │ │ ├── network │ │ │ ├── __init__.py │ │ │ └── gremlin │ │ │ │ ├── __init__.py │ │ │ │ └── test_gremlin_network_with_pattern.py │ │ ├── notebook │ │ │ ├── __init__.py │ │ │ └── test_status_graph_notebook.py │ │ ├── sparql │ │ │ ├── __init__.py │ │ │ ├── test_sparql_query_without_iam.py │ │ │ └── test_sparql_metadata.py │ │ └── status │ │ │ ├── __init__.py │ │ │ └── test_status_without_iam.py │ ├── __init__.py │ ├── GraphNotebookIntegrationTest.py │ ├── DataDrivenSparqlTest.py │ ├── DataDrivenOpenCypherTest.py │ ├── DataDrivenGremlinTest.py │ └── IntegrationTest.py └── __init__.py ├── src └── graph_notebook │ ├── neptune │ ├── gremlin │ │ ├── __init__.py │ │ ├── hashable_dict_patch.py │ │ └── graphsonV3d0_MapType_objectify_patch.py │ ├── __init__.py │ └── bolt_auth_token.py │ ├── notebooks │ ├── 01-Neptune-Database │ │ ├── 03-Sample-Applications │ │ │ ├── 05-Healthcare-and-Life-Sciences-Graphs │ │ │ │ └── __init__.py │ │ │ ├── __init__.py │ │ │ ├── 01-Fraud-Graphs │ │ │ │ └── __init__.py │ │ │ ├── 02-Knowledge-Graphs │ │ │ │ └── __init__.py │ │ │ ├── 03-Identity-Graphs │ │ │ │ ├── __init__.py │ │ │ │ └── 03-Jumpstart-Identity-Graphs-Using-Canonical-Model-and-ETL │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── script │ │ │ │ │ └── neptune-glue-transactions.py │ │ │ ├── 04-Security-Graphs │ │ │ │ └── __init__.py │ │ │ ├── 06-Data-Science-Samples │ │ │ │ └── __init__.py │ │ │ └── 07-Games-Industry-Graphs │ │ │ │ └── __init__.py │ │ ├── __init__.py │ │ ├── 01-Getting-Started │ │ │ └── __init__.py │ │ └── 02-Visualization 
│ │ │ └── __init__.py │ ├── __init__.py │ ├── 03-Neptune-ML │ │ ├── __init__.py │ │ ├── 01-Gremlin │ │ │ └── __init__.py │ │ ├── 02-SPARQL │ │ │ ├── __init__.py │ │ │ └── neptune-ml-pretrained-rdf-model-config.json │ │ └── 03-Sample-Applications │ │ │ ├── __init__.py │ │ │ ├── 04-Telco-Networks │ │ │ ├── __init__.py │ │ │ ├── 1b-Graph_init.ipynb │ │ │ └── Transform2Neptune.py │ │ │ └── 01-People-Analytics │ │ │ └── __init__.py │ ├── 02-Neptune-Analytics │ │ ├── __init__.py │ │ ├── 01-Getting-Started │ │ │ └── __init__.py │ │ ├── 02-Graph-Algorithms │ │ │ ├── __init__.py │ │ │ └── 00-Amazon-Neptune-Analytics-Algorithm-Support.pdf │ │ ├── 03-Sample-Use-Cases │ │ │ ├── __init__.py │ │ │ ├── 01-FinTech │ │ │ │ └── __init__.py │ │ │ ├── 02-Investment-Analysis │ │ │ │ └── __init__.py │ │ │ ├── 03-Software-Bill-Of-Materials │ │ │ │ ├── __init__.py │ │ │ │ ├── sbom_code │ │ │ │ │ ├── __init__.py │ │ │ │ │ └── sbom_helper.py │ │ │ │ ├── example_sboms │ │ │ │ │ ├── __init__.py │ │ │ │ │ ├── 01 │ │ │ │ │ │ └── __init__.py │ │ │ │ │ └── 02 │ │ │ │ │ │ └── __init__.py │ │ │ │ └── nodestream_template.yaml │ │ │ └── Overview.ipynb │ │ └── 04-OpenCypher-Over-RDF │ │ │ ├── __init__.py │ │ │ └── Air-Routes-Ontology-Diagram.png │ └── 04-Language-Tutorials │ │ ├── __init__.py │ │ ├── 01-Gremlin │ │ └── __init__.py │ │ ├── 02-openCypher │ │ └── __init__.py │ │ └── 03-SPARQL │ │ └── __init__.py │ ├── widgets │ ├── .eslintignore │ ├── .prettierignore │ ├── graph_notebook_widgets.json │ ├── src │ │ ├── __init__.py │ │ ├── index.ts │ │ ├── index.dev.ts │ │ ├── extension.js │ │ ├── extension.dev.js │ │ ├── version.ts │ │ ├── extension.ts │ │ ├── force_widget.spec.ts │ │ ├── plugin.ts │ │ └── theme_manager.ts │ ├── css │ │ ├── __init__.py │ │ └── theme-variables.css │ ├── _version.py │ ├── force │ │ └── __init__.py │ ├── _frontend.py │ ├── .eslintrc.js │ ├── tsconfig.json │ ├── webpack.dev.config.js │ ├── karma.conf.js │ └── __init__.py │ ├── nbextensions │ ├── playable_cells │ │ ├── static 
│ │ │ ├── README.md │ │ │ ├── playable_cells.css │ │ │ ├── __init__.py │ │ │ ├── description.yaml │ │ │ └── main.js │ │ └── __init__.py │ ├── gremlin_syntax │ │ ├── static │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── description.yaml │ │ │ └── main.js │ │ └── __init__.py │ ├── sparql_syntax │ │ ├── static │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── description.yaml │ │ │ └── main.js │ │ └── __init__.py │ ├── neptune_menu │ │ ├── static │ │ │ ├── __init__.py │ │ │ ├── README.md │ │ │ └── description.yaml │ │ └── __init__.py │ ├── opencypher_syntax │ │ ├── static │ │ │ ├── README.md │ │ │ ├── __init__.py │ │ │ ├── description.yaml │ │ │ └── main.js │ │ └── __init__.py │ ├── __init__.py │ └── install.py │ ├── seed │ ├── __init__.py │ └── queries │ │ ├── __init__.py │ │ ├── rdf │ │ ├── __init__.py │ │ └── sparql │ │ │ ├── __init__.py │ │ │ ├── epl │ │ │ └── __init__.py │ │ │ └── airports │ │ │ └── __init__.py │ │ └── propertygraph │ │ ├── __init__.py │ │ ├── gremlin │ │ ├── __init__.py │ │ ├── epl │ │ │ └── __init__.py │ │ ├── security-graph │ │ │ └── __init__.py │ │ ├── airports │ │ │ └── __init__.py │ │ ├── fraud_graph │ │ │ └── __init__.py │ │ ├── knowledge-graph │ │ │ └── __init__.py │ │ ├── dining_by_friends │ │ │ └── __init__.py │ │ └── games-social-graph │ │ │ └── __init__.py │ │ └── opencypher │ │ ├── __init__.py │ │ ├── epl │ │ └── __init__.py │ │ └── airports │ │ └── __init__.py │ ├── configuration │ └── __init__.py │ ├── decorators │ └── __init__.py │ ├── ipython_profile │ ├── __init__.py │ └── configure_ipython_profile.py │ ├── jupyter_profile │ ├── __init__.py │ └── jupyter_notebook_config.py │ ├── magics │ ├── parsing │ │ ├── __init__.py │ │ └── replace_namespace_vars.py │ ├── completers │ │ └── __init__.py │ └── __init__.py │ ├── network │ ├── gremlin │ │ └── __init__.py │ ├── opencypher │ │ └── __init__.py │ ├── sparql │ │ └── __init__.py │ └── __init__.py │ ├── visualization │ ├── __init__.py │ ├── templates │ │ ├── __init__.py │ │ ├── 
error.html │ │ ├── pre_container.html │ │ ├── sparql_construct.html │ │ ├── loading_wheel.html │ │ ├── gremlin_explain_profile.html │ │ ├── opencypher_explain.html │ │ ├── sparql_explain.html │ │ ├── sparql_table.html │ │ ├── opencypher_table.html │ │ ├── gremlin_table.html │ │ └── tabs.html │ ├── escape_reserved_characters.py │ ├── template_retriever.py │ ├── visualizer.py │ └── rows_and_columns.py │ ├── static_resources │ ├── __init__.py │ └── install.py │ ├── __init__.py │ ├── options │ ├── __init__.py │ └── options.py │ ├── start_jupyterlab.py │ └── start_notebook.py ├── NOTICE ├── images ├── ColorfulGraph.png ├── OCQueryGraph.png ├── SPARQLQueryGraph.png ├── gremlin-notebook.png ├── sample-ec2rules.png ├── GremlinQueryGraph.png └── Create-Notebook-Instance.png ├── .dockerignore ├── .github ├── pull_request_template.md ├── ISSUE_TEMPLATE │ ├── feature_request.md │ └── bug_report.md └── workflows │ ├── unit.yml │ └── docker_publish.yml ├── additional-databases ├── README.md ├── dbpedia │ └── README.md ├── sagemaker │ ├── README.md │ ├── sagemaker-notebook-lifecycle │ │ └── README.md │ └── neptune-notebook-cloudformation │ │ └── README.md ├── fuseki │ └── README.md ├── graphdb │ └── README.md ├── blazegraph │ └── README.md └── gremlin-server │ └── README.md ├── install.json ├── CODE_OF_CONDUCT.md ├── MANIFEST.in ├── pytest.ini ├── .gitignore ├── requirements.txt ├── docker └── service.sh └── Dockerfile /test/unit/widgets/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/iam/load/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/iam/notebook/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /test/integration/iam/opencypher/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/iam/sparql/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/iam/statistics/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/iam/status/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/iam/system/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/without_iam/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/unit/graph_magic/parsing/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /src/graph_notebook/neptune/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/without_iam/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- 
/test/integration/without_iam/system/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /test/integration/without_iam/opencypher/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /NOTICE: -------------------------------------------------------------------------------- 1 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | 3 | -------------------------------------------------------------------------------- /images/ColorfulGraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/ColorfulGraph.png -------------------------------------------------------------------------------- /images/OCQueryGraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/OCQueryGraph.png -------------------------------------------------------------------------------- /images/SPARQLQueryGraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/SPARQLQueryGraph.png -------------------------------------------------------------------------------- /images/gremlin-notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/gremlin-notebook.png -------------------------------------------------------------------------------- /images/sample-ec2rules.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/sample-ec2rules.png 
-------------------------------------------------------------------------------- /images/GremlinQueryGraph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/GremlinQueryGraph.png -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_propertygraph/1_test_edges.txt: -------------------------------------------------------------------------------- 1 | g.addE("edge_0").property(id,"0").from(V("0")).to(V("1")) -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/05-Healthcare-and-Life-Sciences-Graphs/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | # Ignore everything 2 | * 3 | 4 | # Allow files and directories 5 | !/.dockerignore 6 | !/Dockerfile 7 | !/docker -------------------------------------------------------------------------------- /images/Create-Notebook-Instance.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/images/Create-Notebook-Instance.png -------------------------------------------------------------------------------- /src/graph_notebook/widgets/.eslintignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | nbextension 3 | dist 4 | docs 5 | lib 6 | coverage 7 | **/*.d.ts 8 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/.prettierignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | nbextension 3 | dist 4 | 
docs 5 | lib 6 | coverage 7 | **/*.d.ts 8 | -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_cypher/0_test_data.txt: -------------------------------------------------------------------------------- 1 | CREATE (:Person {name: 'Michael'})-[:WORKS_AT]->(:Company {name: 'AWS'}) -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/playable_cells/static/README.md: -------------------------------------------------------------------------------- 1 | # Playable Cells 2 | 3 | Adds a play button to all jupyter code cells -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/playable_cells/static/playable_cells.css: -------------------------------------------------------------------------------- 1 | div.run_this_cell { 2 | display: block !important; 3 | } -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_propertygraph/0_test_nodes.txt: -------------------------------------------------------------------------------- 1 | g.addV("node_0").property(id,"0") 2 | g.addV("node_1").property(id,"1") -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_rdf/0_test_data.txt: -------------------------------------------------------------------------------- 1 | %%sparql 2 | 3 | INSERT DATA { . } -------------------------------------------------------------------------------- /src/graph_notebook/widgets/graph_notebook_widgets.json: -------------------------------------------------------------------------------- 1 | { 2 | "load_extensions": { 3 | "graph_notebook_widgets/extension": true 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /test/unit/seed/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/network/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/notebooks/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/options/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/sparql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/neptune/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/integration/iam/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/configuration/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/graph_magic/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/network/sparql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/visualization/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/configuration/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/decorators/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/ipython_profile/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. 
All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /test/integration/iam/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/network/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/network/opencypher/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/network/sparql/data/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/jupyter_profile/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/magics/parsing/__init__.py: -------------------------------------------------------------------------------- 1 | from .replace_namespace_vars import str_to_namespace_var 2 | from .replace_namespace_vars import replace_namespace_vars 3 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/gremlin_syntax/static/README.md: -------------------------------------------------------------------------------- 1 | # Gremlin Syntax 2 | 3 | Highlights syntax using Codemirror's SPARQL mode if the cell beings with %%gremlin -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/sparql_syntax/static/README.md: -------------------------------------------------------------------------------- 1 | # SPARQL Syntax 2 | 3 | Highlights syntax using Codemirror's SPARQL mode if the cell beings with %%sparql -------------------------------------------------------------------------------- /src/graph_notebook/network/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. 
or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_rdf/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/magics/completers/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/network/opencypher/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/rdf/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/rdf/sparql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/static_resources/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/css/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | -------------------------------------------------------------------------------- /test/integration/without_iam/network/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/integration/without_iam/notebook/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/integration/without_iam/sparql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/integration/without_iam/status/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_cypher/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /test/integration/without_iam/network/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /test/unit/seed/local_seed_test_propertygraph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | __version__ = '5.1.0' 7 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/01-Gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/02-SPARQL/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/04-Language-Tutorials/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/rdf/sparql/epl/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/gremlin_syntax/static/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/neptune_menu/static/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/opencypher_syntax/static/README.md: -------------------------------------------------------------------------------- 1 | # openCypher Syntax 2 | 3 | Highlights syntax using Codemirror's Cypher mode if the cell beings with %%oc or %%opencypher -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/opencypher_syntax/static/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/playable_cells/static/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/sparql_syntax/static/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/opencypher/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/rdf/sparql/airports/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/04-Language-Tutorials/01-Gremlin/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/04-Language-Tutorials/02-openCypher/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/04-Language-Tutorials/03-SPARQL/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/epl/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/opencypher/epl/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/01-Getting-Started/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/02-Visualization/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/01-Getting-Started/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/02-Graph-Algorithms/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/03-Sample-Applications/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/security-graph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/opencypher/airports/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/04-OpenCypher-Over-RDF/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/airports/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/fraud_graph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/knowledge-graph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | Issue #, if available: 2 | 3 | Description of changes: 4 | 5 | By submitting this pull request, I confirm that my contribution is made under the terms of the Apache 2.0 license. -------------------------------------------------------------------------------- /additional-databases/README.md: -------------------------------------------------------------------------------- 1 | # Additional Databases 2 | Subfolders within this part of the tree contain detailed instructions that help when configuring graph-notebook with different graph database engines. 3 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/dining_by_friends/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/seed/queries/propertygraph/gremlin/games-social-graph/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/01-FinTech/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/03-Sample-Applications/04-Telco-Networks/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /install.json: -------------------------------------------------------------------------------- 1 | { 2 | "packageManager": "python", 3 | "packageName": "graph-notebook", 4 | "uninstallInstructions": "Use your Python package manager (pip, conda, etc.) to uninstall the package graph-notebook" 5 | } 6 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/neptune_menu/static/README.md: -------------------------------------------------------------------------------- 1 | # Neptune Menu 2 | 3 | Adds a menu to the navbar for Neptune related information including: 4 | 1. Host 5 | 1. Port 6 | 1. Authentication Mode 7 | 1. 
TLS Setting -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/01-Fraud-Graphs/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/03-Sample-Applications/01-People-Analytics/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/02-Knowledge-Graphs/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/03-Identity-Graphs/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/04-Security-Graphs/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/02-Investment-Analysis/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/06-Data-Science-Samples/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/07-Games-Industry-Graphs/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/03-Software-Bill-Of-Materials/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/network/sparql/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from .SPARQLNetwork import SPARQLNetwork # noqa F401 7 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/index.ts: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | export * from "./version"; 7 | export * from "./force_widget"; 8 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/03-Software-Bill-Of-Materials/sbom_code/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/03-Software-Bill-Of-Materials/example_sboms/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/widgets/_version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import graph_notebook 7 | 8 | __version__ = graph_notebook.__version__ 9 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/03-Software-Bill-Of-Materials/example_sboms/01/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/03-Software-Bill-Of-Materials/example_sboms/02/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/options/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from .options import OPTIONS_DEFAULT_DIRECTED, vis_options_merge # noqa F401 7 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/03-Sample-Use-Cases/03-Software-Bill-Of-Materials/nodestream_template.yaml: -------------------------------------------------------------------------------- 1 | plugins: 2 | - name: sbom 3 | config: 4 | paths: ./example_sboms/ 5 | 6 | targets: 7 | my-neptune: 8 | database: neptune -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/04-OpenCypher-Over-RDF/Air-Routes-Ontology-Diagram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/src/graph_notebook/notebooks/02-Neptune-Analytics/04-OpenCypher-Over-RDF/Air-Routes-Ontology-Diagram.png -------------------------------------------------------------------------------- /src/graph_notebook/network/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from .sparql import SPARQLNetwork # noqa F401 7 | from .gremlin import GremlinNetwork # noqa F401 8 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/01-Neptune-Database/03-Sample-Applications/03-Identity-Graphs/03-Jumpstart-Identity-Graphs-Using-Canonical-Model-and-ETL/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/error.html: -------------------------------------------------------------------------------- 1 |
2 | {% block style %} 3 | 8 | {% endblock %} 9 |
10 | {{ error|e }}
11 |     
12 |
-------------------------------------------------------------------------------- /src/graph_notebook/notebooks/02-Neptune-Analytics/02-Graph-Algorithms/00-Amazon-Neptune-Analytics-Algorithm-Support.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws/graph-notebook/HEAD/src/graph_notebook/notebooks/02-Neptune-Analytics/02-Graph-Algorithms/00-Amazon-Neptune-Analytics-Algorithm-Support.pdf -------------------------------------------------------------------------------- /src/graph_notebook/widgets/force/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from .force_widget import Force # noqa F401 7 | from graph_notebook.options import OPTIONS_DEFAULT_DIRECTED # noqa F401 8 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/escape_reserved_characters.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | def escape_reserved_characters(content: str): 7 | return content.replace('&', '&').replace('<', '<').replace('>', '>') 8 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | ## Code of Conduct 2 | This project has adopted the [Amazon Open Source Code of Conduct](https://aws.github.io/code-of-conduct). 3 | For more information see the [Code of Conduct FAQ](https://aws.github.io/code-of-conduct-faq) or contact 4 | opensource-codeofconduct@amazon.com with any additional questions or comments. 
5 | -------------------------------------------------------------------------------- /src/graph_notebook/jupyter_profile/jupyter_notebook_config.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import os 7 | 8 | static_path = os.path.expanduser('~/neptune_workbench/static/') 9 | c.NotebookApp.extra_static_paths = [static_path] # noqa: F821 10 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/index.dev.ts: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | export * from "./version"; 7 | export * from "./force_widget"; 8 | 9 | if (module.hot) { 10 | module.hot.accept(function () { 11 | console.log("new module failed"); 12 | }); 13 | } 14 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/_frontend.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | import graph_notebook 6 | 7 | """ 8 | Information about the frontend package of the widgets. 
9 | """ 10 | 11 | module_name = "graph_notebook_widgets" 12 | module_version = graph_notebook.__version__ 13 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/extension.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | window["requirejs"].config({ 4 | map: { 5 | "*": { 6 | graph_notebook_widgets: "nbextensions/graph_notebook_widgets/index", 7 | }, 8 | }, 9 | }); 10 | // Export the required load_ipython_extension function 11 | const load_ipython_extension = function () {}; 12 | export { load_ipython_extension }; -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/playable_cells/static/description.yaml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Type: Jupyter Notebook Extension 5 | Compatibility: 5.x, 6.x 6 | Name: playable_cells 7 | Main: main.js 8 | Link: README.md 9 | Description: | 10 | Adds a play button to all jupyter code cells 11 | Parameters: 12 | - none 13 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include tsconfig.json 2 | include package.json 3 | include webpack.config.js 4 | 5 | # Javascript files 6 | recursive-include src/graph_notebook/widgets * 7 | prune src/graph_notebook/widgets/node_modules 8 | prune coverage 9 | 10 | # Patterns to exclude from any directory 11 | global-exclude *~ 12 | global-exclude *.pyc 13 | global-exclude *.pyo 14 | global-exclude .git 15 | global-exclude .ipynb_checkpoints -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/gremlin_syntax/static/description.yaml: 
-------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Type: Jupyter Notebook Extension 5 | Compatibility: 5.x, 6.x 6 | Name: gremlin_syntax 7 | Main: main.js 8 | Link: README.md 9 | Description: | 10 | Enables syntax higlighting for cells with the %%gremlin magic 11 | Parameters: 12 | - none 13 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/opencypher_syntax/static/description.yaml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Type: Jupyter Notebook Extension 5 | Compatibility: 5.x, 6.x 6 | Name: opencypher_syntax 7 | Main: main.js 8 | Link: README.md 9 | Description: | 10 | Enables syntax higlighting for cells with the %%oc magic 11 | Parameters: 12 | - none 13 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/sparql_syntax/static/description.yaml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Type: Jupyter Notebook Extension 5 | Compatibility: 5.x, 6.x 6 | Name: sparql_syntax 7 | Main: main.js 8 | Link: README.md 9 | Description: | 10 | Enables syntax higlighting for cells with the %%sparql magic 11 | Parameters: 12 | - none 13 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/pre_container.html: -------------------------------------------------------------------------------- 1 |
2 | {% block style %} 3 | 10 | {% endblock %} 11 |
{{content|e}}
12 |
13 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/neptune_menu/static/description.yaml: -------------------------------------------------------------------------------- 1 | # Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 2 | # SPDX-License-Identifier: Apache-2.0 3 | 4 | Type: Jupyter Notebook Extension 5 | Compatibility: 5.x, 6.x 6 | Name: neptune_menu 7 | Main: main.js 8 | Link: README.md 9 | Description: | 10 | Adds a menu item for Neptune related options along-side File, Edit, etc. 11 | Parameters: 12 | - none 13 | -------------------------------------------------------------------------------- /src/graph_notebook/magics/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from .completers.graph_completer import get_completion_options 7 | from .graph_magic import Graph 8 | 9 | 10 | def load_ipython_extension(ipython): 11 | ipython.set_hook('complete_command', get_completion_options, re_key=".*") 12 | ipython.register_magics(Graph) 13 | -------------------------------------------------------------------------------- /test/unit/network/sparql/data/get_sparql_result.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import json 7 | import os 8 | 9 | 10 | dir_path = os.path.dirname(os.path.realpath(__file__)) 11 | 12 | 13 | def get_sparql_result(name): 14 | file_path = f'{dir_path}/{name}' 15 | with open(file_path) as f: 16 | data = json.load(f) 17 | return data 18 | -------------------------------------------------------------------------------- /additional-databases/dbpedia/README.md: -------------------------------------------------------------------------------- 1 | ## Connecting graph notebook to DBPedia SPARQL Endpoint 2 | 3 | The official SPARQL endpoint for DBPedia is available from https://dbpedia.org/sparql and is based on a Virtuoso engine. 4 | 5 | It is possible to connect to this endpoint using the following configuration: 6 | 7 | ``` 8 | %%graph_notebook_config 9 | { 10 | "host": "dbpedia.org", 11 | "port": 443, 12 | "ssl": true, 13 | "sparql": { 14 | "path": "sparql" 15 | } 16 | } 17 | ``` 18 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | markers = 3 | neptune: tests which have to run against neptune 4 | iam: tests which require iam authentication 5 | gremlin: tests which run against a gremlin endpoint 6 | sparql: tests which run against SPARQL1.1 endpoint 7 | neptuneml: tests which run Neptune ML workloads 8 | jupyter: tests which run against ipython/jupyter frameworks 9 | reset: test which performs a fast reset against Neptune, running this will wipe your database! 10 | 11 | 12 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/extension.dev.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | "use strict"; 7 | 8 | window["requirejs"].config({ 9 | map: { 10 | "*": { 11 | graph_notebook_widgets: "http://localhost:9000/index.js" 12 | }, 13 | }, 14 | }); 15 | // Export the required load_ipython_extension function 16 | const load_ipython_extension = function () {}; 17 | export { load_ipython_extension }; 18 | -------------------------------------------------------------------------------- /test/integration/without_iam/status/test_status_without_iam.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | import pytest 6 | 7 | from test.integration import IntegrationTest 8 | 9 | 10 | class TestStatusWithoutIAM(IntegrationTest): 11 | 12 | @pytest.mark.neptune 13 | def test_do_status(self): 14 | res = self.client.status() 15 | status = res.json() 16 | self.assertEqual(status['status'], 'healthy') 17 | -------------------------------------------------------------------------------- /test/integration/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from .IntegrationTest import IntegrationTest # noqa F401 7 | from .DataDrivenGremlinTest import DataDrivenGremlinTest # noqa F401 8 | from .DataDrivenSparqlTest import DataDrivenSparqlTest # noqa F401 9 | from .GraphNotebookIntegrationTest import GraphNotebookIntegrationTest # noqa F401 10 | from .NeptuneIntegrationWorkflowSteps import TEST_CONFIG_PATH # noqa F401 11 | -------------------------------------------------------------------------------- /test/unit/widgets/test_package_version.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from graph_notebook import __version__ 4 | from graph_notebook.widgets import get_package_json 5 | 6 | 7 | class TestPackageVersion(unittest.TestCase): 8 | def test_package_versions_match(self): 9 | """ 10 | Verify that the version in package.json matches the version found under src/graph_notebook/__init__.py 11 | """ 12 | 13 | package_json = get_package_json() 14 | self.assertEqual(__version__, package_json['version']) 15 | -------------------------------------------------------------------------------- /test/integration/without_iam/notebook/test_status_graph_notebook.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | import pytest 6 | 7 | from test.integration import GraphNotebookIntegrationTest 8 | 9 | 10 | class TestGraphMagicStatus(GraphNotebookIntegrationTest): 11 | @pytest.mark.jupyter 12 | @pytest.mark.neptune 13 | def test_status(self): 14 | res = self.ip.run_line_magic('status', '') 15 | self.assertEqual('healthy', res['status']) 16 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/version.ts: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | const data = require("../package.json"); 7 | 8 | /** 9 | * The _model_module_version/_view_module_version this package implements. 10 | * 11 | * The html widget manager assumes that this is the same as the npm package 12 | * version number. 13 | */ 14 | export const MODULE_VERSION = data.version; 15 | 16 | /* 17 | * The current package name. 18 | */ 19 | export const MODULE_NAME = data.name; 20 | -------------------------------------------------------------------------------- /test/integration/iam/ml/test_neptune_client_with_iam.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from graph_notebook.configuration.generate_config import Configuration 7 | from graph_notebook.neptune.client import Client 8 | 9 | client: Client 10 | config: Configuration 11 | 12 | TEST_BULKLOAD_SOURCE = 's3://aws-ml-customer-samples-%s/bulkload-datasets/%s/airroutes/v01' 13 | GREMLIN_TEST_LABEL = 'graph-notebook-test' 14 | SPARQL_TEST_PREDICATE = '' 15 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/neptune_menu/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | 7 | # template for this taken from 8 | # https://jupyter-notebook.readthedocs.io/en/stable/examples/Notebook/Distributing%20Jupyter%20Extensions%20as%20Python%20Packages.html#Defining-the-server-extension-and-nbextension 9 | def _jupyter_nbextension_paths(): 10 | return [dict( 11 | section="notebook", 12 | src="static", 13 | dest="neptune_menu", 14 | require="neptune_menu/main")] 15 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/sparql_construct.html: -------------------------------------------------------------------------------- 1 |
2 | {% block style %} 3 | 13 | {% endblock %} 14 |
15 | {% for l in lines %} 16 | {{l|e}} 17 |
18 | {% endfor %} 19 |
20 |
-------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/gremlin_syntax/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | 7 | # template for this taken from 8 | # https://jupyter-notebook.readthedocs.io/en/stable/examples/Notebook/Distributing%20Jupyter%20Extensions%20as%20Python%20Packages.html#Defining-the-server-extension-and-nbextension 9 | def _jupyter_nbextension_paths(): 10 | return [dict( 11 | section="notebook", 12 | src="static", 13 | dest="gremlin_syntax", 14 | require="gremlin_syntax/main")] 15 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/playable_cells/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | 7 | # template for this taken from 8 | # https://jupyter-notebook.readthedocs.io/en/stable/examples/Notebook/Distributing%20Jupyter%20Extensions%20as%20Python%20Packages.html#Defining-the-server-extension-and-nbextension 9 | def _jupyter_nbextension_paths(): 10 | return [dict( 11 | section="notebook", 12 | src="static", 13 | dest="playable_cells", 14 | require="playable_cells/main")] 15 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/sparql_syntax/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | 7 | # template for this taken from 8 | # https://jupyter-notebook.readthedocs.io/en/stable/examples/Notebook/Distributing%20Jupyter%20Extensions%20as%20Python%20Packages.html#Defining-the-server-extension-and-nbextension 9 | def _jupyter_nbextension_paths(): 10 | return [dict( 11 | section="notebook", 12 | src="static", 13 | dest="sparql_syntax", 14 | require="sparql_syntax/main")] 15 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/opencypher_syntax/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | 7 | # template for this taken from 8 | # https://jupyter-notebook.readthedocs.io/en/stable/examples/Notebook/Distributing%20Jupyter%20Extensions%20as%20Python%20Packages.html#Defining-the-server-extension-and-nbextension 9 | def _jupyter_nbextension_paths(): 10 | return [dict( 11 | section="notebook", 12 | src="static", 13 | dest="opencypher_syntax", 14 | require="opencypher_syntax/main")] 15 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/gremlin_syntax/static/main.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | require(['notebook/js/codecell'], function(codecell) { 7 | codecell.CodeCell.options_default.highlight_modes['text/x-groovy'] = {'reg':["^%%gremlin"]} ; 8 | Jupyter.notebook.events.one('kernel_ready.Kernel', function(){ 9 | Jupyter.notebook.get_cells().map(function(cell) { 10 | if (cell.cell_type === 'code') { 11 | cell.auto_highlight(); 12 | } 13 | }); 14 | }); 15 | }); -------------------------------------------------------------------------------- /test/unit/graph_magic/gremlin_profile_large_results_predicates.txt: -------------------------------------------------------------------------------- 1 | Predicates 2 | ========== 3 | # of predicates: 999,999 4 | 5 | WARNING: reverse traversal with no edge label(s) - .in() / .both() may impact query performance 6 | 7 | Results 8 | ======= 9 | Count: 999.999 10 | Output: [v[3], v[3600], v[3614], v[4], v[5], v[6], v[7], v[8], v[9], v[10], v[11], v[12], v[47], v[49], v[136], v[13], v[15], v[16], v[17], v[18], v[389], v[20], v[21], v[22], v[23], v[24], v[25], v[26], v[27], v[28], v[416], v[29], v[30], v[430], v[31], v[9... 11 | Response serializer: application/vnd.gremlin-v3.0+json 12 | Response size (bytes): 23566 -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/sparql_syntax/static/main.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | require(['notebook/js/codecell'], function(codecell) { 7 | codecell.CodeCell.options_default.highlight_modes['application/sparql-query'] = {'reg':["^%%sparql"]} ; 8 | Jupyter.notebook.events.one('kernel_ready.Kernel', function(){ 9 | Jupyter.notebook.get_cells().map(function(cell) { 10 | if (cell.cell_type === 'code') { 11 | cell.auto_highlight(); 12 | } 13 | }); 14 | }); 15 | }); -------------------------------------------------------------------------------- /src/graph_notebook/visualization/template_retriever.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import os 7 | 8 | from jinja2.sandbox import SandboxedEnvironment 9 | 10 | dir_path = os.path.dirname(os.path.realpath(__file__)) 11 | 12 | 13 | def retrieve_template(template_name): 14 | with open('%s/templates/%s' % (dir_path, template_name), 'r') as tab_template_file: 15 | tab_template = tab_template_file.read().strip() 16 | 17 | env = SandboxedEnvironment() 18 | template = env.from_string(tab_template) 19 | 20 | return template 21 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/loading_wheel.html: -------------------------------------------------------------------------------- 1 |
2 | {% block style %} 3 | 18 | {% endblock %} 19 | 20 |
21 |
-------------------------------------------------------------------------------- /test/unit/network/gremlin/test_pattern_list_parser.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import unittest 7 | 8 | from graph_notebook.network.gremlin.GremlinNetwork import parse_pattern_list_str 9 | from graph_notebook.network.gremlin.GremlinNetwork import PathPattern 10 | 11 | 12 | class TestPatternListParser(unittest.TestCase): 13 | def test_parse_v_e_v(self): 14 | pattern_str = " v ,e ,v " 15 | expected = [PathPattern.V, PathPattern.E, PathPattern.V] 16 | pattern = parse_pattern_list_str(pattern_str) 17 | self.assertEqual(expected, pattern) 18 | -------------------------------------------------------------------------------- /test/unit/network/sparql/data/005_incorrect_bindings.json: -------------------------------------------------------------------------------- 1 | { 2 | "head": { 3 | "vars": [ 4 | "subject", 5 | "p", 6 | "object" 7 | ] 8 | }, 9 | "results": { 10 | "bindings": [ 11 | { 12 | "subject": { 13 | "type": "uri", 14 | "value": "http://kelvinlawrence.net/air-routes/resource/12" 15 | }, 16 | "p": { 17 | "type": "uri", 18 | "value": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" 19 | }, 20 | "object": { 21 | "type": "uri", 22 | "value": "http://kelvinlawrence.net/air-routes/class/Airport" 23 | } 24 | } 25 | ] 26 | } 27 | } -------------------------------------------------------------------------------- /test/unit/graph_magic/parsing/test_str_to_namespace_var.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | from graph_notebook.magics.parsing import str_to_namespace_var 4 | 5 | 6 | class TestParsingStrToNamespaceVar(unittest.TestCase): 7 | def test_none_dict(self): 8 | key = 'foo' 9 | local_ns = None 10 | res = 
str_to_namespace_var(key, local_ns) 11 | self.assertEqual(key, res) 12 | 13 | def test_encapsulated_key(self): 14 | key = '${foo}' 15 | expected_value = 'test' 16 | local_ns = { 17 | 'foo': expected_value 18 | } 19 | 20 | res = str_to_namespace_var(key, local_ns) 21 | self.assertEqual(expected_value, res) 22 | -------------------------------------------------------------------------------- /test/unit/network/sparql/sparql_network_to_json.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import unittest 7 | 8 | from graph_notebook.network.sparql.SPARQLNetwork import SPARQLNetwork 9 | from test.unit.network.sparql.data.get_sparql_result import get_sparql_result 10 | 11 | 12 | class TestSPARQLNetworkToJSON(unittest.TestCase): 13 | def test_sparql_network_to_json(self): 14 | data = get_sparql_result("001_kelvin-airroutes.json") 15 | 16 | sparql_network = SPARQLNetwork() 17 | sparql_network.add_results(data) 18 | js = sparql_network.to_json() 19 | self.assertTrue('graph' in js) 20 | -------------------------------------------------------------------------------- /test/unit/graph_magic/GraphNotebookTest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | 7 | import unittest 8 | 9 | from IPython import get_ipython 10 | from IPython.terminal.interactiveshell import TerminalInteractiveShell 11 | 12 | 13 | class GraphNotebookTest(unittest.TestCase): 14 | @classmethod 15 | def setUpClass(cls) -> None: 16 | super().setUpClass() 17 | ip = get_ipython() 18 | if ip is None: 19 | ip = TerminalInteractiveShell().instance() 20 | 21 | ip.magic('load_ext graph_notebook.magics') 22 | cls.config = ip.run_line_magic('graph_notebook_config', '') 23 | cls.ip = ip 24 | -------------------------------------------------------------------------------- /test/unit/network/gremlin/test_generate_id_from_dict.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import unittest 7 | 8 | from graph_notebook.network.gremlin.GremlinNetwork import generate_id_from_dict 9 | 10 | 11 | class TestGenerateIDFromDict(unittest.TestCase): 12 | def test_generate_id_from_dict_is_persistent(self): 13 | data = {'foo': 'val1', 'bar': 123, 'baz': ['a', 1]} 14 | generated_id = generate_id_from_dict(data) 15 | 16 | data_copy = {'foo': 'val1', 'bar': 123, 'baz': ['a', 1]} 17 | generated_id_again = generate_id_from_dict(data_copy) 18 | self.assertEqual(generated_id, generated_id_again) 19 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # IDE directories 2 | .vscode 3 | .idea 4 | **/.DS_Store 5 | 6 | # python-generated directories 7 | venv 8 | **/__pycache__ 9 | .pytest_cache 10 | src/graph_notebook.egg-info 11 | **.pyc 12 | build 13 | dist 14 | .python-version 15 | unins 16 | 17 | MANIFEST 18 | 19 | # do not include widget typescript output directories 20 | src/graph_notebook/widgets/dist/ 21 | 
src/graph_notebook/widgets/docs/ 22 | src/graph_notebook/widgets/labextension/ 23 | src/graph_notebook/widgets/nbextension/ 24 | src/graph_notebook/widgets/node_modules/ 25 | src/graph_notebook/widgets/lib/ 26 | 27 | # npm 28 | node_modules/ 29 | node_modules/.package-lock.json 30 | src/graph_notebook/widgets/package-lock.json 31 | blazegraph.jnl 32 | rules.log 33 | *.env 34 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/.eslintrc.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | parser: "@typescript-eslint/parser", // Specifies the ESLint parser 3 | parserOptions: { 4 | ecmaVersion: 2020, // Allows for the parsing of modern ECMAScript features 5 | sourceType: "module", // Allows for the use of imports 6 | }, 7 | extends: [ 8 | "plugin:@typescript-eslint/recommended", // Uses the recommended rules from the @typescript-eslint/eslint-plugin 9 | "prettier", 10 | ], 11 | plugins: ["prettier"], 12 | rules: { 13 | // Place to specify ESLint rules. Can be used to overwrite rules specified from the extended configs 14 | // e.g. 
"@typescript-eslint/explicit-function-return-type": "off", 15 | "prettier/prettier": ["error"], 16 | }, 17 | }; 18 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "declaration": true, 4 | "composite": true, 5 | "incremental": true, 6 | "tsBuildInfoFile": "./lib/.tsbuildinfo" , 7 | "esModuleInterop": true, 8 | "lib": ["ES2022", "DOM"], 9 | "module": "commonjs", 10 | "moduleResolution": "node", 11 | "noEmitOnError": true, 12 | "noUnusedLocals": true, 13 | "outDir": "lib", 14 | "resolveJsonModule": true, 15 | "rootDir": "src", 16 | "skipLibCheck": true, 17 | "sourceMap": false, 18 | "strict": true, 19 | "target": "ES2022", 20 | "noImplicitAny": false // to allow feather-icons 21 | }, 22 | "include": ["src/**/*.ts", "src/**/*.tsx"], 23 | "exclude": ["node_modules"] 24 | } 25 | -------------------------------------------------------------------------------- /src/graph_notebook/start_jupyterlab.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import os 7 | import argparse 8 | from graph_notebook.ipython_profile.configure_ipython_profile import configure_magics_extension 9 | 10 | 11 | def main(): 12 | parser = argparse.ArgumentParser() 13 | parser.add_argument('--jupyter-dir', default='', type=str, help='The directory to start Jupyter from.') 14 | 15 | args = parser.parse_args() 16 | 17 | configure_magics_extension() 18 | 19 | jupyter_dir = '~/notebook/destination/dir' if args.jupyter_dir == '' else args.jupyter_dir 20 | os.system(f'''jupyter lab {jupyter_dir}''') 21 | 22 | 23 | if __name__ == '__main__': 24 | main() 25 | -------------------------------------------------------------------------------- /test/integration/without_iam/system/test_system_without_iam.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from test.integration import IntegrationTest 3 | 4 | 5 | class TestStatusWithoutIAM(IntegrationTest): 6 | 7 | @pytest.mark.neptune 8 | def test_do_database_reset_initiate(self): 9 | res = self.client.initiate_reset() 10 | result = res.json() 11 | self.assertNotEqual(result['payload']['token'], '') 12 | 13 | @pytest.mark.neptune 14 | def test_do_database_reset_perform_with_wrong_token(self): 15 | res = self.client.perform_reset('invalid') 16 | assert res.status_code == 400 17 | expected_message = "System command parameter 'token' : 'invalid' does not match database reset token" 18 | assert expected_message == res.json()['detailedMessage'] 19 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/extension.ts: -------------------------------------------------------------------------------- 1 | // Copyright (c) Jupyter Development Team. 2 | // Distributed under the terms of the Modified BSD License. 3 | 4 | // Entry point for the notebook bundle containing custom model definitions. 
5 | // 6 | // Setup notebook base URL 7 | // 8 | // Some static assets may be required by the custom widget javascript. The base 9 | // url for the notebook is not known at build time and is therefore computed 10 | // dynamically. 11 | 12 | declare global { 13 | interface Window { 14 | __webpack_public_path__: string; 15 | } 16 | } 17 | 18 | // eslint-disable-next-line @typescript-eslint/camelcase 19 | window.__webpack_public_path__ = 20 | document.body.getAttribute("data-base-url") + 21 | "nbextensions/graph_notebook_widgets"; 22 | 23 | export * from "./index"; 24 | -------------------------------------------------------------------------------- /src/graph_notebook/magics/parsing/replace_namespace_vars.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | 3 | 4 | def str_to_namespace_var(key: str, local_ns: dict) -> any: 5 | if local_ns is None: 6 | return key 7 | 8 | if type(key) is not str: 9 | return key 10 | 11 | tmp_key = key.strip() 12 | if not (tmp_key.startswith('${') and tmp_key.endswith('}')): 13 | return key 14 | else: 15 | tmp_key = tmp_key[2:-1].strip() 16 | return key if tmp_key not in local_ns else local_ns[tmp_key] 17 | 18 | 19 | def replace_namespace_vars(args: argparse.Namespace, local_ns: dict): 20 | if local_ns is None or local_ns == {}: 21 | return 22 | 23 | for key in list(args.__dict__.keys()): 24 | new_value = str_to_namespace_var(args.__dict__[key], local_ns) 25 | args.__dict__[key] = new_value 26 | -------------------------------------------------------------------------------- /additional-databases/sagemaker/README.md: -------------------------------------------------------------------------------- 1 | ## Using graph-notebook on Amazon Sagemaker 2 | 3 | Amazon Sagemaker Notebooks provide an easy and effective solution for hosting, configuring, and running `graph-notebook` against a graph database. 
These notebooks also serve as the base platform for [Neptune Workbench](https://docs.aws.amazon.com/neptune/latest/userguide/graph-notebooks.html). 4 | 5 | If you would like to manually deploy a Neptune Workbench instance via AWS CloudFormation, please see the instructions in the [`neptune-notebook-cloudformation`](https://github.com/aws/graph-notebook/blob/main/additional-databases/sagemaker/neptune-notebook-cloudformation) folder. 6 | 7 | For non-Neptune use cases, you can follow the instructions in the [`sagemaker-notebook-lifecycle`](https://github.com/aws/graph-notebook/blob/main/additional-databases/sagemaker/sagemaker-notebook-lifecycle) folder. -------------------------------------------------------------------------------- /src/graph_notebook/static_resources/install.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import os 7 | import site 8 | from shutil import copy2 9 | from os.path import join as pjoin 10 | 11 | files = [ 12 | 'datatables.css', 13 | 'datatables.js' 14 | ] 15 | 16 | 17 | def main(): 18 | sitepackages = site.getsitepackages() 19 | static_base_directory = sitepackages[0] if os.name != 'nt' else sitepackages[1] 20 | destination = pjoin(static_base_directory, 'notebook', 'static') 21 | 22 | dir_path = os.path.dirname(os.path.realpath(__file__)) 23 | for file in files: 24 | full_path = pjoin(dir_path, file) 25 | print(f'copying file {file} to {destination}') 26 | copy2(full_path, destination) 27 | 28 | 29 | if __name__ == '__main__': 30 | main() 31 | -------------------------------------------------------------------------------- /test/integration/GraphNotebookIntegrationTest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import json 7 | 8 | from IPython import get_ipython 9 | from IPython.terminal.interactiveshell import TerminalInteractiveShell 10 | 11 | from test.integration import IntegrationTest 12 | 13 | 14 | class GraphNotebookIntegrationTest(IntegrationTest): 15 | @classmethod 16 | def setUpClass(cls) -> None: 17 | super().setUpClass() 18 | 19 | def setUp(self) -> None: 20 | super().setUp() 21 | 22 | ip = get_ipython() 23 | if ip is None: 24 | ip = TerminalInteractiveShell().instance() 25 | self.ip = ip 26 | 27 | self.ip.magic('load_ext graph_notebook.magics') 28 | self.ip.run_cell_magic('graph_notebook_config', '', json.dumps(self.config.to_dict())) 29 | -------------------------------------------------------------------------------- /additional-databases/sagemaker/sagemaker-notebook-lifecycle/README.md: -------------------------------------------------------------------------------- 1 | ## Launching graph-notebook on Amazon SageMaker using a lifecycle 2 | You can easily configure graph-notebook to run on an Amazon SageMaker Notebook instance by using a lifecycle configuration. To learn more about lifecycle configurations and how to create one, see [documentation](https://docs.aws.amazon.com/sagemaker/latest/dg/notebook-lifecycle-config.html). 3 | 4 | Use the sample lifecycle configuration in this folder, [`install-graph-notebook-lc.sh`](install-graph-notebook-lc.sh) ([`install-graph-notebook-lc-cn.sh`](install-graph-notebook-lc-cn.sh) if using `cn-north-1` or `cn-northwest-1` region) or create your own shell script. 
5 | 6 | After you create a lifecycle configuration on SageMaker, you can create new notebook instances by specifying a saved lifecycle configuration: 7 | 8 | ![create-a-notebook](/images/Create-Notebook-Instance.png) 9 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | # Jupyter ecosystem 2 | ipyfilechooser==0.6.0 3 | ipykernel>=6.5.0 4 | ipython>=7.16.1,<=8.10.0 5 | ipywidgets>=8.0.0,<9.0.0 6 | jupyter-server>=2.0.0,<3.0.0 7 | jupyter-server-proxy>=4.0.0,<5.0.0 8 | jupyter_client>=8.0.0,<9.0.0 9 | jupyterlab>=4.3.5,<5.0.0 10 | jupyterlab-widgets>=3.0.0,<4.0.0 11 | nbclient>=0.7.3 12 | nbconvert>=6.3.0,<=7.2.8 13 | notebook>=7.0.0,<8.0.0 14 | nbclassic>=1.3.0 15 | 16 | # Data processing and visualization 17 | itables>=2.0.0,<=2.1.0 18 | networkx>3.0,<4.0 19 | numpy>1.24.0 20 | pandas>2.2.2 21 | 22 | # Graph databases and query languages 23 | gremlinpython>=3.5.1,<=3.7.2 24 | neo4j>=5.0.0,<=5.23.1 25 | rdflib==7.0.0 26 | SPARQLWrapper==2.0.0 27 | 28 | # AWS SDK 29 | boto3>=1.34.74 30 | botocore>=1.34.74 31 | 32 | # Utilities 33 | async-timeout>=4.0,<5.0 34 | jedi>=0.18.1,<=0.18.2 35 | Jinja2>=3.0.3,<=3.1.4 36 | json-repair==0.29.2 37 | nest_asyncio>=1.5.5,<=1.6.0 38 | requests>=2.32.0,<=2.32.2 -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/gremlin_explain_profile.html: -------------------------------------------------------------------------------- 1 | 2 | 3 |
4 | {% block style %} 5 | 23 | {% endblock %} 24 | Download 25 |
26 |
{{content|e}}
27 |
28 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Community Note** 11 | * Please use a 👍 reaction to provide a +1/vote. This helps the community and maintainers prioritize this request. 12 | * If you are interested in working on this issue or have submitted a pull request, please leave a comment. 13 | 14 | **Graph Notebook Version (and Graph Database and Version used if applicable)** 15 | 16 | **Is your feature request related to a problem? Please describe.** 17 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 18 | 19 | **Describe the solution you'd like** 20 | A clear and concise description of what you want to happen. 21 | 22 | **Additional context** 23 | Add any other context or screenshots about the feature request here. Describe any alternatives you've considered. 24 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/opencypher_syntax/static/main.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | require(['notebook/js/codecell'], function(codecell) { 7 | codecell.CodeCell.options_default.highlight_modes['magic_application/x-cypher-query'] = {'reg':["^%%oc", "^%%opencypher"]} ; 8 | Jupyter.notebook.events.one('kernel_ready.Kernel', function(){ 9 | Jupyter.notebook.get_cells().map(function(cell) { 10 | if (cell.cell_type === 'code') { 11 | cell.auto_highlight(); 12 | const config = cell.config; 13 | let patch = { 14 | CodeCell:{ 15 | cm_config:{ 16 | smartIndent: false 17 | } 18 | } 19 | }; 20 | config.update(patch) 21 | } 22 | }); 23 | }); 24 | }); -------------------------------------------------------------------------------- /src/graph_notebook/widgets/css/theme-variables.css: -------------------------------------------------------------------------------- 1 | /** 2 | * Theme variables for graph-notebook 3 | * 4 | * Define custom CSS variables from the JupyterLabs' theme 5 | */ 6 | 7 | :root { 8 | /* Base colors */ 9 | --bg-primary: var(--jp-layout-color1, white); 10 | --bg-secondary: var(--jp-layout-color2, #f4f4f4); 11 | --font-color: var(--jp-content-font-color1, black); 12 | --border-color: var(--jp-border-color1, lightgrey); 13 | --shadow-color: var(--jp-shadow-base-color, grey); 14 | 15 | /* Interactive elements */ 16 | --accent-color: var(--jp-brand-color1, #4c6b9e); 17 | --accent-text-color: var(--jp-ui-inverse-font-color1, white); 18 | 19 | /* Table colors */ 20 | --table-row-odd: var(--jp-layout-color2, #f4f4f4); 21 | --table-row-even: var(--jp-layout-color1, white); 22 | 23 | /* Menu icon stroke */ 24 | --icon-stroke: black; 25 | } 26 | 27 | body.jp-mod-dark, 28 | body[data-jp-theme-name="JupyterLab Dark"] { 29 | --icon-stroke: white; 30 | } 31 | -------------------------------------------------------------------------------- /test/integration/DataDrivenSparqlTest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. 
class DataDrivenSparqlTest(IntegrationTest):
    """Integration test base that seeds the target database with the 'epl' SPARQL dataset."""

    def setUp(self) -> None:
        super().setUp()

        # Run every seed query for the 'epl' SPARQL dataset against the endpoint.
        epl_queries = get_queries('sparql', 'epl')
        for q in epl_queries:
            try: # we are deciding to try except because we do not know if the database we are connecting to has a partially complete set of airports data or not.
                res = self.client.sparql(q['content'].strip())
                print(res)
            except Exception as e:
                logger.error(f'query {q["content"]} failed due to {e}')
                continue
6 | 7 | Thus, it is necessary to define the path to the dataset's SPARQL endpoint in the `sparql.path` part of the configuration, 8 | like in the following example that connects to `http://localhost:3030/ds/sparql`: 9 | 10 | ``` 11 | %%graph_notebook_config 12 | { 13 | "host": "localhost", 14 | "port": 3030, 15 | "ssl": false, 16 | "sparql": { 17 | "path": "ds" 18 | } 19 | } 20 | ``` 21 | -------------------------------------------------------------------------------- /additional-databases/graphdb/README.md: -------------------------------------------------------------------------------- 1 | ## Connecting Graph Notebook to GraphDB SPARQL Endpoint 2 | 3 | [GraphDB](https://graphdb.ontotext.com//) is a highly efficient and robust graph database with RDF and SPARQL support. 4 | 5 | For instructions on setting up and running GraphDB locally, please refer to the [GraphDB Quickstart](https://graphdb.ontotext.com/documentation/standard/quick-start-guide.html) guide. 6 | 7 | After the local setup of GraphDB is complete, use one of the following configurations to connect: 8 | 9 | Queries: 10 | ``` 11 | %%graph_notebook_config 12 | 13 | { 14 | "host": "localhost", 15 | "port": 7200, 16 | "ssl": false, 17 | "sparql": { 18 | "path": "repositories/" 19 | } 20 | } 21 | ``` 22 | 23 | Updates: 24 | ``` 25 | %%graph_notebook_config 26 | 27 | { 28 | "host": "localhost", 29 | "port": 7200, 30 | "ssl": false, 31 | "sparql": { 32 | "path": "repositories//statements" 33 | } 34 | } 35 | ``` 36 | 37 | 38 | 39 | -------------------------------------------------------------------------------- /src/graph_notebook/nbextensions/playable_cells/static/main.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | define([ 7 | 'require', 8 | 'jquery', 9 | 'base/js/namespace', 10 | ], function ( 11 | requirejs, 12 | $, 13 | Jupyter, 14 | ) { 15 | "use strict"; 16 | 17 | var initialize = function () { 18 | // add our extension's css to the page 19 | $('') 20 | .attr({ 21 | rel: 'stylesheet', 22 | type: 'text/css', 23 | href: requirejs.toUrl('./playable_cells.css') 24 | }) 25 | .appendTo('head'); 26 | }; 27 | 28 | var load_ipython_extension = function () { 29 | return Jupyter.notebook.config.loaded.then(initialize); 30 | }; 31 | 32 | // return object to export public methods 33 | return { 34 | load_ipython_extension : load_ipython_extension 35 | }; 36 | }); -------------------------------------------------------------------------------- /src/graph_notebook/widgets/src/force_widget.spec.ts: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | import { Message } from "./types"; 7 | import { assert, expect } from "chai"; 8 | import { spy } from "sinon"; 9 | 10 | describe("force_widget", function () { 11 | describe("Message", function () { 12 | it("should initalize a message", function () { 13 | const method = "test"; 14 | const data = { a: 1 }; 15 | const message: Message = new Message(method, data); 16 | assert.equal(method, message.method); 17 | assert.deepEqual(data, message.data); 18 | }); 19 | }); 20 | 21 | describe("console", function () { 22 | it("should log an info", function () { 23 | const consoleSpy = spy(console, "info"); 24 | const message = "test"; 25 | const data = { a: 1 }; 26 | console.info(message, data); 27 | expect(consoleSpy.calledWith(message, data)).to.be.ok; 28 | }); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/integration/iam/status/test_status_with_iam.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
class TestStatusWithIAM(IntegrationTest):
    """Exercises the Neptune status endpoint with and without SigV4 (IAM) signing."""

    def setUp(self) -> None:
        super().setUp()
        # Build a client that signs requests using the default botocore credential chain.
        self.client = self.client_builder.with_iam(get_session()).build()

    @pytest.mark.neptune
    @pytest.mark.iam
    def test_do_status_with_iam_credentials(self):
        # A signed request should succeed and report a healthy cluster.
        res = self.client.status()
        assert res.status_code == 200
        status = res.json()
        self.assertEqual(status['status'], 'healthy')

    @pytest.mark.neptune
    @pytest.mark.iam
    def test_do_status_without_iam_credentials(self):
        # With IAM auth enforced on the cluster, an unsigned request must be rejected.
        client = self.client_builder.with_iam(None).build()
        res = client.status()
        assert res.status_code != 200
30 | 31 | **Expected behavior** 32 | A clear and concise description of what you expected to happen. 33 | -------------------------------------------------------------------------------- /test/integration/without_iam/opencypher/test_opencypher_query_without_iam.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | import pytest 6 | 7 | from test.integration import IntegrationTest 8 | 9 | 10 | class TestOpenCypher(IntegrationTest): 11 | @pytest.mark.opencypher 12 | @pytest.mark.neptune 13 | def test_do_oc_query(self): 14 | query = 'MATCH (n) RETURN n LIMIT 1' 15 | oc_http = self.client.opencypher_http(query) 16 | assert oc_http.status_code == 200 17 | results = oc_http.json() 18 | assert type(results["results"]) is list 19 | 20 | for r in results["results"]: 21 | assert type(r) is dict 22 | 23 | self.assertEqual(type(results["results"]), list) 24 | 25 | @pytest.mark.opencypher 26 | @pytest.mark.bolt 27 | def test_do_oc_bolt_query(self): 28 | query = 'MATCH (p) RETURN p LIMIT 10' 29 | res = self.client.opencyper_bolt(query) 30 | assert len(res) == 10 31 | -------------------------------------------------------------------------------- /src/graph_notebook/widgets/webpack.dev.config.js: -------------------------------------------------------------------------------- 1 | /* 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | const path = require("path"); 7 | const webpackConfig = require("./webpack.config.js"); 8 | 9 | const devServer = { 10 | contentBase: path.resolve(__dirname, "nbextension", "static"), 11 | headers: { 12 | "Access-Control-Allow-Origin": "*", 13 | "Access-Control-Allow-Methods": "*", 14 | "Access-Control-Allow-Headers": "*", 15 | }, 16 | hotOnly: true, 17 | inline: true, 18 | port: 9000, 19 | }; 20 | 21 | const extensionModule = webpackConfig[0]; 22 | extensionModule.mode = "development"; 23 | extensionModule.entry = "extension.dev.js"; 24 | extensionModule.devServer = devServer; 25 | 26 | const indexModule = webpackConfig[1]; 27 | indexModule.mode = "development"; 28 | indexModule.entry = "index.dev.ts"; 29 | indexModule.output.publicPath = "http://localhost:9000/"; 30 | indexModule.devServer = devServer; 31 | 32 | module.exports = [extensionModule, indexModule]; 33 | -------------------------------------------------------------------------------- /test/unit/sparql/test_sparql.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
class TestSparql(unittest.TestCase):
    """Unit tests for SPARQL query-type detection and query-type-to-action mapping."""

    def test_get_query_type(self):
        # Pairs of (query text, expected detected query type).
        cases = [
            ('SELECT * WHERE { ?s ?p ?o }', 'SELECT'),
        ]

        for query, expected in cases:
            self.assertEqual(expected, get_query_type(query))

    def test_query_type_to_action(self):
        # Pairs of (query type, expected Neptune action endpoint).
        cases = [
            ('SELECT', 'sparql'),
            ('INSERT', 'sparqlupdate'),
        ]

        for query_type, expected in cases:
            self.assertEqual(expected, query_type_to_action(query_type))
28 | */ 29 | const plugin: JupyterFrontEndPlugin = { 30 | id: EXTENSION_ID, 31 | requires: [IJupyterWidgetRegistry], 32 | activate, 33 | autoStart: true, 34 | }; 35 | 36 | export default plugin; -------------------------------------------------------------------------------- /src/graph_notebook/neptune/gremlin/hashable_dict_patch.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | # Backport from TinkerPop 3.5.0 pre-release 7 | # https://github.com/apache/tinkerpop/blob/master/gremlin-python/src/main/python/gremlin_python/structure/io/util.py 8 | # https://github.com/apache/tinkerpop/pull/1314/files 9 | # https://github.com/apache/tinkerpop/pull/1383/files 10 | 11 | 12 | class HashableDict(dict): 13 | def __hash__(self): 14 | try: 15 | return hash(tuple(sorted(self.items()))) 16 | except: 17 | return hash(tuple(sorted(str(x) for x in self.items()))) 18 | 19 | @classmethod 20 | def of(cls, o): 21 | if isinstance(o, (tuple, set, list)): 22 | return tuple([cls.of(e) for e in o]) 23 | elif not isinstance(o, (dict, HashableDict)): 24 | return o 25 | 26 | new_o = HashableDict() 27 | for k, v in o.items(): 28 | if isinstance(k, (set, list)): 29 | new_o[tuple(k)] = cls.of(v) 30 | else: 31 | new_o[k] = cls.of(v) 32 | return new_o 33 | -------------------------------------------------------------------------------- /test/unit/network/gremlin/test_add_results.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
class TestAddResults(unittest.TestCase):
    """Tests for GremlinNetwork.add_results handling of Gremlin Path results."""

    def test_add_primitive_path(self):
        # A Path of plain strings: every element becomes a node, with edges
        # linking consecutive elements of the path.
        a = 'a'
        edge = 'a_to_b'
        b = 'b'

        p = Path([], [a, edge, b])
        paths = [p]

        gn = GremlinNetwork()
        gn.add_results(paths)

        self.assertTrue(gn.graph.has_node(a))
        self.assertTrue(gn.graph.has_node(b))
        self.assertTrue(gn.graph.has_node(edge))  # note, this is not of type Edge so we assume it is a node
        self.assertEqual(2, len(gn.graph.edges))

    def test_add_dicts_without_ids(self):
        # Dicts lacking explicit ids should still each produce a distinct node.
        dict_1 = {'foo': 'value', 'bar': 'something'}
        dict_2 = {'foo': 'other_value'}
        p = Path([], [dict_1, dict_2])
        gn = GremlinNetwork()
        gn.add_results([p])
        self.assertEqual(len(p), len(gn.graph.nodes))
class TestSparqlQuery(IntegrationTest):
    """SPARQL query and explain integration tests against an unauthenticated endpoint."""

    @pytest.mark.sparql
    @pytest.mark.neptune
    def test_do_sparql_query(self):
        query = "SELECT * WHERE {?s ?p ?o} LIMIT 1"

        sparql_res = self.client.sparql(query)
        assert sparql_res.status_code == 200
        res = sparql_res.json()

        # The JSON result header must declare all three projected variables.
        self.assertEqual(type(res), dict)
        self.assertTrue('s' in res['head']['vars'])
        self.assertTrue('p' in res['head']['vars'])
        self.assertTrue('o' in res['head']['vars'])

    @pytest.mark.sparql
    @pytest.mark.neptune
    def test_do_sparql_explain(self):
        query = "SELECT * WHERE {?s ?p ?o} LIMIT 1"
        query_res = self.client.sparql_explain(query)
        assert query_res.status_code == 200
        res = query_res.content.decode('utf-8')
        self.assertEqual(type(res), str)
        # NOTE(review): startswith('') is vacuously true; the intended prefix
        # (likely an HTML/doctype tag) appears to have been lost — confirm.
        self.assertTrue(res.startswith(''))
class TestOpenCypherQueryWithIam(DataDrivenOpenCypherTest):
    """openCypher query integration tests using SigV4 (IAM) signed requests."""

    def setUp(self) -> None:
        super().setUp()
        # Build a client that signs requests using the default botocore credential chain.
        self.client = self.client_builder.with_iam(get_session()).build()

    @pytest.mark.neptune
    @pytest.mark.opencypher
    def test_do_opencypher_query(self):
        # Relies on the EPL seed data loaded by DataDrivenOpenCypherTest.setUp.
        expected_league_name = 'English Premier League'
        query = 'MATCH (l:League) RETURN l.name'
        oc_res = self.client.opencypher_http(query)
        assert oc_res.status_code == 200

        res = oc_res.json()
        assert isinstance(res, dict)
        assert expected_league_name == res['results'][0]['l.name']

    @pytest.mark.opencypher
    @pytest.mark.bolt
    def test_do_opencypher_bolt_query(self):
        query = 'MATCH (p) RETURN p LIMIT 10'
        res = self.client.opencyper_bolt(query)
        assert len(res) == 10
import os

from jinja2.sandbox import SandboxedEnvironment

# Load the tab container template once at import time and compile it in a
# sandboxed Jinja2 environment so tab content cannot execute template logic.
dir_path = os.path.dirname(os.path.realpath(__file__))
with open('%s/templates/tabs.html' % dir_path, 'r') as tab_template_file:
    tab_template = tab_template_file.read().strip()

env = SandboxedEnvironment()
template = env.from_string(tab_template)


class Visualizer(object):
    """Collects named Tab objects and renders them into a single tabbed HTML view."""

    def __init__(self, query_count=0):
        self.tabs = []
        self.query_count = query_count  # ordinal of the query this visualization belongs to

    def register_tab(self, tab):
        """Append a Tab to be rendered; tabs are displayed in registration order."""
        self.tabs.append(tab)

    def to_html(self):
        """Render all registered tabs to HTML, with the first tab shown by default.

        Each tab's attribute dict is copied before rendering: ``t.__dict__`` is
        the live attribute mapping, so writing ``display_class`` through it
        would permanently flip the registered Tab instance to 'show'.
        """
        tabs = [dict(t.__dict__) for t in self.tabs]

        # set the first tab as the active one
        if len(tabs) > 0:
            tabs[0]['display_class'] = 'show'

        html = template.render(tabs=tabs)
        return html


class Tab(object):
    """A single named pane of rendered content; hidden until selected."""

    def __init__(self, name, content):
        self.name = name
        self.content = content
        self.display_class = 'hide'
class DataDrivenOpenCypherTest(IntegrationTest):
    """Integration test base that seeds the target database with the 'epl' openCypher dataset."""

    def setUp(self):
        super().setUp()
        # check if the data is already loaded
        query = '''MATCH (norwichcity:Team)-[:CURRENT_LEAGUE]->(epl:League) RETURN norwichcity'''
        res = self.client.opencypher_http(query)
        res.raise_for_status()
        js = res.json()
        if len(js['results']) > 0:
            return
        # Data not present yet: run every seed query for the 'epl' openCypher dataset.
        airport_queries = get_queries('opencypher', 'epl')
        for q in airport_queries:
            try: # we are deciding to try except because we do not know if the database we are connecting to has a partially complete set of airports data or not.
                self.client.opencypher_http(q['content'])
            except Exception as e:
                logger.error(f'query {q["content"]} failed due to {e}')
                continue
def _jupyter_nbextension_paths():
    """Declare the classic-notebook (nbclassic) extensions bundled with graph-notebook.

    Each entry maps a plugin's static assets into the notebook's extension tree
    and names its require.js entry point.
    """
    plugin_names = [
        "gremlin_syntax",
        "sparql_syntax",
        "opencypher_syntax",
        "neptune_menu",
        "playable_cells",
    ]
    return [
        dict(
            section="notebook",
            src="%s/static" % name,
            dest=name,
            require="%s/main" % name,
        )
        for name in plugin_names
    ]


def _jupyter_server_extension_points():
    """Register this module as a Jupyter server extension."""
    return [{
        "module": "graph_notebook.nbextensions"
    }]
3 | SPDX-License-Identifier: Apache-2.0 4 | */ 5 | 6 | const webpackConfig = require("./webpack.config.js"); 7 | 8 | const rules = webpackConfig[1].module.rules; 9 | rules.unshift({ 10 | test: /\.tsx?$/, 11 | enforce: "pre", 12 | exclude: /node_modules/, 13 | use: [{ loader: "eslint-loader", options: { emitWarning: true } }], 14 | }); 15 | 16 | module.exports = function (config) { 17 | config.set({ 18 | basePath: "", 19 | frameworks: ["mocha", "chai", "sinon"], 20 | files: ["src/**/*.spec.ts"], 21 | exclude: [], 22 | preprocessors: { 23 | "src/**/*.spec.ts": ["webpack"], 24 | }, 25 | webpack: { 26 | mode: "development", 27 | devtool: "eval-source-map", 28 | module: { 29 | rules: rules, 30 | }, 31 | resolve: webpackConfig[1].resolve, 32 | plugins: webpackConfig[1].plugins, 33 | }, 34 | webpackMiddleware: { 35 | stats: { 36 | colors: true, 37 | }, 38 | // stats: "detailed" 39 | }, 40 | reporters: ["progress", "mocha"], 41 | port: 9876, 42 | colors: true, 43 | logLevel: config.LOG_INFO, 44 | autoWatch: true, 45 | browsers: ["Firefox"], 46 | singleRun: true, 47 | concurrency: Infinity, 48 | }); 49 | }; 50 | -------------------------------------------------------------------------------- /test/unit/graph_magic/test_graph_magic.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
class TestGraphMagicLoadExt(GraphNotebookTest):
    """Tests that the graph_notebook magics register and round-trip configuration."""

    def test_load_graph_magic_succeeds(self):
        # After loading the extension, the config line magic must be registered.
        res = self.ip.run_line_magic('lsmagic', '')
        self.assertTrue('graph_notebook_config' in res.magics_manager.magics['line'])

    def test_graph_notebook_config(self):
        ip = get_ipython()
        ip.magic('load_ext graph_notebook.magics')

        # The line magic with no args returns the current configuration.
        res: Configuration = ip.run_line_magic('graph_notebook_config', '')
        config_dict = res.to_dict()
        self.assertEqual(self.config.to_dict(), res.to_dict())

        # The cell magic replaces the configuration with the supplied JSON body...
        config_dict['host'] = 'this-was-changed'
        res2: Configuration = ip.run_cell_magic('graph_notebook_config', '', json.dumps(config_dict))
        config_dict2 = res2.to_dict()

        # ...and a subsequent read must reflect that replacement.
        res3: Configuration = ip.run_line_magic('graph_notebook_config', '')
        config_dict3 = res3.to_dict()

        self.assertEqual(config_dict2, config_dict3)
6 | 7 | After local setup of Blazegraph is complete, set the following configuration to connect from graph-notebook: 8 | 9 | ``` 10 | %%graph_notebook_config 11 | 12 | { 13 | "host": "localhost", 14 | "port": 9999, 15 | "ssl": false, 16 | "sparql": { 17 | "path": "sparql" 18 | } 19 | } 20 | ``` 21 | 22 | Blazegraph also supports use of namespaces, which are used to refer to multiple triple or quad stores that are hosted in the same Blazegraph instance, and can be queried independently. 23 | 24 | To direct SPARQL queries executed from `graph-notebook` to a specific namespace, you can specify the namespace path in your config: 25 | 26 | ``` 27 | %%graph_notebook_config 28 | 29 | { 30 | "host": "localhost", 31 | "port": 9999, 32 | "ssl": false, 33 | "sparql": { 34 | "path": "blazegraph/namespace/foo/sparql" 35 | } 36 | } 37 | ``` 38 | 39 | This will result in the url `localhost:9999/blazegraph/namespace/foo/sparql` being used when executing any `%%sparql` magic commands. 40 | 41 | 42 | 43 | -------------------------------------------------------------------------------- /src/graph_notebook/neptune/bolt_auth_token.py: -------------------------------------------------------------------------------- 1 | import json 2 | 3 | from neo4j import Auth 4 | from botocore.awsrequest import AWSRequest 5 | from botocore.credentials import Credentials 6 | from botocore.compat import urlsplit 7 | from botocore.auth import SigV4Auth 8 | 9 | SCHEME = "basic" 10 | REALM = "realm" 11 | SERVICE_NAME = "neptune-db" 12 | DUMMY_USERNAME = "username" 13 | HTTP_METHOD_HDR = "HttpMethod" 14 | HTTP_METHOD = "GET" 15 | AUTHORIZATION = "Authorization" 16 | X_AMZ_DATE = "X-Amz-Date" 17 | X_AMZ_SECURITY_TOKEN = "X-Amz-Security-Token" 18 | HOST = "Host" 19 | 20 | 21 | class NeptuneBoltAuthToken(Auth): 22 | def __init__( 23 | self, 24 | credentials: Credentials, 25 | region: str, 26 | url: str, 27 | **parameters 28 | ): 29 | request = AWSRequest(method=HTTP_METHOD, url=url) 30 | 31 | url_parts = 
def set_nodesteam_yaml(config, directory):
    """Generate nodestream.yaml from nodestream_template.yaml for the active graph config.

    :param config: graph-notebook configuration dict; reads 'neptune_service' and
                   'host', plus 'port' and 'aws_region' for Neptune Database.
    :param directory: location of the example SBOM files, appended to the template's
                      configured paths prefix; leading slashes are stripped.
    """
    with open('nodestream_template.yaml', 'r') as f:
        file = yaml.safe_load(f)

    # Set the example sbom location and strip any prefixed slashes
    directory = directory.lstrip("/")
    file['plugins'][0]['config']['paths'] = f"{file['plugins'][0]['config']['paths']}{directory}"

    # Set the configuration based on if this is a Neptune Database or Neptune Analytics Graph
    if config['neptune_service'] == 'neptune-graph':
        print("Setting configuration for Neptune Analytics")
        # Analytics targets are addressed by graph id — the first label of the host name.
        file['targets']['my-neptune']['graph_id'] = config['host'].split('.')[0]
        file['targets']['my-neptune']['mode'] = 'analytics'
    else:
        print("Setting configuration for Neptune Database")
        file['targets']['my-neptune']['graph_id'] = f"https://{config['host']}:{config['port']}"
        file['targets']['my-neptune']['region'] = config['aws_region']
        # Bug fix: key was misspelled 'mdoe', so database mode was never actually set.
        file['targets']['my-neptune']['mode'] = 'database'

    with open('nodestream.yaml', 'w') as f:
        yaml.dump(file, f, sort_keys=False)

    print("Nodestream SBOM configuration written.")
class TestSPARQLNetworkLabelExtraction(unittest.TestCase):
    """Checks that SPARQLNetwork derives node labels correctly from query bindings."""

    def test_node_and_edge_label_extraction(self):
        network = SPARQLNetwork()
        network.add_results(get_sparql_result("003_large_binding_set.json"))

        # The full binding set should produce exactly 443 nodes.
        self.assertEqual(443, len(network.graph.nodes))

        # Spot-check a few nodes against the labels expected from the json file.
        node_108 = network.graph.nodes.get('http://kelvinlawrence.net/air-routes/resource/108')
        self.assertEqual('NCE', node_108['label'])

        node_1265 = network.graph.nodes.get('http://kelvinlawrence.net/air-routes/resource/1265')
        self.assertEqual('resourc...', node_1265['label'])

    def test_highly_connected_node(self):
        network = SPARQLNetwork()
        network.add_results(get_sparql_result('002_airroutes-labels.json'))

        # JFK is the hub in this fixture: verify its label and property count.
        hub = network.graph.nodes.get('http://kelvinlawrence.net/air-routes/resource/12')
        self.assertEqual('JFK', hub['label'])
        self.assertEqual(14, len(hub['properties']))
PLUGINS = [
    'neptune_menu',
    'gremlin_syntax',
    'sparql_syntax',
    'opencypher_syntax',
    'playable_cells'
]

dir_path = os.path.dirname(os.path.realpath(__file__))


def _run_jupyter(cmd_args):
    """Run a jupyter CLI command and return its exit code (nonzero on failure).

    Uses subprocess with an argument list instead of os.system with an
    interpolated shell string, which avoids shell-quoting/injection issues
    with the user-supplied --plugin-name value.
    """
    import subprocess
    try:
        return subprocess.run(cmd_args).returncode
    except FileNotFoundError:
        # jupyter is not on PATH; mimic a nonzero shell exit status
        return 1


def main():
    """Install and enable the graph-notebook Jupyter extensions.

    Installs the package (or the plugin named by --plugin-name) as a JupyterLab
    labextension, and additionally as a classic nbextension when nbclassic is
    available.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('--plugin-name', default='', type=str, help='install and enable this jupyter plugin')

    args = parser.parse_args()
    plugin_name = 'graph_notebook.nbextensions' if args.plugin_name == '' else args.plugin_name

    # JupyterLab 4 and Notebook 7+ use a different extension system with Prebuilt extensions instead of nbextensions
    # Therefore, we need to install as a labextension for modern environments
    # NOTE: Our custom extension plugins defined here will not work in notebook 7+ yet
    _run_jupyter(['jupyter', 'labextension', 'install', plugin_name])

    # For classic notebook features, we still need nbclassic for traditional nbextension support
    if _run_jupyter(['jupyter', 'nbclassic', '--version']) == 0:  # Check if nbclassic is available
        _run_jupyter(['jupyter', 'nbclassic-extension', 'install', '--py', plugin_name, '--sys-prefix'])
        _run_jupyter(['jupyter', 'nbclassic-extension', 'enable', '--py', plugin_name, '--sys-prefix'])


if __name__ == '__main__':
    main()
class DataDrivenGremlinTest(IntegrationTest):
    """Integration test base that lazily seeds the database with the airports dataset."""

    def setUp(self):
        super().setUp()

        self.client = self.client_builder.build()
        # Probe for the final airports edge; if missing, the dataset needs (re-)seeding.
        query_check_for_airports = "g.V('3745').outE().inV().has(id, '3195')"
        res = self.client.gremlin_query(query_check_for_airports)
        if len(res) < 1:
            logging.info('did not find final airports edge, seeding database now...')
            airport_queries = get_queries('gremlin', 'airports')
            for q in airport_queries:
                lines = q['content'].splitlines()
                # enumerate instead of indexing via range(len(...))
                for i, line in enumerate(lines):
                    logging.debug(f'executing line {i} of {len(lines)} for seeding DataDrivenGremlinTest')
                    # we are deciding to try except because we do not know if the database
                    # we are connecting to has a partially complete set of airports data or not.
                    try:
                        self.client.gremlin_query(line)
                    except Exception as e:
                        logging.error(f'query {q} failed due to {e}')
                        continue
Currently the use case example focuses on how to perform a fraud ring analysis using graph algorithms and traversals to risk anomalous behaviors using a guilt by association approach.\n", 13 | "- [Investment Analysis]('./02-Investment-Analysis') - In this folder you will find use case examples focused on Investment analysis. Currently the use case example focuses on using data from the [EDGAR system](https://www.sec.gov/edgar/search-and-access) to demonstrate how to leverage, graphs, graph analytics, and vector similarity to perform investment analysis of stock holdings." 14 | ] 15 | } 16 | ], 17 | "metadata": { 18 | "kernelspec": { 19 | "display_name": "Python 3", 20 | "language": "python", 21 | "name": "python3" 22 | }, 23 | "language_info": { 24 | "codemirror_mode": { 25 | "name": "ipython", 26 | "version": 3 27 | }, 28 | "file_extension": ".py", 29 | "mimetype": "text/x-python", 30 | "name": "python", 31 | "nbconvert_exporter": "python", 32 | "pygments_lexer": "ipython3", 33 | "version": "3.10.13" 34 | } 35 | }, 36 | "nbformat": 4, 37 | "nbformat_minor": 5 38 | } 39 | -------------------------------------------------------------------------------- /src/graph_notebook/ipython_profile/configure_ipython_profile.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
IPYTHON_DIR_TREE = ipython_paths.get_ipython_dir() + "/profile_default"
IPYTHON_CFG_FILE_NAME = 'ipython_config.json'
IPYTHON_CFG_PATH = IPYTHON_DIR_TREE + '/' + IPYTHON_CFG_FILE_NAME

MAGICS_EXT_NAME = 'graph_notebook.magics'
EXTENSIONS_CFG = {
    "extensions": [
        MAGICS_EXT_NAME
    ]
}


def read_ipython_config():
    """Load the default-profile ipython_config.json as a dict.

    Creates the profile directory if needed. Returns an empty dict when the
    file is missing or holds invalid JSON, so callers can always merge into it.
    """
    try:
        os.makedirs(IPYTHON_DIR_TREE, exist_ok=True)
        with open(IPYTHON_CFG_PATH, 'r') as file:
            ipython_cfg = json.load(file)
    except (json.decoder.JSONDecodeError, FileNotFoundError):
        # Treat a missing or corrupt config as empty rather than failing
        # (the exception variable was unused, so it is no longer bound).
        ipython_cfg = {}

    return ipython_cfg


def write_ipython_config(config):
    """Write *config* back to ipython_config.json, pretty-printed."""
    with open(IPYTHON_CFG_PATH, 'w') as file:
        json.dump(config, file, indent=2)


def configure_magics_extension():
    """Idempotently register graph_notebook.magics in IPython's startup extensions."""
    ipython_cfg = read_ipython_config()
    try:
        if MAGICS_EXT_NAME not in ipython_cfg["InteractiveShellApp"]["extensions"]:
            ipython_cfg["InteractiveShellApp"]["extensions"].append(MAGICS_EXT_NAME)
    except KeyError:
        # Either InteractiveShellApp or its extensions list does not exist yet.
        ipython_cfg["InteractiveShellApp"] = EXTENSIONS_CFG

    write_ipython_config(ipython_cfg)


if __name__ == '__main__':
    configure_magics_extension()
11 | */ 12 | 13 | /** 14 | * Initialize theme detection and apply the custom theme styles to make the widgets compatible 15 | * with JupyterLab's themes. 16 | */ 17 | export function initThemeDetection(): void { 18 | const isJupyterLab = document.body.classList.contains('jp-Notebook') || 19 | document.body.classList.contains('jp-NotebookPanel'); 20 | 21 | if (!isJupyterLab) { 22 | console.log('Not running in JupyterLab, skipping theme detection'); 23 | 24 | return; 25 | } 26 | 27 | applyThemeStyles(); 28 | 29 | const observer = new MutationObserver((mutations) => { 30 | mutations.forEach((mutation) => { 31 | if (mutation.attributeName === 'data-jp-theme-name' || 32 | mutation.attributeName === 'class') { 33 | applyThemeStyles(); 34 | } 35 | }); 36 | }); 37 | 38 | // Observe document body for theme change 39 | observer.observe(document.body, { 40 | attributes: true, 41 | attributeFilter: ['data-jp-theme-name', 'class'] 42 | }); 43 | } 44 | 45 | /** 46 | * Apply theme-specific styles 47 | */ 48 | function applyThemeStyles(): void { 49 | // Update SVG icon stroke 50 | const featherIcons = document.querySelectorAll('.feather'); 51 | 52 | featherIcons.forEach((icon: Element) => { 53 | (icon as SVGElement).style.stroke = 'var(--icon-stroke)'; 54 | }); 55 | } 56 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/opencypher_explain.html: -------------------------------------------------------------------------------- 1 | 52 |
53 |
54 | Download 55 |
56 | {{ table }} 57 |
# Activate the notebook virtualenv and run from the configured working directory.
source /tmp/venv/bin/activate
cd "${WORKING_DIR}"

# Default SSL to on when the variable is unset or empty.
# BUGFIX: the expansion must be quoted -- unquoted, an empty value expanded to
# `[ = "" ]`, a test syntax error, so the default was never applied.
if [ "${GRAPH_NOTEBOOK_SSL}" = "" ]; then
  GRAPH_NOTEBOOK_SSL="True"
fi

# Optionally copy the bundled example notebooks into the image.
if [ "${PROVIDE_EXAMPLES}" -eq 1 ]; then
  python3 -m graph_notebook.notebooks.install --destination "${EXAMPLE_NOTEBOOK_DIR}"
fi

python3 -m graph_notebook.configuration.generate_config \
  --host "${GRAPH_NOTEBOOK_HOST}" \
  --port "${GRAPH_NOTEBOOK_PORT}" \
  --proxy_host "${GRAPH_NOTEBOOK_HOST}" \
  --proxy_port "${GRAPH_NOTEBOOK_PORT}" \
  --auth_mode "${GRAPH_NOTEBOOK_AUTH_MODE}" \
  --ssl "${GRAPH_NOTEBOOK_SSL}" \
  --iam_credentials_provider "${GRAPH_NOTEBOOK_IAM_PROVIDER}" \
  --load_from_s3_arn "${NEPTUNE_LOAD_FROM_S3_ROLE_ARN}" \
  --aws_region "${AWS_REGION}" \


##### Running The Notebook Service #####
mkdir ~/.jupyter
# Fall back to the EC2 instance id as the password when none was provided.
if [ -z "${NOTEBOOK_PASSWORD}" ];
then
    echo "c.NotebookApp.password='$(python -c "from notebook.auth import passwd; print(passwd('`curl -s 169.254.169.254/latest/meta-data/instance-id`'))")'" >> ~/.jupyter/jupyter_notebook_config.py
else
    echo "c.NotebookApp.password='$(python -c "from notebook.auth import passwd; print(passwd('${NOTEBOOK_PASSWORD}'))")'" >> ~/.jupyter/jupyter_notebook_config.py
fi
echo "c.NotebookApp.allow_remote_access = True" >> ~/.jupyter/jupyter_notebook_config.py
echo "c.InteractiveShellApp.extensions = ['graph_notebook.magics']" >> ~/.jupyter/jupyter_notebook_config.py

# Run classic notebook and JupyterLab side by side; keep the container alive.
nohup jupyter notebook --ip='*' --port "${NOTEBOOK_PORT}" "${WORKING_DIR}/notebooks" --allow-root > jupyterserver.log &
nohup jupyter lab --ip='*' --port "${LAB_PORT}" "${WORKING_DIR}/notebooks" --allow-root > jupyterlab.log &
tail -f /dev/null
# Backport from TinkerPop 3.5.0 pre-release
# https://github.com/apache/tinkerpop/blob/master/gremlin-python/src/main/python/gremlin_python/structure/io/graphsonV3d0.py#L474
# https://github.com/apache/tinkerpop/pull/1314/files
class MapType_patch:
    """Patched g:Map deserializer that wraps dict-typed keys so they stay hashable."""

    @classmethod
    def objectify(cls, l, reader):  # noqa E741
        # The GraphSON payload alternates key, value, key, value, ...
        result = {}
        for index in range(0, len(l), 2):
            key = HashableDict.of(reader.toObject(l[index]))
            result[key] = reader.toObject(l[index + 1])
        return result


MapType.objectify = MapType_patch.objectify
54 |
55 | Download 56 |
57 | 58 | {{ table }} 59 |
def get_package_json():
    """Read and return the widget package.json bundled next to this module."""
    module_dir = os.path.dirname(os.path.realpath(__file__))
    with open(os.path.join(module_dir, 'package.json'), 'r') as file:
        return json.load(file)


def _jupyter_nbextension_paths():
    """Declare this package as a classic-notebook nbextension.

    Jupyter Notebook Server calls this to validate and install the widget:
    it copies the webpack output from the `nbextension` source directory into
    <notebook>/nbextensions/graph_notebook_widgets and loads the AMD module
    at graph_notebook_widgets/extension. The section must be 'notebook' for
    widget extensions.
    """
    return [
        {
            'section': 'notebook',
            'src': 'nbextension',
            'dest': 'graph_notebook_widgets',
            'require': 'graph_notebook_widgets/extension',
        }
    ]


def _jupyter_labextension_paths():
    """Declare this package as a JupyterLab labextension.

    Jupyter Lab Server calls this to validate and install the widget by
    copying the prebuilt extension from `labextension` into
    graph_notebook_widgets.
    """
    return [
        {
            'src': 'labextension',
            'dest': 'graph_notebook_widgets',
        }
    ]
name: Install 35 | run: | 36 | python -m build . 37 | pip install ./dist/graph_notebook-*-py3-none-any.whl 38 | - name: Post-install commands 39 | run: | 40 | jupyter nbclassic-extension enable --py --sys-prefix graph_notebook.widgets 41 | python -m graph_notebook.static_resources.install 42 | python -m graph_notebook.nbextensions.install 43 | python -m graph_notebook.notebooks.install 44 | - name: Test with pytest 45 | run: | 46 | pytest test/unit -------------------------------------------------------------------------------- /test/integration/iam/sparql/test_sparql_query_with_iam.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | from json import JSONDecodeError 6 | 7 | import pytest 8 | from botocore.session import get_session 9 | 10 | from test.integration import DataDrivenSparqlTest 11 | 12 | 13 | class TestSparqlQueryWithIam(DataDrivenSparqlTest): 14 | def setUp(self) -> None: 15 | super().setUp() 16 | self.client = self.client_builder.with_iam(get_session()).build() 17 | 18 | @pytest.mark.iam 19 | @pytest.mark.sparql 20 | def test_do_sparql_query(self): 21 | query = "SELECT * WHERE {?s ?p ?o} LIMIT 1" 22 | query_res = self.client.sparql(query) 23 | assert query_res.status_code == 200 24 | res = query_res.json() 25 | 26 | self.assertEqual(type(res), dict) 27 | self.assertTrue('s' in res['head']['vars']) 28 | self.assertTrue('p' in res['head']['vars']) 29 | self.assertTrue('o' in res['head']['vars']) 30 | 31 | @pytest.mark.iam 32 | @pytest.mark.sparql 33 | def test_do_sparql_explain(self): 34 | query = "SELECT * WHERE {?s ?p ?o} LIMIT 1" 35 | query_res = self.client.sparql_explain(query) 36 | assert query_res.status_code == 200 37 | res = query_res.content.decode('utf-8') 38 | self.assertEqual(type(res), str) 39 | self.assertTrue(res.startswith('')) 40 | 41 | @pytest.mark.iam 42 | 
class TestGremlinWithIam(IntegrationTest):
    """Gremlin query/explain/profile round-trips against Neptune using IAM (SigV4) auth."""

    def setUp(self) -> None:
        self.client = self.client_builder.with_iam(get_session()).build()

    @pytest.mark.iam
    @pytest.mark.gremlin
    def test_do_gremlin_query_with_iam(self):
        vertices = self.client.gremlin_query('g.V().limit(1)')
        assert type(vertices) is list
        for vertex in vertices:
            assert type(vertex) is Vertex

    @pytest.mark.iam
    @pytest.mark.gremlin
    def test_do_gremlin_explain_with_iam(self):
        response = self.client.gremlin_explain('g.V().limit(1)')
        assert response.status_code == 200
        self.assertTrue('Explain' in response.content.decode('utf-8'))

    @pytest.mark.iam
    @pytest.mark.gremlin
    def test_do_gremlin_profile_with_iam(self):
        response = self.client.gremlin_profile('g.V().limit(1)')
        assert response.status_code == 200

        self.assertTrue('Profile' in response.content.decode('utf-8'))

    @pytest.mark.iam
    @pytest.mark.gremlin
    def test_iam_gremlin_http_query(self):
        response = self.client.gremlin_http_query('g.V().limit(1)')
        assert response.status_code == 200
        assert 'result' in response.json()

    def test_iam_gremlin_connection(self):
        connection = self.client.get_gremlin_connection()
        connection.submit('g.V().limit(1)')
        assert True  # if we got here then everything worked
class TestGremlin(IntegrationTest):
    """Gremlin behavior without IAM: basic queries, transport limits, explain/profile."""

    @pytest.mark.gremlin
    def test_do_gremlin_query(self):
        vertices = self.client.gremlin_query('g.V().limit(1)')
        assert type(vertices) is list
        for vertex in vertices:
            assert type(vertex) is Vertex

        self.assertEqual(type(vertices), list)

    @pytest.mark.gremlin
    def test_do_gremlin_query_with_content_limit_exceeded(self):
        # a 1-byte cap should always trip the websocket transport limit
        with self.assertRaises(RuntimeError):
            self.client.gremlin_query('g.V().limit(1)', transport_args={'max_content_length': 1})

    @pytest.mark.gremlin
    def test_do_gremlin_query_with_content_limit_not_exceeded(self):
        vertices = self.client.gremlin_query('g.V().limit(1)', transport_args={'max_content_length': 10240})
        self.assertEqual(type(vertices), list)

    @pytest.mark.gremlin
    @pytest.mark.neptune
    def test_do_gremlin_explain(self):
        response = self.client.gremlin_explain('g.V().limit(1)')
        assert response.status_code == 200
        self.assertTrue('Explain' in response.content.decode('utf-8'))

    @pytest.mark.gremlin
    @pytest.mark.neptune
    def test_do_gremlin_profile(self):
        response = self.client.gremlin_profile('g.V().limit(1)')
        assert response.status_code == 200

        self.assertTrue('Profile' in response.content.decode('utf-8'))
def sparql_get_rows_and_columns(sparql_results):
    """Convert a SPARQL JSON result document into tabular form.

    :param sparql_results: parsed SPARQL 1.1 JSON results (a mapping with
        'head.vars' and 'results.bindings').
    :return: {'columns': [...], 'rows': [[...], ...]} with '-' filling
        unbound optional variables, or None when the input is not a
        well-formed SELECT result.
    """
    # isinstance (rather than an exact type() check) also accepts dict
    # subclasses such as OrderedDict, which the original rejected.
    if not isinstance(sparql_results, dict):
        return None

    head = sparql_results.get('head', {})
    results = sparql_results.get('results', {})
    if 'vars' not in head or 'bindings' not in results:
        return None

    columns = list(head['vars'])

    rows = []
    for binding in results['bindings']:
        # handle non-existent bindings for optional variables with a '-' placeholder
        rows.append([binding[c]['value'] if c in binding else '-' for c in columns])

    return {
        'columns': columns,
        'rows': rows
    }
def opencypher_get_rows_and_columns(results, res_format: str = None):
    """Convert an openCypher response into tabular {'columns', 'rows'} form.

    :param results: full response dict (default format) or a bare result list
        (when *res_format* is supplied, e.g. by the Bolt/jolt code paths).
    :param res_format: None for the standard HTTP response shape, or a format
        tag such as 'jolt'; 'jolt' rows are passed through under a single
        "Result" column.
    :return: {'columns': ..., 'rows': [...]}, or None when a default-format
        response carries no results.
    """
    if res_format:
        res = results
    else:
        # default shape: {'results': [...]} -- bail out when empty/missing
        if not results['results']:
            return None
        res = results['results']

    rows = []
    columns = set()

    if res_format == 'jolt':
        # jolt results are opaque; expose them as-is under one column
        if len(res) > 0:
            columns.add("Result")
        rows.extend(res)
    else:
        if len(res) > 0:
            # column order comes from the first record's keys
            columns = res[0].keys()
        rows = [[value for value in record.values()] for record in res]

    return {
        'columns': columns,
        'rows': rows
    }
Aquino International Airport" 38 | } 39 | } 40 | ] 41 | 42 | oc_metadata = build_opencypher_metadata_from_query(query_type='bolt', results=results, results_type='bolt', 43 | query_time=100.0) 44 | meta_dict = oc_metadata.to_dict() 45 | 46 | self.assertEqual(meta_dict["Query mode"], "bolt") 47 | self.assertEqual(meta_dict["Request execution time (ms)"], 100.0) 48 | self.assertEqual(meta_dict["# of results"], 1) 49 | self.assertIsInstance(meta_dict["Response size (bytes)"], int) 50 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/03-Sample-Applications/04-Telco-Networks/1b-Graph_init.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "21451344-eff7-4c5b-b75d-046fbde1c9a2", 6 | "metadata": {}, 7 | "source": [ 8 | "# Loading the Graph data\n", 9 | "\n", 10 | "The data that we use is adapted from the [Stanford Network Analysis Project (SNAP) public Telecom dataset](https://snap.stanford.edu/data/#tcn)\n", 11 | "\n", 12 | "The relationships between users and cells in dataset were extracted and was augmented to add gnodeb nodes and edges between cells and gnodebs to make it similar to a 5G cell network.\n", 13 | "\n", 14 | "The data transformation implementation with AWS Glue can be found in the script `Transform2Neptune.py`\n", 15 | "\n", 16 | "The final dataset used in this application contains Users, Cells, gnodeb, etc. 
and the known links in between\n", 17 | "\n", 18 | "- s3://neptuneml-data/telcograph/data/node_users.csv\n", 19 | "\n", 20 | "- s3://neptuneml-data/telcograph/node_cell.csv\n", 21 | "\n", 22 | "- s3://neptuneml-data/telcograph/gnodeb_node.csv\n", 23 | "\n", 24 | "- s3://neptuneml-data/telcograph/edge_all.csv\n", 25 | "\n", 26 | "- s3://neptuneml-data/telcograph/edge_gnodeb.csv\n" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 1, 32 | "id": "960dfe5d-67ce-43fd-85a4-6b1593790581", 33 | "metadata": { 34 | "tags": [] 35 | }, 36 | "outputs": [], 37 | "source": [ 38 | "# #load command for loading the graph from S3 to Neptunedb\n", 39 | "%load -s s3://neptuneml-data/telco-graph/data -f csv -p OVERSUBSCRIBE" 40 | ] 41 | } 42 | ], 43 | "metadata": { 44 | "kernelspec": { 45 | "display_name": "Python 3", 46 | "language": "python", 47 | "name": "python3" 48 | }, 49 | "language_info": { 50 | "codemirror_mode": { 51 | "name": "ipython", 52 | "version": 3 53 | }, 54 | "file_extension": ".py", 55 | "mimetype": "text/x-python", 56 | "name": "python", 57 | "nbconvert_exporter": "python", 58 | "pygments_lexer": "ipython3", 59 | "version": "3.9.6" 60 | } 61 | }, 62 | "nbformat": 4, 63 | "nbformat_minor": 5 64 | } 65 | -------------------------------------------------------------------------------- /test/unit/network/sparql/data/008_duplicate_s_and_p_bindings.json: -------------------------------------------------------------------------------- 1 | { 2 | "head": { 3 | "vars": [ 4 | "subject", 5 | "predicate", 6 | "object" 7 | ] 8 | }, 9 | "results": { 10 | "bindings": [ 11 | { 12 | "subject": { 13 | "type": "uri", 14 | "value": "http://kelvinlawrence.net/air-routes/resource/24" 15 | }, 16 | "predicate": { 17 | "type": "uri", 18 | "value": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" 19 | }, 20 | "object": { 21 | "type": "uri", 22 | "value": "http://kelvinlawrence.net/air-routes/class/Airport" 23 | } 24 | }, 25 | { 26 | "subject": { 27 | "type": "uri", 28 | 
"value": "http://kelvinlawrence.net/air-routes/resource/24" 29 | }, 30 | "predicate": { 31 | "type": "uri", 32 | "value": "http://example/prop" 33 | }, 34 | "object": { 35 | "type": "literal", 36 | "value": "value1" 37 | } 38 | }, 39 | { 40 | "subject": { 41 | "type": "uri", 42 | "value": "http://kelvinlawrence.net/air-routes/resource/24" 43 | }, 44 | "predicate": { 45 | "type": "uri", 46 | "value": "http://example/prop" 47 | }, 48 | "object": { 49 | "type": "literal", 50 | "value": "value2" 51 | } 52 | }, 53 | { 54 | "subject": { 55 | "type": "uri", 56 | "value": "http://kelvinlawrence.net/air-routes/resource/24" 57 | }, 58 | "predicate": { 59 | "type": "literal", 60 | "value": "propLiteral" 61 | }, 62 | "object": { 63 | "type": "literal", 64 | "value": "value3" 65 | } 66 | }, 67 | { 68 | "subject": { 69 | "type": "uri", 70 | "value": "http://kelvinlawrence.net/air-routes/resource/24" 71 | }, 72 | "predicate": { 73 | "type": "literal", 74 | "value": "propLiteral" 75 | }, 76 | "object": { 77 | "type": "literal", 78 | "value": "value4" 79 | } 80 | } 81 | ] 82 | } 83 | } -------------------------------------------------------------------------------- /test/integration/iam/load/test_load_with_iam.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import pytest 4 | import unittest 5 | 6 | from botocore.session import get_session 7 | 8 | from test.integration import IntegrationTest 9 | 10 | TEST_BULKLOAD_SOURCE = 's3://aws-ml-customer-samples-%s/bulkload-datasets/%s/airroutes/v01' 11 | 12 | 13 | @unittest.skip 14 | class TestLoadWithIAM(IntegrationTest): 15 | def setUp(self) -> None: 16 | assert self.config.load_from_s3_arn != '' 17 | self.client = self.client_builder.with_iam(get_session()).build() 18 | 19 | @pytest.mark.neptune 20 | def test_iam_load(self): 21 | load_format = 'turtle' 22 | source = TEST_BULKLOAD_SOURCE % (self.config.aws_region, 'turtle') 23 | 24 | # for a full list of options, see 
https://docs.aws.amazon.com/neptune/latest/userguide/bulk-load-data.html 25 | kwargs = { 26 | 'failOnError': "TRUE", 27 | } 28 | res = self.client.load(source, load_format, self.config.load_from_s3_arn, **kwargs) 29 | assert res.status_code == 200 30 | 31 | load_js = res.json() 32 | assert 'loadId' in load_js['payload'] 33 | load_id = load_js['payload']['loadId'] 34 | 35 | time.sleep(1) # brief wait to ensure the load job can be obtained 36 | 37 | res = self.client.load_status(load_id, details="TRUE") 38 | assert res.status_code == 200 39 | 40 | load_status = res.json() 41 | assert 'overallStatus' in load_status['payload'] 42 | status = load_status['payload']['overallStatus'] 43 | assert status['fullUri'] == source 44 | 45 | res = self.client.cancel_load(load_id) 46 | assert res.status_code == 200 47 | 48 | time.sleep(5) 49 | res = self.client.load_status(load_id, details="TRUE") 50 | cancelled_status = res.json() 51 | assert 'LOAD_CANCELLED_BY_USER' in cancelled_status['payload']['feedCount'][-1] 52 | 53 | @pytest.mark.neptune 54 | def test_iam_load_status(self): 55 | res = self.client.load_status() # This should only give a list of load ids 56 | assert res.status_code == 200 57 | 58 | js = res.json() 59 | assert 'loadIds' in js['payload'] 60 | assert len(js['payload'].keys()) == 1 61 | -------------------------------------------------------------------------------- /test/integration/iam/ml/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from botocore.session import get_session 7 | 8 | from graph_notebook.configuration.generate_config import Configuration 9 | from graph_notebook.neptune.client import Client, ClientBuilder 10 | 11 | 12 | def setup_iam_client(config: Configuration) -> Client: 13 | client = ClientBuilder() \ 14 | .with_host(config.host) \ 15 | .with_port(config.port) \ 16 | .with_neptune_service(config.neptune_service) \ 17 | .with_region(config.aws_region) \ 18 | .with_tls(config.ssl) \ 19 | .with_ssl_verify(config.ssl_verify) \ 20 | .with_proxy_host(config.proxy_host) \ 21 | .with_proxy_port(config.proxy_port) \ 22 | .with_sparql_path(config.sparql.path) \ 23 | .with_gremlin_traversal_source(config.gremlin.traversal_source) \ 24 | .with_gremlin_login(config.gremlin.username, config.gremlin.password) \ 25 | .with_gremlin_serializer(config.gremlin.message_serializer) \ 26 | .with_neo4j_login(config.neo4j.username, config.neo4j.password, config.neo4j.auth, config.neo4j.database) \ 27 | .with_iam(get_session()) \ 28 | .build() 29 | 30 | assert client.host == config.host 31 | assert client.port == config.port 32 | assert client.neptune_service == config.neptune_service 33 | assert client.region == config.aws_region 34 | assert client.proxy_host == config.proxy_host 35 | assert client.proxy_port == config.proxy_port 36 | assert client.sparql_path == config.sparql.path 37 | assert client.gremlin_traversal_source == config.gremlin.traversal_source 38 | assert client.gremlin_username == config.gremlin.username 39 | assert client.gremlin_password == config.gremlin.password 40 | assert client.gremlin_serializer == config.gremlin.message_serializer 41 | assert client.neo4j_username == config.neo4j.username 42 | assert client.neo4j_password == config.neo4j.password 43 | assert client.neo4j_auth == config.neo4j.auth 44 | assert client.neo4j_database == config.neo4j.database 45 | assert client.ssl is config.ssl 46 | assert client.ssl_verify 
is config.ssl_verify 47 | return client 48 | -------------------------------------------------------------------------------- /additional-databases/sagemaker/neptune-notebook-cloudformation/README.md: -------------------------------------------------------------------------------- 1 | ## Launching graph-notebook as Amazon Neptune Workbench via AWS CloudFormation 2 | 3 | The AWS CloudFormation template in this folder, [`neptune-workbench-stack.yaml`](neptune-workbench-stack.yaml), deploys Amazon Neptune workbench notebooks as resources, and includes the base 'Getting Started' notebooks. The workbench lets you work with your Amazon Neptune Database cluster using Jupyter notebooks hosted by Amazon SageMaker. You are billed for workbench resources through Amazon SageMaker, separately from your Neptune billing. 4 | 5 | ### Parameter details 6 | #### Minimum permissions for the SageMakerNotebookRole 7 | You may opt to have your notebook instance assume an existing AWS IAM role, via the `SageMakerNotebookRoleArn` stack parameter. 
Make sure that this role has at least the following minimum permissions within its service role policy: 8 | 9 | ```json 10 | { 11 | "Version": "2012-10-17", 12 | "Statement": [ 13 | { 14 | "Effect": "Allow", 15 | "Action": [ 16 | "s3:GetObject", 17 | "s3:ListBucket" 18 | ], 19 | "Resource": [ 20 | "arn:(AWS Partition):s3:::aws-neptune-notebook-(AWS Region)", 21 | "arn:(AWS Partition):s3:::aws-neptune-notebook-(AWS Region)/*" 22 | ] 23 | }, 24 | { 25 | "Effect": "Allow", 26 | "Action": "neptune-db:connect", 27 | "Resource": [ 28 | "arn:(AWS Partition):neptune-db:(AWS Region):(AWS Account ID):(Cluster Resource ID)/*" 29 | ] 30 | } 31 | ] 32 | } 33 | ``` 34 | 35 | If you would like to enable CloudWatch logging, also add: 36 | ```json 37 | { 38 | "Effect": "Allow", 39 | "Action": [ 40 | "logs:CreateLogGroup", 41 | "logs:CreateLogStream", 42 | "logs:PutLogEvents" 43 | ], 44 | "Resource": [ 45 | "arn:(AWS Partition):logs:(AWS Region):(AWS Account ID):log-group:/aws/sagemaker/*" 46 | ] 47 | } 48 | ``` 49 | 50 | The role should also establish the following trust relationship: 51 | 52 | ```json 53 | { 54 | "Version": "2012-10-17", 55 | "Statement": [ 56 | { 57 | "Effect": "Allow", 58 | "Principal": { 59 | "Service": "sagemaker.amazonaws.com" 60 | }, 61 | "Action": "sts:AssumeRole" 62 | } 63 | ] 64 | } 65 | ``` 66 | -------------------------------------------------------------------------------- /test/integration/iam/system/test_system_with_iam.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import time 3 | 4 | import pytest 5 | from botocore.session import get_session 6 | from test.integration import IntegrationTest 7 | 8 | 9 | class TestStatusWithIAM(IntegrationTest): 10 | def setUp(self) -> None: 11 | self.client = self.client_builder.with_iam(get_session()).build() 12 | 13 | @pytest.mark.iam 14 | @pytest.mark.neptune 15 | def test_do_db_reset_initiate_with_iam_credentials(self): 16 | token = 
self.client.initiate_reset() 17 | result = token.json() 18 | self.assertNotEqual(result['payload']['token'], '') 19 | 20 | @pytest.mark.iam 21 | @pytest.mark.neptune 22 | def test_do_db_reset_perform_with_wrong_token_with_iam_credentials(self): 23 | res = self.client.perform_reset('invalid') 24 | assert res.status_code == 400 25 | 26 | expected_message = "System command parameter 'token' : 'invalid' does not match database reset token" 27 | assert expected_message == res.json()['detailedMessage'] 28 | 29 | @pytest.mark.iam 30 | @pytest.mark.neptune 31 | def test_do_db_reset_initiate_without_iam_credentials(self): 32 | client = self.client_builder.with_iam(None).build() 33 | res = client.initiate_reset() 34 | assert res.status_code == 403 35 | 36 | @pytest.mark.iam 37 | @pytest.mark.neptune 38 | @pytest.mark.reset 39 | def test_iam_fast_reset(self): 40 | initiate_reset_res = self.client.initiate_reset() 41 | assert initiate_reset_res.status_code == 200 42 | 43 | token = initiate_reset_res.json()['payload']['token'] 44 | reset_res = self.client.perform_reset(token) 45 | assert reset_res.json()['status'] == '200 OK' 46 | 47 | # check for status for 5 minutes while reset is performed 48 | end_time = datetime.datetime.now() + datetime.timedelta(minutes=5) 49 | status = None 50 | while end_time >= datetime.datetime.now(): 51 | try: 52 | status = self.client.status() 53 | if status.status_code != 200: 54 | time.sleep(5) # wait momentarily until we obtain the status again 55 | else: 56 | break 57 | except Exception: 58 | time.sleep(5) 59 | 60 | assert status.status_code == 200 61 | -------------------------------------------------------------------------------- /.github/workflows/docker_publish.yml: -------------------------------------------------------------------------------- 1 | name: Build Docker image and publish to ECR 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | image_tag: 7 | description: 'ECR image tag type' 8 | required: false 9 | type: choice 10 | 
options: 11 | - 'latest' 12 | - 'release' 13 | default: 'latest' 14 | push: 15 | branches: 16 | - main 17 | 18 | jobs: 19 | build-and-push-image: 20 | runs-on: ubuntu-latest 21 | permissions: 22 | contents: read 23 | packages: write 24 | 25 | steps: 26 | - name: Checkout 27 | uses: actions/checkout@v3 28 | 29 | - name: Get package version 30 | uses: tyankatsu0105/read-package-version-actions@v1 31 | with: 32 | path: "./src/graph_notebook/widgets" 33 | id: package-version 34 | 35 | - name: Get image tag 36 | id: get-image-tag 37 | run: | 38 | if ${{ github.event_name == 'workflow_dispatch' }} ; then 39 | if ${{ inputs.image_tag == 'release'}}; then 40 | echo "image_tag=${{ steps.package-version.outputs.version }}" >> $GITHUB_OUTPUT 41 | else 42 | echo "image_tag=latest" >> $GITHUB_OUTPUT 43 | fi 44 | else 45 | echo "image_tag=latest" >> $GITHUB_OUTPUT 46 | fi 47 | 48 | - name: Configure AWS Credentials 49 | uses: aws-actions/configure-aws-credentials@v1 50 | with: 51 | aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID_ECR }} 52 | aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY_ECR }} 53 | aws-region: us-east-1 54 | role-to-assume: ${{ secrets.AWS_ROLE_TO_ASSUME_ECR }} 55 | role-duration-seconds: 3600 56 | role-session-name: NotebookImageUpdate 57 | 58 | - name: Login to Amazon ECR 59 | id: login-ecr-public 60 | uses: aws-actions/amazon-ecr-login@v1 61 | with: 62 | registry-type: public 63 | 64 | - name: Build, tag, and push Docker image 65 | env: 66 | REGISTRY: ${{ steps.login-ecr-public.outputs.registry }} 67 | REGISTRY_ALIAS: neptune 68 | REPOSITORY: graph-notebook 69 | IMAGE_TAG: ${{ steps.get-image-tag.outputs.image_tag }} 70 | run: | 71 | docker build -t $REGISTRY/$REGISTRY_ALIAS/$REPOSITORY:$IMAGE_TAG . 
72 | docker push $REGISTRY/$REGISTRY_ALIAS/$REPOSITORY:$IMAGE_TAG 73 | -------------------------------------------------------------------------------- /test/integration/without_iam/sparql/test_sparql_metadata.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import pytest 7 | 8 | from test.integration import DataDrivenSparqlTest 9 | from graph_notebook.magics.metadata import build_sparql_metadata_from_query 10 | 11 | 12 | class TestMetadataClassFunctions(DataDrivenSparqlTest): 13 | 14 | @pytest.mark.sparql 15 | def test_sparql_default_query_metadata(self): 16 | query = ''' 17 | PREFIX rdfs: 18 | PREFIX so: 19 | SELECT ?city 20 | WHERE { 21 | ?s a so:City . 22 | ?s rdfs:label ?city 23 | FILTER contains(?city,"ou") 24 | } 25 | ''' 26 | res = self.client.sparql(query) 27 | results = res.json() 28 | sparql_metadata = build_sparql_metadata_from_query(query_type='query', res=res, results=results, scd_query=True) 29 | meta_dict = sparql_metadata.to_dict() 30 | 31 | self.assertEqual(meta_dict["Query mode"], "query") 32 | self.assertIsInstance(meta_dict["Request execution time (ms)"], float) 33 | self.assertEqual(meta_dict["Status code"], 200) 34 | self.assertEqual(meta_dict["Status OK?"], True) 35 | self.assertEqual(meta_dict["# of results"], 2) 36 | self.assertIsInstance(meta_dict["Response content size (bytes)"], int) 37 | 38 | @pytest.mark.sparql 39 | @pytest.mark.neptune 40 | def test_sparql_explain_query_metadata(self): 41 | query = ''' 42 | PREFIX rdfs: 43 | PREFIX so: 44 | SELECT ?city 45 | WHERE { 46 | ?s a so:City . 
47 | ?s rdfs:label ?city 48 | FILTER contains(?city,"ou") 49 | } 50 | ''' 51 | res = self.client.sparql_explain(query) 52 | sparql_metadata = build_sparql_metadata_from_query(query_type='explain', res=res) 53 | meta_dict = sparql_metadata.to_dict() 54 | 55 | self.assertEqual(meta_dict["Query mode"], "explain") 56 | self.assertIsInstance(meta_dict["Request execution time (ms)"], float) 57 | self.assertEqual(meta_dict["Status code"], 200) 58 | self.assertEqual(meta_dict["Status OK?"], True) 59 | self.assertIsInstance(meta_dict["Response content size (bytes)"], int) 60 | -------------------------------------------------------------------------------- /test/integration/IntegrationTest.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import unittest 7 | 8 | from botocore.session import get_session 9 | 10 | from graph_notebook.configuration.generate_config import Configuration, AuthModeEnum 11 | from graph_notebook.configuration.get_config import get_config 12 | from graph_notebook.neptune.client import ClientBuilder, NEPTUNE_CONFIG_HOST_IDENTIFIERS, is_allowed_neptune_host 13 | from test.integration.NeptuneIntegrationWorkflowSteps import TEST_CONFIG_PATH 14 | 15 | 16 | def setup_client_builder(config: Configuration) -> ClientBuilder: 17 | if is_allowed_neptune_host(config.host, NEPTUNE_CONFIG_HOST_IDENTIFIERS): 18 | builder = ClientBuilder() \ 19 | .with_host(config.host) \ 20 | .with_port(config.port) \ 21 | .with_region(config.aws_region) \ 22 | .with_tls(config.ssl) \ 23 | .with_ssl_verify(config.ssl_verify) \ 24 | .with_proxy_host(config.proxy_host) \ 25 | .with_proxy_port(config.proxy_port) \ 26 | .with_sparql_path(config.sparql.path) \ 27 | .with_gremlin_traversal_source(config.gremlin.traversal_source) \ 28 | .with_gremlin_serializer(config.gremlin.message_serializer) \ 29 | 
.with_neo4j_login(config.neo4j.username, config.neo4j.password, config.neo4j.auth, config.neo4j.database) 30 | if config.auth_mode == AuthModeEnum.IAM: 31 | builder = builder.with_iam(get_session()) 32 | else: 33 | builder = ClientBuilder() \ 34 | .with_host(config.host) \ 35 | .with_port(config.port) \ 36 | .with_tls(config.ssl) \ 37 | .with_ssl_verify(config.ssl_verify) \ 38 | .with_proxy_host(config.proxy_host) \ 39 | .with_proxy_port(config.proxy_port) \ 40 | .with_sparql_path(config.sparql.path) \ 41 | .with_gremlin_traversal_source(config.gremlin.traversal_source) \ 42 | .with_gremlin_login(config.gremlin.username, config.gremlin.password) \ 43 | .with_gremlin_serializer(config.gremlin.message_serializer) \ 44 | .with_neo4j_login(config.neo4j.username, config.neo4j.password, config.neo4j.auth, config.neo4j.database) 45 | 46 | return builder 47 | 48 | 49 | class IntegrationTest(unittest.TestCase): 50 | @classmethod 51 | def setUpClass(cls): 52 | super().setUpClass() 53 | cls.config = get_config(TEST_CONFIG_PATH) 54 | cls.client_builder = setup_client_builder(cls.config) 55 | 56 | def setUp(self) -> None: 57 | self.client = self.client_builder.build() 58 | -------------------------------------------------------------------------------- /test/integration/without_iam/network/gremlin/test_gremlin_network_with_pattern.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | from graph_notebook.network.gremlin.GremlinNetwork import GremlinNetwork, PathPattern 7 | 8 | from test.integration import DataDrivenGremlinTest 9 | 10 | 11 | class TestGremlinNetwork(DataDrivenGremlinTest): 12 | def test_add_path_with_edge_object(self): 13 | query = "g.V().has('airport','code','AUS').outE().inV().path().by('code').by().limit(10)" 14 | results = self.client.gremlin_query(query) 15 | gn = GremlinNetwork() 16 | pattern = [PathPattern.V, PathPattern.OUT_E, PathPattern.IN_V] 17 | gn.add_results_with_pattern(results, pattern) 18 | self.assertEqual(11, len(gn.graph.nodes)) 19 | self.assertEqual(10, len(gn.graph.edges)) 20 | 21 | def test_add_path_by_dist(self): 22 | query = """g.V().has('airport','code','AUS'). 23 | repeat(outE().inV().simplePath()). 24 | until(has('code','WLG')). 25 | limit(5). 26 | path(). 27 | by('code'). 28 | by('dist')""" 29 | results = self.client.gremlin_query(query) 30 | gn = GremlinNetwork() 31 | pattern = [PathPattern.V, PathPattern.OUT_E, PathPattern.IN_V, PathPattern.OUT_E] 32 | gn.add_results_with_pattern(results, pattern) 33 | self.assertEqual(8, len(gn.graph.nodes)) 34 | self.assertEqual(11, len(gn.graph.edges)) 35 | 36 | def test_path_with_dict(self): 37 | query = """g.V().has('airport','code','CZM'). 38 | out('route'). 39 | path(). 40 | by(valueMap('code','city','region','desc','lat','lon'). 41 | order(local). 42 | by(keys))""" 43 | results = self.client.gremlin_query(query) 44 | gn = GremlinNetwork() 45 | pattern = [PathPattern.V, PathPattern.IN_V] 46 | gn.add_results_with_pattern(results, pattern) 47 | self.assertEqual(12, len(gn.graph.nodes)) 48 | self.assertEqual(11, len(gn.graph.edges)) 49 | 50 | def test_out_v_unhashable_dict(self): 51 | query = """g.V(). 52 | hasLabel('country'). 53 | has('desc','Jamaica'). 54 | out(). 55 | path(). 
56 | by(valueMap())""" 57 | results = self.client.gremlin_query(query) 58 | gn = GremlinNetwork() 59 | pattern = [PathPattern.V, PathPattern.OUT_V] 60 | gn.add_results_with_pattern(results, pattern) 61 | node = gn.graph.nodes.get('graph_notebook-2f363b2fa995d0567e638a240efd0a26') 62 | self.assertEqual(["Jamaica"], node['properties']['desc']) 63 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/sparql_table.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 22 | 23 | 24 | 25 | 26 | {% for c in columns %} 27 | 28 | {% endfor %} 29 | 30 | 31 | 32 | {% for r in rows %} 33 | 34 | 35 | {% for cell in r%} 36 | 37 | {% endfor %} 38 | 39 | {% endfor %} 40 | 41 |
{{ c|e }}
{{loop.index}}{{ cell|e }}
42 | 78 |
-------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/opencypher_table.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 22 | 23 | 24 | 25 | 26 | {% for c in columns %} 27 | 28 | {% endfor %} 29 | 30 | 31 | 32 | {% for r in rows %} 33 | 34 | 35 | {% for cell in r%} 36 | 37 | {% endfor %} 38 | 39 | {% endfor %} 40 | 41 |
{{ c|e }}
{{loop.index}}{{ cell|e }}
42 | 78 |
-------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/gremlin_table.html: -------------------------------------------------------------------------------- 1 |
2 | 3 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | {% for r in results %} 26 | 27 | 28 | 31 | 32 | {% endfor %} 33 | 34 |
Console
{{loop.index}} 29 | {{r|e}} 30 |
35 | 82 |
-------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/03-Sample-Applications/04-Telco-Networks/Transform2Neptune.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from awsglue.transforms import * 3 | from awsglue.utils import getResolvedOptions 4 | from pyspark.context import SparkContext 5 | from awsglue.context import GlueContext 6 | from awsglue.job import Job 7 | import pyspark.sql.functions as f 8 | 9 | 10 | ## @params: [JOB_NAME] 11 | args = getResolvedOptions(sys.argv, ['JOB_NAME']) 12 | 13 | sc = SparkContext() 14 | glueContext = GlueContext(sc) 15 | spark = glueContext.spark_session 16 | 17 | def transform_edges(spark, s3_path: str, s3_dest: str, max_from: int = -1): 18 | _, basename = s3_path.rsplit('/', 1) 19 | basename = basename.split('.')[0] 20 | _, fro, verb, to = basename.split('_') 21 | 22 | df = ( 23 | spark.read.format("com.databricks.spark.csv") 24 | .option("header", "false") 25 | .option("inferSchema", "true") 26 | .load(s3_path) 27 | ) 28 | columns = df.columns 29 | label = f"{fro}_{verb}_{to}" 30 | names = ["~from", "~to"] + [f"{label}_attr_{i}:Float" for i in range(len(columns) - 2)] 31 | df = df.toDF(*names) 32 | if max_from > 0: 33 | df = df.where(df["~from"] < f.lit(max_from)) 34 | df = df.withColumn("~label", f.lit(label)) 35 | df = df.withColumn("~from",f.concat_ws("_", f.lit(fro), df["~from"])) 36 | df = df.withColumn("~to", f.concat_ws("_", f.lit(fro), df["~to"])) 37 | df = df.withColumn("~id", f.concat_ws("_", f.lit(label), f.monotonically_increasing_id())) 38 | columns = list(df.columns) 39 | columns.remove("~id") 40 | df = df.select("~id", *columns) 41 | df.write.format("com.databricks.spark.csv").option("header", "true").save(s3_dest) 42 | 43 | def transform_nodes(spark, s3_path: str, s3_dest: str, max_node: int = -1): 44 | _, basename = s3_path.rsplit('/', 1) 45 | basename = basename.split('.')[0] 46 | _, 
label = basename.split('_') 47 | 48 | df = ( 49 | spark.read.format("com.databricks.spark.csv") 50 | .option("header", "false") 51 | .option("inferSchema", "true") 52 | .load(s3_path) 53 | ) 54 | columns = df.columns 55 | names = ["~id"] + [f"{label}_attr_{i}:Float" for i in range(len(columns) - 1)] 56 | df = df.toDF(*names) 57 | if max_node > 0: 58 | df = df.where(df["~id"] < f.lit(max_node)) 59 | df = df.withColumn("~label", f.lit(label)) 60 | df = df.withColumn("~id", f.concat_ws("_", f.lit(label), df["~id"])) 61 | df.write.format("com.databricks.spark.csv").option("header", "true").save(s3_dest) 62 | 63 | transform_nodes(spark,"s3://pathtoyourdata/node_user.txt", "s3://pathtoyourdata/node_user.csv") 64 | transform_nodes(spark,"s3://apathtoyourdata/node_cell.txt", "s3://pathtoyourdata/node_cell.csv") 65 | 66 | 67 | transform_edges(spark,"s3://pathtoyourdata/edge_user_live_cell.txt", "s3://pathtoyourdata/edges_user_live_cell.csv") 68 | 69 | 70 | 71 | job = Job(glueContext) 72 | job.init(args['JOB_NAME'], args) 73 | job.commit() -------------------------------------------------------------------------------- /test/integration/without_iam/gremlin/test_gremlin_metadata.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import time 7 | import pytest 8 | 9 | from test.integration import DataDrivenGremlinTest 10 | from graph_notebook.magics.metadata import build_gremlin_metadata_from_query 11 | 12 | 13 | class TestMetadataClassFunctions(DataDrivenGremlinTest): 14 | 15 | @pytest.mark.gremlin 16 | def test_gremlin_default_query_metadata(self): 17 | query = "g.V().has('airport','code','CZM').out('route').path().by('code')" 18 | query_start = time.time() * 1000 19 | results = self.client.gremlin_query(query) 20 | query_time = time.time() * 1000 - query_start 21 | gremlin_metadata = build_gremlin_metadata_from_query(query_type='query', results=results, query_time=query_time) 22 | meta_dict = gremlin_metadata.to_dict() 23 | 24 | self.assertEqual(meta_dict["Query mode"], "query") 25 | self.assertIsInstance(meta_dict["Request execution time (ms)"], float) 26 | self.assertEqual(meta_dict["# of results"], 11) 27 | self.assertIsInstance(meta_dict["Response size (bytes)"], int) 28 | 29 | @pytest.mark.gremlin 30 | @pytest.mark.neptune 31 | def test_gremlin_explain_query_metadata(self): 32 | query = "g.V().has('airport','code','CZM').out('route').path().by('code')" 33 | res = self.client.gremlin_explain(query) 34 | query_res = res.content.decode('utf-8') 35 | gremlin_metadata = build_gremlin_metadata_from_query(query_type='explain', results=query_res, res=res) 36 | meta_dict = gremlin_metadata.to_dict() 37 | 38 | self.assertEqual(meta_dict["Query mode"], "explain") 39 | self.assertIsInstance(meta_dict["Request execution time (ms)"], float) 40 | self.assertEqual(meta_dict["Status code"], 200) 41 | self.assertEqual(meta_dict["Status OK?"], True) 42 | self.assertEqual(meta_dict["# of predicates"], 18) 43 | self.assertIsInstance(meta_dict["Response size (bytes)"], int) 44 | 45 | @pytest.mark.gremlin 46 | @pytest.mark.neptune 47 | def test_gremlin_profile_query_metadata(self): 48 | query = 
"g.V().has('airport','code','CZM').out('route').path().by('code')" 49 | res = self.client.gremlin_profile(query) 50 | query_res = res.content.decode('utf-8') 51 | gremlin_metadata = build_gremlin_metadata_from_query(query_type='profile', results=query_res, res=res) 52 | meta_dict = gremlin_metadata.to_dict() 53 | 54 | self.assertEqual(meta_dict["Query mode"], "profile") 55 | self.assertIsInstance(meta_dict["Query execution time (ms)"], float) 56 | self.assertIsInstance(meta_dict["Request execution time (ms)"], float) 57 | self.assertEqual(meta_dict["Status code"], 200) 58 | self.assertEqual(meta_dict["Status OK?"], True) 59 | self.assertEqual(meta_dict["# of predicates"], 18) 60 | self.assertEqual(meta_dict["# of results"], 11) 61 | self.assertIsInstance(meta_dict["Response size (bytes)"], int) 62 | -------------------------------------------------------------------------------- /test/unit/network/test_network.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import unittest 7 | 8 | from graph_notebook.network.Network import Network, network_to_json, network_from_json, ERROR_EDGE_NOT_FOUND, \ 9 | ERROR_INVALID_DATA 10 | 11 | 12 | def get_seed_network() -> Network: 13 | network = Network() 14 | node_1_data = {'foo': 'bar'} 15 | node_2_data = {'foo': 'baz'} 16 | network.add_node_data('1', node_1_data) 17 | network.add_node_data('2', node_2_data) 18 | 19 | network.add_edge('1', '2', '1_to_2', 'lorem') 20 | network.add_edge('2', '1', '2_to_1', 'ipsum') 21 | return network 22 | 23 | 24 | class TestNetwork(unittest.TestCase): 25 | def test_add_node(self): 26 | network = Network() 27 | node_id = '1' 28 | network.add_node(node_id) 29 | self.assertTrue(network.graph.has_node(node_id)) 30 | 31 | def test_add_node_with_properties(self): 32 | network = Network() 33 | node_id = '1' 34 | kwargs = { 35 | 'foo': 'bar' 36 | } 37 | network.add_node(node_id, kwargs) 38 | self.assertEqual('bar', network.graph.nodes[node_id]['foo']) 39 | 40 | def test_add_edge_data_does_not_exist(self): 41 | network = Network() 42 | network.add_node('1') 43 | network.add_node('2') 44 | with self.assertRaises(ValueError) as context: 45 | network.add_edge_data('1', '2', 'na', {'foo': 'bar'}) 46 | 47 | self.assertEqual(context.exception, ERROR_EDGE_NOT_FOUND) 48 | 49 | def test_add_edge_data_not_a_dict(self): 50 | network = Network() 51 | network.add_node('1') 52 | network.add_node('2') 53 | network.add_edge('1', '2', '1_to_2', '1_to_2') 54 | 55 | with self.assertRaises(ValueError) as context: 56 | network.add_edge_data('1', '2', '1_to_2', None) 57 | 58 | self.assertEqual(context.exception, ERROR_INVALID_DATA) 59 | 60 | def test_add_no_data_not_a_dict(self): 61 | network = Network() 62 | with self.assertRaises(ValueError) as context: 63 | network.add_node_data('1', None) 64 | 65 | self.assertEqual(context.exception, ERROR_INVALID_DATA) 66 | 67 | def test_network_to_json(self): 68 | network = 
get_seed_network() 69 | 70 | expected_nodes = [ 71 | {'foo': 'bar', 'id': '1'}, 72 | {'foo': 'baz', 'id': '2'} 73 | ] 74 | 75 | expected_edges = [ 76 | {'label': 'lorem', 'source': '1', 'target': '2', 'key': '1_to_2'}, 77 | {'label': 'ipsum', 'source': '2', 'target': '1', 'key': '2_to_1'} 78 | ] 79 | 80 | js = network.to_json() 81 | self.assertEqual(expected_nodes, js['graph']['nodes']) 82 | self.assertEqual(expected_edges, js['graph']['links']) 83 | 84 | def test_network_from_json(self): 85 | network = get_seed_network() 86 | js = network_to_json(network) 87 | loaded_network = network_from_json(js) 88 | self.assertEqual(network.to_json(), loaded_network.to_json()) 89 | 90 | 91 | if __name__ == '__main__': 92 | unittest.main() 93 | -------------------------------------------------------------------------------- /src/graph_notebook/options/options.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | # Documentation for these options: https://visjs.github.io/vis-network/docs/network 7 | OPTIONS_DEFAULT_DIRECTED = { 8 | "nodes": { 9 | "borderWidthSelected": 3, 10 | "borderWidth": 0, 11 | "chosen": True, 12 | "color": { 13 | "background": "rgba(210, 229, 255, 1)", 14 | "border": "transparent", 15 | "highlight": { 16 | "background": "rgba(9, 104, 178, 1)", 17 | "border": "rgba(8, 62, 100, 1)" 18 | } 19 | }, 20 | "shadow": { 21 | "enabled": False 22 | }, 23 | "shape": "circle", 24 | "widthConstraint": { 25 | "minimum": 70, 26 | "maximum": 70 27 | }, 28 | "font": { 29 | "face": "courier new", 30 | "color": "black", 31 | "size": 12 32 | }, 33 | }, 34 | "edges": { 35 | "color": { 36 | "inherit": False 37 | }, 38 | "smooth": { 39 | "enabled": True, 40 | "type": "straightCross" 41 | }, 42 | "arrows": { 43 | "to": { 44 | "enabled": True, 45 | "type": "arrow" 46 | } 47 | }, 48 | "font": { 49 | "face": "courier new" 50 | } 51 | }, 52 | "interaction": { 53 | "hover": True, 54 | "hoverConnectedEdges": True, 55 | "selectConnectedEdges": False 56 | }, 57 | "physics": { 58 | "simulationDuration": 1500, 59 | "disablePhysicsAfterInitialSimulation": False, 60 | "minVelocity": 0.75, 61 | "barnesHut": { 62 | "centralGravity": 0.1, 63 | "gravitationalConstant": -50450, 64 | "springLength": 95, 65 | "springConstant": 0.04, 66 | "damping": 0.09, 67 | "avoidOverlap": 0.1 68 | }, 69 | "solver": "barnesHut", 70 | "enabled": True, 71 | "adaptiveTimestep": True, 72 | "stabilization": { 73 | "enabled": True, 74 | "iterations": 1 75 | } 76 | } 77 | } 78 | 79 | 80 | def vis_options_merge(original, target): 81 | """Merge the target dict with the original dict, without modifying the input dicts. 82 | 83 | :param original: the original dict. 84 | :param target: the target dict that takes precedence when there are type conflicts or value conflicts. 85 | :return: a new dict containing references to objects in both inputs. 
86 | """ 87 | resultdict = {} 88 | common_keys = original.keys() & target.keys() 89 | 90 | for key in common_keys: 91 | obj1 = original[key] 92 | obj2 = target[key] 93 | 94 | if type(obj1) is dict and type(obj2) is dict: 95 | resultdict[key] = vis_options_merge(obj1, obj2) 96 | else: 97 | resultdict[key] = obj2 98 | 99 | for key in (original.keys() - target.keys()): 100 | resultdict[key] = original[key] 101 | 102 | for key in (target.keys() - original.keys()): 103 | resultdict[key] = target[key] 104 | 105 | return resultdict 106 | -------------------------------------------------------------------------------- /test/unit/network/sparql/data/010_airroutes_no_literals.json: -------------------------------------------------------------------------------- 1 | { 2 | "head": { 3 | "vars": [ 4 | "s", 5 | "p", 6 | "o" 7 | ] 8 | }, 9 | "results": { 10 | "bindings": [ 11 | { 12 | "s": { 13 | "type": "uri", 14 | "value": "http://kelvinlawrence.net/air-routes/resource/365" 15 | }, 16 | "o": { 17 | "type": "uri", 18 | "value": "http://kelvinlawrence.net/air-routes/resource/85" 19 | }, 20 | "p": { 21 | "type": "uri", 22 | "value": "http://kelvinlawrence.net/air-routes/objectProperty/route" 23 | } 24 | }, 25 | { 26 | "s": { 27 | "type": "uri", 28 | "value": "http://kelvinlawrence.net/air-routes/resource/365" 29 | }, 30 | "o": { 31 | "type": "uri", 32 | "value": "http://kelvinlawrence.net/air-routes/resource/367" 33 | }, 34 | "p": { 35 | "type": "uri", 36 | "value": "http://kelvinlawrence.net/air-routes/objectProperty/route" 37 | } 38 | }, 39 | { 40 | "s": { 41 | "type": "uri", 42 | "value": "http://kelvinlawrence.net/air-routes/resource/365" 43 | }, 44 | "o": { 45 | "type": "uri", 46 | "value": "http://kelvinlawrence.net/air-routes/resource/16" 47 | }, 48 | "p": { 49 | "type": "uri", 50 | "value": "http://kelvinlawrence.net/air-routes/objectProperty/route" 51 | } 52 | }, 53 | { 54 | "s": { 55 | "type": "uri", 56 | "value": "http://kelvinlawrence.net/air-routes/resource/365" 57 | 
}, 58 | "o": { 59 | "type": "uri", 60 | "value": "http://kelvinlawrence.net/air-routes/resource/8" 61 | }, 62 | "p": { 63 | "type": "uri", 64 | "value": "http://kelvinlawrence.net/air-routes/objectProperty/route" 65 | } 66 | }, 67 | { 68 | "s": { 69 | "type": "uri", 70 | "value": "http://kelvinlawrence.net/air-routes/resource/365" 71 | }, 72 | "o": { 73 | "type": "uri", 74 | "value": "http://kelvinlawrence.net/air-routes/resource/11" 75 | }, 76 | "p": { 77 | "type": "uri", 78 | "value": "http://kelvinlawrence.net/air-routes/objectProperty/route" 79 | } 80 | }, 81 | { 82 | "s": { 83 | "type": "uri", 84 | "value": "http://kelvinlawrence.net/air-routes/resource/400" 85 | }, 86 | "o": { 87 | "type": "uri", 88 | "value": "http://www.example.com/example/ontology/airport" 89 | }, 90 | "p": { 91 | "type": "uri", 92 | "value": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" 93 | } 94 | }, 95 | { 96 | "s": { 97 | "type": "uri", 98 | "value": "http://kelvinlawrence.net/air-routes/resource/400" 99 | }, 100 | "o": { 101 | "type": "uri", 102 | "value": "http://www.example.com/example/ontology/regional" 103 | }, 104 | "p": { 105 | "type": "uri", 106 | "value": "http://www.w3.org/1999/02/22-rdf-syntax-ns#type" 107 | } 108 | } 109 | ] 110 | } 111 | } -------------------------------------------------------------------------------- /test/integration/iam/statistics/test_statistics_with_iam.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 
3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | import pytest 6 | from botocore.session import get_session 7 | 8 | from test.integration import IntegrationTest 9 | from parameterized import parameterized 10 | 11 | lang_list = ["pg", "sparql"] 12 | 13 | 14 | class TestStatisticsWithIAM(IntegrationTest): 15 | def setUp(self) -> None: 16 | super().setUp() 17 | self.client = self.client_builder.with_iam(get_session()).build() 18 | 19 | @pytest.mark.neptune 20 | @pytest.mark.iam 21 | @parameterized.expand(lang_list) 22 | def test_statistics_status(self, lang): 23 | expected_payload_fields = ['autoCompute', 'active', 'statisticsId'] 24 | res = self.client.statistics(lang) 25 | assert res.status_code == 200 26 | statistics_status = res.json() 27 | self.assertEqual(statistics_status['status'], '200 OK') 28 | res_payload_fields = list(statistics_status['payload'].keys()) 29 | for x in expected_payload_fields: 30 | self.assertIn(x, res_payload_fields) 31 | 32 | @pytest.mark.neptune 33 | @pytest.mark.iam 34 | @parameterized.expand(lang_list) 35 | def test_statistics_disable_autocompute(self, lang): 36 | expected = { 37 | "status": "200 OK" 38 | } 39 | disable_res = self.client.statistics(lang, False, 'disableAutoCompute') 40 | assert disable_res.status_code == 200 41 | disable_status = disable_res.json() 42 | self.assertEqual(disable_status, expected) 43 | 44 | status_res = self.client.statistics(lang) 45 | statistics_status = status_res.json() 46 | self.assertEqual(statistics_status['payload']['autoCompute'], False) 47 | 48 | @pytest.mark.neptune 49 | @pytest.mark.iam 50 | @parameterized.expand(lang_list) 51 | def test_statistics_enable_autocompute(self, lang): 52 | expected = { 53 | "status": "200 OK" 54 | } 55 | enable_res = self.client.statistics(lang, False, 'enableAutoCompute') 56 | assert enable_res.status_code == 200 57 | enable_status = enable_res.json() 58 | self.assertEqual(enable_status, expected) 59 | 60 | status_res = self.client.statistics(lang) 61 | 
statistics_status = status_res.json() 62 | self.assertEqual(statistics_status['payload']['autoCompute'], True) 63 | 64 | @pytest.mark.neptune 65 | @pytest.mark.iam 66 | @parameterized.expand(lang_list) 67 | def test_statistics_refresh(self, lang): 68 | res = self.client.statistics(lang) 69 | assert res.status_code == 200 70 | statistics_status = res.json() 71 | self.assertEqual(statistics_status['status'], '200 OK') 72 | self.assertIn("statisticsId", statistics_status['payload']) 73 | 74 | @pytest.mark.neptune 75 | @pytest.mark.iam 76 | @parameterized.expand(lang_list) 77 | def test_statistics_delete(self, lang): 78 | expected = { 79 | "status": "200 OK", 80 | "payload": { 81 | "active": False, 82 | "statisticsId": -1 83 | } 84 | } 85 | res = self.client.statistics(lang, False, 'delete') 86 | assert res.status_code == 200 87 | statistics_status = res.json() 88 | self.assertEqual(statistics_status, expected) 89 | -------------------------------------------------------------------------------- /src/graph_notebook/notebooks/03-Neptune-ML/02-SPARQL/neptune-ml-pretrained-rdf-model-config.json: -------------------------------------------------------------------------------- 1 | { 2 | "models": { 3 | "object_classification": "s3://aws-neptune-customer-samples/v2/neptune-ml/pretrained-models/v2/rdf/object-classification/model.tar.gz", 4 | "object_regression": "s3://aws-neptune-customer-samples/v2/neptune-ml/pretrained-models/v2/rdf/object-regression/model.tar.gz", 5 | "link_prediction": "s3://aws-neptune-customer-samples/v2/neptune-ml/pretrained-models/v2/rdf/link-prediction/model.tar.gz" 6 | }, 7 | "models_cn": { 8 | "object_classification": "s3://aws-neptune-customer-samples-cn-northwest-1/v2/neptune-ml/pretrained-models/v2/rdf/object-classification/model.tar.gz", 9 | "object_regression": "s3://aws-neptune-customer-samples-cn-northwest-1/v2/neptune-ml/pretrained-models/v2/rdf/object-regression/model.tar.gz", 10 | "link_prediction": 
"s3://aws-neptune-customer-samples-cn-northwest-1/v2/neptune-ml/pretrained-models/v2/rdf/link-prediction/model.tar.gz" 11 | }, 12 | "container_images": { 13 | "us-west-1":"891482049861.dkr.ecr.us-west-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 14 | "us-west-2":"891482049861.dkr.ecr.us-west-2.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 15 | "us-east-1":"891482049861.dkr.ecr.us-east-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 16 | "us-east-2":"891482049861.dkr.ecr.us-east-2.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 17 | "ap-northeast-1":"891482049861.dkr.ecr.ap-northeast-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 18 | "ap-northeast-2":"891482049861.dkr.ecr.ap-northeast-2.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 19 | "ap-southeast-1":"891482049861.dkr.ecr.ap-southeast-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 20 | "ap-southeast-2":"891482049861.dkr.ecr.ap-southeast-2.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 21 | "ap-south-1":"891482049861.dkr.ecr.ap-south-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 22 | "ap-east-1":"492694554042.dkr.ecr.ap-east-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 23 | "ca-central-1": "891482049861.dkr.ecr.ca-central-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 24 | "eu-central-1":"891482049861.dkr.ecr.eu-central-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 25 | "eu-north-1":"891482049861.dkr.ecr.eu-north-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 26 | "eu-west-1":"891482049861.dkr.ecr.eu-west-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 27 | "eu-west-2":"891482049861.dkr.ecr.eu-west-2.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 28 | 
"eu-west-3":"891482049861.dkr.ecr.eu-west-3.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 29 | "me-south-1":"931515848886.dkr.ecr.me-south-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 30 | "sa-east-1":"891482049861.dkr.ecr.sa-east-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 31 | "cn-north-1":"639043989634.dkr.ecr.cn-north-1.amazonaws.com.cn/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 32 | "cn-northwest-1":"639043989634.dkr.ecr.cn-northwest-1.amazonaws.com.cn/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3", 33 | "us-gov-west-1":"106643864031.dkr.ecr.sa-east-1.amazonaws.com/graphlytics-pytorch-inference:v1-1.6.0-cpu-py3" 34 | } 35 | } -------------------------------------------------------------------------------- /src/graph_notebook/start_notebook.py: -------------------------------------------------------------------------------- 1 | """ 2 | Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved. 3 | SPDX-License-Identifier: Apache-2.0 4 | """ 5 | 6 | import os 7 | import argparse 8 | import json 9 | 10 | HOME_PATH = os.path.expanduser("~") 11 | NBCONFIG_DIR_TREE = HOME_PATH + '/.jupyter/nbconfig' 12 | NOTEBOOK_CFG_PATH = NBCONFIG_DIR_TREE + '/notebook.json' 13 | 14 | CUSTOM_DIR_TREE = HOME_PATH + '/.jupyter/custom' 15 | NOTEBOOK_CUSTOMJS_PATH = CUSTOM_DIR_TREE + '/custom.js' 16 | 17 | 18 | def patch_cm_cypher_config(): 19 | cypher_cfg = { 20 | "cm_config": { 21 | "smartIndent": False 22 | } 23 | } 24 | 25 | try: 26 | os.makedirs(NBCONFIG_DIR_TREE, exist_ok=True) 27 | with open(NOTEBOOK_CFG_PATH, 'r') as file: 28 | notebook_cfg = json.load(file) 29 | except (json.decoder.JSONDecodeError, FileNotFoundError) as e: 30 | notebook_cfg = {} 31 | 32 | notebook_cfg["CodeCell"] = cypher_cfg 33 | 34 | with open(NOTEBOOK_CFG_PATH, 'w') as file: 35 | json.dump(notebook_cfg, file, indent=2) 36 | 37 | 38 | def patch_customjs(): 39 | # Increases time allotted to load nbextensions on large notebooks. 
Limit is set to 60s and can be increased further. 40 | # Reference: https://github.com/ipython-contrib/jupyter_contrib_nbextensions/blob/master/docs/source/troubleshooting.md#extensions-not-loading-for-large-notebooks 41 | limit = "60" 42 | increase_requirejs_timeout_prefix = "window.requirejs.config({waitSeconds:" 43 | increase_requirejs_timeout_suffix = "});" 44 | requirejs_timeout_full = increase_requirejs_timeout_prefix + limit + increase_requirejs_timeout_suffix 45 | 46 | try: 47 | os.makedirs(CUSTOM_DIR_TREE, exist_ok=True) 48 | with open(NOTEBOOK_CUSTOMJS_PATH, 'r') as file: 49 | customjs_content = file.read() 50 | except (json.decoder.JSONDecodeError, FileNotFoundError) as e: 51 | customjs_content = "" 52 | 53 | if increase_requirejs_timeout_prefix not in customjs_content: 54 | if customjs_content: 55 | customjs_content += "\n" 56 | customjs_content += requirejs_timeout_full 57 | with open(NOTEBOOK_CUSTOMJS_PATH, 'w') as file: 58 | file.write(customjs_content) 59 | print(f"Modified nbextensions loader timeout limit to {limit} seconds") 60 | 61 | 62 | def main(): 63 | parser = argparse.ArgumentParser() 64 | parser.add_argument('--notebooks-dir', default='', type=str, help='The directory to start Jupyter from.') 65 | 66 | args = parser.parse_args() 67 | 68 | patch_cm_cypher_config() 69 | patch_customjs() 70 | 71 | # Starting with Notebook 7.0+, the classic notebook interface was rewritten to use JupyterLab's architecture. 72 | # This means traditional nbextensions (which rely on requirejs and jQuery) are not directly supported. 
73 | # We use nbclassic package to maintain compatibility 74 | # Reference: https://jupyter-notebook.readthedocs.io/en/latest/migrating/multiple-interfaces.html#simultaneous-usage-of-different-versions-of-notebook-7-and-the-classic-notebook-ui 75 | kernel_manager_option = "--NotebookApp.kernel_manager_class=notebook.services.kernels.kernelmanager.AsyncMappingKernelManager" 76 | notebooks_dir = '~/notebook/destination/dir' if args.notebooks_dir == '' else args.notebooks_dir 77 | os.system(f'''jupyter nbclassic {kernel_manager_option} {notebooks_dir}''') 78 | 79 | 80 | if __name__ == '__main__': 81 | main() 82 | -------------------------------------------------------------------------------- /src/graph_notebook/visualization/templates/tabs.html: -------------------------------------------------------------------------------- 1 |
2 | {% block style %} 3 | 63 | {% endblock %} 64 | 65 | {% block javascript %} 66 | 87 | {% endblock %} 88 |
89 |
90 | {% for t in tabs %} 91 | 92 | {% endfor %} 93 |
94 | 95 |
96 | {% for t in tabs %} 97 |
98 | {{t['content']}} 99 |
100 | {% endfor %} 101 |
102 |
103 |
class TestSummaryWithIAM(IntegrationTest):
    """Integration coverage for the Neptune graph summary API with IAM auth."""

    def setUp(self) -> None:
        super().setUp()
        # Every request in this suite is SigV4-signed via the default botocore session.
        self.client = self.client_builder.with_iam(get_session()).build()

    @pytest.mark.neptune
    @pytest.mark.iam
    @parameterized.expand(lang_list)
    def test_summary_default(self, lang):
        """A mode-less summary request succeeds and carries the standard payload keys."""
        response = self.client.statistics(lang, True)
        assert response.status_code == 200
        body = response.json()
        self.assertEqual(body['status'], '200 OK')
        payload_keys = list(body['payload'].keys())
        for expected_key in ['version', 'lastStatisticsComputationTime', 'graphSummary']:
            self.assertIn(expected_key, payload_keys)

    @pytest.mark.neptune
    @pytest.mark.iam
    def test_summary_basic_pg(self):
        """Basic-mode PG summaries include counts but omit detailed structure fields."""
        response = self.client.statistics("pg", True, "basic")
        assert response.status_code == 200
        body = response.json()
        self.assertEqual(body['status'], '200 OK')
        graph_summary_keys = list(body['payload']['graphSummary'].keys())

        self.assertIn("numNodes", graph_summary_keys)
        for detailed_key in summary_detailed_fields_pg:
            self.assertNotIn(detailed_key, graph_summary_keys)

    @pytest.mark.neptune
    @pytest.mark.iam
    def test_summary_basic_rdf(self):
        """Basic-mode RDF summaries include counts but omit detailed structure fields."""
        response = self.client.statistics("rdf", True, "basic")
        assert response.status_code == 200
        body = response.json()
        self.assertEqual(body['status'], '200 OK')
        graph_summary_keys = list(body['payload']['graphSummary'].keys())

        self.assertIn("numDistinctSubjects", graph_summary_keys)
        for detailed_key in summary_detailed_fields_rdf:
            self.assertNotIn(detailed_key, graph_summary_keys)

    @pytest.mark.neptune
    @pytest.mark.iam
    def test_summary_detailed_pg(self):
        """Detailed-mode PG summaries expose the node/edge structure fields."""
        response = self.client.statistics("pg", True, "detailed")
        assert response.status_code == 200
        body = response.json()
        self.assertEqual(body['status'], '200 OK')
        graph_summary_keys = list(body['payload']['graphSummary'].keys())

        for detailed_key in summary_detailed_fields_pg:
            self.assertIn(detailed_key, graph_summary_keys)

    @pytest.mark.neptune
    @pytest.mark.iam
    def test_summary_detailed_rdf(self):
        """Detailed-mode RDF summaries expose the subject structure fields."""
        response = self.client.statistics("rdf", True, "detailed")
        assert response.status_code == 200
        body = response.json()
        self.assertEqual(body['status'], '200 OK')
        graph_summary_keys = list(body['payload']['graphSummary'].keys())

        for detailed_key in summary_detailed_fields_rdf:
            self.assertIn(detailed_key, graph_summary_keys)
GRAPH_NOTEBOOK_AUTH_MODE="DEFAULT" 17 | ENV GRAPH_NOTEBOOK_HOST="neptune.cluster-XXXXXXXXXXXX.us-east-1.neptune.amazonaws.com" 18 | ENV GRAPH_NOTEBOOK_PROXY_PORT="8192" 19 | ENV GRAPH_NOTEBOOK_PROXY_HOST="" 20 | ENV GRAPH_NOTEBOOK_PORT="8182" 21 | ENV NEPTUNE_LOAD_FROM_S3_ROLE_ARN="" 22 | ENV AWS_REGION="us-east-1" 23 | ENV NOTEBOOK_PORT="8888" 24 | ENV LAB_PORT="8889" 25 | ENV GRAPH_NOTEBOOK_SSL="True" 26 | ENV NOTEBOOK_PASSWORD="admin" 27 | ENV PROVIDE_EXAMPLES=1 28 | 29 | 30 | # "when the SIGTERM signal is sent to the docker process, it immediately quits and all established connections are closed" 31 | # "graceful stop is triggered when the SIGUSR1 signal is sent to the docker process" 32 | STOPSIGNAL SIGUSR1 33 | 34 | 35 | RUN mkdir -p "${WORKING_DIR}" && \ 36 | mkdir -p "${NOTEBOOK_DIR}" && \ 37 | mkdir -p "${EXAMPLE_NOTEBOOK_DIR}" && \ 38 | # Yum Update and install dependencies 39 | yum update -y && \ 40 | yum install tar gzip git findutils -y && \ 41 | # Install NPM/Node 42 | curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.3/install.sh | bash && \ 43 | . 
~/.nvm/nvm.sh && \ 44 | nvm install ${NODE_VERSION} && \ 45 | # Install Python 46 | yum install python${PYTHON_VERSION} -y && \ 47 | # update-alternatives --install /usr/bin/python3 python3 /usr/bin/python${PYTHON_VERSION} 1 && \ 48 | echo 'Using python version:' && \ 49 | python${PYTHON_VERSION} --version && \ 50 | python${PYTHON_VERSION} -m ensurepip --upgrade && \ 51 | python${PYTHON_VERSION} -m venv /tmp/venv && \ 52 | source /tmp/venv/bin/activate && \ 53 | cd "${WORKING_DIR}" && \ 54 | # Clone the repo and install python dependencies 55 | git clone https://github.com/aws/graph-notebook && \ 56 | cd "${WORKING_DIR}/graph-notebook" && \ 57 | pip3 install --upgrade pip setuptools wheel && \ 58 | pip3 install twine==3.7.1 && \ 59 | pip3 install -r requirements.txt && \ 60 | pip3 install "jupyterlab>=4.3.5,<5" && \ 61 | pip3 install --upgrade build hatch hatch-jupyter-builder && \ 62 | # Build the package 63 | python3 -m build . && \ 64 | # install the copied repo 65 | pip3 install . && \ 66 | # copy premade starter notebooks 67 | cd "${WORKING_DIR}/graph-notebook" && \ 68 | python3 -m graph_notebook.notebooks.install --destination "${EXAMPLE_NOTEBOOK_DIR}" && \ 69 | jupyter nbclassic-extension enable --py --sys-prefix graph_notebook.widgets && \ 70 | # This allows for the `.ipython` to be set 71 | python -m graph_notebook.start_jupyterlab --jupyter-dir "${NOTEBOOK_DIR}" && \ 72 | deactivate && \ 73 | # Cleanup 74 | yum clean all && \ 75 | yum remove wget tar git -y && \ 76 | rm -rf /var/cache/yum && \ 77 | rm -rf "${WORKING_DIR}/graph-notebook" && \ 78 | rm -rf /root/.cache && \ 79 | rm -rf /root/.npm/_cacache && \ 80 | cd /usr/share && \ 81 | rm -r $(ls -A | grep -v terminfo) 82 | 83 | ADD "docker/Example-Remote-Server-Setup.ipynb" "${NOTEBOOK_DIR}/Example-Remote-Server-Setup.ipynb" 84 | ADD ./docker/service.sh /usr/bin/service.sh 85 | RUN chmod +x /usr/bin/service.sh 86 | 87 | ENTRYPOINT [ "bash","-c","service.sh" ] 88 | 
-------------------------------------------------------------------------------- /additional-databases/gremlin-server/README.md: -------------------------------------------------------------------------------- 1 | ## Connecting graph-notebook to a Gremlin Server 2 | 3 | ![Gremlin](https://github.com/aws/graph-notebook/blob/main/images/gremlin-notebook.png?raw=true, "Picture of Gremlin holding a notebook") 4 | 5 | These notes explain how to connect the graph-notebook to a Gremlin server running locally on the same machine. The same steps should also work if you have a remote Gremlin Server. In such cases `localhost` should be replaced with the DNS or IP address of the remote server. It is assumed the [graph-notebook installation](https://github.com/aws/graph-notebook/blob/main/README.md) has been completed and the Jupyter environment is running before following these steps. 6 | 7 | ### Gremlin Server Configuration 8 | Several of the steps below are optional but please read each step carefully and decide if you want to apply it. 9 | 1. Download the Gremlin Server from https://tinkerpop.apache.org/ and unzip it. The remaining steps in this section assume you have made your working directory the place where you performed the unzip. 10 | 2. In conf/tinkergraph-empty.properties, change the ID manager from `LONG` to `ANY` to 11 | enable IDs that include text strings. 12 | ``` 13 | gremlin.tinkergraph.vertexIdManager=ANY 14 | ``` 15 | 3. Optionally add another line doing the same for edge IDs. 16 | ``` 17 | gremlin.tinkergraph.edgeIdManager=ANY 18 | 19 | ``` 20 | 4. 
To enable HTTP as well as Web Socket connections to the Gremlin Server, edit the file conf/gremlin-server.yaml and change
"""AWS Glue job: publish a 'transactions' catalog table into Amazon Neptune.

Creates Product vertices and user->product 'purchased' edges via Gremlin
upserts. Meant to run inside AWS Glue (awsglue/pyspark runtime) with the
neptune_python_utils helper library on the job classpath.
"""
import sys, boto3, os

from awsglue.utils import getResolvedOptions
from pyspark.context import SparkContext
from awsglue.context import GlueContext
from awsglue.job import Job
from awsglue.transforms import ApplyMapping
from awsglue.transforms import RenameField
from awsglue.transforms import SelectFields
from awsglue.dynamicframe import DynamicFrame
from pyspark.sql.functions import lit
from pyspark.sql.functions import format_string
from gremlin_python import statics
from gremlin_python.structure.graph import Graph
from gremlin_python.process.graph_traversal import __
from gremlin_python.process.strategies import *
from gremlin_python.driver.driver_remote_connection import DriverRemoteConnection
from gremlin_python.process.traversal import *
from neptune_python_utils.glue_neptune_connection_info import GlueNeptuneConnectionInfo
from neptune_python_utils.glue_gremlin_client import GlueGremlinClient
from neptune_python_utils.glue_gremlin_csv_transforms import GlueGremlinCsvTransforms
from neptune_python_utils.endpoints import Endpoints
from neptune_python_utils.gremlin_utils import GremlinUtils

# Job parameters are supplied by the Glue job definition at launch time.
args = getResolvedOptions(sys.argv, ['JOB_NAME', 'DATABASE_NAME', 'NEPTUNE_CONNECTION_NAME', 'AWS_REGION', 'CONNECT_TO_NEPTUNE_ROLE_ARN'])
sc = SparkContext()
glueContext = GlueContext(sc)
job = Job(glueContext)
job.init(args['JOB_NAME'], args)
database = args['DATABASE_NAME']
user_transactions_table = 'transactions'

# Create Gremlin client
# Connection details come from the named Glue connection; requests to Neptune
# are made under the CONNECT_TO_NEPTUNE_ROLE_ARN role.
gremlin_endpoints = GlueNeptuneConnectionInfo(args['AWS_REGION'], args['CONNECT_TO_NEPTUNE_ROLE_ARN']).neptune_endpoints(args['NEPTUNE_CONNECTION_NAME'])
gremlin_client = GlueGremlinClient(gremlin_endpoints)

# 1. Get data from source SQL database
datasource0 = glueContext.create_dynamic_frame.from_catalog(database = database, table_name = user_transactions_table, transformation_ctx = "datasource0")

# 2. Map fields to bulk load CSV column headings format
# Columns are renamed to the Neptune bulk-load style "name:Type" headers that
# the GlueGremlinCsvTransforms helpers below expect.
applymapping1 = ApplyMapping.apply(frame = datasource0, mappings = [("transaction_id", "string", "transaction_id:String", "string"),
("user_id", "string", "user_id:String", "string"), ("product_id", "string", "product_id:String", "string"),
("product_name", "string", "product_name:String", "string"), ("purchased_date", "string", "purchased_date:String","string"),("review", "string", "review:String","string")], transformation_ctx = "applymapping1")

# 3. create product vertices
# '~id' is built as 'product' + product_id so vertex IDs are namespaced;
# upserts are idempotent, so re-running the job is safe for vertices.
productDF = SelectFields.apply(frame = applymapping1, paths = ["product_id:String","product_name:String"], transformation_ctx = "productDF")
productDF = GlueGremlinCsvTransforms.create_prefixed_columns(productDF, [('~id', 'product_id:String','product')])
productDF.toDF().foreachPartition(gremlin_client.upsert_vertices('Product', batch_size=100))

# 4. create user to product edges
# NOTE(review): user vertices (the '~from' side, prefixed 'user') are not
# created here — presumably a companion job loads them; verify ordering.
userToProductMapping = SelectFields.apply(frame = applymapping1, paths = ["user_id:String","product_id:String","purchased_date:String"], transformation_ctx = "userToProductMapping")
userToProductMapping = GlueGremlinCsvTransforms.create_prefixed_columns(userToProductMapping, [('~from', 'user_id:String','user'),('~to', 'product_id:String','product')])
userToProductMapping = GlueGremlinCsvTransforms.create_edge_id_column(userToProductMapping, '~from', '~to')
userToProductMapping.toDF().foreachPartition(gremlin_client.upsert_edges('purchased', batch_size=100))

job.commit()
print("Done")