├── Dockerfile
├── LICENSE
├── README.md
├── app.py
├── packages.txt
└── requirements.txt

/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.11

RUN apt-get update && apt-get install --yes graphviz

COPY requirements.txt requirements.txt

COPY app.py app.py

RUN pip install --no-cache-dir --upgrade pip

RUN pip install --no-cache-dir --upgrade -r requirements.txt

ENTRYPOINT ["solara", "run", "app.py", "--host=0.0.0.0", "--port=80"]
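
# Example usage (illustrative; the image tag is arbitrary, and the app reads
# OPENAI_API_KEY from the environment):
#   docker build -t knowledge-graph-generator .
#   docker run -p 80:80 -e OPENAI_API_KEY=<your-key> knowledge-graph-generator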
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
MIT License

Copyright (c) 2024 Alonso Silva Allende

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Knowledge Graph Generator

Enter some text and the language model will try to describe it as a knowledge graph.

Pure Python app:
* Structured extraction with the [Instructor](https://python.useinstructor.com/) library
* UI built with [Solara](https://solara.dev)
* [OpenAI](https://openai.com/) language model
* Deployed on [Ploomber](https://ploomber.io/)
* [Deployed app](https://knowledgegraph.ploomberapp.io/)

https://github.com/alonsosilvaallende/knowledge-graph-generator/assets/30263736/59f8011e-ad7c-42f8-9e21-09a512e31191

--------------------------------------------------------------------------------
/app.py:
--------------------------------------------------------------------------------
#from dotenv import load_dotenv, find_dotenv
#_ = load_dotenv(find_dotenv())

import solara
from typing import Any, Callable, Optional, TypeVar, Union, cast, overload, List
from typing_extensions import TypedDict
import time
import ipyvue
import reacton
from solara.alias import rv as v
import os
import openai
from openai import OpenAI
import instructor
from pydantic import BaseModel, Field
from graphviz import Digraph

from langsmith import traceable
from langsmith.wrappers import wrap_openai

# NEEDED FOR INPUT TEXT AREA INSTEAD OF INPUT TEXT
def use_change(el: reacton.core.Element, on_value: Callable[[Any], Any], enabled=True):
    """Trigger a callback when a blur event occurs or the enter key is pressed."""
    on_value_ref = solara.use_ref(on_value)
    on_value_ref.current = on_value
    def add_events():
        def on_change(widget, event, data):
            if enabled:
                on_value_ref.current(widget.v_model)
        widget = cast(ipyvue.VueWidget, solara.get_widget(el))
        if enabled:
            widget.on_event("blur", on_change)
            widget.on_event("keyup.enter", on_change)
        def cleanup():
            if enabled:
                widget.on_event("blur", on_change, remove=True)
                widget.on_event("keyup.enter", on_change, remove=True)
        return cleanup
    solara.use_effect(add_events, [enabled])


@solara.component
def InputTextarea(
    label: str,
    value: Union[str, solara.Reactive[str]] = "",
    on_value: Optional[Callable[[str], None]] = None,
    disabled: bool = False,
    password: bool = False,
    continuous_update: bool = False,
    error: Union[bool, str] = False,
    message: Optional[str] = None,
):
    reactive_value = solara.use_reactive(value, on_value)
    del value, on_value
    def set_value_cast(value):
        reactive_value.value = str(value)
    def on_v_model(value):
        if continuous_update:
            set_value_cast(value)
    messages = []
    if error and isinstance(error, str):
        messages.append(error)
    elif message:
        messages.append(message)
    text_area = v.Textarea(
        v_model=reactive_value.value,
        on_v_model=on_v_model,
        label=label,
        disabled=disabled,
        type="password" if password else None,
        error=bool(error),
        messages=messages,
        solo=True,
        hide_details=True,
        outlined=True,
        rows=1,
        auto_grow=True,
    )
    use_change(text_area, set_value_cast, enabled=not continuous_update)
    return text_area

# EXTRACTION
openai.api_key = os.environ['OPENAI_API_KEY']

# Wrap the OpenAI client with LangSmith
client = wrap_openai(OpenAI())

# Patch the client with instructor
client = instructor.from_openai(client, mode=instructor.Mode.TOOLS)
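
# Schema passed to Instructor as the response_model: the LLM's structured output
# is validated into Node and Edge objects, which ChatInterface renders with Graphviz.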
class Node(BaseModel):
    id: int
    label: str
    color: str

class Edge(BaseModel):
    source: int
    target: int
    label: str
    color: str = "black"

class KnowledgeGraph(BaseModel):
    nodes: List[Node] = Field(description="Nodes in the knowledge graph")
    edges: List[Edge] = Field(description="Edges in the knowledge graph")

class MessageDict(TypedDict):
    role: str
    content: str

def add_chunk_to_ai_message(chunk: str):
    # Replace the last message with the newest assistant snapshot.
    messages.value = [
        *messages.value[:-1],
        {
            "role": "assistant",
            "content": chunk,
        },
    ]

import ast

# DISPLAYED OUTPUT
@solara.component
def ChatInterface():
    with solara.lab.ChatBox():
        if len(messages.value) > 0:
            if messages.value[-1]["role"] != "user":
                obj = messages.value[-1]["content"]
                if f"{obj}" != "":
                    obj = ast.literal_eval(f"{obj}")
                    dot = Digraph(comment="Knowledge Graph")
                    if obj['nodes'] not in [None, []]:
                        if obj['nodes'][0]['label'] not in [None, '']:
                            for node in obj['nodes']:
                                if node['label'] not in [None, '']:
                                    dot.node(
                                        name=str(node['id']),
                                        label=node['label'],
                                        color=node['color']
                                    )
                    if obj['edges'] not in [None, []]:
                        if obj['edges'][0]['label'] not in [None, '']:
                            for edge in obj['edges']:
                                if edge['source'] not in [None, ''] and edge['target'] not in [None, ''] and edge['label'] not in [None, '']:
                                    dot.edge(
                                        tail_name=str(edge['source']),
                                        head_name=str(edge['target']),
                                        label=edge['label'],
                                        color=edge['color']
                                    )
                    with solara.Card():
                        solara.display(dot)

messages: solara.Reactive[List[MessageDict]] = solara.reactive([])
aux = solara.reactive("")  # last streamed snapshot, used to skip redundant updates
text_block = solara.reactive("Alice loves Bob while Charles hates both Alice and Bob.")

@solara.component
def Page():
    title = "Knowledge Graph Generator"
    with solara.Head():
        solara.Title(f"{title}")
    with solara.Column(style={"width": "100%", "padding": "50px"}):
        solara.Markdown(f"# {title}")
        solara.Markdown("Enter some text and the language model will try to describe it as a knowledge graph. Done with :heart: by [alonsosilva](https://twitter.com/alonsosilva)")
        user_message_count = len([m for m in messages.value if m["role"] == "user"])
        def send():
            # The message content is only a placeholder; the prompt is built from text_block.
            messages.value = [*messages.value, {"role": "user", "content": " "}]
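        # create_partial streams progressively more complete KnowledgeGraph objects;
        # each snapshot replaces the previous assistant message, so the graph
        # re-renders while the model is still responding.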
        def response(message):
            extraction_stream = client.chat.completions.create_partial(
                model="gpt-3.5-turbo",
                response_model=KnowledgeGraph,
                messages=[
                    {
                        "role": "user",
                        "content": f"Help me understand the following by describing it as a small knowledge graph: {text_block.value}. It is important to add a variety of colors to the nodes.",
                    },
                ],
                temperature=0,
                stream=True,
            )
            for extraction in extraction_stream:
                obj = extraction.model_dump()
                if f"{obj}" != aux.value:
                    add_chunk_to_ai_message(f"{obj}")
                    aux.value = f"{obj}"
        def result():
            if messages.value != []:
                if messages.value[-1]["role"] == "user":
                    response(messages.value[-1]["content"])
        result = solara.lab.use_task(result, dependencies=[user_message_count])
        InputTextarea("Enter text:", value=text_block, continuous_update=False)
        solara.Button(label="Generate Knowledge Graph", on_click=send)
        ChatInterface()

Page()
--------------------------------------------------------------------------------
/packages.txt:
--------------------------------------------------------------------------------
graphviz=10.0.1
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
solara==1.31.0
openai==1.17.0
instructor==1.1.0
graphviz==0.20.3
langsmith==0.1.47
--------------------------------------------------------------------------------
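
A minimal local-run sketch (assumes system Graphviz is installed and an OpenAI API key is set in the environment):

    pip install -r requirements.txt
    export OPENAI_API_KEY=...
    solara run app.py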