├── .DS_Store ├── Car_Manual_Bot.json ├── Chat_with_Git_Repo.json ├── Chat_with_PDF.json ├── Customer_Service_Amazon.json ├── Dropbox_Text_Files_Chat.json ├── EdTech_Chatbot.json ├── Image_OCR.json ├── Indic_Language_Translation.json ├── Multi_Type_Input_Chat.json ├── Multilingual_RAG.json ├── OCR_Scanned_Chat.json ├── README.md ├── Simple_Chat.json ├── Whisper_Speech_to_Text.json ├── Youtube_Chat.json ├── multimodal_image.json └── multimodal_speech.json /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aiplanethub/ai-stacks/99968bbb056a6d3acbb0a957c2b708a303941b5a/.DS_Store -------------------------------------------------------------------------------- /Image_OCR.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "ff46f666-737c-4238-a69a-dd818965b1d3", 3 | "data": { 4 | "nodes": [ 5 | { 6 | "width": 384, 7 | "height": 548, 8 | "id": "AzureAIDocumentIntelligenceLoader-6ryDk", 9 | "type": "genericNode", 10 | "position": { 11 | "x": 249, 12 | "y": 202.265625 13 | }, 14 | "data": { 15 | "type": "AzureAIDocumentIntelligenceLoader", 16 | "node": { 17 | "template": { 18 | "code": { 19 | "dynamic": true, 20 | "required": true, 21 | "placeholder": "", 22 | "show": false, 23 | "multiline": true, 24 | "value": "from typing import List\nfrom genflow.interface.custom.custom_component import CustomComponent\nfrom langchain_community.document_loaders import AzureAIDocumentIntelligenceLoader\nfrom langchain.schema import Document\n\n\nclass AzureAIDocumentIntelligenceLoaderComponent(CustomComponent):\n display_name: str = \"AzureAIDocumentIntelligenceLoader\"\n description: str = \"Azure ai document intelligence Loader.\"\n documentation: str = \"https://docs.aiplanet.com/components/document-loaders#azureai-document-intelligence-loader\"\n\n def build_config(self):\n return {\n \"file_path\": {\n \"display_name\": \"File Path\",\n \"required\": True,\n \"field_type\": \"file\",\n \"file_types\": [\"pdf\", \"jpeg\", \"png\", \"bmp\", \"tiff\"],\n \"suffixes\": [\n \".pdf\",\n \".jpg\",\n \".jpeg\",\n \".png\",\n \".bmp\",\n \".dib\" \".tiff\",\n \".tif\",\n ],\n \"input_types\": [\"Input\"],\n },\n \"endpoint\": {\"display_name\": \"Endpoint\", \"required\": True},\n \"key\": {\"display_name\": \"Key\", \"required\": True, \"password\": True},\n \"api_model\": {\n \"display_name\": \"API Model\",\n \"required\": True,\n \"value\": \"prebuilt-layout\",\n },\n \"code\": {\"show\": False},\n \"file_size\": {\n \"display_name\": \"File Size\",\n \"required\": True,\n \"advanced\": True,\n \"field_type\": \"int\",\n \"value\": 20,\n },\n }\n\n def build(\n self, file_path: str, endpoint: str, key: str, api_model: str, file_size: int\n ) -> List[Document]:\n loader = AzureAIDocumentIntelligenceLoader(\n api_endpoint=endpoint, api_key=key, file_path=file_path, api_model=api_model\n )\n results = loader.load()\n return results\n", 25 | "password": false, 26 | "name": "code", 27 | "advanced": false, 28 | "type": "code", 29 | "list": false 30 | }, 31 | "_type": "CustomComponent", 32 | "api_model": { 33 | "required": true, 34 | "placeholder": "", 35 | "show": true, 36 | "multiline": false, 37 | "value": "prebuilt-layout", 38 | "password": false, 39 | "name": "api_model", 40 | "display_name": "API Model", 41 | "advanced": false, 42 | "dynamic": false, 43 | "info": "", 44 | "type": "str", 45 | "list": false 46 | }, 47 | "endpoint": { 48 | "required": true, 49 | "placeholder": "", 50 | 
"show": true, 51 | "multiline": false, 52 | "password": false, 53 | "name": "endpoint", 54 | "display_name": "Endpoint", 55 | "advanced": false, 56 | "dynamic": false, 57 | "info": "", 58 | "type": "str", 59 | "list": false, 60 | "value": "" 61 | }, 62 | "file_path": { 63 | "required": true, 64 | "placeholder": "", 65 | "show": true, 66 | "multiline": false, 67 | "suffixes": [ 68 | ".pdf", 69 | ".jpg", 70 | ".jpeg", 71 | ".png", 72 | ".bmp", 73 | ".dib.tiff", 74 | ".tif" 75 | ], 76 | "password": false, 77 | "name": "file_path", 78 | "display_name": "File Path", 79 | "advanced": false, 80 | "input_types": [ 81 | "Input" 82 | ], 83 | "dynamic": false, 84 | "info": "", 85 | "type": "file", 86 | "list": false, 87 | "fileTypes": [ 88 | "pdf", 89 | "jpeg", 90 | "png", 91 | "bmp", 92 | "tiff" 93 | ], 94 | "file_path": "/mnt/models/files/ff46f666-737c-4238-a69a-dd818965b1d3/7992fa1651259e0e89346e0cc65a5b8bea632589efa1af3c4b52875b27826410.png", 95 | "value": "ai.png" 96 | }, 97 | "file_size": { 98 | "required": true, 99 | "placeholder": "", 100 | "show": true, 101 | "multiline": false, 102 | "value": 20, 103 | "password": false, 104 | "name": "file_size", 105 | "display_name": "File Size", 106 | "advanced": true, 107 | "dynamic": false, 108 | "info": "", 109 | "type": "int", 110 | "list": false 111 | }, 112 | "key": { 113 | "required": true, 114 | "placeholder": "", 115 | "show": true, 116 | "multiline": false, 117 | "password": true, 118 | "name": "key", 119 | "display_name": "Key", 120 | "advanced": false, 121 | "dynamic": false, 122 | "info": "", 123 | "type": "str", 124 | "list": false, 125 | "value": "" 126 | } 127 | }, 128 | "description": "Azure ai document intelligence Loader.", 129 | "base_classes": [ 130 | "Document" 131 | ], 132 | "display_name": "AzureAIDocumentIntelligenceLoader", 133 | "custom_fields": { 134 | "api_model": null, 135 | "endpoint": null, 136 | "file_path": null, 137 | "file_size": null, 138 | "key": null 139 | }, 140 | "output_types": [ 141 | "AzureAIDocumentIntelligenceLoader" 142 | ], 143 | "documentation": "https://docs.aiplanet.com/components/document-loaders#azureai-document-intelligence-loader", 144 | "beta": true, 145 | "error": null 146 | }, 147 | "id": "AzureAIDocumentIntelligenceLoader-6ryDk" 148 | }, 149 | "positionAbsolute": { 150 | "x": 249, 151 | "y": 202.265625 152 | }, 153 | "selected": false, 154 | "dragging": false 155 | }, 156 | { 157 | "width": 384, 158 | "height": 502, 159 | "id": "RecursiveCharacterTextSplitter-7Jg1J", 160 | "type": "genericNode", 161 | "position": { 162 | "x": 771.105649062728, 163 | "y": 458.7525810390156 164 | }, 165 | "data": { 166 | "type": "RecursiveCharacterTextSplitter", 167 | "node": { 168 | "template": { 169 | "code": { 170 | "dynamic": true, 171 | "required": true, 172 | "placeholder": "", 173 | "show": false, 174 | "multiline": true, 175 | "value": "from typing import Optional\nfrom genflow import CustomComponent\nfrom langchain.schema import Document\nfrom genflow.utils.util import build_loader_repr_from_documents\n\n\nclass RecursiveCharacterTextSplitterComponent(CustomComponent):\n display_name: str = \"Recursive Character Text Splitter\"\n description: str = \"Split text into chunks of a specified length.\"\n documentation: str = \"https://docs.aiplanet.com/components/text-splitters#recursivecharactertextsplitter\"\n \n def build_config(self):\n return {\n \"documents\": {\n \"display_name\": \"Documents\",\n \"info\": \"The documents to split.\",\n },\n \"separators\": {\n \"display_name\": \"Separators\",\n 
\"info\": 'The characters to split on.\\nIf left empty defaults to [\"\\\\n\\\\n\", \"\\\\n\", \" \", \"\"].',\n \"is_list\": True,\n },\n \"chunk_size\": {\n \"display_name\": \"Chunk Size\",\n \"info\": \"The maximum length of each chunk.\",\n \"field_type\": \"int\",\n \"value\": 1000,\n },\n \"chunk_overlap\": {\n \"display_name\": \"Chunk Overlap\",\n \"info\": \"The amount of overlap between chunks.\",\n \"field_type\": \"int\",\n \"value\": 200,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n documents: list[Document],\n separators: Optional[list[str]] = None,\n chunk_size: Optional[int] = 1000,\n chunk_overlap: Optional[int] = 200,\n ) -> list[Document]:\n \"\"\"\n Split text into chunks of a specified length.\n\n Args:\n separators (list[str]): The characters to split on.\n chunk_size (int): The maximum length of each chunk.\n chunk_overlap (int): The amount of overlap between chunks.\n length_function (function): The function to use to calculate the length of the text.\n\n Returns:\n list[str]: The chunks of text.\n \"\"\"\n from langchain.text_splitter import RecursiveCharacterTextSplitter\n\n if separators == \"\":\n separators = None\n elif separators:\n # check if the separators list has escaped characters\n # if there are escaped characters, unescape them\n separators = [x.encode().decode(\"unicode-escape\") for x in separators]\n\n # Make sure chunk_size and chunk_overlap are ints\n if isinstance(chunk_size, str):\n chunk_size = int(chunk_size)\n if isinstance(chunk_overlap, str):\n chunk_overlap = int(chunk_overlap)\n splitter = RecursiveCharacterTextSplitter(\n separators=separators,\n chunk_size=chunk_size,\n chunk_overlap=chunk_overlap,\n )\n\n docs = splitter.split_documents(documents)\n self.repr_value = build_loader_repr_from_documents(docs)\n return docs\n", 176 | "password": false, 177 | "name": "code", 178 | "advanced": false, 179 | "type": "code", 180 | "list": false 181 | }, 182 | "_type": "CustomComponent", 183 | "chunk_overlap": { 184 | "required": false, 185 | "placeholder": "", 186 | "show": true, 187 | "multiline": false, 188 | "value": 200, 189 | "password": false, 190 | "name": "chunk_overlap", 191 | "display_name": "Chunk Overlap", 192 | "advanced": false, 193 | "dynamic": false, 194 | "info": "The amount of overlap between chunks.", 195 | "type": "int", 196 | "list": false 197 | }, 198 | "chunk_size": { 199 | "required": false, 200 | "placeholder": "", 201 | "show": true, 202 | "multiline": false, 203 | "value": 1000, 204 | "password": false, 205 | "name": "chunk_size", 206 | "display_name": "Chunk Size", 207 | "advanced": false, 208 | "dynamic": false, 209 | "info": "The maximum length of each chunk.", 210 | "type": "int", 211 | "list": false 212 | }, 213 | "documents": { 214 | "required": true, 215 | "placeholder": "", 216 | "show": true, 217 | "multiline": false, 218 | "password": false, 219 | "name": "documents", 220 | "display_name": "Documents", 221 | "advanced": false, 222 | "dynamic": false, 223 | "info": "The documents to split.", 224 | "type": "Document", 225 | "list": true 226 | }, 227 | "separators": { 228 | "required": false, 229 | "placeholder": "", 230 | "show": true, 231 | "multiline": false, 232 | "password": false, 233 | "name": "separators", 234 | "display_name": "Separators", 235 | "advanced": false, 236 | "dynamic": false, 237 | "info": "The characters to split on.\nIf left empty defaults to [\"\\n\\n\", \"\\n\", \" \", \"\"].", 238 | "type": "str", 239 | "list": true 240 | } 241 | }, 242 | "description": "Split 
text into chunks of a specified length.", 243 | "base_classes": [ 244 | "Document" 245 | ], 246 | "display_name": "Recursive Character Text Splitter", 247 | "custom_fields": { 248 | "chunk_overlap": null, 249 | "chunk_size": null, 250 | "documents": null, 251 | "separators": null 252 | }, 253 | "output_types": [ 254 | "RecursiveCharacterTextSplitter" 255 | ], 256 | "documentation": "https://docs.aiplanet.com/components/text-splitters#recursivecharactertextsplitter", 257 | "beta": true, 258 | "error": null 259 | }, 260 | "id": "RecursiveCharacterTextSplitter-7Jg1J" 261 | }, 262 | "positionAbsolute": { 263 | "x": 771.105649062728, 264 | "y": 458.7525810390156 265 | }, 266 | "selected": false 267 | }, 268 | { 269 | "width": 384, 270 | "height": 564, 271 | "id": "PromptTemplate-dPWZM", 272 | "type": "genericNode", 273 | "position": { 274 | "x": 1305.7564930482927, 275 | "y": 657.2606326353235 276 | }, 277 | "data": { 278 | "type": "PromptTemplate", 279 | "node": { 280 | "template": { 281 | "output_parser": { 282 | "required": false, 283 | "placeholder": "", 284 | "show": false, 285 | "multiline": false, 286 | "password": false, 287 | "name": "output_parser", 288 | "advanced": false, 289 | "dynamic": true, 290 | "info": "", 291 | "type": "BaseOutputParser", 292 | "list": false 293 | }, 294 | "input_types": { 295 | "required": false, 296 | "placeholder": "", 297 | "show": false, 298 | "multiline": false, 299 | "password": false, 300 | "name": "input_types", 301 | "advanced": false, 302 | "dynamic": true, 303 | "info": "", 304 | "type": "dict", 305 | "list": false 306 | }, 307 | "input_variables": { 308 | "required": true, 309 | "placeholder": "", 310 | "show": false, 311 | "multiline": false, 312 | "password": false, 313 | "name": "input_variables", 314 | "advanced": false, 315 | "dynamic": true, 316 | "info": "", 317 | "type": "str", 318 | "list": true, 319 | "value": [ 320 | "context", 321 | "chat_history", 322 | "question" 323 | ] 324 | }, 325 | "partial_variables": { 326 | "required": false, 327 | "placeholder": "", 328 | "show": false, 329 | "multiline": false, 330 | "password": false, 331 | "name": "partial_variables", 332 | "advanced": false, 333 | "dynamic": true, 334 | "info": "", 335 | "type": "dict", 336 | "list": false 337 | }, 338 | "template": { 339 | "required": true, 340 | "placeholder": "", 341 | "show": true, 342 | "multiline": true, 343 | "password": false, 344 | "name": "template", 345 | "advanced": false, 346 | "dynamic": true, 347 | "info": "", 348 | "type": "prompt", 349 | "list": false, 350 | "value": "You are an Image OCR agent, that describes and answer questions based on the given user query. You will be provided with the CONTEXT. 
Based on the CONTEXT answer the user query with truthfulness and honesty\n\nCONTEXT: {context}\n\n{chat_history}\nQUERY: {question}" 351 | }, 352 | "template_format": { 353 | "required": false, 354 | "placeholder": "", 355 | "show": false, 356 | "multiline": false, 357 | "value": "f-string", 358 | "password": false, 359 | "name": "template_format", 360 | "advanced": false, 361 | "dynamic": true, 362 | "info": "", 363 | "type": "str", 364 | "list": false 365 | }, 366 | "validate_template": { 367 | "required": false, 368 | "placeholder": "", 369 | "show": false, 370 | "multiline": false, 371 | "value": false, 372 | "password": false, 373 | "name": "validate_template", 374 | "advanced": false, 375 | "dynamic": true, 376 | "info": "", 377 | "type": "bool", 378 | "list": false 379 | }, 380 | "_type": "PromptTemplate", 381 | "context": { 382 | "required": false, 383 | "placeholder": "", 384 | "show": true, 385 | "multiline": true, 386 | "value": "", 387 | "password": false, 388 | "name": "context", 389 | "display_name": "context", 390 | "advanced": false, 391 | "input_types": [ 392 | "Document", 393 | "BaseOutputParser", 394 | "Input" 395 | ], 396 | "dynamic": false, 397 | "info": "", 398 | "type": "str", 399 | "list": false 400 | }, 401 | "chat_history": { 402 | "required": false, 403 | "placeholder": "", 404 | "show": true, 405 | "multiline": true, 406 | "value": "", 407 | "password": false, 408 | "name": "chat_history", 409 | "display_name": "chat_history", 410 | "advanced": false, 411 | "input_types": [ 412 | "Document", 413 | "BaseOutputParser", 414 | "Input" 415 | ], 416 | "dynamic": false, 417 | "info": "", 418 | "type": "str", 419 | "list": false 420 | }, 421 | "question": { 422 | "required": false, 423 | "placeholder": "", 424 | "show": true, 425 | "multiline": true, 426 | "value": "", 427 | "password": false, 428 | "name": "question", 429 | "display_name": "question", 430 | "advanced": false, 431 | "input_types": [ 432 | "Document", 433 | "BaseOutputParser", 434 | "Input" 435 | ], 436 | "dynamic": false, 437 | "info": "", 438 | "type": "str", 439 | "list": false 440 | } 441 | }, 442 | "description": "A prompt template for a language model.", 443 | "base_classes": [ 444 | "StringPromptTemplate", 445 | "BasePromptTemplate", 446 | "PromptTemplate" 447 | ], 448 | "name": "", 449 | "display_name": "PromptTemplate", 450 | "documentation": "https://docs.aiplanet.com/components/prompts#prompt-template", 451 | "custom_fields": { 452 | "": [ 453 | "context", 454 | "chat_history", 455 | "question" 456 | ], 457 | "template": [ 458 | "context", 459 | "chat_history", 460 | "query" 461 | ] 462 | }, 463 | "output_types": [], 464 | "field_formatters": {}, 465 | "beta": false, 466 | "error": null 467 | }, 468 | "id": "PromptTemplate-dPWZM" 469 | }, 470 | "selected": false, 471 | "positionAbsolute": { 472 | "x": 1305.7564930482927, 473 | "y": 657.2606326353235 474 | }, 475 | "dragging": false 476 | }, 477 | { 478 | "width": 384, 479 | "height": 736, 480 | "id": "AzureChatOpenAI-nZwtv", 481 | "type": "genericNode", 482 | "position": { 483 | "x": 1229.8097186716457, 484 | "y": -788.0258182413977 485 | }, 486 | "data": { 487 | "type": "AzureChatOpenAI", 488 | "node": { 489 | "template": { 490 | "code": { 491 | "dynamic": true, 492 | "required": true, 493 | "placeholder": "", 494 | "show": false, 495 | "multiline": true, 496 | "value": "from typing import Optional\nfrom genflow.interface.custom import CustomComponent\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chat_models import 
AzureChatOpenAI\n\n\nclass AzureChatOpenAILLM(CustomComponent):\n display_name: str = \"AzureChatOpenAI\"\n description: str = \"Azure Chat Open AI Chat&Completion large language models.\"\n\n AZURE_OPENAI_MODELS = [\n \"gpt-4\",\n \"gpt-4-32k\",\n \"gpt-4-vision\",\n ]\n\n def build_config(self):\n return {\n \"model\": {\n \"display_name\": \"Model Name\",\n \"value\": \"gpt-4\",\n \"options\": self.AZURE_OPENAI_MODELS,\n \"required\": True,\n },\n \"api_key\": {\n \"display_name\": \"AzureChatOpenAI API Key\",\n \"required\": True,\n \"password\": True,\n },\n \"api_base\": {\n \"display_name\": \"AzureChatOpenAI API Base\",\n \"required\": True,\n },\n \"api_type\": {\"display_name\": \"AzureChatOpenAI API Type\", \"required\": True},\n \"azure_deployment\": {\n \"display_name\": \"Deployment Name\",\n \"required\": True,\n },\n \"api_version\": {\n \"display_name\": \"API Version\",\n \"value\": \"2023-07-01-preview\",\n \"required\": True,\n \"advanced\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"value\": 0.5,\n \"field_type\": \"float\",\n \"required\": False,\n },\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"value\": 512,\n \"required\": False,\n \"field_type\": \"int\",\n \"advanced\": True,\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n model: str,\n api_base: str,\n api_type: str,\n api_key: str,\n azure_deployment: str,\n api_version: str = \"2023-05-15\",\n temperature: Optional[float] = 0.7,\n max_tokens: Optional[int] = 512,\n ) -> BaseLLM:\n try:\n output = AzureChatOpenAI(\n model_name=model,\n openai_api_base=api_base,\n openai_api_type=api_type,\n openai_api_key=api_key,\n openai_api_version=api_version,\n deployment_name=azure_deployment,\n temperature=temperature,\n max_tokens=max_tokens,\n )\n except Exception as e:\n raise ValueError(\"Could not connect to Azure ChatOpenAI model.\") from e\n return output\n", 497 | "password": false, 498 | "name": "code", 499 | "advanced": false, 500 | "type": "code", 501 | "list": false 502 | }, 503 | "_type": "CustomComponent", 504 | "api_base": { 505 | "required": true, 506 | "placeholder": "", 507 | "show": true, 508 | "multiline": false, 509 | "password": false, 510 | "name": "api_base", 511 | "display_name": "AzureChatOpenAI API Base", 512 | "advanced": false, 513 | "dynamic": false, 514 | "info": "", 515 | "type": "str", 516 | "list": false, 517 | "value": "" 518 | }, 519 | "api_key": { 520 | "required": true, 521 | "placeholder": "", 522 | "show": true, 523 | "multiline": false, 524 | "password": true, 525 | "name": "api_key", 526 | "display_name": "AzureChatOpenAI API Key", 527 | "advanced": false, 528 | "dynamic": false, 529 | "info": "", 530 | "type": "str", 531 | "list": false, 532 | "value": "" 533 | }, 534 | "api_type": { 535 | "required": true, 536 | "placeholder": "", 537 | "show": true, 538 | "multiline": false, 539 | "password": false, 540 | "name": "api_type", 541 | "display_name": "AzureChatOpenAI API Type", 542 | "advanced": false, 543 | "dynamic": false, 544 | "info": "", 545 | "type": "str", 546 | "list": false, 547 | "value": "" 548 | }, 549 | "api_version": { 550 | "required": true, 551 | "placeholder": "", 552 | "show": true, 553 | "multiline": false, 554 | "value": "2023-07-01-preview", 555 | "password": false, 556 | "name": "api_version", 557 | "display_name": "API Version", 558 | "advanced": true, 559 | "dynamic": false, 560 | "info": "", 561 | "type": "str", 562 | "list": false 563 | }, 564 | "azure_deployment": { 565 | "required": true, 566 
| "placeholder": "", 567 | "show": true, 568 | "multiline": false, 569 | "password": false, 570 | "name": "azure_deployment", 571 | "display_name": "Deployment Name", 572 | "advanced": false, 573 | "dynamic": false, 574 | "info": "", 575 | "type": "str", 576 | "list": false, 577 | "value": "" 578 | }, 579 | "max_tokens": { 580 | "required": false, 581 | "placeholder": "", 582 | "show": true, 583 | "multiline": false, 584 | "value": 512, 585 | "password": false, 586 | "name": "max_tokens", 587 | "display_name": "Max Tokens", 588 | "advanced": true, 589 | "dynamic": false, 590 | "info": "", 591 | "type": "int", 592 | "list": false 593 | }, 594 | "model": { 595 | "required": true, 596 | "placeholder": "", 597 | "show": true, 598 | "multiline": false, 599 | "value": "gpt-4", 600 | "password": false, 601 | "options": [ 602 | "gpt-4", 603 | "gpt-4-32k", 604 | "gpt-4-vision" 605 | ], 606 | "name": "model", 607 | "display_name": "Model Name", 608 | "advanced": false, 609 | "dynamic": false, 610 | "info": "", 611 | "type": "str", 612 | "list": true 613 | }, 614 | "temperature": { 615 | "required": false, 616 | "placeholder": "", 617 | "show": true, 618 | "multiline": false, 619 | "value": 0.5, 620 | "password": false, 621 | "name": "temperature", 622 | "display_name": "Temperature", 623 | "advanced": false, 624 | "dynamic": false, 625 | "info": "", 626 | "type": "float", 627 | "list": false 628 | } 629 | }, 630 | "description": "Azure Chat Open AI Chat&Completion large language models.", 631 | "base_classes": [ 632 | "BaseLanguageModel", 633 | "BaseLLM" 634 | ], 635 | "display_name": "AzureChatOpenAI", 636 | "custom_fields": { 637 | "api_base": null, 638 | "api_key": null, 639 | "api_type": null, 640 | "api_version": null, 641 | "azure_deployment": null, 642 | "max_tokens": null, 643 | "model": null, 644 | "temperature": null 645 | }, 646 | "output_types": [ 647 | "AzureChatOpenAI" 648 | ], 649 | "documentation": "", 650 | "beta": true, 651 | "error": null 652 | }, 653 | "id": "AzureChatOpenAI-nZwtv" 654 | }, 655 | "selected": false, 656 | "positionAbsolute": { 657 | "x": 1229.8097186716457, 658 | "y": -788.0258182413977 659 | }, 660 | "dragging": false 661 | }, 662 | { 663 | "width": 384, 664 | "height": 531, 665 | "id": "ConversationBufferMemory-W1iL5", 666 | "type": "genericNode", 667 | "position": { 668 | "x": 765.0747942896021, 669 | "y": -219.39609653519832 670 | }, 671 | "data": { 672 | "type": "ConversationBufferMemory", 673 | "node": { 674 | "template": { 675 | "code": { 676 | "dynamic": true, 677 | "required": true, 678 | "placeholder": "", 679 | "show": false, 680 | "multiline": true, 681 | "value": "from typing import Optional, Union\nfrom langchain.memory.chat_memory import BaseMemory, BaseChatMemory\nfrom langchain.memory.buffer import ConversationBufferMemory\nfrom langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory\n\nfrom genflow import CustomComponent\n\n\nclass ConversationBufferMemoryComponent(CustomComponent):\n display_name: str = \"ConversationBufferMemory\"\n description: str = \"Buffer for storing conversation memory.\"\n documentation: str = (\n \"https://docs.aiplanet.com/components/memories#conversationbuffermemory\"\n )\n beta = False\n\n def build_config(self):\n return {\n \"input_key\": {\n \"display_name\": \"Input Key\",\n \"required\": False,\n \"value\": \"\",\n \"info\": \"The variable to be used as Chat Input when more than one variable is available.\",\n },\n \"memory_key\": {\n \"display_name\": \"Memory Key\",\n \"required\": 
False,\n \"value\": \"history\",\n },\n \"output_key\": {\n \"display_name\": \"Output Key\",\n \"required\": False,\n \"value\": \"\",\n \"info\": \"The variable to be used as Chat Output (e.g. answer in a ConversationalRetrievalChain)\",\n },\n \"return_messages\": {\n \"display_name\": \"Return Messages\",\n \"field_type\": \"bool\",\n \"required\": False,\n \"value\": True,\n },\n \"session_id\": {\n \"display_name\": \"Session ID\",\n \"required\": False,\n \"advanced\": True,\n \"value\": \"genflow_memory_db\",\n },\n \"connection_string\": {\n \"display_name\": \"Connection String\",\n \"required\": False,\n \"advanced\": True,\n \"value\": \"\",\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n input_key: Optional[str] = \"\",\n memory_key: Optional[str] = \"history\",\n output_key: Optional[str] = \"\",\n return_messages: Optional[bool] = True,\n session_id: Optional[str] = \"genflow_memory_db\",\n connection_string: Optional[str] = \"\",\n ) -> Union[BaseMemory, BaseChatMemory]:\n chat_memory = PostgresChatMessageHistory(\n session_id=session_id, connection_string=connection_string\n )\n\n keys = {}\n\n if input_key and input_key != \"\":\n keys[\"input_key\"] = input_key\n\n if output_key and output_key != \"\":\n keys[\"output_key\"] = output_key\n\n return ConversationBufferMemory(\n chat_memory=chat_memory,\n memory_key=memory_key,\n return_messages=return_messages,\n **keys\n )\n", 682 | "password": false, 683 | "name": "code", 684 | "advanced": false, 685 | "type": "code", 686 | "list": false 687 | }, 688 | "_type": "CustomComponent", 689 | "connection_string": { 690 | "required": false, 691 | "placeholder": "", 692 | "show": true, 693 | "multiline": false, 694 | "value": "", 695 | "password": false, 696 | "name": "connection_string", 697 | "display_name": "Connection String", 698 | "advanced": true, 699 | "dynamic": false, 700 | "info": "", 701 | "type": "str", 702 | "list": false 703 | }, 704 | "input_key": { 705 | "required": false, 706 | "placeholder": "", 707 | "show": true, 708 | "multiline": false, 709 | "value": "question", 710 | "password": false, 711 | "name": "input_key", 712 | "display_name": "Input Key", 713 | "advanced": false, 714 | "dynamic": false, 715 | "info": "The variable to be used as Chat Input when more than one variable is available.", 716 | "type": "str", 717 | "list": false 718 | }, 719 | "memory_key": { 720 | "required": false, 721 | "placeholder": "", 722 | "show": true, 723 | "multiline": false, 724 | "value": "chat_history", 725 | "password": false, 726 | "name": "memory_key", 727 | "display_name": "Memory Key", 728 | "advanced": false, 729 | "dynamic": false, 730 | "info": "", 731 | "type": "str", 732 | "list": false 733 | }, 734 | "output_key": { 735 | "required": false, 736 | "placeholder": "", 737 | "show": true, 738 | "multiline": false, 739 | "value": "", 740 | "password": false, 741 | "name": "output_key", 742 | "display_name": "Output Key", 743 | "advanced": false, 744 | "dynamic": false, 745 | "info": "The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)", 746 | "type": "str", 747 | "list": false 748 | }, 749 | "return_messages": { 750 | "required": false, 751 | "placeholder": "", 752 | "show": true, 753 | "multiline": false, 754 | "value": true, 755 | "password": false, 756 | "name": "return_messages", 757 | "display_name": "Return Messages", 758 | "advanced": false, 759 | "dynamic": false, 760 | "info": "", 761 | "type": "bool", 762 | "list": false 763 | }, 764 | "session_id": { 765 | "required": false, 766 | "placeholder": "", 767 | "show": true, 768 | "multiline": false, 769 | "value": "genflow_memory_db", 770 | "password": false, 771 | "name": "session_id", 772 | "display_name": "Session ID", 773 | "advanced": true, 774 | "dynamic": false, 775 | "info": "", 776 | "type": "str", 777 | "list": false 778 | } 779 | }, 780 | "description": "Buffer for storing conversation memory.", 781 | "base_classes": [ 782 | "BaseMemory", 783 | "BaseChatMemory", 784 | "BaseMemory" 785 | ], 786 | "display_name": "ConversationBufferMemory", 787 | "custom_fields": { 788 | "connection_string": null, 789 | "input_key": null, 790 | "memory_key": null, 791 | "output_key": null, 792 | "return_messages": null, 793 | "session_id": null 794 | }, 795 | "output_types": [ 796 | "ConversationBufferMemory" 797 | ], 798 | "documentation": "https://docs.aiplanet.com/components/memories#conversationbuffermemory", 799 | "beta": false, 800 | "error": null 801 | }, 802 | "id": "ConversationBufferMemory-W1iL5" 803 | }, 804 | "selected": false, 805 | "positionAbsolute": { 806 | "x": 765.0747942896021, 807 | "y": -219.39609653519832 808 | }, 809 | "dragging": false 810 | }, 811 | { 812 | "width": 384, 813 | "height": 339, 814 | "id": "LLMChain-Y4l4A", 815 | "type": "genericNode", 816 | "position": { 817 | "x": 1850.077873771481, 818 | "y": 110.90486000931321 819 | }, 820 | "data": { 821 | "type": "LLMChain", 822 | "node": { 823 | "template": { 824 | "code": { 825 | "dynamic": true, 826 | "required": true, 827 | "placeholder": "", 828 | "show": false, 829 | "multiline": true, 830 | "value": "from typing import Optional\n\nfrom langchain.chains import LLMChain\n\nfrom genflow import CustomComponent\nfrom genflow.field_typing import (\n BaseLanguageModel,\n BaseMemory,\n BasePromptTemplate,\n Chain,\n)\n\n\nclass LLMChainComponent(CustomComponent):\n display_name = \"LLMChain\"\n description = \"Chain to run queries against LLMs\"\n documentation: str = \"https://docs.aiplanet.com/components/chains#llm-chain\"\n\n def build_config(self):\n return {\n \"prompt\": {\"display_name\": \"Prompt\"},\n \"llm\": {\"display_name\": \"LLM\"},\n \"memory\": {\"display_name\": \"Memory\"},\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n prompt: BasePromptTemplate,\n llm: BaseLanguageModel,\n memory: Optional[BaseMemory] = None,\n ) -> Chain:\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\n", 831 | "password": false, 832 | "name": "code", 833 | "advanced": false, 834 | "type": "code", 835 | "list": false 836 | }, 837 | "_type": "CustomComponent", 838 | "llm": { 839 | "required": true, 840 | "placeholder": "", 841 | "show": true, 842 | "multiline": false, 843 | "password": false, 844 | "name": "llm", 845 | "display_name": "LLM", 846 | "advanced": false, 847 | "dynamic": false, 848 | "info": "", 849 | "type": "BaseLanguageModel", 850 | "list": false 851 | }, 852 | "memory": { 853 | "required": false, 854 | "placeholder": "", 855 | "show": true, 856 | "multiline": false, 857 | "password": false, 858 | "name": "memory", 859 | 
"display_name": "Memory", 860 | "advanced": false, 861 | "dynamic": false, 862 | "info": "", 863 | "type": "BaseMemory", 864 | "list": false 865 | }, 866 | "prompt": { 867 | "required": true, 868 | "placeholder": "", 869 | "show": true, 870 | "multiline": false, 871 | "password": false, 872 | "name": "prompt", 873 | "display_name": "Prompt", 874 | "advanced": false, 875 | "dynamic": false, 876 | "info": "", 877 | "type": "BasePromptTemplate", 878 | "list": false 879 | } 880 | }, 881 | "description": "Chain to run queries against LLMs", 882 | "base_classes": [ 883 | "Chain" 884 | ], 885 | "display_name": "LLMChain", 886 | "custom_fields": { 887 | "llm": null, 888 | "memory": null, 889 | "prompt": null 890 | }, 891 | "output_types": [ 892 | "LLMChain" 893 | ], 894 | "documentation": "https://docs.aiplanet.com/components/chains#llm-chain", 895 | "beta": true, 896 | "error": null 897 | }, 898 | "id": "LLMChain-Y4l4A" 899 | }, 900 | "selected": false, 901 | "positionAbsolute": { 902 | "x": 1850.077873771481, 903 | "y": 110.90486000931321 904 | }, 905 | "dragging": false 906 | } 907 | ], 908 | "edges": [ 909 | { 910 | "source": "AzureAIDocumentIntelligenceLoader-6ryDk", 911 | "sourceHandle": "{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œAzureAIDocumentIntelligenceLoaderœ,œidœ:œAzureAIDocumentIntelligenceLoader-6ryDkœ}", 912 | "target": "RecursiveCharacterTextSplitter-7Jg1J", 913 | "targetHandle": "{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-7Jg1Jœ,œinputTypesœ:null,œtypeœ:œDocumentœ}", 914 | "data": { 915 | "targetHandle": { 916 | "fieldName": "documents", 917 | "id": "RecursiveCharacterTextSplitter-7Jg1J", 918 | "inputTypes": null, 919 | "type": "Document" 920 | }, 921 | "sourceHandle": { 922 | "baseClasses": [ 923 | "Document" 924 | ], 925 | "dataType": "AzureAIDocumentIntelligenceLoader", 926 | "id": "AzureAIDocumentIntelligenceLoader-6ryDk" 927 | } 928 | }, 929 | "style": { 930 | "stroke": "#555" 931 | }, 932 | "className": "stroke-gray-900 stroke-connection", 933 | "animated": false, 934 | "id": "reactflow__edge-AzureAIDocumentIntelligenceLoader-6ryDk{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œAzureAIDocumentIntelligenceLoaderœ,œidœ:œAzureAIDocumentIntelligenceLoader-6ryDkœ}-RecursiveCharacterTextSplitter-7Jg1J{œfieldNameœ:œdocumentsœ,œidœ:œRecursiveCharacterTextSplitter-7Jg1Jœ,œinputTypesœ:null,œtypeœ:œDocumentœ}" 935 | }, 936 | { 937 | "source": "RecursiveCharacterTextSplitter-7Jg1J", 938 | "sourceHandle": "{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-7Jg1Jœ}", 939 | "target": "PromptTemplate-dPWZM", 940 | "targetHandle": "{œfieldNameœ:œcontextœ,œidœ:œPromptTemplate-dPWZMœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œInputœ],œtypeœ:œstrœ}", 941 | "data": { 942 | "targetHandle": { 943 | "fieldName": "context", 944 | "id": "PromptTemplate-dPWZM", 945 | "inputTypes": [ 946 | "Document", 947 | "BaseOutputParser", 948 | "Input" 949 | ], 950 | "type": "str" 951 | }, 952 | "sourceHandle": { 953 | "baseClasses": [ 954 | "Document" 955 | ], 956 | "dataType": "RecursiveCharacterTextSplitter", 957 | "id": "RecursiveCharacterTextSplitter-7Jg1J" 958 | } 959 | }, 960 | "style": { 961 | "stroke": "#555" 962 | }, 963 | "className": "stroke-gray-900 stroke-connection", 964 | "animated": false, 965 | "id": 
"reactflow__edge-RecursiveCharacterTextSplitter-7Jg1J{œbaseClassesœ:[œDocumentœ],œdataTypeœ:œRecursiveCharacterTextSplitterœ,œidœ:œRecursiveCharacterTextSplitter-7Jg1Jœ}-PromptTemplate-dPWZM{œfieldNameœ:œcontextœ,œidœ:œPromptTemplate-dPWZMœ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œInputœ],œtypeœ:œstrœ}" 966 | }, 967 | { 968 | "source": "AzureChatOpenAI-nZwtv", 969 | "sourceHandle": "{œbaseClassesœ:[œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œAzureChatOpenAIœ,œidœ:œAzureChatOpenAI-nZwtvœ}", 970 | "target": "LLMChain-Y4l4A", 971 | "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œLLMChain-Y4l4Aœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", 972 | "data": { 973 | "targetHandle": { 974 | "fieldName": "llm", 975 | "id": "LLMChain-Y4l4A", 976 | "inputTypes": null, 977 | "type": "BaseLanguageModel" 978 | }, 979 | "sourceHandle": { 980 | "baseClasses": [ 981 | "BaseLanguageModel", 982 | "BaseLLM" 983 | ], 984 | "dataType": "AzureChatOpenAI", 985 | "id": "AzureChatOpenAI-nZwtv" 986 | } 987 | }, 988 | "style": { 989 | "stroke": "#555" 990 | }, 991 | "className": "stroke-foreground stroke-connection", 992 | "animated": false, 993 | "id": "reactflow__edge-AzureChatOpenAI-nZwtv{œbaseClassesœ:[œBaseLanguageModelœ,œBaseLLMœ],œdataTypeœ:œAzureChatOpenAIœ,œidœ:œAzureChatOpenAI-nZwtvœ}-LLMChain-Y4l4A{œfieldNameœ:œllmœ,œidœ:œLLMChain-Y4l4Aœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}" 994 | }, 995 | { 996 | "source": "PromptTemplate-dPWZM", 997 | "sourceHandle": "{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-dPWZMœ}", 998 | "target": "LLMChain-Y4l4A", 999 | "targetHandle": "{œfieldNameœ:œpromptœ,œidœ:œLLMChain-Y4l4Aœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}", 1000 | "data": { 1001 | "targetHandle": { 1002 | "fieldName": "prompt", 1003 | "id": "LLMChain-Y4l4A", 1004 | "inputTypes": null, 1005 | "type": "BasePromptTemplate" 1006 | }, 1007 | "sourceHandle": { 1008 | "baseClasses": [ 1009 | "StringPromptTemplate", 1010 | "BasePromptTemplate", 1011 | "PromptTemplate" 1012 | ], 1013 | "dataType": "PromptTemplate", 1014 | "id": "PromptTemplate-dPWZM" 1015 | } 1016 | }, 1017 | "style": { 1018 | "stroke": "#555" 1019 | }, 1020 | "className": "stroke-foreground stroke-connection", 1021 | "animated": false, 1022 | "id": "reactflow__edge-PromptTemplate-dPWZM{œbaseClassesœ:[œStringPromptTemplateœ,œBasePromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-dPWZMœ}-LLMChain-Y4l4A{œfieldNameœ:œpromptœ,œidœ:œLLMChain-Y4l4Aœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}" 1023 | }, 1024 | { 1025 | "source": "ConversationBufferMemory-W1iL5", 1026 | "sourceHandle": "{œbaseClassesœ:[œBaseMemoryœ,œBaseChatMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-W1iL5œ}", 1027 | "target": "LLMChain-Y4l4A", 1028 | "targetHandle": "{œfieldNameœ:œmemoryœ,œidœ:œLLMChain-Y4l4Aœ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}", 1029 | "data": { 1030 | "targetHandle": { 1031 | "fieldName": "memory", 1032 | "id": "LLMChain-Y4l4A", 1033 | "inputTypes": null, 1034 | "type": "BaseMemory" 1035 | }, 1036 | "sourceHandle": { 1037 | "baseClasses": [ 1038 | "BaseMemory", 1039 | "BaseChatMemory", 1040 | "BaseMemory" 1041 | ], 1042 | "dataType": "ConversationBufferMemory", 1043 | "id": "ConversationBufferMemory-W1iL5" 1044 | } 1045 | }, 1046 | "style": { 1047 | "stroke": "#555" 1048 | }, 1049 | "className": "stroke-foreground stroke-connection", 1050 | "animated": false, 1051 | "id": 
"reactflow__edge-ConversationBufferMemory-W1iL5{œbaseClassesœ:[œBaseMemoryœ,œBaseChatMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-W1iL5œ}-LLMChain-Y4l4A{œfieldNameœ:œmemoryœ,œidœ:œLLMChain-Y4l4Aœ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}" 1052 | } 1053 | ], 1054 | "viewport": { 1055 | "x": 71.0595350165986, 1056 | "y": 374.6796023163354, 1057 | "zoom": 0.6334267601643141 1058 | } 1059 | }, 1060 | "description": "Chat with the text on your image", 1061 | "name": "Image OCR chat", 1062 | "flow_type": "chat" 1063 | } -------------------------------------------------------------------------------- /Indic_Language_Translation.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "b45ab6f8-0816-4e15-bae6-881c76a5c569", 3 | "data": { 4 | "nodes": [ 5 | { 6 | "width": 384, 7 | "height": 470, 8 | "id": "PromptTemplate-NBiBO", 9 | "type": "genericNode", 10 | "position": { 11 | "x": 205.66380483112482, 12 | "y": 1040.1365474246986 13 | }, 14 | "data": { 15 | "type": "PromptTemplate", 16 | "node": { 17 | "template": { 18 | "output_parser": { 19 | "required": false, 20 | "placeholder": "", 21 | "show": false, 22 | "multiline": false, 23 | "password": false, 24 | "name": "output_parser", 25 | "advanced": false, 26 | "dynamic": true, 27 | "info": "", 28 | "type": "BaseOutputParser", 29 | "list": false 30 | }, 31 | "input_types": { 32 | "required": false, 33 | "placeholder": "", 34 | "show": false, 35 | "multiline": false, 36 | "password": false, 37 | "name": "input_types", 38 | "advanced": false, 39 | "dynamic": true, 40 | "info": "", 41 | "type": "dict", 42 | "list": false 43 | }, 44 | "input_variables": { 45 | "required": true, 46 | "placeholder": "", 47 | "show": false, 48 | "multiline": false, 49 | "password": false, 50 | "name": "input_variables", 51 | "advanced": false, 52 | "dynamic": true, 53 | "info": "", 54 | "type": "str", 55 | "list": true, 56 | "value": [ 57 | "text", 58 | "language" 59 | ] 60 | }, 61 | "partial_variables": { 62 | "required": false, 63 | "placeholder": "", 64 | "show": false, 65 | "multiline": false, 66 | "password": false, 67 | "name": "partial_variables", 68 | "advanced": false, 69 | "dynamic": true, 70 | "info": "", 71 | "type": "dict", 72 | "list": false 73 | }, 74 | "template": { 75 | "required": true, 76 | "placeholder": "", 77 | "show": true, 78 | "multiline": true, 79 | "password": false, 80 | "name": "template", 81 | "advanced": false, 82 | "dynamic": true, 83 | "info": "", 84 | "type": "prompt", 85 | "list": false, 86 | "value": "Translate the given TEXT into the given LANGUAGE. You are only Indic language translator, you only support Hindi, Kannada, Telugu, Tamil, Punjabi and Gujrati LANGUAGE. If they LANGUAGE is not among languages, give a feedback to a user saying LANGUAGE not supported. 
\n\nTEXT: {text}\nLANGUAGE: {language}" 87 | }, 88 | "template_format": { 89 | "required": false, 90 | "placeholder": "", 91 | "show": false, 92 | "multiline": false, 93 | "value": "f-string", 94 | "password": false, 95 | "name": "template_format", 96 | "advanced": false, 97 | "dynamic": true, 98 | "info": "", 99 | "type": "str", 100 | "list": false 101 | }, 102 | "validate_template": { 103 | "required": false, 104 | "placeholder": "", 105 | "show": false, 106 | "multiline": false, 107 | "value": false, 108 | "password": false, 109 | "name": "validate_template", 110 | "advanced": false, 111 | "dynamic": true, 112 | "info": "", 113 | "type": "bool", 114 | "list": false 115 | }, 116 | "_type": "PromptTemplate", 117 | "text": { 118 | "required": false, 119 | "placeholder": "", 120 | "show": true, 121 | "multiline": true, 122 | "value": "", 123 | "password": false, 124 | "name": "text", 125 | "display_name": "text", 126 | "advanced": false, 127 | "input_types": [ 128 | "Document", 129 | "BaseOutputParser", 130 | "Input" 131 | ], 132 | "dynamic": false, 133 | "info": "", 134 | "type": "str", 135 | "list": false 136 | }, 137 | "language": { 138 | "required": false, 139 | "placeholder": "", 140 | "show": true, 141 | "multiline": true, 142 | "value": "", 143 | "password": false, 144 | "name": "language", 145 | "display_name": "language", 146 | "advanced": false, 147 | "input_types": [ 148 | "Document", 149 | "BaseOutputParser", 150 | "Input" 151 | ], 152 | "dynamic": false, 153 | "info": "", 154 | "type": "str", 155 | "list": false 156 | } 157 | }, 158 | "description": "A prompt template for a language model.", 159 | "base_classes": [ 160 | "BasePromptTemplate", 161 | "StringPromptTemplate", 162 | "PromptTemplate" 163 | ], 164 | "name": "", 165 | "display_name": "PromptTemplate", 166 | "documentation": "https://python.langchain.com/docs/modules/model_io/prompts/prompt_templates/", 167 | "custom_fields": { 168 | "": [ 169 | "text", 170 | "language" 171 | ], 172 | "template": [ 173 | "text", 174 | "lang" 175 | ] 176 | }, 177 | "output_types": [], 178 | "field_formatters": {}, 179 | "beta": false, 180 | "error": null 181 | }, 182 | "id": "PromptTemplate-NBiBO" 183 | }, 184 | "positionAbsolute": { 185 | "x": 205.66380483112482, 186 | "y": 1040.1365474246986 187 | }, 188 | "selected": false, 189 | "dragging": false 190 | }, 191 | { 192 | "width": 384, 193 | "height": 339, 194 | "id": "LLMChain-owwbr", 195 | "type": "genericNode", 196 | "position": { 197 | "x": 935.6327267229701, 198 | "y": 805.7504241146173 199 | }, 200 | "data": { 201 | "type": "LLMChain", 202 | "node": { 203 | "template": { 204 | "code": { 205 | "dynamic": true, 206 | "required": true, 207 | "placeholder": "", 208 | "show": false, 209 | "multiline": true, 210 | "value": "from genflow import CustomComponent\nfrom langchain.chains import LLMChain\nfrom typing import Optional, Union, Callable\nfrom genflow.field_typing import (\n BasePromptTemplate,\n BaseLanguageModel,\n BaseMemory,\n Chain,\n)\n\n\nclass LLMChainComponent(CustomComponent):\n display_name = \"LLMChain\"\n description = \"Chain to run queries against LLMs\"\n\n def build_config(self):\n return {\n \"prompt\": {\"display_name\": \"Prompt\"},\n \"llm\": {\"display_name\": \"LLM\"},\n \"memory\": {\"display_name\": \"Memory\"},\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n prompt: BasePromptTemplate,\n llm: BaseLanguageModel,\n memory: Optional[BaseMemory] = None,\n ) -> Chain:\n return LLMChain(prompt=prompt, llm=llm, memory=memory)\n", 211 | "password": 
false, 212 | "name": "code", 213 | "advanced": false, 214 | "type": "code", 215 | "list": false 216 | }, 217 | "_type": "CustomComponent", 218 | "llm": { 219 | "required": true, 220 | "placeholder": "", 221 | "show": true, 222 | "multiline": false, 223 | "password": false, 224 | "name": "llm", 225 | "display_name": "LLM", 226 | "advanced": false, 227 | "dynamic": false, 228 | "info": "", 229 | "type": "BaseLanguageModel", 230 | "list": false 231 | }, 232 | "memory": { 233 | "required": false, 234 | "placeholder": "", 235 | "show": true, 236 | "multiline": false, 237 | "password": false, 238 | "name": "memory", 239 | "display_name": "Memory", 240 | "advanced": false, 241 | "dynamic": false, 242 | "info": "", 243 | "type": "BaseMemory", 244 | "list": false 245 | }, 246 | "prompt": { 247 | "required": true, 248 | "placeholder": "", 249 | "show": true, 250 | "multiline": false, 251 | "password": false, 252 | "name": "prompt", 253 | "display_name": "Prompt", 254 | "advanced": false, 255 | "dynamic": false, 256 | "info": "", 257 | "type": "BasePromptTemplate", 258 | "list": false 259 | } 260 | }, 261 | "description": "Chain to run queries against LLMs", 262 | "base_classes": [ 263 | "Chain" 264 | ], 265 | "display_name": "LLMChain", 266 | "custom_fields": { 267 | "llm": null, 268 | "memory": null, 269 | "prompt": null 270 | }, 271 | "output_types": [ 272 | "LLMChain" 273 | ], 274 | "documentation": "", 275 | "beta": true, 276 | "error": null 277 | }, 278 | "id": "LLMChain-owwbr" 279 | }, 280 | "selected": false, 281 | "positionAbsolute": { 282 | "x": 935.6327267229701, 283 | "y": 805.7504241146173 284 | }, 285 | "dragging": false 286 | }, 287 | { 288 | "width": 384, 289 | "height": 736, 290 | "id": "AzureChatOpenAI-TCvCM", 291 | "type": "genericNode", 292 | "position": { 293 | "x": 179.09529943012853, 294 | "y": 172.0221035260039 295 | }, 296 | "data": { 297 | "type": "AzureChatOpenAI", 298 | "node": { 299 | "template": { 300 | "code": { 301 | "dynamic": true, 302 | "required": true, 303 | "placeholder": "", 304 | "show": false, 305 | "multiline": true, 306 | "value": "from typing import Optional\nfrom genflow.interface.custom import CustomComponent\nfrom langchain.llms.base import BaseLLM\nfrom langchain.chat_models import AzureChatOpenAI\n\n\nclass AzureChatOpenAILLM(CustomComponent):\n display_name: str = \"AzureChatOpenAI\"\n description: str = \"Azure Chat Open AI Chat&Completion large language models.\"\n\n AZURE_OPENAI_MODELS = [\n \"gpt-4\",\n \"gpt-4-32k\",\n \"gpt-4-vision\",\n ]\n\n def build_config(self):\n return {\n \"model\": {\n \"display_name\": \"Model Name\",\n \"value\": \"gpt-4\",\n \"options\": self.AZURE_OPENAI_MODELS,\n \"required\": True,\n },\n \"api_key\": {\n \"display_name\": \"AzureChatOpenAI API Key\",\n \"required\": True,\n \"password\": True,\n },\n \"api_base\": {\n \"display_name\": \"AzureChatOpenAI API Base\",\n \"required\": True,\n },\n \"api_type\": {\"display_name\": \"AzureChatOpenAI API Type\", \"required\": True},\n \"azure_deployment\": {\n \"display_name\": \"Deployment Name\",\n \"required\": True,\n },\n \"api_version\": {\n \"display_name\": \"API Version\",\n \"value\": \"2023-07-01-preview\",\n \"required\": True,\n \"advanced\": True,\n },\n \"temperature\": {\n \"display_name\": \"Temperature\",\n \"value\": 0.5,\n \"field_type\": \"float\",\n \"required\": False,\n },\n \"max_tokens\": {\n \"display_name\": \"Max Tokens\",\n \"value\": 512,\n \"required\": False,\n \"field_type\": \"int\",\n \"advanced\": True,\n },\n \"code\": {\"show\": 
False},\n }\n\n def build(\n self,\n model: str,\n api_base: str,\n api_type: str,\n api_key: str,\n azure_deployment: str,\n api_version: str = \"2023-05-15\",\n temperature: Optional[float] = 0.7,\n max_tokens: Optional[int] = 512,\n ) -> BaseLLM:\n try:\n output = AzureChatOpenAI(\n model_name=model,\n openai_api_base=api_base,\n openai_api_type=api_type,\n openai_api_key=api_key,\n openai_api_version=api_version,\n deployment_name=azure_deployment,\n temperature=temperature,\n max_tokens=max_tokens,\n )\n except Exception as e:\n raise ValueError(\"Could not connect to Azure ChatOpenAI model.\") from e\n return output\n", 307 | "password": false, 308 | "name": "code", 309 | "advanced": false, 310 | "type": "code", 311 | "list": false 312 | }, 313 | "_type": "CustomComponent", 314 | "api_base": { 315 | "required": true, 316 | "placeholder": "", 317 | "show": true, 318 | "multiline": false, 319 | "password": false, 320 | "name": "api_base", 321 | "display_name": "AzureChatOpenAI API Base", 322 | "advanced": false, 323 | "dynamic": false, 324 | "info": "", 325 | "type": "str", 326 | "list": false, 327 | "value": "" 328 | }, 329 | "api_key": { 330 | "required": true, 331 | "placeholder": "", 332 | "show": true, 333 | "multiline": false, 334 | "password": true, 335 | "name": "api_key", 336 | "display_name": "AzureChatOpenAI API Key", 337 | "advanced": false, 338 | "dynamic": false, 339 | "info": "", 340 | "type": "str", 341 | "list": false, 342 | "value": "" 343 | }, 344 | "api_type": { 345 | "required": true, 346 | "placeholder": "", 347 | "show": true, 348 | "multiline": false, 349 | "password": false, 350 | "name": "api_type", 351 | "display_name": "AzureChatOpenAI API Type", 352 | "advanced": false, 353 | "dynamic": false, 354 | "info": "", 355 | "type": "str", 356 | "list": false, 357 | "value": "azure" 358 | }, 359 | "api_version": { 360 | "required": true, 361 | "placeholder": "", 362 | "show": true, 363 | "multiline": false, 364 | "value": "2023-07-01-preview", 365 | "password": false, 366 | "name": "api_version", 367 | "display_name": "API Version", 368 | "advanced": true, 369 | "dynamic": false, 370 | "info": "", 371 | "type": "str", 372 | "list": false 373 | }, 374 | "azure_deployment": { 375 | "required": true, 376 | "placeholder": "", 377 | "show": true, 378 | "multiline": false, 379 | "password": false, 380 | "name": "azure_deployment", 381 | "display_name": "Deployment Name", 382 | "advanced": false, 383 | "dynamic": false, 384 | "info": "", 385 | "type": "str", 386 | "list": false, 387 | "value": "" 388 | }, 389 | "max_tokens": { 390 | "required": false, 391 | "placeholder": "", 392 | "show": true, 393 | "multiline": false, 394 | "value": 512, 395 | "password": false, 396 | "name": "max_tokens", 397 | "display_name": "Max Tokens", 398 | "advanced": true, 399 | "dynamic": false, 400 | "info": "", 401 | "type": "int", 402 | "list": false 403 | }, 404 | "model": { 405 | "required": true, 406 | "placeholder": "", 407 | "show": true, 408 | "multiline": false, 409 | "value": "gpt-4", 410 | "password": false, 411 | "options": [ 412 | "gpt-4", 413 | "gpt-4-32k", 414 | "gpt-4-vision" 415 | ], 416 | "name": "model", 417 | "display_name": "Model Name", 418 | "advanced": false, 419 | "dynamic": false, 420 | "info": "", 421 | "type": "str", 422 | "list": true 423 | }, 424 | "temperature": { 425 | "required": false, 426 | "placeholder": "", 427 | "show": true, 428 | "multiline": false, 429 | "value": "0.1", 430 | "password": false, 431 | "name": "temperature", 432 | "display_name": 
"Temperature", 433 | "advanced": false, 434 | "dynamic": false, 435 | "info": "", 436 | "type": "float", 437 | "list": false 438 | } 439 | }, 440 | "description": "Azure Chat Open AI Chat&Completion large language models.", 441 | "base_classes": [ 442 | "BaseLLM", 443 | "BaseLanguageModel" 444 | ], 445 | "display_name": "AzureChatOpenAI", 446 | "custom_fields": { 447 | "api_base": null, 448 | "api_key": null, 449 | "api_type": null, 450 | "api_version": null, 451 | "azure_deployment": null, 452 | "max_tokens": null, 453 | "model": null, 454 | "temperature": null 455 | }, 456 | "output_types": [ 457 | "AzureChatOpenAI" 458 | ], 459 | "documentation": "", 460 | "beta": true, 461 | "error": null 462 | }, 463 | "id": "AzureChatOpenAI-TCvCM" 464 | }, 465 | "selected": false, 466 | "positionAbsolute": { 467 | "x": 179.09529943012853, 468 | "y": 172.0221035260039 469 | }, 470 | "dragging": false 471 | }, 472 | { 473 | "width": 384, 474 | "height": 531, 475 | "id": "ConversationBufferMemory-3h7pZ", 476 | "type": "genericNode", 477 | "position": { 478 | "x": -320.6140741652454, 479 | "y": 470.76547255919036 480 | }, 481 | "data": { 482 | "type": "ConversationBufferMemory", 483 | "node": { 484 | "template": { 485 | "code": { 486 | "dynamic": true, 487 | "required": true, 488 | "placeholder": "", 489 | "show": false, 490 | "multiline": true, 491 | "value": "from typing import Optional, Union\nfrom langchain.memory.chat_memory import BaseMemory, BaseChatMemory\nfrom langchain.memory.buffer import ConversationBufferMemory\nfrom langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory\n\nfrom genflow import CustomComponent\n\n\nclass ConversationBufferMemoryComponent(CustomComponent):\n display_name: str = \"ConversationBufferMemory\"\n description: str = \"Buffer for storing conversation memory.\"\n documentation: str = (\n \"https://docs.aiplanet.com/components/memories#conversationbuffermemory\"\n )\n beta = False\n\n def build_config(self):\n return {\n \"input_key\": {\n \"display_name\": \"Input Key\",\n \"required\": False,\n \"value\": \"\",\n \"info\": \"The variable to be used as Chat Input when more than one variable is available.\",\n },\n \"memory_key\": {\n \"display_name\": \"Memory Key\",\n \"required\": False,\n \"value\": \"history\",\n },\n \"output_key\": {\n \"display_name\": \"Output Key\",\n \"required\": False,\n \"value\": \"\",\n \"info\": \"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\n },\n \"return_messages\": {\n \"display_name\": \"Return Messages\",\n \"field_type\": \"bool\",\n \"required\": False,\n \"value\": True,\n },\n \"session_id\": {\n \"display_name\": \"Session ID\",\n \"required\": False,\n \"advanced\": True,\n \"value\": \"genflow_memory_db\",\n },\n \"connection_string\": {\n \"display_name\": \"Connection String\",\n \"required\": False,\n \"advanced\": True,\n \"value\": \"\",\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n input_key: Optional[str] = \"\",\n memory_key: Optional[str] = \"history\",\n output_key: Optional[str] = \"\",\n return_messages: Optional[bool] = True,\n session_id: Optional[str] = \"genflow_memory_db\",\n connection_string: Optional[str] = \"\",\n ) -> Union[BaseMemory, BaseChatMemory]:\n chat_memory = PostgresChatMessageHistory(\n session_id=session_id, connection_string=connection_string\n )\n\n keys = {}\n\n if input_key and input_key != \"\":\n keys[\"input_key\"] = input_key\n\n if output_key and output_key != \"\":\n keys[\"output_key\"] = output_key\n\n return ConversationBufferMemory(\n chat_memory=chat_memory,\n memory_key=memory_key,\n return_messages=return_messages,\n **keys\n )\n", 492 | "password": false, 493 | "name": "code", 494 | "advanced": false, 495 | "type": "code", 496 | "list": false 497 | }, 498 | "_type": "CustomComponent", 499 | "connection_string": { 500 | "required": false, 501 | "placeholder": "", 502 | "show": true, 503 | "multiline": false, 504 | "value": "", 505 | "password": false, 506 | "name": "connection_string", 507 | "display_name": "Connection String", 508 | "advanced": true, 509 | "dynamic": false, 510 | "info": "", 511 | "type": "str", 512 | "list": false 513 | }, 514 | "input_key": { 515 | "required": false, 516 | "placeholder": "", 517 | "show": true, 518 | "multiline": false, 519 | "value": "text", 520 | "password": false, 521 | "name": "input_key", 522 | "display_name": "Input Key", 523 | "advanced": false, 524 | "dynamic": false, 525 | "info": "The variable to be used as Chat Input when more than one variable is available.", 526 | "type": "str", 527 | "list": false 528 | }, 529 | "memory_key": { 530 | "required": false, 531 | "placeholder": "", 532 | "show": true, 533 | "multiline": false, 534 | "value": "history", 535 | "password": false, 536 | "name": "memory_key", 537 | "display_name": "Memory Key", 538 | "advanced": false, 539 | "dynamic": false, 540 | "info": "", 541 | "type": "str", 542 | "list": false 543 | }, 544 | "output_key": { 545 | "required": false, 546 | "placeholder": "", 547 | "show": true, 548 | "multiline": false, 549 | "value": "", 550 | "password": false, 551 | "name": "output_key", 552 | "display_name": "Output Key", 553 | "advanced": false, 554 | "dynamic": false, 555 | "info": "The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)", 556 | "type": "str", 557 | "list": false 558 | }, 559 | "return_messages": { 560 | "required": false, 561 | "placeholder": "", 562 | "show": true, 563 | "multiline": false, 564 | "value": true, 565 | "password": false, 566 | "name": "return_messages", 567 | "display_name": "Return Messages", 568 | "advanced": false, 569 | "dynamic": false, 570 | "info": "", 571 | "type": "bool", 572 | "list": false 573 | }, 574 | "session_id": { 575 | "required": false, 576 | "placeholder": "", 577 | "show": true, 578 | "multiline": false, 579 | "value": "genflow_memory_db", 580 | "password": false, 581 | "name": "session_id", 582 | "display_name": "Session ID", 583 | "advanced": true, 584 | "dynamic": false, 585 | "info": "", 586 | "type": "str", 587 | "list": false 588 | } 589 | }, 590 | "description": "Buffer for storing conversation memory.", 591 | "base_classes": [ 592 | "BaseMemory", 593 | "BaseChatMemory", 594 | "BaseMemory" 595 | ], 596 | "display_name": "ConversationBufferMemory", 597 | "custom_fields": { 598 | "connection_string": null, 599 | "input_key": null, 600 | "memory_key": null, 601 | "output_key": null, 602 | "return_messages": null, 603 | "session_id": null 604 | }, 605 | "output_types": [ 606 | "ConversationBufferMemory" 607 | ], 608 | "documentation": "https://docs.aiplanet.com/components/memories#conversationbuffermemory", 609 | "beta": false, 610 | "error": null 611 | }, 612 | "id": "ConversationBufferMemory-3h7pZ" 613 | }, 614 | "selected": false, 615 | "positionAbsolute": { 616 | "x": -320.6140741652454, 617 | "y": 470.76547255919036 618 | }, 619 | "dragging": false 620 | }, 621 | { 622 | "width": 384, 623 | "height": 505, 624 | "id": "TextGenerationOutput-ZOU1O", 625 | "type": "genericNode", 626 | "position": { 627 | "x": 1534.1845768426365, 628 | "y": 977.1507829611686 629 | }, 630 | "data": { 631 | "type": "TextGenerationOutput", 632 | "node": { 633 | "template": { 634 | "chain": { 635 | "required": false, 636 | "placeholder": "", 637 | "show": true, 638 | "multiline": false, 639 | "password": false, 640 | "name": "chain", 641 | "display_name": "Chain", 642 | "advanced": false, 643 | "dynamic": false, 644 | "info": "", 645 | "type": "Chain", 646 | "list": false 647 | }, 648 | "documents": { 649 | "required": false, 650 | "placeholder": "", 651 | "show": true, 652 | "multiline": false, 653 | "password": false, 654 | "name": "documents", 655 | "display_name": "Documents", 656 | "advanced": false, 657 | "dynamic": false, 658 | "info": "", 659 | "type": "Document", 660 | "list": true 661 | }, 662 | "download": { 663 | "required": false, 664 | "placeholder": "", 665 | "show": true, 666 | "multiline": false, 667 | "password": false, 668 | "name": "download", 669 | "advanced": false, 670 | "dynamic": false, 671 | "info": "", 672 | "type": "output", 673 | "list": false 674 | }, 675 | "query": { 676 | "required": false, 677 | "placeholder": "", 678 | "show": true, 679 | "multiline": false, 680 | "password": false, 681 | "name": "query", 682 | "display_name": "Query", 683 | "advanced": false, 684 | "input_types": [ 685 | "Input" 686 | ], 687 | "dynamic": false, 688 | "info": "Provide a Query to invoke a chain if stack doesn't contain prompt template and memory.", 689 | "type": "str", 690 | "list": false 691 | }, 692 | "_type": "TextGenerationOutput" 693 | }, 694 | "description": "Text Generation Output is used to download the Documents, Connect a component that returns Documents or connect a Chain Component and provide the Query to invoke a 
chain.", 695 | "base_classes": [ 696 | "TextGenerationOutput" 697 | ], 698 | "display_name": "TextGenerationOutput", 699 | "custom_fields": {}, 700 | "output_types": [ 701 | "Output" 702 | ], 703 | "documentation": "https://docs.aiplanet.com/components/outputs", 704 | "beta": false, 705 | "error": null 706 | }, 707 | "id": "TextGenerationOutput-ZOU1O" 708 | }, 709 | "selected": false, 710 | "positionAbsolute": { 711 | "x": 1534.1845768426365, 712 | "y": 977.1507829611686 713 | }, 714 | "dragging": false 715 | } 716 | ], 717 | "edges": [ 718 | { 719 | "source": "AzureChatOpenAI-TCvCM", 720 | "sourceHandle": "{œbaseClassesœ:[œBaseLLMœ,œBaseLanguageModelœ],œdataTypeœ:œAzureChatOpenAIœ,œidœ:œAzureChatOpenAI-TCvCMœ}", 721 | "target": "LLMChain-owwbr", 722 | "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œLLMChain-owwbrœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", 723 | "data": { 724 | "targetHandle": { 725 | "fieldName": "llm", 726 | "id": "LLMChain-owwbr", 727 | "inputTypes": null, 728 | "type": "BaseLanguageModel" 729 | }, 730 | "sourceHandle": { 731 | "baseClasses": [ 732 | "BaseLLM", 733 | "BaseLanguageModel" 734 | ], 735 | "dataType": "AzureChatOpenAI", 736 | "id": "AzureChatOpenAI-TCvCM" 737 | } 738 | }, 739 | "style": { 740 | "stroke": "#555" 741 | }, 742 | "className": "stroke-gray-900 stroke-connection", 743 | "animated": false, 744 | "id": "reactflow__edge-AzureChatOpenAI-TCvCM{œbaseClassesœ:[œBaseLLMœ,œBaseLanguageModelœ],œdataTypeœ:œAzureChatOpenAIœ,œidœ:œAzureChatOpenAI-TCvCMœ}-LLMChain-owwbr{œfieldNameœ:œllmœ,œidœ:œLLMChain-owwbrœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}" 745 | }, 746 | { 747 | "source": "PromptTemplate-NBiBO", 748 | "sourceHandle": "{œbaseClassesœ:[œBasePromptTemplateœ,œStringPromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-NBiBOœ}", 749 | "target": "LLMChain-owwbr", 750 | "targetHandle": "{œfieldNameœ:œpromptœ,œidœ:œLLMChain-owwbrœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}", 751 | "data": { 752 | "targetHandle": { 753 | "fieldName": "prompt", 754 | "id": "LLMChain-owwbr", 755 | "inputTypes": null, 756 | "type": "BasePromptTemplate" 757 | }, 758 | "sourceHandle": { 759 | "baseClasses": [ 760 | "BasePromptTemplate", 761 | "StringPromptTemplate", 762 | "PromptTemplate" 763 | ], 764 | "dataType": "PromptTemplate", 765 | "id": "PromptTemplate-NBiBO" 766 | } 767 | }, 768 | "style": { 769 | "stroke": "#555" 770 | }, 771 | "className": "stroke-gray-900 stroke-connection", 772 | "animated": false, 773 | "id": "reactflow__edge-PromptTemplate-NBiBO{œbaseClassesœ:[œBasePromptTemplateœ,œStringPromptTemplateœ,œPromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-NBiBOœ}-LLMChain-owwbr{œfieldNameœ:œpromptœ,œidœ:œLLMChain-owwbrœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}" 774 | }, 775 | { 776 | "source": "ConversationBufferMemory-3h7pZ", 777 | "sourceHandle": "{œbaseClassesœ:[œBaseMemoryœ,œBaseChatMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-3h7pZœ}", 778 | "target": "LLMChain-owwbr", 779 | "targetHandle": "{œfieldNameœ:œmemoryœ,œidœ:œLLMChain-owwbrœ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}", 780 | "data": { 781 | "targetHandle": { 782 | "fieldName": "memory", 783 | "id": "LLMChain-owwbr", 784 | "inputTypes": null, 785 | "type": "BaseMemory" 786 | }, 787 | "sourceHandle": { 788 | "baseClasses": [ 789 | "BaseMemory", 790 | "BaseChatMemory", 791 | "BaseMemory" 792 | ], 793 | "dataType": "ConversationBufferMemory", 794 | "id": "ConversationBufferMemory-3h7pZ" 795 | } 796 | }, 797 
| "style": { 798 | "stroke": "#555" 799 | }, 800 | "className": "stroke-foreground stroke-connection", 801 | "animated": false, 802 | "id": "reactflow__edge-ConversationBufferMemory-3h7pZ{œbaseClassesœ:[œBaseMemoryœ,œBaseChatMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-3h7pZœ}-LLMChain-owwbr{œfieldNameœ:œmemoryœ,œidœ:œLLMChain-owwbrœ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}" 803 | }, 804 | { 805 | "source": "LLMChain-owwbr", 806 | "sourceHandle": "{œbaseClassesœ:[œChainœ],œdataTypeœ:œLLMChainœ,œidœ:œLLMChain-owwbrœ}", 807 | "target": "TextGenerationOutput-ZOU1O", 808 | "targetHandle": "{œfieldNameœ:œchainœ,œidœ:œTextGenerationOutput-ZOU1Oœ,œinputTypesœ:null,œtypeœ:œChainœ}", 809 | "data": { 810 | "targetHandle": { 811 | "fieldName": "chain", 812 | "id": "TextGenerationOutput-ZOU1O", 813 | "inputTypes": null, 814 | "type": "Chain" 815 | }, 816 | "sourceHandle": { 817 | "baseClasses": [ 818 | "Chain" 819 | ], 820 | "dataType": "LLMChain", 821 | "id": "LLMChain-owwbr" 822 | } 823 | }, 824 | "style": { 825 | "stroke": "#555" 826 | }, 827 | "className": "stroke-foreground stroke-connection", 828 | "animated": false, 829 | "id": "reactflow__edge-LLMChain-owwbr{œbaseClassesœ:[œChainœ],œdataTypeœ:œLLMChainœ,œidœ:œLLMChain-owwbrœ}-TextGenerationOutput-ZOU1O{œfieldNameœ:œchainœ,œidœ:œTextGenerationOutput-ZOU1Oœ,œinputTypesœ:null,œtypeœ:œChainœ}" 830 | } 831 | ], 832 | "viewport": { 833 | "x": 426.4187598211104, 834 | "y": 57.07638393158118, 835 | "zoom": 0.6003873170372308 836 | } 837 | }, 838 | "description": "Language Translation assistant for Indian language support: Hindi, Kannada, Tamil, Telugu, Punjabi and Gujarati ", 839 | "name": "Indic_Language", 840 | "flow_type": "chat" 841 | } -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Example Stacks for GenAI Stack Platform 📄 2 | 3 | Welcome to GenAI Stack! Here, you'll discover a variety of use cases and example stacks powered by Large Language Models (LLMs). You can seamlessly download, deploy and run them on the GenAI Stack platform. [GenAI Stack platform]([url](https://app.aiplanet.com)). Go give it a spin! 4 | 5 | ## Getting Started 🚀 6 | 7 | To use a stack example in GenAI Stack platform, follow these simple steps: 8 | 9 | 1. **Browse Stacks**: Explore the available stacks in this repository to find one suitable for your needs. 10 | 11 | 2. **Download Stack**: Once you've found a stack you'd like to use, download that stack file to your local machine. 12 | 13 | 3. **Import into GenAI Stack**: 14 | 15 | - Go to the GenAI Stack platform. 16 | - Navigate to the My Stacks Page. 17 | - On top right corner, click on New Stack. 18 | - A Empty Stack page opens, On top left corner, click on ⬆️ (Import) button and select the stack file which you downloaded. 19 | 20 | 4. **Start Using**: Once imported, you can start using it. 
21 | -------------------------------------------------------------------------------- /Simple_Chat.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "99dd3c18-61b2-463c-ace1-afe78c58b139", 3 | "data": { 4 | "nodes": [ 5 | { 6 | "width": 384, 7 | "height": 629, 8 | "id": "ChatOpenAI-LaMPL", 9 | "type": "genericNode", 10 | "position": { 11 | "x": 597.1974640856098, 12 | "y": -74.99878439797845 13 | }, 14 | "data": { 15 | "type": "ChatOpenAI", 16 | "node": { 17 | "template": { 18 | "callbacks": { 19 | "required": false, 20 | "placeholder": "", 21 | "show": false, 22 | "multiline": false, 23 | "password": false, 24 | "name": "callbacks", 25 | "advanced": false, 26 | "dynamic": false, 27 | "info": "", 28 | "type": "langchain.callbacks.base.BaseCallbackHandler", 29 | "list": true 30 | }, 31 | "async_client": { 32 | "required": false, 33 | "placeholder": "", 34 | "show": false, 35 | "multiline": false, 36 | "password": false, 37 | "name": "async_client", 38 | "advanced": false, 39 | "dynamic": false, 40 | "info": "", 41 | "type": "Any", 42 | "list": false 43 | }, 44 | "cache": { 45 | "required": false, 46 | "placeholder": "", 47 | "show": false, 48 | "multiline": false, 49 | "password": false, 50 | "name": "cache", 51 | "advanced": false, 52 | "dynamic": false, 53 | "info": "", 54 | "type": "bool", 55 | "list": false 56 | }, 57 | "client": { 58 | "required": false, 59 | "placeholder": "", 60 | "show": false, 61 | "multiline": false, 62 | "password": false, 63 | "name": "client", 64 | "advanced": false, 65 | "dynamic": false, 66 | "info": "", 67 | "type": "Any", 68 | "list": false 69 | }, 70 | "default_headers": { 71 | "required": false, 72 | "placeholder": "", 73 | "show": false, 74 | "multiline": false, 75 | "password": false, 76 | "name": "default_headers", 77 | "advanced": false, 78 | "dynamic": false, 79 | "info": "", 80 | "type": "dict", 81 | "list": false 82 | }, 83 | "default_query": { 84 | "required": false, 85 | "placeholder": "", 86 | "show": false, 87 | "multiline": false, 88 | "password": false, 89 | "name": "default_query", 90 | "advanced": false, 91 | "dynamic": false, 92 | "info": "", 93 | "type": "dict", 94 | "list": false 95 | }, 96 | "http_client": { 97 | "required": false, 98 | "placeholder": "", 99 | "show": false, 100 | "multiline": false, 101 | "password": false, 102 | "name": "http_client", 103 | "advanced": false, 104 | "dynamic": false, 105 | "info": "", 106 | "type": "Any", 107 | "list": false 108 | }, 109 | "max_retries": { 110 | "required": false, 111 | "placeholder": "", 112 | "show": false, 113 | "multiline": false, 114 | "value": 2, 115 | "password": false, 116 | "name": "max_retries", 117 | "advanced": false, 118 | "dynamic": false, 119 | "info": "", 120 | "type": "int", 121 | "list": false 122 | }, 123 | "max_tokens": { 124 | "required": false, 125 | "placeholder": "", 126 | "show": true, 127 | "multiline": false, 128 | "password": true, 129 | "name": "max_tokens", 130 | "advanced": false, 131 | "dynamic": false, 132 | "info": "", 133 | "type": "int", 134 | "list": false, 135 | "value": "" 136 | }, 137 | "metadata": { 138 | "required": false, 139 | "placeholder": "", 140 | "show": false, 141 | "multiline": false, 142 | "password": false, 143 | "name": "metadata", 144 | "advanced": false, 145 | "dynamic": false, 146 | "info": "", 147 | "type": "dict", 148 | "list": false 149 | }, 150 | "model_kwargs": { 151 | "required": false, 152 | "placeholder": "", 153 | "show": true, 154 | "multiline": false, 155 | 
"password": false, 156 | "name": "model_kwargs", 157 | "advanced": true, 158 | "dynamic": false, 159 | "info": "", 160 | "type": "dict", 161 | "list": false 162 | }, 163 | "model_name": { 164 | "required": false, 165 | "placeholder": "", 166 | "show": true, 167 | "multiline": false, 168 | "value": "gpt-4-1106-preview", 169 | "password": false, 170 | "options": [ 171 | "gpt-4-1106-preview", 172 | "gpt-4", 173 | "gpt-4-32k", 174 | "gpt-3.5-turbo", 175 | "gpt-3.5-turbo-16k" 176 | ], 177 | "name": "model_name", 178 | "advanced": false, 179 | "dynamic": false, 180 | "info": "", 181 | "type": "str", 182 | "list": true 183 | }, 184 | "n": { 185 | "required": false, 186 | "placeholder": "", 187 | "show": false, 188 | "multiline": false, 189 | "value": 1, 190 | "password": false, 191 | "name": "n", 192 | "advanced": false, 193 | "dynamic": false, 194 | "info": "", 195 | "type": "int", 196 | "list": false 197 | }, 198 | "openai_api_base": { 199 | "required": false, 200 | "placeholder": "", 201 | "show": true, 202 | "multiline": false, 203 | "password": false, 204 | "name": "openai_api_base", 205 | "display_name": "OpenAI API Base", 206 | "advanced": false, 207 | "dynamic": false, 208 | "info": "\nThe base URL of the OpenAI API. Defaults to https://api.openai.com/v1.\n\nYou can change this to use other APIs like JinaChat, LocalAI and Prem.\n", 209 | "type": "str", 210 | "list": false, 211 | "value": "" 212 | }, 213 | "openai_api_key": { 214 | "required": false, 215 | "placeholder": "", 216 | "show": true, 217 | "multiline": false, 218 | "value": "", 219 | "password": true, 220 | "name": "openai_api_key", 221 | "display_name": "OpenAI API Key", 222 | "advanced": false, 223 | "dynamic": false, 224 | "info": "", 225 | "type": "str", 226 | "list": false 227 | }, 228 | "openai_organization": { 229 | "required": false, 230 | "placeholder": "", 231 | "show": false, 232 | "multiline": false, 233 | "password": false, 234 | "name": "openai_organization", 235 | "display_name": "OpenAI Organization", 236 | "advanced": false, 237 | "dynamic": false, 238 | "info": "", 239 | "type": "str", 240 | "list": false 241 | }, 242 | "openai_proxy": { 243 | "required": false, 244 | "placeholder": "", 245 | "show": false, 246 | "multiline": false, 247 | "password": false, 248 | "name": "openai_proxy", 249 | "display_name": "OpenAI Proxy", 250 | "advanced": false, 251 | "dynamic": false, 252 | "info": "", 253 | "type": "str", 254 | "list": false 255 | }, 256 | "request_timeout": { 257 | "required": false, 258 | "placeholder": "", 259 | "show": false, 260 | "multiline": false, 261 | "password": false, 262 | "name": "request_timeout", 263 | "advanced": false, 264 | "dynamic": false, 265 | "info": "", 266 | "type": "float", 267 | "list": false 268 | }, 269 | "streaming": { 270 | "required": false, 271 | "placeholder": "", 272 | "show": false, 273 | "multiline": false, 274 | "value": false, 275 | "password": false, 276 | "name": "streaming", 277 | "advanced": false, 278 | "dynamic": false, 279 | "info": "", 280 | "type": "bool", 281 | "list": false 282 | }, 283 | "tags": { 284 | "required": false, 285 | "placeholder": "", 286 | "show": false, 287 | "multiline": false, 288 | "password": false, 289 | "name": "tags", 290 | "advanced": false, 291 | "dynamic": false, 292 | "info": "", 293 | "type": "str", 294 | "list": true 295 | }, 296 | "temperature": { 297 | "required": false, 298 | "placeholder": "", 299 | "show": true, 300 | "multiline": false, 301 | "value": 0.7, 302 | "password": false, 303 | "name": "temperature", 304 | 
"advanced": false, 305 | "dynamic": false, 306 | "info": "", 307 | "type": "float", 308 | "list": false 309 | }, 310 | "tiktoken_model_name": { 311 | "required": false, 312 | "placeholder": "", 313 | "show": false, 314 | "multiline": false, 315 | "password": false, 316 | "name": "tiktoken_model_name", 317 | "advanced": false, 318 | "dynamic": false, 319 | "info": "", 320 | "type": "str", 321 | "list": false 322 | }, 323 | "verbose": { 324 | "required": false, 325 | "placeholder": "", 326 | "show": false, 327 | "multiline": false, 328 | "value": false, 329 | "password": false, 330 | "name": "verbose", 331 | "advanced": false, 332 | "dynamic": false, 333 | "info": "", 334 | "type": "bool", 335 | "list": false 336 | }, 337 | "_type": "ChatOpenAI" 338 | }, 339 | "description": "`OpenAI` Chat large language models API.", 340 | "base_classes": [ 341 | "ChatOpenAI", 342 | "BaseLanguageModel", 343 | "BaseChatModel", 344 | "BaseLLM" 345 | ], 346 | "display_name": "ChatOpenAI", 347 | "custom_fields": {}, 348 | "output_types": [], 349 | "documentation": "https://docs.aiplanet.com/components/large-language-models#chatopenai", 350 | "beta": false, 351 | "error": null 352 | }, 353 | "id": "ChatOpenAI-LaMPL" 354 | }, 355 | "selected": false, 356 | "positionAbsolute": { 357 | "x": 597.1974640856098, 358 | "y": -74.99878439797845 359 | }, 360 | "dragging": false 361 | }, 362 | { 363 | "width": 384, 364 | "height": 311, 365 | "id": "ConversationChain-zrNbw", 366 | "type": "genericNode", 367 | "position": { 368 | "x": 1241.9895683290458, 369 | "y": 420.29741173043556 370 | }, 371 | "data": { 372 | "type": "ConversationChain", 373 | "node": { 374 | "template": { 375 | "code": { 376 | "dynamic": true, 377 | "required": true, 378 | "placeholder": "", 379 | "show": false, 380 | "multiline": true, 381 | "value": "from genflow import CustomComponent\nfrom langchain.chains import ConversationChain\nfrom typing import Optional, Union, Callable\nfrom genflow.field_typing import BaseLanguageModel, BaseMemory, Chain\n\n\nclass ConversationChainComponent(CustomComponent):\n display_name = \"ConversationChain\"\n description = \"Chain to have a conversation and load context from memory.\"\n documentation: str = \"https://docs.aiplanet.com/components/chains#conversationalretrievalchain\"\n\n def build_config(self):\n return {\n \"prompt\": {\"display_name\": \"Prompt\"},\n \"llm\": {\"display_name\": \"LLM\"},\n \"memory\": {\n \"display_name\": \"Memory\",\n \"info\": \"Memory to load context from. 
If none is provided, a ConversationBufferMemory will be used.\",\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n llm: BaseLanguageModel,\n memory: Optional[BaseMemory] = None,\n ) -> Chain:\n if memory is None:\n return ConversationChain(llm=llm)\n return ConversationChain(llm=llm, memory=memory)\n", 382 | "password": false, 383 | "name": "code", 384 | "advanced": false, 385 | "type": "code", 386 | "list": false 387 | }, 388 | "_type": "CustomComponent", 389 | "llm": { 390 | "required": true, 391 | "placeholder": "", 392 | "show": true, 393 | "multiline": false, 394 | "password": false, 395 | "name": "llm", 396 | "display_name": "LLM", 397 | "advanced": false, 398 | "dynamic": false, 399 | "info": "", 400 | "type": "BaseLanguageModel", 401 | "list": false 402 | }, 403 | "memory": { 404 | "required": false, 405 | "placeholder": "", 406 | "show": true, 407 | "multiline": false, 408 | "password": false, 409 | "name": "memory", 410 | "display_name": "Memory", 411 | "advanced": false, 412 | "dynamic": false, 413 | "info": "Memory to load context from. If none is provided, a ConversationBufferMemory will be used.", 414 | "type": "BaseMemory", 415 | "list": false 416 | } 417 | }, 418 | "description": "Chain to have a conversation and load context from memory.", 419 | "base_classes": [ 420 | "Chain" 421 | ], 422 | "display_name": "ConversationChain", 423 | "custom_fields": { 424 | "llm": null, 425 | "memory": null 426 | }, 427 | "output_types": [ 428 | "ConversationChain" 429 | ], 430 | "documentation": "https://docs.aiplanet.com/components/chains#conversationalretrievalchain", 431 | "beta": true, 432 | "error": null 433 | }, 434 | "id": "ConversationChain-zrNbw" 435 | }, 436 | "positionAbsolute": { 437 | "x": 1241.9895683290458, 438 | "y": 420.29741173043556 439 | } 440 | }, 441 | { 442 | "width": 384, 443 | "height": 529, 444 | "id": "ConversationBufferMemory-YTdKE", 445 | "type": "genericNode", 446 | "position": { 447 | "x": 609.7708523821489, 448 | "y": 633.1654709389743 449 | }, 450 | "data": { 451 | "type": "ConversationBufferMemory", 452 | "node": { 453 | "template": { 454 | "code": { 455 | "dynamic": true, 456 | "required": true, 457 | "placeholder": "", 458 | "show": false, 459 | "multiline": true, 460 | "value": "from typing import Optional, Union\nfrom langchain.memory.chat_memory import BaseMemory, BaseChatMemory\nfrom langchain.memory.buffer import ConversationBufferMemory\nfrom langchain.memory.chat_message_histories.postgres import PostgresChatMessageHistory\n\nfrom genflow import CustomComponent\n\n\nclass ConversationBufferMemoryComponent(CustomComponent):\n display_name: str = \"ConversationBufferMemory\"\n description: str = \"Buffer for storing conversation memory.\"\n documentation: str = (\n \"https://docs.aiplanet.com/components/memories#conversationbuffermemory\"\n )\n beta = False\n\n def build_config(self):\n return {\n \"input_key\": {\n \"display_name\": \"Input Key\",\n \"required\": False,\n \"value\": \"\",\n \"info\": \"The variable to be used as Chat Input when more than one variable is available.\",\n },\n \"memory_key\": {\n \"display_name\": \"Memory Key\",\n \"required\": False,\n \"value\": \"history\",\n },\n \"output_key\": {\n \"display_name\": \"Output Key\",\n \"required\": False,\n \"value\": \"\",\n \"info\": \"The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)\",\n },\n \"return_messages\": {\n \"display_name\": \"Return Messages\",\n \"field_type\": \"bool\",\n \"required\": False,\n \"value\": True,\n },\n \"session_id\": {\n \"display_name\": \"Session ID\",\n \"required\": False,\n \"advanced\": True,\n \"value\": \"genflow_memory_db\",\n },\n \"connection_string\": {\n \"display_name\": \"Connection String\",\n \"required\": False,\n \"advanced\": True,\n \"value\": \"\",\n },\n \"code\": {\"show\": False},\n }\n\n def build(\n self,\n input_key: Optional[str] = \"\",\n memory_key: Optional[str] = \"history\",\n output_key: Optional[str] = \"\",\n return_messages: Optional[bool] = True,\n session_id: Optional[str] = \"genflow_memory_db\",\n connection_string: Optional[str] = \"\",\n ) -> Union[BaseMemory, BaseChatMemory]:\n chat_memory = PostgresChatMessageHistory(\n session_id=session_id, connection_string=connection_string\n )\n\n keys = {}\n\n if input_key and input_key != \"\":\n keys[\"input_key\"] = input_key\n\n if output_key and output_key != \"\":\n keys[\"output_key\"] = output_key\n\n return ConversationBufferMemory(\n chat_memory=chat_memory,\n memory_key=memory_key,\n return_messages=return_messages,\n **keys\n )\n", 461 | "password": false, 462 | "name": "code", 463 | "advanced": false, 464 | "type": "code", 465 | "list": false 466 | }, 467 | "_type": "CustomComponent", 468 | "connection_string": { 469 | "required": false, 470 | "placeholder": "", 471 | "show": true, 472 | "multiline": false, 473 | "value": "", 474 | "password": false, 475 | "name": "connection_string", 476 | "display_name": "Connection String", 477 | "advanced": true, 478 | "dynamic": false, 479 | "info": "", 480 | "type": "str", 481 | "list": false 482 | }, 483 | "input_key": { 484 | "required": false, 485 | "placeholder": "", 486 | "show": true, 487 | "multiline": false, 488 | "value": "input", 489 | "password": false, 490 | "name": "input_key", 491 | "display_name": "Input Key", 492 | "advanced": false, 493 | "dynamic": false, 494 | "info": "The variable to be used as Chat Input when more than one variable is available.", 495 | "type": "str", 496 | "list": false 497 | }, 498 | "memory_key": { 499 | "required": false, 500 | "placeholder": "", 501 | "show": true, 502 | "multiline": false, 503 | "value": "history", 504 | "password": false, 505 | "name": "memory_key", 506 | "display_name": "Memory Key", 507 | "advanced": false, 508 | "dynamic": false, 509 | "info": "", 510 | "type": "str", 511 | "list": false 512 | }, 513 | "output_key": { 514 | "required": false, 515 | "placeholder": "", 516 | "show": true, 517 | "multiline": false, 518 | "value": "", 519 | "password": false, 520 | "name": "output_key", 521 | "display_name": "Output Key", 522 | "advanced": false, 523 | "dynamic": false, 524 | "info": "The variable to be used as Chat Output (e.g. 
answer in a ConversationalRetrievalChain)", 525 | "type": "str", 526 | "list": false 527 | }, 528 | "return_messages": { 529 | "required": false, 530 | "placeholder": "", 531 | "show": true, 532 | "multiline": false, 533 | "value": true, 534 | "password": false, 535 | "name": "return_messages", 536 | "display_name": "Return Messages", 537 | "advanced": false, 538 | "dynamic": false, 539 | "info": "", 540 | "type": "bool", 541 | "list": false 542 | }, 543 | "session_id": { 544 | "required": false, 545 | "placeholder": "", 546 | "show": true, 547 | "multiline": false, 548 | "value": "genflow_memory_db", 549 | "password": false, 550 | "name": "session_id", 551 | "display_name": "Session ID", 552 | "advanced": true, 553 | "dynamic": false, 554 | "info": "", 555 | "type": "str", 556 | "list": false 557 | } 558 | }, 559 | "description": "Buffer for storing conversation memory.", 560 | "base_classes": [ 561 | "BaseMemory", 562 | "BaseChatMemory", 563 | "BaseMemory" 564 | ], 565 | "display_name": "ConversationBufferMemory", 566 | "custom_fields": { 567 | "connection_string": null, 568 | "input_key": null, 569 | "memory_key": null, 570 | "output_key": null, 571 | "return_messages": null, 572 | "session_id": null 573 | }, 574 | "output_types": [ 575 | "ConversationBufferMemory" 576 | ], 577 | "documentation": "https://docs.aiplanet.com/components/memories#conversationbuffermemory", 578 | "beta": false, 579 | "error": null 580 | }, 581 | "id": "ConversationBufferMemory-YTdKE" 582 | }, 583 | "positionAbsolute": { 584 | "x": 609.7708523821489, 585 | "y": 633.1654709389743 586 | } 587 | } 588 | ], 589 | "edges": [ 590 | { 591 | "source": "ChatOpenAI-LaMPL", 592 | "sourceHandle": "{œbaseClassesœ:[œChatOpenAIœ,œBaseLanguageModelœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-LaMPLœ}", 593 | "target": "ConversationChain-zrNbw", 594 | "targetHandle": "{œfieldNameœ:œllmœ,œidœ:œConversationChain-zrNbwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}", 595 | "data": { 596 | "targetHandle": { 597 | "fieldName": "llm", 598 | "id": "ConversationChain-zrNbw", 599 | "inputTypes": null, 600 | "type": "BaseLanguageModel" 601 | }, 602 | "sourceHandle": { 603 | "baseClasses": [ 604 | "ChatOpenAI", 605 | "BaseLanguageModel", 606 | "BaseChatModel", 607 | "BaseLLM" 608 | ], 609 | "dataType": "ChatOpenAI", 610 | "id": "ChatOpenAI-LaMPL" 611 | } 612 | }, 613 | "style": { 614 | "stroke": "#555" 615 | }, 616 | "className": "stroke-gray-900 stroke-connection", 617 | "animated": false, 618 | "id": "reactflow__edge-ChatOpenAI-LaMPL{œbaseClassesœ:[œChatOpenAIœ,œBaseLanguageModelœ,œBaseChatModelœ,œBaseLLMœ],œdataTypeœ:œChatOpenAIœ,œidœ:œChatOpenAI-LaMPLœ}-ConversationChain-zrNbw{œfieldNameœ:œllmœ,œidœ:œConversationChain-zrNbwœ,œinputTypesœ:null,œtypeœ:œBaseLanguageModelœ}" 619 | }, 620 | { 621 | "source": "ConversationBufferMemory-YTdKE", 622 | "sourceHandle": "{œbaseClassesœ:[œBaseMemoryœ,œBaseChatMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-YTdKEœ}", 623 | "target": "ConversationChain-zrNbw", 624 | "targetHandle": "{œfieldNameœ:œmemoryœ,œidœ:œConversationChain-zrNbwœ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}", 625 | "data": { 626 | "targetHandle": { 627 | "fieldName": "memory", 628 | "id": "ConversationChain-zrNbw", 629 | "inputTypes": null, 630 | "type": "BaseMemory" 631 | }, 632 | "sourceHandle": { 633 | "baseClasses": [ 634 | "BaseMemory", 635 | "BaseChatMemory", 636 | "BaseMemory" 637 | ], 638 | "dataType": "ConversationBufferMemory", 639 | "id": 
"ConversationBufferMemory-YTdKE" 640 | } 641 | }, 642 | "style": { 643 | "stroke": "#555" 644 | }, 645 | "className": "stroke-foreground stroke-connection", 646 | "animated": false, 647 | "id": "reactflow__edge-ConversationBufferMemory-YTdKE{œbaseClassesœ:[œBaseMemoryœ,œBaseChatMemoryœ,œBaseMemoryœ],œdataTypeœ:œConversationBufferMemoryœ,œidœ:œConversationBufferMemory-YTdKEœ}-ConversationChain-zrNbw{œfieldNameœ:œmemoryœ,œidœ:œConversationChain-zrNbwœ,œinputTypesœ:null,œtypeœ:œBaseMemoryœ}" 648 | } 649 | ], 650 | "viewport": { 651 | "x": 48.959789688735555, 652 | "y": 78.53972697997804, 653 | "zoom": 0.5232132842442248 654 | } 655 | }, 656 | "description": "Simple Chat with OpenAI and a Memory component on GenAI Stack.", 657 | "name": "Simple Chat", 658 | "flow_type": "chat" 659 | } -------------------------------------------------------------------------------- /multimodal_image.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "dbd35c95-7581-499b-a03b-9ae760d4d221", 3 | "data": { 4 | "nodes": [ 5 | { 6 | "width": 384, 7 | "height": 513, 8 | "id": "TextGenerationOutput-k7U9e", 9 | "type": "genericNode", 10 | "position": { 11 | "x": 598.2061021756518, 12 | "y": 114.53046984864216 13 | }, 14 | "data": { 15 | "type": "TextGenerationOutput", 16 | "node": { 17 | "template": { 18 | "chain": { 19 | "required": false, 20 | "placeholder": "", 21 | "show": true, 22 | "multiline": false, 23 | "password": false, 24 | "name": "chain", 25 | "display_name": "Chain", 26 | "advanced": false, 27 | "dynamic": false, 28 | "info": "", 29 | "type": "Chain", 30 | "list": false 31 | }, 32 | "documents": { 33 | "required": false, 34 | "placeholder": "", 35 | "show": true, 36 | "multiline": false, 37 | "password": false, 38 | "name": "documents", 39 | "display_name": "Documents", 40 | "advanced": false, 41 | "dynamic": false, 42 | "info": "", 43 | "type": "Document", 44 | "list": true 45 | }, 46 | "multimodal": { 47 | "required": false, 48 | "placeholder": "", 49 | "show": true, 50 | "multiline": false, 51 | "password": false, 52 | "name": "multimodal", 53 | "display_name": "Multi Modal", 54 | "advanced": false, 55 | "dynamic": false, 56 | "info": "", 57 | "type": "MultiModal", 58 | "list": false 59 | }, 60 | "query": { 61 | "required": false, 62 | "placeholder": "", 63 | "show": true, 64 | "multiline": false, 65 | "password": false, 66 | "name": "query", 67 | "display_name": "Query", 68 | "advanced": false, 69 | "input_types": [ 70 | "Input" 71 | ], 72 | "dynamic": false, 73 | "info": "Provide a Query to invoke a chain if stack doesn't contain prompt template and memory.", 74 | "type": "str", 75 | "list": false 76 | }, 77 | "_type": "TextGenerationOutput" 78 | }, 79 | "description": "TextGenerationOutput component is used to view and download the generated output. 
Connect a component that returns Documents or connect a MultiModal component or connect a Chain Component and provide the Query to invoke a chain.", 80 | "base_classes": [ 81 | "TextGenerationOutput" 82 | ], 83 | "display_name": "TextGenerationOutput", 84 | "custom_fields": {}, 85 | "output_types": [ 86 | "Output" 87 | ], 88 | "documentation": "https://docs.aiplanet.com/components/outputs", 89 | "beta": false, 90 | "error": null 91 | }, 92 | "id": "TextGenerationOutput-k7U9e" 93 | }, 94 | "positionAbsolute": { 95 | "x": 598.2061021756518, 96 | "y": 114.53046984864216 97 | }, 98 | "selected": false, 99 | "dragging": false 100 | }, 101 | { 102 | "width": 384, 103 | "height": 419, 104 | "id": "OpenAITextToImage-9rHCV", 105 | "type": "genericNode", 106 | "position": { 107 | "x": -51.57221614853313, 108 | "y": 218.86653186498876 109 | }, 110 | "data": { 111 | "type": "OpenAITextToImage", 112 | "node": { 113 | "template": { 114 | "code": { 115 | "dynamic": true, 116 | "required": true, 117 | "placeholder": "", 118 | "show": false, 119 | "multiline": true, 120 | "value": "from genflow import CustomComponent\nfrom langchain.schema import BasePromptTemplate\nfrom openai import OpenAI\nfrom genflow.base.multimodal import MultiModal, SourceType, MediaType, MediaFormat\n\n\nclass OpenAITextToImage(CustomComponent):\n display_name: str = \"OpenAITextToImage\"\n description: str = \"Convert your text into Image using Dall-E-3\"\n\n QUALITY = [\"standard\", \"hd\"]\n\n def build_config(self):\n return {\n \"api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"password\": True,\n \"required\": True,\n },\n \"quality\": {\n \"display_name\": \"Quality of Image\",\n \"value\": \"standard\",\n \"options\": self.QUALITY,\n },\n \"prompt\": {\"display_name\": \"Prompt\", \"required\": True},\n \"code\": {\"show\": False},\n }\n\n def build(\n self, api_key: str, quality: str, prompt: BasePromptTemplate\n ) -> MultiModal:\n client = OpenAI(api_key=api_key)\n\n template_text: str = prompt.template\n\n response = client.images.generate(\n model=\"dall-e-3\", prompt=template_text, size=\"1024x1024\", quality=quality, n=1, response_format=\"b64_json\"\n )\n\n return MultiModal(\n source=response.data[0].b64_json,\n source_type=SourceType.BASE64,\n media_type=MediaType.IMAGE,\n media_format=MediaFormat.PNG,\n prompt=template_text,\n )\n", 121 | "password": false, 122 | "name": "code", 123 | "advanced": false, 124 | "type": "code", 125 | "list": false 126 | }, 127 | "_type": "CustomComponent", 128 | "api_key": { 129 | "required": true, 130 | "placeholder": "", 131 | "show": true, 132 | "multiline": false, 133 | "password": true, 134 | "name": "api_key", 135 | "display_name": "OpenAI API Key", 136 | "advanced": false, 137 | "dynamic": false, 138 | "info": "", 139 | "type": "str", 140 | "list": false, 141 | "value": "" 142 | }, 143 | "prompt": { 144 | "required": true, 145 | "placeholder": "", 146 | "show": true, 147 | "multiline": false, 148 | "password": false, 149 | "name": "prompt", 150 | "display_name": "Prompt", 151 | "advanced": false, 152 | "dynamic": false, 153 | "info": "", 154 | "type": "BasePromptTemplate", 155 | "list": false 156 | }, 157 | "quality": { 158 | "required": true, 159 | "placeholder": "", 160 | "show": true, 161 | "multiline": false, 162 | "value": "hd", 163 | "password": false, 164 | "options": [ 165 | "standard", 166 | "hd" 167 | ], 168 | "name": "quality", 169 | "display_name": "Quality of Image", 170 | "advanced": false, 171 | "dynamic": false, 172 | "info": "", 173 | "type": "str", 
174 | "list": true 175 | } 176 | }, 177 | "description": "Convert your text into Image using Dall-E-3", 178 | "base_classes": [ 179 | "MultiModal" 180 | ], 181 | "display_name": "OpenAITextToImage", 182 | "custom_fields": { 183 | "api_key": null, 184 | "prompt": null, 185 | "quality": null 186 | }, 187 | "output_types": [ 188 | "OpenAITextToImage" 189 | ], 190 | "documentation": "", 191 | "beta": true, 192 | "error": null 193 | }, 194 | "id": "OpenAITextToImage-9rHCV" 195 | }, 196 | "selected": false, 197 | "positionAbsolute": { 198 | "x": -51.57221614853313, 199 | "y": 218.86653186498876 200 | }, 201 | "dragging": false 202 | }, 203 | { 204 | "width": 384, 205 | "height": 375, 206 | "id": "PromptTemplate-yR8h2", 207 | "type": "genericNode", 208 | "position": { 209 | "x": -578.1124989676225, 210 | "y": 250.5456495773469 211 | }, 212 | "data": { 213 | "type": "PromptTemplate", 214 | "node": { 215 | "template": { 216 | "output_parser": { 217 | "required": false, 218 | "placeholder": "", 219 | "show": false, 220 | "multiline": false, 221 | "password": false, 222 | "name": "output_parser", 223 | "advanced": false, 224 | "dynamic": true, 225 | "info": "", 226 | "type": "BaseOutputParser", 227 | "list": false 228 | }, 229 | "input_types": { 230 | "required": false, 231 | "placeholder": "", 232 | "show": false, 233 | "multiline": false, 234 | "password": false, 235 | "name": "input_types", 236 | "advanced": false, 237 | "dynamic": true, 238 | "info": "", 239 | "type": "dict", 240 | "list": false 241 | }, 242 | "input_variables": { 243 | "required": true, 244 | "placeholder": "", 245 | "show": false, 246 | "multiline": false, 247 | "password": false, 248 | "name": "input_variables", 249 | "advanced": false, 250 | "dynamic": true, 251 | "info": "", 252 | "type": "str", 253 | "list": true, 254 | "value": [ 255 | "animal" 256 | ] 257 | }, 258 | "partial_variables": { 259 | "required": false, 260 | "placeholder": "", 261 | "show": false, 262 | "multiline": false, 263 | "password": false, 264 | "name": "partial_variables", 265 | "advanced": false, 266 | "dynamic": true, 267 | "info": "", 268 | "type": "dict", 269 | "list": false 270 | }, 271 | "template": { 272 | "required": true, 273 | "placeholder": "", 274 | "show": true, 275 | "multiline": true, 276 | "password": false, 277 | "name": "template", 278 | "advanced": false, 279 | "dynamic": true, 280 | "info": "", 281 | "type": "prompt", 282 | "list": false, 283 | "value": "Generate the image of an {animal} as captain America." 
284 | }, 285 | "template_format": { 286 | "required": false, 287 | "placeholder": "", 288 | "show": false, 289 | "multiline": false, 290 | "value": "f-string", 291 | "password": false, 292 | "name": "template_format", 293 | "advanced": false, 294 | "dynamic": true, 295 | "info": "", 296 | "type": "str", 297 | "list": false 298 | }, 299 | "validate_template": { 300 | "required": false, 301 | "placeholder": "", 302 | "show": false, 303 | "multiline": false, 304 | "value": false, 305 | "password": false, 306 | "name": "validate_template", 307 | "advanced": false, 308 | "dynamic": true, 309 | "info": "", 310 | "type": "bool", 311 | "list": false 312 | }, 313 | "_type": "PromptTemplate", 314 | "animal": { 315 | "required": false, 316 | "placeholder": "", 317 | "show": true, 318 | "multiline": true, 319 | "value": "", 320 | "password": false, 321 | "name": "animal", 322 | "display_name": "animal", 323 | "advanced": false, 324 | "input_types": [ 325 | "Document", 326 | "BaseOutputParser", 327 | "Input" 328 | ], 329 | "dynamic": false, 330 | "info": "", 331 | "type": "str", 332 | "list": false 333 | } 334 | }, 335 | "description": "A prompt template for a language model.", 336 | "base_classes": [ 337 | "PromptTemplate", 338 | "StringPromptTemplate", 339 | "BasePromptTemplate" 340 | ], 341 | "name": "", 342 | "display_name": "PromptTemplate", 343 | "documentation": "https://docs.aiplanet.com/components/prompts#prompt-template", 344 | "custom_fields": { 345 | "": [ 346 | "animal" 347 | ], 348 | "template": [ 349 | "animal" 350 | ] 351 | }, 352 | "output_types": [], 353 | "field_formatters": {}, 354 | "beta": false, 355 | "error": null 356 | }, 357 | "id": "PromptTemplate-yR8h2" 358 | }, 359 | "selected": false, 360 | "positionAbsolute": { 361 | "x": -578.1124989676225, 362 | "y": 250.5456495773469 363 | }, 364 | "dragging": false 365 | }, 366 | { 367 | "width": 384, 368 | "height": 457, 369 | "id": "Input-Xdqi8", 370 | "type": "genericNode", 371 | "position": { 372 | "x": -1071.3063420894928, 373 | "y": 128.8311446824973 374 | }, 375 | "data": { 376 | "type": "Input", 377 | "node": { 378 | "template": { 379 | "input_value": { 380 | "required": false, 381 | "placeholder": "", 382 | "show": true, 383 | "multiline": true, 384 | "value": "racoon", 385 | "password": false, 386 | "name": "input_value", 387 | "display_name": "Input Value", 388 | "advanced": false, 389 | "dynamic": false, 390 | "info": "", 391 | "type": "str", 392 | "list": false, 393 | "file_path": null 394 | }, 395 | "input_key": { 396 | "required": true, 397 | "placeholder": "", 398 | "show": true, 399 | "multiline": false, 400 | "value": "animal", 401 | "password": false, 402 | "name": "input_key", 403 | "display_name": "Input Key", 404 | "advanced": false, 405 | "dynamic": false, 406 | "info": "", 407 | "type": "str", 408 | "list": false 409 | }, 410 | "input_type": { 411 | "required": true, 412 | "placeholder": "", 413 | "show": true, 414 | "multiline": false, 415 | "value": "Text", 416 | "password": false, 417 | "options": [ 418 | "File", 419 | "Url", 420 | "Text" 421 | ], 422 | "name": "input_type", 423 | "display_name": "Input Type", 424 | "advanced": false, 425 | "dynamic": false, 426 | "info": "", 427 | "type": "str", 428 | "list": false 429 | }, 430 | "_type": "Input" 431 | }, 432 | "description": "Input is used to specify the type of input.", 433 | "base_classes": [ 434 | "Input" 435 | ], 436 | "display_name": "Input", 437 | "custom_fields": {}, 438 | "output_types": [ 439 | "Input" 440 | ], 441 | "documentation": 
"https://docs.aiplanet.com/components/inputs", 442 | "beta": false, 443 | "error": null 444 | }, 445 | "id": "Input-Xdqi8" 446 | }, 447 | "selected": false, 448 | "positionAbsolute": { 449 | "x": -1071.3063420894928, 450 | "y": 128.8311446824973 451 | }, 452 | "dragging": false 453 | } 454 | ], 455 | "edges": [ 456 | { 457 | "source": "OpenAITextToImage-9rHCV", 458 | "sourceHandle": "{œbaseClassesœ:[œMultiModalœ],œdataTypeœ:œOpenAITextToImageœ,œidœ:œOpenAITextToImage-9rHCVœ}", 459 | "target": "TextGenerationOutput-k7U9e", 460 | "targetHandle": "{œfieldNameœ:œmultimodalœ,œidœ:œTextGenerationOutput-k7U9eœ,œinputTypesœ:null,œtypeœ:œMultiModalœ}", 461 | "data": { 462 | "targetHandle": { 463 | "fieldName": "multimodal", 464 | "id": "TextGenerationOutput-k7U9e", 465 | "inputTypes": null, 466 | "type": "MultiModal" 467 | }, 468 | "sourceHandle": { 469 | "baseClasses": [ 470 | "MultiModal" 471 | ], 472 | "dataType": "OpenAITextToImage", 473 | "id": "OpenAITextToImage-9rHCV" 474 | } 475 | }, 476 | "style": { 477 | "stroke": "#555" 478 | }, 479 | "className": "stroke-foreground stroke-connection", 480 | "animated": false, 481 | "id": "reactflow__edge-OpenAITextToImage-9rHCV{œbaseClassesœ:[œMultiModalœ],œdataTypeœ:œOpenAITextToImageœ,œidœ:œOpenAITextToImage-9rHCVœ}-TextGenerationOutput-k7U9e{œfieldNameœ:œmultimodalœ,œidœ:œTextGenerationOutput-k7U9eœ,œinputTypesœ:null,œtypeœ:œMultiModalœ}" 482 | }, 483 | { 484 | "source": "PromptTemplate-yR8h2", 485 | "sourceHandle": "{œbaseClassesœ:[œPromptTemplateœ,œStringPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-yR8h2œ}", 486 | "target": "OpenAITextToImage-9rHCV", 487 | "targetHandle": "{œfieldNameœ:œpromptœ,œidœ:œOpenAITextToImage-9rHCVœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}", 488 | "data": { 489 | "targetHandle": { 490 | "fieldName": "prompt", 491 | "id": "OpenAITextToImage-9rHCV", 492 | "inputTypes": null, 493 | "type": "BasePromptTemplate" 494 | }, 495 | "sourceHandle": { 496 | "baseClasses": [ 497 | "PromptTemplate", 498 | "StringPromptTemplate", 499 | "BasePromptTemplate" 500 | ], 501 | "dataType": "PromptTemplate", 502 | "id": "PromptTemplate-yR8h2" 503 | } 504 | }, 505 | "style": { 506 | "stroke": "#555" 507 | }, 508 | "className": "stroke-foreground stroke-connection", 509 | "animated": false, 510 | "id": "reactflow__edge-PromptTemplate-yR8h2{œbaseClassesœ:[œPromptTemplateœ,œStringPromptTemplateœ,œBasePromptTemplateœ],œdataTypeœ:œPromptTemplateœ,œidœ:œPromptTemplate-yR8h2œ}-OpenAITextToImage-9rHCV{œfieldNameœ:œpromptœ,œidœ:œOpenAITextToImage-9rHCVœ,œinputTypesœ:null,œtypeœ:œBasePromptTemplateœ}" 511 | }, 512 | { 513 | "source": "Input-Xdqi8", 514 | "sourceHandle": "{œbaseClassesœ:[œInputœ],œdataTypeœ:œInputœ,œidœ:œInput-Xdqi8œ}", 515 | "target": "PromptTemplate-yR8h2", 516 | "targetHandle": "{œfieldNameœ:œanimalœ,œidœ:œPromptTemplate-yR8h2œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œInputœ],œtypeœ:œstrœ}", 517 | "data": { 518 | "targetHandle": { 519 | "fieldName": "animal", 520 | "id": "PromptTemplate-yR8h2", 521 | "inputTypes": [ 522 | "Document", 523 | "BaseOutputParser", 524 | "Input" 525 | ], 526 | "type": "str" 527 | }, 528 | "sourceHandle": { 529 | "baseClasses": [ 530 | "Input" 531 | ], 532 | "dataType": "Input", 533 | "id": "Input-Xdqi8" 534 | } 535 | }, 536 | "style": { 537 | "stroke": "#555" 538 | }, 539 | "className": "stroke-foreground stroke-connection", 540 | "animated": false, 541 | "id": 
"reactflow__edge-Input-Xdqi8{œbaseClassesœ:[œInputœ],œdataTypeœ:œInputœ,œidœ:œInput-Xdqi8œ}-PromptTemplate-yR8h2{œfieldNameœ:œanimalœ,œidœ:œPromptTemplate-yR8h2œ,œinputTypesœ:[œDocumentœ,œBaseOutputParserœ,œInputœ],œtypeœ:œstrœ}" 542 | } 543 | ], 544 | "viewport": { 545 | "x": 627.5311678348885, 546 | "y": 54.55200688640784, 547 | "zoom": 0.5997641781730569 548 | } 549 | }, 550 | "description": "Stack that generates an image of an animal in a Captain America suit with a simple text prompt and the dall-e-3 model from OpenAI.", 551 | "name": "multimodal_image", 552 | "flow_type": "text_generation" 553 | } -------------------------------------------------------------------------------- /multimodal_speech.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "3d2b943f-40bf-4440-8cb9-ab5ffdcb1f37", 3 | "data": { 4 | "nodes": [ 5 | { 6 | "width": 384, 7 | "height": 493, 8 | "id": "TextGenerationOutput-te6yr", 9 | "type": "genericNode", 10 | "position": { 11 | "x": 1062.2121145634685, 12 | "y": 945.8848653186284 13 | }, 14 | "data": { 15 | "type": "TextGenerationOutput", 16 | "node": { 17 | "template": { 18 | "chain": { 19 | "required": false, 20 | "placeholder": "", 21 | "show": true, 22 | "multiline": false, 23 | "password": false, 24 | "name": "chain", 25 | "display_name": "Chain", 26 | "advanced": false, 27 | "dynamic": false, 28 | "info": "", 29 | "type": "Chain", 30 | "list": false 31 | }, 32 | "documents": { 33 | "required": false, 34 | "placeholder": "", 35 | "show": true, 36 | "multiline": false, 37 | "password": false, 38 | "name": "documents", 39 | "display_name": "Documents", 40 | "advanced": false, 41 | "dynamic": false, 42 | "info": "", 43 | "type": "Document", 44 | "list": true 45 | }, 46 | "multimodal": { 47 | "required": false, 48 | "placeholder": "", 49 | "show": true, 50 | "multiline": false, 51 | "password": false, 52 | "name": "multimodal", 53 | "display_name": "Multi Modal", 54 | "advanced": false, 55 | "dynamic": false, 56 | "info": "", 57 | "type": "MultiModal", 58 | "list": false 59 | }, 60 | "query": { 61 | "required": false, 62 | "placeholder": "", 63 | "show": true, 64 | "multiline": false, 65 | "password": false, 66 | "name": "query", 67 | "display_name": "Query", 68 | "advanced": false, 69 | "input_types": [ 70 | "Input" 71 | ], 72 | "dynamic": false, 73 | "info": "Provide a Query to invoke a chain if stack doesn't contain prompt template and memory.", 74 | "type": "str", 75 | "list": false, 76 | "value": "" 77 | }, 78 | "_type": "TextGenerationOutput" 79 | }, 80 | "description": "Text Generation Output is used to download the Documents, Connect a component that returns Documents or connect a Chain Component and provide the Query to invoke a chain.", 81 | "base_classes": [ 82 | "TextGenerationOutput" 83 | ], 84 | "display_name": "TextGenerationOutput", 85 | "custom_fields": {}, 86 | "output_types": [ 87 | "Output" 88 | ], 89 | "documentation": "https://docs.aiplanet.com/components/outputs", 90 | "beta": false, 91 | "error": null 92 | }, 93 | "id": "TextGenerationOutput-te6yr" 94 | }, 95 | "positionAbsolute": { 96 | "x": 1062.2121145634685, 97 | "y": 945.8848653186284 98 | } 99 | }, 100 | { 101 | "width": 384, 102 | "height": 465, 103 | "id": "OpenAITextToSpeech-uyCJz", 104 | "type": "genericNode", 105 | "position": { 106 | "x": 439.8101166484677, 107 | "y": 1067.81775960124 108 | }, 109 | "data": { 110 | "type": "OpenAITextToSpeech", 111 | "node": { 112 | "template": { 113 | "code": { 114 | "dynamic": true, 115 | 
"required": true, 116 | "placeholder": "", 117 | "show": false, 118 | "multiline": true, 119 | "value": "from genflow import CustomComponent\nfrom genflow.base.multimodal.utils import MediaType, MultiModal, SourceType, MediaFormat\nfrom openai import OpenAI\nimport base64\n\n\nclass OpenAITextToSpeech(CustomComponent):\n display_name: str = \"OpenAITextToSpeech\"\n description: str = \"Convert your text into Speech\"\n\n VOICES = [\"alloy\", \"echo\", \"fable\", \"onyx\", \"nova\", \"shimmer\"]\n\n def build_config(self):\n return {\n \"api_key\": {\n \"display_name\": \"OpenAI API Key\",\n \"field_type\": \"str\",\n \"password\": True,\n \"required\": True,\n },\n \"voice\": {\n \"display_name\": \"Choose the Voice\",\n \"value\": \"alloy\",\n \"options\": self.VOICES,\n \"required\": True,\n },\n \"text_input\": {\n \"display_name\": \"Text Input\",\n \"field_type\": \"str\",\n \"value\": \"\",\n \"input_types\": [\"Input\"],\n \"required\": True,\n },\n \"code\": {\"show\": False},\n }\n\n def build(self, api_key: str, voice: str, text_input: str) -> MultiModal:\n client = OpenAI(api_key=api_key)\n response = client.audio.speech.create(\n model=\"tts-1\", voice=voice, input=text_input\n )\n\n encoded_data = base64.b64encode(response.content)\n encoded_string = encoded_data.decode(\"utf-8\")\n\n return MultiModal(\n source=encoded_string,\n source_type=SourceType.BASE64,\n media_type=MediaType.AUDIO,\n media_format=MediaFormat.MP3,\n prompt=text_input,\n )\n", 120 | "password": false, 121 | "name": "code", 122 | "advanced": false, 123 | "type": "code", 124 | "list": false 125 | }, 126 | "_type": "CustomComponent", 127 | "api_key": { 128 | "required": true, 129 | "placeholder": "", 130 | "show": true, 131 | "multiline": false, 132 | "password": true, 133 | "name": "api_key", 134 | "display_name": "OpenAI API Key", 135 | "advanced": false, 136 | "dynamic": false, 137 | "info": "", 138 | "type": "str", 139 | "list": false, 140 | "value": "" 141 | }, 142 | "text_input": { 143 | "required": true, 144 | "placeholder": "", 145 | "show": true, 146 | "multiline": false, 147 | "value": "", 148 | "password": false, 149 | "name": "text_input", 150 | "display_name": "Text Input", 151 | "advanced": false, 152 | "input_types": [ 153 | "Input" 154 | ], 155 | "dynamic": false, 156 | "info": "", 157 | "type": "str", 158 | "list": false 159 | }, 160 | "voice": { 161 | "required": true, 162 | "placeholder": "", 163 | "show": true, 164 | "multiline": false, 165 | "value": "alloy", 166 | "password": false, 167 | "options": [ 168 | "alloy", 169 | "echo", 170 | "fable", 171 | "onyx", 172 | "nova", 173 | "shimmer" 174 | ], 175 | "name": "voice", 176 | "display_name": "Choose the Voice", 177 | "advanced": false, 178 | "dynamic": false, 179 | "info": "", 180 | "type": "str", 181 | "list": true 182 | } 183 | }, 184 | "description": "Convert your text into Speech", 185 | "base_classes": [ 186 | "MultiModal" 187 | ], 188 | "display_name": "OpenAITextToSpeech", 189 | "custom_fields": { 190 | "api_key": null, 191 | "text_input": null, 192 | "voice": null 193 | }, 194 | "output_types": [ 195 | "OpenAITextToSpeech" 196 | ], 197 | "documentation": "", 198 | "beta": true, 199 | "error": null 200 | }, 201 | "id": "OpenAITextToSpeech-uyCJz" 202 | }, 203 | "selected": false, 204 | "dragging": false, 205 | "positionAbsolute": { 206 | "x": 439.8101166484677, 207 | "y": 1067.81775960124 208 | } 209 | }, 210 | { 211 | "width": 384, 212 | "height": 457, 213 | "id": "Input-fFqOQ", 214 | "type": "genericNode", 215 | "position": { 
216 | "x": -60.44911631733934, 217 | "y": 1036.332912771224 218 | }, 219 | "data": { 220 | "type": "Input", 221 | "node": { 222 | "template": { 223 | "input_value": { 224 | "required": false, 225 | "placeholder": "", 226 | "show": true, 227 | "multiline": true, 228 | "value": "Hi There I am the tts model from OpenAI.", 229 | "password": false, 230 | "name": "input_value", 231 | "display_name": "Input Value", 232 | "advanced": false, 233 | "dynamic": false, 234 | "info": "", 235 | "type": "str", 236 | "list": false, 237 | "file_path": null 238 | }, 239 | "input_key": { 240 | "required": true, 241 | "placeholder": "", 242 | "show": true, 243 | "multiline": false, 244 | "value": "input", 245 | "password": false, 246 | "name": "input_key", 247 | "display_name": "Input Key", 248 | "advanced": false, 249 | "dynamic": false, 250 | "info": "", 251 | "type": "str", 252 | "list": false 253 | }, 254 | "input_type": { 255 | "required": true, 256 | "placeholder": "", 257 | "show": true, 258 | "multiline": false, 259 | "value": "Text", 260 | "password": false, 261 | "options": [ 262 | "File", 263 | "Url", 264 | "Text" 265 | ], 266 | "name": "input_type", 267 | "display_name": "Input Type", 268 | "advanced": false, 269 | "dynamic": false, 270 | "info": "", 271 | "type": "str", 272 | "list": false 273 | }, 274 | "_type": "Input" 275 | }, 276 | "description": "Input is used to specify the type of input.", 277 | "base_classes": [ 278 | "Input" 279 | ], 280 | "display_name": "Input", 281 | "custom_fields": {}, 282 | "output_types": [ 283 | "Input" 284 | ], 285 | "documentation": "https://docs.aiplanet.com/components/inputs", 286 | "beta": false, 287 | "error": null 288 | }, 289 | "id": "Input-fFqOQ" 290 | }, 291 | "selected": false, 292 | "dragging": false, 293 | "positionAbsolute": { 294 | "x": -60.44911631733934, 295 | "y": 1036.332912771224 296 | } 297 | } 298 | ], 299 | "edges": [ 300 | { 301 | "source": "OpenAITextToSpeech-uyCJz", 302 | "sourceHandle": "{œbaseClassesœ:[œMultiModalœ],œdataTypeœ:œOpenAITextToSpeechœ,œidœ:œOpenAITextToSpeech-uyCJzœ}", 303 | "target": "TextGenerationOutput-te6yr", 304 | "targetHandle": "{œfieldNameœ:œmultimodalœ,œidœ:œTextGenerationOutput-te6yrœ,œinputTypesœ:null,œtypeœ:œMultiModalœ}", 305 | "data": { 306 | "targetHandle": { 307 | "fieldName": "multimodal", 308 | "id": "TextGenerationOutput-te6yr", 309 | "inputTypes": null, 310 | "type": "MultiModal" 311 | }, 312 | "sourceHandle": { 313 | "baseClasses": [ 314 | "MultiModal" 315 | ], 316 | "dataType": "OpenAITextToSpeech", 317 | "id": "OpenAITextToSpeech-uyCJz" 318 | } 319 | }, 320 | "style": { 321 | "stroke": "#555" 322 | }, 323 | "className": "stroke-foreground stroke-connection", 324 | "animated": false, 325 | "id": "reactflow__edge-OpenAITextToSpeech-uyCJz{œbaseClassesœ:[œMultiModalœ],œdataTypeœ:œOpenAITextToSpeechœ,œidœ:œOpenAITextToSpeech-uyCJzœ}-TextGenerationOutput-te6yr{œfieldNameœ:œmultimodalœ,œidœ:œTextGenerationOutput-te6yrœ,œinputTypesœ:null,œtypeœ:œMultiModalœ}" 326 | }, 327 | { 328 | "source": "Input-fFqOQ", 329 | "sourceHandle": "{œbaseClassesœ:[œInputœ],œdataTypeœ:œInputœ,œidœ:œInput-fFqOQœ}", 330 | "target": "OpenAITextToSpeech-uyCJz", 331 | "targetHandle": "{œfieldNameœ:œtext_inputœ,œidœ:œOpenAITextToSpeech-uyCJzœ,œinputTypesœ:[œInputœ],œtypeœ:œstrœ}", 332 | "data": { 333 | "targetHandle": { 334 | "fieldName": "text_input", 335 | "id": "OpenAITextToSpeech-uyCJz", 336 | "inputTypes": [ 337 | "Input" 338 | ], 339 | "type": "str" 340 | }, 341 | "sourceHandle": { 342 | "baseClasses": [ 343 | "Input" 344 | ], 
345 | "dataType": "Input", 346 | "id": "Input-fFqOQ" 347 | } 348 | }, 349 | "style": { 350 | "stroke": "#555" 351 | }, 352 | "className": "stroke-foreground stroke-connection", 353 | "animated": false, 354 | "id": "reactflow__edge-Input-fFqOQ{œbaseClassesœ:[œInputœ],œdataTypeœ:œInputœ,œidœ:œInput-fFqOQœ}-OpenAITextToSpeech-uyCJz{œfieldNameœ:œtext_inputœ,œidœ:œOpenAITextToSpeech-uyCJzœ,œinputTypesœ:[œInputœ],œtypeœ:œstrœ}" 355 | } 356 | ], 357 | "viewport": { 358 | "x": 362.55897688137367, 359 | "y": -303.6549353737929, 360 | "zoom": 0.5717035911649995 361 | } 362 | }, 363 | "description": "This stack uses the OpenAI text to speech model (TTS) to convert text given in the input to a voice speech.", 364 | "name": "multimodal_speech", 365 | "flow_type": "text_generation" 366 | } --------------------------------------------------------------------------------
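
A closing note on the two multimodal stacks above: as their embedded component code shows, both return their media (a Dall-E-3 PNG or a tts-1 MP3) as a base64 string inside a MultiModal object. If you ever need to turn such a payload back into a local file outside the platform, the decoding step is plain standard-library work. The sketch below is an assumption-level illustration, not platform code; `audio_b64` is a hypothetical variable standing in for the base64 string found in the stack's output, and the output file name is arbitrary.

```python
import base64

def save_base64_media(b64_payload: str, out_path: str) -> None:
    """Decode a base64 media payload (like the MP3 or PNG these multimodal
    stacks return) and write it to a local file."""
    with open(out_path, "wb") as f:
        f.write(base64.b64decode(b64_payload))

# Hypothetical usage: `audio_b64` stands in for the base64 string taken from
# the multimodal_speech stack's output.
# save_base64_media(audio_b64, "speech.mp3")
```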