├── .gitignore ├── LICENSE.md ├── README.md ├── examples └── example.py ├── flowise ├── __init__.py └── client.py ├── pyproject.toml ├── setup.py └── tests └── test_flowise.py /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .nox/ 42 | .coverage 43 | .coverage.* 44 | .cache 45 | nosetests.xml 46 | coverage.xml 47 | *.cover 48 | *.py,cover 49 | .hypothesis/ 50 | .pytest_cache/ 51 | 52 | # Translations 53 | *.mo 54 | *.pot 55 | 56 | # Django stuff: 57 | *.log 58 | local_settings.py 59 | 60 | # Flask stuff: 61 | instance/ 62 | .webassets-cache 63 | 64 | # Scrapy stuff: 65 | .scrapy 66 | 67 | # Sphinx documentation 68 | docs/_build/ 69 | 70 | # PyBuilder 71 | target/ 72 | 73 | # Jupyter Notebook 74 | .ipynb_checkpoints 75 | 76 | # IPython 77 | profile_default/ 78 | ipython_config.py 79 | 80 | # pyenv 81 | .python-version 82 | 83 | # Pipenv 84 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 85 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 86 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 87 | # install all needed dependencies. 88 | Pipfile.lock 89 | 90 | # poetry 91 | poetry.lock 92 | 93 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 94 | __pypackages__/ 95 | 96 | # Celery stuff 97 | celerybeat-schedule 98 | *.celerybeat.pid 99 | 100 | # SageMath parsed files 101 | *.sage.py 102 | 103 | # dotenv 104 | .env 105 | .envrc 106 | .venv 107 | env/ 108 | venv/ 109 | ENV/ 110 | env.bak/ 111 | venv.bak/ 112 | 113 | # Spyder project settings 114 | .spyderproject 115 | .spyproject 116 | 117 | # Rope project settings 118 | .ropeproject 119 | 120 | # mkdocs documentation 121 | /site 122 | 123 | # mypy 124 | .mypy_cache/ 125 | .dmypy.json 126 | dmypy.json 127 | 128 | # Pyre type checker 129 | .pyre/ 130 | -------------------------------------------------------------------------------- /LICENSE.md: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 FlowiseAI 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Flowise SDK - Python

The **Flowise SDK** for Python provides an easy way to create predictions through the Flowise API, with support for both streaming and non-streaming responses. Predictions can be customized with options such as message history, file uploads, and configuration overrides.

## Features

- Support for streaming and non-streaming API responses
- Ability to include message history and file uploads

## Installation

You can install the SDK via pip:

```bash
pip install flowise
```

To upgrade to the latest version:

```bash
pip install --upgrade flowise
```

## Example

```py
from flowise import Flowise, PredictionData, IMessage, IFileUpload

def example_non_streaming():
    # Initialize Flowise client
    client = Flowise()

    # Create a prediction without streaming
    completion = client.create_prediction(
        PredictionData(
            chatflowId="abc",
            question="What is the capital of France?",
            streaming=False  # Non-streaming mode
        )
    )

    # Process and print the full response
    for response in completion:
        print("Non-streaming response:", response)

def example_streaming():
    # Initialize Flowise client
    client = Flowise()

    # Create a prediction with streaming enabled
    completion = client.create_prediction(
        PredictionData(
            chatflowId="abc",
            question="Tell me a joke!",
            streaming=True  # Enable streaming
        )
    )

    # Process and print each streamed chunk
    print("Streaming response:")
    for chunk in completion:
        print(chunk)


if __name__ == "__main__":
    # Run the non-streaming example
    example_non_streaming()

    # Run the streaming example
    example_streaming()
```
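
## Configuration

By default, the client connects to `http://localhost:3000` with no API key. Both values can be changed through the `Flowise` constructor; when an `api_key` is set, it is sent as a `Bearer` token on each request. The host and key below are placeholders for illustration:

```py
from flowise import Flowise

client = Flowise(
    base_url="https://your-flowise-host.example.com",  # placeholder host
    api_key="your-api-key"  # placeholder key
)
```

If the server rejects a request (for example, an unknown chatflow ID or an invalid API key), `create_prediction` raises `requests.exceptions.HTTPError`, since the client calls `raise_for_status()` on every response.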

## Build & Publish

1. Increment the version in `setup.py`
2. `pip install wheel`
3. `python setup.py sdist bdist_wheel`
4. `twine upload --skip-existing dist/*`
--------------------------------------------------------------------------------
/examples/example.py:
--------------------------------------------------------------------------------
from flowise import Flowise, PredictionData, IMessage, IFileUpload

def example_non_streaming():
    # Initialize Flowise client
    client = Flowise()

    # Create a prediction without streaming
    completion = client.create_prediction(
        PredictionData(
            chatflowId="abc",
            question="What is the capital of France?",
            streaming=False  # Non-streaming mode
        )
    )

    # Process and print the full response
    for response in completion:
        print("Non-streaming response:", response)

def example_streaming():
    # Initialize Flowise client
    client = Flowise()

    # Create a prediction with streaming enabled
    completion = client.create_prediction(
        PredictionData(
            chatflowId="abc",
            question="Tell me a joke!",
            streaming=True  # Enable streaming
        )
    )

    # Process and print each streamed chunk
    print("Streaming response:")
    for chunk in completion:
        print(chunk)

def example_with_history_and_uploads():
    # Initialize Flowise client
    client = Flowise()

    # Example message history
    history = [
        IMessage(content="What is the weather?", role="userMessage"),
        IMessage(content="It is sunny today.", role="apiMessage")
    ]

    # Example file upload
    uploads = [
        IFileUpload(data="base64EncodedData", type="file", name="example.txt", mime="text/plain")
    ]

    # Create a prediction with history and uploads
    completion = client.create_prediction(
        PredictionData(
            chatflowId="abc",
            question="Analyze the attached file.",
            streaming=True,
            history=history,  # Pass message history
            uploads=uploads  # Pass file uploads
        )
    )

    # Process and print each streamed chunk
    print("Streaming response with history and uploads:")
    for chunk in completion:
        print(chunk)
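

# The example below is an added sketch, not part of the original examples.
# It shows how the overrideConfig and chatId fields of PredictionData can be
# supplied; the keys inside overrideConfig (such as "temperature") and the
# chat id are hypothetical placeholders, since the accepted values depend on
# your chatflow. Call it from the __main__ block below if desired.
def example_override_config():
    # Initialize Flowise client
    client = Flowise()

    # Create a prediction that reuses a chat session and overrides flow config
    completion = client.create_prediction(
        PredictionData(
            chatflowId="abc",
            question="Summarize our conversation so far.",
            chatId="chat-123",  # hypothetical chat session id
            overrideConfig={"temperature": 0.2},  # hypothetical override key
            streaming=False
        )
    )

    # Process and print the full response
    for response in completion:
        print("Response with overrides:", response)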


if __name__ == "__main__":
    # Run the non-streaming example
    example_non_streaming()

    # Run the streaming example
    example_streaming()

    # Run the example with history and file uploads
    example_with_history_and_uploads()
--------------------------------------------------------------------------------
/flowise/__init__.py:
--------------------------------------------------------------------------------
from .client import Flowise, PredictionData, IMessage, IFileUpload

__all__ = ["Flowise", "PredictionData", "IMessage", "IFileUpload"]
--------------------------------------------------------------------------------
/flowise/client.py:
--------------------------------------------------------------------------------
import requests
from typing import List, Dict, Optional, Generator, Union

class IFileUpload:
    def __init__(self, data: Optional[str], type: str, name: str, mime: str):
        self.data = data
        self.type = type
        self.name = name
        self.mime = mime


class IMessage:
    # All fields are optional so a message can be built either as a
    # (message, type) pair or as a (role, content) pair, as in examples/example.py.
    def __init__(self, message: Optional[str] = None, type: Optional[str] = None, role: Optional[str] = None, content: Optional[str] = None):
        self.message = message
        self.type = type
        self.role = role
        self.content = content


class PredictionData:
    def __init__(
        self,
        chatflowId: str,
        question: str,
        overrideConfig: Optional[Dict] = None,
        chatId: Optional[str] = None,
        streaming: Optional[bool] = False,
        history: Optional[List[IMessage]] = None,
        uploads: Optional[List[IFileUpload]] = None
    ):
        self.chatflowId = chatflowId
        self.question = question
        self.overrideConfig = overrideConfig
        self.chatId = chatId
        self.streaming = streaming
        self.history = history
        self.uploads = uploads


class Flowise:
    def __init__(self, base_url: Optional[str] = None, api_key: Optional[str] = None):
        self.base_url = base_url or 'http://localhost:3000'
        self.api_key = api_key or ''

    def _get_headers(self) -> Dict[str, str]:
        headers = {}
        if self.api_key:
            headers['Authorization'] = f'Bearer {self.api_key}'
        return headers

    def create_prediction(self, data: PredictionData) -> Generator[Union[str, dict], None, None]:
        """Create a prediction, yielding SSE event payloads (str) when streaming
        or a single JSON response (dict) when not streaming."""
        # Step 1: Check if chatflow is available for streaming
        chatflow_stream_url = f'{self.base_url}/api/v1/chatflows-streaming/{data.chatflowId}'
        response = requests.get(chatflow_stream_url, headers=self._get_headers())
        response.raise_for_status()

        chatflow_stream_data = response.json()
        is_streaming_available = chatflow_stream_data.get("isStreaming", False)

        prediction_url = f'{self.base_url}/api/v1/prediction/{data.chatflowId}'

        # Step 2: Handle streaming prediction
        if is_streaming_available and data.streaming:
            prediction_payload = {
                'chatflowId': data.chatflowId,
                'question': data.question,
                'overrideConfig': data.overrideConfig,
                'chatId': data.chatId,
                'streaming': data.streaming,
                'history': [msg.__dict__ for msg in (data.history or [])],
                'uploads': [upload.__dict__ for upload in (data.uploads or [])]
            }

            with requests.post(prediction_url, json=prediction_payload, stream=True, headers=self._get_headers()) as r:
                r.raise_for_status()
                for line in r.iter_lines():
                    if line:
                        line_str = line.decode('utf-8')
                        if line_str.startswith('data:'):
                            # Strip only the leading SSE "data:" prefix
                            event = line_str[len('data:'):].strip()
                            yield event

        # Step 3: Handle non-streaming prediction
        else:
            prediction_payload = {
                'chatflowId': data.chatflowId,
                'question': data.question,
                'overrideConfig': data.overrideConfig,
                'chatId': data.chatId,
                'history': [msg.__dict__ for msg in (data.history or [])],
                'uploads': [upload.__dict__ for upload in (data.uploads or [])]
            }

            response = requests.post(prediction_url, json=prediction_payload, headers=self._get_headers())
            response.raise_for_status()
            yield response.json()
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
[build-system]
requires = ["setuptools>=42", "wheel"]
build-backend = "setuptools.build_meta"
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
from setuptools import setup, find_packages
import os

# Read the contents of your README file
with open(os.path.join(os.path.dirname(__file__), 'README.md'), 'r', encoding='utf-8') as fh:
    long_description = fh.read()

setup(
    name="flowise",
    version="1.0.4",
    description="Flowise SDK for Python to interact with the Flowise API.",
    long_description=long_description,  # Use README.md as the long description
    long_description_content_type='text/markdown',  # This specifies the format
    author="Henry Heng",
    author_email="support@flowiseai.com",
    url="https://github.com/FlowiseAI/FlowisePy",
    packages=find_packages(),
    install_requires=[
        "requests>=2.25.1"
    ],
    python_requires='>=3.7',
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
)
--------------------------------------------------------------------------------
/tests/test_flowise.py:
--------------------------------------------------------------------------------
import unittest
from unittest.mock import patch, MagicMock
from flowise import Flowise, PredictionData, IMessage, IFileUpload

class TestFlowiseClient(unittest.TestCase):

    @patch('flowise.client.requests.post')
    @patch('flowise.client.requests.get')
    def test_create_prediction_non_streaming(self, mock_get, mock_post):
        # Mock the response for the streaming check (non-streaming scenario)
        mock_get.return_value.json.return_value = {"isStreaming": False}

        # Mock the non-streaming POST response
        mock_post.return_value.json.return_value = {"answer": "The capital of France is Paris."}

        # Create a client instance
        client = Flowise()

        # Make a non-streaming request
        completion = client.create_prediction(
            PredictionData(
                chatflowId="abc",
                question="What is the capital of France?",
                streaming=False
            )
        )

        # Verify the full JSON response
        response = list(completion)
        self.assertEqual(response[0], {"answer": "The capital of France is Paris."})

    @patch('flowise.client.requests.post')
    @patch('flowise.client.requests.get')
    def test_create_prediction_streaming(self, mock_get, mock_post):
        # Mock the response for the streaming check (streaming is available)
        mock_get.return_value.json.return_value = {"isStreaming": True}

        # Mock the streaming POST response. The client uses requests.post as a
        # context manager, so the streamed lines are configured on the mock
        # returned by __enter__.
        mock_stream_response = MagicMock()
        mock_stream_response.iter_lines.return_value = [
            b'data: {"event": "token", "data": "Why don\'t scientists trust atoms?"}',
            b'data: {"event": "token", "data": "Because they make up everything!"}'
        ]
        mock_post.return_value.__enter__.return_value = mock_stream_response

        # Create a client instance
        client = Flowise()

        # Make a streaming request
        completion = client.create_prediction(
            PredictionData(
                chatflowId="abc",
                question="Tell me a joke!",
                streaming=True
            )
        )

        # Collect and verify the streamed chunks
        response = list(completion)
        self.assertEqual(response, [
            '{"event": "token", "data": "Why don\'t scientists trust atoms?"}',
            '{"event": "token", "data": "Because they make up everything!"}'
        ])
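
    # The test below is an added sketch, not part of the original suite. It
    # verifies that the Authorization header built from api_key (see
    # Flowise._get_headers) is attached to the prediction POST request.
    @patch('flowise.client.requests.post')
    @patch('flowise.client.requests.get')
    def test_api_key_sets_authorization_header(self, mock_get, mock_post):
        # Streaming check reports non-streaming; POST returns a simple answer
        mock_get.return_value.json.return_value = {"isStreaming": False}
        mock_post.return_value.json.return_value = {"answer": "ok"}

        # Create a client with an API key
        client = Flowise(base_url="http://localhost:3000", api_key="test-key")

        # Run a non-streaming prediction to trigger the POST call
        list(client.create_prediction(
            PredictionData(chatflowId="abc", question="ping", streaming=False)
        ))

        # The Bearer token should be present on the POST request headers
        _, kwargs = mock_post.call_args
        self.assertEqual(kwargs["headers"].get("Authorization"), "Bearer test-key")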
description 13 | long_description_content_type='text/markdown', # This specifies the format 14 | author="Henry Heng", 15 | author_email="support@flowiseai.com", 16 | url="https://github.com/FlowiseAI/FlowisePy", 17 | packages=find_packages(), 18 | install_requires=[ 19 | "requests>=2.25.1" 20 | ], 21 | python_requires='>=3.7', 22 | classifiers=[ 23 | "Programming Language :: Python :: 3", 24 | "License :: OSI Approved :: MIT License", 25 | "Operating System :: OS Independent", 26 | ], 27 | ) 28 | -------------------------------------------------------------------------------- /tests/test_flowise.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | from unittest.mock import patch, MagicMock 3 | from flowise import Flowise, PredictionData, IMessage, IFileUpload 4 | 5 | class TestFlowiseClient(unittest.TestCase): 6 | 7 | @patch('flowise.client.requests.post') 8 | @patch('flowise.client.requests.get') 9 | def test_create_prediction_non_streaming(self, mock_get, mock_post): 10 | # Mock the response for the streaming check (non-streaming scenario) 11 | mock_get.return_value.json.return_value = {"isStreaming": False} 12 | 13 | # Mock the non-streaming POST response 14 | mock_post.return_value.json.return_value = {"answer": "The capital of France is Paris."} 15 | 16 | # Create a client instance 17 | client = Flowise() 18 | 19 | # Make a non-streaming request 20 | completion = client.create_prediction( 21 | PredictionData( 22 | chatflowId="abc", 23 | question="What is the capital of France?", 24 | streaming=False 25 | ) 26 | ) 27 | 28 | # Verify the full JSON response 29 | response = list(completion) 30 | self.assertEqual(response[0], {"answer": "The capital of France is Paris."}) 31 | 32 | @patch('flowise.client.requests.post') 33 | @patch('flowise.client.requests.get') 34 | def test_create_prediction_streaming(self, mock_get, mock_post): 35 | # Mock the response for the streaming check (streaming is available) 36 | mock_get.return_value.json.return_value = {"isStreaming": True} 37 | 38 | # Mock the streaming POST response 39 | mock_post.return_value.iter_lines.return_value = [ 40 | b'data: {"event": "token", "data": "Why don\'t scientists trust atoms?"}', 41 | b'data: {"event": "token", "data": "Because they make up everything!"}' 42 | ] 43 | 44 | # Create a client instance 45 | client = Flowise() 46 | 47 | # Make a streaming request 48 | completion = client.create_prediction( 49 | PredictionData( 50 | chatflowId="abc", 51 | question="Tell me a joke!", 52 | streaming=True 53 | ) 54 | ) 55 | 56 | # Collect and verify the streamed chunks 57 | response = list(completion) 58 | self.assertEqual(response, [ 59 | '{"event": "token", "data": "Why don\'t scientists trust atoms?"}', 60 | '{"event": "token", "data": "Because they make up everything!"}' 61 | ]) 62 | 63 | if __name__ == '__main__': 64 | unittest.main() 65 | --------------------------------------------------------------------------------