├── .fernignore
├── .github
└── workflows
│ └── ci.yml
├── .gitignore
├── README.md
├── poetry.lock
├── pyproject.toml
├── reference.md
├── requirements.txt
├── src
└── scrapybara
│ ├── __init__.py
│ ├── anthropic
│ └── __init__.py
│ ├── base_client.py
│ ├── browser
│ ├── __init__.py
│ └── client.py
│ ├── client.py
│ ├── code
│ ├── __init__.py
│ └── client.py
│ ├── core
│ ├── __init__.py
│ ├── api_error.py
│ ├── client_wrapper.py
│ ├── datetime_utils.py
│ ├── file.py
│ ├── http_client.py
│ ├── jsonable_encoder.py
│ ├── pydantic_utilities.py
│ ├── query_encoder.py
│ ├── remove_none_from_dict.py
│ ├── request_options.py
│ └── serialization.py
│ ├── env
│ ├── __init__.py
│ └── client.py
│ ├── environment.py
│ ├── errors
│ ├── __init__.py
│ └── unprocessable_entity_error.py
│ ├── herd
│ └── __init__.py
│ ├── instance
│ ├── __init__.py
│ ├── client.py
│ └── types
│ │ ├── __init__.py
│ │ ├── command.py
│ │ └── request.py
│ ├── notebook
│ ├── __init__.py
│ └── client.py
│ ├── openai
│ └── __init__.py
│ ├── prompts
│ └── __init__.py
│ ├── py.typed
│ ├── tools
│ └── __init__.py
│ ├── types
│ ├── __init__.py
│ ├── act.py
│ ├── auth_state_response.py
│ ├── bash_response.py
│ ├── browser_authenticate_response.py
│ ├── browser_get_cdp_url_response.py
│ ├── browser_get_current_url_response.py
│ ├── button.py
│ ├── cell_type.py
│ ├── click_mouse_action.py
│ ├── click_mouse_action_click_type.py
│ ├── computer_response.py
│ ├── deployment_config_instance_type.py
│ ├── drag_mouse_action.py
│ ├── edit_response.py
│ ├── env_get_response.py
│ ├── env_response.py
│ ├── execute_cell_request.py
│ ├── file_response.py
│ ├── get_cursor_position_action.py
│ ├── get_instance_response.py
│ ├── get_instance_response_instance_type.py
│ ├── http_validation_error.py
│ ├── instance_get_stream_url_response.py
│ ├── instance_screenshot_response.py
│ ├── kernel_info.py
│ ├── modify_browser_auth_response.py
│ ├── move_mouse_action.py
│ ├── notebook.py
│ ├── notebook_cell.py
│ ├── press_key_action.py
│ ├── save_browser_auth_response.py
│ ├── scroll_action.py
│ ├── start_browser_response.py
│ ├── status.py
│ ├── stop_browser_response.py
│ ├── stop_instance_response.py
│ ├── take_screenshot_action.py
│ ├── tool.py
│ ├── type_text_action.py
│ ├── upload_response.py
│ ├── validation_error.py
│ ├── validation_error_loc_item.py
│ └── wait_action.py
│ └── version.py
└── tests
├── custom
└── test_client.py
└── utils
├── __init__.py
├── assets
└── models
│ ├── __init__.py
│ ├── circle.py
│ ├── color.py
│ ├── object_with_defaults.py
│ ├── object_with_optional_field.py
│ ├── shape.py
│ ├── square.py
│ └── undiscriminated_shape.py
├── test_http_client.py
├── test_query_encoding.py
└── test_serialization.py
/.fernignore:
--------------------------------------------------------------------------------
1 | # Specify files that shouldn't be modified by Fern
2 |
3 | src/scrapybara/client.py
4 | src/scrapybara/anthropic/
5 | src/scrapybara/herd/
6 | src/scrapybara/openai/
7 | src/scrapybara/prompts/
8 | src/scrapybara/tools/
9 | src/scrapybara/types/__init__.py
10 | src/scrapybara/types/act.py
11 | src/scrapybara/types/tool.py
12 | tests/custom/test_client.py
13 | .github/workflows/ci.yml
14 | README.md
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: ci
2 |
3 | on: [push]
4 | jobs:
5 | compile:
6 | runs-on: ubuntu-22.04
7 | steps:
8 | - name: Checkout repo
9 | uses: actions/checkout@v3
10 | - name: Set up python
11 | uses: actions/setup-python@v4
12 | with:
13 | python-version: 3.8
14 | - name: Bootstrap poetry
15 | run: |
16 | curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1
17 | - name: Install dependencies
18 | run: poetry install
19 | - name: Compile
20 | run: poetry run mypy .
21 | test:
22 | runs-on: ubuntu-22.04
23 | steps:
24 | - name: Checkout repo
25 | uses: actions/checkout@v3
26 | - name: Set up python
27 | uses: actions/setup-python@v4
28 | with:
29 | python-version: 3.8
30 | - name: Bootstrap poetry
31 | run: |
32 | curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1
33 | - name: Install dependencies
34 | run: poetry install
35 |
36 | - name: Test
37 | run: poetry run pytest -rP .
38 | env:
39 | SCRAPYBARA_API_KEY: ${{ secrets.SCRAPYBARA_API_KEY }}
40 |
41 | publish:
42 | needs: [compile, test]
43 | if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
44 | runs-on: ubuntu-22.04
45 | steps:
46 | - name: Checkout repo
47 | uses: actions/checkout@v3
48 | - name: Set up python
49 | uses: actions/setup-python@v4
50 | with:
51 | python-version: 3.8
52 | - name: Bootstrap poetry
53 | run: |
54 | curl -sSL https://install.python-poetry.org | python - -y --version 1.5.1
55 | - name: Install dependencies
56 | run: poetry install
57 | - name: Publish to pypi
58 | run: |
59 | poetry config pypi-token.pypi ${{ secrets.PYPI_TOKEN }}
60 | poetry --no-interaction -v publish --build
61 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | dist/
2 | .mypy_cache/
3 | __pycache__/
4 | poetry.toml
5 | .ruff_cache/
6 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Scrapybara Python Library
2 |
3 | [](https://buildwithfern.com?utm_source=github&utm_medium=github&utm_campaign=readme&utm_source=https%3A%2F%2Fgithub.com%2Fscrapybara%2Fscrapybara-python)
4 | [](https://pypi.python.org/pypi/scrapybara)
5 |
6 | The Scrapybara Python library provides convenient access to the Scrapybara API from Python.
7 |
8 | ## Installation
9 |
10 | ```sh
11 | pip install scrapybara
12 | ```
13 |
14 | ## Reference
15 |
16 | Please refer to [docs](https://docs.scrapybara.com) for more information.
17 |
18 | ## Usage
19 |
20 | Instantiate and use the client with the following:
21 |
22 | ```python
23 | from scrapybara import Scrapybara
24 |
25 | client = Scrapybara(
26 | api_key="YOUR_API_KEY",
27 | )
28 | client.start_ubuntu()
29 | ```
30 |
31 | ## Async Client
32 |
33 | The SDK also exports an `async` client so that you can make non-blocking calls to our API.
34 |
35 | ```python
36 | import asyncio
37 |
38 | from scrapybara import AsyncScrapybara
39 |
40 | client = AsyncScrapybara(
41 | api_key="YOUR_API_KEY",
42 | )
43 |
44 |
45 | async def main() -> None:
46 | await client.start_ubuntu()
47 |
48 |
49 | asyncio.run(main())
50 | ```
51 |
52 | ## Exception Handling
53 |
54 | When the API returns a non-success status code (4xx or 5xx response), a subclass of the following error
55 | will be thrown.
56 |
57 | ```python
58 | from scrapybara.core.api_error import ApiError
59 |
60 | try:
61 | client.start_ubuntu()
62 | except ApiError as e:
63 | print(e.status_code)
64 | print(e.body)
65 | ```
66 |
67 | ## Advanced
68 |
69 | ### Retries
70 |
71 | The SDK is instrumented with automatic retries with exponential backoff. A request will be retried as long
72 | as the request is deemed retriable and the number of retry attempts has not grown larger than the configured
73 | retry limit (default: 2).
74 |
75 | A request is deemed retriable when any of the following HTTP status codes is returned:
76 |
77 | - [408](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/408) (Timeout)
78 | - [429](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/429) (Too Many Requests)
79 | - [5XX](https://developer.mozilla.org/en-US/docs/Web/HTTP/Status/500) (Internal Server Errors)
80 |
81 | Use the `max_retries` request option to configure this behavior.
82 |
83 | ```python
84 | client.start_ubuntu(..., request_options={
85 | "max_retries": 1
86 | })
87 | ```
88 |
89 | ### Timeouts
90 |
91 | The SDK defaults to a 60 second timeout. You can configure this with a timeout option at the client or request level.
92 |
93 | ```python
94 |
95 | from scrapybara import Scrapybara
96 |
97 | client = Scrapybara(
98 | ...,
99 | timeout=20.0,
100 | )
101 |
102 |
103 | # Override timeout for a specific method
104 | client.start_ubuntu(..., request_options={
105 | "timeout_in_seconds": 1
106 | })
107 | ```
108 |
109 | ### Custom Client
110 |
111 | You can override the `httpx` client to customize it for your use-case. Some common use-cases include support for proxies
112 | and transports.
113 |
114 | ```python
115 | import httpx
116 | from scrapybara import Scrapybara
117 |
118 | client = Scrapybara(
119 | ...,
120 | httpx_client=httpx.Client(
121 | proxies="http://my.test.proxy.example.com",
122 | transport=httpx.HTTPTransport(local_address="0.0.0.0"),
123 | ),
124 | )
125 | ```
126 |
127 | ## Contributing
128 |
129 | While we value open-source contributions to this SDK, this library is generated programmatically.
130 | Additions made directly to this library would have to be moved over to our generation code,
131 | otherwise they would be overwritten upon the next generated release. Feel free to open a PR as
132 | a proof of concept, but know that we will not be able to merge it as-is. We suggest opening
133 | an issue first to discuss with us!
134 |
135 | On the other hand, contributions to the README are always very welcome!
136 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = "scrapybara"
3 |
4 | [tool.poetry]
5 | name = "scrapybara"
6 | version = "2.5.3"
7 | description = ""
8 | readme = "README.md"
9 | authors = []
10 | keywords = []
11 |
12 | classifiers = [
13 | "Intended Audience :: Developers",
14 | "Programming Language :: Python",
15 | "Programming Language :: Python :: 3",
16 | "Programming Language :: Python :: 3.8",
17 | "Programming Language :: Python :: 3.9",
18 | "Programming Language :: Python :: 3.10",
19 | "Programming Language :: Python :: 3.11",
20 | "Programming Language :: Python :: 3.12",
21 | "Operating System :: OS Independent",
22 | "Operating System :: POSIX",
23 | "Operating System :: MacOS",
24 | "Operating System :: POSIX :: Linux",
25 | "Operating System :: Microsoft :: Windows",
26 | "Topic :: Software Development :: Libraries :: Python Modules",
27 | "Typing :: Typed"
28 | ]
29 | packages = [
30 | { include = "scrapybara", from = "src"}
31 | ]
32 |
33 | [project.urls]
34 | Repository = 'https://github.com/scrapybara/scrapybara-python'
35 |
36 | [tool.poetry.dependencies]
37 | python = "^3.8"
38 | httpx = ">=0.21.2"
39 | pydantic = ">= 1.9.2"
40 | pydantic-core = "^2.18.2"
41 | typing_extensions = ">= 4.0.0"
42 |
43 | [tool.poetry.dev-dependencies]
44 | mypy = "1.0.1"
45 | pytest = "^7.4.0"
46 | pytest-asyncio = "^0.23.5"
47 | python-dateutil = "^2.9.0"
48 | types-python-dateutil = "^2.9.0.20240316"
49 | ruff = "^0.5.6"
50 |
51 | [tool.pytest.ini_options]
52 | testpaths = [ "tests" ]
53 | asyncio_mode = "auto"
54 |
55 | [tool.mypy]
56 | plugins = ["pydantic.mypy"]
57 |
58 | [tool.ruff]
59 | line-length = 120
60 |
61 |
62 | [build-system]
63 | requires = ["poetry-core"]
64 | build-backend = "poetry.core.masonry.api"
65 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | httpx>=0.21.2
2 | pydantic>= 1.9.2
3 | pydantic-core==^2.18.2
4 | typing_extensions>= 4.0.0
5 |
--------------------------------------------------------------------------------
/src/scrapybara/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from .types import (
4 | AuthStateResponse,
5 | BashResponse,
6 | BrowserAuthenticateResponse,
7 | BrowserGetCdpUrlResponse,
8 | BrowserGetCurrentUrlResponse,
9 | Button,
10 | CellType,
11 | ClickMouseAction,
12 | ClickMouseActionClickType,
13 | ComputerResponse,
14 | DeploymentConfigInstanceType,
15 | DragMouseAction,
16 | EditResponse,
17 | EnvGetResponse,
18 | EnvResponse,
19 | ExecuteCellRequest,
20 | FileResponse,
21 | GetCursorPositionAction,
22 | GetInstanceResponse,
23 | GetInstanceResponseInstanceType,
24 | HttpValidationError,
25 | InstanceGetStreamUrlResponse,
26 | InstanceScreenshotResponse,
27 | KernelInfo,
28 | ModifyBrowserAuthResponse,
29 | MoveMouseAction,
30 | Notebook,
31 | NotebookCell,
32 | PressKeyAction,
33 | SaveBrowserAuthResponse,
34 | ScrollAction,
35 | StartBrowserResponse,
36 | Status,
37 | StopBrowserResponse,
38 | StopInstanceResponse,
39 | TakeScreenshotAction,
40 | TypeTextAction,
41 | UploadResponse,
42 | ValidationError,
43 | ValidationErrorLocItem,
44 | WaitAction,
45 | )
46 | from .errors import UnprocessableEntityError
47 | from . import browser, code, env, instance, notebook
48 | from .client import AsyncScrapybara, Scrapybara
49 | from .environment import ScrapybaraEnvironment
50 | from .instance import (
51 | Command,
52 | Request,
53 | Request_ClickMouse,
54 | Request_DragMouse,
55 | Request_GetCursorPosition,
56 | Request_MoveMouse,
57 | Request_PressKey,
58 | Request_Scroll,
59 | Request_TakeScreenshot,
60 | Request_TypeText,
61 | Request_Wait,
62 | )
63 | from .version import __version__
64 |
65 | __all__ = [
66 | "AsyncScrapybara",
67 | "AuthStateResponse",
68 | "BashResponse",
69 | "BrowserAuthenticateResponse",
70 | "BrowserGetCdpUrlResponse",
71 | "BrowserGetCurrentUrlResponse",
72 | "Button",
73 | "CellType",
74 | "ClickMouseAction",
75 | "ClickMouseActionClickType",
76 | "Command",
77 | "ComputerResponse",
78 | "DeploymentConfigInstanceType",
79 | "DragMouseAction",
80 | "EditResponse",
81 | "EnvGetResponse",
82 | "EnvResponse",
83 | "ExecuteCellRequest",
84 | "FileResponse",
85 | "GetCursorPositionAction",
86 | "GetInstanceResponse",
87 | "GetInstanceResponseInstanceType",
88 | "HttpValidationError",
89 | "InstanceGetStreamUrlResponse",
90 | "InstanceScreenshotResponse",
91 | "KernelInfo",
92 | "ModifyBrowserAuthResponse",
93 | "MoveMouseAction",
94 | "Notebook",
95 | "NotebookCell",
96 | "PressKeyAction",
97 | "Request",
98 | "Request_ClickMouse",
99 | "Request_DragMouse",
100 | "Request_GetCursorPosition",
101 | "Request_MoveMouse",
102 | "Request_PressKey",
103 | "Request_Scroll",
104 | "Request_TakeScreenshot",
105 | "Request_TypeText",
106 | "Request_Wait",
107 | "SaveBrowserAuthResponse",
108 | "Scrapybara",
109 | "ScrapybaraEnvironment",
110 | "ScrollAction",
111 | "StartBrowserResponse",
112 | "Status",
113 | "StopBrowserResponse",
114 | "StopInstanceResponse",
115 | "TakeScreenshotAction",
116 | "TypeTextAction",
117 | "UnprocessableEntityError",
118 | "UploadResponse",
119 | "ValidationError",
120 | "ValidationErrorLocItem",
121 | "WaitAction",
122 | "__version__",
123 | "browser",
124 | "code",
125 | "env",
126 | "instance",
127 | "notebook",
128 | ]
129 |
--------------------------------------------------------------------------------
/src/scrapybara/anthropic/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Literal, Optional
2 |
3 | from pydantic import Field
4 |
5 | from ..types.act import Model
6 | from datetime import datetime
7 |
8 |
9 | class Anthropic(Model):
10 | """Model adapter for Anthropic.
11 |
12 | Supported models:
13 | - claude-3-7-sonnet-20250219 (1x agent credit if no api_key)
14 | - claude-3-7-sonnet-20250219-thinking (1x agent credit if no api_key)
15 | - claude-3-5-sonnet-20241022 (1x agent credit if no api_key)
16 |
17 | Args:
18 | name: Anthropic model name, defaults to "claude-3-7-sonnet-20250219"
19 | api_key: Optional Anthropic API key
20 |
21 | Returns:
22 | A Model configuration object
23 | """
24 |
25 | provider: Literal["anthropic"] = Field(default="anthropic", frozen=True)
26 |
27 | def __init__(
28 | self,
29 | name: Optional[str] = "claude-3-7-sonnet-20250219",
30 | api_key: Optional[str] = None,
31 | ) -> None:
32 | super().__init__(provider="anthropic", name=name, api_key=api_key)
33 |
34 |
35 | UBUNTU_SYSTEM_PROMPT = f"""
36 | * You have access to an Ubuntu VM with internet connectivity
37 | * You can install Ubuntu applications using the bash tool (use curl over wget)
38 | * To run GUI applications with the bash tool, use a subshell, e.g. "(DISPLAY=:1 xterm &)", make sure to include the parantheses
39 | * GUI apps will appear but may take time to load - confirm with an extra screenshot
40 | * Chromium is the default browser
41 | * Start Chromium via the bash tool "(DISPLAY=:1 chromium &)", but interact with it visually via the computer tool
42 | * If you need to read a HTML file:
43 | - Open with the address bar in Chromium
44 | * For commands with large text output:
45 | - Redirect to a temp file
46 | - Use str_replace_editor or grep with context (-B and -A flags) to view output
47 | * When viewing pages:
48 | - Zoom out to see full content, or
49 | - Scroll to ensure you see everything
50 | * When interacting with a field, always clear the field first using "ctrl+A" and "delete"
51 | - Take an extra screenshot after clicking "enter" to confirm the field is properly submitted and move the mouse to the next field
52 | * Computer function calls take time, string together calls when possible
53 | * You are allowed to take actions on behalf of the user on sites that are authenticated
54 | * If the user asks you to access a site, assume that the user has already authenticated
55 | * To login additional sites, ask the user to use Auth Contexts or the Interactive Desktop
56 | * If first screenshot shows black screen:
57 | - Click mouse in screen center
58 | - Take another screenshot
59 | * Today's date is {datetime.today().strftime('%A, %B %d, %Y')}
60 |
61 |
62 |
63 | * If given a complex task, break down into smaller steps and ask the user for details only if necessary
64 | * Read through web pages thoroughly by scrolling down till you have gathered enough info
65 | * Be concise!
66 | """
67 | """Recommended Anthropic system prompt for Ubuntu instances"""
68 |
69 |
70 | BROWSER_SYSTEM_PROMPT = f"""
71 | * You have access to a Chromium VM with internet connectivity
72 | * Chromium should already be open and running
73 | * You can interact with web pages using the computer tool
74 | * When viewing pages:
75 | - Zoom out to see full content, or
76 | - Scroll to ensure you see everything
77 | * When interacting with a field, always clear the field first using "ctrl+A" and "delete"
78 | - Take an extra screenshot after clicking "enter" to confirm the field is properly submitted and move the mouse to the next field
79 | * Computer function calls take time, string together calls when possible
80 | * You are allowed to take actions on behalf of the user on sites that are authenticated
81 | * If the user asks you to access a site, assume that the user has already authenticated
82 | * To login additional sites, ask the user to use Auth Contexts
83 | * If first screenshot shows black screen:
84 | - Click mouse in screen center
85 | - Take another screenshot
86 | * Today's date is {datetime.today().strftime('%A, %B %d, %Y')}
87 |
88 |
89 |
90 | * If given a complex task, break down into smaller steps and ask the user for details only if necessary
91 | * Read through web pages thoroughly by scrolling down till you have gathered enough info
92 | * Be concise!
93 | """
94 | """Recommended Anthropic system prompt for Browser instances"""
95 |
96 |
97 | WINDOWS_SYSTEM_PROMPT = f"""
98 | * You wave access to a Windows VM with internet connectivity
99 | * You can interact with the Windows desktop using the computer tool
100 | * GUI apps will appear but may take time to load - confirm with an extra screenshot
101 | * Edge is the default browser
102 | * When viewing pages:
103 | - Zoom out to see full content, or
104 | - Scroll to ensure you see everything
105 | * When interacting with a field, always clear the field first using "ctrl+A" and "delete"
106 | - Take an extra screenshot after clicking "enter" to confirm the field is properly submitted and move the mouse to the next field
107 | * Computer function calls take time, string together calls when possible
108 | * You are allowed to take actions on behalf of the user on sites that are authenticated
109 | * If the user asks you to access a site, assume that the user has already authenticated
110 | * To login additional sites, ask the user to use Auth Contexts or the Interactive Desktop
111 | * If first screenshot shows black screen:
112 | - Click mouse in screen center
113 | - Take another screenshot
114 | * Today's date is {datetime.today().strftime('%A, %B %d, %Y')}
115 |
116 |
117 |
118 | * If given a complex task, break down into smaller steps and ask the user for details only if necessary
119 | * Read through web pages thoroughly by scrolling down till you have gathered enough info
120 | * Be concise!
121 | """
122 | """Recommended Anthropic system prompt for Windows instances"""
123 |
124 | STRUCTURED_OUTPUT_SECTION = """
125 | * When you have completed your task and are ready to provide the final result to the user, use the 'structured_output' tool
126 | * This tool allows you to output structured data according to the provided schema
127 | * Ensure that your output matches the expected schema by providing the correct fields and data types
128 | * The output from this tool will be passed directly back to the user as the final result
129 | * Do not present the final result in plain text; always use the 'structured_output' tool for the final output
130 | """
131 | """Section to add to system prompt when structured output is being used"""
--------------------------------------------------------------------------------
/src/scrapybara/base_client.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 | from .environment import ScrapybaraEnvironment
5 | import os
6 | import httpx
7 | from .core.api_error import ApiError
8 | from .core.client_wrapper import SyncClientWrapper
9 | from .instance.client import InstanceClient
10 | from .browser.client import BrowserClient
11 | from .code.client import CodeClient
12 | from .notebook.client import NotebookClient
13 | from .env.client import EnvClient
14 | from .types.deployment_config_instance_type import DeploymentConfigInstanceType
15 | from .core.request_options import RequestOptions
16 | from .types.get_instance_response import GetInstanceResponse
17 | from .core.pydantic_utilities import parse_obj_as
18 | from .errors.unprocessable_entity_error import UnprocessableEntityError
19 | from .types.http_validation_error import HttpValidationError
20 | from json.decoder import JSONDecodeError
21 | from .core.jsonable_encoder import jsonable_encoder
22 | from .types.auth_state_response import AuthStateResponse
23 | from .core.client_wrapper import AsyncClientWrapper
24 | from .instance.client import AsyncInstanceClient
25 | from .browser.client import AsyncBrowserClient
26 | from .code.client import AsyncCodeClient
27 | from .notebook.client import AsyncNotebookClient
28 | from .env.client import AsyncEnvClient
29 |
30 | # this is used as the default value for optional parameters
31 | OMIT = typing.cast(typing.Any, ...)
32 |
33 |
34 | class BaseClient:
35 | """
36 | Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions.
37 |
38 | Parameters
39 | ----------
40 | base_url : typing.Optional[str]
41 | The base url to use for requests from the client.
42 |
43 | environment : ScrapybaraEnvironment
44 | The environment to use for requests from the client. from .environment import ScrapybaraEnvironment
45 |
46 |
47 |
48 | Defaults to ScrapybaraEnvironment.PRODUCTION
49 |
50 |
51 |
52 | api_key : typing.Optional[str]
53 | timeout : typing.Optional[float]
54 | The timeout to be used, in seconds, for requests. By default the timeout is 600 seconds, unless a custom httpx client is used, in which case this default is not enforced.
55 |
56 | follow_redirects : typing.Optional[bool]
57 | Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in.
58 |
59 | httpx_client : typing.Optional[httpx.Client]
60 | The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration.
61 |
62 | Examples
63 | --------
64 | from scrapybara import Scrapybara
65 |
66 | client = Scrapybara(
67 | api_key="YOUR_API_KEY",
68 | )
69 | """
70 |
71 | def __init__(
72 | self,
73 | *,
74 | base_url: typing.Optional[str] = None,
75 | environment: ScrapybaraEnvironment = ScrapybaraEnvironment.PRODUCTION,
76 | api_key: typing.Optional[str] = os.getenv("SCRAPYBARA_API_KEY"),
77 | timeout: typing.Optional[float] = None,
78 | follow_redirects: typing.Optional[bool] = True,
79 | httpx_client: typing.Optional[httpx.Client] = None,
80 | ):
81 | _defaulted_timeout = timeout if timeout is not None else 600 if httpx_client is None else None
82 | if api_key is None:
83 | raise ApiError(
84 | body="The client must be instantiated be either passing in api_key or setting SCRAPYBARA_API_KEY"
85 | )
86 | self._client_wrapper = SyncClientWrapper(
87 | base_url=_get_base_url(base_url=base_url, environment=environment),
88 | api_key=api_key,
89 | httpx_client=httpx_client
90 | if httpx_client is not None
91 | else httpx.Client(timeout=_defaulted_timeout, follow_redirects=follow_redirects)
92 | if follow_redirects is not None
93 | else httpx.Client(timeout=_defaulted_timeout),
94 | timeout=_defaulted_timeout,
95 | )
96 | self.instance = InstanceClient(client_wrapper=self._client_wrapper)
97 | self.browser = BrowserClient(client_wrapper=self._client_wrapper)
98 | self.code = CodeClient(client_wrapper=self._client_wrapper)
99 | self.notebook = NotebookClient(client_wrapper=self._client_wrapper)
100 | self.env = EnvClient(client_wrapper=self._client_wrapper)
101 |
102 | def start(
103 | self,
104 | *,
105 | instance_type: typing.Optional[DeploymentConfigInstanceType] = OMIT,
106 | timeout_hours: typing.Optional[float] = OMIT,
107 | blocked_domains: typing.Optional[typing.Sequence[str]] = OMIT,
108 | resolution: typing.Optional[typing.Sequence[int]] = OMIT,
109 | request_options: typing.Optional[RequestOptions] = None,
110 | ) -> GetInstanceResponse:
111 | """
112 | Parameters
113 | ----------
114 | instance_type : typing.Optional[DeploymentConfigInstanceType]
115 |
116 | timeout_hours : typing.Optional[float]
117 |
118 | blocked_domains : typing.Optional[typing.Sequence[str]]
119 |
120 | resolution : typing.Optional[typing.Sequence[int]]
121 |
122 | request_options : typing.Optional[RequestOptions]
123 | Request-specific configuration.
124 |
125 | Returns
126 | -------
127 | GetInstanceResponse
128 | Successful Response
129 |
130 | Examples
131 | --------
132 | from scrapybara import Scrapybara
133 |
134 | client = Scrapybara(
135 | api_key="YOUR_API_KEY",
136 | )
137 | client.start()
138 | """
139 | _response = self._client_wrapper.httpx_client.request(
140 | "v1/start",
141 | method="POST",
142 | json={
143 | "instance_type": instance_type,
144 | "timeout_hours": timeout_hours,
145 | "blocked_domains": blocked_domains,
146 | "resolution": resolution,
147 | },
148 | headers={
149 | "content-type": "application/json",
150 | },
151 | request_options=request_options,
152 | omit=OMIT,
153 | )
154 | try:
155 | if 200 <= _response.status_code < 300:
156 | return typing.cast(
157 | GetInstanceResponse,
158 | parse_obj_as(
159 | type_=GetInstanceResponse, # type: ignore
160 | object_=_response.json(),
161 | ),
162 | )
163 | if _response.status_code == 422:
164 | raise UnprocessableEntityError(
165 | typing.cast(
166 | HttpValidationError,
167 | parse_obj_as(
168 | type_=HttpValidationError, # type: ignore
169 | object_=_response.json(),
170 | ),
171 | )
172 | )
173 | _response_json = _response.json()
174 | except JSONDecodeError:
175 | raise ApiError(status_code=_response.status_code, body=_response.text)
176 | raise ApiError(status_code=_response.status_code, body=_response_json)
177 |
178 | def get(self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> GetInstanceResponse:
179 | """
180 | Parameters
181 | ----------
182 | instance_id : str
183 |
184 | request_options : typing.Optional[RequestOptions]
185 | Request-specific configuration.
186 |
187 | Returns
188 | -------
189 | GetInstanceResponse
190 | Successful Response
191 |
192 | Examples
193 | --------
194 | from scrapybara import Scrapybara
195 |
196 | client = Scrapybara(
197 | api_key="YOUR_API_KEY",
198 | )
199 | client.get(
200 | instance_id="instance_id",
201 | )
202 | """
203 | _response = self._client_wrapper.httpx_client.request(
204 | f"v1/instance/{jsonable_encoder(instance_id)}",
205 | method="GET",
206 | request_options=request_options,
207 | )
208 | try:
209 | if 200 <= _response.status_code < 300:
210 | return typing.cast(
211 | GetInstanceResponse,
212 | parse_obj_as(
213 | type_=GetInstanceResponse, # type: ignore
214 | object_=_response.json(),
215 | ),
216 | )
217 | if _response.status_code == 422:
218 | raise UnprocessableEntityError(
219 | typing.cast(
220 | HttpValidationError,
221 | parse_obj_as(
222 | type_=HttpValidationError, # type: ignore
223 | object_=_response.json(),
224 | ),
225 | )
226 | )
227 | _response_json = _response.json()
228 | except JSONDecodeError:
229 | raise ApiError(status_code=_response.status_code, body=_response.text)
230 | raise ApiError(status_code=_response.status_code, body=_response_json)
231 |
232 | def get_instances(
233 | self, *, request_options: typing.Optional[RequestOptions] = None
234 | ) -> typing.List[GetInstanceResponse]:
235 | """
236 | Parameters
237 | ----------
238 | request_options : typing.Optional[RequestOptions]
239 | Request-specific configuration.
240 |
241 | Returns
242 | -------
243 | typing.List[GetInstanceResponse]
244 | Successful Response
245 |
246 | Examples
247 | --------
248 | from scrapybara import Scrapybara
249 |
250 | client = Scrapybara(
251 | api_key="YOUR_API_KEY",
252 | )
253 | client.get_instances()
254 | """
255 | _response = self._client_wrapper.httpx_client.request(
256 | "v1/instances",
257 | method="GET",
258 | request_options=request_options,
259 | )
260 | try:
261 | if 200 <= _response.status_code < 300:
262 | return typing.cast(
263 | typing.List[GetInstanceResponse],
264 | parse_obj_as(
265 | type_=typing.List[GetInstanceResponse], # type: ignore
266 | object_=_response.json(),
267 | ),
268 | )
269 | _response_json = _response.json()
270 | except JSONDecodeError:
271 | raise ApiError(status_code=_response.status_code, body=_response.text)
272 | raise ApiError(status_code=_response.status_code, body=_response_json)
273 |
274 | def get_auth_states(
275 | self, *, request_options: typing.Optional[RequestOptions] = None
276 | ) -> typing.List[AuthStateResponse]:
277 | """
278 | Parameters
279 | ----------
280 | request_options : typing.Optional[RequestOptions]
281 | Request-specific configuration.
282 |
283 | Returns
284 | -------
285 | typing.List[AuthStateResponse]
286 | Successful Response
287 |
288 | Examples
289 | --------
290 | from scrapybara import Scrapybara
291 |
292 | client = Scrapybara(
293 | api_key="YOUR_API_KEY",
294 | )
295 | client.get_auth_states()
296 | """
297 | _response = self._client_wrapper.httpx_client.request(
298 | "v1/auth_states",
299 | method="GET",
300 | request_options=request_options,
301 | )
302 | try:
303 | if 200 <= _response.status_code < 300:
304 | return typing.cast(
305 | typing.List[AuthStateResponse],
306 | parse_obj_as(
307 | type_=typing.List[AuthStateResponse], # type: ignore
308 | object_=_response.json(),
309 | ),
310 | )
311 | _response_json = _response.json()
312 | except JSONDecodeError:
313 | raise ApiError(status_code=_response.status_code, body=_response.text)
314 | raise ApiError(status_code=_response.status_code, body=_response_json)
315 |
316 |
317 | class AsyncBaseClient:
318 | """
319 | Use this class to access the different functions within the SDK. You can instantiate any number of clients with different configuration that will propagate to these functions.
320 |
321 | Parameters
322 | ----------
323 | base_url : typing.Optional[str]
324 | The base url to use for requests from the client.
325 |
326 | environment : ScrapybaraEnvironment
327 | The environment to use for requests from the client. from .environment import ScrapybaraEnvironment
328 |
329 |
330 |
331 | Defaults to ScrapybaraEnvironment.PRODUCTION
332 |
333 |
334 |
335 | api_key : typing.Optional[str]
336 | timeout : typing.Optional[float]
337 | The timeout to be used, in seconds, for requests. By default the timeout is 600 seconds, unless a custom httpx client is used, in which case this default is not enforced.
338 |
339 | follow_redirects : typing.Optional[bool]
340 | Whether the default httpx client follows redirects or not, this is irrelevant if a custom httpx client is passed in.
341 |
342 | httpx_client : typing.Optional[httpx.AsyncClient]
343 | The httpx client to use for making requests, a preconfigured client is used by default, however this is useful should you want to pass in any custom httpx configuration.
344 |
345 | Examples
346 | --------
347 | from scrapybara import AsyncScrapybara
348 |
349 | client = AsyncScrapybara(
350 | api_key="YOUR_API_KEY",
351 | )
352 | """
353 |
354 | def __init__(
355 | self,
356 | *,
357 | base_url: typing.Optional[str] = None,
358 | environment: ScrapybaraEnvironment = ScrapybaraEnvironment.PRODUCTION,
359 | api_key: typing.Optional[str] = os.getenv("SCRAPYBARA_API_KEY"),
360 | timeout: typing.Optional[float] = None,
361 | follow_redirects: typing.Optional[bool] = True,
362 | httpx_client: typing.Optional[httpx.AsyncClient] = None,
363 | ):
364 | _defaulted_timeout = timeout if timeout is not None else 600 if httpx_client is None else None
365 | if api_key is None:
366 | raise ApiError(
367 | body="The client must be instantiated be either passing in api_key or setting SCRAPYBARA_API_KEY"
368 | )
369 | self._client_wrapper = AsyncClientWrapper(
370 | base_url=_get_base_url(base_url=base_url, environment=environment),
371 | api_key=api_key,
372 | httpx_client=httpx_client
373 | if httpx_client is not None
374 | else httpx.AsyncClient(timeout=_defaulted_timeout, follow_redirects=follow_redirects)
375 | if follow_redirects is not None
376 | else httpx.AsyncClient(timeout=_defaulted_timeout),
377 | timeout=_defaulted_timeout,
378 | )
379 | self.instance = AsyncInstanceClient(client_wrapper=self._client_wrapper)
380 | self.browser = AsyncBrowserClient(client_wrapper=self._client_wrapper)
381 | self.code = AsyncCodeClient(client_wrapper=self._client_wrapper)
382 | self.notebook = AsyncNotebookClient(client_wrapper=self._client_wrapper)
383 | self.env = AsyncEnvClient(client_wrapper=self._client_wrapper)
384 |
385 | async def start(
386 | self,
387 | *,
388 | instance_type: typing.Optional[DeploymentConfigInstanceType] = OMIT,
389 | timeout_hours: typing.Optional[float] = OMIT,
390 | blocked_domains: typing.Optional[typing.Sequence[str]] = OMIT,
391 | resolution: typing.Optional[typing.Sequence[int]] = OMIT,
392 | request_options: typing.Optional[RequestOptions] = None,
393 | ) -> GetInstanceResponse:
394 | """
395 | Parameters
396 | ----------
397 | instance_type : typing.Optional[DeploymentConfigInstanceType]
398 |
399 | timeout_hours : typing.Optional[float]
400 |
401 | blocked_domains : typing.Optional[typing.Sequence[str]]
402 |
403 | resolution : typing.Optional[typing.Sequence[int]]
404 |
405 | request_options : typing.Optional[RequestOptions]
406 | Request-specific configuration.
407 |
408 | Returns
409 | -------
410 | GetInstanceResponse
411 | Successful Response
412 |
413 | Examples
414 | --------
415 | import asyncio
416 |
417 | from scrapybara import AsyncScrapybara
418 |
419 | client = AsyncScrapybara(
420 | api_key="YOUR_API_KEY",
421 | )
422 |
423 |
424 | async def main() -> None:
425 | await client.start()
426 |
427 |
428 | asyncio.run(main())
429 | """
430 | _response = await self._client_wrapper.httpx_client.request(
431 | "v1/start",
432 | method="POST",
433 | json={
434 | "instance_type": instance_type,
435 | "timeout_hours": timeout_hours,
436 | "blocked_domains": blocked_domains,
437 | "resolution": resolution,
438 | },
439 | headers={
440 | "content-type": "application/json",
441 | },
442 | request_options=request_options,
443 | omit=OMIT,
444 | )
445 | try:
446 | if 200 <= _response.status_code < 300:
447 | return typing.cast(
448 | GetInstanceResponse,
449 | parse_obj_as(
450 | type_=GetInstanceResponse, # type: ignore
451 | object_=_response.json(),
452 | ),
453 | )
454 | if _response.status_code == 422:
455 | raise UnprocessableEntityError(
456 | typing.cast(
457 | HttpValidationError,
458 | parse_obj_as(
459 | type_=HttpValidationError, # type: ignore
460 | object_=_response.json(),
461 | ),
462 | )
463 | )
464 | _response_json = _response.json()
465 | except JSONDecodeError:
466 | raise ApiError(status_code=_response.status_code, body=_response.text)
467 | raise ApiError(status_code=_response.status_code, body=_response_json)
468 |
469 | async def get(
470 | self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None
471 | ) -> GetInstanceResponse:
472 | """
473 | Parameters
474 | ----------
475 | instance_id : str
476 |
477 | request_options : typing.Optional[RequestOptions]
478 | Request-specific configuration.
479 |
480 | Returns
481 | -------
482 | GetInstanceResponse
483 | Successful Response
484 |
485 | Examples
486 | --------
487 | import asyncio
488 |
489 | from scrapybara import AsyncScrapybara
490 |
491 | client = AsyncScrapybara(
492 | api_key="YOUR_API_KEY",
493 | )
494 |
495 |
496 | async def main() -> None:
497 | await client.get(
498 | instance_id="instance_id",
499 | )
500 |
501 |
502 | asyncio.run(main())
503 | """
504 | _response = await self._client_wrapper.httpx_client.request(
505 | f"v1/instance/{jsonable_encoder(instance_id)}",
506 | method="GET",
507 | request_options=request_options,
508 | )
509 | try:
510 | if 200 <= _response.status_code < 300:
511 | return typing.cast(
512 | GetInstanceResponse,
513 | parse_obj_as(
514 | type_=GetInstanceResponse, # type: ignore
515 | object_=_response.json(),
516 | ),
517 | )
518 | if _response.status_code == 422:
519 | raise UnprocessableEntityError(
520 | typing.cast(
521 | HttpValidationError,
522 | parse_obj_as(
523 | type_=HttpValidationError, # type: ignore
524 | object_=_response.json(),
525 | ),
526 | )
527 | )
528 | _response_json = _response.json()
529 | except JSONDecodeError:
530 | raise ApiError(status_code=_response.status_code, body=_response.text)
531 | raise ApiError(status_code=_response.status_code, body=_response_json)
532 |
533 | async def get_instances(
534 | self, *, request_options: typing.Optional[RequestOptions] = None
535 | ) -> typing.List[GetInstanceResponse]:
536 | """
537 | Parameters
538 | ----------
539 | request_options : typing.Optional[RequestOptions]
540 | Request-specific configuration.
541 |
542 | Returns
543 | -------
544 | typing.List[GetInstanceResponse]
545 | Successful Response
546 |
547 | Examples
548 | --------
549 | import asyncio
550 |
551 | from scrapybara import AsyncScrapybara
552 |
553 | client = AsyncScrapybara(
554 | api_key="YOUR_API_KEY",
555 | )
556 |
557 |
558 | async def main() -> None:
559 | await client.get_instances()
560 |
561 |
562 | asyncio.run(main())
563 | """
564 | _response = await self._client_wrapper.httpx_client.request(
565 | "v1/instances",
566 | method="GET",
567 | request_options=request_options,
568 | )
569 | try:
570 | if 200 <= _response.status_code < 300:
571 | return typing.cast(
572 | typing.List[GetInstanceResponse],
573 | parse_obj_as(
574 | type_=typing.List[GetInstanceResponse], # type: ignore
575 | object_=_response.json(),
576 | ),
577 | )
578 | _response_json = _response.json()
579 | except JSONDecodeError:
580 | raise ApiError(status_code=_response.status_code, body=_response.text)
581 | raise ApiError(status_code=_response.status_code, body=_response_json)
582 |
583 | async def get_auth_states(
584 | self, *, request_options: typing.Optional[RequestOptions] = None
585 | ) -> typing.List[AuthStateResponse]:
586 | """
587 | Parameters
588 | ----------
589 | request_options : typing.Optional[RequestOptions]
590 | Request-specific configuration.
591 |
592 | Returns
593 | -------
594 | typing.List[AuthStateResponse]
595 | Successful Response
596 |
597 | Examples
598 | --------
599 | import asyncio
600 |
601 | from scrapybara import AsyncScrapybara
602 |
603 | client = AsyncScrapybara(
604 | api_key="YOUR_API_KEY",
605 | )
606 |
607 |
608 | async def main() -> None:
609 | await client.get_auth_states()
610 |
611 |
612 | asyncio.run(main())
613 | """
614 | _response = await self._client_wrapper.httpx_client.request(
615 | "v1/auth_states",
616 | method="GET",
617 | request_options=request_options,
618 | )
619 | try:
620 | if 200 <= _response.status_code < 300:
621 | return typing.cast(
622 | typing.List[AuthStateResponse],
623 | parse_obj_as(
624 | type_=typing.List[AuthStateResponse], # type: ignore
625 | object_=_response.json(),
626 | ),
627 | )
628 | _response_json = _response.json()
629 | except JSONDecodeError:
630 | raise ApiError(status_code=_response.status_code, body=_response.text)
631 | raise ApiError(status_code=_response.status_code, body=_response_json)
632 |
633 |
634 | def _get_base_url(*, base_url: typing.Optional[str] = None, environment: ScrapybaraEnvironment) -> str:
635 | if base_url is not None:
636 | return base_url
637 | elif environment is not None:
638 | return environment.value
639 | else:
640 | raise Exception("Please pass in either base_url or environment to construct the client")
641 |
--------------------------------------------------------------------------------
/src/scrapybara/browser/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 |
--------------------------------------------------------------------------------
/src/scrapybara/code/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 |
--------------------------------------------------------------------------------
/src/scrapybara/code/client.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 | from ..core.client_wrapper import SyncClientWrapper
5 | from ..core.request_options import RequestOptions
6 | from ..core.jsonable_encoder import jsonable_encoder
7 | from ..core.pydantic_utilities import parse_obj_as
8 | from ..errors.unprocessable_entity_error import UnprocessableEntityError
9 | from ..types.http_validation_error import HttpValidationError
10 | from json.decoder import JSONDecodeError
11 | from ..core.api_error import ApiError
12 | from ..core.client_wrapper import AsyncClientWrapper
13 |
14 | # this is used as the default value for optional parameters
15 | OMIT = typing.cast(typing.Any, ...)
16 |
17 |
18 | class CodeClient:
19 | def __init__(self, *, client_wrapper: SyncClientWrapper):
20 | self._client_wrapper = client_wrapper
21 |
22 | def execute(
23 | self,
24 | instance_id: str,
25 | *,
26 | code: str,
27 | kernel_name: typing.Optional[str] = OMIT,
28 | timeout: typing.Optional[int] = OMIT,
29 | request_options: typing.Optional[RequestOptions] = None,
30 | ) -> typing.Optional[typing.Any]:
31 | """
32 | Parameters
33 | ----------
34 | instance_id : str
35 |
36 | code : str
37 |
38 | kernel_name : typing.Optional[str]
39 |
40 | timeout : typing.Optional[int]
41 |
42 | request_options : typing.Optional[RequestOptions]
43 | Request-specific configuration.
44 |
45 | Returns
46 | -------
47 | typing.Optional[typing.Any]
48 | Successful Response
49 |
50 | Examples
51 | --------
52 | from scrapybara import Scrapybara
53 |
54 | client = Scrapybara(
55 | api_key="YOUR_API_KEY",
56 | )
57 | client.code.execute(
58 | instance_id="instance_id",
59 | code="code",
60 | )
61 | """
62 | _response = self._client_wrapper.httpx_client.request(
63 | f"v1/instance/{jsonable_encoder(instance_id)}/code/execute",
64 | method="POST",
65 | json={
66 | "code": code,
67 | "kernel_name": kernel_name,
68 | "timeout": timeout,
69 | },
70 | headers={
71 | "content-type": "application/json",
72 | },
73 | request_options=request_options,
74 | omit=OMIT,
75 | )
76 | try:
77 | if 200 <= _response.status_code < 300:
78 | return typing.cast(
79 | typing.Optional[typing.Any],
80 | parse_obj_as(
81 | type_=typing.Optional[typing.Any], # type: ignore
82 | object_=_response.json(),
83 | ),
84 | )
85 | if _response.status_code == 422:
86 | raise UnprocessableEntityError(
87 | typing.cast(
88 | HttpValidationError,
89 | parse_obj_as(
90 | type_=HttpValidationError, # type: ignore
91 | object_=_response.json(),
92 | ),
93 | )
94 | )
95 | _response_json = _response.json()
96 | except JSONDecodeError:
97 | raise ApiError(status_code=_response.status_code, body=_response.text)
98 | raise ApiError(status_code=_response.status_code, body=_response_json)
99 |
100 |
101 | class AsyncCodeClient:
102 | def __init__(self, *, client_wrapper: AsyncClientWrapper):
103 | self._client_wrapper = client_wrapper
104 |
105 | async def execute(
106 | self,
107 | instance_id: str,
108 | *,
109 | code: str,
110 | kernel_name: typing.Optional[str] = OMIT,
111 | timeout: typing.Optional[int] = OMIT,
112 | request_options: typing.Optional[RequestOptions] = None,
113 | ) -> typing.Optional[typing.Any]:
114 | """
115 | Parameters
116 | ----------
117 | instance_id : str
118 |
119 | code : str
120 |
121 | kernel_name : typing.Optional[str]
122 |
123 | timeout : typing.Optional[int]
124 |
125 | request_options : typing.Optional[RequestOptions]
126 | Request-specific configuration.
127 |
128 | Returns
129 | -------
130 | typing.Optional[typing.Any]
131 | Successful Response
132 |
133 | Examples
134 | --------
135 | import asyncio
136 |
137 | from scrapybara import AsyncScrapybara
138 |
139 | client = AsyncScrapybara(
140 | api_key="YOUR_API_KEY",
141 | )
142 |
143 |
144 | async def main() -> None:
145 | await client.code.execute(
146 | instance_id="instance_id",
147 | code="code",
148 | )
149 |
150 |
151 | asyncio.run(main())
152 | """
153 | _response = await self._client_wrapper.httpx_client.request(
154 | f"v1/instance/{jsonable_encoder(instance_id)}/code/execute",
155 | method="POST",
156 | json={
157 | "code": code,
158 | "kernel_name": kernel_name,
159 | "timeout": timeout,
160 | },
161 | headers={
162 | "content-type": "application/json",
163 | },
164 | request_options=request_options,
165 | omit=OMIT,
166 | )
167 | try:
168 | if 200 <= _response.status_code < 300:
169 | return typing.cast(
170 | typing.Optional[typing.Any],
171 | parse_obj_as(
172 | type_=typing.Optional[typing.Any], # type: ignore
173 | object_=_response.json(),
174 | ),
175 | )
176 | if _response.status_code == 422:
177 | raise UnprocessableEntityError(
178 | typing.cast(
179 | HttpValidationError,
180 | parse_obj_as(
181 | type_=HttpValidationError, # type: ignore
182 | object_=_response.json(),
183 | ),
184 | )
185 | )
186 | _response_json = _response.json()
187 | except JSONDecodeError:
188 | raise ApiError(status_code=_response.status_code, body=_response.text)
189 | raise ApiError(status_code=_response.status_code, body=_response_json)
190 |
--------------------------------------------------------------------------------
/src/scrapybara/core/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from .api_error import ApiError
4 | from .client_wrapper import AsyncClientWrapper, BaseClientWrapper, SyncClientWrapper
5 | from .datetime_utils import serialize_datetime
6 | from .file import File, convert_file_dict_to_httpx_tuples, with_content_type
7 | from .http_client import AsyncHttpClient, HttpClient
8 | from .jsonable_encoder import jsonable_encoder
9 | from .pydantic_utilities import (
10 | IS_PYDANTIC_V2,
11 | UniversalBaseModel,
12 | UniversalRootModel,
13 | parse_obj_as,
14 | universal_field_validator,
15 | universal_root_validator,
16 | update_forward_refs,
17 | )
18 | from .query_encoder import encode_query
19 | from .remove_none_from_dict import remove_none_from_dict
20 | from .request_options import RequestOptions
21 | from .serialization import FieldMetadata, convert_and_respect_annotation_metadata
22 |
23 | __all__ = [
24 | "ApiError",
25 | "AsyncClientWrapper",
26 | "AsyncHttpClient",
27 | "BaseClientWrapper",
28 | "FieldMetadata",
29 | "File",
30 | "HttpClient",
31 | "IS_PYDANTIC_V2",
32 | "RequestOptions",
33 | "SyncClientWrapper",
34 | "UniversalBaseModel",
35 | "UniversalRootModel",
36 | "convert_and_respect_annotation_metadata",
37 | "convert_file_dict_to_httpx_tuples",
38 | "encode_query",
39 | "jsonable_encoder",
40 | "parse_obj_as",
41 | "remove_none_from_dict",
42 | "serialize_datetime",
43 | "universal_field_validator",
44 | "universal_root_validator",
45 | "update_forward_refs",
46 | "with_content_type",
47 | ]
48 |
--------------------------------------------------------------------------------
/src/scrapybara/core/api_error.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 |
6 | class ApiError(Exception):
7 | status_code: typing.Optional[int]
8 | body: typing.Any
9 |
10 | def __init__(self, *, status_code: typing.Optional[int] = None, body: typing.Any = None):
11 | self.status_code = status_code
12 | self.body = body
13 |
14 | def __str__(self) -> str:
15 | return f"status_code: {self.status_code}, body: {self.body}"
16 |
--------------------------------------------------------------------------------
/src/scrapybara/core/client_wrapper.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 | import httpx
5 | from .http_client import HttpClient
6 | from .http_client import AsyncHttpClient
7 |
8 |
9 | class BaseClientWrapper:
10 | def __init__(self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None):
11 | self.api_key = api_key
12 | self._base_url = base_url
13 | self._timeout = timeout
14 |
15 | def get_headers(self) -> typing.Dict[str, str]:
16 | headers: typing.Dict[str, str] = {
17 | "X-Fern-Language": "Python",
18 | "X-Fern-SDK-Name": "scrapybara",
19 | "X-Fern-SDK-Version": "2.5.3",
20 | }
21 | headers["x-api-key"] = self.api_key
22 | return headers
23 |
24 | def get_base_url(self) -> str:
25 | return self._base_url
26 |
27 | def get_timeout(self) -> typing.Optional[float]:
28 | return self._timeout
29 |
30 |
31 | class SyncClientWrapper(BaseClientWrapper):
32 | def __init__(
33 | self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.Client
34 | ):
35 | super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
36 | self.httpx_client = HttpClient(
37 | httpx_client=httpx_client,
38 | base_headers=self.get_headers,
39 | base_timeout=self.get_timeout,
40 | base_url=self.get_base_url,
41 | )
42 |
43 |
44 | class AsyncClientWrapper(BaseClientWrapper):
45 | def __init__(
46 | self, *, api_key: str, base_url: str, timeout: typing.Optional[float] = None, httpx_client: httpx.AsyncClient
47 | ):
48 | super().__init__(api_key=api_key, base_url=base_url, timeout=timeout)
49 | self.httpx_client = AsyncHttpClient(
50 | httpx_client=httpx_client,
51 | base_headers=self.get_headers,
52 | base_timeout=self.get_timeout,
53 | base_url=self.get_base_url,
54 | )
55 |
--------------------------------------------------------------------------------
/src/scrapybara/core/datetime_utils.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import datetime as dt
4 |
5 |
6 | def serialize_datetime(v: dt.datetime) -> str:
7 | """
8 | Serialize a datetime including timezone info.
9 |
10 | Uses the timezone info provided if present, otherwise uses the current runtime's timezone info.
11 |
12 | UTC datetimes end in "Z" while all other timezones are represented as offset from UTC, e.g. +05:00.
13 | """
14 |
15 | def _serialize_zoned_datetime(v: dt.datetime) -> str:
16 | if v.tzinfo is not None and v.tzinfo.tzname(None) == dt.timezone.utc.tzname(None):
17 | # UTC is a special case where we use "Z" at the end instead of "+00:00"
18 | return v.isoformat().replace("+00:00", "Z")
19 | else:
20 | # Delegate to the typical +/- offset format
21 | return v.isoformat()
22 |
23 | if v.tzinfo is not None:
24 | return _serialize_zoned_datetime(v)
25 | else:
26 | local_tz = dt.datetime.now().astimezone().tzinfo
27 | localized_dt = v.replace(tzinfo=local_tz)
28 | return _serialize_zoned_datetime(localized_dt)
29 |
--------------------------------------------------------------------------------
/src/scrapybara/core/file.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from typing import IO, Dict, List, Mapping, Optional, Tuple, Union, cast
4 |
5 | # File typing inspired by the flexibility of types within the httpx library
6 | # https://github.com/encode/httpx/blob/master/httpx/_types.py
7 | FileContent = Union[IO[bytes], bytes, str]
8 | File = Union[
9 | # file (or bytes)
10 | FileContent,
11 | # (filename, file (or bytes))
12 | Tuple[Optional[str], FileContent],
13 | # (filename, file (or bytes), content_type)
14 | Tuple[Optional[str], FileContent, Optional[str]],
15 | # (filename, file (or bytes), content_type, headers)
16 | Tuple[
17 | Optional[str],
18 | FileContent,
19 | Optional[str],
20 | Mapping[str, str],
21 | ],
22 | ]
23 |
24 |
25 | def convert_file_dict_to_httpx_tuples(
26 | d: Dict[str, Union[File, List[File]]],
27 | ) -> List[Tuple[str, File]]:
28 | """
29 | The format we use is a list of tuples, where the first element is the
30 | name of the file and the second is the file object. Typically HTTPX wants
31 | a dict, but to be able to send lists of files, you have to use the list
32 | approach (which also works for non-lists)
33 | https://github.com/encode/httpx/pull/1032
34 | """
35 |
36 | httpx_tuples = []
37 | for key, file_like in d.items():
38 | if isinstance(file_like, list):
39 | for file_like_item in file_like:
40 | httpx_tuples.append((key, file_like_item))
41 | else:
42 | httpx_tuples.append((key, file_like))
43 | return httpx_tuples
44 |
45 |
46 | def with_content_type(*, file: File, default_content_type: str) -> File:
47 | """
48 | This function resolves to the file's content type, if provided, and defaults
49 | to the default_content_type value if not.
50 | """
51 | if isinstance(file, tuple):
52 | if len(file) == 2:
53 | filename, content = cast(Tuple[Optional[str], FileContent], file) # type: ignore
54 | return (filename, content, default_content_type)
55 | elif len(file) == 3:
56 | filename, content, file_content_type = cast(Tuple[Optional[str], FileContent, Optional[str]], file) # type: ignore
57 | out_content_type = file_content_type or default_content_type
58 | return (filename, content, out_content_type)
59 | elif len(file) == 4:
60 | filename, content, file_content_type, headers = cast( # type: ignore
61 | Tuple[Optional[str], FileContent, Optional[str], Mapping[str, str]], file
62 | )
63 | out_content_type = file_content_type or default_content_type
64 | return (filename, content, out_content_type, headers)
65 | else:
66 | raise ValueError(f"Unexpected tuple length: {len(file)}")
67 | return (None, file, default_content_type)
68 |
--------------------------------------------------------------------------------
/src/scrapybara/core/http_client.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import asyncio
4 | import email.utils
5 | import json
6 | import re
7 | import time
8 | import typing
9 | import urllib.parse
10 | from contextlib import asynccontextmanager, contextmanager
11 | from random import random
12 |
13 | import httpx
14 |
15 | from .file import File, convert_file_dict_to_httpx_tuples
16 | from .jsonable_encoder import jsonable_encoder
17 | from .query_encoder import encode_query
18 | from .remove_none_from_dict import remove_none_from_dict
19 | from .request_options import RequestOptions
20 |
21 | INITIAL_RETRY_DELAY_SECONDS = 0.5
22 | MAX_RETRY_DELAY_SECONDS = 10
23 | MAX_RETRY_DELAY_SECONDS_FROM_HEADER = 30
24 |
25 |
26 | def _parse_retry_after(response_headers: httpx.Headers) -> typing.Optional[float]:
27 | """
28 | This function parses the `Retry-After` header in a HTTP response and returns the number of seconds to wait.
29 |
30 | Inspired by the urllib3 retry implementation.
31 | """
32 | retry_after_ms = response_headers.get("retry-after-ms")
33 | if retry_after_ms is not None:
34 | try:
35 | return int(retry_after_ms) / 1000 if retry_after_ms > 0 else 0
36 | except Exception:
37 | pass
38 |
39 | retry_after = response_headers.get("retry-after")
40 | if retry_after is None:
41 | return None
42 |
43 | # Attempt to parse the header as an int.
44 | if re.match(r"^\s*[0-9]+\s*$", retry_after):
45 | seconds = float(retry_after)
46 | # Fallback to parsing it as a date.
47 | else:
48 | retry_date_tuple = email.utils.parsedate_tz(retry_after)
49 | if retry_date_tuple is None:
50 | return None
51 | if retry_date_tuple[9] is None: # Python 2
52 | # Assume UTC if no timezone was specified
53 | # On Python2.7, parsedate_tz returns None for a timezone offset
54 | # instead of 0 if no timezone is given, where mktime_tz treats
55 | # a None timezone offset as local time.
56 | retry_date_tuple = retry_date_tuple[:9] + (0,) + retry_date_tuple[10:]
57 |
58 | retry_date = email.utils.mktime_tz(retry_date_tuple)
59 | seconds = retry_date - time.time()
60 |
61 | if seconds < 0:
62 | seconds = 0
63 |
64 | return seconds
65 |
66 |
67 | def _retry_timeout(response: httpx.Response, retries: int) -> float:
68 | """
69 | Determine the amount of time to wait before retrying a request.
70 | This function begins by trying to parse a retry-after header from the response, and then proceeds to use exponential backoff
71 | with a jitter to determine the number of seconds to wait.
72 | """
73 |
74 | # If the API asks us to wait a certain amount of time (and it's a reasonable amount), just do what it says.
75 | retry_after = _parse_retry_after(response.headers)
76 | if retry_after is not None and retry_after <= MAX_RETRY_DELAY_SECONDS_FROM_HEADER:
77 | return retry_after
78 |
79 | # Apply exponential backoff, capped at MAX_RETRY_DELAY_SECONDS.
80 | retry_delay = min(INITIAL_RETRY_DELAY_SECONDS * pow(2.0, retries), MAX_RETRY_DELAY_SECONDS)
81 |
82 | # Add a randomness / jitter to the retry delay to avoid overwhelming the server with retries.
83 | timeout = retry_delay * (1 - 0.25 * random())
84 | return timeout if timeout >= 0 else 0
85 |
86 |
87 | def _should_retry(response: httpx.Response) -> bool:
88 | retryable_400s = [429, 408, 409]
89 | return response.status_code >= 500 or response.status_code in retryable_400s
90 |
91 |
92 | def remove_omit_from_dict(
93 | original: typing.Dict[str, typing.Optional[typing.Any]],
94 | omit: typing.Optional[typing.Any],
95 | ) -> typing.Dict[str, typing.Any]:
96 | if omit is None:
97 | return original
98 | new: typing.Dict[str, typing.Any] = {}
99 | for key, value in original.items():
100 | if value is not omit:
101 | new[key] = value
102 | return new
103 |
104 |
105 | def maybe_filter_request_body(
106 | data: typing.Optional[typing.Any],
107 | request_options: typing.Optional[RequestOptions],
108 | omit: typing.Optional[typing.Any],
109 | ) -> typing.Optional[typing.Any]:
110 | if data is None:
111 | return (
112 | jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
113 | if request_options is not None
114 | else None
115 | )
116 | elif not isinstance(data, typing.Mapping):
117 | data_content = jsonable_encoder(data)
118 | else:
119 | data_content = {
120 | **(jsonable_encoder(remove_omit_from_dict(data, omit))), # type: ignore
121 | **(
122 | jsonable_encoder(request_options.get("additional_body_parameters", {})) or {}
123 | if request_options is not None
124 | else {}
125 | ),
126 | }
127 | return data_content
128 |
129 |
130 | # Abstracted out for testing purposes
131 | def get_request_body(
132 | *,
133 | json: typing.Optional[typing.Any],
134 | data: typing.Optional[typing.Any],
135 | request_options: typing.Optional[RequestOptions],
136 | omit: typing.Optional[typing.Any],
137 | ) -> typing.Tuple[typing.Optional[typing.Any], typing.Optional[typing.Any]]:
138 | json_body = None
139 | data_body = None
140 | if data is not None:
141 | data_body = maybe_filter_request_body(data, request_options, omit)
142 | else:
143 | # If both data and json are None, we send json data in the event extra properties are specified
144 | json_body = maybe_filter_request_body(json, request_options, omit)
145 |
146 | # If you have an empty JSON body, you should just send None
147 | return (json_body if json_body != {} else None), data_body if data_body != {} else None
148 |
149 |
150 | class HttpClient:
151 | def __init__(
152 | self,
153 | *,
154 | httpx_client: httpx.Client,
155 | base_timeout: typing.Callable[[], typing.Optional[float]],
156 | base_headers: typing.Callable[[], typing.Dict[str, str]],
157 | base_url: typing.Optional[typing.Callable[[], str]] = None,
158 | ):
159 | self.base_url = base_url
160 | self.base_timeout = base_timeout
161 | self.base_headers = base_headers
162 | self.httpx_client = httpx_client
163 |
164 | def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
165 | base_url = maybe_base_url
166 | if self.base_url is not None and base_url is None:
167 | base_url = self.base_url()
168 |
169 | if base_url is None:
170 | raise ValueError("A base_url is required to make this request, please provide one and try again.")
171 | return base_url
172 |
173 | def request(
174 | self,
175 | path: typing.Optional[str] = None,
176 | *,
177 | method: str,
178 | base_url: typing.Optional[str] = None,
179 | params: typing.Optional[typing.Dict[str, typing.Any]] = None,
180 | json: typing.Optional[typing.Any] = None,
181 | data: typing.Optional[typing.Any] = None,
182 | content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
183 | files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
184 | headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
185 | request_options: typing.Optional[RequestOptions] = None,
186 | retries: int = 2,
187 | omit: typing.Optional[typing.Any] = None,
188 | ) -> httpx.Response:
189 | base_url = self.get_base_url(base_url)
190 | timeout = (
191 | request_options.get("timeout_in_seconds")
192 | if request_options is not None and request_options.get("timeout_in_seconds") is not None
193 | else self.base_timeout()
194 | )
195 |
196 | json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
197 |
198 | response = self.httpx_client.request(
199 | method=method,
200 | url=urllib.parse.urljoin(f"{base_url}/", path),
201 | headers=jsonable_encoder(
202 | remove_none_from_dict(
203 | {
204 | **self.base_headers(),
205 | **(headers if headers is not None else {}),
206 | **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
207 | }
208 | )
209 | ),
210 | params=encode_query(
211 | jsonable_encoder(
212 | remove_none_from_dict(
213 | remove_omit_from_dict(
214 | {
215 | **(params if params is not None else {}),
216 | **(
217 | request_options.get("additional_query_parameters", {}) or {}
218 | if request_options is not None
219 | else {}
220 | ),
221 | },
222 | omit,
223 | )
224 | )
225 | )
226 | ),
227 | json=json_body,
228 | data=data_body,
229 | content=content,
230 | files=(
231 | convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
232 | if (files is not None and files is not omit)
233 | else None
234 | ),
235 | timeout=timeout,
236 | )
237 |
238 | max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
239 | if _should_retry(response=response):
240 | if max_retries > retries:
241 | time.sleep(_retry_timeout(response=response, retries=retries))
242 | return self.request(
243 | path=path,
244 | method=method,
245 | base_url=base_url,
246 | params=params,
247 | json=json,
248 | content=content,
249 | files=files,
250 | headers=headers,
251 | request_options=request_options,
252 | retries=retries + 1,
253 | omit=omit,
254 | )
255 |
256 | return response
257 |
258 | @contextmanager
259 | def stream(
260 | self,
261 | path: typing.Optional[str] = None,
262 | *,
263 | method: str,
264 | base_url: typing.Optional[str] = None,
265 | params: typing.Optional[typing.Dict[str, typing.Any]] = None,
266 | json: typing.Optional[typing.Any] = None,
267 | data: typing.Optional[typing.Any] = None,
268 | content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
269 | files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
270 | headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
271 | request_options: typing.Optional[RequestOptions] = None,
272 | retries: int = 2,
273 | omit: typing.Optional[typing.Any] = None,
274 | ) -> typing.Iterator[httpx.Response]:
275 | base_url = self.get_base_url(base_url)
276 | timeout = (
277 | request_options.get("timeout_in_seconds")
278 | if request_options is not None and request_options.get("timeout_in_seconds") is not None
279 | else self.base_timeout()
280 | )
281 |
282 | json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
283 |
284 | with self.httpx_client.stream(
285 | method=method,
286 | url=urllib.parse.urljoin(f"{base_url}/", path),
287 | headers=jsonable_encoder(
288 | remove_none_from_dict(
289 | {
290 | **self.base_headers(),
291 | **(headers if headers is not None else {}),
292 | **(request_options.get("additional_headers", {}) if request_options is not None else {}),
293 | }
294 | )
295 | ),
296 | params=encode_query(
297 | jsonable_encoder(
298 | remove_none_from_dict(
299 | remove_omit_from_dict(
300 | {
301 | **(params if params is not None else {}),
302 | **(
303 | request_options.get("additional_query_parameters", {})
304 | if request_options is not None
305 | else {}
306 | ),
307 | },
308 | omit,
309 | )
310 | )
311 | )
312 | ),
313 | json=json_body,
314 | data=data_body,
315 | content=content,
316 | files=(
317 | convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
318 | if (files is not None and files is not omit)
319 | else None
320 | ),
321 | timeout=timeout,
322 | ) as stream:
323 | yield stream
324 |
325 |
326 | class AsyncHttpClient:
327 | def __init__(
328 | self,
329 | *,
330 | httpx_client: httpx.AsyncClient,
331 | base_timeout: typing.Callable[[], typing.Optional[float]],
332 | base_headers: typing.Callable[[], typing.Dict[str, str]],
333 | base_url: typing.Optional[typing.Callable[[], str]] = None,
334 | ):
335 | self.base_url = base_url
336 | self.base_timeout = base_timeout
337 | self.base_headers = base_headers
338 | self.httpx_client = httpx_client
339 |
340 | def get_base_url(self, maybe_base_url: typing.Optional[str]) -> str:
341 | base_url = maybe_base_url
342 | if self.base_url is not None and base_url is None:
343 | base_url = self.base_url()
344 |
345 | if base_url is None:
346 | raise ValueError("A base_url is required to make this request, please provide one and try again.")
347 | return base_url
348 |
349 | async def request(
350 | self,
351 | path: typing.Optional[str] = None,
352 | *,
353 | method: str,
354 | base_url: typing.Optional[str] = None,
355 | params: typing.Optional[typing.Dict[str, typing.Any]] = None,
356 | json: typing.Optional[typing.Any] = None,
357 | data: typing.Optional[typing.Any] = None,
358 | content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
359 | files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
360 | headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
361 | request_options: typing.Optional[RequestOptions] = None,
362 | retries: int = 2,
363 | omit: typing.Optional[typing.Any] = None,
364 | ) -> httpx.Response:
365 | base_url = self.get_base_url(base_url)
366 | timeout = (
367 | request_options.get("timeout_in_seconds")
368 | if request_options is not None and request_options.get("timeout_in_seconds") is not None
369 | else self.base_timeout()
370 | )
371 |
372 | json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
373 |
374 | # Add the input to each of these and do None-safety checks
375 | response = await self.httpx_client.request(
376 | method=method,
377 | url=urllib.parse.urljoin(f"{base_url}/", path),
378 | headers=jsonable_encoder(
379 | remove_none_from_dict(
380 | {
381 | **self.base_headers(),
382 | **(headers if headers is not None else {}),
383 | **(request_options.get("additional_headers", {}) or {} if request_options is not None else {}),
384 | }
385 | )
386 | ),
387 | params=encode_query(
388 | jsonable_encoder(
389 | remove_none_from_dict(
390 | remove_omit_from_dict(
391 | {
392 | **(params if params is not None else {}),
393 | **(
394 | request_options.get("additional_query_parameters", {}) or {}
395 | if request_options is not None
396 | else {}
397 | ),
398 | },
399 | omit,
400 | )
401 | )
402 | )
403 | ),
404 | json=json_body,
405 | data=data_body,
406 | content=content,
407 | files=(
408 | convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
409 | if files is not None
410 | else None
411 | ),
412 | timeout=timeout,
413 | )
414 |
415 | max_retries: int = request_options.get("max_retries", 0) if request_options is not None else 0
416 | if _should_retry(response=response):
417 | if max_retries > retries:
418 | await asyncio.sleep(_retry_timeout(response=response, retries=retries))
419 | return await self.request(
420 | path=path,
421 | method=method,
422 | base_url=base_url,
423 | params=params,
424 | json=json,
425 | content=content,
426 | files=files,
427 | headers=headers,
428 | request_options=request_options,
429 | retries=retries + 1,
430 | omit=omit,
431 | )
432 | return response
433 |
434 | @asynccontextmanager
435 | async def stream(
436 | self,
437 | path: typing.Optional[str] = None,
438 | *,
439 | method: str,
440 | base_url: typing.Optional[str] = None,
441 | params: typing.Optional[typing.Dict[str, typing.Any]] = None,
442 | json: typing.Optional[typing.Any] = None,
443 | data: typing.Optional[typing.Any] = None,
444 | content: typing.Optional[typing.Union[bytes, typing.Iterator[bytes], typing.AsyncIterator[bytes]]] = None,
445 | files: typing.Optional[typing.Dict[str, typing.Optional[typing.Union[File, typing.List[File]]]]] = None,
446 | headers: typing.Optional[typing.Dict[str, typing.Any]] = None,
447 | request_options: typing.Optional[RequestOptions] = None,
448 | retries: int = 2,
449 | omit: typing.Optional[typing.Any] = None,
450 | ) -> typing.AsyncIterator[httpx.Response]:
451 | base_url = self.get_base_url(base_url)
452 | timeout = (
453 | request_options.get("timeout_in_seconds")
454 | if request_options is not None and request_options.get("timeout_in_seconds") is not None
455 | else self.base_timeout()
456 | )
457 |
458 | json_body, data_body = get_request_body(json=json, data=data, request_options=request_options, omit=omit)
459 |
460 | async with self.httpx_client.stream(
461 | method=method,
462 | url=urllib.parse.urljoin(f"{base_url}/", path),
463 | headers=jsonable_encoder(
464 | remove_none_from_dict(
465 | {
466 | **self.base_headers(),
467 | **(headers if headers is not None else {}),
468 | **(request_options.get("additional_headers", {}) if request_options is not None else {}),
469 | }
470 | )
471 | ),
472 | params=encode_query(
473 | jsonable_encoder(
474 | remove_none_from_dict(
475 | remove_omit_from_dict(
476 | {
477 | **(params if params is not None else {}),
478 | **(
479 | request_options.get("additional_query_parameters", {})
480 | if request_options is not None
481 | else {}
482 | ),
483 | },
484 | omit=omit,
485 | )
486 | )
487 | )
488 | ),
489 | json=json_body,
490 | data=data_body,
491 | content=content,
492 | files=(
493 | convert_file_dict_to_httpx_tuples(remove_omit_from_dict(remove_none_from_dict(files), omit))
494 | if files is not None
495 | else None
496 | ),
497 | timeout=timeout,
498 | ) as stream:
499 | yield stream
500 |
--------------------------------------------------------------------------------
/src/scrapybara/core/jsonable_encoder.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | """
4 | jsonable_encoder converts a Python object to a JSON-friendly dict
5 | (e.g. datetimes to strings, Pydantic models to dicts).
6 |
7 | Taken from FastAPI, and made a bit simpler
8 | https://github.com/tiangolo/fastapi/blob/master/fastapi/encoders.py
9 | """
10 |
11 | import base64
12 | import dataclasses
13 | import datetime as dt
14 | from enum import Enum
15 | from pathlib import PurePath
16 | from types import GeneratorType
17 | from typing import Any, Callable, Dict, List, Optional, Set, Union
18 |
19 | import pydantic
20 |
21 | from .datetime_utils import serialize_datetime
22 | from .pydantic_utilities import (
23 | IS_PYDANTIC_V2,
24 | encode_by_type,
25 | to_jsonable_with_fallback,
26 | )
27 |
28 | SetIntStr = Set[Union[int, str]]
29 | DictIntStrAny = Dict[Union[int, str], Any]
30 |
31 |
32 | def jsonable_encoder(obj: Any, custom_encoder: Optional[Dict[Any, Callable[[Any], Any]]] = None) -> Any:
33 | custom_encoder = custom_encoder or {}
34 | if custom_encoder:
35 | if type(obj) in custom_encoder:
36 | return custom_encoder[type(obj)](obj)
37 | else:
38 | for encoder_type, encoder_instance in custom_encoder.items():
39 | if isinstance(obj, encoder_type):
40 | return encoder_instance(obj)
41 | if isinstance(obj, pydantic.BaseModel):
42 | if IS_PYDANTIC_V2:
43 | encoder = getattr(obj.model_config, "json_encoders", {}) # type: ignore # Pydantic v2
44 | else:
45 | encoder = getattr(obj.__config__, "json_encoders", {}) # type: ignore # Pydantic v1
46 | if custom_encoder:
47 | encoder.update(custom_encoder)
48 | obj_dict = obj.dict(by_alias=True)
49 | if "__root__" in obj_dict:
50 | obj_dict = obj_dict["__root__"]
51 | if "root" in obj_dict:
52 | obj_dict = obj_dict["root"]
53 | return jsonable_encoder(obj_dict, custom_encoder=encoder)
54 | if dataclasses.is_dataclass(obj):
55 | obj_dict = dataclasses.asdict(obj) # type: ignore
56 | return jsonable_encoder(obj_dict, custom_encoder=custom_encoder)
57 | if isinstance(obj, bytes):
58 | return base64.b64encode(obj).decode("utf-8")
59 | if isinstance(obj, Enum):
60 | return obj.value
61 | if isinstance(obj, PurePath):
62 | return str(obj)
63 | if isinstance(obj, (str, int, float, type(None))):
64 | return obj
65 | if isinstance(obj, dt.datetime):
66 | return serialize_datetime(obj)
67 | if isinstance(obj, dt.date):
68 | return str(obj)
69 | if isinstance(obj, dict):
70 | encoded_dict = {}
71 | allowed_keys = set(obj.keys())
72 | for key, value in obj.items():
73 | if key in allowed_keys:
74 | encoded_key = jsonable_encoder(key, custom_encoder=custom_encoder)
75 | encoded_value = jsonable_encoder(value, custom_encoder=custom_encoder)
76 | encoded_dict[encoded_key] = encoded_value
77 | return encoded_dict
78 | if isinstance(obj, (list, set, frozenset, GeneratorType, tuple)):
79 | encoded_list = []
80 | for item in obj:
81 | encoded_list.append(jsonable_encoder(item, custom_encoder=custom_encoder))
82 | return encoded_list
83 |
84 | def fallback_serializer(o: Any) -> Any:
85 | attempt_encode = encode_by_type(o)
86 | if attempt_encode is not None:
87 | return attempt_encode
88 |
89 | try:
90 | data = dict(o)
91 | except Exception as e:
92 | errors: List[Exception] = []
93 | errors.append(e)
94 | try:
95 | data = vars(o)
96 | except Exception as e:
97 | errors.append(e)
98 | raise ValueError(errors) from e
99 | return jsonable_encoder(data, custom_encoder=custom_encoder)
100 |
101 | return to_jsonable_with_fallback(obj, fallback_serializer)
102 |
--------------------------------------------------------------------------------
/src/scrapybara/core/pydantic_utilities.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # nopycln: file
4 | import datetime as dt
5 | import typing
6 | from collections import defaultdict
7 |
8 | import typing_extensions
9 |
10 | import pydantic
11 |
12 | from .datetime_utils import serialize_datetime
13 | from .serialization import convert_and_respect_annotation_metadata
14 |
15 | IS_PYDANTIC_V2 = pydantic.VERSION.startswith("2.")
16 |
17 | if IS_PYDANTIC_V2:
18 | # isort will try to reformat the comments on these imports, which breaks mypy
19 | # isort: off
20 | from pydantic.v1.datetime_parse import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
21 | parse_date as parse_date,
22 | )
23 | from pydantic.v1.datetime_parse import ( # pyright: ignore[reportMissingImports] # Pydantic v2
24 | parse_datetime as parse_datetime,
25 | )
26 | from pydantic.v1.json import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
27 | ENCODERS_BY_TYPE as encoders_by_type,
28 | )
29 | from pydantic.v1.typing import ( # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
30 | get_args as get_args,
31 | )
32 | from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2
33 | get_origin as get_origin,
34 | )
35 | from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2
36 | is_literal_type as is_literal_type,
37 | )
38 | from pydantic.v1.typing import ( # pyright: ignore[reportMissingImports] # Pydantic v2
39 | is_union as is_union,
40 | )
41 | from pydantic.v1.fields import ModelField as ModelField # type: ignore # pyright: ignore[reportMissingImports] # Pydantic v2
42 | else:
43 | from pydantic.datetime_parse import parse_date as parse_date # type: ignore # Pydantic v1
44 | from pydantic.datetime_parse import parse_datetime as parse_datetime # type: ignore # Pydantic v1
45 | from pydantic.fields import ModelField as ModelField # type: ignore # Pydantic v1
46 | from pydantic.json import ENCODERS_BY_TYPE as encoders_by_type # type: ignore # Pydantic v1
47 | from pydantic.typing import get_args as get_args # type: ignore # Pydantic v1
48 | from pydantic.typing import get_origin as get_origin # type: ignore # Pydantic v1
49 | from pydantic.typing import is_literal_type as is_literal_type # type: ignore # Pydantic v1
50 | from pydantic.typing import is_union as is_union # type: ignore # Pydantic v1
51 |
52 | # isort: on
53 |
54 |
55 | T = typing.TypeVar("T")
56 | Model = typing.TypeVar("Model", bound=pydantic.BaseModel)
57 |
58 |
59 | def parse_obj_as(type_: typing.Type[T], object_: typing.Any) -> T:
60 | dealiased_object = convert_and_respect_annotation_metadata(object_=object_, annotation=type_, direction="read")
61 | if IS_PYDANTIC_V2:
62 | adapter = pydantic.TypeAdapter(type_) # type: ignore # Pydantic v2
63 | return adapter.validate_python(dealiased_object)
64 | else:
65 | return pydantic.parse_obj_as(type_, dealiased_object)
66 |
67 |
68 | def to_jsonable_with_fallback(
69 | obj: typing.Any, fallback_serializer: typing.Callable[[typing.Any], typing.Any]
70 | ) -> typing.Any:
71 | if IS_PYDANTIC_V2:
72 | from pydantic_core import to_jsonable_python
73 |
74 | return to_jsonable_python(obj, fallback=fallback_serializer)
75 | else:
76 | return fallback_serializer(obj)
77 |
78 |
79 | class UniversalBaseModel(pydantic.BaseModel):
80 | if IS_PYDANTIC_V2:
81 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(
82 | # Allow fields beginning with `model_` to be used in the model
83 | protected_namespaces=(),
84 | ) # type: ignore # Pydantic v2
85 |
86 | @pydantic.model_serializer(mode="wrap", when_used="json") # type: ignore # Pydantic v2
87 | def serialize_model(self, handler: pydantic.SerializerFunctionWrapHandler) -> typing.Any: # type: ignore # Pydantic v2
88 | serialized = handler(self)
89 | data = {k: serialize_datetime(v) if isinstance(v, dt.datetime) else v for k, v in serialized.items()}
90 | return data
91 |
92 | else:
93 |
94 | class Config:
95 | smart_union = True
96 | json_encoders = {dt.datetime: serialize_datetime}
97 |
98 | @classmethod
99 | def model_construct(
100 | cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
101 | ) -> "Model":
102 | dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
103 | return cls.construct(_fields_set, **dealiased_object)
104 |
105 | @classmethod
106 | def construct(
107 | cls: typing.Type["Model"], _fields_set: typing.Optional[typing.Set[str]] = None, **values: typing.Any
108 | ) -> "Model":
109 | dealiased_object = convert_and_respect_annotation_metadata(object_=values, annotation=cls, direction="read")
110 | if IS_PYDANTIC_V2:
111 | return super().model_construct(_fields_set, **dealiased_object) # type: ignore # Pydantic v2
112 | else:
113 | return super().construct(_fields_set, **dealiased_object)
114 |
115 | def json(self, **kwargs: typing.Any) -> str:
116 | kwargs_with_defaults: typing.Any = {
117 | "by_alias": True,
118 | "exclude_unset": True,
119 | **kwargs,
120 | }
121 | if IS_PYDANTIC_V2:
122 | return super().model_dump_json(**kwargs_with_defaults) # type: ignore # Pydantic v2
123 | else:
124 | return super().json(**kwargs_with_defaults)
125 |
126 | def dict(self, **kwargs: typing.Any) -> typing.Dict[str, typing.Any]:
127 | """
128 | Override the default dict method to `exclude_unset` by default. This function patches
129 | `exclude_unset` to work include fields within non-None default values.
130 | """
131 | # Note: the logic here is multiplexed given the levers exposed in Pydantic V1 vs V2
132 | # Pydantic V1's .dict can be extremely slow, so we do not want to call it twice.
133 | #
134 | # We'd ideally do the same for Pydantic V2, but it shells out to a library to serialize models
135 | # that we have less control over, and this is less intrusive than custom serializers for now.
136 | if IS_PYDANTIC_V2:
137 | kwargs_with_defaults_exclude_unset: typing.Any = {
138 | **kwargs,
139 | "by_alias": True,
140 | "exclude_unset": True,
141 | "exclude_none": False,
142 | }
143 | kwargs_with_defaults_exclude_none: typing.Any = {
144 | **kwargs,
145 | "by_alias": True,
146 | "exclude_none": True,
147 | "exclude_unset": False,
148 | }
149 | dict_dump = deep_union_pydantic_dicts(
150 | super().model_dump(**kwargs_with_defaults_exclude_unset), # type: ignore # Pydantic v2
151 | super().model_dump(**kwargs_with_defaults_exclude_none), # type: ignore # Pydantic v2
152 | )
153 |
154 | else:
155 | _fields_set = self.__fields_set__.copy()
156 |
157 | fields = _get_model_fields(self.__class__)
158 | for name, field in fields.items():
159 | if name not in _fields_set:
160 | default = _get_field_default(field)
161 |
162 | # If the default values are non-null act like they've been set
163 | # This effectively allows exclude_unset to work like exclude_none where
164 | # the latter passes through intentionally set none values.
165 | if default is not None or ("exclude_unset" in kwargs and not kwargs["exclude_unset"]):
166 | _fields_set.add(name)
167 |
168 | if default is not None:
169 | self.__fields_set__.add(name)
170 |
171 | kwargs_with_defaults_exclude_unset_include_fields: typing.Any = {
172 | "by_alias": True,
173 | "exclude_unset": True,
174 | "include": _fields_set,
175 | **kwargs,
176 | }
177 |
178 | dict_dump = super().dict(**kwargs_with_defaults_exclude_unset_include_fields)
179 |
180 | return convert_and_respect_annotation_metadata(object_=dict_dump, annotation=self.__class__, direction="write")
181 |
182 |
183 | def _union_list_of_pydantic_dicts(
184 | source: typing.List[typing.Any], destination: typing.List[typing.Any]
185 | ) -> typing.List[typing.Any]:
186 | converted_list: typing.List[typing.Any] = []
187 | for i, item in enumerate(source):
188 | destination_value = destination[i] # type: ignore
189 | if isinstance(item, dict):
190 | converted_list.append(deep_union_pydantic_dicts(item, destination_value))
191 | elif isinstance(item, list):
192 | converted_list.append(_union_list_of_pydantic_dicts(item, destination_value))
193 | else:
194 | converted_list.append(item)
195 | return converted_list
196 |
197 |
198 | def deep_union_pydantic_dicts(
199 | source: typing.Dict[str, typing.Any], destination: typing.Dict[str, typing.Any]
200 | ) -> typing.Dict[str, typing.Any]:
201 | for key, value in source.items():
202 | node = destination.setdefault(key, {})
203 | if isinstance(value, dict):
204 | deep_union_pydantic_dicts(value, node)
205 | # Note: we do not do this same processing for sets given we do not have sets of models
206 | # and given the sets are unordered, the processing of the set and matching objects would
207 | # be non-trivial.
208 | elif isinstance(value, list):
209 | destination[key] = _union_list_of_pydantic_dicts(value, node)
210 | else:
211 | destination[key] = value
212 |
213 | return destination
214 |
215 |
216 | if IS_PYDANTIC_V2:
217 |
218 | class V2RootModel(UniversalBaseModel, pydantic.RootModel): # type: ignore # Pydantic v2
219 | pass
220 |
221 | UniversalRootModel: typing_extensions.TypeAlias = V2RootModel # type: ignore
222 | else:
223 | UniversalRootModel: typing_extensions.TypeAlias = UniversalBaseModel # type: ignore
224 |
225 |
226 | def encode_by_type(o: typing.Any) -> typing.Any:
227 | encoders_by_class_tuples: typing.Dict[typing.Callable[[typing.Any], typing.Any], typing.Tuple[typing.Any, ...]] = (
228 | defaultdict(tuple)
229 | )
230 | for type_, encoder in encoders_by_type.items():
231 | encoders_by_class_tuples[encoder] += (type_,)
232 |
233 | if type(o) in encoders_by_type:
234 | return encoders_by_type[type(o)](o)
235 | for encoder, classes_tuple in encoders_by_class_tuples.items():
236 | if isinstance(o, classes_tuple):
237 | return encoder(o)
238 |
239 |
240 | def update_forward_refs(model: typing.Type["Model"], **localns: typing.Any) -> None:
241 | if IS_PYDANTIC_V2:
242 | model.model_rebuild(raise_errors=False) # type: ignore # Pydantic v2
243 | else:
244 | model.update_forward_refs(**localns)
245 |
246 |
247 | # Mirrors Pydantic's internal typing
248 | AnyCallable = typing.Callable[..., typing.Any]
249 |
250 |
251 | def universal_root_validator(
252 | pre: bool = False,
253 | ) -> typing.Callable[[AnyCallable], AnyCallable]:
254 | def decorator(func: AnyCallable) -> AnyCallable:
255 | if IS_PYDANTIC_V2:
256 | return pydantic.model_validator(mode="before" if pre else "after")(func) # type: ignore # Pydantic v2
257 | else:
258 | return pydantic.root_validator(pre=pre)(func) # type: ignore # Pydantic v1
259 |
260 | return decorator
261 |
262 |
263 | def universal_field_validator(field_name: str, pre: bool = False) -> typing.Callable[[AnyCallable], AnyCallable]:
264 | def decorator(func: AnyCallable) -> AnyCallable:
265 | if IS_PYDANTIC_V2:
266 | return pydantic.field_validator(field_name, mode="before" if pre else "after")(func) # type: ignore # Pydantic v2
267 | else:
268 | return pydantic.validator(field_name, pre=pre)(func) # type: ignore # Pydantic v1
269 |
270 | return decorator
271 |
272 |
273 | PydanticField = typing.Union[ModelField, pydantic.fields.FieldInfo]
274 |
275 |
276 | def _get_model_fields(
277 | model: typing.Type["Model"],
278 | ) -> typing.Mapping[str, PydanticField]:
279 | if IS_PYDANTIC_V2:
280 | return model.model_fields # type: ignore # Pydantic v2
281 | else:
282 | return model.__fields__ # type: ignore # Pydantic v1
283 |
284 |
285 | def _get_field_default(field: PydanticField) -> typing.Any:
286 | try:
287 | value = field.get_default() # type: ignore # Pydantic < v1.10.15
288 | except:
289 | value = field.default
290 | if IS_PYDANTIC_V2:
291 | from pydantic_core import PydanticUndefined
292 |
293 | if value == PydanticUndefined:
294 | return None
295 | return value
296 | return value
297 |
--------------------------------------------------------------------------------
/src/scrapybara/core/query_encoder.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from typing import Any, Dict, List, Optional, Tuple
4 |
5 | import pydantic
6 |
7 |
8 | # Flattens dicts to be of the form {"key[subkey][subkey2]": value} where value is not a dict
9 | def traverse_query_dict(dict_flat: Dict[str, Any], key_prefix: Optional[str] = None) -> List[Tuple[str, Any]]:
10 | result = []
11 | for k, v in dict_flat.items():
12 | key = f"{key_prefix}[{k}]" if key_prefix is not None else k
13 | if isinstance(v, dict):
14 | result.extend(traverse_query_dict(v, key))
15 | elif isinstance(v, list):
16 | for arr_v in v:
17 | if isinstance(arr_v, dict):
18 | result.extend(traverse_query_dict(arr_v, key))
19 | else:
20 | result.append((key, arr_v))
21 | else:
22 | result.append((key, v))
23 | return result
24 |
25 |
26 | def single_query_encoder(query_key: str, query_value: Any) -> List[Tuple[str, Any]]:
27 | if isinstance(query_value, pydantic.BaseModel) or isinstance(query_value, dict):
28 | if isinstance(query_value, pydantic.BaseModel):
29 | obj_dict = query_value.dict(by_alias=True)
30 | else:
31 | obj_dict = query_value
32 | return traverse_query_dict(obj_dict, query_key)
33 | elif isinstance(query_value, list):
34 | encoded_values: List[Tuple[str, Any]] = []
35 | for value in query_value:
36 | if isinstance(value, pydantic.BaseModel) or isinstance(value, dict):
37 | if isinstance(value, pydantic.BaseModel):
38 | obj_dict = value.dict(by_alias=True)
39 | elif isinstance(value, dict):
40 | obj_dict = value
41 |
42 | encoded_values.extend(single_query_encoder(query_key, obj_dict))
43 | else:
44 | encoded_values.append((query_key, value))
45 |
46 | return encoded_values
47 |
48 | return [(query_key, query_value)]
49 |
50 |
51 | def encode_query(query: Optional[Dict[str, Any]]) -> Optional[List[Tuple[str, Any]]]:
52 | if query is None:
53 | return None
54 |
55 | encoded_query = []
56 | for k, v in query.items():
57 | encoded_query.extend(single_query_encoder(k, v))
58 | return encoded_query
59 |
--------------------------------------------------------------------------------
/src/scrapybara/core/remove_none_from_dict.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from typing import Any, Dict, Mapping, Optional
4 |
5 |
6 | def remove_none_from_dict(original: Mapping[str, Optional[Any]]) -> Dict[str, Any]:
7 | new: Dict[str, Any] = {}
8 | for key, value in original.items():
9 | if value is not None:
10 | new[key] = value
11 | return new
12 |
--------------------------------------------------------------------------------
/src/scrapybara/core/request_options.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | try:
6 | from typing import NotRequired # type: ignore
7 | except ImportError:
8 | from typing_extensions import NotRequired
9 |
10 |
11 | class RequestOptions(typing.TypedDict, total=False):
12 | """
13 | Additional options for request-specific configuration when calling APIs via the SDK.
14 | This is used primarily as an optional final parameter for service functions.
15 |
16 | Attributes:
17 | - timeout_in_seconds: int. The number of seconds to await an API call before timing out.
18 |
19 | - max_retries: int. The max number of retries to attempt if the API call fails.
20 |
21 | - additional_headers: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's header dict
22 |
23 | - additional_query_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's query parameters dict
24 |
25 | - additional_body_parameters: typing.Dict[str, typing.Any]. A dictionary containing additional parameters to spread into the request's body parameters dict
26 |
27 | - chunk_size: int. The size, in bytes, to process each chunk of data being streamed back within the response. This equates to leveraging `chunk_size` within `requests` or `httpx`, and is only leveraged for file downloads.
28 | """
29 |
30 | timeout_in_seconds: NotRequired[int]
31 | max_retries: NotRequired[int]
32 | additional_headers: NotRequired[typing.Dict[str, typing.Any]]
33 | additional_query_parameters: NotRequired[typing.Dict[str, typing.Any]]
34 | additional_body_parameters: NotRequired[typing.Dict[str, typing.Any]]
35 | chunk_size: NotRequired[int]
36 |
--------------------------------------------------------------------------------
/src/scrapybara/core/serialization.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import collections
4 | import inspect
5 | import typing
6 |
7 | import typing_extensions
8 |
9 | import pydantic
10 |
11 |
12 | class FieldMetadata:
13 | """
14 | Metadata class used to annotate fields to provide additional information.
15 |
16 | Example:
17 | class MyDict(TypedDict):
18 | field: typing.Annotated[str, FieldMetadata(alias="field_name")]
19 |
20 | Will serialize: `{"field": "value"}`
21 | To: `{"field_name": "value"}`
22 | """
23 |
24 | alias: str
25 |
26 | def __init__(self, *, alias: str) -> None:
27 | self.alias = alias
28 |
29 |
30 | def convert_and_respect_annotation_metadata(
31 | *,
32 | object_: typing.Any,
33 | annotation: typing.Any,
34 | inner_type: typing.Optional[typing.Any] = None,
35 | direction: typing.Literal["read", "write"],
36 | ) -> typing.Any:
37 | """
38 | Respect the metadata annotations on a field, such as aliasing. This function effectively
39 | manipulates the dict-form of an object to respect the metadata annotations. This is primarily used for
40 | TypedDicts, which cannot support aliasing out of the box, and can be extended for additional
41 | utilities, such as defaults.
42 |
43 | Parameters
44 | ----------
45 | object_ : typing.Any
46 |
47 | annotation : type
48 | The type we're looking to apply typing annotations from
49 |
50 | inner_type : typing.Optional[type]
51 |
52 | Returns
53 | -------
54 | typing.Any
55 | """
56 |
57 | if object_ is None:
58 | return None
59 | if inner_type is None:
60 | inner_type = annotation
61 |
62 | clean_type = _remove_annotations(inner_type)
63 | # Pydantic models
64 | if (
65 | inspect.isclass(clean_type)
66 | and issubclass(clean_type, pydantic.BaseModel)
67 | and isinstance(object_, typing.Mapping)
68 | ):
69 | return _convert_mapping(object_, clean_type, direction)
70 | # TypedDicts
71 | if typing_extensions.is_typeddict(clean_type) and isinstance(object_, typing.Mapping):
72 | return _convert_mapping(object_, clean_type, direction)
73 |
74 | if (
75 | typing_extensions.get_origin(clean_type) == typing.Dict
76 | or typing_extensions.get_origin(clean_type) == dict
77 | or clean_type == typing.Dict
78 | ) and isinstance(object_, typing.Dict):
79 | key_type = typing_extensions.get_args(clean_type)[0]
80 | value_type = typing_extensions.get_args(clean_type)[1]
81 |
82 | return {
83 | key: convert_and_respect_annotation_metadata(
84 | object_=value,
85 | annotation=annotation,
86 | inner_type=value_type,
87 | direction=direction,
88 | )
89 | for key, value in object_.items()
90 | }
91 |
92 | # If you're iterating on a string, do not bother to coerce it to a sequence.
93 | if not isinstance(object_, str):
94 | if (
95 | typing_extensions.get_origin(clean_type) == typing.Set
96 | or typing_extensions.get_origin(clean_type) == set
97 | or clean_type == typing.Set
98 | ) and isinstance(object_, typing.Set):
99 | inner_type = typing_extensions.get_args(clean_type)[0]
100 | return {
101 | convert_and_respect_annotation_metadata(
102 | object_=item,
103 | annotation=annotation,
104 | inner_type=inner_type,
105 | direction=direction,
106 | )
107 | for item in object_
108 | }
109 | elif (
110 | (
111 | typing_extensions.get_origin(clean_type) == typing.List
112 | or typing_extensions.get_origin(clean_type) == list
113 | or clean_type == typing.List
114 | )
115 | and isinstance(object_, typing.List)
116 | ) or (
117 | (
118 | typing_extensions.get_origin(clean_type) == typing.Sequence
119 | or typing_extensions.get_origin(clean_type) == collections.abc.Sequence
120 | or clean_type == typing.Sequence
121 | )
122 | and isinstance(object_, typing.Sequence)
123 | ):
124 | inner_type = typing_extensions.get_args(clean_type)[0]
125 | return [
126 | convert_and_respect_annotation_metadata(
127 | object_=item,
128 | annotation=annotation,
129 | inner_type=inner_type,
130 | direction=direction,
131 | )
132 | for item in object_
133 | ]
134 |
135 | if typing_extensions.get_origin(clean_type) == typing.Union:
136 | # We should be able to ~relatively~ safely try to convert keys against all
137 | # member types in the union, the edge case here is if one member aliases a field
138 | # of the same name to a different name from another member
139 | # Or if another member aliases a field of the same name that another member does not.
140 | for member in typing_extensions.get_args(clean_type):
141 | object_ = convert_and_respect_annotation_metadata(
142 | object_=object_,
143 | annotation=annotation,
144 | inner_type=member,
145 | direction=direction,
146 | )
147 | return object_
148 |
149 | annotated_type = _get_annotation(annotation)
150 | if annotated_type is None:
151 | return object_
152 |
153 | # If the object is not a TypedDict, a Union, or other container (list, set, sequence, etc.)
154 | # Then we can safely call it on the recursive conversion.
155 | return object_
156 |
157 |
158 | def _convert_mapping(
159 | object_: typing.Mapping[str, object],
160 | expected_type: typing.Any,
161 | direction: typing.Literal["read", "write"],
162 | ) -> typing.Mapping[str, object]:
163 | converted_object: typing.Dict[str, object] = {}
164 | annotations = typing_extensions.get_type_hints(expected_type, include_extras=True)
165 | aliases_to_field_names = _get_alias_to_field_name(annotations)
166 | for key, value in object_.items():
167 | if direction == "read" and key in aliases_to_field_names:
168 | dealiased_key = aliases_to_field_names.get(key)
169 | if dealiased_key is not None:
170 | type_ = annotations.get(dealiased_key)
171 | else:
172 | type_ = annotations.get(key)
173 | # Note you can't get the annotation by the field name if you're in read mode, so you must check the aliases map
174 | #
175 | # So this is effectively saying if we're in write mode, and we don't have a type, or if we're in read mode and we don't have an alias
176 | # then we can just pass the value through as is
177 | if type_ is None:
178 | converted_object[key] = value
179 | elif direction == "read" and key not in aliases_to_field_names:
180 | converted_object[key] = convert_and_respect_annotation_metadata(
181 | object_=value, annotation=type_, direction=direction
182 | )
183 | else:
184 | converted_object[_alias_key(key, type_, direction, aliases_to_field_names)] = (
185 | convert_and_respect_annotation_metadata(object_=value, annotation=type_, direction=direction)
186 | )
187 | return converted_object
188 |
189 |
190 | def _get_annotation(type_: typing.Any) -> typing.Optional[typing.Any]:
191 | maybe_annotated_type = typing_extensions.get_origin(type_)
192 | if maybe_annotated_type is None:
193 | return None
194 |
195 | if maybe_annotated_type == typing_extensions.NotRequired:
196 | type_ = typing_extensions.get_args(type_)[0]
197 | maybe_annotated_type = typing_extensions.get_origin(type_)
198 |
199 | if maybe_annotated_type == typing_extensions.Annotated:
200 | return type_
201 |
202 | return None
203 |
204 |
205 | def _remove_annotations(type_: typing.Any) -> typing.Any:
206 | maybe_annotated_type = typing_extensions.get_origin(type_)
207 | if maybe_annotated_type is None:
208 | return type_
209 |
210 | if maybe_annotated_type == typing_extensions.NotRequired:
211 | return _remove_annotations(typing_extensions.get_args(type_)[0])
212 |
213 | if maybe_annotated_type == typing_extensions.Annotated:
214 | return _remove_annotations(typing_extensions.get_args(type_)[0])
215 |
216 | return type_
217 |
218 |
219 | def get_alias_to_field_mapping(type_: typing.Any) -> typing.Dict[str, str]:
220 | annotations = typing_extensions.get_type_hints(type_, include_extras=True)
221 | return _get_alias_to_field_name(annotations)
222 |
223 |
224 | def get_field_to_alias_mapping(type_: typing.Any) -> typing.Dict[str, str]:
225 | annotations = typing_extensions.get_type_hints(type_, include_extras=True)
226 | return _get_field_to_alias_name(annotations)
227 |
228 |
229 | def _get_alias_to_field_name(
230 | field_to_hint: typing.Dict[str, typing.Any],
231 | ) -> typing.Dict[str, str]:
232 | aliases = {}
233 | for field, hint in field_to_hint.items():
234 | maybe_alias = _get_alias_from_type(hint)
235 | if maybe_alias is not None:
236 | aliases[maybe_alias] = field
237 | return aliases
238 |
239 |
240 | def _get_field_to_alias_name(
241 | field_to_hint: typing.Dict[str, typing.Any],
242 | ) -> typing.Dict[str, str]:
243 | aliases = {}
244 | for field, hint in field_to_hint.items():
245 | maybe_alias = _get_alias_from_type(hint)
246 | if maybe_alias is not None:
247 | aliases[field] = maybe_alias
248 | return aliases
249 |
250 |
251 | def _get_alias_from_type(type_: typing.Any) -> typing.Optional[str]:
252 | maybe_annotated_type = _get_annotation(type_)
253 |
254 | if maybe_annotated_type is not None:
255 | # The actual annotations are 1 onward, the first is the annotated type
256 | annotations = typing_extensions.get_args(maybe_annotated_type)[1:]
257 |
258 | for annotation in annotations:
259 | if isinstance(annotation, FieldMetadata) and annotation.alias is not None:
260 | return annotation.alias
261 | return None
262 |
263 |
264 | def _alias_key(
265 | key: str,
266 | type_: typing.Any,
267 | direction: typing.Literal["read", "write"],
268 | aliases_to_field_names: typing.Dict[str, str],
269 | ) -> str:
270 | if direction == "read":
271 | return aliases_to_field_names.get(key, key)
272 | return _get_alias_from_type(type_=type_) or key
273 |
--------------------------------------------------------------------------------
/src/scrapybara/env/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 |
--------------------------------------------------------------------------------
/src/scrapybara/env/client.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 | from ..core.client_wrapper import SyncClientWrapper
5 | from ..core.request_options import RequestOptions
6 | from ..types.env_get_response import EnvGetResponse
7 | from ..core.jsonable_encoder import jsonable_encoder
8 | from ..core.pydantic_utilities import parse_obj_as
9 | from ..errors.unprocessable_entity_error import UnprocessableEntityError
10 | from ..types.http_validation_error import HttpValidationError
11 | from json.decoder import JSONDecodeError
12 | from ..core.api_error import ApiError
13 | from ..types.env_response import EnvResponse
14 | from ..core.client_wrapper import AsyncClientWrapper
15 |
16 | # this is used as the default value for optional parameters
17 | OMIT = typing.cast(typing.Any, ...)
18 |
19 |
20 | class EnvClient:
21 | def __init__(self, *, client_wrapper: SyncClientWrapper):
22 | self._client_wrapper = client_wrapper
23 |
24 | def get(self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> EnvGetResponse:
25 | """
26 | Parameters
27 | ----------
28 | instance_id : str
29 |
30 | request_options : typing.Optional[RequestOptions]
31 | Request-specific configuration.
32 |
33 | Returns
34 | -------
35 | EnvGetResponse
36 | Successful Response
37 |
38 | Examples
39 | --------
40 | from scrapybara import Scrapybara
41 |
42 | client = Scrapybara(
43 | api_key="YOUR_API_KEY",
44 | )
45 | client.env.get(
46 | instance_id="instance_id",
47 | )
48 | """
49 | _response = self._client_wrapper.httpx_client.request(
50 | f"v1/instance/{jsonable_encoder(instance_id)}/env",
51 | method="GET",
52 | request_options=request_options,
53 | )
54 | try:
55 | if 200 <= _response.status_code < 300:
56 | return typing.cast(
57 | EnvGetResponse,
58 | parse_obj_as(
59 | type_=EnvGetResponse, # type: ignore
60 | object_=_response.json(),
61 | ),
62 | )
63 | if _response.status_code == 422:
64 | raise UnprocessableEntityError(
65 | typing.cast(
66 | HttpValidationError,
67 | parse_obj_as(
68 | type_=HttpValidationError, # type: ignore
69 | object_=_response.json(),
70 | ),
71 | )
72 | )
73 | _response_json = _response.json()
74 | except JSONDecodeError:
75 | raise ApiError(status_code=_response.status_code, body=_response.text)
76 | raise ApiError(status_code=_response.status_code, body=_response_json)
77 |
78 | def set(
79 | self,
80 | instance_id: str,
81 | *,
82 | variables: typing.Dict[str, str],
83 | request_options: typing.Optional[RequestOptions] = None,
84 | ) -> EnvResponse:
85 | """
86 | Parameters
87 | ----------
88 | instance_id : str
89 |
90 | variables : typing.Dict[str, str]
91 |
92 | request_options : typing.Optional[RequestOptions]
93 | Request-specific configuration.
94 |
95 | Returns
96 | -------
97 | EnvResponse
98 | Successful Response
99 |
100 | Examples
101 | --------
102 | from scrapybara import Scrapybara
103 |
104 | client = Scrapybara(
105 | api_key="YOUR_API_KEY",
106 | )
107 | client.env.set(
108 | instance_id="instance_id",
109 | variables={"key": "value"},
110 | )
111 | """
112 | _response = self._client_wrapper.httpx_client.request(
113 | f"v1/instance/{jsonable_encoder(instance_id)}/env",
114 | method="POST",
115 | json={
116 | "variables": variables,
117 | },
118 | headers={
119 | "content-type": "application/json",
120 | },
121 | request_options=request_options,
122 | omit=OMIT,
123 | )
124 | try:
125 | if 200 <= _response.status_code < 300:
126 | return typing.cast(
127 | EnvResponse,
128 | parse_obj_as(
129 | type_=EnvResponse, # type: ignore
130 | object_=_response.json(),
131 | ),
132 | )
133 | if _response.status_code == 422:
134 | raise UnprocessableEntityError(
135 | typing.cast(
136 | HttpValidationError,
137 | parse_obj_as(
138 | type_=HttpValidationError, # type: ignore
139 | object_=_response.json(),
140 | ),
141 | )
142 | )
143 | _response_json = _response.json()
144 | except JSONDecodeError:
145 | raise ApiError(status_code=_response.status_code, body=_response.text)
146 | raise ApiError(status_code=_response.status_code, body=_response_json)
147 |
148 | def delete(
149 | self, instance_id: str, *, keys: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
150 | ) -> EnvResponse:
151 | """
152 | Parameters
153 | ----------
154 | instance_id : str
155 |
156 | keys : typing.Sequence[str]
157 |
158 | request_options : typing.Optional[RequestOptions]
159 | Request-specific configuration.
160 |
161 | Returns
162 | -------
163 | EnvResponse
164 | Successful Response
165 |
166 | Examples
167 | --------
168 | from scrapybara import Scrapybara
169 |
170 | client = Scrapybara(
171 | api_key="YOUR_API_KEY",
172 | )
173 | client.env.delete(
174 | instance_id="instance_id",
175 | keys=["keys"],
176 | )
177 | """
178 | _response = self._client_wrapper.httpx_client.request(
179 | f"v1/instance/{jsonable_encoder(instance_id)}/env/delete",
180 | method="POST",
181 | json={
182 | "keys": keys,
183 | },
184 | headers={
185 | "content-type": "application/json",
186 | },
187 | request_options=request_options,
188 | omit=OMIT,
189 | )
190 | try:
191 | if 200 <= _response.status_code < 300:
192 | return typing.cast(
193 | EnvResponse,
194 | parse_obj_as(
195 | type_=EnvResponse, # type: ignore
196 | object_=_response.json(),
197 | ),
198 | )
199 | if _response.status_code == 422:
200 | raise UnprocessableEntityError(
201 | typing.cast(
202 | HttpValidationError,
203 | parse_obj_as(
204 | type_=HttpValidationError, # type: ignore
205 | object_=_response.json(),
206 | ),
207 | )
208 | )
209 | _response_json = _response.json()
210 | except JSONDecodeError:
211 | raise ApiError(status_code=_response.status_code, body=_response.text)
212 | raise ApiError(status_code=_response.status_code, body=_response_json)
213 |
214 |
215 | class AsyncEnvClient:
216 | def __init__(self, *, client_wrapper: AsyncClientWrapper):
217 | self._client_wrapper = client_wrapper
218 |
219 | async def get(self, instance_id: str, *, request_options: typing.Optional[RequestOptions] = None) -> EnvGetResponse:
220 | """
221 | Parameters
222 | ----------
223 | instance_id : str
224 |
225 | request_options : typing.Optional[RequestOptions]
226 | Request-specific configuration.
227 |
228 | Returns
229 | -------
230 | EnvGetResponse
231 | Successful Response
232 |
233 | Examples
234 | --------
235 | import asyncio
236 |
237 | from scrapybara import AsyncScrapybara
238 |
239 | client = AsyncScrapybara(
240 | api_key="YOUR_API_KEY",
241 | )
242 |
243 |
244 | async def main() -> None:
245 | await client.env.get(
246 | instance_id="instance_id",
247 | )
248 |
249 |
250 | asyncio.run(main())
251 | """
252 | _response = await self._client_wrapper.httpx_client.request(
253 | f"v1/instance/{jsonable_encoder(instance_id)}/env",
254 | method="GET",
255 | request_options=request_options,
256 | )
257 | try:
258 | if 200 <= _response.status_code < 300:
259 | return typing.cast(
260 | EnvGetResponse,
261 | parse_obj_as(
262 | type_=EnvGetResponse, # type: ignore
263 | object_=_response.json(),
264 | ),
265 | )
266 | if _response.status_code == 422:
267 | raise UnprocessableEntityError(
268 | typing.cast(
269 | HttpValidationError,
270 | parse_obj_as(
271 | type_=HttpValidationError, # type: ignore
272 | object_=_response.json(),
273 | ),
274 | )
275 | )
276 | _response_json = _response.json()
277 | except JSONDecodeError:
278 | raise ApiError(status_code=_response.status_code, body=_response.text)
279 | raise ApiError(status_code=_response.status_code, body=_response_json)
280 |
281 | async def set(
282 | self,
283 | instance_id: str,
284 | *,
285 | variables: typing.Dict[str, str],
286 | request_options: typing.Optional[RequestOptions] = None,
287 | ) -> EnvResponse:
288 | """
289 | Parameters
290 | ----------
291 | instance_id : str
292 |
293 | variables : typing.Dict[str, str]
294 |
295 | request_options : typing.Optional[RequestOptions]
296 | Request-specific configuration.
297 |
298 | Returns
299 | -------
300 | EnvResponse
301 | Successful Response
302 |
303 | Examples
304 | --------
305 | import asyncio
306 |
307 | from scrapybara import AsyncScrapybara
308 |
309 | client = AsyncScrapybara(
310 | api_key="YOUR_API_KEY",
311 | )
312 |
313 |
314 | async def main() -> None:
315 | await client.env.set(
316 | instance_id="instance_id",
317 | variables={"key": "value"},
318 | )
319 |
320 |
321 | asyncio.run(main())
322 | """
323 | _response = await self._client_wrapper.httpx_client.request(
324 | f"v1/instance/{jsonable_encoder(instance_id)}/env",
325 | method="POST",
326 | json={
327 | "variables": variables,
328 | },
329 | headers={
330 | "content-type": "application/json",
331 | },
332 | request_options=request_options,
333 | omit=OMIT,
334 | )
335 | try:
336 | if 200 <= _response.status_code < 300:
337 | return typing.cast(
338 | EnvResponse,
339 | parse_obj_as(
340 | type_=EnvResponse, # type: ignore
341 | object_=_response.json(),
342 | ),
343 | )
344 | if _response.status_code == 422:
345 | raise UnprocessableEntityError(
346 | typing.cast(
347 | HttpValidationError,
348 | parse_obj_as(
349 | type_=HttpValidationError, # type: ignore
350 | object_=_response.json(),
351 | ),
352 | )
353 | )
354 | _response_json = _response.json()
355 | except JSONDecodeError:
356 | raise ApiError(status_code=_response.status_code, body=_response.text)
357 | raise ApiError(status_code=_response.status_code, body=_response_json)
358 |
359 | async def delete(
360 | self, instance_id: str, *, keys: typing.Sequence[str], request_options: typing.Optional[RequestOptions] = None
361 | ) -> EnvResponse:
362 | """
363 | Parameters
364 | ----------
365 | instance_id : str
366 |
367 | keys : typing.Sequence[str]
368 |
369 | request_options : typing.Optional[RequestOptions]
370 | Request-specific configuration.
371 |
372 | Returns
373 | -------
374 | EnvResponse
375 | Successful Response
376 |
377 | Examples
378 | --------
379 | import asyncio
380 |
381 | from scrapybara import AsyncScrapybara
382 |
383 | client = AsyncScrapybara(
384 | api_key="YOUR_API_KEY",
385 | )
386 |
387 |
388 | async def main() -> None:
389 | await client.env.delete(
390 | instance_id="instance_id",
391 | keys=["keys"],
392 | )
393 |
394 |
395 | asyncio.run(main())
396 | """
397 | _response = await self._client_wrapper.httpx_client.request(
398 | f"v1/instance/{jsonable_encoder(instance_id)}/env/delete",
399 | method="POST",
400 | json={
401 | "keys": keys,
402 | },
403 | headers={
404 | "content-type": "application/json",
405 | },
406 | request_options=request_options,
407 | omit=OMIT,
408 | )
409 | try:
410 | if 200 <= _response.status_code < 300:
411 | return typing.cast(
412 | EnvResponse,
413 | parse_obj_as(
414 | type_=EnvResponse, # type: ignore
415 | object_=_response.json(),
416 | ),
417 | )
418 | if _response.status_code == 422:
419 | raise UnprocessableEntityError(
420 | typing.cast(
421 | HttpValidationError,
422 | parse_obj_as(
423 | type_=HttpValidationError, # type: ignore
424 | object_=_response.json(),
425 | ),
426 | )
427 | )
428 | _response_json = _response.json()
429 | except JSONDecodeError:
430 | raise ApiError(status_code=_response.status_code, body=_response.text)
431 | raise ApiError(status_code=_response.status_code, body=_response_json)
432 |
--------------------------------------------------------------------------------
/src/scrapybara/environment.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import enum
4 |
5 |
6 | class ScrapybaraEnvironment(enum.Enum):
7 | PRODUCTION = "https://api.scrapybara.com"
8 |
--------------------------------------------------------------------------------
/src/scrapybara/errors/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from .unprocessable_entity_error import UnprocessableEntityError
4 |
5 | __all__ = ["UnprocessableEntityError"]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/errors/unprocessable_entity_error.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.api_error import ApiError
4 | from ..types.http_validation_error import HttpValidationError
5 |
6 |
7 | class UnprocessableEntityError(ApiError):
8 | def __init__(self, body: HttpValidationError):
9 | super().__init__(status_code=422, body=body)
10 |
--------------------------------------------------------------------------------
/src/scrapybara/herd/__init__.py:
--------------------------------------------------------------------------------
1 | from ..types.act import Model
2 | from typing import Literal
3 | from pydantic import Field
4 |
5 |
6 | class Herd(Model):
7 | """Model adapter for Herd (Scrapybara-hosted LLMs).
8 |
9 | Args:
10 | name: Herd model name
11 |
12 | Returns:
13 | A Model configuration object
14 | """
15 |
16 | provider: Literal["herd"] = Field(default="herd", frozen=True)
17 |
18 | def __init__(self, name: str) -> None:
19 | super().__init__(provider="herd", name=name)
20 |
--------------------------------------------------------------------------------
/src/scrapybara/instance/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from .types import (
4 | Command,
5 | Request,
6 | Request_ClickMouse,
7 | Request_DragMouse,
8 | Request_GetCursorPosition,
9 | Request_MoveMouse,
10 | Request_PressKey,
11 | Request_Scroll,
12 | Request_TakeScreenshot,
13 | Request_TypeText,
14 | Request_Wait,
15 | )
16 |
17 | __all__ = [
18 | "Command",
19 | "Request",
20 | "Request_ClickMouse",
21 | "Request_DragMouse",
22 | "Request_GetCursorPosition",
23 | "Request_MoveMouse",
24 | "Request_PressKey",
25 | "Request_Scroll",
26 | "Request_TakeScreenshot",
27 | "Request_TypeText",
28 | "Request_Wait",
29 | ]
30 |
--------------------------------------------------------------------------------
/src/scrapybara/instance/types/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from .command import Command
4 | from .request import (
5 | Request,
6 | Request_ClickMouse,
7 | Request_DragMouse,
8 | Request_GetCursorPosition,
9 | Request_MoveMouse,
10 | Request_PressKey,
11 | Request_Scroll,
12 | Request_TakeScreenshot,
13 | Request_TypeText,
14 | Request_Wait,
15 | )
16 |
17 | __all__ = [
18 | "Command",
19 | "Request",
20 | "Request_ClickMouse",
21 | "Request_DragMouse",
22 | "Request_GetCursorPosition",
23 | "Request_MoveMouse",
24 | "Request_PressKey",
25 | "Request_Scroll",
26 | "Request_TakeScreenshot",
27 | "Request_TypeText",
28 | "Request_Wait",
29 | ]
30 |
--------------------------------------------------------------------------------
/src/scrapybara/instance/types/command.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | Command = typing.Union[typing.Literal["view", "create", "str_replace", "insert", "undo_edit"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/instance/types/request.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from __future__ import annotations
4 | from ...core.pydantic_utilities import UniversalBaseModel
5 | import typing
6 | from ...core.pydantic_utilities import IS_PYDANTIC_V2
7 | import pydantic
8 | from ...types.button import Button
9 | from ...types.click_mouse_action_click_type import ClickMouseActionClickType
10 |
11 |
12 | class Request_MoveMouse(UniversalBaseModel):
13 | action: typing.Literal["move_mouse"] = "move_mouse"
14 | coordinates: typing.List[int]
15 | hold_keys: typing.Optional[typing.List[str]] = None
16 | screenshot: typing.Optional[bool] = None
17 |
18 | if IS_PYDANTIC_V2:
19 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
20 | else:
21 |
22 | class Config:
23 | frozen = True
24 | smart_union = True
25 | extra = pydantic.Extra.allow
26 |
27 |
28 | class Request_ClickMouse(UniversalBaseModel):
29 | action: typing.Literal["click_mouse"] = "click_mouse"
30 | button: Button
31 | click_type: typing.Optional[ClickMouseActionClickType] = None
32 | coordinates: typing.Optional[typing.List[int]] = None
33 | num_clicks: typing.Optional[int] = None
34 | hold_keys: typing.Optional[typing.List[str]] = None
35 | screenshot: typing.Optional[bool] = None
36 |
37 | if IS_PYDANTIC_V2:
38 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
39 | else:
40 |
41 | class Config:
42 | frozen = True
43 | smart_union = True
44 | extra = pydantic.Extra.allow
45 |
46 |
47 | class Request_DragMouse(UniversalBaseModel):
48 | action: typing.Literal["drag_mouse"] = "drag_mouse"
49 | path: typing.List[typing.List[int]]
50 | hold_keys: typing.Optional[typing.List[str]] = None
51 | screenshot: typing.Optional[bool] = None
52 |
53 | if IS_PYDANTIC_V2:
54 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
55 | else:
56 |
57 | class Config:
58 | frozen = True
59 | smart_union = True
60 | extra = pydantic.Extra.allow
61 |
62 |
63 | class Request_Scroll(UniversalBaseModel):
64 | action: typing.Literal["scroll"] = "scroll"
65 | coordinates: typing.Optional[typing.List[int]] = None
66 | delta_x: typing.Optional[float] = None
67 | delta_y: typing.Optional[float] = None
68 | hold_keys: typing.Optional[typing.List[str]] = None
69 | screenshot: typing.Optional[bool] = None
70 |
71 | if IS_PYDANTIC_V2:
72 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
73 | else:
74 |
75 | class Config:
76 | frozen = True
77 | smart_union = True
78 | extra = pydantic.Extra.allow
79 |
80 |
81 | class Request_PressKey(UniversalBaseModel):
82 | action: typing.Literal["press_key"] = "press_key"
83 | keys: typing.List[str]
84 | duration: typing.Optional[float] = None
85 | screenshot: typing.Optional[bool] = None
86 |
87 | if IS_PYDANTIC_V2:
88 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
89 | else:
90 |
91 | class Config:
92 | frozen = True
93 | smart_union = True
94 | extra = pydantic.Extra.allow
95 |
96 |
97 | class Request_TypeText(UniversalBaseModel):
98 | action: typing.Literal["type_text"] = "type_text"
99 | text: str
100 | hold_keys: typing.Optional[typing.List[str]] = None
101 | screenshot: typing.Optional[bool] = None
102 |
103 | if IS_PYDANTIC_V2:
104 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
105 | else:
106 |
107 | class Config:
108 | frozen = True
109 | smart_union = True
110 | extra = pydantic.Extra.allow
111 |
112 |
113 | class Request_Wait(UniversalBaseModel):
114 | action: typing.Literal["wait"] = "wait"
115 | duration: float
116 | screenshot: typing.Optional[bool] = None
117 |
118 | if IS_PYDANTIC_V2:
119 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
120 | else:
121 |
122 | class Config:
123 | frozen = True
124 | smart_union = True
125 | extra = pydantic.Extra.allow
126 |
127 |
128 | class Request_TakeScreenshot(UniversalBaseModel):
129 | action: typing.Literal["take_screenshot"] = "take_screenshot"
130 |
131 | if IS_PYDANTIC_V2:
132 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
133 | else:
134 |
135 | class Config:
136 | frozen = True
137 | smart_union = True
138 | extra = pydantic.Extra.allow
139 |
140 |
141 | class Request_GetCursorPosition(UniversalBaseModel):
142 | action: typing.Literal["get_cursor_position"] = "get_cursor_position"
143 |
144 | if IS_PYDANTIC_V2:
145 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
146 | else:
147 |
148 | class Config:
149 | frozen = True
150 | smart_union = True
151 | extra = pydantic.Extra.allow
152 |
153 |
154 | Request = typing.Union[
155 | Request_MoveMouse,
156 | Request_ClickMouse,
157 | Request_DragMouse,
158 | Request_Scroll,
159 | Request_PressKey,
160 | Request_TypeText,
161 | Request_Wait,
162 | Request_TakeScreenshot,
163 | Request_GetCursorPosition,
164 | ]
165 |
--------------------------------------------------------------------------------
/src/scrapybara/notebook/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 |
--------------------------------------------------------------------------------
/src/scrapybara/openai/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Literal, Optional
2 |
3 | from pydantic import Field
4 |
5 | from ..types.act import Model
6 | from datetime import datetime
7 |
8 |
9 | class OpenAI(Model):
10 | """Model adapter for OpenAI.
11 |
12 | Supported models:
13 | - computer-use-preview
14 |
15 | Args:
16 | name: OpenAI model name, defaults to "computer-use-preview"
17 | api_key: Optional OpenAI API key
18 |
19 | Returns:
20 | A Model configuration object
21 | """
22 |
23 | provider: Literal["openai"] = Field(default="openai", frozen=True)
24 |
25 | def __init__(
26 | self,
27 | name: Optional[str] = "computer-use-preview",
28 | api_key: Optional[str] = None,
29 | ) -> None:
30 | super().__init__(provider="openai", name=name, api_key=api_key)
31 |
32 |
33 | UBUNTU_SYSTEM_PROMPT = f"""You have access to an Ubuntu VM with internet connectivity. You can install Ubuntu applications using the bash tool (prefer curl over wget).
34 |
35 | ### Running GUI Applications
36 | - To run GUI applications with the bash tool, use a subshell: `(DISPLAY=:1 xterm &)`
37 | - GUI apps may take time to load; confirm their appearance with an extra screenshot.
38 | - Chromium is the default browser. Start it using `(DISPLAY=:1 chromium &)` via the bash tool, but interact with it visually via the computer tool.
39 |
40 | ### Handling HTML and Large Text Output
41 | - To read an HTML file, open it in Chromium using the address bar.
42 | - For commands with large text output:
43 | - Redirect output to a temp file.
44 | - Use `str_replace_editor` or `grep` with context flags (`-B` and `-A`) to extract relevant sections.
45 |
46 | ### Interacting with Web Pages and Forms
47 | - Zoom out or scroll to ensure all content is visible.
48 | - When interacting with input fields:
49 | - Clear the field first using `Ctrl+A` and `Delete`.
50 | - Take an extra screenshot after pressing "Enter" to confirm the input was submitted correctly.
51 | - Move the mouse to the next field after submission.
52 |
53 | ### Efficiency and Authentication
54 | - Computer function calls take time; optimize by stringing together related actions when possible.
55 | - You are allowed to take actions on authenticated sites on behalf of the user.
56 | - Assume the user has already authenticated if they request access to a site.
57 | - For logging into additional sites, ask the user to use Auth Contexts or the Interactive Desktop.
58 |
59 | ### Handling Black Screens
60 | - If the first screenshot shows a black screen:
61 | - Click the center of the screen.
62 | - Take another screenshot.
63 |
64 | ### Best Practices
65 | - If given a complex task, break it down into smaller steps and ask for details only when necessary.
66 | - Read web pages thoroughly by scrolling down until sufficient information is gathered.
67 | - Explain each action you take and why.
68 | - Avoid asking for confirmation on routine actions (e.g., pressing "Enter" after typing a URL). Seek clarification only for ambiguous or critical actions (e.g., deleting files or submitting sensitive information).
69 | - If a user's request implies the need for external information, assume they want you to search for it and provide the answer directly.
70 |
71 | ### Date Context
72 | Today's date is {datetime.today().strftime('%A, %B %d, %Y')}."""
73 |
74 | BROWSER_SYSTEM_PROMPT = f"""You have access to a Chromium VM with internet connectivity. Chromium should already be open and running.
75 |
76 | ### Interacting with Web Pages
77 | - Use the computer tool to interact with web pages.
78 | - Zoom out or scroll to ensure all content is visible.
79 |
80 | ### Handling Input Fields
81 | - Always clear fields before entering text using `Ctrl+A` and `Delete`.
82 | - After submitting a field by pressing "Enter":
83 | - Take an extra screenshot to confirm the input was properly submitted.
84 | - Move the mouse to the next field.
85 |
86 | ### Efficiency and Authentication
87 | - Computer function calls take time; optimize by combining related actions when possible.
88 | - You are allowed to take actions on authenticated sites on behalf of the user.
89 | - Assume the user has already authenticated if they request access to a site.
90 | - To log into additional sites, ask the user to use Auth Contexts.
91 |
92 | ### Handling Black Screens
93 | - If the first screenshot shows a black screen:
94 | - Click the center of the screen.
95 | - Take another screenshot.
96 |
97 | ### Best Practices
98 | - If given a complex task, break it down into smaller steps and ask for details only when necessary.
99 | - Read web pages thoroughly by scrolling down until sufficient information is gathered.
100 | - Explain each action you take and why.
101 | - Avoid asking for confirmation on routine actions (e.g., pressing "Enter" after typing a URL). Seek clarification only for ambiguous or critical actions (e.g., deleting files or submitting sensitive information).
102 | - If a user's request implies the need for external information, assume they want you to search for it and provide the answer directly.
103 |
104 | ### Date Context
105 | Today's date is {datetime.today().strftime('%A, %B %d, %Y')}."""
106 |
107 | WINDOWS_SYSTEM_PROMPT = f"""You have access to a Windows VM with internet connectivity and can interact with the Windows desktop using the computer tool.
108 |
109 | ### Interacting with Applications and Web Pages
110 | - GUI applications may take time to load—confirm with an extra screenshot.
111 | - Microsoft Edge is the default browser.
112 | - When viewing pages:
113 | - Zoom out or scroll to ensure all content is visible.
114 |
115 | ### Handling Input Fields
116 | - Always clear fields before entering text using `Ctrl+A` and `Delete`.
117 | - After submitting a field by pressing "Enter":
118 | - Take an extra screenshot to confirm the input was properly submitted.
119 | - Move the mouse to the next field.
120 |
121 | ### Efficiency and Authentication
122 | - Computer function calls take time; optimize by combining related actions when possible.
123 | - You are allowed to take actions on authenticated sites on behalf of the user.
124 | - Assume the user has already authenticated if they request access to a site.
125 | - To log into additional sites, ask the user to use Auth Contexts or the Interactive Desktop.
126 |
127 | ### Handling Black Screens
128 | - If the first screenshot shows a black screen:
129 | - Click the center of the screen.
130 | - Take another screenshot.
131 |
132 | ### Best Practices
133 | - If given a complex task, break it down into smaller steps and ask for details only when necessary.
134 | - Read web pages thoroughly by scrolling down until sufficient information is gathered.
135 | - Explain each action you take and why.
136 | - Avoid asking for confirmation on routine actions (e.g., pressing "Enter" after typing a URL). Seek clarification only for ambiguous or critical actions (e.g., deleting files or submitting sensitive information).
137 | - If a user's request implies the need for external information, assume they want you to search for it and provide the answer directly.
138 |
139 | ### Date Context
140 | Today's date is {datetime.today().strftime('%A, %B %d, %Y')}."""
141 |
142 | STRUCTURED_OUTPUT_SECTION = """
143 | ### Final Output
144 | - When you have completed your task and are ready to provide the final result to the user, use the 'structured_output' tool.
145 | - This tool allows you to output structured data according to the provided schema.
146 | - Ensure that your output matches the expected schema by providing the correct fields and data types as specified in the tool's parameters.
147 | - The output from this tool will be passed directly back to the user as the final result.
148 | - Do not present the final result in plain text; always use the 'structured_output' tool for the final output.
149 | """
--------------------------------------------------------------------------------
/src/scrapybara/prompts/__init__.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 |
3 | UBUNTU_SYSTEM_PROMPT = f"""
4 | * You have access to an Ubuntu VM with internet connectivity
5 | * You can install Ubuntu applications using the bash tool (use curl over wget)
6 | * To run GUI applications with the bash tool, use a subshell, e.g. "(DISPLAY=:1 xterm &)", make sure to include the parantheses
7 | * GUI apps will appear but may take time to load - confirm with an extra screenshot
8 | * Chromium is the default browser
9 | * Start Chromium via the bash tool "(DISPLAY=:1 chromium &)", but interact with it visually via the computer tool
10 | * If you need to read a HTML file:
11 | - Open with the address bar in Chromium
12 | * For commands with large text output:
13 | - Redirect to a temp file
14 | - Use str_replace_editor or grep with context (-B and -A flags) to view output
15 | * When viewing pages:
16 | - Zoom out to see full content, or
17 | - Scroll to ensure you see everything
18 | * When interacting with a field, always clear the field first using "ctrl+A" and "delete"
19 | - Take an extra screenshot after clicking "enter" to confirm the field is properly submitted and move the mouse to the next field
20 | * Computer function calls take time, string together calls when possible
21 | * You are allowed to take actions on behalf of the user on sites that are authenticated
22 | * If the user asks you to access a site, assume that the user has already authenticated
23 | * To login additional sites, ask the user to use Auth Contexts or the Interactive Desktop
24 | * If first screenshot shows black screen:
25 | - Click mouse in screen center
26 | - Take another screenshot
27 | * Today's date is {datetime.today().strftime('%A, %B %d, %Y')}
28 |
29 |
30 |
31 | * If given a complex task, break down into smaller steps and ask the user for details only if necessary
32 | * Read through web pages thoroughly by scrolling down till you have gathered enough info
33 | * Be concise!
34 | """
35 | """DEPRECATED — Please import prompts from their respective models instead: `from scrapybara.anthropic import UBUNTU_SYSTEM_PROMPT`"""
36 |
37 |
38 | BROWSER_SYSTEM_PROMPT = f"""
39 | * You have access to a Chromium VM with internet connectivity
40 | * Chromium should already be open and running
41 | * You can interact with web pages using the computer tool
42 | * When viewing pages:
43 | - Zoom out to see full content, or
44 | - Scroll to ensure you see everything
45 | * When interacting with a field, always clear the field first using "ctrl+A" and "delete"
46 | - Take an extra screenshot after clicking "enter" to confirm the field is properly submitted and move the mouse to the next field
47 | * Computer function calls take time, string together calls when possible
48 | * You are allowed to take actions on behalf of the user on sites that are authenticated
49 | * If the user asks you to access a site, assume that the user has already authenticated
50 | * To login additional sites, ask the user to use Auth Contexts
51 | * If first screenshot shows black screen:
52 | - Click mouse in screen center
53 | - Take another screenshot
54 | * Today's date is {datetime.today().strftime('%A, %B %d, %Y')}
55 |
56 |
57 |
58 | * If given a complex task, break down into smaller steps and ask the user for details only if necessary
59 | * Read through web pages thoroughly by scrolling down till you have gathered enough info
60 | * Be concise!
61 | """
62 | """DEPRECATED — Please import prompts from their respective models instead: `from scrapybara.anthropic import BROWSER_SYSTEM_PROMPT`"""
63 |
64 |
65 | WINDOWS_SYSTEM_PROMPT = f"""
66 | * You have access to a Windows VM with internet connectivity
67 | * You can interact with the Windows desktop using the computer tool
68 | * GUI apps will appear but may take time to load - confirm with an extra screenshot
69 | * Edge is the default browser
70 | * When viewing pages:
71 | - Zoom out to see full content, or
72 | - Scroll to ensure you see everything
73 | * When interacting with a field, always clear the field first using "ctrl+A" and "delete"
74 | - Take an extra screenshot after clicking "enter" to confirm the field is properly submitted and move the mouse to the next field
75 | * Computer function calls take time, string together calls when possible
76 | * You are allowed to take actions on behalf of the user on sites that are authenticated
77 | * If the user asks you to access a site, assume that the user has already authenticated
78 | * To login additional sites, ask the user to use Auth Contexts or the Interactive Desktop
79 | * If first screenshot shows black screen:
80 | - Click mouse in screen center
81 | - Take another screenshot
82 | * Today's date is {datetime.today().strftime('%A, %B %d, %Y')}
83 |
84 |
85 |
86 | * If given a complex task, break down into smaller steps and ask the user for details only if necessary
87 | * Read through web pages thoroughly by scrolling down till you have gathered enough info
88 | * Be concise!
89 | """
90 | """DEPRECATED — Please import prompts from their respective models instead: `from scrapybara.anthropic import WINDOWS_SYSTEM_PROMPT`"""
91 |
--------------------------------------------------------------------------------
/src/scrapybara/py.typed:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Scrapybara/scrapybara-python/5438d954a8d63f14f7f53ffd45b8ee76b94e2070/src/scrapybara/py.typed
--------------------------------------------------------------------------------
/src/scrapybara/tools/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Any, List, Optional
2 | from pydantic import BaseModel, Field
3 |
4 | from ..types import Action, Button, ClickMouseActionClickType, Tool
5 | from ..client import BaseInstance, UbuntuInstance
6 | from ..instance.types import Command
7 | from typing import Literal
8 |
9 | class ComputerToolParameters(BaseModel):
10 | """Parameters for computer interaction commands."""
11 |
12 | action: Action = Field(description="The computer action to execute")
13 | button: Optional[Button] = Field(None, description="The button to click")
14 | click_type: Optional[ClickMouseActionClickType] = Field(
15 | None, description="The type of click to perform"
16 | )
17 | coordinates: Optional[List[int]] = Field(
18 | None, description="The coordinates to move to"
19 | )
20 | delta_x: Optional[float] = Field(None, description="The x delta to move")
21 | delta_y: Optional[float] = Field(None, description="The y delta to move")
22 | num_clicks: Optional[int] = Field(
23 | None, description="The number of clicks to perform"
24 | )
25 | hold_keys: Optional[List[str]] = Field(None, description="The keys to hold")
26 | path: Optional[List[List[int]]] = Field(None, description="The path to move to")
27 | keys: Optional[List[str]] = Field(None, description="The keys to press")
28 | text: Optional[str] = Field(None, description="The text to type")
29 | duration: Optional[float] = Field(None, description="The duration to wait")
30 |
31 |
32 | class ComputerTool(Tool):
33 | """A computer interaction tool that allows the agent to control mouse and keyboard.
34 |
35 | Available for Ubuntu, Browser, and Windows instances."""
36 |
37 | _instance: BaseInstance
38 |
39 | def __init__(self, instance: BaseInstance) -> None:
40 | super().__init__(
41 | name="computer",
42 | description="Control mouse and keyboard for computer interaction",
43 | parameters=ComputerToolParameters,
44 | )
45 | self._instance = instance
46 |
47 | def __call__(self, **kwargs: Any) -> Any:
48 | params = ComputerToolParameters.model_validate(kwargs)
49 |
50 | if params.action == "move_mouse":
51 | if not params.coordinates:
52 | raise ValueError("coordinates is required for move_mouse action")
53 | return self._instance.computer(
54 | action=params.action,
55 | coordinates=params.coordinates,
56 | hold_keys=params.hold_keys,
57 | )
58 | elif params.action == "click_mouse":
59 | if not params.button:
60 | raise ValueError("button is required for click_mouse action")
61 | return self._instance.computer(
62 | action=params.action,
63 | button=params.button,
64 | click_type=params.click_type,
65 | coordinates=params.coordinates,
66 | num_clicks=params.num_clicks,
67 | hold_keys=params.hold_keys,
68 | )
69 | elif params.action == "drag_mouse":
70 | if not params.path:
71 | raise ValueError("path is required for drag_mouse action")
72 | return self._instance.computer(
73 | action=params.action,
74 | path=params.path,
75 | hold_keys=params.hold_keys,
76 | )
77 | elif params.action == "scroll":
78 | return self._instance.computer(
79 | action=params.action,
80 | coordinates=params.coordinates,
81 | delta_x=params.delta_x,
82 | delta_y=params.delta_y,
83 | hold_keys=params.hold_keys,
84 | )
85 | elif params.action == "press_key":
86 | if not params.keys:
87 | raise ValueError("keys is required for press_key action")
88 | return self._instance.computer(
89 | action=params.action,
90 | keys=params.keys,
91 | duration=params.duration,
92 | )
93 | elif params.action == "type_text":
94 | if not params.text:
95 | raise ValueError("text is required for type_text action")
96 | return self._instance.computer(
97 | action=params.action,
98 | text=params.text,
99 | hold_keys=params.hold_keys,
100 | )
101 | elif params.action == "wait":
102 | if params.duration is None:
103 | raise ValueError("duration is required for wait action")
104 | return self._instance.computer(
105 | action=params.action,
106 | duration=params.duration,
107 | )
108 | elif params.action == "take_screenshot":
109 | return self._instance.computer(action=params.action)
110 | elif params.action == "get_cursor_position":
111 | return self._instance.computer(action=params.action)
112 | else:
113 | raise ValueError(f"Unknown action: {params.action}")
114 |
115 |
116 | class EditToolParameters(BaseModel):
117 | """Parameters for file editing commands."""
118 |
119 | command: Command = Field(description="The edit command to execute")
120 | path: str = Field(description="Path to the file to edit")
121 | file_text: Optional[str] = Field(
122 | None, description="File content for create command"
123 | )
124 | view_range: Optional[List[int]] = Field(
125 | None, description="Line range for view command"
126 | )
127 | old_str: Optional[str] = Field(
128 | None, description="String to replace for replace command"
129 | )
130 | new_str: Optional[str] = Field(None, description="New string for replace command")
131 | insert_line: Optional[int] = Field(
132 | None, description="Line number for insert command"
133 | )
134 |
135 |
136 | class EditTool(Tool):
137 | """A filesystem editor tool that allows the agent to view, create, and edit files.
138 |
139 | Available for Ubuntu instances."""
140 |
141 | _instance: UbuntuInstance
142 |
143 | def __init__(self, instance: UbuntuInstance) -> None:
144 | super().__init__(
145 | name="str_replace_editor",
146 | description="View, create, and edit files in the filesystem",
147 | parameters=EditToolParameters,
148 | )
149 | self._instance = instance
150 |
151 | def __call__(self, **kwargs: Any) -> Any:
152 | params = EditToolParameters.model_validate(kwargs)
153 | return self._instance.edit(
154 | command=params.command,
155 | path=params.path,
156 | file_text=params.file_text,
157 | view_range=params.view_range,
158 | old_str=params.old_str,
159 | new_str=params.new_str,
160 | insert_line=params.insert_line,
161 | )
162 |
163 |
164 | class BashToolParameters(BaseModel):
165 | """Parameters for bash command execution."""
166 |
167 | command: Optional[str] = Field(description="The bash command to execute")
168 | session: Optional[int] = Field(None, description="Session ID to run the command in")
169 | restart: Optional[bool] = Field(False, description="Whether to restart the shell")
170 | list_sessions: Optional[bool] = Field(None, description="Whether to list all bash sessions")
171 | check_session: Optional[int] = Field(None, description="Session ID to check status")
172 | timeout: Optional[float] = Field(None, description="Timeout for the command")
173 |
174 |
175 | class BashTool(Tool):
176 | """A shell execution tool that allows the agent to run bash commands.
177 |
178 | Available for Ubuntu instances."""
179 |
180 | _instance: UbuntuInstance
181 |
182 | def __init__(self, instance: UbuntuInstance) -> None:
183 | super().__init__(
184 | name="bash",
185 | description="Execute bash commands in the shell",
186 | parameters=BashToolParameters,
187 | )
188 | self._instance = instance
189 |
190 | def __call__(self, **kwargs: Any) -> Any:
191 | params = BashToolParameters.model_validate(kwargs)
192 | return self._instance.bash(
193 | command=params.command,
194 | session=params.session,
195 | restart=params.restart,
196 | list_sessions=params.list_sessions,
197 | check_session=params.check_session,
198 | timeout=params.timeout,
199 | )
200 |
201 |
--------------------------------------------------------------------------------
/src/scrapybara/types/__init__.py:
--------------------------------------------------------------------------------
1 | from typing import Literal
2 | from .auth_state_response import AuthStateResponse
3 | from .bash_response import BashResponse
4 | from .browser_authenticate_response import BrowserAuthenticateResponse
5 | from .browser_get_cdp_url_response import BrowserGetCdpUrlResponse
6 | from .browser_get_current_url_response import BrowserGetCurrentUrlResponse
7 | from .button import Button
8 | from .cell_type import CellType
9 | from .click_mouse_action import ClickMouseAction
10 | from .click_mouse_action_click_type import ClickMouseActionClickType
11 | from .computer_response import ComputerResponse
12 | from .deployment_config_instance_type import DeploymentConfigInstanceType
13 | from .drag_mouse_action import DragMouseAction
14 | from .edit_response import EditResponse
15 | from .env_get_response import EnvGetResponse
16 | from .env_response import EnvResponse
17 | from .execute_cell_request import ExecuteCellRequest
18 | from .file_response import FileResponse
19 | from .upload_response import UploadResponse
20 | from .get_cursor_position_action import GetCursorPositionAction
21 | from .get_instance_response import GetInstanceResponse
22 | from .get_instance_response_instance_type import GetInstanceResponseInstanceType
23 | from .http_validation_error import HttpValidationError
24 | from .instance_get_stream_url_response import InstanceGetStreamUrlResponse
25 | from .instance_screenshot_response import InstanceScreenshotResponse
26 | from .kernel_info import KernelInfo
27 | from .modify_browser_auth_response import ModifyBrowserAuthResponse
28 | from .move_mouse_action import MoveMouseAction
29 | from .notebook import Notebook
30 | from .notebook_cell import NotebookCell
31 | from .press_key_action import PressKeyAction
32 | from .save_browser_auth_response import SaveBrowserAuthResponse
33 | from .scroll_action import ScrollAction
34 | from .start_browser_response import StartBrowserResponse
35 | from .status import Status
36 | from .stop_browser_response import StopBrowserResponse
37 | from .stop_instance_response import StopInstanceResponse
38 | from .take_screenshot_action import TakeScreenshotAction
39 | from .type_text_action import TypeTextAction
40 | from .validation_error import ValidationError
41 | from .validation_error_loc_item import ValidationErrorLocItem
42 | from .wait_action import WaitAction
43 | from .act import (
44 | TextPart,
45 | ImagePart,
46 | ToolCallPart,
47 | ToolResultPart,
48 | UserMessage,
49 | AssistantMessage,
50 | ToolMessage,
51 | Message,
52 | Model,
53 | SingleActRequest,
54 | TokenUsage,
55 | SingleActResponse,
56 | Step,
57 | ActResponse,
58 | )
59 | from .tool import Tool, ApiTool
60 |
61 | Action = Literal[
62 | "move_mouse",
63 | "click_mouse",
64 | "drag_mouse",
65 | "scroll",
66 | "press_key",
67 | "type_text",
68 | "wait",
69 | "take_screenshot",
70 | "get_cursor_position",
71 | ]
72 |
73 | __all__ = [
74 | "ActResponse",
75 | "Action",
76 | "ApiTool",
77 | "AssistantMessage",
78 | "AuthStateResponse",
79 | "BashResponse",
80 | "BrowserAuthenticateResponse",
81 | "BrowserGetCdpUrlResponse",
82 | "BrowserGetCurrentUrlResponse",
83 | "Button",
84 | "CellType",
85 | "ClickMouseAction",
86 | "ClickMouseActionClickType",
87 | "ComputerResponse",
88 | "DeploymentConfigInstanceType",
89 | "DragMouseAction",
90 | "EditResponse",
91 | "EnvGetResponse",
92 | "EnvResponse",
93 | "ExecuteCellRequest",
94 | "FileResponse",
95 | "GetCursorPositionAction",
96 | "GetInstanceResponse",
97 | "GetInstanceResponseInstanceType",
98 | "HttpValidationError",
99 | "ImagePart",
100 | "InstanceGetStreamUrlResponse",
101 | "InstanceScreenshotResponse",
102 | "KernelInfo",
103 | "Message",
104 | "Model",
105 | "ModifyBrowserAuthResponse",
106 | "MoveMouseAction",
107 | "Notebook",
108 | "NotebookCell",
109 | "PressKeyAction",
110 | "SaveBrowserAuthResponse",
111 | "ScrollAction",
112 | "SingleActRequest",
113 | "SingleActResponse",
114 | "StartBrowserResponse",
115 | "Status",
116 | "Step",
117 | "StopBrowserResponse",
118 | "StopInstanceResponse",
119 | "TakeScreenshotAction",
120 | "TextPart",
121 | "Tool",
122 | "ToolCallPart",
123 | "ToolMessage",
124 | "ToolResultPart",
125 | "TokenUsage",
126 | "TypeTextAction",
127 | "UserMessage",
128 | "ValidationError",
129 | "ValidationErrorLocItem",
130 | "WaitAction",
131 | ]
132 |
--------------------------------------------------------------------------------
/src/scrapybara/types/act.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, List, Literal, Optional, Union, Generic, TypeVar
2 | from pydantic import BaseModel
3 | from .tool import Tool, ApiTool # noqa: F401
4 |
5 | OutputT = TypeVar("OutputT")
6 |
7 |
8 | # Message part types
9 | class TextPart(BaseModel):
10 | type: Literal["text"] = "text"
11 | text: str
12 |
13 |
14 | class ImagePart(BaseModel):
15 | type: Literal["image"] = "image"
16 | image: str # Base64 encoded image or URL
17 | mime_type: Optional[str] = None
18 |
19 |
20 | class ToolCallPart(BaseModel):
21 | type: Literal["tool-call"] = "tool-call"
22 | id: Optional[str] = None
23 | tool_call_id: str
24 | tool_name: str
25 | safety_checks: Optional[List[Any]] = None
26 | args: Dict[str, Any]
27 |
28 |
29 | class ToolResultPart(BaseModel):
30 | type: Literal["tool-result"] = "tool-result"
31 | tool_call_id: str
32 | tool_name: str
33 | result: Any
34 | is_error: Optional[bool] = False
35 |
36 | class ReasoningPart(BaseModel):
37 | type: Literal["reasoning"] = "reasoning"
38 | id: Optional[str] = None
39 | reasoning: str
40 | signature: Optional[str] = None
41 | instructions: Optional[str] = None
42 |
43 | class UserMessage(BaseModel):
44 | role: Literal["user"] = "user"
45 | content: List[Union[TextPart, ImagePart]]
46 |
47 |
48 | class AssistantMessage(BaseModel):
49 | role: Literal["assistant"] = "assistant"
50 | content: List[Union[TextPart, ToolCallPart, ReasoningPart]]
51 | response_id: Optional[str] = None
52 |
53 |
54 | class ToolMessage(BaseModel):
55 | role: Literal["tool"] = "tool"
56 | content: List[ToolResultPart]
57 |
58 |
59 | Message = Union[UserMessage, AssistantMessage, ToolMessage]
60 |
61 |
62 | # Request/Response models
63 | class Model(BaseModel):
64 | provider: Literal["anthropic", "openai", "herd"]
65 | name: str
66 | api_key: Optional[str] = None
67 |
68 |
69 | class SingleActRequest(BaseModel):
70 | model: Model
71 | system: Optional[str] = None
72 | messages: Optional[List[Message]] = None
73 | tools: Optional[List[ApiTool]] = None
74 | temperature: Optional[float] = None
75 | max_tokens: Optional[int] = None
76 |
77 |
78 | class TokenUsage(BaseModel):
79 | prompt_tokens: int
80 | completion_tokens: int
81 | total_tokens: int
82 |
83 |
84 | class SingleActResponse(BaseModel):
85 | message: AssistantMessage
86 | finish_reason: Literal[
87 | "stop", "length", "content-filter", "tool-calls", "error", "other", "unknown"
88 | ]
89 | usage: Optional[TokenUsage] = None
90 |
91 |
92 | # Step definition
93 | class Step(BaseModel):
94 | text: str
95 | response_id: Optional[str] = None
96 | reasoning_parts: Optional[List[ReasoningPart]] = None
97 | tool_calls: Optional[List[ToolCallPart]] = None
98 | tool_results: Optional[List[ToolResultPart]] = None
99 | finish_reason: Optional[
100 | Literal[
101 | "stop",
102 | "length",
103 | "content-filter",
104 | "tool-calls",
105 | "error",
106 | "other",
107 | "unknown",
108 | ]
109 | ] = None
110 | usage: Optional[TokenUsage] = None
111 |
112 |
113 | # Act response
114 | class ActResponse(BaseModel, Generic[OutputT]):
115 | messages: List[Message]
116 | steps: List[Step]
117 | text: Optional[str] = None
118 | output: OutputT
119 | usage: Optional[TokenUsage] = None
120 |
--------------------------------------------------------------------------------
/src/scrapybara/types/auth_state_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class AuthStateResponse(UniversalBaseModel):
10 | id: str
11 | name: typing.Optional[str] = None
12 |
13 | if IS_PYDANTIC_V2:
14 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
15 | else:
16 |
17 | class Config:
18 | frozen = True
19 | smart_union = True
20 | extra = pydantic.Extra.allow
21 |
--------------------------------------------------------------------------------
/src/scrapybara/types/bash_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | import typing_extensions
6 | from ..core.serialization import FieldMetadata
7 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
8 | import pydantic
9 |
10 |
11 | class BashResponse(UniversalBaseModel):
12 | """
13 | Response model for bash actions.
14 | """
15 |
16 | output: typing.Optional[str] = None
17 | error: typing.Optional[str] = None
18 | base_64_image: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="base64_image")] = None
19 | system: typing.Optional[str] = None
20 |
21 | if IS_PYDANTIC_V2:
22 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
23 | else:
24 |
25 | class Config:
26 | frozen = True
27 | smart_union = True
28 | extra = pydantic.Extra.allow
29 |
--------------------------------------------------------------------------------
/src/scrapybara/types/browser_authenticate_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class BrowserAuthenticateResponse(UniversalBaseModel):
10 | status: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/browser_get_cdp_url_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class BrowserGetCdpUrlResponse(UniversalBaseModel):
10 | cdp_url: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/browser_get_current_url_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class BrowserGetCurrentUrlResponse(UniversalBaseModel):
10 | current_url: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/button.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | Button = typing.Union[typing.Literal["left", "right", "middle", "back", "forward"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/cell_type.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | CellType = typing.Union[typing.Literal["code", "markdown", "raw"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/click_mouse_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from .button import Button
5 | import typing
6 | from .click_mouse_action_click_type import ClickMouseActionClickType
7 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
8 | import pydantic
9 |
10 |
11 | class ClickMouseAction(UniversalBaseModel):
12 | button: Button
13 | click_type: typing.Optional[ClickMouseActionClickType] = None
14 | coordinates: typing.Optional[typing.List[int]] = None
15 | num_clicks: typing.Optional[int] = None
16 | hold_keys: typing.Optional[typing.List[str]] = None
17 | screenshot: typing.Optional[bool] = None
18 |
19 | if IS_PYDANTIC_V2:
20 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
21 | else:
22 |
23 | class Config:
24 | frozen = True
25 | smart_union = True
26 | extra = pydantic.Extra.allow
27 |
--------------------------------------------------------------------------------
/src/scrapybara/types/click_mouse_action_click_type.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | ClickMouseActionClickType = typing.Union[typing.Literal["down", "up", "click"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/computer_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | import typing_extensions
6 | from ..core.serialization import FieldMetadata
7 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
8 | import pydantic
9 |
10 |
11 | class ComputerResponse(UniversalBaseModel):
12 | """
13 | Response model for computer actions.
14 | """
15 |
16 | output: typing.Optional[str] = None
17 | error: typing.Optional[str] = None
18 | base_64_image: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="base64_image")] = None
19 | system: typing.Optional[str] = None
20 |
21 | if IS_PYDANTIC_V2:
22 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
23 | else:
24 |
25 | class Config:
26 | frozen = True
27 | smart_union = True
28 | extra = pydantic.Extra.allow
29 |
--------------------------------------------------------------------------------
/src/scrapybara/types/deployment_config_instance_type.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | DeploymentConfigInstanceType = typing.Union[typing.Literal["ubuntu", "browser", "windows"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/drag_mouse_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class DragMouseAction(UniversalBaseModel):
10 | path: typing.List[typing.List[int]]
11 | hold_keys: typing.Optional[typing.List[str]] = None
12 | screenshot: typing.Optional[bool] = None
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/edit_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | import typing_extensions
6 | from ..core.serialization import FieldMetadata
7 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
8 | import pydantic
9 |
10 |
11 | class EditResponse(UniversalBaseModel):
12 | """
13 | Response model for edit actions.
14 | """
15 |
16 | output: typing.Optional[str] = None
17 | error: typing.Optional[str] = None
18 | base_64_image: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="base64_image")] = None
19 | system: typing.Optional[str] = None
20 |
21 | if IS_PYDANTIC_V2:
22 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
23 | else:
24 |
25 | class Config:
26 | frozen = True
27 | smart_union = True
28 | extra = pydantic.Extra.allow
29 |
--------------------------------------------------------------------------------
/src/scrapybara/types/env_get_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class EnvGetResponse(UniversalBaseModel):
10 | variables: typing.Dict[str, str]
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/env_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class EnvResponse(UniversalBaseModel):
10 | status: str
11 | message: str
12 |
13 | if IS_PYDANTIC_V2:
14 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
15 | else:
16 |
17 | class Config:
18 | frozen = True
19 | smart_union = True
20 | extra = pydantic.Extra.allow
21 |
--------------------------------------------------------------------------------
/src/scrapybara/types/execute_cell_request.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class ExecuteCellRequest(UniversalBaseModel):
10 | timeout: typing.Optional[int] = None
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/file_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | import typing_extensions
6 | from ..core.serialization import FieldMetadata
7 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
8 | import pydantic
9 |
10 |
11 | class FileResponse(UniversalBaseModel):
12 | """
13 | Response model for file actions.
14 | """
15 |
16 | output: typing.Optional[str] = None
17 | error: typing.Optional[str] = None
18 | base_64_image: typing_extensions.Annotated[typing.Optional[str], FieldMetadata(alias="base64_image")] = None
19 | system: typing.Optional[str] = None
20 |
21 | if IS_PYDANTIC_V2:
22 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
23 | else:
24 |
25 | class Config:
26 | frozen = True
27 | smart_union = True
28 | extra = pydantic.Extra.allow
29 |
--------------------------------------------------------------------------------
/src/scrapybara/types/get_cursor_position_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class GetCursorPositionAction(UniversalBaseModel):
10 | if IS_PYDANTIC_V2:
11 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
12 | else:
13 |
14 | class Config:
15 | frozen = True
16 | smart_union = True
17 | extra = pydantic.Extra.allow
18 |
--------------------------------------------------------------------------------
/src/scrapybara/types/get_instance_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import datetime as dt
5 | from .get_instance_response_instance_type import GetInstanceResponseInstanceType
6 | from .status import Status
7 | import typing
8 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
9 | import pydantic
10 |
11 |
12 | class GetInstanceResponse(UniversalBaseModel):
13 | id: str
14 | launch_time: dt.datetime
15 | instance_type: GetInstanceResponseInstanceType
16 | status: Status
17 | resolution: typing.Optional[typing.List[int]] = None
18 |
19 | if IS_PYDANTIC_V2:
20 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
21 | else:
22 |
23 | class Config:
24 | frozen = True
25 | smart_union = True
26 | extra = pydantic.Extra.allow
27 |
--------------------------------------------------------------------------------
/src/scrapybara/types/get_instance_response_instance_type.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | GetInstanceResponseInstanceType = typing.Union[typing.Literal["ubuntu", "browser", "windows"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/http_validation_error.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from .validation_error import ValidationError
6 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
7 | import pydantic
8 |
9 |
10 | class HttpValidationError(UniversalBaseModel):
11 | detail: typing.Optional[typing.List[ValidationError]] = None
12 |
13 | if IS_PYDANTIC_V2:
14 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
15 | else:
16 |
17 | class Config:
18 | frozen = True
19 | smart_union = True
20 | extra = pydantic.Extra.allow
21 |
--------------------------------------------------------------------------------
/src/scrapybara/types/instance_get_stream_url_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class InstanceGetStreamUrlResponse(UniversalBaseModel):
10 | stream_url: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/instance_screenshot_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing_extensions
5 | from ..core.serialization import FieldMetadata
6 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
7 | import typing
8 | import pydantic
9 |
10 |
11 | class InstanceScreenshotResponse(UniversalBaseModel):
12 | base_64_image: typing_extensions.Annotated[str, FieldMetadata(alias="base64_image")]
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/kernel_info.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class KernelInfo(UniversalBaseModel):
10 | name: str
11 | display_name: str
12 | language: str
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/modify_browser_auth_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class ModifyBrowserAuthResponse(UniversalBaseModel):
10 | status: str
11 | auth_state_id: str
12 | name: typing.Optional[str] = None
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/move_mouse_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class MoveMouseAction(UniversalBaseModel):
10 | coordinates: typing.List[int]
11 | hold_keys: typing.Optional[typing.List[str]] = None
12 | screenshot: typing.Optional[bool] = None
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/notebook.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from .notebook_cell import NotebookCell
6 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
7 | import pydantic
8 |
9 |
10 | class Notebook(UniversalBaseModel):
11 | id: str
12 | name: str
13 | kernel_name: str
14 | cells: typing.List[NotebookCell]
15 | metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
16 |
17 | if IS_PYDANTIC_V2:
18 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
19 | else:
20 |
21 | class Config:
22 | frozen = True
23 | smart_union = True
24 | extra = pydantic.Extra.allow
25 |
--------------------------------------------------------------------------------
/src/scrapybara/types/notebook_cell.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from .cell_type import CellType
5 | import typing
6 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
7 | import pydantic
8 |
9 |
10 | class NotebookCell(UniversalBaseModel):
11 | id: str
12 | type: CellType
13 | content: str
14 | metadata: typing.Optional[typing.Dict[str, typing.Optional[typing.Any]]] = None
15 | outputs: typing.Optional[typing.List[typing.Dict[str, typing.Optional[typing.Any]]]] = None
16 | execution_count: typing.Optional[int] = None
17 |
18 | if IS_PYDANTIC_V2:
19 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
20 | else:
21 |
22 | class Config:
23 | frozen = True
24 | smart_union = True
25 | extra = pydantic.Extra.allow
26 |
--------------------------------------------------------------------------------
/src/scrapybara/types/press_key_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class PressKeyAction(UniversalBaseModel):
10 | keys: typing.List[str]
11 | duration: typing.Optional[float] = None
12 | screenshot: typing.Optional[bool] = None
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/save_browser_auth_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class SaveBrowserAuthResponse(UniversalBaseModel):
10 | status: str
11 | auth_state_id: str
12 | name: typing.Optional[str] = None
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/scroll_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class ScrollAction(UniversalBaseModel):
10 | coordinates: typing.Optional[typing.List[int]] = None
11 | delta_x: typing.Optional[float] = None
12 | delta_y: typing.Optional[float] = None
13 | hold_keys: typing.Optional[typing.List[str]] = None
14 | screenshot: typing.Optional[bool] = None
15 |
16 | if IS_PYDANTIC_V2:
17 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
18 | else:
19 |
20 | class Config:
21 | frozen = True
22 | smart_union = True
23 | extra = pydantic.Extra.allow
24 |
--------------------------------------------------------------------------------
/src/scrapybara/types/start_browser_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class StartBrowserResponse(UniversalBaseModel):
10 | cdp_url: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/status.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | Status = typing.Union[typing.Literal["deploying", "running", "paused", "terminated", "error", "warm_pool"], typing.Any]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/stop_browser_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class StopBrowserResponse(UniversalBaseModel):
10 | status: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/stop_instance_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class StopInstanceResponse(UniversalBaseModel):
10 | status: str
11 |
12 | if IS_PYDANTIC_V2:
13 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
14 | else:
15 |
16 | class Config:
17 | frozen = True
18 | smart_union = True
19 | extra = pydantic.Extra.allow
20 |
--------------------------------------------------------------------------------
/src/scrapybara/types/take_screenshot_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
5 | import typing
6 | import pydantic
7 |
8 |
9 | class TakeScreenshotAction(UniversalBaseModel):
10 | if IS_PYDANTIC_V2:
11 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
12 | else:
13 |
14 | class Config:
15 | frozen = True
16 | smart_union = True
17 | extra = pydantic.Extra.allow
18 |
--------------------------------------------------------------------------------
/src/scrapybara/types/tool.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict, Optional, Type
2 | from pydantic import BaseModel
3 |
4 |
5 | class Tool(BaseModel):
6 | name: str
7 | description: Optional[str] = None
8 | parameters: Optional[Type[BaseModel]] = None
9 |
10 | def __call__(self, **kwargs: Any) -> Any:
11 | """Execute the tool with the given arguments.
12 |
13 | The kwargs type will be inferred from the parameters field's type hints.
14 | """
15 | raise NotImplementedError("Tool.__call__ must be implemented by subclasses")
16 |
17 |
18 | class ApiTool(BaseModel):
19 | """A tool that can be serialized to JSON for API calls."""
20 |
21 | name: str
22 | description: Optional[str] = None
23 | parameters: Optional[Dict[str, Any]] = None
24 |
25 | @classmethod
26 | def from_tool(cls, tool: Tool) -> "ApiTool":
27 | """Convert a Tool to an ApiTool for API serialization."""
28 | return cls(
29 | name=tool.name,
30 | description=tool.description,
31 | parameters=tool.parameters.model_json_schema() if tool.parameters else None,
32 | )
33 |
--------------------------------------------------------------------------------
/src/scrapybara/types/type_text_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class TypeTextAction(UniversalBaseModel):
10 | text: str
11 | hold_keys: typing.Optional[typing.List[str]] = None
12 | screenshot: typing.Optional[bool] = None
13 |
14 | if IS_PYDANTIC_V2:
15 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
16 | else:
17 |
18 | class Config:
19 | frozen = True
20 | smart_union = True
21 | extra = pydantic.Extra.allow
22 |
--------------------------------------------------------------------------------
/src/scrapybara/types/upload_response.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class UploadResponse(UniversalBaseModel):
10 | """
11 | Response model for file uploads.
12 | """
13 |
14 | filename: str
15 | path: str
16 | media_type: typing.Optional[str] = None
17 |
18 | if IS_PYDANTIC_V2:
19 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
20 | else:
21 |
22 | class Config:
23 | frozen = True
24 | smart_union = True
25 | extra = pydantic.Extra.allow
26 |
--------------------------------------------------------------------------------
/src/scrapybara/types/validation_error.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from .validation_error_loc_item import ValidationErrorLocItem
6 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
7 | import pydantic
8 |
9 |
10 | class ValidationError(UniversalBaseModel):
11 | loc: typing.List[ValidationErrorLocItem]
12 | msg: str
13 | type: str
14 |
15 | if IS_PYDANTIC_V2:
16 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
17 | else:
18 |
19 | class Config:
20 | frozen = True
21 | smart_union = True
22 | extra = pydantic.Extra.allow
23 |
--------------------------------------------------------------------------------
/src/scrapybara/types/validation_error_loc_item.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | import typing
4 |
5 | ValidationErrorLocItem = typing.Union[str, int]
6 |
--------------------------------------------------------------------------------
/src/scrapybara/types/wait_action.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from ..core.pydantic_utilities import UniversalBaseModel
4 | import typing
5 | from ..core.pydantic_utilities import IS_PYDANTIC_V2
6 | import pydantic
7 |
8 |
9 | class WaitAction(UniversalBaseModel):
10 | duration: float
11 | screenshot: typing.Optional[bool] = None
12 |
13 | if IS_PYDANTIC_V2:
14 | model_config: typing.ClassVar[pydantic.ConfigDict] = pydantic.ConfigDict(extra="allow", frozen=True) # type: ignore # Pydantic v2
15 | else:
16 |
17 | class Config:
18 | frozen = True
19 | smart_union = True
20 | extra = pydantic.Extra.allow
21 |
--------------------------------------------------------------------------------
/src/scrapybara/version.py:
--------------------------------------------------------------------------------
1 | from importlib import metadata
2 |
3 | __version__ = metadata.version("scrapybara")
4 |
--------------------------------------------------------------------------------
/tests/custom/test_client.py:
--------------------------------------------------------------------------------
1 | from pydantic import BaseModel
2 | from scrapybara import Scrapybara
3 | import os
4 | import pytest
5 | import tempfile
6 | import uuid
7 |
8 | from scrapybara.anthropic import (
9 | Anthropic,
10 | UBUNTU_SYSTEM_PROMPT as UBUNTU_SYSTEM_PROMPT_ANTHROPIC,
11 | BROWSER_SYSTEM_PROMPT as BROWSER_SYSTEM_PROMPT_ANTHROPIC,
12 | WINDOWS_SYSTEM_PROMPT as WINDOWS_SYSTEM_PROMPT_ANTHROPIC,
13 | )
14 | from scrapybara.openai import (
15 | OpenAI,
16 | UBUNTU_SYSTEM_PROMPT as UBUNTU_SYSTEM_PROMPT_OPENAI,
17 | BROWSER_SYSTEM_PROMPT as BROWSER_SYSTEM_PROMPT_OPENAI,
18 | WINDOWS_SYSTEM_PROMPT as WINDOWS_SYSTEM_PROMPT_OPENAI,
19 | )
20 | from scrapybara.tools import BashTool, ComputerTool, EditTool
21 |
22 |
23 | class ExampleSite(BaseModel):
24 | title: str
25 | has_links: bool
26 |
27 |
28 | def _check_api_key() -> None:
29 | if os.getenv("SCRAPYBARA_API_KEY") is None:
30 | raise ValueError("SCRAPYBARA_API_KEY is not set")
31 |
32 |
33 | def test_ubuntu() -> None:
34 | _check_api_key()
35 | client = Scrapybara()
36 |
37 | ubuntu_instance = client.start_ubuntu()
38 | print(ubuntu_instance.get_stream_url().stream_url)
39 | assert ubuntu_instance.id is not None
40 | instances = client.get_instances()
41 | assert len(instances) > 0
42 | screenshot_response = ubuntu_instance.screenshot()
43 | assert screenshot_response.base_64_image is not None
44 | ubuntu_instance.browser.start()
45 | cdp_url = ubuntu_instance.browser.get_cdp_url()
46 | assert cdp_url is not None
47 | response = client.act(
48 | model=Anthropic(),
49 | system=UBUNTU_SYSTEM_PROMPT_ANTHROPIC,
50 | prompt="Go to example.com and get the page title and whether it has any links",
51 | tools=[
52 | ComputerTool(ubuntu_instance),
53 | BashTool(ubuntu_instance),
54 | EditTool(ubuntu_instance),
55 | ],
56 | schema=ExampleSite,
57 | on_step=lambda step: print(step.text, step.tool_calls),
58 | )
59 | print(response.output)
60 | assert response.output is not None
61 | assert response.output.title is not None
62 | assert isinstance(response.output.has_links, bool)
63 | ubuntu_instance.browser.stop()
64 | ubuntu_instance.stop()
65 |
66 | @pytest.mark.skip()
67 | def test_ubuntu_openai() -> None:
68 | _check_api_key()
69 | client = Scrapybara()
70 |
71 | ubuntu_instance = client.start_ubuntu()
72 | print(ubuntu_instance.get_stream_url().stream_url)
73 | assert ubuntu_instance.id is not None
74 | instances = client.get_instances()
75 | assert len(instances) > 0
76 | screenshot_response = ubuntu_instance.screenshot()
77 | assert screenshot_response.base_64_image is not None
78 | ubuntu_instance.browser.start()
79 | cdp_url = ubuntu_instance.browser.get_cdp_url()
80 | assert cdp_url is not None
81 | response = client.act(
82 | model=OpenAI(),
83 | system=UBUNTU_SYSTEM_PROMPT_OPENAI,
84 | prompt="Go to example.com and get the page title and whether it has any links",
85 | tools=[
86 | ComputerTool(ubuntu_instance),
87 | BashTool(ubuntu_instance),
88 | EditTool(ubuntu_instance),
89 | ],
90 | schema=ExampleSite,
91 | on_step=lambda step: print(step.text, step.tool_calls),
92 | )
93 | print(response.output)
94 | assert response.output is not None
95 | assert response.output.title is not None
96 | assert isinstance(response.output.has_links, bool)
97 | ubuntu_instance.browser.stop()
98 | ubuntu_instance.stop()
99 |
100 |
101 | def test_browser() -> None:
102 | _check_api_key()
103 | client = Scrapybara()
104 |
105 | browser_instance = client.start_browser()
106 | print(browser_instance.get_stream_url().stream_url)
107 | assert browser_instance.id is not None
108 | screenshot_response = browser_instance.screenshot()
109 | assert screenshot_response.base_64_image is not None
110 | cdp_url = browser_instance.get_cdp_url()
111 | assert cdp_url is not None
112 | response = client.act(
113 | model=Anthropic(),
114 | system=BROWSER_SYSTEM_PROMPT_ANTHROPIC,
115 | prompt="Go to example.com and get the page title and whether it has any links",
116 | tools=[
117 | ComputerTool(browser_instance),
118 | ],
119 | schema=ExampleSite,
120 | on_step=lambda step: print(step.text, step.tool_calls),
121 | )
122 | print(response.output)
123 | assert response.output is not None
124 | assert response.output.title is not None
125 | assert isinstance(response.output.has_links, bool)
126 | browser_instance.stop()
127 |
128 | @pytest.mark.skip()
129 | def test_browser_openai() -> None:
130 | _check_api_key()
131 | client = Scrapybara()
132 |
133 | browser_instance = client.start_browser()
134 | print(browser_instance.get_stream_url().stream_url)
135 | assert browser_instance.id is not None
136 | screenshot_response = browser_instance.screenshot()
137 | assert screenshot_response.base_64_image is not None
138 | cdp_url = browser_instance.get_cdp_url()
139 | assert cdp_url is not None
140 | response = client.act(
141 | model=OpenAI(),
142 | system=BROWSER_SYSTEM_PROMPT_OPENAI,
143 | prompt="Go to example.com and get the page title and whether it has any links",
144 | tools=[
145 | ComputerTool(browser_instance),
146 | ],
147 | schema=ExampleSite,
148 | on_step=lambda step: print(step.text, step.tool_calls),
149 | )
150 | print(response.output)
151 | assert response.output is not None
152 | assert response.output.title is not None
153 | assert isinstance(response.output.has_links, bool)
154 | browser_instance.stop()
155 |
156 |
157 | @pytest.mark.skip()
158 | def test_windows() -> None:
159 | _check_api_key()
160 | client = Scrapybara()
161 |
162 | windows_instance = client.start_windows()
163 | print(windows_instance.get_stream_url().stream_url)
164 | assert windows_instance.id is not None
165 | screenshot_response = windows_instance.screenshot()
166 | assert screenshot_response.base_64_image is not None
167 | response = client.act(
168 | model=Anthropic(),
169 | system=WINDOWS_SYSTEM_PROMPT_ANTHROPIC,
170 | prompt="Go to example.com and get the page title and whether it has any links",
171 | tools=[
172 | ComputerTool(windows_instance),
173 | ],
174 | schema=ExampleSite,
175 | on_step=lambda step: print(step.text, step.tool_calls),
176 | )
177 | print(response.output)
178 | assert response.output is not None
179 | assert response.output.title is not None
180 | assert isinstance(response.output.has_links, bool)
181 | windows_instance.stop()
182 |
183 |
184 | @pytest.mark.skip()
185 | def test_ubuntu_thinking() -> None:
186 | _check_api_key()
187 | client = Scrapybara()
188 |
189 | ubuntu_instance = client.start_ubuntu()
190 | print(ubuntu_instance.get_stream_url().stream_url)
191 | assert ubuntu_instance.id is not None
192 | instances = client.get_instances()
193 | assert len(instances) > 0
194 | screenshot_response = ubuntu_instance.screenshot()
195 | assert screenshot_response.base_64_image is not None
196 | ubuntu_instance.browser.start()
197 | cdp_url = ubuntu_instance.browser.get_cdp_url()
198 | assert cdp_url is not None
199 | response = client.act(
200 | model=Anthropic(name="claude-3-7-sonnet-20250219-thinking"),
201 | system=UBUNTU_SYSTEM_PROMPT_ANTHROPIC,
202 | prompt="Go to example.com and get the page title and whether it has any links",
203 | tools=[
204 | ComputerTool(ubuntu_instance),
205 | BashTool(ubuntu_instance),
206 | EditTool(ubuntu_instance),
207 | ],
208 | schema=ExampleSite,
209 | on_step=lambda step: print(step.text, step.tool_calls, step.reasoning_parts),
210 | )
211 | print(response.output)
212 | assert response.output is not None
213 | assert response.output.title is not None
214 | assert isinstance(response.output.has_links, bool)
215 | ubuntu_instance.browser.stop()
216 | ubuntu_instance.stop()
217 |
218 |
219 | @pytest.mark.skip()
220 | def test_browser_thinking() -> None:
221 | _check_api_key()
222 | client = Scrapybara()
223 |
224 | browser_instance = client.start_browser()
225 | print(browser_instance.get_stream_url().stream_url)
226 | assert browser_instance.id is not None
227 | screenshot_response = browser_instance.screenshot()
228 | assert screenshot_response.base_64_image is not None
229 | cdp_url = browser_instance.get_cdp_url()
230 | assert cdp_url is not None
231 | response = client.act(
232 | model=Anthropic(name="claude-3-7-sonnet-20250219-thinking"),
233 | system=BROWSER_SYSTEM_PROMPT_ANTHROPIC,
234 | prompt="Go to example.com and get the page title and whether it has any links",
235 | tools=[
236 | ComputerTool(browser_instance),
237 | ],
238 | schema=ExampleSite,
239 | on_step=lambda step: print(step.text, step.tool_calls, step.reasoning_parts),
240 | )
241 | print(response.output)
242 | assert response.output is not None
243 | assert response.output.title is not None
244 | assert isinstance(response.output.has_links, bool)
245 | browser_instance.stop()
246 |
247 |
248 | def test_upload_download() -> None:
249 | _check_api_key()
250 | client = Scrapybara()
251 |
252 | # Start Ubuntu instance
253 | ubuntu_instance = client.start_ubuntu()
254 | assert ubuntu_instance.id is not None
255 |
256 | try:
257 | # Create a temporary file with test content
258 | test_content = f"Test content {uuid.uuid4()}"
259 | with tempfile.NamedTemporaryFile(mode='w+', delete=False) as temp_file:
260 | temp_file.write(test_content)
261 | temp_path = temp_file.name
262 |
263 | # Upload the file to the instance
264 | remote_path = f"test_file_{uuid.uuid4()}"
265 | with open(temp_path, 'rb') as f:
266 | upload_response = ubuntu_instance.upload(file=f, path=remote_path)
267 | assert upload_response is not None
268 |
269 | # Verify file exists on remote and content matches
270 | file_check = ubuntu_instance.bash(command=f"cat {remote_path}")
271 | assert file_check is not None
272 | assert test_content in str(file_check)
273 |
274 | # Call the download method to at least test the API call
275 | # Note: In a real application you would need to handle the response
276 | # and save the content to a local file
277 | # ubuntu_instance.download(path=remote_path)
278 |
279 | # Clean up local files
280 | os.unlink(temp_path)
281 |
282 | finally:
283 | # Always stop the instance
284 | ubuntu_instance.stop()
285 |
286 |
287 | if __name__ == "__main__":
288 | test_ubuntu()
289 | test_browser()
290 | # test_ubuntu_openai()
291 | # test_browser_openai()
292 | test_upload_download()
293 | # test_ubuntu_thinking()
294 | # test_browser_thinking()
295 | # test_windows()
296 |
--------------------------------------------------------------------------------
/tests/utils/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/__init__.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | from .circle import CircleParams
6 | from .object_with_defaults import ObjectWithDefaultsParams
7 | from .object_with_optional_field import ObjectWithOptionalFieldParams
8 | from .shape import ShapeParams, Shape_CircleParams, Shape_SquareParams
9 | from .square import SquareParams
10 | from .undiscriminated_shape import UndiscriminatedShapeParams
11 |
12 | __all__ = [
13 | "CircleParams",
14 | "ObjectWithDefaultsParams",
15 | "ObjectWithOptionalFieldParams",
16 | "ShapeParams",
17 | "Shape_CircleParams",
18 | "Shape_SquareParams",
19 | "SquareParams",
20 | "UndiscriminatedShapeParams",
21 | ]
22 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/circle.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | import typing_extensions
6 | import typing_extensions
7 | from scrapybara.core.serialization import FieldMetadata
8 |
9 |
10 | class CircleParams(typing_extensions.TypedDict):
11 | radius_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="radiusMeasurement")]
12 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/color.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | import typing
6 |
7 | Color = typing.Union[typing.Literal["red", "blue"], typing.Any]
8 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/object_with_defaults.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | import typing_extensions
6 | import typing_extensions
7 |
8 |
9 | class ObjectWithDefaultsParams(typing_extensions.TypedDict):
10 | """
11 | Defines properties with default values and validation rules.
12 | """
13 |
14 | decimal: typing_extensions.NotRequired[float]
15 | string: typing_extensions.NotRequired[str]
16 | required_string: str
17 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/object_with_optional_field.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | import typing_extensions
6 | import typing
7 | import typing_extensions
8 | from scrapybara.core.serialization import FieldMetadata
9 | import datetime as dt
10 | import uuid
11 | from .color import Color
12 | from .shape import ShapeParams
13 | from .undiscriminated_shape import UndiscriminatedShapeParams
14 |
15 |
16 | class ObjectWithOptionalFieldParams(typing_extensions.TypedDict):
17 | literal: typing.Literal["lit_one"]
18 | string: typing_extensions.NotRequired[str]
19 | integer: typing_extensions.NotRequired[int]
20 | long_: typing_extensions.NotRequired[typing_extensions.Annotated[int, FieldMetadata(alias="long")]]
21 | double: typing_extensions.NotRequired[float]
22 | bool_: typing_extensions.NotRequired[typing_extensions.Annotated[bool, FieldMetadata(alias="bool")]]
23 | datetime: typing_extensions.NotRequired[dt.datetime]
24 | date: typing_extensions.NotRequired[dt.date]
25 | uuid_: typing_extensions.NotRequired[typing_extensions.Annotated[uuid.UUID, FieldMetadata(alias="uuid")]]
26 | base_64: typing_extensions.NotRequired[typing_extensions.Annotated[str, FieldMetadata(alias="base64")]]
27 | list_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Sequence[str], FieldMetadata(alias="list")]]
28 | set_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Set[str], FieldMetadata(alias="set")]]
29 | map_: typing_extensions.NotRequired[typing_extensions.Annotated[typing.Dict[int, str], FieldMetadata(alias="map")]]
30 | enum: typing_extensions.NotRequired[Color]
31 | union: typing_extensions.NotRequired[ShapeParams]
32 | second_union: typing_extensions.NotRequired[ShapeParams]
33 | undiscriminated_union: typing_extensions.NotRequired[UndiscriminatedShapeParams]
34 | any: typing.Optional[typing.Any]
35 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/shape.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | from __future__ import annotations
6 | import typing_extensions
7 | import typing_extensions
8 | import typing
9 | from scrapybara.core.serialization import FieldMetadata
10 |
11 |
12 | class Base(typing_extensions.TypedDict):
13 | id: str
14 |
15 |
16 | class Shape_CircleParams(Base):
17 | shape_type: typing_extensions.Annotated[typing.Literal["circle"], FieldMetadata(alias="shapeType")]
18 | radius_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="radiusMeasurement")]
19 |
20 |
21 | class Shape_SquareParams(Base):
22 | shape_type: typing_extensions.Annotated[typing.Literal["square"], FieldMetadata(alias="shapeType")]
23 | length_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="lengthMeasurement")]
24 |
25 |
26 | ShapeParams = typing.Union[Shape_CircleParams, Shape_SquareParams]
27 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/square.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | import typing_extensions
6 | import typing_extensions
7 | from scrapybara.core.serialization import FieldMetadata
8 |
9 |
10 | class SquareParams(typing_extensions.TypedDict):
11 | length_measurement: typing_extensions.Annotated[float, FieldMetadata(alias="lengthMeasurement")]
12 |
--------------------------------------------------------------------------------
/tests/utils/assets/models/undiscriminated_shape.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | # This file was auto-generated by Fern from our API Definition.
4 |
5 | import typing
6 | from .circle import CircleParams
7 | from .square import SquareParams
8 |
9 | UndiscriminatedShapeParams = typing.Union[CircleParams, SquareParams]
10 |
--------------------------------------------------------------------------------
/tests/utils/test_http_client.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from scrapybara.core.http_client import get_request_body
4 | from scrapybara.core.request_options import RequestOptions
5 |
6 |
7 | def get_request_options() -> RequestOptions:
8 | return {"additional_body_parameters": {"see you": "later"}}
9 |
10 |
11 | def test_get_json_request_body() -> None:
12 | json_body, data_body = get_request_body(json={"hello": "world"}, data=None, request_options=None, omit=None)
13 | assert json_body == {"hello": "world"}
14 | assert data_body is None
15 |
16 | json_body_extras, data_body_extras = get_request_body(
17 | json={"goodbye": "world"}, data=None, request_options=get_request_options(), omit=None
18 | )
19 |
20 | assert json_body_extras == {"goodbye": "world", "see you": "later"}
21 | assert data_body_extras is None
22 |
23 |
24 | def test_get_files_request_body() -> None:
25 | json_body, data_body = get_request_body(json=None, data={"hello": "world"}, request_options=None, omit=None)
26 | assert data_body == {"hello": "world"}
27 | assert json_body is None
28 |
29 | json_body_extras, data_body_extras = get_request_body(
30 | json=None, data={"goodbye": "world"}, request_options=get_request_options(), omit=None
31 | )
32 |
33 | assert data_body_extras == {"goodbye": "world", "see you": "later"}
34 | assert json_body_extras is None
35 |
36 |
37 | def test_get_none_request_body() -> None:
38 | json_body, data_body = get_request_body(json=None, data=None, request_options=None, omit=None)
39 | assert data_body is None
40 | assert json_body is None
41 |
42 | json_body_extras, data_body_extras = get_request_body(
43 | json=None, data=None, request_options=get_request_options(), omit=None
44 | )
45 |
46 | assert json_body_extras == {"see you": "later"}
47 | assert data_body_extras is None
48 |
49 |
50 | def test_get_empty_json_request_body() -> None:
51 | unrelated_request_options: RequestOptions = {"max_retries": 3}
52 | json_body, data_body = get_request_body(json=None, data=None, request_options=unrelated_request_options, omit=None)
53 | assert json_body is None
54 | assert data_body is None
55 |
56 | json_body_extras, data_body_extras = get_request_body(
57 | json={}, data=None, request_options=unrelated_request_options, omit=None
58 | )
59 |
60 | assert json_body_extras is None
61 | assert data_body_extras is None
62 |
--------------------------------------------------------------------------------
/tests/utils/test_query_encoding.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 |
4 | from scrapybara.core.query_encoder import encode_query
5 |
6 |
7 | def test_query_encoding_deep_objects() -> None:
8 | assert encode_query({"hello world": "hello world"}) == [("hello world", "hello world")]
9 | assert encode_query({"hello_world": {"hello": "world"}}) == [("hello_world[hello]", "world")]
10 | assert encode_query({"hello_world": {"hello": {"world": "today"}, "test": "this"}, "hi": "there"}) == [
11 | ("hello_world[hello][world]", "today"),
12 | ("hello_world[test]", "this"),
13 | ("hi", "there"),
14 | ]
15 |
16 |
17 | def test_query_encoding_deep_object_arrays() -> None:
18 | assert encode_query({"objects": [{"key": "hello", "value": "world"}, {"key": "foo", "value": "bar"}]}) == [
19 | ("objects[key]", "hello"),
20 | ("objects[value]", "world"),
21 | ("objects[key]", "foo"),
22 | ("objects[value]", "bar"),
23 | ]
24 | assert encode_query(
25 | {"users": [{"name": "string", "tags": ["string"]}, {"name": "string2", "tags": ["string2", "string3"]}]}
26 | ) == [
27 | ("users[name]", "string"),
28 | ("users[tags]", "string"),
29 | ("users[name]", "string2"),
30 | ("users[tags]", "string2"),
31 | ("users[tags]", "string3"),
32 | ]
33 |
34 |
35 | def test_encode_query_with_none() -> None:
36 | encoded = encode_query(None)
37 | assert encoded == None
38 |
--------------------------------------------------------------------------------
/tests/utils/test_serialization.py:
--------------------------------------------------------------------------------
1 | # This file was auto-generated by Fern from our API Definition.
2 |
3 | from typing import List, Any
4 |
5 | from scrapybara.core.serialization import convert_and_respect_annotation_metadata
6 | from .assets.models import ShapeParams, ObjectWithOptionalFieldParams
7 |
8 |
9 | UNION_TEST: ShapeParams = {"radius_measurement": 1.0, "shape_type": "circle", "id": "1"}
10 | UNION_TEST_CONVERTED = {"shapeType": "circle", "radiusMeasurement": 1.0, "id": "1"}
11 |
12 |
13 | def test_convert_and_respect_annotation_metadata() -> None:
14 | data: ObjectWithOptionalFieldParams = {
15 | "string": "string",
16 | "long_": 12345,
17 | "bool_": True,
18 | "literal": "lit_one",
19 | "any": "any",
20 | }
21 | converted = convert_and_respect_annotation_metadata(
22 | object_=data, annotation=ObjectWithOptionalFieldParams, direction="write"
23 | )
24 | assert converted == {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"}
25 |
26 |
27 | def test_convert_and_respect_annotation_metadata_in_list() -> None:
28 | data: List[ObjectWithOptionalFieldParams] = [
29 | {"string": "string", "long_": 12345, "bool_": True, "literal": "lit_one", "any": "any"},
30 | {"string": "another string", "long_": 67890, "list_": [], "literal": "lit_one", "any": "any"},
31 | ]
32 | converted = convert_and_respect_annotation_metadata(
33 | object_=data, annotation=List[ObjectWithOptionalFieldParams], direction="write"
34 | )
35 |
36 | assert converted == [
37 | {"string": "string", "long": 12345, "bool": True, "literal": "lit_one", "any": "any"},
38 | {"string": "another string", "long": 67890, "list": [], "literal": "lit_one", "any": "any"},
39 | ]
40 |
41 |
42 | def test_convert_and_respect_annotation_metadata_in_nested_object() -> None:
43 | data: ObjectWithOptionalFieldParams = {
44 | "string": "string",
45 | "long_": 12345,
46 | "union": UNION_TEST,
47 | "literal": "lit_one",
48 | "any": "any",
49 | }
50 | converted = convert_and_respect_annotation_metadata(
51 | object_=data, annotation=ObjectWithOptionalFieldParams, direction="write"
52 | )
53 |
54 | assert converted == {
55 | "string": "string",
56 | "long": 12345,
57 | "union": UNION_TEST_CONVERTED,
58 | "literal": "lit_one",
59 | "any": "any",
60 | }
61 |
62 |
63 | def test_convert_and_respect_annotation_metadata_in_union() -> None:
64 | converted = convert_and_respect_annotation_metadata(object_=UNION_TEST, annotation=ShapeParams, direction="write")
65 |
66 | assert converted == UNION_TEST_CONVERTED
67 |
68 |
69 | def test_convert_and_respect_annotation_metadata_with_empty_object() -> None:
70 | data: Any = {}
71 | converted = convert_and_respect_annotation_metadata(object_=data, annotation=ShapeParams, direction="write")
72 | assert converted == data
73 |
--------------------------------------------------------------------------------