├── .env.example ├── .github └── workflows │ ├── pypi.yml │ └── testpypi.yml ├── .gitignore ├── Changelog.md ├── LICENSE ├── README.md ├── conftest.py ├── fxn ├── __init__.py ├── beta │ ├── __init__.py │ ├── cli │ │ ├── __init__.py │ │ └── llm.py │ ├── client.py │ ├── llm │ │ ├── __init__.py │ │ └── server.py │ ├── metadata.py │ └── services │ │ ├── __init__.py │ │ ├── prediction.py │ │ └── remote.py ├── c │ ├── __init__.py │ ├── configuration.py │ ├── fxnc.py │ ├── map.py │ ├── prediction.py │ ├── predictor.py │ ├── stream.py │ └── value.py ├── cli │ ├── __init__.py │ ├── auth.py │ ├── compile.py │ ├── misc.py │ ├── predictions.py │ ├── predictors.py │ └── sources.py ├── client.py ├── compile.py ├── function.py ├── lib │ └── __init__.py ├── logging.py ├── sandbox.py ├── services │ ├── __init__.py │ ├── prediction.py │ ├── predictor.py │ └── user.py ├── types │ ├── __init__.py │ ├── dtype.py │ ├── prediction.py │ ├── predictor.py │ └── user.py └── version.py ├── fxnc.py ├── pyproject.toml ├── requirements.txt └── test ├── compile_test.py ├── media ├── cat.jpg └── pexels-samson-katt-5255233.jpg ├── predict_test.py ├── prediction_test.py ├── predictor_test.py ├── remote_prediction_test.py └── user_test.py /.env.example: -------------------------------------------------------------------------------- 1 | # Function 2 | FXN_ACCESS_KEY= -------------------------------------------------------------------------------- /.github/workflows/pypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish to PyPI 2 | 3 | on: 4 | release: 5 | types: [published] 6 | workflow_dispatch: 7 | 8 | jobs: 9 | pypi: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | 14 | - uses: actions/setup-python@v2 15 | with: 16 | python-version: "3.11" 17 | 18 | - name: Install dependencies 19 | run: | 20 | python3 -m pip install --upgrade pip 21 | python3 -m pip install build twine 22 | python3 -m pip install -r requirements.txt 23 | 24 | - name: Pull Function C 25 | run: python3 fxnc.py --version 0.0.35 26 | 27 | - name: Build Function 28 | run: python3 -m build 29 | 30 | - name: Publish to PyPI 31 | run: python3 -m twine upload dist/* 32 | env: 33 | TWINE_USERNAME: __token__ 34 | TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} 35 | 36 | - uses: actions/upload-artifact@v4 37 | with: 38 | name: Wheels 39 | path: dist/ -------------------------------------------------------------------------------- /.github/workflows/testpypi.yml: -------------------------------------------------------------------------------- 1 | name: Publish to TestPyPI 2 | 3 | on: workflow_dispatch 4 | 5 | jobs: 6 | pypi: 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v4 10 | 11 | - uses: actions/setup-python@v2 12 | with: 13 | python-version: "3.11" 14 | 15 | - name: Install dependencies 16 | run: | 17 | python3 -m pip install --upgrade pip 18 | python3 -m pip install build twine 19 | python3 -m pip install -r requirements.txt 20 | 21 | - name: Pull Function C 22 | run: python3 fxnc.py --version 0.0.35 23 | 24 | - name: Build Function 25 | run: python3 -m build 26 | 27 | - name: Publish to TestPyPI 28 | run: python3 -m twine upload dist/* 29 | env: 30 | TWINE_REPOSITORY: testpypi 31 | TWINE_USERNAME: __token__ 32 | TWINE_PASSWORD: ${{ secrets.TEST_PYPI_TOKEN }} 33 | 34 | - uses: actions/upload-artifact@v4 35 | with: 36 | name: Wheels 37 | path: dist/ -------------------------------------------------------------------------------- /.gitignore:
-------------------------------------------------------------------------------- 1 | # Python 2 | __pycache__ 3 | .pytest_cache 4 | build/ 5 | dist/ 6 | *.egg 7 | *.egg-info/ 8 | 9 | # Libs 10 | fxn/**/*.dll 11 | fxn/**/*.dylib 12 | fxn/**/*.so 13 | 14 | # IDE 15 | .vscode 16 | .vs 17 | .idea 18 | 19 | # Env 20 | *.env 21 | !.env.example 22 | 23 | # Images 24 | *.png 25 | *.jpg 26 | *.jpeg 27 | *.gif 28 | !test/media/**/*.jpg 29 | !test/media/**/*.jpeg 30 | 31 | # Misc 32 | .DS_Store -------------------------------------------------------------------------------- /Changelog.md: -------------------------------------------------------------------------------- 1 | ## 0.0.54 2 | + Refactored `AccessMode` enumeration to `PredictorAccess` literal type. 3 | + Refactored `PredictorStatus` enumeration to literal type. 4 | 5 | ## 0.0.53 6 | + Added `beta.QnnInferenceMetadata.backend` field for specifying the backend to use for Qualcomm QNN AI inference. Currently, CPU and GPU backends are supported, with HTP backend coming soon. 7 | + Added `beta.QnnInferenceMetadata.quantization` field for specifying how to quantize a Qualcomm QNN model when running with the HTP backend. 8 | + Refactored `beta.ONNXInferenceMetadata` metadata type to `beta.OnnxInferenceMetadata`. 9 | + Refactored `beta.ONNXRuntimeInferenceSessionMetadata` metadata type to `beta.OnnxRuntimeInferenceSessionMetadata`. 10 | + Removed `fxn.predictions.ready` method. You must manually track whether a predictor is loaded. 11 | 12 | ## 0.0.52 13 | + Added `beta.OpenVINOInferenceMetadata` class to lower PyTorch models for inference with Intel OpenVINO. 14 | + Added `beta.QnnInferenceMetadata` class to lower PyTorch models for inference on Qualcomm accelerators with QNN SDK. 15 | + Updated `@compile` decorator to preserve type information for decorated prediction functions. 16 | 17 | ## 0.0.51 18 | + Minor improvements. 19 | 20 | ## 0.0.50 21 | + Fixed import errors in `fxn.compile` module. 22 | 23 | ## 0.0.49 24 | + Fixed import errors in `fxn.beta` module. 25 | 26 | ## 0.0.48 27 | + Added `beta.LiteRTInferenceMetadata` class to lower PyTorch models for inference with LiteRT (fka TensorFlow Lite). 28 | + Refactored `beta.GGUFInferenceMetadata` class to `beta.LlamaCppInferenceMetadata`. 29 | + Upgraded to Function 0.0.35. 30 | 31 | ## 0.0.47 32 | + Fixed `Sandbox.upload_file` function failing to upload files when `from_path` is not an absolute path. 33 | + Fixed `Sandbox.upload_directory` function failing to upload files when `from_path` is not an absolute path. 34 | 35 | ## 0.0.46 36 | + Added `metadata` parameter in `@compile` decorator to provide metadata to the compiler. 37 | + Added `beta.CoreMLInferenceMetadata` class to lower PyTorch models for inference on iOS, macOS, and visionOS with CoreML. 38 | + Added `beta.ONNXInferenceMetadata` class to lower PyTorch models to ONNX for inference. 39 | + Added `beta.ONNXRuntimeInferenceSessionMetadata` class to lower ONNXRuntime inference sessions for inference. 40 | + Added `beta.GGUFInferenceMetadata` class to lower GGUF models for LLM inference. 41 | 42 | ## 0.0.45 43 | + Added `fxn source` CLI command to retrieve the native source code for a given prediction. 44 | + Added `--overwrite` flag in `fxn compile` CLI command to overwrite existing predictor before compiling. 45 | 46 | ## 0.0.44 47 | + Added `trace_modules` argument in `@compile` decorator to opt specific modules into tracing and compilation. 
48 | + Added `targets` argument in `@compile` decorator for specifying targets to compile for. 49 | + Fixed prediction error when prediction output includes a `PIL.Image`. 50 | + Refactored `fxn.predictions.stream` method to return an `Iterator` instead of `AsyncIterator`. 51 | + Refactored `Acceleration` enumeration to string literal type. 52 | + Refactored `RemoteAcceleration` enumeration to string literal type. 53 | 54 | ## 0.0.43 55 | + Added `fxn compile` CLI command for access to the Function compiler proof of concept. 56 | + Added `fxn archive` CLI command for archiving predictors. 57 | + Added `fxn delete` CLI command for deleting predictors. 58 | + Refactored `PredictorStatus.Provisioning` enumeration member to `Compiling`. 59 | 60 | ## 0.0.42 61 | + Added `fxn.beta.predictions.remote.create` method for creating predictions on remote GPU servers. 62 | 63 | ## 0.0.41 64 | + Added support for streaming predictions. 65 | + Added `fxn.predictions.ready` to check whether a predictor has been preloaded. 66 | + Added `verbose` parameter in `fxn.predictions.create` method to print prediction progress. 67 | + Added `--quiet` option in `fxn predict` CLI action to suppress verbose logging. 68 | + Fixed prediction errors when passing in `str`, `list`, or `dict` arguments. 69 | + Fixed invalid data type error when prediction returns a greyscale image. 70 | + Fixed prediction error in CLI when passing file path as prediction input value. 71 | + Refactored `Acceleration.Default` enumeration member to `Acceleration.Auto`. 72 | + Removed `Profile` type. Use `User` type instead. 73 | + Updated to Function C 0.0.31. 74 | 75 | ## 0.0.40 76 | + Fixed errors when `Function` client is created for the first time on a new device. 77 | + Updated to Function C 0.0.29. 78 | 79 | ## 0.0.39 80 | + Fixed errors when `Function` client is created on Windows. 81 | 82 | ## 0.0.38 83 | + Function now supports Linux, across `x86_64` and `arm64` architectures. 84 | 85 | ## 0.0.37 86 | + Added `fxn --explore` CLI action to explore predictions on [fxn.ai](https://fxn.ai/explore). 87 | 88 | ## 0.0.36 89 | + Added `Acceleration.Default` enumeration constant. 90 | + Added `Acceleration.GPU` enumeration constant for running predictions on the GPU. 91 | + Added `Acceleration.NPU` enumeration constant for running predictions on the neural processor. 92 | + Fixed crash when using `PIL.Image` values returned by edge predictors. 93 | + Updated to Function C 0.0.26. 94 | + Removed `Value` type. 95 | + Removed `PredictorType` enumeration. 96 | + Removed `fxn.predictors.create` method for creating predictors. [Apply](https://fxn.ai/waitlist) for early access to the new experience. 97 | + Removed `fxn.predictions.to_object` method. 98 | + Removed `fxn.predictions.to_value` method. 99 | + Removed `Predictor.type` field. 100 | + Removed `Predictor.acceleration` field. 101 | + Removed `Prediction.type` field. 102 | + Removed `Acceleration.A40` enumeration constant. 103 | + Removed `Acceleration.A100` enumeration constant. 104 | + Removed `fxn create` CLI function. 105 | + Removed `fxn delete` CLI function. 106 | + Removed `fxn list` CLI function. 107 | + Removed `fxn search` CLI function. 108 | + Removed `fxn retrieve` CLI function. 109 | + Removed `fxn archive` CLI function. 110 | + Removed `fxn env` CLI function group. 111 | + Removed `--raw-outputs` option from `fxn predict` CLI function. 112 | + Function now requires Python 3.10+. 113 | 114 | ## 0.0.35 115 | + Updated to Function C 0.0.18.
116 | 117 | ## 0.0.34 118 | + Fixed `fxn` import error caused by `fxn.predictions.stream` function. 119 | 120 | ## 0.0.33 121 | + Fixed error in `fxn.predictors.retrieve` function. 122 | 123 | ## 0.0.32 124 | + Added missing native libraries. 125 | 126 | ## 0.0.31 127 | + Added experimental support for making on-device predictions. 128 | + Added `PredictionResource.name` field for handling prediction resources with required file names. 129 | 130 | ## 0.0.30 131 | + Fixed data type inference when making predictions. 132 | 133 | ## 0.0.29 134 | + Minor fixes and improvements. 135 | 136 | ## 0.0.28 137 | + Added `fxn create --cloud` CLI shorthand flag for setting the predictor type to `PredictorType.Cloud`. 138 | + Added `fxn create --edge` CLI shorthand flag for setting the predictor type to `PredictorType.Edge`. 139 | + Removed `AccessMode.Protected` access mode. Use `AccessMode.Public` or `AccessMode.Private` instead. 140 | + Removed `fxn.types.tag.parse_tag` function. Use `Tag.from_str` class method instead. 141 | + Removed `fxn.types.tag.serialize_tag` function. Use `str(Tag)` instead. 142 | 143 | ## 0.0.27 144 | + Added support for streaming when making predictions with Function CLI. 145 | + Added `PredictionResource.type` field for inspecting the type of a prediction resource. 146 | + Fixed pydantic forward reference errors when constructing `Signature` and `Predictor` instances. 147 | + Fixed `model_dump` error when making predictions in Google Colab due to outdated `pydantic` dependency. 148 | + Refactored `fxn.predictions.create` method to accept an `inputs` dictionary instead of relying on keyword arguments. 149 | 150 | ## 0.0.26 151 | + Added support for serializing `BytesIO` instances in `fxn.predictions.to_value` method. 152 | + Refactored `fxn.predictions.to_value` method to `to_object` for clarity. 153 | + Refactored `fxn.predictions.from_value` method to `to_value` for clarity. 154 | + Updated `fxn.predictions.to_object` method to always use aliased field names when serializing Pydantic types. 155 | 156 | ## 0.0.25 157 | + Fixed JSON serialization errors when using the CLI to perform some operations. 158 | 159 | ## 0.0.24 160 | + Added `Function` client class to replace functions on individual API types. 161 | + Refactored `Value.from_value` method to `fxn.predictions.from_value`. 162 | + Refactored `Value.to_value` method to `fxn.predictions.to_value`. 163 | + Changed `Parameter.default_value` field type to `Value`. 164 | + Removed `CloudPrediction` class. Use `Prediction` class instead. 165 | + Removed `EdgePrediction` class. Use `Prediction` class instead. 166 | 167 | ## 0.0.23 168 | + Added `AccessMode.Protected` enumeration member for working with protected predictors. 169 | + Added `pydantic` as an explicit dependency. 170 | 171 | ## 0.0.22 172 | + Added `Prediction.stream` method for creating streaming predictions. 173 | 174 | ## 0.0.21 175 | + Fixed `Value.from_value` method raising exception when serializing a list of Pydantic models. 176 | 177 | ## 0.0.20 178 | + Added support for serializing Pydantic models in `Value.from_value` method. 179 | 180 | ## 0.0.19 181 | + Added `Parameter.schema` field for inspecting the JSON schema for `dict` and `list` parameters. 182 | + Fixed `UnboundLocalError` when calling `Value.from_value` method with unsupported value type. 183 | 184 | ## 0.0.18 185 | + Switched to more ergonomic loading indicator in CLI. 186 | 187 | ## 0.0.17 188 | + Refactored `Predictor.readme` field to `card`. 
189 | 190 | ## 0.0.16 191 | + Added loading indicator when making predictions in CLI. 192 | 193 | ## 0.0.15 194 | + Fixed `Predictor.search` method raising error. 195 | 196 | ## 0.0.14 197 | + Added `Dtype.null` constant for working with `None` prediction values. 198 | 199 | ## 0.0.13 200 | + Refactored `Feature` class to `Value` for improved clarity. 201 | + Refactored `UploadType.Feature` enumeration member to `UploadType.Value`. 202 | 203 | ## 0.0.12 204 | + Added `Predictor.readme` field for inspecting the readme of a predictor notebook. 205 | 206 | ## 0.0.11 207 | + Added `EnumerationMember` class for working with parameters that are enumeration values. 208 | + Added `Parameter.enumeration` field for inspecting parameters which hold enumeration values. 209 | + Added `Parameter.default_value` field for inspecting the default value of a predictor parameter. 210 | + Renamed `Dtype._3d` data type to `model`. 211 | + Removed `Parameter.string_default` field. Use `Parameter.default_value` field instead. 212 | + Removed `Parameter.int_default` field. Use `Parameter.default_value` field instead. 213 | + Removed `Parameter.float_default` field. Use `Parameter.default_value` field instead. 214 | + Removed `Parameter.bool_default` field. Use `Parameter.default_value` field instead. 215 | 216 | ## 0.0.10 217 | + Added `Feature.from_value` class method for creating `Feature` instances from plain Python values. 218 | + Added `Feature.to_value` method for converting a `Feature` instance to a plain Python value. 219 | + Added `Predictor.list` class method for listing a user's predictors. 220 | + Added `fxn list` CLI command for listing a user's predictors. 221 | + Removed `features` argument in `Prediction.create` method. Use `inputs` kwargs instead. 222 | + Removed `FeatureInput` class. 223 | 224 | ## 0.0.9 225 | + Added `EnvironmentVariable` class for managing predictor environment variables. 226 | + Added `fxn env` CLI command for managing predictor environment variables. 227 | + Added Function magics to customize how predictors are provisioned. Use `%load_ext fxn.magic` in your predictor notebook. 228 | + Fixed `Prediction.create` raising exception when the prediction resulted in an error. 229 | + Moved `fxn predictors` CLI commands to top-level. You can now use commands like `fxn create` directly. 230 | 231 | ## 0.0.8 232 | + Fixed `fxn predictors create` CLI command raising authentication error. 233 | 234 | ## 0.0.7 235 | + Fixed `fxn predictors create` CLI command raising error. 236 | 237 | ## 0.0.6 238 | + Fixed `fxn predictors create` CLI command raising error. 239 | 240 | ## 0.0.5 241 | + Added `Prediction` class for making predictions. 242 | + Added `fxn predict` CLI command for making predictions. 243 | + Updated `Predictor.create` method `type` argument to be optional. Cloud predictors are now the default. 244 | 245 | ## 0.0.4 246 | + Added `Predictor.create` class method for creating predictors. 247 | + Added `fxn predictors create` CLI command for creating predictors. 248 | + Fixed `User.retrieve` method raising exception when retrieving current user. 249 | + Fixed `fxn auth logout` CLI command erroring when user was already logged out. 250 | 251 | ## 0.0.3 252 | + Added `Signature` type for inspecting predictor signatures. 253 | + Added `Parameter` type for inspecting predictor signature parameters. 254 | + Added `Acceleration` enumeration for specifying predictor acceleration. 255 | + Added `PredictorType` enumeration for specifying predictor type.
256 | 257 | ## 0.0.2 258 | + Added Function API types and services. 259 | + Added command line interface. Run `fxn` in your terminal. 260 | 261 | ## 0.0.1 262 | + First pre-release. -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. 
For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. 
You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
-------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Function for Python 2 | 3 | ![function logo](https://raw.githubusercontent.com/fxnai/.github/main/logo_wide.png) 4 | 5 | [![Dynamic JSON Badge](https://img.shields.io/badge/dynamic/json?url=https%3A%2F%2Fdiscord.com%2Fapi%2Finvites%2Fy5vwgXkz2f%3Fwith_counts%3Dtrue&query=%24.approximate_member_count&logo=discord&logoColor=white&label=Function%20community)](https://discord.gg/fxn) 6 | 7 | Run prediction functions (a.k.a. "predictors") locally in your Python apps, with full GPU acceleration and zero dependencies. 8 | 9 | > [!TIP] 10 | > [Join our waitlist](https://fxn.ai/waitlist) to bring your custom Python functions and run them on-device across Android, iOS, macOS, Linux, web, and Windows. 11 | 12 | ## Installing Function 13 | Function is distributed on PyPI. This distribution contains both the Python client and the command line interface (CLI). To install, open a terminal and run the following command: 14 | ```sh 15 | # Install Function 16 | $ pip install --upgrade fxn 17 | ``` 18 | 19 | > [!NOTE] 20 | > Function requires Python 3.10+ 21 | 22 | ## Retrieving your Access Key 23 | Head over to [fxn.ai](https://www.fxn.ai/account/developer) and log in to create an account. Once you do, generate an access key: 24 | 25 | ![generate access key](https://raw.githubusercontent.com/fxnai/.github/main/access_key.gif) 26 | 27 | ## Making a Prediction 28 | First, create a Function client, specifying your access key: 29 | ```py 30 | from fxn import Function 31 | 32 | # Create the Function client 33 | fxn = Function(access_key="<ACCESS KEY>") 34 | ``` 35 | 36 | Then make a prediction: 37 | ```py 38 | # Create a prediction 39 | prediction = fxn.predictions.create( 40 | tag="@fxn/greeting", 41 | inputs={ "name": "Peter" } 42 | ) 43 | # Print the returned greeting 44 | print(prediction.results[0]) 45 | ``` 46 | 47 | ## Using the Function CLI 48 | Open up a terminal and log in to the Function CLI: 49 | ```sh 50 | # Login to Function 51 | $ fxn auth login 52 | ``` 53 | 54 | Then make a prediction: 55 | ```sh 56 | # Make a prediction using the Function CLI 57 | $ fxn predict @fxn/greeting --name Peter 58 | ``` 59 | 60 | ___ 61 | 62 | ## Useful Links 63 | - [Discover predictors to use in your apps](https://fxn.ai/explore). 64 | - [Join our Discord community](https://discord.gg/fxn). 65 | - [Check out our docs](https://docs.fxn.ai). 66 | - Learn more about us [on our blog](https://blog.fxn.ai). 67 | - Reach out to us at [hi@fxn.ai](mailto:hi@fxn.ai). 68 | 69 | Function is a product of [NatML Inc](https://github.com/natmlx). 70 | -------------------------------------------------------------------------------- /conftest.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # -------------------------------------------------------------------------------- /fxn/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved.
4 | # 5 | 6 | from .client import FunctionAPIError 7 | from .compile import compile, CompileMetadata, CompileTarget 8 | from .function import Function 9 | from .sandbox import Sandbox 10 | from .types import * 11 | from .version import __version__ -------------------------------------------------------------------------------- /fxn/beta/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from .metadata import ( 7 | CoreMLInferenceMetadata, LiteRTInferenceMetadata, LlamaCppInferenceMetadata, 8 | OnnxInferenceMetadata, OnnxRuntimeInferenceSessionMetadata, OpenVINOInferenceMetadata, 9 | QnnInferenceMetadata, QnnInferenceBackend, QnnInferenceQuantization, 10 | # Deprecated 11 | ONNXInferenceMetadata, ONNXRuntimeInferenceSessionMetadata 12 | ) 13 | from .services import RemoteAcceleration -------------------------------------------------------------------------------- /fxn/beta/cli/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from .llm import app as llm_app -------------------------------------------------------------------------------- /fxn/beta/cli/llm.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from pathlib import Path 7 | from typer import Argument, Option, Typer 8 | from typing_extensions import Annotated 9 | 10 | app = Typer(no_args_is_help=True) 11 | 12 | @app.command(name="chat", help="Start a chat session.") 13 | def chat ( 14 | model: Annotated[str, Argument(help="Model to chat with.")] 15 | ): 16 | pass 17 | 18 | @app.command(name="serve", help="Start an LLM server.") 19 | def serve ( 20 | port: Annotated[int, Option(help="Port to start the server on.")] = 11435 21 | ): 22 | pass -------------------------------------------------------------------------------- /fxn/beta/client.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from functools import wraps 7 | from inspect import signature as get_signature, Signature 8 | from typing import get_origin, Callable, Generator, Iterator, TypeVar 9 | 10 | from ..client import FunctionClient 11 | from ..services import PredictionService as EdgePredictionService 12 | from ..types import Acceleration 13 | from .services import PredictionService, RemoteAcceleration 14 | 15 | F = TypeVar("F", bound=Callable[..., object]) 16 | 17 | class BetaClient: 18 | """ 19 | Client for incubating features. 20 | """ 21 | predictions: PredictionService 22 | 23 | def __init__ ( 24 | self, 25 | client: FunctionClient, 26 | *, 27 | predictions: EdgePredictionService 28 | ): 29 | self.predictions = PredictionService(client) 30 | self.__edge_predictions = predictions 31 | 32 | def predict ( # INCOMPLETE # Preload 33 | self, 34 | tag: str, 35 | *, 36 | remote: bool=False, 37 | acceleration: Acceleration | RemoteAcceleration="auto", 38 | preload: bool=True 39 | ) -> Callable[[F], F]: 40 | """ 41 | Create a prediction and return results when the decorated function is invoked. 42 | 43 | Parameters: 44 | tag (str): Predictor tag. 45 | remote (bool): Whether to create the prediction remotely. 
46 | acceleration (Acceleration | RemoteAcceleration): Prediction acceleration. 47 | preload (bool): Whether to preload the predictor on the first run. 48 | """ 49 | def decorator(func: F) -> F: 50 | signature = get_signature(func) 51 | @wraps(func) 52 | def wrapper(*args, **kwargs): 53 | bound_args = signature.bind(*args, **kwargs) 54 | bound_args.apply_defaults() 55 | stream = ( 56 | signature.return_annotation is not Signature.empty and 57 | get_origin(signature.return_annotation) in [Iterator, Generator] 58 | ) 59 | create_func = self.predictions.remote.create if remote else self.__edge_predictions.create 60 | def _predict (): # INCOMPLETE 61 | prediction = create_func( 62 | tag=tag, 63 | inputs=bound_args.arguments, 64 | acceleration=acceleration 65 | ) 66 | if prediction.error: 67 | raise RuntimeError(prediction.error) 68 | return tuple(prediction.results) if len(prediction.results) > 1 else prediction.results[0] 69 | result = _predict() 70 | return result 71 | return wrapper 72 | return decorator -------------------------------------------------------------------------------- /fxn/beta/llm/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | -------------------------------------------------------------------------------- /fxn/beta/llm/server.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | -------------------------------------------------------------------------------- /fxn/beta/metadata.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from os import PathLike 7 | from pathlib import Path 8 | from pydantic import BaseModel, BeforeValidator, ConfigDict, Field 9 | from typing import Annotated, Literal 10 | 11 | def _validate_torch_module (module: "torch.nn.Module") -> "torch.nn.Module": # type: ignore 12 | try: 13 | from torch.nn import Module # type: ignore 14 | if not isinstance(module, Module): 15 | raise ValueError(f"Expected torch.nn.Module, got {type(module)}") 16 | return module 17 | except ImportError: 18 | raise ImportError("PyTorch is required to create this metadata but is not installed.") 19 | 20 | def _validate_ort_inference_session (session: "onnxruntime.InferenceSession") -> "onnxruntime.InferenceSession": # type: ignore 21 | try: 22 | from onnxruntime import InferenceSession # type: ignore 23 | if not isinstance(session, InferenceSession): 24 | raise ValueError(f"Expected onnxruntime.InferenceSession, got {type(session)}") 25 | return session 26 | except ImportError: 27 | raise ImportError("ONNXRuntime is required to create this metadata but is not installed.") 28 | 29 | class CoreMLInferenceMetadata (BaseModel): 30 | """ 31 | Metadata required to lower a PyTorch model for inference on iOS, macOS, and visionOS with CoreML. 32 | 33 | Members: 34 | model (torch.nn.Module): PyTorch module to apply metadata to. 35 | model_args (tuple[Tensor,...]): Positional inputs to the model. 
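        Example (illustrative sketch, not from the original source; assumes PyTorch is installed and uses a hypothetical toy `torch.nn.Linear` module):

            import torch
            from fxn.beta import CoreMLInferenceMetadata

            # Any torch.nn.Module works; a linear layer keeps the sketch small
            model = torch.nn.Linear(4, 2).eval()
            metadata = CoreMLInferenceMetadata(
                model=model,
                model_args=[torch.randn(1, 4)]
            )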
36 | """ 37 | kind: Literal["meta.inference.coreml"] = "meta.inference.coreml" 38 | model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field( 39 | description="PyTorch module to apply metadata to.", 40 | exclude=True 41 | ) 42 | model_args: list[object] = Field( 43 | description="Positional inputs to the model.", 44 | exclude=True 45 | ) 46 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 47 | 48 | class OnnxInferenceMetadata (BaseModel): 49 | """ 50 | Metadata required to lower a PyTorch model for inference. 51 | 52 | Members: 53 | model (torch.nn.Module): PyTorch module to apply metadata to. 54 | model_args (tuple[Tensor,...]): Positional inputs to the model. 55 | """ 56 | kind: Literal["meta.inference.onnx"] = "meta.inference.onnx" 57 | model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field( 58 | description="PyTorch module to apply metadata to.", 59 | exclude=True 60 | ) 61 | model_args: list[object] = Field( 62 | description="Positional inputs to the model.", 63 | exclude=True 64 | ) 65 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 66 | 67 | class OnnxRuntimeInferenceSessionMetadata (BaseModel): 68 | """ 69 | Metadata required to lower an ONNXRuntime `InferenceSession` for inference. 70 | 71 | Members: 72 | session (onnxruntime.InferenceSession): ONNXRuntime inference session to apply metadata to. 73 | model_path (str | Path): ONNX model path. The model must exist at this path in the compiler sandbox. 74 | """ 75 | kind: Literal["meta.inference.onnxruntime"] = "meta.inference.onnxruntime" 76 | session: Annotated[object, BeforeValidator(_validate_ort_inference_session)] = Field( 77 | description="ONNXRuntime inference session to apply metadata to.", 78 | exclude=True 79 | ) 80 | model_path: str | Path = Field( 81 | description="ONNX model path. The model must exist at this path in the compiler sandbox.", 82 | exclude=True 83 | ) 84 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 85 | 86 | class LiteRTInferenceMetadata (BaseModel): 87 | """ 88 | Metadata required to lower PyTorch model for inference with LiteRT (fka TensorFlow Lite). 89 | 90 | Members: 91 | model (torch.nn.Module): PyTorch module to apply metadata to. 92 | model_args (tuple[Tensor,...]): Positional inputs to the model. 93 | """ 94 | kind: Literal["meta.inference.litert"] = "meta.inference.litert" 95 | model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field( 96 | description="PyTorch module to apply metadata to.", 97 | exclude=True 98 | ) 99 | model_args: list[object] = Field( 100 | description="Positional inputs to the model.", 101 | exclude=True 102 | ) 103 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 104 | 105 | class OpenVINOInferenceMetadata (BaseModel): 106 | """ 107 | Metadata required to lower PyTorch model for interence with Intel OpenVINO. 108 | 109 | Members: 110 | model (torch.nn.Module): PyTorch module to apply metadata to. 111 | model_args (tuple[Tensor,...]): Positional inputs to the model. 
112 | """ 113 | kind: Literal["meta.inference.openvino"] = "meta.inference.openvino" 114 | model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field( 115 | description="PyTorch module to apply metadata to.", 116 | exclude=True 117 | ) 118 | model_args: list[object] = Field( 119 | description="Positional inputs to the model.", 120 | exclude=True 121 | ) 122 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 123 | 124 | QnnInferenceBackend = Literal["cpu", "gpu"] # `htp` coming soon 125 | QnnInferenceQuantization = Literal["w8a8", "w8a16", "w4a8", "w4a16"] 126 | 127 | class QnnInferenceMetadata (BaseModel): 128 | """ 129 | Metadata required to lower a PyTorch model for inference on Qualcomm accelerators with QNN SDK. 130 | 131 | Members: 132 | model (torch.nn.Module): PyTorch module to apply metadata to. 133 | model_args (tuple[Tensor,...]): Positional inputs to the model. 134 | backend (QnnInferenceBackend): QNN inference backend. Defaults to `cpu`. 135 | quantization (QnnInferenceQuantization): QNN model quantization mode. This MUST only be specified when backend is `htp`. 136 | """ 137 | kind: Literal["meta.inference.qnn"] = "meta.inference.qnn" 138 | model: Annotated[object, BeforeValidator(_validate_torch_module)] = Field( 139 | description="PyTorch module to apply metadata to.", 140 | exclude=True 141 | ) 142 | model_args: list[object] = Field( 143 | description="Positional inputs to the model.", 144 | exclude=True 145 | ) 146 | backend: QnnInferenceBackend = Field( 147 | default="cpu", 148 | description="QNN backend to execute the model.", 149 | exclude=True 150 | ) 151 | quantization: QnnInferenceQuantization | None = Field( 152 | default=None, 153 | description="QNN model quantization mode. This MUST only be specified when backend is `htp`.", 154 | exclude=True 155 | ) 156 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 157 | 158 | class LlamaCppInferenceMetadata (BaseModel): # INCOMPLETE 159 | """ 160 | Metadata required to lower a GGUF model for LLM inference. 161 | """ 162 | kind: Literal["meta.inference.gguf"] = "meta.inference.gguf" 163 | model_path: Path = Field( 164 | description="GGUF model path. The model must exist at this path in the compiler sandbox.", 165 | exclude=True 166 | ) 167 | model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True) 168 | 169 | # DEPRECATED 170 | ONNXInferenceMetadata = OnnxInferenceMetadata 171 | ONNXRuntimeInferenceSessionMetadata = OnnxRuntimeInferenceSessionMetadata -------------------------------------------------------------------------------- /fxn/beta/services/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from .prediction import PredictionService 7 | from .remote import RemoteAcceleration -------------------------------------------------------------------------------- /fxn/beta/services/prediction.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ...client import FunctionClient 7 | from .remote import RemotePredictionService 8 | 9 | class PredictionService: 10 | """ 11 | Make predictions. 
12 | """ 13 | remote: RemotePredictionService 14 | 15 | def __init__ (self, client: FunctionClient): 16 | self.remote = RemotePredictionService(client) -------------------------------------------------------------------------------- /fxn/beta/services/remote.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from __future__ import annotations 7 | from base64 import b64encode 8 | from dataclasses import asdict, is_dataclass 9 | from io import BytesIO 10 | from json import dumps, loads 11 | from numpy import array, frombuffer, ndarray 12 | from PIL import Image 13 | from pydantic import BaseModel, Field 14 | from requests import get, put 15 | from typing import Literal 16 | from urllib.request import urlopen 17 | 18 | from ...c import Configuration 19 | from ...client import FunctionClient 20 | from ...services.prediction import Value 21 | from ...types import Dtype, Prediction 22 | 23 | RemoteAcceleration = Literal["auto", "cpu", "a40", "a100"] 24 | 25 | class RemotePredictionService: 26 | """ 27 | Make remote predictions. 28 | """ 29 | 30 | def __init__ (self, client: FunctionClient): 31 | self.client = client 32 | 33 | def create ( 34 | self, 35 | tag: str, 36 | *, 37 | inputs: dict[str, Value], 38 | acceleration: RemoteAcceleration="auto" 39 | ) -> Prediction: 40 | """ 41 | Create a remote prediction. 42 | 43 | Parameters: 44 | tag (str): Predictor tag. 45 | inputs (dict): Input values. 46 | acceleration (RemoteAcceleration): Prediction acceleration. 47 | 48 | Returns: 49 | Prediction: Created prediction. 50 | """ 51 | input_map = { name: self.__to_value(value, name=name).model_dump(mode="json") for name, value in inputs.items() } 52 | prediction = self.client.request( 53 | method="POST", 54 | path="/predictions/remote", 55 | body={ 56 | "tag": tag, 57 | "inputs": input_map, 58 | "acceleration": acceleration, 59 | "clientId": Configuration.get_client_id() 60 | }, 61 | response_type=RemotePrediction 62 | ) 63 | results = list(map(self.__to_object, prediction.results)) if prediction.results is not None else None 64 | prediction = Prediction(**{ **prediction.model_dump(), "results": results }) 65 | return prediction 66 | 67 | def __to_value ( 68 | self, 69 | obj: Value, 70 | *, 71 | name: str, 72 | max_data_url_size: int=4 * 1024 * 1024 73 | ) -> RemoteValue: 74 | obj = self.__try_ensure_serializable(obj) 75 | if obj is None: 76 | return RemoteValue(data=None, type=Dtype.null) 77 | elif isinstance(obj, float): 78 | obj = array(obj, dtype=Dtype.float32) 79 | return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size) 80 | elif isinstance(obj, bool): 81 | obj = array(obj, dtype=Dtype.bool) 82 | return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size) 83 | elif isinstance(obj, int): 84 | obj = array(obj, dtype=Dtype.int32) 85 | return self.__to_value(obj, name=name, max_data_url_size=max_data_url_size) 86 | elif isinstance(obj, ndarray): 87 | buffer = BytesIO(obj.tobytes()) 88 | data = self.__upload(buffer, name=name, max_data_url_size=max_data_url_size) 89 | return RemoteValue(data=data, type=obj.dtype.name, shape=list(obj.shape)) 90 | elif isinstance(obj, str): 91 | buffer = BytesIO(obj.encode()) 92 | data = self.__upload(buffer, name=name, mime="text/plain", max_data_url_size=max_data_url_size) 93 | return RemoteValue(data=data, type=Dtype.string) 94 | elif isinstance(obj, list): 95 | buffer = BytesIO(dumps(obj).encode()) 96 | data = 
self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size) 97 | return RemoteValue(data=data, type=Dtype.list) 98 | elif isinstance(obj, dict): 99 | buffer = BytesIO(dumps(obj).encode()) 100 | data = self.__upload(buffer, name=name, mime="application/json", max_data_url_size=max_data_url_size) 101 | return RemoteValue(data=data, type=Dtype.dict) 102 | elif isinstance(obj, Image.Image): 103 | buffer = BytesIO() 104 | format = "PNG" if obj.mode == "RGBA" else "JPEG" 105 | mime = f"image/{format.lower()}" 106 | obj.save(buffer, format=format) 107 | data = self.__upload(buffer, name=name, mime=mime, max_data_url_size=max_data_url_size) 108 | return RemoteValue(data=data, type=Dtype.image) 109 | elif isinstance(obj, BytesIO): 110 | data = self.__upload(obj, name=name, max_data_url_size=max_data_url_size) 111 | return RemoteValue(data=data, type=Dtype.binary) 112 | else: 113 | raise ValueError(f"Failed to serialize value '{obj}' of type `{type(obj)}` because it is not supported") 114 | 115 | def __to_object (self, value: RemoteValue) -> Value: 116 | if value.type == Dtype.null: 117 | return None 118 | buffer = self.__download(value.data) 119 | if value.type in [ 120 | Dtype.int8, Dtype.int16, Dtype.int32, Dtype.int64, 121 | Dtype.uint8, Dtype.uint16, Dtype.uint32, Dtype.uint64, 122 | Dtype.float16, Dtype.float32, Dtype.float64, Dtype.bool 123 | ]: 124 | assert value.shape is not None, "Array value must have a shape specified" 125 | array = frombuffer(buffer.getbuffer(), dtype=value.type).reshape(value.shape) 126 | return array if len(value.shape) > 0 else array.item() 127 | elif value.type == Dtype.string: 128 | return buffer.getvalue().decode("utf-8") 129 | elif value.type in [Dtype.list, Dtype.dict]: 130 | return loads(buffer.getvalue().decode("utf-8")) 131 | elif value.type == Dtype.image: 132 | return Image.open(buffer) 133 | elif value.type == Dtype.binary: 134 | return buffer 135 | else: 136 | raise ValueError(f"Failed to deserialize value with type `{value.type}` because it is not supported") 137 | 138 | def __upload ( 139 | self, 140 | data: BytesIO, 141 | *, 142 | name: str, 143 | mime: str="application/octet-stream", 144 | max_data_url_size: int=4 * 1024 * 1024 145 | ) -> str: 146 | if data.getbuffer().nbytes <= max_data_url_size: 147 | encoded_data = b64encode(data.getvalue()).decode("ascii") 148 | return f"data:{mime};base64,{encoded_data}" 149 | value = self.client.request( 150 | method="POST", 151 | path="/values", 152 | body={ "name": name }, 153 | response_type=CreateValueResponse 154 | ) 155 | put( 156 | value.upload_url, 157 | data=data, 158 | headers={ "Content-Type": mime } 159 | ).raise_for_status() 160 | return value.download_url 161 | 162 | def __download (self, url: str) -> BytesIO: 163 | if url.startswith("data:"): 164 | with urlopen(url) as response: 165 | return BytesIO(response.read()) 166 | response = get(url) 167 | response.raise_for_status() 168 | result = BytesIO(response.content) 169 | return result 170 | 171 | @classmethod 172 | def __try_ensure_serializable (cls, obj: object) -> object: 173 | if obj is None: 174 | return obj 175 | if isinstance(obj, list): 176 | return [cls.__try_ensure_serializable(x) for x in obj] 177 | if is_dataclass(obj) and not isinstance(obj, type): 178 | return asdict(obj) 179 | if isinstance(obj, BaseModel): 180 | return obj.model_dump(mode="json", by_alias=True) 181 | return obj 182 | 183 | class RemoteValue (BaseModel): 184 | data: str | None 185 | type: Dtype 186 | shape: list[int] | None = None 
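# Illustrative sketch (an inference from `__to_value` above, not part of the original
# source): values no larger than `max_data_url_size` are inlined as base64 data URLs,
# while larger values are uploaded and referenced by their download URL. A one-element
# float32 array `[1.0]` would serialize to roughly:
#
#   RemoteValue(
#       data="data:application/octet-stream;base64,AACAPw==",  # 4 bytes: float32 1.0
#       type="float32",
#       shape=[1]
#   )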
187 | 188 | class RemotePrediction (BaseModel): 189 | id: str 190 | tag: str 191 | created: str 192 | results: list[RemoteValue] | None 193 | latency: float | None 194 | error: str | None 195 | logs: str | None 196 | 197 | class CreateValueResponse (BaseModel): 198 | upload_url: str = Field(validation_alias="uploadUrl") 199 | download_url: str = Field(validation_alias="downloadUrl") -------------------------------------------------------------------------------- /fxn/c/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | # https://github.com/fxnai/fxnc 7 | 8 | from .configuration import Configuration 9 | from .map import ValueMap 10 | from .prediction import Prediction 11 | from .predictor import Predictor 12 | from .stream import PredictionStream 13 | from .value import Value, ValueFlags -------------------------------------------------------------------------------- /fxn/c/configuration.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ctypes import byref, c_int, c_void_p, create_string_buffer 7 | from pathlib import Path 8 | from typing import final 9 | 10 | from ..types import Acceleration 11 | from .fxnc import get_fxnc, status_to_error, FXNStatus 12 | 13 | @final 14 | class Configuration: 15 | 16 | def __init__ (self): 17 | configuration = c_void_p() 18 | status = get_fxnc().FXNConfigurationCreate(byref(configuration)) 19 | if status != FXNStatus.OK: 20 | raise RuntimeError(f"Failed to create configuration with error: {status_to_error(status)}") 21 | self.__configuration = configuration 22 | 23 | @property 24 | def tag (self) -> str: 25 | buffer = create_string_buffer(2048) 26 | status = get_fxnc().FXNConfigurationGetTag( 27 | self.__configuration, 28 | buffer, 29 | len(buffer) 30 | ) 31 | if status != FXNStatus.OK: 32 | raise RuntimeError(f"Failed to get configuration tag with error: {status_to_error(status)}") 33 | tag = buffer.value.decode("utf-8") 34 | return tag if tag else None 35 | 36 | @tag.setter 37 | def tag (self, tag: str): 38 | tag = tag.encode() if tag is not None else None 39 | status = get_fxnc().FXNConfigurationSetTag(self.__configuration, tag) 40 | if status != FXNStatus.OK: 41 | raise RuntimeError(f"Failed to set configuration tag with error: {status_to_error(status)}") 42 | 43 | @property 44 | def token (self) -> str: 45 | buffer = create_string_buffer(2048) 46 | status = get_fxnc().FXNConfigurationGetToken( 47 | self.__configuration, 48 | buffer, 49 | len(buffer) 50 | ) 51 | if status != FXNStatus.OK: 52 | raise RuntimeError(f"Failed to get configuration token with error: {status_to_error(status)}") 53 | token = buffer.value.decode("utf-8") 54 | return token if token else None 55 | 56 | @token.setter 57 | def token (self, token: str): 58 | token = token.encode() if token is not None else None 59 | status = get_fxnc().FXNConfigurationSetToken(self.__configuration, token) 60 | if status != FXNStatus.OK: 61 | raise RuntimeError(f"Failed to set configuration token with error: {status_to_error(status)}") 62 | 63 | @property 64 | def acceleration (self) -> Acceleration: 65 | acceleration = c_int() 66 | status = get_fxnc().FXNConfigurationGetAcceleration( 67 | self.__configuration, 68 | byref(acceleration) 69 | ) 70 | if status != FXNStatus.OK: 71 | raise RuntimeError(f"Failed to get configuration acceleration 
with error: {status_to_error(status)}") 72 | return self.__to_acceleration_str(acceleration.value) 73 | 74 | @acceleration.setter 75 | def acceleration (self, acceleration: Acceleration): 76 | status = get_fxnc().FXNConfigurationSetAcceleration( 77 | self.__configuration, 78 | self.__to_acceleration_int(acceleration) 79 | ) 80 | if status != FXNStatus.OK: 81 | raise RuntimeError(f"Failed to set configuration acceleration with error: {status_to_error(status)}") 82 | 83 | @property 84 | def device (self): 85 | device = c_void_p() 86 | status = get_fxnc().FXNConfigurationGetDevice( 87 | self.__configuration, 88 | byref(device) 89 | ) 90 | if status != FXNStatus.OK: 91 | raise RuntimeError(f"Failed to get configuration device with error: {status_to_error(status)}") 92 | return device if device.value else None 93 | 94 | @device.setter 95 | def device (self, device): 96 | status = get_fxnc().FXNConfigurationSetDevice(self.__configuration, device) 97 | if status != FXNStatus.OK: 98 | raise RuntimeError(f"Failed to set configuration device with error: {status_to_error(status)}") 99 | 100 | def add_resource (self, type: str, path: Path): 101 | status = get_fxnc().FXNConfigurationAddResource( 102 | self.__configuration, 103 | type.encode(), 104 | str(path).encode() 105 | ) 106 | if status != FXNStatus.OK: 107 | raise RuntimeError(f"Failed to add configuration resource with error: {status_to_error(status)}") 108 | 109 | @classmethod 110 | def get_unique_id (cls) -> str: 111 | buffer = create_string_buffer(2048) 112 | status = get_fxnc().FXNConfigurationGetUniqueID(buffer, len(buffer)) 113 | if status != FXNStatus.OK: 114 | raise RuntimeError(f"Failed to retrieve configuration identifier with error: {status_to_error(status)}") 115 | return buffer.value.decode("utf-8") 116 | 117 | @classmethod 118 | def get_client_id (cls) -> str: 119 | buffer = create_string_buffer(64) 120 | status = get_fxnc().FXNConfigurationGetClientID(buffer, len(buffer)) 121 | if status == FXNStatus.OK: 122 | return buffer.value.decode("utf-8") 123 | else: 124 | raise RuntimeError(f"Failed to retrieve client identifier with error: {status_to_error(status)}") 125 | 126 | def __enter__ (self): 127 | return self 128 | 129 | def __exit__ (self, exc_type, exc_value, traceback): 130 | self.__release() 131 | 132 | def __release (self): 133 | if self.__configuration: 134 | get_fxnc().FXNConfigurationRelease(self.__configuration) 135 | self.__configuration = None 136 | 137 | def __to_acceleration_int (self, value: Acceleration) -> int: 138 | match value: 139 | case "auto": return 0 140 | case "cpu": return 1 141 | case "gpu": return 2 142 | case "npu": return 4 143 | 144 | def __to_acceleration_str (self, value: int) -> Acceleration: 145 | match value: 146 | case 0: return "auto" 147 | case 1: return "cpu" 148 | case 2: return "gpu" 149 | case 4: return "npu" 150 | case _: return None -------------------------------------------------------------------------------- /fxn/c/fxnc.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
4 | # 5 | 6 | from ctypes import CDLL 7 | from enum import IntEnum 8 | from importlib import resources 9 | from platform import machine, system 10 | 11 | _fxnc: CDLL = None 12 | 13 | class FXNStatus(IntEnum): 14 | OK = 0 15 | ERROR_INVALID_ARGUMENT = 1 16 | ERROR_INVALID_OPERATION = 2 17 | ERROR_NOT_IMPLEMENTED = 3 18 | 19 | def get_fxnc () -> CDLL: 20 | global _fxnc 21 | _fxnc = _fxnc if _fxnc is not None else _load_fxnc() 22 | return _fxnc 23 | 24 | def set_fxnc (fxnc: CDLL): 25 | global _fxnc 26 | _fxnc = fxnc 27 | 28 | def _load_fxnc () -> CDLL: 29 | os = system().lower() 30 | os = "macos" if os == "darwin" else os 31 | arch = machine().lower() 32 | arch = "arm64" if arch == "aarch64" else arch 33 | arch = "x86_64" if arch in ["x64", "amd64"] else arch 34 | package = f"fxn.lib.{os}.{arch}" 35 | resource = "libFunction.so" 36 | resource = "Function.dylib" if os == "macos" else resource 37 | resource = "Function.dll" if os == "windows" else resource 38 | with resources.path(package, resource) as path: 39 | return CDLL(str(path)) 40 | 41 | def status_to_error (status: int) -> str: 42 | if status == FXNStatus.ERROR_INVALID_ARGUMENT: 43 | return "FXN_ERROR_INVALID_ARGUMENT" 44 | elif status == FXNStatus.ERROR_INVALID_OPERATION: 45 | return "FXN_ERROR_INVALID_OPERATION" 46 | elif status == FXNStatus.ERROR_NOT_IMPLEMENTED: 47 | return "FXN_ERROR_NOT_IMPLEMENTED" 48 | return "" -------------------------------------------------------------------------------- /fxn/c/map.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ctypes import byref, c_int, c_int32, c_void_p, create_string_buffer 7 | from pathlib import Path 8 | from typing import final 9 | 10 | from .fxnc import get_fxnc, status_to_error, FXNStatus 11 | from .value import Value 12 | 13 | @final 14 | class ValueMap: 15 | 16 | def __init__ (self, map=None, *, owner: bool=True): 17 | if map is None: 18 | map = c_void_p() 19 | owner = True 20 | status = get_fxnc().FXNValueMapCreate(byref(map)) 21 | if status != FXNStatus.OK: 22 | raise RuntimeError(f"Failed to create value map with error: {status_to_error(status)}") 23 | self.__map = map 24 | self.__owner = owner 25 | 26 | def key (self, index: int) -> str: 27 | buffer = create_string_buffer(256) 28 | status = get_fxnc().FXNValueMapGetKey(self.__map, index, buffer, len(buffer)) 29 | if status == FXNStatus.OK: 30 | return buffer.value.decode("utf-8") 31 | else: 32 | raise RuntimeError(f"Failed to get value map key at index {index} with error: {status_to_error(status)}") 33 | 34 | def __getitem__ (self, key: str) -> Value | None: 35 | value = c_void_p() 36 | status = get_fxnc().FXNValueMapGetValue(self.__map, key.encode(), byref(value)) 37 | if status == FXNStatus.OK: 38 | return Value(value, owner=False) 39 | else: 40 | raise RuntimeError(f"Failed to get value map value for key '{key}' with error: {status_to_error(status)}") 41 | 42 | def __setitem__ (self, key: str, value: Value): 43 | status = get_fxnc().FXNValueMapSetValue(self.__map, key.encode(), value._Value__value) 44 | if status != FXNStatus.OK: 45 | raise RuntimeError(f"Failed to set value map value for key '{key}' with error: {status_to_error(status)}") 46 | 47 | def __len__ (self) -> int: 48 | count = c_int32() 49 | status = get_fxnc().FXNValueMapGetSize(self.__map, byref(count)) 50 | if status == FXNStatus.OK: 51 | return count.value 52 | else: 53 | raise RuntimeError(f"Failed to get value map size with 
error: {status_to_error(status)}") 54 | 55 | def __enter__ (self): 56 | return self 57 | 58 | def __exit__ (self, exc_type, exc_value, traceback): 59 | self.__release() 60 | 61 | def __release (self): 62 | if self.__map and self.__owner: 63 | get_fxnc().FXNValueMapRelease(self.__map) 64 | self.__map = None -------------------------------------------------------------------------------- /fxn/c/prediction.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ctypes import byref, c_double, c_int32, c_void_p, create_string_buffer 7 | from pathlib import Path 8 | from typing import final 9 | 10 | from .fxnc import get_fxnc, status_to_error, FXNStatus 11 | from .map import ValueMap 12 | 13 | @final 14 | class Prediction: 15 | 16 | def __init__ (self, prediction): 17 | self.__prediction = prediction 18 | 19 | @property 20 | def id (self) -> str: 21 | id = create_string_buffer(256) 22 | status = get_fxnc().FXNPredictionGetID(self.__prediction, id, len(id)) 23 | if status == FXNStatus.OK: 24 | return id.value.decode("utf-8") 25 | else: 26 | raise RuntimeError(f"Failed to get prediction id with error: {status_to_error(status)}") 27 | 28 | @property 29 | def latency (self) -> float: 30 | latency = c_double() 31 | status = get_fxnc().FXNPredictionGetLatency(self.__prediction, byref(latency)) 32 | if status == FXNStatus.OK: 33 | return latency.value 34 | else: 35 | raise RuntimeError(f"Failed to get prediction latency with error: {status_to_error(status)}") 36 | 37 | @property 38 | def results (self) -> ValueMap | None: 39 | map = c_void_p() 40 | status = get_fxnc().FXNPredictionGetResults(self.__prediction, byref(map)) 41 | if status != FXNStatus.OK: 42 | raise RuntimeError(f"Failed to get prediction results with error: {status_to_error(status)}") 43 | map = ValueMap(map, owner=False) 44 | return map if len(map) > 0 else None 45 | 46 | @property 47 | def error (self) -> str | None: 48 | error = create_string_buffer(2048) 49 | get_fxnc().FXNPredictionGetError(self.__prediction, error, len(error)) 50 | error = error.value.decode("utf-8") 51 | return error if error else None 52 | 53 | @property 54 | def logs (self) -> str: 55 | fxnc = get_fxnc() 56 | log_length = c_int32() 57 | status = fxnc.FXNPredictionGetLogLength(self.__prediction, byref(log_length)) 58 | if status != FXNStatus.OK: 59 | raise RuntimeError(f"Failed to get prediction log length with error: {status_to_error(status)}") 60 | logs = create_string_buffer(log_length.value + 1) 61 | status = fxnc.FXNPredictionGetLogs(self.__prediction, logs, len(logs)) 62 | if status == FXNStatus.OK: 63 | return logs.value.decode("utf-8") 64 | else: 65 | raise RuntimeError(f"Failed to get prediction logs with error: {status_to_error(status)}") 66 | 67 | def __enter__ (self): 68 | return self 69 | 70 | def __exit__ (self, exc_type, exc_value, traceback): 71 | self.__release() 72 | 73 | def __release (self): 74 | if self.__prediction: 75 | get_fxnc().FXNPredictionRelease(self.__prediction) 76 | self.__prediction = None -------------------------------------------------------------------------------- /fxn/c/predictor.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
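A quick sketch of the dictionary-style `ValueMap` API from fxn/c/map.py above, round-tripping a string through `Value` (defined in fxn/c/value.py below). The "prompt" key is a hypothetical example, and the native library must be present:

    from fxn.c import Value, ValueMap

    with ValueMap() as inputs:
        inputs["prompt"] = Value.create_string("Hello")   # hypothetical input key
        assert len(inputs) == 1
        print(inputs.key(0))                              # -> "prompt"
        print(inputs["prompt"].to_object())               # -> "Hello"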
4 | # 5 | 6 | from ctypes import byref, c_void_p 7 | from typing import final 8 | 9 | from .configuration import Configuration 10 | from .fxnc import get_fxnc, status_to_error, FXNStatus 11 | from .map import ValueMap 12 | from .prediction import Prediction 13 | from .stream import PredictionStream 14 | 15 | @final 16 | class Predictor: 17 | 18 | def __init__ (self, configuration: Configuration): 19 | predictor = c_void_p() 20 | status = get_fxnc().FXNPredictorCreate(configuration._Configuration__configuration, byref(predictor)) 21 | if status == FXNStatus.OK: 22 | self.__predictor = predictor 23 | else: 24 | raise RuntimeError(f"Failed to create predictor with error: {status_to_error(status)}") 25 | 26 | def create_prediction (self, inputs: ValueMap) -> Prediction: 27 | prediction = c_void_p() 28 | status = get_fxnc().FXNPredictorCreatePrediction( 29 | self.__predictor, 30 | inputs._ValueMap__map, 31 | byref(prediction) 32 | ) 33 | if status == FXNStatus.OK: 34 | return Prediction(prediction) 35 | else: 36 | raise RuntimeError(f"Failed to create prediction with error: {status_to_error(status)}") 37 | 38 | def stream_prediction (self, inputs: ValueMap) -> PredictionStream: 39 | stream = c_void_p() 40 | status = get_fxnc().FXNPredictorStreamPrediction( 41 | self.__predictor, 42 | inputs._ValueMap__map, 43 | byref(stream) 44 | ) 45 | if status == FXNStatus.OK: 46 | return PredictionStream(stream) 47 | else: 48 | raise RuntimeError(f"Failed to stream prediction with error: {status_to_error(status)}") 49 | 50 | def __enter__ (self): 51 | return self 52 | 53 | def __exit__ (self, exc_type, exc_value, traceback): 54 | self.__release() 55 | 56 | def __release (self): 57 | if self.__predictor: 58 | get_fxnc().FXNPredictorRelease(self.__predictor) 59 | self.__predictor = None -------------------------------------------------------------------------------- /fxn/c/stream.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ctypes import byref, c_void_p 7 | from typing import final 8 | 9 | from .fxnc import get_fxnc, status_to_error, FXNStatus 10 | from .prediction import Prediction 11 | 12 | @final 13 | class PredictionStream: 14 | 15 | def __init__ (self, stream): 16 | self.__stream = stream 17 | 18 | def __iter__ (self): 19 | return self 20 | 21 | def __next__ (self) -> Prediction: 22 | prediction = c_void_p() 23 | status = get_fxnc().FXNPredictionStreamReadNext(self.__stream, byref(prediction)) 24 | if status == FXNStatus.ERROR_INVALID_OPERATION: 25 | raise StopIteration() 26 | elif status != FXNStatus.OK: 27 | raise RuntimeError(f"Failed to read next prediction in stream with error: {status_to_error(status)}") 28 | else: 29 | return Prediction(prediction) 30 | 31 | def __enter__ (self): 32 | return self 33 | 34 | def __exit__ (self, exc_type, exc_value, traceback): 35 | self.__release() 36 | 37 | def __release (self): 38 | if self.__stream: 39 | get_fxnc().FXNPredictionStreamRelease(self.__stream) 40 | self.__stream = None -------------------------------------------------------------------------------- /fxn/c/value.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
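Tying the pieces together, a hedged sketch of the `Predictor` lifecycle from fxn/c/predictor.py above. It assumes `config` has already been populated with a valid tag, token, and resources, which is what `PredictionService` does in fxn/services/prediction.py:

    from fxn.c import Configuration, Predictor, Value, ValueMap

    with Configuration() as config:
        # assume config.tag, config.token, and resources were set beforehand
        with Predictor(config) as predictor, ValueMap() as inputs:
            inputs["prompt"] = Value.create_string("Hello")   # hypothetical input
            with predictor.create_prediction(inputs) as prediction:
                print(prediction.id, prediction.latency, prediction.error)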
4 | # 5 | 6 | from __future__ import annotations 7 | from collections.abc import Iterable 8 | from enum import IntFlag 9 | from ctypes import byref, cast, c_char_p, c_int, c_int32, c_uint8, c_void_p, string_at, POINTER 10 | from io import BytesIO 11 | from json import dumps, loads 12 | from numpy import array, dtype, int32, ndarray, zeros 13 | from numpy.ctypeslib import as_array, as_ctypes_type 14 | from PIL import Image 15 | from typing import final, Any 16 | 17 | from ..types import Dtype 18 | from .fxnc import get_fxnc, status_to_error, FXNStatus 19 | 20 | class ValueFlags (IntFlag): 21 | NONE = 0 22 | COPY_DATA = 1 23 | 24 | @final 25 | class Value: 26 | 27 | def __init__ (self, value, *, owner: bool=True): 28 | self.__value = value 29 | self.__owner = owner 30 | 31 | @property 32 | def data (self): 33 | data = c_void_p() 34 | status = get_fxnc().FXNValueGetData(self.__value, byref(data)) 35 | if status == FXNStatus.OK: 36 | return data 37 | else: 38 | raise RuntimeError(f"Failed to get value data with error: {status_to_error(status)}") 39 | 40 | @property 41 | def type (self) -> Dtype: 42 | dtype = c_int() 43 | status = get_fxnc().FXNValueGetType(self.__value, byref(dtype)) 44 | if status == FXNStatus.OK: 45 | return _DTYPE_TO_STR.get(dtype.value) 46 | else: 47 | raise RuntimeError(f"Failed to get value data type with error: {status_to_error(status)}") 48 | 49 | @property 50 | def shape (self) -> list[int] | None: 51 | if self.type not in _TENSOR_ISH_DTYPES: 52 | return None 53 | fxnc = get_fxnc() 54 | dims = c_int32() 55 | status = fxnc.FXNValueGetDimensions(self.__value, byref(dims)) 56 | if status != FXNStatus.OK: 57 | raise RuntimeError(f"Failed to get value dimensions with error: {status_to_error(status)}") 58 | shape = zeros(dims.value, dtype=int32) 59 | status = fxnc.FXNValueGetShape(self.__value, shape.ctypes.data_as(POINTER(c_int32)), dims) 60 | if status == FXNStatus.OK: 61 | return shape.tolist() 62 | else: 63 | raise RuntimeError(f"Failed to get value shape with error: {status_to_error(status)}") 64 | 65 | def to_object (self) -> Any: 66 | type = self.type 67 | if type == Dtype.null: 68 | return None 69 | elif type in _TENSOR_DTYPES: 70 | ctype = as_ctypes_type(dtype(type)) 71 | tensor = as_array(cast(self.data, POINTER(ctype)), self.shape) 72 | return tensor.item() if len(tensor.shape) == 0 else tensor.copy() 73 | elif type == Dtype.string: 74 | return cast(self.data, c_char_p).value.decode() 75 | elif type in [Dtype.list, Dtype.dict]: 76 | return loads(cast(self.data, c_char_p).value.decode()) 77 | elif type == Dtype.image: 78 | pixel_buffer = as_array(cast(self.data, POINTER(c_uint8)), self.shape) 79 | return Image.fromarray(pixel_buffer.squeeze()).copy() 80 | elif type == Dtype.binary: 81 | return BytesIO(string_at(self.data, self.shape[0])) 82 | else: 83 | raise RuntimeError(f"Failed to convert Function value to object because value has unsupported type: {type}") 84 | 85 | def __enter__ (self): 86 | return self 87 | 88 | def __exit__ (self, exc_type, exc_value, traceback): 89 | self.__release() 90 | 91 | def __release (self): 92 | if self.__value and self.__owner: 93 | get_fxnc().FXNValueRelease(self.__value) 94 | self.__value = None 95 | 96 | @classmethod 97 | def create_array ( 98 | cls, 99 | data: ndarray, 100 | *, 101 | flags: ValueFlags=ValueFlags.NONE 102 | ) -> Value: 103 | dtype = _STR_TO_DTYPE.get(data.dtype.name) 104 | if dtype is None: 105 | raise RuntimeError(f"Failed to create array value because data type is not supported: {data.dtype}") 106 | value = 
c_void_p() 107 | status = get_fxnc().FXNValueCreateArray( 108 | data.ctypes.data_as(c_void_p), 109 | data.ctypes.shape_as(c_int32), 110 | len(data.shape), 111 | dtype, 112 | flags, 113 | byref(value) 114 | ) 115 | if status == FXNStatus.OK: 116 | return Value(value) 117 | else: 118 | raise RuntimeError(f"Failed to create array value with error: {status_to_error(status)}") 119 | 120 | @classmethod 121 | def create_string (cls, data: str) -> Value: 122 | value = c_void_p() 123 | status = get_fxnc().FXNValueCreateString(data.encode(), byref(value)) 124 | if status == FXNStatus.OK: 125 | return Value(value) 126 | else: 127 | raise RuntimeError(f"Failed to create string value with error: {status_to_error(status)}") 128 | 129 | @classmethod 130 | def create_list (cls, data: Iterable[Any]) -> Value: 131 | value = c_void_p() 132 | status = get_fxnc().FXNValueCreateList(dumps(data).encode(), byref(value)) 133 | if status == FXNStatus.OK: 134 | return Value(value) 135 | else: 136 | raise RuntimeError(f"Failed to create list value with error: {status_to_error(status)}") 137 | 138 | @classmethod 139 | def create_dict (cls, data: dict[str, Any]) -> Value: 140 | value = c_void_p() 141 | status = get_fxnc().FXNValueCreateDict(dumps(data).encode(), byref(value)) 142 | if status == FXNStatus.OK: 143 | return Value(value) 144 | else: 145 | raise RuntimeError(f"Failed to create dict value with error: {status_to_error(status)}") 146 | 147 | @classmethod 148 | def create_image (cls, image: Image.Image) -> Value: 149 | value = c_void_p() 150 | pixel_buffer = array(image) 151 | status = get_fxnc().FXNValueCreateImage( 152 | pixel_buffer.ctypes.data_as(c_void_p), 153 | image.width, 154 | image.height, 155 | pixel_buffer.shape[2], 156 | ValueFlags.COPY_DATA, 157 | byref(value) 158 | ) 159 | if status == FXNStatus.OK: 160 | return Value(value) 161 | else: 162 | raise RuntimeError(f"Failed to create image value with error: {status_to_error(status)}") 163 | 164 | @classmethod 165 | def create_binary ( 166 | cls, 167 | data: memoryview, 168 | *, 169 | flags: ValueFlags=ValueFlags.NONE 170 | ) -> Value: 171 | buffer = (c_uint8 * len(data)).from_buffer(data) 172 | value = c_void_p() 173 | status = get_fxnc().FXNValueCreateBinary(buffer, len(data), flags, byref(value)) 174 | if status == FXNStatus.OK: 175 | return Value(value) 176 | else: 177 | raise RuntimeError(f"Failed to create binary value with error: {status_to_error(status)}") 178 | 179 | @classmethod 180 | def create_null (cls) -> Value: 181 | value = c_void_p() 182 | status = get_fxnc().FXNValueCreateNull(byref(value)) 183 | if status == FXNStatus.OK: 184 | return Value(value) 185 | else: 186 | raise RuntimeError(f"Failed to create null value with error: {status_to_error(status)}") 187 | 188 | 189 | _STR_TO_DTYPE = { 190 | Dtype.null: 0, 191 | Dtype.float16: 1, 192 | Dtype.float32: 2, 193 | Dtype.float64: 3, 194 | Dtype.int8: 4, 195 | Dtype.int16: 5, 196 | Dtype.int32: 6, 197 | Dtype.int64: 7, 198 | Dtype.uint8: 8, 199 | Dtype.uint16: 9, 200 | Dtype.uint32: 10, 201 | Dtype.uint64: 11, 202 | Dtype.bool: 12, 203 | Dtype.string: 13, 204 | Dtype.list: 14, 205 | Dtype.dict: 15, 206 | Dtype.image: 16, 207 | Dtype.binary: 17, 208 | } 209 | _DTYPE_TO_STR = { value: key for key, value in _STR_TO_DTYPE.items() } 210 | _TENSOR_DTYPES = { 211 | Dtype.float16, 212 | Dtype.float32, 213 | Dtype.float64, 214 | Dtype.int8, 215 | Dtype.int16, 216 | Dtype.int32, 217 | Dtype.int64, 218 | Dtype.uint8, 219 | Dtype.uint16, 220 | Dtype.uint32, 221 | Dtype.uint64, 222 | Dtype.bool, 223 
| }
224 | _TENSOR_ISH_DTYPES = _TENSOR_DTYPES | { Dtype.image, Dtype.binary }
--------------------------------------------------------------------------------
/fxn/cli/__init__.py:
--------------------------------------------------------------------------------
1 | #
2 | # Function
3 | # Copyright © 2025 NatML Inc. All Rights Reserved.
4 | #
5 | 
6 | import typer
7 | 
8 | from ..logging import TracebackMarkupConsole
9 | from ..version import __version__
10 | 
11 | from .auth import app as auth_app
12 | from .compile import compile_predictor
13 | from .misc import cli_options
14 | from .predictions import create_prediction
15 | from .predictors import archive_predictor, delete_predictor, retrieve_predictor
16 | from .sources import retrieve_source
17 | from ..beta.cli import llm_app
18 | 
19 | # Define CLI
20 | typer.main.console_stderr = TracebackMarkupConsole()
21 | app = typer.Typer(
22 |     name=f"Function CLI {__version__}",
23 |     no_args_is_help=True,
24 |     pretty_exceptions_show_locals=False,
25 |     pretty_exceptions_short=True,
26 |     add_completion=False
27 | )
28 | 
29 | # Add top level options
30 | app.callback()(cli_options)
31 | 
32 | # Add subcommands
33 | app.add_typer(auth_app, name="auth", help="Login, logout, and check your authentication status.")
34 | app.add_typer(llm_app, name="llm", hidden=True, help="Work with large language models (LLMs).")
35 | 
36 | # Add top-level commands
37 | app.command(
38 |     name="predict",
39 |     help="Make a prediction.",
40 |     context_settings={ "allow_extra_args": True, "ignore_unknown_options": True }
41 | )(create_prediction)
42 | app.command(
43 |     name="compile",
44 |     help="Create a predictor by compiling a Python function."
45 | )(compile_predictor)
46 | app.command(name="retrieve", help="Retrieve a predictor.")(retrieve_predictor)
47 | app.command(name="archive", help="Archive a predictor.")(archive_predictor)
48 | app.command(name="delete", help="Delete a predictor.")(delete_predictor)
49 | app.command(name="source", help="Retrieve the generated native code for a given predictor.")(retrieve_source)
50 | 
51 | # Run
52 | if __name__ == "__main__":
53 |     app()
--------------------------------------------------------------------------------
/fxn/cli/auth.py:
--------------------------------------------------------------------------------
1 | #
2 | # Function
3 | # Copyright © 2025 NatML Inc. All Rights Reserved.
4 | #
5 | 
6 | from pathlib import Path
7 | from rich import print, print_json
8 | from typer import Argument, Typer
9 | 
10 | from ..function import Function
11 | 
12 | app = Typer(no_args_is_help=True)
13 | 
14 | @app.command(name="login", help="Login to Function.")
15 | def login (
16 |     access_key: str=Argument(..., help="Function access key.", envvar="FXN_ACCESS_KEY")
17 | ):
18 |     fxn = Function(access_key=access_key)
19 |     user = fxn.users.retrieve()
20 |     user = user.model_dump() if user else None
21 |     _set_access_key(access_key if user is not None else None)
22 |     print_json(data=user)
23 | 
24 | @app.command(name="status", help="Get current authentication status.")
25 | def auth_status ():
26 |     fxn = Function(get_access_key())
27 |     user = fxn.users.retrieve()
28 |     user = user.model_dump() if user else None
29 |     print_json(data=user)
30 | 
31 | @app.command(name="logout", help="Logout from Function.")
32 | def logout ():
33 |     _set_access_key(None)
34 |     print("Successfully logged out of Function")
35 | 
36 | def get_access_key () -> str:
37 |     """
38 |     Get the CLI access key.
39 | 
40 |     Returns:
41 |         str: CLI access key.
42 | """ 43 | credentials_path = Path.home() / ".fxn" / "credentials" 44 | if not credentials_path.exists(): 45 | return None 46 | with open(credentials_path) as f: 47 | return f.read() 48 | 49 | def _set_access_key (key: str): 50 | """ 51 | Set the CLI access key. 52 | 53 | Parameters: 54 | key (str); CLI access key. 55 | """ 56 | credentials_path = Path.home() / ".fxn" / "credentials" 57 | credentials_path.parent.mkdir(parents=True, exist_ok=True) 58 | if key: 59 | with open(credentials_path, "w") as f: 60 | f.write(key) 61 | elif credentials_path.exists(): 62 | credentials_path.unlink() -------------------------------------------------------------------------------- /fxn/cli/compile.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from asyncio import run as run_async 7 | from importlib.util import module_from_spec, spec_from_file_location 8 | from inspect import getmembers, getmodulename, isfunction 9 | from pathlib import Path 10 | from pydantic import BaseModel 11 | from rich import print as print_rich 12 | import sys 13 | from typer import Argument, Option 14 | from typing import Callable, Literal 15 | from urllib.parse import urlparse, urlunparse 16 | 17 | from ..client import FunctionAPIError 18 | from ..compile import PredictorSpec 19 | from ..function import Function 20 | from ..sandbox import EntrypointCommand 21 | from ..logging import CustomProgress, CustomProgressTask 22 | from .auth import get_access_key 23 | 24 | class CompileError (Exception): 25 | pass 26 | 27 | def compile_predictor ( 28 | path: str=Argument(..., help="Predictor path."), 29 | overwrite: bool=Option(False, "--overwrite", help="Whether to delete any existing predictor with the same tag before compiling."), 30 | ): 31 | run_async(_compile_predictor_async(path, overwrite=overwrite)) 32 | 33 | async def _compile_predictor_async ( 34 | path: str, 35 | *, 36 | overwrite: bool 37 | ): 38 | fxn = Function(get_access_key()) 39 | path: Path = Path(path).resolve() 40 | with CustomProgress(): 41 | # Load 42 | with CustomProgressTask(loading_text="Loading predictor...") as task: 43 | func = _load_predictor_func(path) 44 | entrypoint = EntrypointCommand(from_path=str(path), to_path="./", name=func.__name__) 45 | spec: PredictorSpec = func.__predictor_spec 46 | task.finish(f"Loaded prediction function: [bold cyan]{spec.tag}[/bold cyan]") 47 | # Populate 48 | sandbox = spec.sandbox 49 | sandbox.commands.append(entrypoint) 50 | with CustomProgressTask(loading_text="Uploading sandbox...", done_text="Uploaded sandbox"): 51 | sandbox.populate(fxn=fxn) 52 | # Compile 53 | with CustomProgressTask(loading_text="Running codegen...", done_text="Completed codegen"): 54 | with CustomProgressTask(loading_text="Creating predictor..."): 55 | if overwrite: 56 | try: 57 | fxn.client.request( 58 | method="DELETE", 59 | path=f"/predictors/{spec.tag}" 60 | ) 61 | except FunctionAPIError as error: 62 | if error.status_code != 404: 63 | raise 64 | predictor = fxn.client.request( 65 | method="POST", 66 | path="/predictors", 67 | body=spec.model_dump(mode="json", exclude=spec.model_extra.keys(), by_alias=True), 68 | response_type=_Predictor 69 | ) 70 | with ProgressLogQueue() as task_queue: 71 | async for event in fxn.client.stream( 72 | method="POST", 73 | path=f"/predictors/{predictor.tag}/compile", 74 | body={ }, 75 | response_type=_LogEvent | _ErrorEvent 76 | ): 77 | if isinstance(event, _LogEvent): 78 | 
task_queue.push_log(event) 79 | elif isinstance(event, _ErrorEvent): 80 | task_queue.push_error(event) 81 | raise CompileError(event.data.error) 82 | predictor_url = _compute_predictor_url(fxn.client.api_url, spec.tag) 83 | print_rich(f"\n[bold spring_green3]🎉 Predictor is now being compiled.[/bold spring_green3] Check it out at [link={predictor_url}]{predictor_url}[/link]") 84 | 85 | def _load_predictor_func (path: str) -> Callable[...,object]: 86 | if "" not in sys.path: 87 | sys.path.insert(0, "") 88 | path: Path = Path(path).resolve() 89 | if not path.exists(): 90 | raise ValueError(f"Cannot compile predictor because no Python module exists at the given path.") 91 | sys.path.insert(0, str(path.parent)) 92 | name = getmodulename(path) 93 | spec = spec_from_file_location(name, path) 94 | module = module_from_spec(spec) 95 | sys.modules[name] = module 96 | spec.loader.exec_module(module) 97 | main_func = next(func for _, func in getmembers(module, isfunction) if hasattr(func, "__predictor_spec")) 98 | return main_func 99 | 100 | def _compute_predictor_url (api_url: str, tag: str) -> str: 101 | parsed_url = urlparse(api_url) 102 | hostname_parts = parsed_url.hostname.split(".") 103 | if hostname_parts[0] == "api": 104 | hostname_parts.pop(0) 105 | hostname = ".".join(hostname_parts) 106 | netloc = hostname if not parsed_url.port else f"{hostname}:{parsed_url.port}" 107 | predictor_url = urlunparse(parsed_url._replace(netloc=netloc, path=f"{tag}")) 108 | return predictor_url 109 | 110 | class _Predictor (BaseModel): 111 | tag: str 112 | 113 | class _LogData (BaseModel): 114 | message: str 115 | level: int = 0 116 | status: Literal["success", "error"] = "success" 117 | update: bool = False 118 | 119 | class _LogEvent (BaseModel): 120 | event: Literal["log"] 121 | data: _LogData 122 | 123 | class _ErrorData (BaseModel): 124 | error: str 125 | 126 | class _ErrorEvent (BaseModel): 127 | event: Literal["error"] 128 | data: _ErrorData 129 | 130 | class ProgressLogQueue: 131 | 132 | def __init__ (self): 133 | self.queue: list[tuple[int, CustomProgressTask]] = [] 134 | 135 | def push_log (self, event: _LogEvent): 136 | # Check for update 137 | if event.data.update and self.queue: 138 | current_level, current_task = self.queue[-1] 139 | current_task.update(description=event.data.message, status=event.data.status) 140 | return 141 | # Pop 142 | while self.queue: 143 | current_level, current_task = self.queue[-1] 144 | if event.data.level > current_level: 145 | break 146 | current_task.__exit__(None, None, None) 147 | self.queue.pop() 148 | task = CustomProgressTask(loading_text=event.data.message) 149 | task.__enter__() 150 | self.queue.append((event.data.level, task)) 151 | 152 | def push_error (self, error: _ErrorEvent): 153 | while self.queue: 154 | _, current_task = self.queue.pop() 155 | current_task.__exit__(RuntimeError, None, None) 156 | 157 | def __enter__ (self): 158 | return self 159 | 160 | def __exit__ (self, exc_type, exc_value, traceback): 161 | while self.queue: 162 | _, current_task = self.queue.pop() 163 | current_task.__exit__(None, None, None) -------------------------------------------------------------------------------- /fxn/cli/misc.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
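`_load_predictor_func` above imports the module at `path` and returns the single function carrying a `__predictor_spec` attribute, i.e. a function decorated with `@compile`. A minimal module it could load might look like this; the tag and description are hypothetical:

    # predictor.py, passed to `fxn compile predictor.py`
    from fxn.compile import compile
    from fxn.sandbox import Sandbox

    @compile(
        tag="@example/greeting",                 # hypothetical predictor tag
        description="Return a friendly greeting.",
        sandbox=Sandbox().pip_install("numpy")   # optional; defaults to a bare Sandbox()
    )
    def predict (name: str) -> str:
        return f"Hello, {name}!"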
4 | # 5 | 6 | from rich import print 7 | from typer import Exit, Option 8 | from webbrowser import open as open_browser 9 | 10 | from ..version import __version__ 11 | 12 | def _explore (value: bool): 13 | if value: 14 | open_browser("https://fxn.ai/explore") 15 | raise Exit() 16 | 17 | def _learn (value: bool): 18 | if value: 19 | open_browser("https://docs.fxn.ai") 20 | raise Exit() 21 | 22 | def _version (value: bool): 23 | if value: 24 | print(__version__) 25 | raise Exit() 26 | 27 | def cli_options ( 28 | explore: bool = Option(None, "--explore", callback=_explore, help="Explore predictors on Function."), 29 | learn: bool = Option(None, "--learn", callback=_learn, help="Learn about Function."), 30 | version: bool = Option(None, "--version", callback=_version, help="Get the Function CLI version.") 31 | ): 32 | pass -------------------------------------------------------------------------------- /fxn/cli/predictions.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from asyncio import run as run_async 7 | from io import BytesIO 8 | from numpy import array_repr, ndarray 9 | from pathlib import Path, PurePath 10 | from PIL import Image 11 | from rich import print_json 12 | from tempfile import mkstemp 13 | from typer import Argument, Context, Option 14 | 15 | from ..function import Function 16 | from ..logging import CustomProgress, CustomProgressTask 17 | from ..types import Prediction 18 | from .auth import get_access_key 19 | 20 | def create_prediction ( 21 | tag: str=Argument(..., help="Predictor tag."), 22 | quiet: bool=Option(False, "--quiet", help="Suppress verbose logging when creating the prediction."), 23 | context: Context = 0 24 | ): 25 | run_async(_predict_async(tag, quiet=quiet, context=context)) 26 | 27 | async def _predict_async (tag: str, quiet: bool, context: Context): 28 | # Preload 29 | with CustomProgress(transient=True, disable=quiet): 30 | fxn = Function(get_access_key()) 31 | with CustomProgressTask( 32 | loading_text="Preloading predictor...", 33 | done_text="Preloaded predictor" 34 | ): 35 | fxn.predictions.create(tag, inputs={ }) 36 | with CustomProgressTask(loading_text="Making prediction..."): 37 | inputs = { } 38 | for i in range(0, len(context.args), 2): 39 | name = context.args[i].replace("-", "") 40 | value = _parse_value(context.args[i+1]) 41 | inputs[name] = value 42 | prediction = fxn.predictions.create(tag, inputs=inputs) 43 | _log_prediction(prediction) 44 | 45 | def _parse_value (value: str): 46 | """ 47 | Parse a value from a CLI argument. 48 | 49 | Parameters: 50 | value (str): CLI input argument. 51 | 52 | Returns: 53 | bool | int | float | str | Path: Parsed value. 
54 | """ 55 | # Boolean 56 | if value == "true": 57 | return True 58 | if value == "false": 59 | return False 60 | # Integer 61 | try: 62 | return int(value) 63 | except ValueError: 64 | pass 65 | # Float 66 | try: 67 | return float(value) 68 | except ValueError: 69 | pass 70 | # File 71 | if value.startswith("@"): 72 | path = Path(value[1:]).expanduser().resolve() 73 | if path.suffix in [".txt", ".md"]: 74 | with open(path) as f: 75 | return f.read() 76 | elif path.suffix in [".jpg", ".png"]: 77 | return Image.open(path) 78 | else: 79 | with open(path, "rb") as f: 80 | return BytesIO(f.read()) 81 | # String 82 | return value 83 | 84 | def _log_prediction (prediction: Prediction): 85 | images = [value for value in prediction.results or [] if isinstance(value, Image.Image)] 86 | prediction.results = [_serialize_value(value) for value in prediction.results] if prediction.results is not None else None 87 | print_json(data=prediction.model_dump()) 88 | for image in images: 89 | image.show() 90 | 91 | def _serialize_value (value): 92 | if isinstance(value, ndarray): 93 | return array_repr(value) 94 | if isinstance(value, Image.Image): 95 | _, path = mkstemp(suffix=".png" if value.mode == "RGBA" else ".jpg") 96 | value.save(path) 97 | return path 98 | if isinstance(value, BytesIO): 99 | return str(value) 100 | if isinstance(value, PurePath): 101 | return str(value) 102 | return value -------------------------------------------------------------------------------- /fxn/cli/predictors.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from rich import print_json 7 | from typer import Argument 8 | 9 | from ..function import Function 10 | from ..logging import CustomProgress, CustomProgressTask 11 | from .auth import get_access_key 12 | 13 | def retrieve_predictor ( 14 | tag: str=Argument(..., help="Predictor tag.") 15 | ): 16 | with CustomProgress(transient=True): 17 | with CustomProgressTask(loading_text="Retrieving predictor..."): 18 | fxn = Function(get_access_key()) 19 | predictor = fxn.predictors.retrieve(tag) 20 | predictor = predictor.model_dump() if predictor else None 21 | print_json(data=predictor) 22 | 23 | def archive_predictor ( 24 | tag: str=Argument(..., help="Predictor tag.") 25 | ): 26 | with CustomProgress(): 27 | with CustomProgressTask( 28 | loading_text="Archiving predictor...", 29 | done_text=f"Archived predictor: [bold dark_orange]{tag}[/bold dark_orange]" 30 | ): 31 | fxn = Function(get_access_key()) 32 | fxn.client.request( 33 | method="POST", 34 | path=f"/predictors/{tag}/archive" 35 | ) 36 | 37 | def delete_predictor ( 38 | tag: str=Argument(..., help="Predictor tag.") 39 | ): 40 | with CustomProgress(): 41 | with CustomProgressTask( 42 | loading_text="Deleting predictor...", 43 | done_text=f"Deleted predictor: [bold red]{tag}[/bold red]" 44 | ): 45 | fxn = Function(get_access_key()) 46 | fxn.client.request( 47 | method="DELETE", 48 | path=f"/predictors/{tag}" 49 | ) -------------------------------------------------------------------------------- /fxn/cli/sources.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
4 | # 5 | 6 | from datetime import datetime 7 | from pathlib import Path 8 | from pydantic import BaseModel 9 | from rich import print_json 10 | from typer import Argument, Option 11 | from typing_extensions import Annotated 12 | 13 | from ..function import Function 14 | from ..logging import CustomProgress, CustomProgressTask 15 | from .auth import get_access_key 16 | 17 | def retrieve_source ( 18 | predictor: Annotated[str, Option(help="Predictor tag.")] = None, 19 | prediction: Annotated[str, Option(help="Prediction identifier. If specified, this MUST be from a prediction returned by the Function API.")] = None, 20 | output: Annotated[Path, Option(help="Path to output source file.")] = Path("predictor.cpp") 21 | ): 22 | if not ((predictor is not None) ^ (prediction is not None)): 23 | raise ValueError(f"Predictor tag or prediction identifier must be provided, but not both.") 24 | fxn = Function(get_access_key()) 25 | with CustomProgress(transient=True): 26 | if prediction is None: 27 | with CustomProgressTask(loading_text="Creating prediction..."): 28 | empty_prediction = fxn.predictions.create(tag=predictor) 29 | prediction = empty_prediction.id 30 | with CustomProgressTask(loading_text="Retrieving source..."): 31 | source = fxn.client.request( 32 | method="GET", 33 | path=f"/predictions/{prediction}/source", 34 | response_type=_PredictionSource 35 | ) 36 | output.write_text(source.code) 37 | source.code = str(output.resolve()) 38 | print_json(data=source.model_dump(mode="json", by_alias=True)) 39 | 40 | class _PredictionSource (BaseModel): 41 | tag: str 42 | target: str 43 | code: str 44 | created: datetime 45 | compiled: datetime 46 | latency: float # millis -------------------------------------------------------------------------------- /fxn/client.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from json import loads, JSONDecodeError 7 | from pydantic import BaseModel, TypeAdapter 8 | from requests import request 9 | from typing import AsyncGenerator, Literal, Type, TypeVar 10 | 11 | T = TypeVar("T", bound=BaseModel) 12 | 13 | class FunctionClient: 14 | 15 | def __init__(self, access_key: str, api_url: str | None) -> None: 16 | self.access_key = access_key 17 | self.api_url = api_url or "https://api.fxn.ai/v1" 18 | 19 | def request ( 20 | self, 21 | *, 22 | method: Literal["GET", "POST", "PATCH", "DELETE"], 23 | path: str, 24 | body: dict[str, object]=None, 25 | response_type: Type[T]=None 26 | ) -> T: 27 | """ 28 | Make a request to a REST endpoint. 29 | 30 | Parameters: 31 | method (str): Request method. 32 | path (str): Endpoint path. 33 | body (dict): Request JSON body. 34 | response_type (Type): Response type. 
35 | """ 36 | response = request( 37 | method=method, 38 | url=f"{self.api_url}{path}", 39 | json=body, 40 | headers={ "Authorization": f"Bearer {self.access_key}" } 41 | ) 42 | data = response.text 43 | try: 44 | data = response.json() 45 | except JSONDecodeError: 46 | pass 47 | if response.ok: 48 | return response_type(**data) if response_type is not None else None 49 | else: 50 | error = _ErrorResponse(**data).errors[0].message if isinstance(data, dict) else data 51 | raise FunctionAPIError(error, response.status_code) 52 | 53 | async def stream ( 54 | self, 55 | *, 56 | method: Literal["GET", "POST", "PATCH", "DELETE"], 57 | path: str, 58 | body: dict[str, object]=None, 59 | response_type: Type[T]=None 60 | ) -> AsyncGenerator[T, None]: 61 | """ 62 | Make a request to a REST endpoint and consume the response as a server-sent events stream. 63 | 64 | Parameters: 65 | method (str): Request method. 66 | path (str): Endpoint path. 67 | body (dict): Request JSON body. 68 | response_type (Type): Response type. 69 | """ 70 | response = request( 71 | method=method, 72 | url=f"{self.api_url}{path}", 73 | json=body, 74 | headers={ 75 | "Accept": "text/event-stream", 76 | "Authorization": f"Bearer {self.access_key}" 77 | }, 78 | stream=True 79 | ) 80 | event = None 81 | data: str = "" 82 | for line in response.iter_lines(decode_unicode=True): 83 | if line is None: 84 | break 85 | line: str = line.strip() 86 | if line: 87 | if line.startswith("event:"): 88 | event = line[len("event:"):].strip() 89 | elif line.startswith("data:"): 90 | line_data = line[len("data:"):].strip() 91 | data = f"{data}\n{line_data}" 92 | continue 93 | if event is not None: 94 | yield _parse_sse_event(event, data, response_type) 95 | event = None 96 | data = "" 97 | if event or data: 98 | yield _parse_sse_event(event, data, response_type) 99 | 100 | class FunctionAPIError (Exception): 101 | 102 | def __init__(self, message: str, status_code: int): 103 | super().__init__(message) 104 | self.message = message 105 | self.status_code = status_code 106 | 107 | def __str__(self): 108 | return f"FunctionAPIError: {self.message} (Status Code: {self.status_code})" 109 | 110 | class _APIError (BaseModel): 111 | message: str 112 | 113 | class _ErrorResponse (BaseModel): 114 | errors: list[_APIError] 115 | 116 | def _parse_sse_event (event: str, data: str, type: Type[T]=None) -> T: 117 | result = { "event": event, "data": loads(data) } 118 | result = TypeAdapter(type).validate_python(result) if type is not None else result 119 | return result -------------------------------------------------------------------------------- /fxn/compile.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
4 | # 5 | 6 | from collections.abc import Callable 7 | from functools import wraps 8 | from inspect import isasyncgenfunction, iscoroutinefunction 9 | from pathlib import Path 10 | from pydantic import BaseModel, ConfigDict, Field 11 | from types import ModuleType 12 | from typing import Any, Callable, Literal, ParamSpec, TypeVar, cast 13 | 14 | from .beta import ( 15 | CoreMLInferenceMetadata, LiteRTInferenceMetadata, LlamaCppInferenceMetadata, 16 | OnnxInferenceMetadata, OnnxRuntimeInferenceSessionMetadata, OpenVINOInferenceMetadata, 17 | QnnInferenceMetadata 18 | ) 19 | from .sandbox import Sandbox 20 | from .types import PredictorAccess 21 | 22 | CompileTarget = Literal[ 23 | "android", 24 | "ios", 25 | "linux", 26 | "macos", 27 | "visionos", 28 | "wasm", 29 | "windows" 30 | ] 31 | 32 | CompileMetadata = ( 33 | CoreMLInferenceMetadata | 34 | LiteRTInferenceMetadata | 35 | LlamaCppInferenceMetadata | 36 | OnnxInferenceMetadata | 37 | OnnxRuntimeInferenceSessionMetadata | 38 | OpenVINOInferenceMetadata | 39 | QnnInferenceMetadata 40 | ) 41 | 42 | P = ParamSpec("P") 43 | R = TypeVar("R") 44 | 45 | class PredictorSpec (BaseModel): 46 | """ 47 | Descriptor of a predictor to be compiled. 48 | """ 49 | tag: str = Field(description="Predictor tag.") 50 | description: str = Field(description="Predictor description. MUST be less than 100 characters long.", min_length=4, max_length=100) 51 | sandbox: Sandbox = Field(description="Sandbox to compile the function.") 52 | targets: list[str] | None = Field(description="Targets to compile this predictor for. Pass `None` to compile for our default targets.") 53 | metadata: list[object] = Field(default=[], description="Metadata to use while compiling the function.") 54 | access: PredictorAccess = Field(description="Predictor access.") 55 | card: str | None = Field(default=None, description="Predictor card (markdown).") 56 | media: str | None = Field(default=None, description="Predictor media URL.") 57 | license: str | None = Field(default=None, description="Predictor license URL. This is required for public predictors.") 58 | model_config = ConfigDict(arbitrary_types_allowed=True, extra="allow", frozen=True) 59 | 60 | def compile ( 61 | tag: str, 62 | *, 63 | description: str, 64 | sandbox: Sandbox=None, 65 | trace_modules: list[ModuleType]=[], 66 | targets: list[CompileTarget]=None, 67 | metadata: list[CompileMetadata]=[], 68 | access: PredictorAccess="private", 69 | card: str | Path=None, 70 | media: Path=None, 71 | license: str=None, 72 | **kwargs 73 | ) -> Callable[[Callable[P, R]], Callable[P, R]]: 74 | """ 75 | Create a predictor by compiling a stateless function. 76 | 77 | Parameters: 78 | tag (str): Predictor tag. 79 | description (str): Predictor description. MUST be less than 100 characters long. 80 | sandbox (Sandbox): Sandbox to compile the function. 81 | trace_modules (list): Modules to trace and compile. 82 | targets (list): Targets to compile this predictor for. Pass `None` to compile for our default targets. 83 | metadata (list): Metadata to use while compiling the function. 84 | access (PredictorAccess): Predictor access. 85 | card (str | Path): Predictor card markdown string or path to card. 86 | media (Path): Predictor thumbnail image (jpeg or png) path. 87 | license (str): Predictor license URL. This is required for public predictors. 
88 | """ 89 | def decorator (func: Callable): 90 | # Check type 91 | if not callable(func): 92 | raise TypeError("Cannot compile non-function objects") 93 | if isasyncgenfunction(func) or iscoroutinefunction(func): 94 | raise TypeError(f"Entrypoint function '{func.__name__}' must be a regular function or generator") 95 | # Gather metadata 96 | spec = PredictorSpec( 97 | tag=tag, 98 | description=description, 99 | sandbox=sandbox if sandbox is not None else Sandbox(), 100 | targets=targets, 101 | access=access, 102 | card=card.read_text() if isinstance(card, Path) else card, 103 | media=None, # INCOMPLETE 104 | license=license, 105 | trace_modules=trace_modules, 106 | metadata=metadata, 107 | **kwargs 108 | ) 109 | # Wrap 110 | @wraps(func) 111 | def wrapper (*args, **kwargs): 112 | return func(*args, **kwargs) 113 | wrapper.__predictor_spec = spec 114 | return cast(Callable[P, R], wrapper) 115 | return decorator -------------------------------------------------------------------------------- /fxn/function.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from os import environ 7 | 8 | from .beta.client import BetaClient 9 | from .client import FunctionClient 10 | from .services import PredictionService, PredictorService, UserService 11 | 12 | class Function: 13 | """ 14 | Function client. 15 | 16 | Members: 17 | client (GraphClient): Function graph API client. Do NOT use this unless you know what you are doing. 18 | users (UserService): Manage users. 19 | predictors (PredictorService): Manage predictors. 20 | predictions (PredictionService): Manage predictions. 21 | beta (BetaClient): Beta client for incubating features. 22 | 23 | Constructor: 24 | access_key (str): Function access key. 25 | api_url (str): Function API URL. 26 | """ 27 | client: FunctionClient 28 | users: UserService 29 | predictors: PredictorService 30 | predictions: PredictionService 31 | beta: BetaClient 32 | 33 | def __init__ ( 34 | self, 35 | access_key: str=None, 36 | *, 37 | api_url: str=None 38 | ): 39 | access_key = access_key or environ.get("FXN_ACCESS_KEY") 40 | api_url = api_url or environ.get("FXN_API_URL") 41 | self.client = FunctionClient(access_key, api_url) 42 | self.users = UserService(self.client) 43 | self.predictors = PredictorService(self.client) 44 | self.predictions = PredictionService(self.client) 45 | self.beta = BetaClient(self.client, predictions=self.predictions) -------------------------------------------------------------------------------- /fxn/lib/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # -------------------------------------------------------------------------------- /fxn/logging.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
4 | # 5 | 6 | from contextvars import ContextVar 7 | from rich.console import Console, ConsoleOptions, RenderResult 8 | from rich.progress import Progress, ProgressColumn, SpinnerColumn, TextColumn 9 | from rich.text import Text 10 | from rich.traceback import Traceback 11 | from types import MethodType 12 | 13 | current_progress = ContextVar("current_progress", default=None) 14 | progress_task_stack = ContextVar("progress_task_stack", default=[]) 15 | 16 | class CustomSpinnerColumn (SpinnerColumn): 17 | 18 | def __init__ ( 19 | self, 20 | spinner_name="dots", 21 | success_text="[bold green]✔[/bold green]", 22 | failure_text="[bright_red]✘[/bright_red]", 23 | style="", 24 | ): 25 | super().__init__(spinner_name=spinner_name, style=style) 26 | self.success_text = success_text 27 | self.failure_text = failure_text 28 | 29 | def render (self, task): 30 | done_text = ( 31 | self.failure_text 32 | if task.fields.get("status") == "error" 33 | else self.success_text 34 | ) 35 | return done_text if task.finished else self.spinner 36 | 37 | class CustomTextColumn (TextColumn): 38 | """Custom text column that changes color based on task status""" 39 | 40 | def __init__ (self, text_format="{task.description}"): 41 | super().__init__(text_format) 42 | 43 | def render (self, task): 44 | # Indent and color 45 | description = task.description 46 | indent_level = task.fields.get("indent_level", 0) 47 | indent = self.__get_indent(indent_level) 48 | task.description = f"{indent}{description}" 49 | if task.fields.get("status") == "error": 50 | task.description = f"[bright_red]{task.description}[/bright_red]" 51 | # Render 52 | text = super().render(task) 53 | task.description = description 54 | # Return 55 | return text 56 | 57 | def __get_indent (self, level: int) -> str: 58 | if level == 0: 59 | return "" 60 | indicator = "└── " 61 | return " " * len(indicator) * (level - 1) + indicator 62 | 63 | class CustomProgress(Progress): 64 | 65 | def __init__ ( 66 | self, 67 | *columns: ProgressColumn, 68 | console=None, 69 | auto_refresh=True, 70 | refresh_per_second = 10, 71 | speed_estimate_period=30, 72 | transient=False, 73 | redirect_stdout=True, 74 | redirect_stderr=True, 75 | get_time=None, 76 | disable=False, 77 | expand=False 78 | ): 79 | default_columns = list(columns) if len(columns) > 0 else [ 80 | CustomSpinnerColumn(), 81 | CustomTextColumn("[progress.description]{task.description}"), 82 | ] 83 | super().__init__( 84 | *default_columns, 85 | console=console, 86 | auto_refresh=auto_refresh, 87 | refresh_per_second=refresh_per_second, 88 | speed_estimate_period=speed_estimate_period, 89 | transient=transient, 90 | redirect_stdout=redirect_stdout, 91 | redirect_stderr=redirect_stderr, 92 | get_time=get_time, 93 | disable=disable, 94 | expand=expand 95 | ) 96 | self.default_columns = default_columns 97 | 98 | def __enter__ (self): 99 | self._token = current_progress.set(self) 100 | self._stack_token = progress_task_stack.set([]) 101 | return super().__enter__() 102 | 103 | def __exit__ (self, exc_type, exc_val, exc_tb): 104 | current_progress.reset(self._token) 105 | progress_task_stack.reset(self._stack_token) 106 | return super().__exit__(exc_type, exc_val, exc_tb) 107 | 108 | def get_renderables (self): 109 | for task in self.tasks: 110 | task_columns = task.fields.get("columns") or list() 111 | self.columns = self.default_columns + task_columns 112 | yield self.make_tasks_table([task]) 113 | 114 | class CustomProgressTask: 115 | 116 | def __init__ ( 117 | self, 118 | *, 119 | loading_text: str, 120 | 
done_text: str=None, 121 | columns: list[ProgressColumn]=None 122 | ): 123 | self.loading_text = loading_text 124 | self.done_text = done_text 125 | self.task_id = None 126 | self.columns = columns 127 | 128 | def __enter__ (self): 129 | progress = current_progress.get() 130 | if progress is not None: 131 | self.task_id = progress.add_task( 132 | self.loading_text, 133 | total=1, 134 | columns=self.columns, 135 | indent_level=len(progress_task_stack.get()) 136 | ) 137 | current_stack = progress_task_stack.get() 138 | progress_task_stack.set(current_stack + [self.task_id]) 139 | return self 140 | 141 | def __exit__ (self, exc_type, exc_val, exc_tb): 142 | progress = current_progress.get() 143 | if progress is not None and self.task_id is not None: 144 | current_task = progress._tasks[self.task_id] 145 | progress.update( 146 | self.task_id, 147 | description=self.done_text or current_task.description, 148 | completed=current_task.total, 149 | status="error" if exc_type is not None else current_task.fields.get("status") 150 | ) 151 | current_stack = progress_task_stack.get() 152 | if current_stack: 153 | progress_task_stack.set(current_stack[:-1]) 154 | self.task_id = None 155 | return False 156 | 157 | def update (self, **kwargs): 158 | progress = current_progress.get() 159 | if progress is None or self.task_id is None: 160 | return 161 | progress.update(self.task_id, **kwargs) 162 | 163 | def finish (self, message: str): 164 | self.done_text = message 165 | 166 | class TracebackMarkupConsole (Console): 167 | 168 | def print( 169 | self, 170 | *objects, 171 | sep = " ", 172 | end = "\n", 173 | style = None, 174 | justify = None, 175 | overflow = None, 176 | no_wrap = None, 177 | emoji = None, 178 | markup = None, 179 | highlight = None, 180 | width = None, 181 | height = None, 182 | crop = True, 183 | soft_wrap = None, 184 | new_line_start = False 185 | ): 186 | traceback = objects[0] 187 | if isinstance(traceback, Traceback): 188 | stack = traceback.trace.stacks[0] 189 | original_rich_console = traceback.__rich_console__ 190 | def __rich_console__ (self: Traceback, console: Console, options: ConsoleOptions) -> RenderResult: 191 | for renderable in original_rich_console(console, options): 192 | if ( 193 | isinstance(renderable, Text) and 194 | any(part.startswith(f"{stack.exc_type}:") for part in renderable._text) 195 | ): 196 | yield Text.assemble( 197 | (f"{stack.exc_type}: ", "traceback.exc_type"), 198 | Text.from_markup(stack.exc_value) 199 | ) 200 | else: 201 | yield renderable 202 | traceback.__rich_console__ = MethodType(__rich_console__, traceback) 203 | return super().print( 204 | *objects, 205 | sep=sep, 206 | end=end, 207 | style=style, 208 | justify=justify, 209 | overflow=overflow, 210 | no_wrap=no_wrap, 211 | emoji=emoji, 212 | markup=markup, 213 | highlight=highlight, 214 | width=width, 215 | height=height, 216 | crop=crop, 217 | soft_wrap=soft_wrap, 218 | new_line_start=new_line_start 219 | ) -------------------------------------------------------------------------------- /fxn/sandbox.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
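`CustomProgress` and `CustomProgressTask` in fxn/logging.py above coordinate through context variables, so nested tasks render indented beneath their parent without any explicit wiring. A minimal sketch:

    from fxn.logging import CustomProgress, CustomProgressTask

    with CustomProgress():
        with CustomProgressTask(loading_text="Compiling...", done_text="Compiled"):
            with CustomProgressTask(loading_text="Tracing model..."):
                pass   # renders as "└── Tracing model..." beneath the parent task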
4 | # 5 | 6 | from __future__ import annotations 7 | from abc import ABC, abstractmethod 8 | from hashlib import sha256 9 | from pathlib import Path 10 | from pydantic import BaseModel 11 | from requests import put 12 | from rich.progress import BarColumn, TextColumn 13 | from typing import Literal 14 | 15 | from .function import Function 16 | from .logging import CustomProgressTask 17 | 18 | class WorkdirCommand (BaseModel): 19 | kind: Literal["workdir"] = "workdir" 20 | path: str 21 | 22 | class EnvCommand (BaseModel): 23 | kind: Literal["env"] = "env" 24 | env: dict[str, str] 25 | 26 | class UploadableCommand (BaseModel, ABC): 27 | from_path: str 28 | to_path: str 29 | manifest: dict[str, str] | None = None 30 | 31 | @abstractmethod 32 | def get_files (self) -> list[Path]: 33 | pass 34 | 35 | class UploadFileCommand (UploadableCommand): 36 | kind: Literal["upload_file"] = "upload_file" 37 | 38 | def get_files (self) -> list[Path]: 39 | return [Path(self.from_path).resolve()] 40 | 41 | class UploadDirectoryCommand (UploadableCommand): 42 | kind: Literal["upload_dir"] = "upload_dir" 43 | 44 | def get_files (self) -> list[Path]: 45 | from_path = Path(self.from_path) 46 | if not from_path.is_absolute(): 47 | raise ValueError("Cannot upload directory because directory path must be absolute") 48 | return [file for file in from_path.rglob("*") if file.is_file()] 49 | 50 | class EntrypointCommand (UploadableCommand): 51 | kind: Literal["entrypoint"] = "entrypoint" 52 | name: str 53 | 54 | def get_files (self) -> list[Path]: 55 | return [Path(self.from_path).resolve()] 56 | 57 | class PipInstallCommand (BaseModel): 58 | kind: Literal["pip_install"] = "pip_install" 59 | packages: list[str] 60 | 61 | class AptInstallCommand (BaseModel): 62 | kind: Literal["apt_install"] = "apt_install" 63 | packages: list[str] 64 | 65 | Command = ( 66 | WorkdirCommand | 67 | EnvCommand | 68 | UploadFileCommand | 69 | UploadDirectoryCommand | 70 | PipInstallCommand | 71 | AptInstallCommand | 72 | EntrypointCommand 73 | ) 74 | 75 | class Sandbox (BaseModel): 76 | """ 77 | Sandbox which defines a containerized environment for compiling your Python function. 78 | """ 79 | commands: list[Command] = [] 80 | 81 | def workdir (self, path: str | Path) -> Sandbox: 82 | """ 83 | Change the current working directory for subsequent commands. 84 | 85 | Parameters: 86 | path (str | Path): Path to change to. 87 | """ 88 | command = WorkdirCommand(path=str(path)) 89 | return Sandbox(commands=self.commands + [command]) 90 | 91 | def env (self, **env: str) -> Sandbox: 92 | """ 93 | Set environment variables in the sandbox. 94 | """ 95 | command = EnvCommand(env=env) 96 | return Sandbox(commands=self.commands + [command]) 97 | 98 | def upload_file ( 99 | self, 100 | from_path: str | Path, 101 | to_path: str | Path = "./" 102 | ) -> Sandbox: 103 | """ 104 | Upload a file to the sandbox. 105 | 106 | Parameters: 107 | from_path (str | Path): File path on the local file system. 108 | to_path (str | Path): Remote path to upload file to. 109 | """ 110 | from_path = from_path if isinstance(from_path, Path) else Path(from_path) 111 | command = UploadFileCommand( 112 | from_path=str(from_path.resolve()), 113 | to_path=str(to_path) 114 | ) 115 | return Sandbox(commands=self.commands + [command]) 116 | 117 | def upload_directory ( 118 | self, 119 | from_path: str | Path, 120 | to_path: str | Path = "." 121 | ) -> Sandbox: 122 | """ 123 | Upload a directory to the sandbox. 
124 | 125 | Parameters: 126 | from_path (str | Path): Directory path on the local file system. 127 | to_path (str | Path): Remote path to upload directory to. 128 | """ 129 | from_path = from_path if isinstance(from_path, Path) else Path(from_path) 130 | command = UploadDirectoryCommand( 131 | from_path=str(from_path.resolve()), 132 | to_path=str(to_path) 133 | ) 134 | return Sandbox(commands=self.commands + [command]) 135 | 136 | def pip_install (self, *packages: str) -> Sandbox: 137 | """ 138 | Install Python packages in the sandbox. 139 | 140 | Parameters: 141 | packages (list): Packages to install. 142 | """ 143 | command = PipInstallCommand(packages=packages) 144 | return Sandbox(commands=self.commands + [command]) 145 | 146 | def apt_install (self, *packages: str) -> Sandbox: 147 | """ 148 | Install Debian packages in the sandbox. 149 | 150 | Parameters: 151 | packages (list): Packages to install. 152 | """ 153 | command = AptInstallCommand(packages=packages) 154 | return Sandbox(commands=self.commands + [command]) 155 | 156 | def populate (self, fxn: Function=None) -> Sandbox: # CHECK # In place 157 | """ 158 | Populate all metadata. 159 | """ 160 | fxn = fxn if fxn is not None else Function() 161 | entrypoint = next(cmd for cmd in self.commands if isinstance(cmd, EntrypointCommand)) 162 | entry_path = Path(entrypoint.from_path).resolve() 163 | for command in self.commands: 164 | if isinstance(command, UploadableCommand): 165 | cwd = Path.cwd() 166 | from_path = Path(command.from_path) 167 | to_path = Path(command.to_path) 168 | if not from_path.is_absolute(): 169 | from_path = (entry_path / from_path).resolve() 170 | command.from_path = str(from_path) 171 | files = command.get_files() 172 | name = from_path.relative_to(cwd) if from_path.is_relative_to(cwd) else from_path.resolve() 173 | with CustomProgressTask( 174 | loading_text=f"Uploading [light_slate_blue]{name}[/light_slate_blue]...", 175 | done_text=f"Uploaded [light_slate_blue]{name}[/light_slate_blue]", 176 | columns=[ 177 | BarColumn(), 178 | TextColumn("{task.completed}/{task.total}") 179 | ] 180 | ) as task: 181 | manifest = { } 182 | for idx, file in enumerate(files): 183 | rel_file_path = file.relative_to(from_path) if from_path.is_dir() else file.name 184 | dst_path = to_path / rel_file_path 185 | checksum = self.__upload_file(file, fxn=fxn) 186 | manifest[str(dst_path)] = checksum 187 | task.update(total=len(files), completed=idx+1) 188 | command.manifest = manifest 189 | return self 190 | 191 | def __upload_file (self, path: Path, fxn: Function) -> str: 192 | if not path.is_file(): 193 | raise ValueError(f"Cannot upload file at path {path} because it is not a file") 194 | hash = self.__compute_hash(path) 195 | try: 196 | fxn.client.request(method="HEAD", path=f"/resources/{hash}") 197 | except: 198 | resource = fxn.client.request( 199 | method="POST", 200 | path="/resources", 201 | body={ "name": hash }, 202 | response_type=_Resource 203 | ) 204 | with path.open("rb") as f: 205 | put(resource.url, data=f).raise_for_status() 206 | return hash 207 | 208 | def __compute_hash (self, path: Path) -> str: 209 | hash = sha256() 210 | with path.open("rb") as f: 211 | for chunk in iter(lambda: f.read(4096), b""): 212 | hash.update(chunk) 213 | return hash.hexdigest() 214 | 215 | class _Resource (BaseModel): 216 | url: str -------------------------------------------------------------------------------- /fxn/services/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # 
Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from dataclasses import asdict, is_dataclass 7 | from datetime import datetime, timezone 8 | from io import BytesIO 9 | from numpy import array, ndarray 10 | from pathlib import Path 11 | from PIL import Image 12 | from pydantic import BaseModel 13 | from requests import get 14 | from rich.progress import BarColumn, DownloadColumn, TransferSpeedColumn, TimeRemainingColumn 15 | from tempfile import gettempdir, NamedTemporaryFile 16 | from typing import Iterator 17 | from urllib.parse import urlparse 18 | 19 | from ..c import Configuration, Predictor, Prediction as CPrediction, Value as CValue, ValueFlags, ValueMap 20 | from ..client import FunctionClient 21 | from ..logging import CustomProgressTask 22 | from ..types import Acceleration, Prediction, PredictionResource 23 | 24 | Value = ( 25 | None | 26 | float | 27 | int | 28 | bool | 29 | ndarray | 30 | str | 31 | list[object] | 32 | dict[str, object] | 33 | Image.Image | 34 | BytesIO | 35 | memoryview 36 | ) 37 | 38 | class PredictionService: 39 | 40 | def __init__ (self, client: FunctionClient): 41 | self.client = client 42 | self.__cache = { } 43 | self.__cache_dir = self.__class__.__get_home_dir() / ".fxn" / "cache" 44 | self.__cache_dir.mkdir(parents=True, exist_ok=True) 45 | 46 | def create ( 47 | self, 48 | tag: str, 49 | *, 50 | inputs: dict[str, Value] | None=None, 51 | acceleration: Acceleration="auto", 52 | device=None, 53 | client_id: str=None, 54 | configuration_id: str=None 55 | ) -> Prediction: 56 | """ 57 | Create a prediction. 58 | 59 | Parameters: 60 | tag (str): Predictor tag. 61 | inputs (dict): Input values. 62 | acceleration (Acceleration): Prediction acceleration. 63 | client_id (str): Function client identifier. Specify this to override the current client identifier. 64 | configuration_id (str): Configuration identifier. Specify this to override the current configuration identifier. 65 | 66 | Returns: 67 | Prediction: Created prediction. 68 | """ 69 | if inputs is None: 70 | return self.__create_raw_prediction( 71 | tag=tag, 72 | client_id=client_id, 73 | configuration_id=configuration_id 74 | ) 75 | predictor = self.__get_predictor( 76 | tag=tag, 77 | acceleration=acceleration, 78 | device=device, 79 | client_id=client_id, 80 | configuration_id=configuration_id 81 | ) 82 | with ( 83 | self.__to_value_map(inputs) as input_map, 84 | predictor.create_prediction(input_map) as prediction 85 | ): 86 | return self.__to_prediction(tag, prediction) 87 | 88 | def stream ( 89 | self, 90 | tag: str, 91 | *, 92 | inputs: dict[str, Value], 93 | acceleration: Acceleration="auto", 94 | device=None 95 | ) -> Iterator[Prediction]: 96 | """ 97 | Stream a prediction. 98 | 99 | Parameters: 100 | tag (str): Predictor tag. 101 | inputs (dict): Input values. 102 | acceleration (Acceleration): Prediction acceleration. 103 | 104 | Returns: 105 | Iterator[Prediction]: Stream of created predictions. 
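        Example (a minimal sketch; `@yusuf/streaming` is a sample predictor used in this repo's test suite):

            fxn = Function()
            for prediction in fxn.predictions.stream(
                tag="@yusuf/streaming",
                inputs={ "sentence": "Hello world" }
            ):
                print(prediction.results)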
106 | """ 107 | predictor = self.__get_predictor( 108 | tag=tag, 109 | acceleration=acceleration, 110 | device=device, 111 | ) 112 | with ( 113 | self.__to_value_map(inputs) as input_map, 114 | predictor.stream_prediction(input_map) as stream 115 | ): 116 | for prediction in stream: 117 | with prediction: 118 | yield self.__to_prediction(tag, prediction) 119 | 120 | def __create_raw_prediction ( 121 | self, 122 | tag: str, 123 | client_id: str=None, 124 | configuration_id: str=None 125 | ) -> Prediction: 126 | client_id = client_id if client_id is not None else Configuration.get_client_id() 127 | configuration_id = configuration_id if configuration_id is not None else Configuration.get_unique_id() 128 | prediction = self.client.request( 129 | method="POST", 130 | path="/predictions", 131 | body={ 132 | "tag": tag, 133 | "clientId": client_id, 134 | "configurationId": configuration_id, 135 | }, 136 | response_type=Prediction 137 | ) 138 | return prediction 139 | 140 | def __get_predictor ( 141 | self, 142 | tag: str, 143 | acceleration: Acceleration="auto", 144 | device=None, 145 | client_id: str=None, 146 | configuration_id: str=None 147 | ) -> Predictor: 148 | if tag in self.__cache: 149 | return self.__cache[tag] 150 | prediction = self.__create_raw_prediction( 151 | tag=tag, 152 | client_id=client_id, 153 | configuration_id=configuration_id 154 | ) 155 | with Configuration() as configuration: 156 | configuration.tag = prediction.tag 157 | configuration.token = prediction.configuration 158 | configuration.acceleration = acceleration 159 | configuration.device = device 160 | for resource in prediction.resources: 161 | path = self.__download_resource(resource) 162 | configuration.add_resource(resource.type, path) 163 | predictor = Predictor(configuration) 164 | self.__cache[tag] = predictor 165 | return predictor 166 | 167 | def __to_value_map (self, inputs: dict[str, Value]) -> ValueMap: 168 | map = ValueMap() 169 | for name, value in inputs.items(): 170 | map[name] = self.__to_value(value) 171 | return map 172 | 173 | def __to_value ( 174 | self, 175 | value: Value, 176 | *, 177 | flags: ValueFlags=ValueFlags.NONE 178 | ) -> CValue: 179 | value = self.__class__.__try_ensure_serializable(value) 180 | if value is None: 181 | return CValue.create_null() 182 | elif isinstance(value, bool): 183 | return self.__to_value(array(value, dtype="bool"), flags=flags | ValueFlags.COPY_DATA) 184 | elif isinstance(value, int): 185 | return self.__to_value(array(value, dtype="int32"), flags=flags | ValueFlags.COPY_DATA) 186 | elif isinstance(value, float): 187 | return self.__to_value(array(value, dtype="float32"), flags=flags | ValueFlags.COPY_DATA) 188 | elif isinstance(value, ndarray): 189 | return CValue.create_array(value, flags=flags) 190 | elif isinstance(value, str): 191 | return CValue.create_string(value) 192 | elif isinstance(value, list): 193 | return CValue.create_list(value) 194 | elif isinstance(value, dict): 195 | return CValue.create_dict(value) 196 | elif isinstance(value, Image.Image): 197 | return CValue.create_image(value) 198 | elif isinstance(value, (bytes, bytearray, memoryview, BytesIO)): 199 | flags |= ValueFlags.COPY_DATA if not isinstance(value, memoryview) else 0 200 | view_or_bytes = value.getvalue() if isinstance(value, BytesIO) else value 201 | view = memoryview(view_or_bytes) if not isinstance(view_or_bytes, memoryview) else view_or_bytes 202 | return CValue.create_binary(view, flags=flags) 203 | else: 204 | raise RuntimeError(f"Failed to convert object to Function value 
because object has an unsupported type: {type(value)}") 205 | 206 | def __to_prediction (self, tag: str, raw_prediction: CPrediction) -> Prediction: 207 | output_map = raw_prediction.results 208 | results = [output_map[output_map.key(idx)].to_object() for idx in range(len(output_map))] if output_map else None 209 | prediction = Prediction( 210 | id=raw_prediction.id, 211 | tag=tag, 212 | results=results, 213 | latency=raw_prediction.latency, 214 | error=raw_prediction.error, 215 | logs=raw_prediction.logs, 216 | created=datetime.now(timezone.utc).isoformat() 217 | ) 218 | return prediction 219 | 220 | def __download_resource (self, resource: PredictionResource) -> Path: 221 | path = self.__get_resource_path(resource) 222 | if path.exists(): 223 | return path 224 | path.parent.mkdir(parents=True, exist_ok=True) 225 | response = get(resource.url, stream=True) 226 | response.raise_for_status() 227 | size = int(response.headers.get("content-length", 0)) 228 | stem = Path(urlparse(resource.url).path).name 229 | completed = 0 230 | color = "purple" if resource.type == "dso" else "dark_orange" 231 | with ( 232 | CustomProgressTask( 233 | loading_text=f"[{color}]{stem}[/{color}]", 234 | columns=[ 235 | BarColumn(), 236 | DownloadColumn(), 237 | TransferSpeedColumn(), 238 | TimeRemainingColumn() 239 | ] 240 | ) as task, 241 | NamedTemporaryFile(mode="wb", delete=False) as tmp_file 242 | ): 243 | for chunk in response.iter_content(chunk_size=8192): 244 | if chunk: 245 | tmp_file.write(chunk) 246 | completed += len(chunk) 247 | task.update(total=size, completed=completed) 248 | Path(tmp_file.name).replace(path) 249 | return path 250 | 251 | def __get_resource_path (self, resource: PredictionResource) -> Path: 252 | stem = Path(urlparse(resource.url).path).name 253 | path = self.__cache_dir / stem 254 | path = path / resource.name if resource.name else path 255 | return path 256 | 257 | @classmethod 258 | def __get_home_dir (cls) -> Path: 259 | try: 260 | check = Path.home() / ".fxntest" 261 | with open(check, "w") as f: 262 | f.write("fxn") 263 | check.unlink() 264 | return Path.home() 265 | except Exception: # home directory is not writable, so fall back to the temporary directory 266 | return Path(gettempdir()) 267 | 268 | @classmethod 269 | def __try_ensure_serializable (cls, obj: object) -> object: 270 | if obj is None: 271 | return obj 272 | if isinstance(obj, list): 273 | return [cls.__try_ensure_serializable(x) for x in obj] 274 | if is_dataclass(obj) and not isinstance(obj, type): 275 | return asdict(obj) 276 | if isinstance(obj, BaseModel): 277 | return obj.model_dump(mode="json", by_alias=True) 278 | return obj -------------------------------------------------------------------------------- /fxn/services/predictor.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ..client import FunctionClient, FunctionAPIError 7 | from ..types import Predictor 8 | 9 | class PredictorService: 10 | 11 | def __init__ (self, client: FunctionClient) -> None: 12 | self.client = client 13 | 14 | def retrieve (self, tag: str) -> Predictor | None: 15 | """ 16 | Retrieve a predictor. 17 | 18 | Parameters: 19 | tag (str): Predictor tag. 20 | 21 | Returns: 22 | Predictor: Predictor, or `None` if no predictor was found with the given tag. 
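        Example (a minimal sketch; `@fxn/greeting` is a sample predictor used in this repo's test suite):

            fxn = Function()
            predictor = fxn.predictors.retrieve("@fxn/greeting")
            if predictor is not None:
                print(predictor.signature)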
23 | """ 24 | try: 25 | return self.client.request( 26 | method="GET", 27 | path=f"/predictors/{tag}", 28 | response_type=Predictor 29 | ) 30 | except FunctionAPIError as error: 31 | if error.status_code == 404: 32 | return None 33 | raise -------------------------------------------------------------------------------- /fxn/services/user.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from ..client import FunctionClient, FunctionAPIError 7 | from ..types import User 8 | 9 | class UserService: 10 | 11 | def __init__ (self, client: FunctionClient) -> None: 12 | self.client = client 13 | 14 | def retrieve (self) -> User | None: 15 | """ 16 | Retrieve the current user. 17 | 18 | Returns: 19 | User: User, or `None` if the client is not authenticated. 20 | """ 21 | try: 22 | return self.client.request( 23 | method="GET", 24 | path="/users", 25 | response_type=User 26 | ) 27 | except FunctionAPIError as error: 28 | if error.status_code == 401: 29 | return None 30 | raise -------------------------------------------------------------------------------- /fxn/types/__init__.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from .dtype import Dtype 7 | from .prediction import Acceleration, Prediction, PredictionResource 8 | from .predictor import EnumerationMember, Parameter, Predictor, PredictorAccess, PredictorStatus, Signature 9 | from .user import User -------------------------------------------------------------------------------- /fxn/types/dtype.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from enum import Enum 7 | 8 | class Dtype (str, Enum): 9 | """ 10 | Value type. 11 | This follows `numpy` dtypes. 12 | """ 13 | null = "null" 14 | float16 = "float16" 15 | float32 = "float32" 16 | float64 = "float64" 17 | int8 = "int8" 18 | int16 = "int16" 19 | int32 = "int32" 20 | int64 = "int64" 21 | uint8 = "uint8" 22 | uint16 = "uint16" 23 | uint32 = "uint32" 24 | uint64 = "uint64" 25 | bool = "bool" 26 | string = "string" 27 | list = "list" 28 | dict = "dict" 29 | image = "image" 30 | video = "video" 31 | audio = "audio" 32 | model = "model" 33 | binary = "binary" -------------------------------------------------------------------------------- /fxn/types/prediction.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from pydantic import BaseModel, Field 7 | from typing import Literal 8 | 9 | Acceleration = Literal["auto", "cpu", "gpu", "npu"] 10 | 11 | class PredictionResource (BaseModel): 12 | """ 13 | Prediction resource. 14 | 15 | Members: 16 | type (str): Resource type. 17 | url (str): Resource URL. 18 | name (str): Resource name. 19 | """ 20 | type: str = Field(description="Resource type.") 21 | url: str = Field(description="Resource URL.") 22 | name: str | None = Field(default=None, description="Resource name.") 23 | 24 | class Prediction (BaseModel): 25 | """ 26 | Prediction. 27 | 28 | Members: 29 | id (str): Prediction identifier. 30 | tag (str): Predictor tag. 31 | configuration (str): Prediction configuration token. This is only populated for `EDGE` predictions. 32 | resources (list): Prediction resources. 
This is only populated for `EDGE` predictions. 33 | results (list): Prediction results. 34 | latency (float): Prediction latency in milliseconds. 35 | error (str): Prediction error. This is `None` if the prediction completed successfully. 36 | logs (str): Prediction logs. 37 | created (str): Date created. 38 | """ 39 | id: str = Field(description="Prediction identifier.") 40 | tag: str = Field(description="Predictor tag.") 41 | configuration: str | None = Field(default=None, description="Prediction configuration token. This is only populated for `EDGE` predictions.") 42 | resources: list[PredictionResource] | None = Field(default=None, description="Prediction resources. This is only populated for `EDGE` predictions.") 43 | results: list[object] | None = Field(default=None, description="Prediction results.") 44 | latency: float | None = Field(default=None, description="Prediction latency in milliseconds.") 45 | error: str | None = Field(default=None, description="Prediction error. This is `None` if the prediction completed successfully.") 46 | logs: str | None = Field(default=None, description="Prediction logs.") 47 | created: str = Field(description="Date created.") -------------------------------------------------------------------------------- /fxn/types/predictor.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from pydantic import AliasChoices, BaseModel, ConfigDict, Field 7 | from typing import Any, Literal 8 | 9 | from .dtype import Dtype 10 | from .user import User 11 | 12 | PredictorAccess = Literal["public", "private", "unlisted"] 13 | 14 | PredictorStatus = Literal["compiling", "active", "archived"] 15 | 16 | class EnumerationMember (BaseModel): 17 | """ 18 | Predictor parameter enumeration member. 19 | 20 | Members: 21 | name (str): Enumeration member name. 22 | value (str | int): Enumeration member value. 23 | """ 24 | name: str = Field(description="Enumeration member name.") 25 | value: str | int = Field(description="Enumeration member value.") 26 | 27 | class Parameter (BaseModel): 28 | """ 29 | Predictor parameter. 30 | 31 | Members: 32 | name (str): Parameter name. 33 | type (Dtype): Parameter type. This is `None` if the type is unknown or unsupported by Function. 34 | description (str): Parameter description. 35 | optional (bool): Whether the parameter is optional. 36 | range (tuple): Parameter value range for numeric parameters. 37 | enumeration (list): Parameter value choices for enumeration parameters. 38 | value_schema (dict): Parameter JSON schema. This is only populated for `list` and `dict` parameters. 39 | """ 40 | name: str = Field(description="Parameter name.") 41 | type: Dtype | None = Field(default=None, description="Parameter type. This is `None` if the type is unknown or unsupported by Function.") 42 | description: str | None = Field(default=None, description="Parameter description.") 43 | optional: bool | None = Field(default=None, description="Whether the parameter is optional.") 44 | range: tuple[float, float] | None = Field(default=None, description="Parameter value range for numeric parameters.") 45 | enumeration: list[EnumerationMember] | None = Field(default=None, description="Parameter value choices for enumeration parameters.") 46 | value_schema: dict[str, Any] | None = Field(default=None, description="Parameter JSON schema. 
This is only populated for `list` and `dict` parameters.", serialization_alias="schema", validation_alias=AliasChoices("schema", "value_schema")) 47 | model_config = ConfigDict(arbitrary_types_allowed=True) 48 | 49 | class Signature (BaseModel): 50 | """ 51 | Predictor signature. 52 | 53 | Members: 54 | inputs (list): Input parameters. 55 | outputs (list): Output parameters. 56 | """ 57 | inputs: list[Parameter] = Field(description="Input parameters.") 58 | outputs: list[Parameter] = Field(description="Output parameters.") 59 | 60 | class Predictor (BaseModel): 61 | """ 62 | Predictor. 63 | 64 | Members: 65 | tag (str): Predictor tag. 66 | owner (User): Predictor owner. 67 | name (str): Predictor name. 68 | status (PredictorStatus): Predictor status. 69 | access (PredictorAccess): Predictor access. 70 | signature (Signature): Predictor signature. 71 | created (str): Date created. 72 | description (str): Predictor description. 73 | card (str): Predictor card. 74 | media (str): Predictor media URL. 75 | license (str): Predictor license URL. 76 | """ 77 | tag: str = Field(description="Predictor tag.") 78 | owner: User = Field(description="Predictor owner.") 79 | name: str = Field(description="Predictor name.") 80 | status: PredictorStatus = Field(description="Predictor status.") 81 | access: PredictorAccess = Field(description="Predictor access.") 82 | signature: Signature = Field(description="Predictor signature.") 83 | created: str = Field(description="Date created.") 84 | description: str | None = Field(default=None, description="Predictor description.") 85 | card: str | None = Field(default=None, description="Predictor card.") 86 | media: str | None = Field(default=None, description="Predictor media URL.") 87 | license: str | None = Field(default=None, description="Predictor license URL.") -------------------------------------------------------------------------------- /fxn/types/user.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from pydantic import BaseModel, Field 7 | 8 | class User (BaseModel): 9 | """ 10 | Function user profile. 11 | 12 | Members: 13 | username (str): Username. 14 | email (str): User email address. 15 | created (str): Date created. 16 | name (str): User display name. 17 | avatar (str): User avatar URL. 18 | bio (str): User bio. 19 | website (str): User website. 20 | github (str): User GitHub handle. 21 | """ 22 | username: str = Field(description="Username.") 23 | email: str | None = Field(default=None, description="User email address.") 24 | created: str | None = Field(default=None, description="Date created.") 25 | name: str | None = Field(default=None, description="User display name.") 26 | avatar: str | None = Field(default=None, description="User avatar URL.") 27 | bio: str | None = Field(default=None, description="User bio.") 28 | website: str | None = Field(default=None, description="User website.") 29 | github: str | None = Field(default=None, description="User GitHub handle.") -------------------------------------------------------------------------------- /fxn/version.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
4 | # 5 | 6 | __version__ = "0.0.54" -------------------------------------------------------------------------------- /fxnc.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from argparse import ArgumentParser 7 | from pathlib import Path 8 | from requests import get 9 | 10 | parser = ArgumentParser() 11 | parser.add_argument("--version", type=str, required=True) 12 | 13 | def _download_fxnc (name: str, version: str, path: Path): 14 | url = f"https://cdn.fxn.ai/fxnc/{version}/{name}" 15 | response = get(url) 16 | response.raise_for_status() 17 | path.parent.mkdir(parents=True, exist_ok=True) 18 | with open(path, "wb") as f: 19 | f.write(response.content) 20 | print(f"Wrote {name} {version} to path: {path}") 21 | 22 | def main (): 23 | args = parser.parse_args() 24 | LIB_PATH_BASE = Path("fxn") / "lib" 25 | DOWNLOADS = [ 26 | ("Function-macos-x86_64.dylib", LIB_PATH_BASE / "macos" / "x86_64" / "Function.dylib"), 27 | ("Function-macos-arm64.dylib", LIB_PATH_BASE / "macos" / "arm64" / "Function.dylib"), 28 | ("Function-win-x86_64.dll", LIB_PATH_BASE / "windows" / "x86_64" / "Function.dll"), 29 | ("Function-win-arm64.dll", LIB_PATH_BASE / "windows" / "arm64" / "Function.dll"), 30 | ("libFunction-linux-x86_64.so", LIB_PATH_BASE / "linux" / "x86_64" / "libFunction.so"), 31 | ("libFunction-linux-arm64.so", LIB_PATH_BASE / "linux" / "arm64" / "libFunction.so"), 32 | ] 33 | for name, path in DOWNLOADS: 34 | _download_fxnc(name, args.version, path) 35 | 36 | if __name__ == "__main__": 37 | main() -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | [project] 7 | name = "fxn" 8 | dynamic = ["version"] 9 | description = "Run prediction functions locally in Python. Register at https://fxn.ai." 
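# Note: `version` is intentionally absent here; it is declared in `dynamic` above and is resolved from `fxn.version.__version__` via `[tool.setuptools.dynamic]` at the bottom of this file.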
10 | readme = "README.md" 11 | dependencies = [ 12 | "numpy", 13 | "pillow", 14 | "pydantic>=2.0", 15 | "requests", 16 | "rich", 17 | "typer" 18 | ] 19 | requires-python = ">=3.10" 20 | authors = [ { name = "NatML Inc.", email = "hi@fxn.ai" } ] 21 | license = { file = "LICENSE" } 22 | classifiers = [ 23 | "Programming Language :: Python :: 3", 24 | "License :: OSI Approved :: Apache Software License", 25 | "Operating System :: OS Independent", 26 | "Topic :: Scientific/Engineering :: Image Recognition", 27 | "Topic :: Software Development :: Libraries", 28 | ] 29 | 30 | [project.urls] 31 | Homepage = "https://fxn.ai" 32 | Documentation = "https://docs.fxn.ai" 33 | Source = "https://github.com/fxnai/fxn" 34 | Changelog = "https://github.com/fxnai/fxn/blob/main/Changelog.md" 35 | 36 | [project.optional-dependencies] 37 | # None 38 | 39 | [project.scripts] 40 | fxn = "fxn.cli.__init__:app" 41 | 42 | [build-system] 43 | requires = ["setuptools>=42", "wheel"] 44 | build-backend = "setuptools.build_meta" 45 | 46 | [tool.setuptools] 47 | include-package-data = true 48 | 49 | [tool.setuptools.packages.find] 50 | include = ["fxn", "fxn*"] 51 | namespaces = false 52 | 53 | [tool.setuptools.package-data] 54 | "fxn.lib" = ["macos/*/*.dylib", "windows/*/*.dll", "linux/*/*.so"] 55 | 56 | [tool.setuptools.dynamic] 57 | version = { attr = "fxn.version.__version__" } -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | pillow 3 | requests 4 | rich 5 | typer 6 | pydantic 7 | pytest 8 | pytest-asyncio -------------------------------------------------------------------------------- /test/compile_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from fxn import compile 7 | from fxn.compile import PredictorSpec 8 | import numpy as np 9 | 10 | def test_populate_predictor_spec (): 11 | @compile( 12 | "@yusuf/test", 13 | description="Test function.", 14 | trace_modules=[np], 15 | targets=["android", "macos"], 16 | hidden_attribute="kept" 17 | ) 18 | def predictor () -> str: 19 | return "Hello world" 20 | spec: PredictorSpec = predictor.__predictor_spec 21 | assert spec is not None 22 | assert spec.hidden_attribute is not None -------------------------------------------------------------------------------- /test/media/cat.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fxnai/fxn/cfb5d9a0ae89cb89a5a6d58615662503c701668a/test/media/cat.jpg -------------------------------------------------------------------------------- /test/media/pexels-samson-katt-5255233.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fxnai/fxn/cfb5d9a0ae89cb89a5a6d58615662503c701668a/test/media/pexels-samson-katt-5255233.jpg -------------------------------------------------------------------------------- /test/predict_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from fxn import Function 7 | import pytest 8 | from typing import Iterator 9 | 10 | fxn = Function() 11 | 12 | @fxn.beta.predict("@yusuf/area") 13 | def compute_area (radius): 14 | ... 
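# Note: the `@fxn.beta.predict` decorator supplies the implementation for the decorated stubs in this module, so calling a stub creates a prediction with the given arguments (remotely when `remote=True`); the `...` bodies are intentional.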
15 | 16 | @fxn.beta.predict("@yusuf/area", remote=True) 17 | def compute_area_remote (radius): 18 | ... 19 | 20 | @fxn.beta.predict("@yusuf/split-string") 21 | def split_sentence (sentence: str) -> Iterator[str]: 22 | ... 23 | 24 | @fxn.beta.predict("@yusuf/split-string", remote=True) 25 | def split_sentence_remote (sentence: str) -> Iterator[str]: 26 | ... 27 | 28 | def test_decorated_create_prediction (): 29 | area = compute_area(2) 30 | assert isinstance(area, float) 31 | 32 | def test_decorated_create_remote_prediction (): 33 | area = compute_area_remote(2) 34 | assert isinstance(area, float) 35 | 36 | @pytest.mark.skip 37 | def test_decorated_stream_prediction (): # INCOMPLETE 38 | pass 39 | 40 | @pytest.mark.skip 41 | def test_decorated_stream_remote_prediction (): # INCOMPLETE 42 | pass -------------------------------------------------------------------------------- /test/prediction_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from fxn import Function 7 | import pytest 8 | 9 | def test_create_raw_prediction (): 10 | fxn = Function() 11 | prediction = fxn.predictions.create(tag="@fxn/greeting") 12 | assert prediction is not None 13 | assert prediction.configuration is not None 14 | assert prediction.resources is None 15 | 16 | def test_create_prediction (): 17 | fxn = Function() 18 | radius = 4 19 | prediction = fxn.predictions.create( 20 | tag="@yusuf/area", 21 | inputs={ "radius": radius } 22 | ) 23 | assert prediction.results 24 | assert isinstance(prediction.results[0], float) 25 | 26 | def test_stream_prediction (): 27 | fxn = Function() 28 | sentence = "Hello world" 29 | stream = fxn.predictions.stream( 30 | tag="@yusuf/streaming", 31 | inputs={ "sentence": sentence } 32 | ) 33 | for prediction in stream: 34 | print(prediction) 35 | 36 | def test_create_invalid_prediction (): 37 | fxn = Function() 38 | with pytest.raises(RuntimeError): 39 | fxn.predictions.create(tag="@yusu/invalid-predictor") -------------------------------------------------------------------------------- /test/predictor_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from fxn import Function 7 | 8 | def test_retrieve_predictor (): 9 | fxn = Function() 10 | predictor = fxn.predictors.retrieve("@fxn/greeting") 11 | assert predictor is not None -------------------------------------------------------------------------------- /test/remote_prediction_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 4 | # 5 | 6 | from fxn import Function 7 | 8 | def test_create_remote_prediction (): 9 | fxn = Function() 10 | prediction = fxn.beta.predictions.remote.create( 11 | tag="@fxn/greeting", 12 | inputs={ "name": "Yusuf" } 13 | ) 14 | assert prediction.results 15 | assert isinstance(prediction.results[0], str) -------------------------------------------------------------------------------- /test/user_test.py: -------------------------------------------------------------------------------- 1 | # 2 | # Function 3 | # Copyright © 2025 NatML Inc. All Rights Reserved. 
4 | # 5 | 6 | from fxn import Function 7 | 8 | def test_retrieve_user (): 9 | fxn = Function() 10 | user = fxn.users.retrieve() 11 | assert user is not None --------------------------------------------------------------------------------