├── .git-blame-ignore-revs ├── .github ├── labeler.yml ├── release-drafter.yml └── workflows │ ├── ci.yml │ ├── scala-steward.yml │ └── test-report.yml ├── .gitignore ├── .scala-steward.conf ├── .scalafmt.conf ├── LICENSE ├── README.md ├── banner.jpg ├── build.sbt ├── core └── src │ ├── main │ └── scala │ │ └── sttp │ │ └── openai │ │ ├── OpenAI.scala │ │ ├── OpenAIExceptions.scala │ │ ├── OpenAISyncClient.scala │ │ ├── json │ │ ├── SnakePickle.scala │ │ └── SttpUpickleApiExtension.scala │ │ └── requests │ │ ├── admin │ │ ├── AdminApiKeyRequestBody.scala │ │ ├── AdminApiKeyResponse.scala │ │ └── QueryParameters.scala │ │ ├── assistants │ │ ├── AssistantsRequestBody.scala │ │ └── AssistantsResponseData.scala │ │ ├── audio │ │ ├── AudioResponseData.scala │ │ ├── RecognitionModel.scala │ │ ├── speech │ │ │ └── SpeechRequestBody.scala │ │ ├── transcriptions │ │ │ └── TranscriptionConfig.scala │ │ └── translations │ │ │ └── TranslationConfig.scala │ │ ├── batch │ │ ├── BatchRequestBody.scala │ │ ├── BatchResponse.scala │ │ └── QueryParameters.scala │ │ ├── completions │ │ ├── CompletionsRequestBody.scala │ │ ├── CompletionsResponseData.scala │ │ ├── Stop.scala │ │ ├── Usage.scala │ │ └── chat │ │ │ ├── Audio.scala │ │ │ ├── ChatChunkRequestResponseData.scala │ │ │ ├── ChatRequestBody.scala │ │ │ ├── ChatRequestResponseData.scala │ │ │ ├── FunctionCall.scala │ │ │ ├── ListChatCompletionsQueryParameters.scala │ │ │ ├── ListMessagesQueryParameters.scala │ │ │ ├── Role.scala │ │ │ ├── SchemaSupport.scala │ │ │ ├── ToolCall.scala │ │ │ └── message │ │ │ ├── Attachment.scala │ │ │ ├── Content.scala │ │ │ ├── Message.scala │ │ │ ├── Tool.scala │ │ │ ├── ToolChoice.scala │ │ │ ├── ToolResource.scala │ │ │ └── ToolResources.scala │ │ ├── embeddings │ │ ├── EmbeddingsRequestBody.scala │ │ └── EmbeddingsResponseBody.scala │ │ ├── files │ │ └── FilesResponseData.scala │ │ ├── finetuning │ │ ├── FineTuningJobRequestBody.scala │ │ ├── FineTuningJobResponse.scala │ │ ├── Hyperparameters.scala │ │ ├── Integration.scala │ │ ├── Method.scala │ │ ├── QueryParameters.scala │ │ └── Type.scala │ │ ├── images │ │ ├── ImageResponseData.scala │ │ ├── ResponseFormat.scala │ │ ├── Size.scala │ │ ├── creation │ │ │ └── ImageCreationRequestBody.scala │ │ ├── edit │ │ │ └── ImageEditsConfig.scala │ │ └── variations │ │ │ └── ImageVariationsConfig.scala │ │ ├── models │ │ └── ModelsResponseData.scala │ │ ├── moderations │ │ ├── ModerationsRequestBody.scala │ │ └── ModerationsResponseData.scala │ │ ├── threads │ │ ├── QueryParameters.scala │ │ ├── ThreadsRequestBody.scala │ │ ├── ThreadsResponseData.scala │ │ ├── messages │ │ │ ├── ThreadMessagesRequestBody.scala │ │ │ └── ThreadMessagesResponseData.scala │ │ └── runs │ │ │ ├── ThreadRunsRequestBody.scala │ │ │ └── ThreadRunsResponseData.scala │ │ ├── upload │ │ ├── UploadRequestBody.scala │ │ └── UploadResponse.scala │ │ └── vectorstore │ │ ├── ExpiresAfter.scala │ │ ├── VectorStoreRequestBody.scala │ │ ├── VectorStoreResponseData.scala │ │ └── file │ │ ├── FileStatus.scala │ │ ├── VectorStoreFileRequestBody.scala │ │ └── VectorStoreFileResponseData.scala │ └── test │ └── scala │ └── sttp │ └── openai │ ├── client │ └── SyncClientSpec.scala │ ├── fixtures │ ├── AdminFixture.scala │ ├── AssistantsFixture.scala │ ├── AudioFixture.scala │ ├── BatchFixture.scala │ ├── ChatChunkFixture.scala │ ├── ChatFixture.scala │ ├── CompletionsFixture.scala │ ├── EmbeddingsFixture.scala │ ├── ErrorFixture.scala │ ├── FilesResponse.scala │ ├── FineTuningJobFixture.scala │ ├── 
ImageCreationFixture.scala │ ├── JsonSchemaFixture.scala │ ├── ModelsGetResponse.scala │ ├── ModerationsFixture.scala │ ├── ThreadMessagesFixture.scala │ ├── ThreadRunsFixture.scala │ ├── ThreadsFixture.scala │ ├── ToolFixture.scala │ ├── UploadFixture.scala │ ├── VectorStoreFileFixture.scala │ └── VectorStoreFixture.scala │ ├── requests │ ├── BatchDataSpec.scala │ ├── admin │ │ └── AdminApiKeyDataSpec.scala │ ├── assistants │ │ └── AssistantsDataSpec.scala │ ├── audio │ │ ├── AudioCreationDataSpec.scala │ │ └── speech │ │ │ └── SpeechDataSpec.scala │ ├── completions │ │ ├── CompletionsDataSpec.scala │ │ └── chat │ │ │ ├── ChatChunkDataSpec.scala │ │ │ ├── ChatDataSpec.scala │ │ │ ├── JsonSchemaSpec.scala │ │ │ └── ToolSpec.scala │ ├── embeddings │ │ └── EmbeddingsDataSpec.scala │ ├── files │ │ └── FilesResponseDataSpec.scala │ ├── finetuning │ │ └── FineTuningDataSpec.scala │ ├── images │ │ └── creation │ │ │ └── ImageCreationDataSpec.scala │ ├── models │ │ └── ModelsGetResponseDataSpec.scala │ ├── moderations │ │ └── ModerationsDataSpec.scala │ ├── threads │ │ ├── ThreadsDataSpec.scala │ │ ├── messages │ │ │ └── ThreadMessagesDataSpec.scala │ │ └── runs │ │ │ └── ThreadRunsDataSpec.scala │ ├── upload │ │ └── UploadDataSpec.scala │ └── vectorstore │ │ ├── VectorStoreDataSpec.scala │ │ └── file │ │ └── VectorStoreFileDataSpec.scala │ └── utils │ ├── ChatCompletionFixtures.scala │ └── JsonUtils.scala ├── examples └── src │ └── main │ ├── resources │ └── logback.xml │ └── scala │ └── examples │ ├── ChatProxy.scala │ └── StrictStructuredFunctionCallingExample.scala ├── project ├── Dependencies.scala ├── build.properties └── plugins.sbt └── streaming ├── akka └── src │ ├── main │ └── scala │ │ └── sttp │ │ └── openai │ │ └── streaming │ │ └── akka │ │ └── package.scala │ └── test │ └── scala │ └── sttp │ └── openai │ └── streaming │ └── akka │ └── AkkaClientSpec.scala ├── fs2 └── src │ ├── main │ └── scala │ │ └── sttp │ │ └── openai │ │ └── streaming │ │ └── fs2 │ │ └── package.scala │ └── test │ └── scala │ └── sttp │ └── openai │ └── streaming │ └── fs2 │ └── Fs2ClientSpec.scala ├── ox └── src │ ├── main │ └── scala │ │ └── sttp │ │ └── openai │ │ └── streaming │ │ └── ox │ │ └── oxStreaming.scala │ └── test │ └── scala │ └── sttp │ └── openai │ └── streaming │ └── ox │ └── OxClientSpec.scala ├── pekko └── src │ ├── main │ └── scala │ │ └── sttp │ │ └── openai │ │ └── streaming │ │ └── pekko │ │ └── package.scala │ └── test │ └── scala │ └── sttp │ └── openai │ └── streaming │ └── pekko │ └── PekkoClientSpec.scala └── zio └── src ├── main └── scala │ └── sttp │ └── openai │ └── streaming │ └── zio │ └── package.scala └── test └── scala └── sttp └── openai └── streaming └── zio └── ZioClientSpec.scala /.git-blame-ignore-revs: -------------------------------------------------------------------------------- 1 | # Scala Steward: Reformat with scalafmt 3.7.17 2 | 266a025b23369b5c3ef80bb96bccc87888c3971c 3 | 4 | # Scala Steward: Reformat with scalafmt 3.8.0 5 | 5b2bb84be0a9e9d3334b7d23c1e56dd47c0acc50 6 | 7 | # Scala Steward: Reformat with scalafmt 3.8.1 8 | 0da64294cb90279afc959af8a39a19227ad64372 9 | 10 | # Scala Steward: Reformat with scalafmt 3.8.2 11 | c1761dd17f98157d61fc8639d12803b6eafc1346 12 | 13 | # Scala Steward: Reformat with scalafmt 3.8.6 14 | dd0e99544340af1bde22ceab886cab7e6c1f5cb1 15 | -------------------------------------------------------------------------------- /.github/labeler.yml: -------------------------------------------------------------------------------- 1 | version: 
1 2 | labels: 3 | - label: "automerge" 4 | authors: ["softwaremill-ci"] 5 | files: 6 | - "build.sbt" 7 | - "project/plugins.sbt" 8 | - "project/build.properties" 9 | - "project/Dependencies.scala" 10 | - label: "dependency" 11 | authors: ["softwaremill-ci"] 12 | files: 13 | - "build.sbt" 14 | - "project/plugins.sbt" 15 | - "project/build.properties" 16 | - "project/Dependencies.scala" 17 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | categories: 2 | - title: 'Dependency updates' 3 | labels: 4 | - 'dependency' 5 | template: | 6 | ## What’s Changed 7 | 8 | $CHANGES -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | pull_request: 4 | branches: ['**'] 5 | push: 6 | branches: ['**'] 7 | tags: [v*] 8 | jobs: 9 | build: 10 | uses: softwaremill/github-actions-workflows/.github/workflows/build-scala.yml@main 11 | # run on 1) push, 2) external PRs, 3) softwaremill-ci PRs 12 | # do not run on internal, non-steward PRs since those will be run by push to branch 13 | if: | 14 | github.event_name == 'push' || 15 | github.event.pull_request.head.repo.full_name != github.repository || 16 | github.event.pull_request.user.login == 'softwaremill-ci' 17 | with: 18 | java-version: '21' 19 | compile-documentation: true 20 | 21 | publish: 22 | uses: softwaremill/github-actions-workflows/.github/workflows/publish-release.yml@main 23 | needs: [build] 24 | if: github.event_name != 'pull_request' && (startsWith(github.ref, 'refs/tags/v')) 25 | secrets: inherit 26 | with: 27 | java-version: '21' 28 | java-opts: "-Xmx4G" 29 | 30 | label: 31 | # only for PRs by softwaremill-ci 32 | if: github.event.pull_request.user.login == 'softwaremill-ci' 33 | uses: softwaremill/github-actions-workflows/.github/workflows/label.yml@main 34 | 35 | auto-merge: 36 | # only for PRs by softwaremill-ci 37 | if: github.event.pull_request.user.login == 'softwaremill-ci' 38 | needs: [ build, label ] 39 | uses: softwaremill/github-actions-workflows/.github/workflows/auto-merge.yml@main -------------------------------------------------------------------------------- /.github/workflows/scala-steward.yml: -------------------------------------------------------------------------------- 1 | name: Scala Steward 2 | 3 | # This workflow will launch at 00:00 every day 4 | on: 5 | schedule: 6 | - cron: '0 0 * * *' 7 | workflow_dispatch: 8 | 9 | jobs: 10 | scala-steward: 11 | uses: softwaremill/github-actions-workflows/.github/workflows/scala-steward.yml@main 12 | with: 13 | java-version: '21' 14 | secrets: 15 | repo-github-token: ${{secrets.REPO_GITHUB_TOKEN}} -------------------------------------------------------------------------------- /.github/workflows/test-report.yml: -------------------------------------------------------------------------------- 1 | name: 'Test Report' 2 | on: 3 | workflow_run: 4 | workflows: ['CI'] 5 | types: 6 | - completed 7 | 8 | permissions: 9 | contents: read 10 | actions: read 11 | checks: write 12 | 13 | jobs: 14 | test-report: 15 | uses: softwaremill/github-actions-workflows/.github/workflows/test-report.yml@main -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.class 2 | *.log 3 | 
4 | .cache 5 | .env 6 | .envrc 7 | .history 8 | .sdkmanrc 9 | .lib/ 10 | dist/* 11 | target/ 12 | lib_managed/ 13 | local.conf 14 | src_managed/ 15 | project/boot/ 16 | project/plugins/project/ 17 | 18 | .idea* 19 | 20 | # Metals 21 | .metals/ 22 | .bsp/ 23 | .bloop/ 24 | metals.sbt 25 | .vscode 26 | .scala-build -------------------------------------------------------------------------------- /.scala-steward.conf: -------------------------------------------------------------------------------- 1 | updates.pin = [ 2 | {groupId = "com.typesafe.akka", version = "2.6."}, 3 | {groupId = "org.scala-lang", artifactId = "scala3-library", version = "3.3."}, 4 | {groupId = "org.scala-lang", artifactId = "scala3-library_sjs1", version = "3.3."}, 5 | {groupId = "org.scala-lang", artifactId = "scala-library", version = "2.13."} 6 | ] 7 | -------------------------------------------------------------------------------- /.scalafmt.conf: -------------------------------------------------------------------------------- 1 | version = 3.8.6 2 | maxColumn = 140 3 | rewrite.rules = [RedundantBraces, RedundantParens, SortImports] 4 | runner.dialect = scala3 5 | -------------------------------------------------------------------------------- /banner.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/softwaremill/sttp-openai/d8133d485270b2eb8f7d06cc7364fcd8f10a3201/banner.jpg -------------------------------------------------------------------------------- /build.sbt: -------------------------------------------------------------------------------- 1 | import com.softwaremill.SbtSoftwareMillCommon.commonSmlBuildSettings 2 | import com.softwaremill.Publish.ossPublishSettings 3 | import Dependencies._ 4 | 5 | val scala2 = List("2.13.16") 6 | val scala3 = List("3.3.6") 7 | 8 | def dependenciesFor(version: String)(deps: (Option[(Long, Long)] => ModuleID)*): Seq[ModuleID] = 9 | deps.map(_.apply(CrossVersion.partialVersion(version))) 10 | 11 | lazy val commonSettings = commonSmlBuildSettings ++ ossPublishSettings ++ Seq( 12 | organization := "com.softwaremill.sttp.openai" 13 | ) 14 | 15 | lazy val root = (project in file(".")) 16 | .settings(commonSettings: _*) 17 | .settings(publish / skip := true, name := "sttp-openai", scalaVersion := scala2.head) 18 | .aggregate(allAgregates: _*) 19 | 20 | lazy val allAgregates = core.projectRefs ++ 21 | fs2.projectRefs ++ 22 | zio.projectRefs ++ 23 | pekko.projectRefs ++ 24 | akka.projectRefs ++ 25 | ox.projectRefs ++ 26 | examples.projectRefs ++ 27 | docs.projectRefs 28 | 29 | lazy val core = (projectMatrix in file("core")) 30 | .jvmPlatform( 31 | scalaVersions = scala2 ++ scala3 32 | ) 33 | .settings( 34 | libraryDependencies ++= Seq( 35 | Libraries.tapirApispecDocs, 36 | Libraries.uJsonCirce, 37 | Libraries.uPickle 38 | ) ++ Libraries.sttpApispec ++ Libraries.sttpClient ++ Seq(Libraries.scalaTest) 39 | ) 40 | .settings(commonSettings: _*) 41 | 42 | lazy val fs2 = (projectMatrix in file("streaming/fs2")) 43 | .jvmPlatform( 44 | scalaVersions = scala2 ++ scala3 45 | ) 46 | .settings(commonSettings) 47 | .settings( 48 | libraryDependencies ++= Libraries.sttpClientFs2 49 | ) 50 | .dependsOn(core % "compile->compile;test->test") 51 | 52 | lazy val zio = (projectMatrix in file("streaming/zio")) 53 | .jvmPlatform( 54 | scalaVersions = scala2 ++ scala3 55 | ) 56 | .settings(commonSettings) 57 | .settings( 58 | libraryDependencies += Libraries.sttpClientZio 59 | ) 60 | .dependsOn(core % "compile->compile;test->test") 61 | 
62 | lazy val pekko = (projectMatrix in file("streaming/pekko")) 63 | .jvmPlatform( 64 | scalaVersions = scala2 ++ scala3 65 | ) 66 | .settings(commonSettings) 67 | .settings( 68 | libraryDependencies ++= Libraries.sttpClientPekko 69 | ) 70 | .dependsOn(core % "compile->compile;test->test") 71 | 72 | lazy val akka = (projectMatrix in file("streaming/akka")) 73 | .jvmPlatform( 74 | scalaVersions = scala2 75 | ) 76 | .settings(commonSettings) 77 | .settings( 78 | libraryDependencies ++= Libraries.sttpClientAkka 79 | ) 80 | .dependsOn(core % "compile->compile;test->test") 81 | 82 | lazy val ox = (projectMatrix in file("streaming/ox")) 83 | .jvmPlatform( 84 | scalaVersions = scala3 85 | ) 86 | .settings(commonSettings) 87 | .settings( 88 | libraryDependencies ++= Libraries.sttpClientOx 89 | ) 90 | .dependsOn(core % "compile->compile;test->test") 91 | 92 | lazy val examples = (projectMatrix in file("examples")) 93 | .jvmPlatform( 94 | scalaVersions = scala3 95 | ) 96 | .settings(commonSettings) 97 | .settings( 98 | libraryDependencies ++= Seq( 99 | "com.softwaremill.sttp.tapir" %% "tapir-netty-server-sync" % "1.11.33", 100 | "ch.qos.logback" % "logback-classic" % "1.5.6" 101 | ) ++ Libraries.sttpClientOx, 102 | publish / skip := true 103 | ) 104 | .dependsOn(ox) 105 | 106 | val compileDocumentation: TaskKey[Unit] = taskKey[Unit]("Compiles docs module throwing away its output") 107 | compileDocumentation := 108 | (docs.jvm(scala2.head) / mdoc).toTask(" --out target/sttp-openai-docs").value 109 | 110 | lazy val docs = (projectMatrix in file("generated-docs")) // important: it must not be docs/ 111 | .enablePlugins(MdocPlugin) 112 | .settings(commonSettings) 113 | .settings( 114 | mdocIn := file("README.md"), 115 | moduleName := "sttp-openai-docs", 116 | mdocOut := file("generated-docs/README.md"), 117 | mdocExtraArguments := Seq("--clean-target", "--disable-using-directives"), 118 | publishArtifact := false, 119 | name := "docs", 120 | evictionErrorLevel := Level.Info 121 | ) 122 | .dependsOn(core, fs2, zio, akka, pekko) 123 | .jvmPlatform(scalaVersions = scala2) 124 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/OpenAIExceptions.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai 2 | 3 | import sttp.client4.ResponseException 4 | import sttp.client4.ResponseException.{DeserializationException, UnexpectedStatusCode} 5 | import sttp.model.ResponseMetadata 6 | 7 | object OpenAIExceptions { 8 | sealed abstract class OpenAIException( 9 | val message: Option[String], 10 | val `type`: Option[String], 11 | val param: Option[String], 12 | val code: Option[String], 13 | val cause: ResponseException[String] 14 | ) extends Exception(cause.getMessage, cause) 15 | 16 | object OpenAIException { 17 | class DeserializationOpenAIException( 18 | message: String, 19 | cause: DeserializationException 20 | ) extends OpenAIException(Some(message), None, None, None, cause) 21 | 22 | object DeserializationOpenAIException { 23 | def apply(cause: DeserializationException): DeserializationOpenAIException = 24 | new DeserializationOpenAIException(cause.getMessage, cause) 25 | 26 | def apply(cause: Exception, meta: ResponseMetadata): DeserializationOpenAIException = apply( 27 | DeserializationException(cause.getMessage, cause, meta) 28 | ) 29 | } 30 | class RateLimitException( 31 | message: Option[String], 32 | `type`: Option[String], 33 | param: Option[String], 34 | code: Option[String], 35 | 
cause: UnexpectedStatusCode[String] 36 | ) extends OpenAIException(message, `type`, param, code, cause) 37 | 38 | class InvalidRequestException( 39 | message: Option[String], 40 | `type`: Option[String], 41 | param: Option[String], 42 | code: Option[String], 43 | cause: UnexpectedStatusCode[String] 44 | ) extends OpenAIException(message, `type`, param, code, cause) 45 | 46 | class AuthenticationException( 47 | message: Option[String], 48 | `type`: Option[String], 49 | param: Option[String], 50 | code: Option[String], 51 | cause: UnexpectedStatusCode[String] 52 | ) extends OpenAIException(message, `type`, param, code, cause) 53 | 54 | class PermissionException( 55 | message: Option[String], 56 | `type`: Option[String], 57 | param: Option[String], 58 | code: Option[String], 59 | cause: UnexpectedStatusCode[String] 60 | ) extends OpenAIException(message, `type`, param, code, cause) 61 | 62 | class TryAgain( 63 | message: Option[String], 64 | `type`: Option[String], 65 | param: Option[String], 66 | code: Option[String], 67 | cause: UnexpectedStatusCode[String] 68 | ) extends OpenAIException(message, `type`, param, code, cause) 69 | 70 | class ServiceUnavailableException( 71 | message: Option[String], 72 | `type`: Option[String], 73 | param: Option[String], 74 | code: Option[String], 75 | cause: UnexpectedStatusCode[String] 76 | ) extends OpenAIException(message, `type`, param, code, cause) 77 | 78 | class APIException( 79 | message: Option[String], 80 | `type`: Option[String], 81 | param: Option[String], 82 | code: Option[String], 83 | cause: UnexpectedStatusCode[String] 84 | ) extends OpenAIException(message, `type`, param, code, cause) 85 | } 86 | } 87 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/json/SnakePickle.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.json 2 | 3 | /** An object that transforms all snake_case keys into camelCase [[https://com-lihaoyi.github.io/upickle/#CustomConfiguration]] */ 4 | object SnakePickle extends upickle.AttributeTagged { 5 | private def camelToSnake(s: String): String = 6 | s.replaceAll("([A-Z])", "#$1").split('#').map(_.toLowerCase).mkString("_") 7 | 8 | private def snakeToCamel(s: String): String = { 9 | val res = s.split("_", -1).map(x => s"${x(0).toUpper}${x.drop(1)}").mkString 10 | s"${s(0).toLower}${res.drop(1)}" 11 | } 12 | 13 | override def objectAttributeKeyReadMap(s: CharSequence): String = 14 | snakeToCamel(s.toString) 15 | 16 | override def objectAttributeKeyWriteMap(s: CharSequence): String = 17 | camelToSnake(s.toString) 18 | 19 | override def objectTypeKeyReadMap(s: CharSequence): String = 20 | snakeToCamel(s.toString) 21 | 22 | override def objectTypeKeyWriteMap(s: CharSequence): String = 23 | camelToSnake(s.toString) 24 | 25 | /** This is required in order to parse null values into Scala's Option */ 26 | override implicit def OptionWriter[T: SnakePickle.Writer]: Writer[Option[T]] = 27 | implicitly[SnakePickle.Writer[T]].comap[Option[T]] { 28 | case None => null.asInstanceOf[T] 29 | case Some(x) => x 30 | } 31 | 32 | override implicit def OptionReader[T: SnakePickle.Reader]: Reader[Option[T]] = 33 | new Reader.Delegate[Any, Option[T]](implicitly[SnakePickle.Reader[T]].map(Some(_))) { 34 | override def visitNull(index: Int) = None 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/json/SttpUpickleApiExtension.scala: 
-------------------------------------------------------------------------------- 1 | package sttp.openai.json 2 | 3 | import sttp.capabilities.Streams 4 | import sttp.client4.ResponseException.UnexpectedStatusCode 5 | import sttp.client4._ 6 | import sttp.client4.json._ 7 | import sttp.client4.upicklejson.SttpUpickleApi 8 | import sttp.model.ResponseMetadata 9 | import sttp.model.StatusCode._ 10 | import sttp.openai.OpenAIExceptions.OpenAIException 11 | import sttp.openai.OpenAIExceptions.OpenAIException._ 12 | 13 | import java.io.InputStream 14 | 15 | /** An sttp upickle api extension that deserializes JSON with snake_case keys into case classes with fields corresponding to keys in 16 | * camelCase and maps errors to OpenAIException subclasses. 17 | */ 18 | object SttpUpickleApiExtension extends SttpUpickleApi { 19 | override val upickleApi: SnakePickle.type = SnakePickle 20 | 21 | def asStreamUnsafe_parseErrors[S](s: Streams[S]): StreamResponseAs[Either[OpenAIException, s.BinaryStream], S] = 22 | asStreamUnsafe(s).mapWithMetadata { (body, meta) => 23 | body.left.map(errorBody => httpToOpenAIError(UnexpectedStatusCode(errorBody, meta))) 24 | } 25 | 26 | def asInputStreamUnsafe_parseErrors: ResponseAs[Either[OpenAIException, InputStream]] = 27 | asInputStreamUnsafe.mapWithMetadata { (body, meta) => 28 | body.left.map(errorBody => httpToOpenAIError(UnexpectedStatusCode(errorBody, meta))) 29 | } 30 | 31 | def asJson_parseErrors[B: upickleApi.Reader: IsOption]: ResponseAs[Either[OpenAIException, B]] = 32 | asString.mapWithMetadata(deserializeRightWithMappedExceptions(deserializeJsonSnake)).showAsJson 33 | 34 | private def deserializeRightWithMappedExceptions[T]( 35 | doDeserialize: (String, ResponseMetadata) => Either[DeserializationOpenAIException, T] 36 | ): (Either[String, String], ResponseMetadata) => Either[OpenAIException, T] = { 37 | case (Left(body), meta) => 38 | Left(httpToOpenAIError(UnexpectedStatusCode(body, meta))) 39 | case (Right(body), meta) => doDeserialize.apply(body, meta) 40 | } 41 | 42 | def deserializeJsonSnake[B: upickleApi.Reader: IsOption]: (String, ResponseMetadata) => Either[DeserializationOpenAIException, B] = { 43 | (s: String, meta: ResponseMetadata) => 44 | try 45 | Right(upickleApi.read[B](JsonInput.sanitize[B].apply(s))) 46 | catch { 47 | case e: Exception => Left(DeserializationOpenAIException(e, meta)) 48 | case t: Throwable => 49 | // in ScalaJS, ArrayIndexOutOfBoundsException exceptions are wrapped in org.scalajs.linker.runtime.UndefinedBehaviorError 50 | t.getCause match { 51 | case e: ArrayIndexOutOfBoundsException => Left(DeserializationOpenAIException(e, meta)) 52 | case _ => throw t 53 | } 54 | } 55 | } 56 | 57 | def asStringEither: ResponseAs[Either[OpenAIException, String]] = 58 | asStringAlways 59 | .mapWithMetadata { (string, metadata) => 60 | if (metadata.isSuccess) Right(string) else Left(httpToOpenAIError(UnexpectedStatusCode(string, metadata))) 61 | } 62 | .showAs("either(as error, as string)") 63 | 64 | private def httpToOpenAIError(he: UnexpectedStatusCode[String]): OpenAIException = { 65 | val errorMessageBody = upickleApi.read[ujson.Value](he.body).apply("error") 66 | val error = upickleApi.read[Error](errorMessageBody) 67 | import error._ 68 | 69 | he.response.code match { 70 | case TooManyRequests => new RateLimitException(message, `type`, param, code, he) 71 | case BadRequest | NotFound | UnsupportedMediaType => new InvalidRequestException(message, `type`, param, code, he) 72 | case Unauthorized => new AuthenticationException(message, 
`type`, param, code, he) 73 | case Forbidden => new PermissionException(message, `type`, param, code, he) 74 | case Conflict => new TryAgain(message, `type`, param, code, he) 75 | case ServiceUnavailable => new ServiceUnavailableException(message, `type`, param, code, he) 76 | case _ => new APIException(message, `type`, param, code, he) 77 | } 78 | } 79 | 80 | private case class Error( 81 | message: Option[String] = None, 82 | `type`: Option[String] = None, 83 | param: Option[String] = None, 84 | code: Option[String] = None 85 | ) 86 | private object Error { 87 | implicit val errorR: upickleApi.Reader[Error] = upickleApi.macroR 88 | } 89 | } 90 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/admin/AdminApiKeyRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.admin 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** Create an organization admin API key 6 | */ 7 | case class AdminApiKeyRequestBody(name: String) {} 8 | 9 | object AdminApiKeyRequestBody { 10 | implicit val adminApiKeyRequestBodyW: SnakePickle.Writer[AdminApiKeyRequestBody] = SnakePickle.macroW[AdminApiKeyRequestBody] 11 | } 12 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/admin/AdminApiKeyResponse.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.admin 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | case class AdminApiKeyResponse( 6 | `object`: String = "organization.admin_api_key", 7 | id: String, 8 | name: String, 9 | redactedValue: String, 10 | createdAt: Int, 11 | owner: Owner, 12 | value: Option[String] 13 | ) 14 | 15 | object AdminApiKeyResponse { 16 | implicit val adminApiKeyResponseR: SnakePickle.Reader[AdminApiKeyResponse] = SnakePickle.macroR[AdminApiKeyResponse] 17 | } 18 | 19 | case class Owner( 20 | `type`: String, 21 | `object`: String, 22 | id: String, 23 | name: String, 24 | createdAt: Int, 25 | role: String 26 | ) 27 | 28 | object Owner { 29 | implicit val ownerR: SnakePickle.Reader[Owner] = SnakePickle.macroR[Owner] 30 | } 31 | 32 | case class ListAdminApiKeyResponse( 33 | `object`: String = "list", 34 | data: Seq[AdminApiKeyResponse], 35 | hasMore: Boolean, 36 | firstId: String, 37 | lastId: String 38 | ) 39 | 40 | object ListAdminApiKeyResponse { 41 | implicit val listAdminApiKeyResponseR: SnakePickle.Reader[ListAdminApiKeyResponse] = SnakePickle.macroR[ListAdminApiKeyResponse] 42 | } 43 | 44 | case class DeleteAdminApiKeyResponse( 45 | id: String, 46 | `object`: String = "organization.admin_api_key.deleted", 47 | deleted: Boolean 48 | ) 49 | 50 | object DeleteAdminApiKeyResponse { 51 | implicit val deleteAdminApiKeyResponseR: SnakePickle.Reader[DeleteAdminApiKeyResponse] = SnakePickle.macroR[DeleteAdminApiKeyResponse] 52 | } 53 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/admin/QueryParameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.admin 2 | 3 | /** @param after 4 | * Identifier for the last job from the previous pagination request. 5 | * @param order 6 | * Defaults to asc. 7 | * @param limit 8 | * Number of fine-tuning jobs to retrieve. Defaults to 20. 
9 | */ 10 | case class QueryParameters( 11 | after: Option[String] = None, 12 | order: Option[String] = None, 13 | limit: Option[Int] = None 14 | ) { 15 | 16 | def toMap: Map[String, String] = { 17 | val queryParams = after.map("after" -> _) ++ 18 | order.map("order" -> _) ++ 19 | limit.map(_.toString).map("limit" -> _) 20 | queryParams.toMap 21 | } 22 | } 23 | 24 | object QueryParameters { 25 | val empty: QueryParameters = QueryParameters(None, None, None) 26 | } 27 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/assistants/AssistantsResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.assistants 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.completions.chat.message.{Tool, ToolResources} 5 | 6 | object AssistantsResponseData { 7 | 8 | /** Represents an assistant that can call the model and use tools. 9 | * @param id 10 | * The identifier, which can be referenced in API endpoints. 11 | * 12 | * @param object 13 | * The object type, which is always assistant. 14 | * 15 | * @param createdAt 16 | * The Unix timestamp (in seconds) for when the assistant was created. 17 | * 18 | * @param name 19 | * The name of the assistant. The maximum length is 256 characters. 20 | * 21 | * @param description 22 | * The description of the assistant. The maximum length is 512 characters. 23 | * 24 | * @param model 25 | * ID of the model to use. You can use the List models API to see all of your available models, or see our Model overview for 26 | * descriptions of them. 27 | * 28 | * @param instructions 29 | * The system instructions that the assistant uses. The maximum length is 32768 characters. 30 | * 31 | * @param tools 32 | * A list of tools enabled on the assistant. There can be a maximum of 128 tools per assistant. Tools can be of types code_interpreter, 33 | * file_search, or function. 34 | * 35 | * @param toolResources 36 | * A set of resources that are used by the assistant's tools. The resources are specific to the type of tool. For example, the 37 | * code_interpreter tool requires a list of file IDs, while the file_search tool requires a list of vector store IDs. 38 | * 39 | * @param metadata 40 | * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object 41 | * in a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. 42 | * 43 | * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/object]] 44 | */ 45 | case class AssistantData( 46 | id: String, 47 | `object`: String, 48 | createdAt: Int, 49 | name: Option[String], 50 | description: Option[String], 51 | model: String, 52 | instructions: Option[String], 53 | tools: Seq[Tool], 54 | toolResources: Option[ToolResources], 55 | metadata: Map[String, String] 56 | ) 57 | 58 | object AssistantData { 59 | implicit val assistantDataR: SnakePickle.Reader[AssistantData] = SnakePickle.macroR[AssistantData] 60 | } 61 | 62 | /** @param object 63 | * Always "list" 64 | * @param data 65 | * A list of assistant objects.
66 | * @param firstId 67 | * Id of first object 68 | * @param lastId 69 | * Id of last object 70 | * @param hasMore 71 | * Denotes if there are more objects available 72 | */ 73 | case class ListAssistantsResponse( 74 | `object`: String = "list", 75 | data: Seq[AssistantData], 76 | firstId: String, 77 | lastId: String, 78 | hasMore: Boolean 79 | ) 80 | object ListAssistantsResponse { 81 | implicit val listAssistantsResponseR: SnakePickle.Reader[ListAssistantsResponse] = SnakePickle.macroR[ListAssistantsResponse] 82 | } 83 | 84 | /** @param id 85 | * Id of deleted object 86 | * @param `object` 87 | * assistant.deleted 88 | * @param deleted 89 | * 90 | * For more information please visit: [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistant]] 91 | */ 92 | case class DeleteAssistantResponse( 93 | id: String, 94 | `object`: String, 95 | deleted: Boolean 96 | ) 97 | 98 | object DeleteAssistantResponse { 99 | implicit val deleteAssistantResponseReadWriter: SnakePickle.ReadWriter[DeleteAssistantResponse] = 100 | SnakePickle.macroRW[DeleteAssistantResponse] 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/audio/AudioResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.audio 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object AudioResponseData { 6 | 7 | case class AudioResponse(text: String) 8 | 9 | object AudioResponse { 10 | implicit val audioResponseR: SnakePickle.Reader[AudioResponse] = SnakePickle.macroR[AudioResponse] 11 | } 12 | 13 | } 14 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/audio/RecognitionModel.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.audio 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | sealed abstract class RecognitionModel(val value: String) 6 | 7 | object RecognitionModel { 8 | case object Whisper1 extends RecognitionModel("whisper-1") 9 | 10 | /** Use only as a workaround if API supports a format that's not yet predefined as a case object of Model. Otherwise, a custom format 11 | * would be rejected. See [[https://platform.openai.com/docs/api-reference/audio]] for current list of supported formats 12 | */ 13 | case class Custom(customModel: String) extends RecognitionModel(customModel) 14 | 15 | implicit val ModelW: SnakePickle.Writer[RecognitionModel] = SnakePickle 16 | .writer[ujson.Value] 17 | .comap(_.value) 18 | } 19 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/audio/speech/SpeechRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.audio.speech 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** Represents the request body for generating speech from text. 6 | * 7 | * @param model 8 | * One of the available TTS models: tts-1 or tts-1-hd. 9 | * @param input 10 | * The text to generate audio for. The maximum length is 4096 characters. 11 | * @param voice 12 | * The voice to use when generating the audio. Supported voices are alloy, ash, coral, echo, fable, onyx, nova, sage, and shimmer. 13 | * Previews of the voices are available in the Text to speech guide 14 | * [[https://platform.openai.com/docs/guides/text-to-speech#voice-options]].
15 | * @param responseFormat 16 | * The format to generate audio in. Supported formats are mp3, opus, aac, flac, wav, and pcm. Defaults to mp3. 17 | * @param speed 18 | * The speed of the generated audio. Select a value from 0.25 to 4.0. 1.0 is the default. 19 | */ 20 | case class SpeechRequestBody( 21 | model: SpeechModel, 22 | input: String, 23 | voice: Voice, 24 | responseFormat: Option[ResponseFormat] = None, 25 | speed: Option[Float] = None 26 | ) 27 | 28 | object SpeechRequestBody { 29 | implicit val speechRequestBodyW: SnakePickle.Writer[SpeechRequestBody] = SnakePickle.macroW[SpeechRequestBody] 30 | } 31 | 32 | abstract sealed class SpeechModel(val value: String) 33 | 34 | object SpeechModel { 35 | implicit val speechModelW: SnakePickle.Writer[SpeechModel] = SnakePickle 36 | .writer[ujson.Value] 37 | .comap[SpeechModel](_.value) 38 | 39 | case object TTS1 extends SpeechModel("tts-1") 40 | case object TTS1HD extends SpeechModel("tts-1-hd") 41 | case class CustomSpeechModel(customValue: String) extends SpeechModel(customValue) 42 | } 43 | 44 | sealed abstract class Voice(val value: String) 45 | 46 | object Voice { 47 | case object Alloy extends Voice("alloy") 48 | case object Ash extends Voice("ash") 49 | case object Coral extends Voice("coral") 50 | case object Echo extends Voice("echo") 51 | case object Fable extends Voice("fable") 52 | case object Onyx extends Voice("onyx") 53 | case object Nova extends Voice("nova") 54 | case object Sage extends Voice("sage") 55 | case object Shimmer extends Voice("shimmer") 56 | case class CustomVoice(customVoice: String) extends Voice(customVoice) 57 | 58 | implicit val voiceW: SnakePickle.Writer[Voice] = SnakePickle 59 | .writer[ujson.Value] 60 | .comap[Voice](_.value) 61 | } 62 | 63 | sealed abstract class ResponseFormat(val value: String) 64 | 65 | object ResponseFormat { 66 | case object Mp3 extends ResponseFormat("mp3") 67 | case object Opus extends ResponseFormat("opus") 68 | case object Aac extends ResponseFormat("aac") 69 | case object Flac extends ResponseFormat("flac") 70 | case object Wav extends ResponseFormat("wav") 71 | case object Pcm extends ResponseFormat("pcm") 72 | case class CustomFormat(customFormat: String) extends ResponseFormat(customFormat) 73 | 74 | implicit val formatW: SnakePickle.Writer[ResponseFormat] = SnakePickle 75 | .writer[ujson.Value] 76 | .comap[ResponseFormat](_.value) 77 | } 78 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/audio/translations/TranslationConfig.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.audio.translations 2 | 3 | import sttp.openai.requests.audio.RecognitionModel 4 | import sttp.openai.requests.images.ResponseFormat 5 | 6 | import java.io.File 7 | 8 | /** @param file 9 | * The audio file to translate, in one of these formats: mp3, mp4, mpeg, mpga, m4a, wav, or webm. 10 | * @param model 11 | * ID of the model to use. Only whisper-1 is currently available. 12 | * @param prompt 13 | * An optional text to guide the model's style or continue a previous audio segment. The prompt should be in English. 14 | * @param responseFormat 15 | * The format of the transcript output, in one of these options: json, text, srt, verbose_json, or vtt. 16 | * @param temperature 17 | * The sampling temperature, between 0 and 1. Higher values like 0.8 will make the output more random, while lower values like 0.2 will 18 | * make it more focused and deterministic.
If set to 0, the model will use log probability to automatically increase the temperature 19 | * until certain thresholds are hit. 20 | * @param language 21 | * The language of the input audio. Supplying the input language in ISO-639-1 (e.g. en) format will improve accuracy and latency. 22 | */ 23 | case class TranslationConfig( 24 | file: File, 25 | model: RecognitionModel, 26 | prompt: Option[String] = None, 27 | responseFormat: Option[ResponseFormat] = None, 28 | temperature: Option[Float] = None, 29 | language: Option[String] = None 30 | ) 31 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/batch/BatchRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.batch 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param inputFileId 6 | * The ID of an uploaded file that contains requests for the new batch. Your input file must be formatted as a JSONL file, and must be 7 | * uploaded with the purpose batch. The file can contain up to 50,000 requests, and can be up to 200 MB in size. 8 | * @param endpoint 9 | * The endpoint to be used for all requests in the batch. Currently, /v1/chat/completions, /v1/embeddings, and /v1/completions are 10 | * supported. Note that /v1/embeddings batches are also restricted to a maximum of 50,000 embedding inputs across all requests in the 11 | * batch. 12 | * @param completionWindow 13 | * The time frame within which the batch should be processed. Currently only 24h is supported. 14 | * @param metadata 15 | * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object in 16 | * a structured format, and querying for objects via API or the dashboard. Keys are strings with a maximum length of 64 characters. 17 | * Values are strings with a maximum length of 512 characters. 18 | */ 19 | case class BatchRequestBody( 20 | inputFileId: String, 21 | endpoint: String, 22 | completionWindow: String, 23 | metadata: Option[Map[String, String]] = None 24 | ) 25 | 26 | object BatchRequestBody { 27 | implicit val batchRequestBodyW: SnakePickle.Writer[BatchRequestBody] = SnakePickle.macroW[BatchRequestBody] 28 | } 29 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/batch/QueryParameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.batch 2 | 3 | /** @param after 4 | * A cursor for use in pagination. after is an object ID that defines your place in the list. For instance, if you make a list request 5 | * and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the 6 | * list. 7 | * @param limit 8 | * A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. 
9 | */ 10 | case class QueryParameters( 11 | after: Option[String] = None, 12 | limit: Option[Int] = None 13 | ) { 14 | 15 | def toMap: Map[String, String] = { 16 | val queryParams = after.map("after" -> _) ++ 17 | limit.map(_.toString).map("limit" -> _) 18 | queryParams.toMap 19 | } 20 | } 21 | 22 | object QueryParameters { 23 | val empty: QueryParameters = QueryParameters(None, None) 24 | } 25 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/CompletionsResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.completions.CompletionsRequestBody.CompletionModel 5 | 6 | object CompletionsResponseData { 7 | case class Choices( 8 | text: String, 9 | index: Int, 10 | finishReason: String, 11 | logprobs: Option[String] = None 12 | ) 13 | object Choices { 14 | implicit val choicesR: SnakePickle.Reader[Choices] = SnakePickle.macroR[Choices] 15 | } 16 | 17 | case class CompletionsResponse( 18 | id: String, 19 | `object`: String, 20 | created: Int, 21 | model: CompletionModel, 22 | choices: Seq[Choices], 23 | usage: Usage 24 | ) 25 | object CompletionsResponse { 26 | implicit val choicesR: SnakePickle.Reader[CompletionsResponse] = SnakePickle.macroR[CompletionsResponse] 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/Stop.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | sealed trait Stop 6 | object Stop { 7 | implicit val stopW: SnakePickle.Writer[Stop] = SnakePickle 8 | .writer[ujson.Value] 9 | .comap[Stop] { 10 | case SingleStop(value) => SnakePickle.writeJs(value) 11 | case MultipleStop(values) => SnakePickle.writeJs(values) 12 | } 13 | 14 | case class SingleStop(value: String) extends Stop 15 | 16 | case class MultipleStop(values: Seq[String]) extends Stop 17 | } 18 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/Usage.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param promptTokens 6 | * Number of tokens in the prompt. 7 | * @param completionTokens 8 | * Number of tokens in the generated completion. 9 | * @param totalTokens 10 | * Total number of tokens used in the request (prompt + completion). 11 | * @param completionTokensDetails 12 | * Breakdown of tokens used in a completion. 13 | * @param promptTokensDetails 14 | * Breakdown of tokens used in the prompt. 15 | */ 16 | case class Usage( 17 | promptTokens: Int, 18 | completionTokens: Int, 19 | totalTokens: Int, 20 | completionTokensDetails: Option[CompletionTokensDetails] = None, 21 | promptTokensDetails: Option[PromptTokensDetails] = None 22 | ) 23 | 24 | object Usage { 25 | implicit val choicesR: SnakePickle.Reader[Usage] = SnakePickle.macroR[Usage] 26 | } 27 | 28 | /** @param acceptedPredictionTokens 29 | * When using Predicted Outputs, the number of tokens in the prediction that appeared in the completion. 30 | * @param audioTokens 31 | * Audio input tokens generated by the model. 
32 | * @param reasoningTokens 33 | * Tokens generated by the model for reasoning. 34 | * @param rejectedPredictionTokens 35 | * When using Predicted Outputs, the number of tokens in the prediction that did not appear in the completion. However, like reasoning 36 | * tokens, these tokens are still counted in the total completion tokens for purposes of billing, output, and context window limits. 37 | */ 38 | case class CompletionTokensDetails( 39 | acceptedPredictionTokens: Int, 40 | audioTokens: Int, 41 | reasoningTokens: Int, 42 | rejectedPredictionTokens: Int 43 | ) 44 | 45 | object CompletionTokensDetails { 46 | implicit val completionTokensDetailsR: SnakePickle.Reader[CompletionTokensDetails] = SnakePickle.macroR[CompletionTokensDetails] 47 | } 48 | 49 | /** @param audioTokens 50 | * Audio input tokens present in the prompt. 51 | * @param cachedTokens 52 | * Cached tokens present in the prompt. 53 | */ 54 | case class PromptTokensDetails( 55 | audioTokens: Int, 56 | cachedTokens: Int 57 | ) 58 | 59 | object PromptTokensDetails { 60 | implicit val promptTokensDetailsR: SnakePickle.Reader[PromptTokensDetails] = SnakePickle.macroR[PromptTokensDetails] 61 | } 62 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/Audio.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param id 6 | * Unique identifier for this audio response. 7 | * @param expiresAt 8 | * The Unix timestamp (in seconds) for when this audio response will no longer be accessible on the server for use in multi-turn 9 | * conversations. 10 | * @param data 11 | * Base64 encoded audio bytes generated by the model, in the format specified in the request. 12 | * @param transcript 13 | * Transcript of the audio generated by the model. 14 | */ 15 | case class Audio( 16 | id: String, 17 | expiresAt: Int, 18 | data: String, 19 | transcript: String 20 | ) 21 | 22 | object Audio { 23 | implicit val audioR: SnakePickle.Reader[Audio] = SnakePickle.macroR[Audio] 24 | } 25 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/ChatChunkRequestResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.model.sse.ServerSentEvent 5 | 6 | object ChatChunkRequestResponseData { 7 | 8 | /** @param role 9 | * The role of the author of this message. 10 | * @param content 11 | * The contents of the message. 12 | * @param functionCall 13 | * The name and arguments of a function that should be called, as generated by the model.
14 | */ 15 | case class Delta( 16 | role: Option[Role] = None, 17 | content: Option[String] = None, 18 | toolCalls: Seq[ToolCall] = Nil, 19 | functionCall: Option[FunctionCall] = None 20 | ) 21 | 22 | object Delta { 23 | implicit val deltaR: SnakePickle.Reader[Delta] = SnakePickle.macroR[Delta] 24 | } 25 | 26 | case class Choices( 27 | delta: Delta, 28 | finishReason: Option[String] = None, 29 | index: Int 30 | ) 31 | 32 | object Choices { 33 | implicit val choicesR: SnakePickle.Reader[Choices] = SnakePickle.macroR[Choices] 34 | } 35 | 36 | case class ChatChunkResponse( 37 | id: String, 38 | choices: Seq[Choices], 39 | created: Int, 40 | model: String, 41 | `object`: String, 42 | systemFingerprint: Option[String] = None 43 | ) 44 | 45 | object ChatChunkResponse { 46 | val DoneEventMessage = "[DONE]" 47 | val DoneEvent = ServerSentEvent(Some(DoneEventMessage)) 48 | 49 | implicit val chunkChatR: SnakePickle.Reader[ChatChunkResponse] = SnakePickle.macroR[ChatChunkResponse] 50 | } 51 | 52 | } 53 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/FunctionCall.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param arguments 6 | * The arguments to call the function with, as generated by the model in JSON format. Note that the model does not always generate valid 7 | * JSON, and may hallucinate parameters not defined by your function schema. Validate the arguments in your code before calling your 8 | * function. 9 | * @param name 10 | * The name of the function to call. 11 | */ 12 | case class FunctionCall(arguments: String, name: Option[String] = None) 13 | 14 | object FunctionCall { 15 | implicit val functionCallRW: SnakePickle.ReadWriter[FunctionCall] = SnakePickle.macroRW[FunctionCall] 16 | } 17 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/ListChatCompletionsQueryParameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import sttp.openai.requests.completions.chat.ChatRequestBody.ChatCompletionModel 4 | 5 | /** @param model 6 | * The model used to generate the chat completions. 7 | * @param metadata 8 | * A list of metadata keys to filter the chat completions by. Example: metadata[key1]=value1&metadata[key2]=value2 9 | * @param after 10 | * Identifier for the last chat completion from the previous pagination request. 11 | * @param limit 12 | * Number of chat completions to retrieve. Defaults to 20. 13 | * @param order 14 | * Sort order for chat completions by timestamp. Use asc for ascending order or desc for descending order. Defaults to asc. 
15 | */ 16 | case class ListChatCompletionsQueryParameters( 17 | model: Option[ChatCompletionModel] = None, 18 | metadata: Option[Map[String, String]] = None, 19 | after: Option[String] = None, 20 | limit: Option[Int] = None, 21 | order: Option[String] = None 22 | ) { 23 | 24 | def toMap: Map[String, String] = { 25 | val queryParams = model.map("model" -> _.value) ++ 26 | metadata.map(_.map { case (k, v) => s"metadata[$k]" -> v }).getOrElse(Map.empty) ++ 27 | after.map("after" -> _) ++ 28 | limit.map(_.toString).map("limit" -> _) ++ 29 | order.map("order" -> _) 30 | queryParams.toMap 31 | } 32 | } 33 | 34 | object ListChatCompletionsQueryParameters { 35 | val empty: ListChatCompletionsQueryParameters = ListChatCompletionsQueryParameters(None, None, None, None, None) 36 | } 37 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/ListMessagesQueryParameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | /** @param after 4 | * Identifier for the last message from the previous pagination request. 5 | * @param order 6 | * Sort order for messages by timestamp. Use asc for ascending order or desc for descending order. Defaults to asc. 7 | * @param limit 8 | * Number of messages to retrieve. Defaults to 20. 9 | */ 10 | case class ListMessagesQueryParameters( 11 | after: Option[String] = None, 12 | order: Option[String] = None, 13 | limit: Option[Int] = None 14 | ) { 15 | 16 | def toMap: Map[String, String] = { 17 | val queryParams = after.map("after" -> _) ++ 18 | order.map("order" -> _) ++ 19 | limit.map(_.toString).map("limit" -> _) 20 | queryParams.toMap 21 | } 22 | } 23 | 24 | object ListMessagesQueryParameters { 25 | val empty: ListMessagesQueryParameters = ListMessagesQueryParameters(None, None, None) 26 | } 27 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/Role.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Str 5 | 6 | sealed abstract class Role(val value: String) 7 | 8 | object Role { 9 | case object System extends Role("system") 10 | 11 | case object User extends Role("user") 12 | 13 | case object Assistant extends Role("assistant") 14 | 15 | case object Tool extends Role("tool") 16 | 17 | case class Custom(customRole: String) extends Role(customRole) 18 | 19 | val values: Set[Role] = Set(System, User, Assistant, Tool) 20 | 21 | private val byRoleValue = values.map(role => role.value -> role).toMap 22 | 23 | implicit val roleRW: SnakePickle.ReadWriter[Role] = SnakePickle 24 | .readwriter[ujson.Value] 25 | .bimap[Role]( 26 | role => SnakePickle.writeJs(role.value), 27 | jsonValue => 28 | SnakePickle.read[ujson.Value](jsonValue) match { 29 | case Str(value) => byRoleValue.getOrElse(value, Custom(value)) 30 | case e => throw new Exception(s"Could not deserialize: $e") 31 | } 32 | ) 33 | } 34 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/SchemaSupport.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import io.circe.syntax._ 4 | import
io.circe.{DecodingFailure, Json, JsonNumber, JsonObject} 5 | import sttp.apispec.Schema 6 | import sttp.apispec.circe._ 7 | import sttp.openai.json.SnakePickle 8 | import ujson._ 9 | import ujson.circe.CirceJson 10 | 11 | object SchemaSupport { 12 | 13 | private case class ParseException(circeException: DecodingFailure) extends Exception("Failed to parse JSON schema", circeException) 14 | 15 | val schemaRW: SnakePickle.ReadWriter[Schema] = SnakePickle 16 | .readwriter[Value] 17 | .bimap( 18 | s => CirceJson.transform(s.asJson.deepDropNullValues.foldWith(schemaFolder), upickle.default.reader[Value]), 19 | v => 20 | upickle.default.transform(v).to(CirceJson).as[Schema] match { 21 | case Left(e) => throw ParseException(e) 22 | case Right(s) => s 23 | } 24 | ) 25 | 26 | private case class FolderState( 27 | fields: List[(String, Json)], 28 | addAdditionalProperties: Boolean, 29 | requiredProperties: List[String] 30 | ) 31 | 32 | /** OpenAI's JSON schema support imposes two requirements: 33 | * 34 | * 1. All fields must be `required`: https://platform.openai.com/docs/guides/structured-outputs/all-fields-must-be-required 35 | * 2. `additionalProperties: false` must always be set in objects: 36 | * https://platform.openai.com/docs/guides/structured-outputs/additionalproperties-false-must-always-be-set-in-objects 37 | * 38 | * We implement these by folding over the JSON structure. However, if a schema uses discriminated unions (indicated by a `discriminator` 39 | * property), we skip forcing `additionalProperties: false` to preserve flexibility in selecting sub-schemas. 40 | */ 41 | private val schemaFolder: Json.Folder[Json] = new Json.Folder[Json] { 42 | lazy val onNull: Json = Json.Null 43 | def onBoolean(value: Boolean): Json = Json.fromBoolean(value) 44 | def onNumber(value: JsonNumber): Json = Json.fromJsonNumber(value) 45 | def onString(value: String): Json = Json.fromString(value) 46 | def onArray(value: Vector[Json]): Json = Json.fromValues(value.map(_.foldWith(this))) 47 | def onObject(value: JsonObject): Json = { 48 | val state = value.toList.foldRight(FolderState(Nil, addAdditionalProperties = false, Nil)) { case ((k, v), acc) => 49 | if (k == "properties") 50 | acc.copy( 51 | fields = (k, v.foldWith(this)) :: acc.fields, 52 | addAdditionalProperties = true, 53 | requiredProperties = v.asObject.fold(List.empty[String])(_.keys.toList) 54 | ) 55 | else if (k == "type") 56 | acc.copy( 57 | fields = (k, v.foldWith(this)) :: acc.fields, 58 | addAdditionalProperties = acc.addAdditionalProperties || v.asString.contains("object") 59 | ) 60 | else 61 | acc.copy(fields = (k, v.foldWith(this)) :: acc.fields) 62 | } 63 | 64 | // Detect if this object is part of a discriminated union by checking for a "discriminator" property. 
65 | val isDiscriminatedUnion = value.contains("discriminator") 66 | 67 | val (addlPropsRemove, addlPropsAdd) = 68 | if (state.addAdditionalProperties && !isDiscriminatedUnion) 69 | (Set("additionalProperties"), List("additionalProperties" := false)) 70 | else 71 | (Set(), Nil) 72 | 73 | val (requiredRemove, requiredAdd) = 74 | if (state.requiredProperties.nonEmpty) 75 | (Set("required"), List("required" := state.requiredProperties)) 76 | else 77 | (Set(), Nil) 78 | 79 | val remove = addlPropsRemove ++ requiredRemove 80 | val fields = addlPropsAdd ++ requiredAdd ++ state.fields.filterNot { case (k, _) => remove.contains(k) } 81 | 82 | Json.fromFields(fields) 83 | } 84 | } 85 | 86 | } 87 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/ToolCall.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson._ 5 | 6 | sealed trait ToolCall 7 | 8 | object ToolCall { 9 | 10 | /** @param id 11 | * The ID of the tool call. 12 | * @param function 13 | * The function that the model called. 14 | */ 15 | case class FunctionToolCall(id: Option[String], function: FunctionCall) extends ToolCall 16 | 17 | implicit val functionToolCallRW: SnakePickle.ReadWriter[FunctionToolCall] = SnakePickle 18 | .readwriter[Value] 19 | .bimap[FunctionToolCall]( 20 | functionToolCall => { 21 | val baseObj = Obj("type" -> "function", "function" -> SnakePickle.writeJs(functionToolCall.function)) 22 | functionToolCall.id.foreach(baseObj("id") = _) 23 | baseObj 24 | }, 25 | json => FunctionToolCall(json.obj.get("id").map(_.str), SnakePickle.read[FunctionCall](json("function"))) 26 | ) 27 | 28 | implicit val toolCallRW: SnakePickle.ReadWriter[ToolCall] = SnakePickle 29 | .readwriter[Value] 30 | .bimap[ToolCall]( 31 | { case functionToolCall: FunctionToolCall => 32 | SnakePickle.writeJs(functionToolCall) 33 | }, 34 | json => SnakePickle.read[FunctionToolCall](json) 35 | ) 36 | } 37 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/message/Attachment.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat.message 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.{Obj, Value} 5 | 6 | case class Attachment(fileId: Option[String] = None, tools: Option[Seq[Tool]] = None) 7 | 8 | object Attachment { 9 | implicit val toolResourcesOptRW: SnakePickle.ReadWriter[Attachment] = SnakePickle 10 | .readwriter[Value] 11 | .bimap[Attachment]( 12 | attachment => 13 | (attachment.fileId, attachment.tools) match { 14 | case (Some(fileId), Some(tools)) => Obj("file_id" -> fileId, "tools" -> SnakePickle.writeJs(tools)) 15 | case (Some(fileId), None) => Obj("file_id" -> fileId) 16 | case (None, Some(tools)) => Obj("tools" -> SnakePickle.writeJs(tools)) 17 | case _ => Obj() 18 | }, 19 | json => { 20 | val map = json.obj 21 | val fileId: Option[String] = map.get("file_id").map(_.str) 22 | val tools: Option[Seq[Tool]] = map.get("tools").map(_.arr.map(e => SnakePickle.read[Tool](e)).toList).filter(_.nonEmpty) 23 | Attachment(fileId, tools) 24 | } 25 | ) 26 | } 27 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/message/Content.scala: 
-------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat.message 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson._ 5 | 6 | sealed trait Content 7 | 8 | object Content { 9 | case class TextContent(value: String) extends Content 10 | case class ArrayContent(value: Seq[ContentPart]) extends Content 11 | 12 | implicit val contentRW: SnakePickle.ReadWriter[Content] = SnakePickle 13 | .readwriter[Value] 14 | .bimap[Content]( 15 | { 16 | case TextContent(value) => SnakePickle.writeJs(value) 17 | case ArrayContent(value) => SnakePickle.writeJs(value) 18 | }, 19 | jsonValue => 20 | SnakePickle.read[Value](jsonValue) match { 21 | case Str(value) => TextContent(value) 22 | case Arr(value) => ArrayContent(value.toSeq.map(SnakePickle.read[ContentPart](_))) 23 | case e => throw new Exception(s"Could not deserialize: $e") 24 | } 25 | ) 26 | 27 | sealed trait ContentPart 28 | case class TextContentPart(text: String) extends ContentPart 29 | case class ImageContentPart(imageUrl: ImageUrl) extends ContentPart 30 | 31 | implicit val textContentPartRW: SnakePickle.ReadWriter[TextContentPart] = 32 | SnakePickle 33 | .readwriter[Value] 34 | .bimap[TextContentPart]( 35 | textPart => Obj("type" -> "text", "text" -> textPart.text), 36 | json => TextContentPart(json("text").str) 37 | ) 38 | 39 | implicit val imageContentPartRW: SnakePickle.ReadWriter[ImageContentPart] = 40 | SnakePickle 41 | .readwriter[Value] 42 | .bimap[ImageContentPart]( 43 | imagePart => Obj("type" -> "image_url", "image_url" -> SnakePickle.writeJs(imagePart.imageUrl)), 44 | json => ImageContentPart(SnakePickle.read[ImageUrl](json("image_url"))) 45 | ) 46 | 47 | implicit val contentPartRW: SnakePickle.ReadWriter[ContentPart] = 48 | SnakePickle 49 | .readwriter[Value] 50 | .bimap[ContentPart]( 51 | { 52 | case textPart: TextContentPart => SnakePickle.writeJs(textPart) 53 | case imagePart: ImageContentPart => SnakePickle.writeJs(imagePart) 54 | }, 55 | json => 56 | json("type").str match { 57 | case "text" => SnakePickle.read[TextContentPart](json) 58 | case "image_url" => SnakePickle.read[ImageContentPart](json) 59 | } 60 | ) 61 | 62 | case class ImageUrl(url: String, detail: Option[String] = None) 63 | 64 | implicit val imageUrlRW: SnakePickle.ReadWriter[ImageUrl] = SnakePickle.macroRW[ImageUrl] 65 | } 66 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/message/Message.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat.message 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.completions.chat.ToolCall 5 | import ujson._ 6 | 7 | sealed trait Message 8 | 9 | object Message { 10 | case class SystemMessage(content: String, name: Option[String] = None) extends Message 11 | case class UserMessage(content: Content, name: Option[String] = None) extends Message 12 | case class AssistantMessage(content: String, name: Option[String] = None, toolCalls: Seq[ToolCall] = Nil) extends Message 13 | case class ToolMessage(content: String, toolCallId: String) extends Message 14 | 15 | object ToolMessage { 16 | def apply(content: String, toolCallId: String): ToolMessage = new ToolMessage(content, toolCallId) 17 | 18 | def apply[T: SnakePickle.Writer](content: T, toolCallId: String): ToolMessage = 19 | new ToolMessage(SnakePickle.write(content), toolCallId) 20 | 21 | implicit 
val toolMessageRW: SnakePickle.ReadWriter[ToolMessage] = 22 | SnakePickle 23 | .readwriter[Value] 24 | .bimap[ToolMessage]( 25 | msg => Obj("role" -> "tool", "content" -> msg.content, "tool_call_id" -> msg.toolCallId), 26 | json => ToolMessage(json("content").str, json("tool_call_id").str) 27 | ) 28 | } 29 | 30 | implicit val systemMessageRW: SnakePickle.ReadWriter[SystemMessage] = 31 | SnakePickle 32 | .readwriter[Value] 33 | .bimap[SystemMessage]( 34 | msg => { 35 | val baseObj = Obj("role" -> "system", "content" -> msg.content) 36 | msg.name.foreach(name => baseObj("name") = name) 37 | baseObj 38 | }, 39 | json => SystemMessage(json("content").str, json.obj.get("name").map(_.str)) 40 | ) 41 | 42 | implicit val userMessageRW: SnakePickle.ReadWriter[UserMessage] = 43 | SnakePickle 44 | .readwriter[Value] 45 | .bimap[UserMessage]( 46 | msg => { 47 | val baseObj = Obj("role" -> "user", "content" -> SnakePickle.writeJs(msg.content)) 48 | msg.name.foreach(name => baseObj("name") = name) 49 | baseObj 50 | }, 51 | json => UserMessage(SnakePickle.read[Content](json("content")), json.obj.get("name").map(_.str)) 52 | ) 53 | 54 | implicit val assistantMessageRW: SnakePickle.ReadWriter[AssistantMessage] = 55 | SnakePickle 56 | .readwriter[Value] 57 | .bimap[AssistantMessage]( 58 | msg => { 59 | val baseObj = Obj("role" -> "assistant", "content" -> msg.content) 60 | msg.name.foreach(name => baseObj("name") = name) 61 | if (msg.toolCalls.nonEmpty) { 62 | baseObj("tool_calls") = SnakePickle.writeJs(msg.toolCalls) 63 | } 64 | baseObj 65 | }, 66 | json => 67 | AssistantMessage( 68 | json("content").str, 69 | json.obj.get("name").map(_.str), 70 | SnakePickle.read[Seq[ToolCall]](json.obj("tool_calls")) 71 | ) 72 | ) 73 | 74 | implicit val messageRW: SnakePickle.ReadWriter[Message] = 75 | SnakePickle 76 | .readwriter[Value] 77 | .bimap[Message]( 78 | { 79 | case msg: SystemMessage => SnakePickle.writeJs(msg) 80 | case msg: UserMessage => SnakePickle.writeJs(msg) 81 | case msg: AssistantMessage => SnakePickle.writeJs(msg) 82 | case msg: ToolMessage => SnakePickle.writeJs(msg) 83 | }, 84 | json => 85 | json("role").str match { 86 | case "system" => SnakePickle.read[SystemMessage](json) 87 | case "user" => SnakePickle.read[UserMessage](json) 88 | case "assistant" => SnakePickle.read[AssistantMessage](json) 89 | case "tool" => SnakePickle.read[ToolMessage](json) 90 | } 91 | ) 92 | } 93 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolChoice.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat.message 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson._ 5 | 6 | sealed trait ToolChoice 7 | 8 | object ToolChoice { 9 | 10 | /** Means that the model will not call a function and instead generates a message. */ 11 | case object ToolNone extends ToolChoice 12 | 13 | /** Means the model can pick between generating a message or calling a function. */ 14 | case object ToolAuto extends ToolChoice 15 | 16 | /** Means the model will call a function. 
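For example, an illustrative (hypothetical) choice such as `ToolFunction("get_weather")` serializes to `{"type": "function", "function": {"name": "get_weather"}}`.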
*/ 17 | case class ToolFunction(name: String) extends ToolChoice 18 | 19 | implicit val toolNoneRW: SnakePickle.ReadWriter[ToolNone.type] = SnakePickle 20 | .readwriter[Value] 21 | .bimap[ToolNone.type]( 22 | _ => Str("none"), 23 | _ => ToolNone 24 | ) 25 | 26 | implicit val toolAutoRW: SnakePickle.ReadWriter[ToolAuto.type] = SnakePickle 27 | .readwriter[Value] 28 | .bimap[ToolAuto.type]( 29 | _ => Str("auto"), 30 | _ => ToolAuto 31 | ) 32 | 33 | implicit val toolFunctionRW: SnakePickle.ReadWriter[ToolFunction] = SnakePickle 34 | .readwriter[Value] 35 | .bimap[ToolFunction]( 36 | toolFunction => Obj("type" -> "function", "function" -> Obj("name" -> toolFunction.name)), 37 | json => ToolFunction(json.obj("function")("name").str) 38 | ) 39 | 40 | implicit val toolChoiceRW: SnakePickle.ReadWriter[ToolChoice] = SnakePickle 41 | .readwriter[Value] 42 | .bimap[ToolChoice]( 43 | { 44 | case toolAuto: ToolAuto.type => SnakePickle.writeJs(toolAuto) 45 | case toolNone: ToolNone.type => SnakePickle.writeJs(toolNone) 46 | case toolFunction: ToolFunction => SnakePickle.writeJs(toolFunction) 47 | }, 48 | { 49 | case json @ Str("none") => SnakePickle.read[ToolNone.type](json) 50 | case json @ Str("auto") => SnakePickle.read[ToolAuto.type](json) 51 | case json => SnakePickle.read[ToolFunction](json) 52 | } 53 | ) 54 | 55 | case class FunctionSpec(name: String) 56 | 57 | implicit val functionSpecRW: SnakePickle.ReadWriter[FunctionSpec] = SnakePickle.macroRW[FunctionSpec] 58 | 59 | } 60 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResource.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat.message 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson._ 5 | 6 | sealed trait ToolResource 7 | 8 | object ToolResource { 9 | 10 | /** Code interpreter tool 11 | * 12 | * The type of tool being defined: code_interpreter 13 | */ 14 | case class CodeInterpreterToolResource(filesIds: Option[Seq[String]] = None) extends ToolResource 15 | 16 | implicit val codeInterpreterToolResourceRW: SnakePickle.ReadWriter[CodeInterpreterToolResource] = SnakePickle 17 | .readwriter[Value] 18 | .bimap[CodeInterpreterToolResource]( 19 | resource => 20 | resource.filesIds match { 21 | case Some(fileIds) => Obj("file_ids" -> fileIds) 22 | case None => Obj() 23 | }, 24 | json => { 25 | val map = json.obj 26 | if (map.nonEmpty) { 27 | val fileIds = map.get("file_ids").map(_.arr.map(_.str).toList) 28 | CodeInterpreterToolResource(fileIds) 29 | } else { 30 | null 31 | } 32 | } 33 | ) 34 | 35 | /** file_search tool 36 | * 37 | * The type of tool being defined: file_search 38 | */ 39 | case class FileSearchToolResource(vectorStoreIds: Option[Seq[String]] = None, vectorStores: Option[Seq[String]] = None) 40 | extends ToolResource 41 | 42 | implicit val fileSearchToolResourceRW: SnakePickle.ReadWriter[FileSearchToolResource] = SnakePickle 43 | .readwriter[Value] 44 | .bimap[FileSearchToolResource]( 45 | resource => 46 | (resource.vectorStoreIds, resource.vectorStores) match { 47 | case (Some(vectorStoreIds), Some(vectorStores)) => Obj("vector_store_ids" -> vectorStoreIds, "vector_stores" -> vectorStores) 48 | case (Some(vectorStoreIds), None) => Obj("vector_store_ids" -> vectorStoreIds) 49 | case (None, Some(vectorStores)) => Obj("vector_stores" -> vectorStores) 50 | case _ => Obj() 51 | }, 52 | json => { 53 | val map = json.obj 54 | if 
(map.nonEmpty) { 55 | val storeIds: Option[List[String]] = map.get("vector_store_ids").map(_.arr.map(_.str).toList).filter(_.nonEmpty) 56 | val stores: Option[List[String]] = map.get("vector_stores").map(_.arr.map(_.str).toList).filter(_.nonEmpty) 57 | FileSearchToolResource(storeIds, stores) 58 | } else { 59 | null 60 | } 61 | } 62 | ) 63 | } 64 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/completions/chat/message/ToolResources.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat.message 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.completions.chat.message.ToolResource.{CodeInterpreterToolResource, FileSearchToolResource} 5 | import ujson._ 6 | 7 | case class ToolResources( 8 | codeInterpreter: Option[CodeInterpreterToolResource] = None, 9 | fileSearch: Option[FileSearchToolResource] = None 10 | ) 11 | 12 | object ToolResources { 13 | 14 | implicit val toolResourcesOptRW: SnakePickle.ReadWriter[Option[ToolResources]] = SnakePickle 15 | .readwriter[Value] 16 | .bimap[Option[ToolResources]]( 17 | { 18 | case Some(resources) => 19 | (resources.fileSearch, resources.codeInterpreter) match { 20 | case (Some(fileSearch), Some(codeInterpreter)) => 21 | Obj("file_search" -> SnakePickle.writeJs(fileSearch), "code_interpreter" -> SnakePickle.writeJs(codeInterpreter)) 22 | case (Some(fileSearch), None) => Obj("file_search" -> SnakePickle.writeJs(fileSearch)) 23 | case (None, Some(codeInterpreter)) => Obj("code_interpreter" -> SnakePickle.writeJs(codeInterpreter)) 24 | case _ => Obj() 25 | } 26 | case None => Obj() 27 | }, 28 | json => { 29 | val map = json.obj 30 | if (map.nonEmpty) { 31 | val codeInterpreter: Option[CodeInterpreterToolResource] = 32 | map.get("code_interpreter").map(e => SnakePickle.read[CodeInterpreterToolResource](e)) 33 | val fileSearch: Option[FileSearchToolResource] = map.get("file_search").map(e => SnakePickle.read[FileSearchToolResource](e)) 34 | Some(ToolResources(codeInterpreter, fileSearch)) 35 | } else { 36 | None 37 | } 38 | } 39 | ) 40 | } 41 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/embeddings/EmbeddingsRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.embeddings 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Str 5 | 6 | object EmbeddingsRequestBody { 7 | 8 | /** @param model 9 | * ID of the [[EmbeddingsModel]] to use. 10 | * @param input 11 | * Input text to get embeddings for, encoded as a string or array of tokens. 12 | * @param user 13 | * A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. 14 | * @param dimensions 15 | * The number of dimensions for the embeddings. Only supported in text-embedding-3 and later models. 
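*
* Example (an illustrative sketch; the model name, input text and dimension count are assumptions, not defaults of this library):
* {{{
*   val body = EmbeddingsBody(
*     model = EmbeddingsModel.CustomEmbeddingsModel("text-embedding-3-small"),
*     input = EmbeddingsInput.SingleInput("The quick brown fox jumps over the lazy dog"),
*     dimensions = Some(256)
*   )
* }}}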
16 | */ 17 | case class EmbeddingsBody(model: EmbeddingsModel, input: EmbeddingsInput, user: Option[String] = None, dimensions: Option[Int] = None) 18 | 19 | object EmbeddingsBody { 20 | implicit val embeddingsBodyWriter: SnakePickle.Writer[EmbeddingsBody] = SnakePickle.macroW 21 | } 22 | 23 | sealed abstract class EmbeddingsModel(val value: String) 24 | 25 | object EmbeddingsModel { 26 | 27 | implicit val embeddingsModelReadWriter: SnakePickle.ReadWriter[EmbeddingsModel] = SnakePickle 28 | .readwriter[ujson.Value] 29 | .bimap[EmbeddingsModel]( 30 | model => SnakePickle.writeJs(model.value), 31 | jsonValue => 32 | SnakePickle.read[ujson.Value](jsonValue) match { 33 | case Str(value) => 34 | byEmbeddingsModelValue.getOrElse(value, CustomEmbeddingsModel(value)) 35 | case e => throw new Exception(s"Could not deserialize: $e") 36 | } 37 | ) 38 | case object TextEmbeddingAda002 extends EmbeddingsModel("text-embedding-ada-002") 39 | case object TextSearchAdaDoc001 extends EmbeddingsModel("text-search-ada-doc-001") 40 | case class CustomEmbeddingsModel(customEmbeddingsModel: String) extends EmbeddingsModel(customEmbeddingsModel) 41 | 42 | val values: Set[EmbeddingsModel] = Set(TextEmbeddingAda002, TextSearchAdaDoc001) 43 | 44 | private val byEmbeddingsModelValue = values.map(model => model.value -> model).toMap 45 | } 46 | 47 | sealed trait EmbeddingsInput 48 | object EmbeddingsInput { 49 | case class SingleInput(value: String) extends EmbeddingsInput 50 | case class MultipleInput(values: Seq[String]) extends EmbeddingsInput 51 | 52 | implicit val embeddingsInputWriter: SnakePickle.Writer[EmbeddingsInput] = SnakePickle.writer[ujson.Value].comap[EmbeddingsInput] { 53 | case SingleInput(value) => SnakePickle.writeJs(value) 54 | case MultipleInput(values) => SnakePickle.writeJs(values) 55 | } 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/embeddings/EmbeddingsResponseBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.embeddings 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.embeddings.EmbeddingsRequestBody.EmbeddingsModel 5 | 6 | object EmbeddingsResponseBody { 7 | case class EmbeddingData( 8 | `object`: String, 9 | index: Int, 10 | embedding: Seq[Double] 11 | ) 12 | object EmbeddingData { 13 | implicit val embeddingDataReader: SnakePickle.Reader[EmbeddingData] = SnakePickle.macroR 14 | } 15 | 16 | case class EmbeddingResponse( 17 | `object`: String, 18 | data: Seq[EmbeddingData], 19 | model: EmbeddingsModel, 20 | usage: Usage 21 | ) 22 | 23 | object EmbeddingResponse { 24 | implicit val embeddingResponseDataReader: SnakePickle.Reader[EmbeddingResponse] = SnakePickle.macroR 25 | } 26 | case class Usage( 27 | promptTokens: Int, 28 | totalTokens: Int 29 | ) 30 | 31 | object Usage { 32 | implicit val usageDataReader: SnakePickle.Reader[Usage] = SnakePickle.macroR 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/files/FilesResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.files 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object FilesResponseData { 6 | case class FileData( 7 | `object`: String, 8 | id: String, 9 | purpose: String, 10 | filename: String, 11 | bytes: Int, 12 | createdAt: Int, 13 | @deprecated("Mark as deprecated 
in OpenAI spec") status: String, 14 | @deprecated("Mark as deprecated in OpenAI spec") statusDetails: Option[String] 15 | ) 16 | 17 | object FileData { 18 | implicit val fileInfoReadWriter: SnakePickle.ReadWriter[FileData] = SnakePickle.macroRW[FileData] 19 | } 20 | 21 | case class FilesResponse( 22 | `object`: String, 23 | data: Seq[FileData] 24 | ) 25 | 26 | object FilesResponse { 27 | implicit val filesResponseReadWriter: SnakePickle.ReadWriter[FilesResponse] = SnakePickle.macroRW[FilesResponse] 28 | } 29 | 30 | case class DeletedFileData( 31 | `object`: String, 32 | id: String, 33 | deleted: Boolean 34 | ) 35 | 36 | object DeletedFileData { 37 | implicit val deleteFileResponseReadWriter: SnakePickle.ReadWriter[DeletedFileData] = SnakePickle.macroRW[DeletedFileData] 38 | } 39 | } 40 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/finetuning/FineTuningJobRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.finetuning 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Str 5 | 6 | /** @param model 7 | * The name of the model to fine-tune. You can select one of the supported models 8 | * [[https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned]]. 9 | * @param trainingFile 10 | * The ID of an uploaded file that contains training data. See upload file for how to upload a file. Your dataset must be formatted as a 11 | * JSONL file. Additionally, you must upload your file with the purpose fine-tune. The contents of the file should differ depending on if 12 | * the model uses the chat, completions format, or if the fine-tuning method uses the preference format. See the fine-tuning guide for 13 | * more details. 14 | * @param suffix 15 | * A string of up to 64 characters that will be added to your fine-tuned model name. For example, a suffix of "custom-model-name" would 16 | * produce a model name like ft:gpt-4o-mini:openai:custom-model-name:7p4lURel. 17 | * @param validationFile 18 | * The ID of an uploaded file that contains validation data. If you provide this file, the data is used to generate validation metrics 19 | * periodically during fine-tuning. These metrics can be viewed in the fine-tuning results file. The same data should not be present in 20 | * both train and validation files. Your dataset must be formatted as a JSONL file. You must upload your file with the purpose fine-tune. 21 | * See the fine-tuning guide for more details. 22 | * @param integrations 23 | * A list of integrations to enable for your fine-tuning job. 24 | * @param seed 25 | * The seed controls the reproducibility of the job. Passing in the same seed and job parameters should produce the same results, but may 26 | * differ in rare cases. If a seed is not specified, one will be generated for you. 27 | * @param method 28 | * The method used for fine-tuning. 
29 | */ 30 | case class FineTuningJobRequestBody( 31 | model: FineTuningModel, 32 | trainingFile: String, 33 | suffix: Option[String] = None, 34 | validationFile: Option[String] = None, 35 | integrations: Option[Seq[Integration]] = None, 36 | seed: Option[Int] = None, 37 | method: Option[Method] = None 38 | ) 39 | object FineTuningJobRequestBody { 40 | implicit val fineTuningRequestBodyWriter: SnakePickle.Writer[FineTuningJobRequestBody] = SnakePickle.macroW[FineTuningJobRequestBody] 41 | } 42 | 43 | sealed abstract class FineTuningModel(val value: String) 44 | 45 | object FineTuningModel { 46 | 47 | implicit val fineTuningModelRW: SnakePickle.ReadWriter[FineTuningModel] = SnakePickle 48 | .readwriter[ujson.Value] 49 | .bimap[FineTuningModel]( 50 | model => SnakePickle.writeJs(model.value), 51 | jsonValue => 52 | SnakePickle.read[ujson.Value](jsonValue) match { 53 | case Str(value) => 54 | byFineTuningModelValue.getOrElse(value, CustomFineTuningModel(value)) 55 | case e => throw new Exception(s"Could not deserialize: $e") 56 | } 57 | ) 58 | 59 | case object GPT4o20240806 extends FineTuningModel("gpt-4o-2024-08-06") 60 | 61 | case object GPT4oMini20240718 extends FineTuningModel("gpt-4o-mini-2024-07-18") 62 | 63 | case object GPT40613 extends FineTuningModel("gpt-4-0613") 64 | 65 | case object GPT35Turbo0125 extends FineTuningModel("gpt-3.5-turbo-0125") 66 | 67 | case object GPT35Turbo1106 extends FineTuningModel("gpt-3.5-turbo-1106") 68 | 69 | case object GPT35Turbo0613 extends FineTuningModel("gpt-3.5-turbo-0613") 70 | 71 | case class CustomFineTuningModel(customFineTuningModel: String) extends FineTuningModel(customFineTuningModel) 72 | 73 | val values: Set[FineTuningModel] = Set(GPT4o20240806, GPT4oMini20240718, GPT40613, GPT35Turbo0125, GPT35Turbo1106, GPT35Turbo0613) 74 | 75 | private val byFineTuningModelValue = values.map(model => model.value -> model).toMap 76 | 77 | } 78 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/finetuning/Hyperparameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.finetuning 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param batchSize 6 | * Number of examples in each batch. A larger batch size means that model parameters are updated less frequently, but with lower 7 | * variance. 8 | * @param learningRateMultiplier 9 | * Scaling factor for the learning rate. A smaller learning rate may be useful to avoid overfitting. 10 | * @param nEpochs 11 | * The number of epochs to train the model for. An epoch refers to one full cycle through the training dataset. 12 | * @param beta 13 | * The beta value for the DPO method. A higher beta value will increase the weight of the penalty between the policy and reference model. 
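*
* Example (an illustrative sketch; the numeric values are arbitrary assumptions):
* {{{
*   val hyperparameters = Hyperparameters(
*     batchSize = Some(8),
*     learningRateMultiplier = Some(0.1f),
*     nEpochs = Some(3)
*   )
* }}}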
14 | */ 15 | case class Hyperparameters( 16 | batchSize: Option[Int] = None, 17 | learningRateMultiplier: Option[Float] = None, 18 | nEpochs: Option[Int] = None, 19 | beta: Option[Float] = None 20 | ) 21 | 22 | object Hyperparameters { 23 | implicit val hyperparametersW: SnakePickle.ReadWriter[Hyperparameters] = SnakePickle.macroRW[Hyperparameters] 24 | } 25 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/finetuning/Integration.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.finetuning 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param `type` 6 | * The type of integration to enable. Currently, only "wandb" (Weights and Biases) is supported. 7 | * @param wandb 8 | * The settings for your integration with Weights and Biases. This payload specifies the project that metrics will be sent to. 9 | * Optionally, you can set an explicit display name for your run, add tags to your run, and set a default entity (team, username, etc) to 10 | * be associated with your run. 11 | */ 12 | case class Integration( 13 | `type`: Type, 14 | wandb: Wandb 15 | ) 16 | 17 | object Integration { 18 | implicit val integrationRW: SnakePickle.ReadWriter[Integration] = SnakePickle.macroRW[Integration] 19 | 20 | case object Wandb extends Type("wandb") 21 | 22 | private val values: Set[Type] = Set(Wandb) 23 | 24 | private val byTypeValue = values.map(`type` => `type`.value -> `type`).toMap 25 | 26 | implicit val typeRW: SnakePickle.ReadWriter[Type] = Type.typeRW(byTypeValue) 27 | } 28 | 29 | /** @param project 30 | * The name of the project that the new run will be created under. 31 | * @param name 32 | * A display name to set for the run. If not set, we will use the Job ID as the name. 33 | * @param entity 34 | * The entity to use for the run. This allows you to set the team or username of the WandB user that you would like associated with the 35 | * run. If not set, the default entity for the registered WandB API key is used. 36 | * @param tags 37 | * A list of tags to be attached to the newly created run. These tags are passed through directly to WandB. Some default tags are 38 | * generated by OpenAI: "openai/finetune", "openai/{base-model}", "openai/{ftjob-abcdef}". 39 | */ 40 | case class Wandb( 41 | project: String, 42 | name: Option[String] = None, 43 | entity: Option[String] = None, 44 | tags: Option[Seq[String]] 45 | ) 46 | 47 | object Wandb { 48 | implicit val wandbRW: SnakePickle.ReadWriter[Wandb] = SnakePickle.macroRW[Wandb] 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/finetuning/Method.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.finetuning 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** @param `type` 6 | * The type of method. Is either supervised or dpo. 7 | * @param supervised 8 | * Configuration for the supervised fine-tuning method. 9 | * @param dpo 10 | * Configuration for the DPO fine-tuning method. 
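*
* Example (an illustrative sketch; the beta value is an arbitrary assumption):
* {{{
*   val method = Method(
*     `type` = Some(Method.Dpo),
*     dpo = Some(Dpo(Some(Hyperparameters(beta = Some(0.1f)))))
*   )
* }}}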
11 | */ 12 | case class Method( 13 | `type`: Option[Type] = None, 14 | supervised: Option[Supervised] = None, 15 | dpo: Option[Dpo] = None 16 | ) 17 | 18 | object Method { 19 | implicit val methodRW: SnakePickle.ReadWriter[Method] = SnakePickle.macroRW[Method] 20 | 21 | case object Supervised extends Type("supervised") 22 | 23 | case object Dpo extends Type("dpo") 24 | 25 | private val values: Set[Type] = Set(Supervised, Dpo) 26 | 27 | implicit val byTypeValue: Map[String, Type] = values.map(`type` => `type`.value -> `type`).toMap 28 | } 29 | 30 | /** @param hyperparameters 31 | * The hyperparameters used for the fine-tuning job. 32 | */ 33 | case class Supervised( 34 | hyperparameters: Option[Hyperparameters] = None 35 | ) 36 | 37 | object Supervised { 38 | implicit val supervisedRW: SnakePickle.ReadWriter[Supervised] = SnakePickle.macroRW[Supervised] 39 | } 40 | 41 | /** @param hyperparameters 42 | * The hyperparameters used for the fine-tuning job. 43 | */ 44 | case class Dpo( 45 | hyperparameters: Option[Hyperparameters] = None 46 | ) 47 | 48 | object Dpo { 49 | implicit val dpoRW: SnakePickle.ReadWriter[Dpo] = SnakePickle.macroRW[Dpo] 50 | } 51 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/finetuning/QueryParameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.finetuning 2 | 3 | /** @param after 4 | * Identifier for the last job from the previous pagination request. 5 | * @param limit 6 | * Number of fine-tuning jobs to retrieve. 7 | */ 8 | case class QueryParameters( 9 | after: Option[String] = None, 10 | limit: Option[Int] = None 11 | ) { 12 | 13 | def toMap: Map[String, String] = { 14 | val queryParams = after.map("after" -> _) ++ 15 | limit.map(_.toString).map("limit" -> _) 16 | queryParams.toMap 17 | } 18 | } 19 | 20 | object QueryParameters { 21 | val empty: QueryParameters = QueryParameters(None, None) 22 | } 23 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/finetuning/Type.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.finetuning 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Str 5 | 6 | abstract class Type(val value: String) 7 | 8 | object Type { 9 | implicit def typeRW(implicit byTypeValue: Map[String, Type]): SnakePickle.ReadWriter[Type] = SnakePickle 10 | .readwriter[ujson.Value] 11 | .bimap[Type]( 12 | `type` => SnakePickle.writeJs(`type`.value), 13 | jsonValue => 14 | SnakePickle.read[ujson.Value](jsonValue) match { 15 | case Str(value) => 16 | byTypeValue.get(value) match { 17 | case Some(t) => t 18 | case None => throw new Exception(s"Could not deserialize: $value") 19 | } 20 | case e => throw new Exception(s"Could not deserialize: $e") 21 | } 22 | ) 23 | } 24 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/images/ImageResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object ImageResponseData { 6 | 7 | case class ImageResponse( 8 | created: Int, 9 | data: Seq[GeneratedImageData] 10 | ) 11 | 12 | object ImageResponse { 13 | implicit val imageCreationResponseR: SnakePickle.Reader[ImageResponse] = SnakePickle.macroR[ImageResponse] 14 | } 15 
| 16 | case class GeneratedImageData(url: String) 17 | 18 | object GeneratedImageData { 19 | implicit val generatedImageDataR: SnakePickle.Reader[GeneratedImageData] = SnakePickle.macroR[GeneratedImageData] 20 | } 21 | 22 | } 23 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/images/ResponseFormat.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | sealed abstract class ResponseFormat(val value: String) 6 | 7 | object ResponseFormat { 8 | case object URL extends ResponseFormat("url") 9 | 10 | case object B64Json extends ResponseFormat("b64_json") 11 | 12 | /** Use only as a workaround if API supports a format that's not yet predefined as a case object of Response Format. Otherwise, a custom 13 | * format would be rejected. See [[https://platform.openai.com/docs/api-reference/images/create-edit]] for current list of supported 14 | * formats 15 | */ 16 | case class Custom(customResponseFormat: String) extends ResponseFormat(customResponseFormat) 17 | 18 | val values: Set[ResponseFormat] = Set(URL, B64Json) 19 | 20 | implicit val responseFormatW: SnakePickle.Writer[ResponseFormat] = SnakePickle 21 | .writer[ujson.Value] 22 | .comap[ResponseFormat](_.value) 23 | } 24 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/images/Size.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | sealed abstract class Size(val value: String) 6 | 7 | object Size { 8 | case object Small extends Size("256x256") 9 | 10 | case object Medium extends Size("512x512") 11 | 12 | case object Large extends Size("1024x1024") 13 | 14 | /** Use only as a workaround if API supports a format that's not yet predefined as a case object of Size. Otherwise, a custom format would 15 | * be rejected. See [[https://platform.openai.com/docs/api-reference/images/create-edit]] for current list of supported formats 16 | */ 17 | case class Custom(customSize: String) extends Size(customSize) 18 | 19 | val values: Set[Size] = Set(Small, Medium, Large) 20 | 21 | implicit val sizeW: SnakePickle.Writer[Size] = SnakePickle 22 | .writer[ujson.Value] 23 | .comap[Size](_.value) 24 | } 25 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/images/creation/ImageCreationRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images.creation 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.images.Size 5 | import sttp.openai.requests.images.ResponseFormat 6 | 7 | object ImageCreationRequestBody { 8 | 9 | /** @param prompt 10 | * A text description of the desired image(s). The maximum length is 1000 characters. 11 | * @param model 12 | * A name of the model to use for image generation 13 | * @param n 14 | * The number of images to generate. Must be between 1 and 10. 15 | * @param size 16 | * The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x1024`. 17 | * @param responseFormat 18 | * The format in which the generated images are returned. Must be one of `url`` or `b64_json`. 
19 | * @param user 20 | * A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. 21 | */ 22 | case class ImageCreationBody( 23 | prompt: String, 24 | model: String, 25 | n: Option[Int] = None, 26 | size: Option[Size] = None, 27 | responseFormat: Option[ResponseFormat] = None, 28 | user: Option[String] = None 29 | ) 30 | 31 | object ImageCreationBody { 32 | implicit val imageCreationBodyW: SnakePickle.Writer[ImageCreationBody] = SnakePickle.macroW[ImageCreationBody] 33 | } 34 | 35 | } 36 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/images/edit/ImageEditsConfig.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images.edit 2 | 3 | import sttp.openai.requests.images.{ResponseFormat, Size} 4 | 5 | import java.io.File 6 | import java.nio.file.Paths 7 | 8 | /** @param image 9 | * The image to edit. Must be a valid PNG file, less than 4MB, and square. If mask is not provided, image must have transparency, which 10 | * will be used as the mask. 11 | * @param prompt 12 | * A text description of the desired image(s). The maximum length is 1000 characters. 13 | * @param mask 14 | * An additional image whose fully transparent areas (e.g. where alpha is zero) indicate where image should be edited. Must be a valid 15 | * PNG file, less than 4MB, and have the same dimensions as image. 16 | * @param n 17 | * The number of images to generate. Must be between 1 and 10. 18 | * @param size 19 | * The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x1024`. 20 | * @param responseFormat 21 | * The format in which the generated images are returned. Must be one of `url`` or `b64_json`. 22 | * @param user 23 | * A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse 24 | */ 25 | case class ImageEditsConfig( 26 | image: File, 27 | prompt: String, 28 | mask: Option[File] = None, 29 | n: Option[Int] = None, 30 | size: Option[Size] = None, 31 | responseFormat: Option[ResponseFormat] = None, 32 | user: Option[String] = None 33 | ) 34 | 35 | object ImageEditsConfig { 36 | def createImageEditConfigWithSystemPaths( 37 | systemPathImage: String, 38 | prompt: String, 39 | systemPathMask: Option[String], 40 | n: Option[Int], 41 | size: Option[Size], 42 | responseFormat: Option[ResponseFormat], 43 | user: Option[String] 44 | ): ImageEditsConfig = { 45 | val image: File = Paths.get(systemPathImage).toFile 46 | val mask: Option[File] = systemPathMask.map(Paths.get(_).toFile) 47 | 48 | ImageEditsConfig(image, prompt, mask, n, size, responseFormat, user) 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/images/variations/ImageVariationsConfig.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images.variations 2 | 3 | import sttp.openai.requests.images.{ResponseFormat, Size} 4 | 5 | import java.io.File 6 | import java.nio.file.Paths 7 | 8 | /** @param image 9 | * The image to use as the basis for the variation(s). Must be a valid PNG file, less than 4MB, and square. 10 | * @param n 11 | * The number of images to generate. Must be between 1 and 10. 12 | * @param size 13 | * The size of the generated images. Must be one of `256x256`, `512x512`, or `1024x1024`. 
14 | * @param responseFormat 15 | * The format in which the generated images are returned. Must be one of `url` or `b64_json`. 16 | * @param user 17 | * A unique identifier representing your end-user, which can help OpenAI to monitor and detect abuse. 18 | */ 19 | case class ImageVariationsConfig( 20 | image: File, 21 | n: Option[Int] = None, 22 | size: Option[Size] = None, 23 | responseFormat: Option[ResponseFormat] = None, 24 | user: Option[String] = None 25 | ) 26 | 27 | object ImageVariationsConfig { 28 | def createImageVariationConfigWithSystemPaths( 29 | systemPathImage: String, 30 | n: Option[Int], 31 | size: Option[Size], 32 | responseFormat: Option[ResponseFormat], 33 | user: Option[String] 34 | ): ImageVariationsConfig = 35 | ImageVariationsConfig( 36 | Paths.get(systemPathImage).toFile, 37 | n, 38 | size, 39 | responseFormat, 40 | user 41 | ) 42 | } 43 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/models/ModelsResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.models 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object ModelsResponseData { 6 | 7 | case class DeletedModelData( 8 | id: String, 9 | `object`: String, 10 | deleted: Boolean 11 | ) 12 | 13 | object DeletedModelData { 14 | implicit val deletedModelDataR: SnakePickle.Reader[DeletedModelData] = SnakePickle.macroR[DeletedModelData] 15 | } 16 | 17 | case class ModelData( 18 | id: String, 19 | `object`: String, 20 | created: Int, 21 | ownedBy: String, 22 | permission: Seq[ModelPermission], 23 | root: String, 24 | parent: Option[String] 25 | ) 26 | 27 | object ModelData { 28 | implicit def dataReader: SnakePickle.Reader[ModelData] = SnakePickle.macroR[ModelData] 29 | } 30 | 31 | case class ModelPermission( 32 | id: String, 33 | `object`: String, 34 | created: Int, 35 | allowCreateEngine: Boolean, 36 | allowSampling: Boolean, 37 | allowLogprobs: Boolean, 38 | allowSearchIndices: Boolean, 39 | allowView: Boolean, 40 | allowFineTuning: Boolean, 41 | organization: String, 42 | group: Option[String], 43 | isBlocking: Boolean 44 | ) 45 | 46 | object ModelPermission { 47 | implicit def permissionR: SnakePickle.Reader[ModelPermission] = SnakePickle.macroR[ModelPermission] 48 | } 49 | 50 | case class ModelsResponse(`object`: String, data: Seq[ModelData]) 51 | 52 | object ModelsResponse { 53 | implicit def modelsResponseR: SnakePickle.Reader[ModelsResponse] = SnakePickle.macroR[ModelsResponse] 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/moderations/ModerationsRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.moderations 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Str 5 | object ModerationsRequestBody { 6 | 7 | /** @param input 8 | * The input text to classify. 9 | * @param model 10 | * Specifies content moderation models of [[ModerationModel]]. 
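*
* Example (an illustrative sketch; the input text is an arbitrary assumption):
* {{{
*   val body = ModerationsBody(
*     input = "Some text to classify for policy violations",
*     model = Some(ModerationModel.TextModerationLatest)
*   )
* }}}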
11 | */ 12 | case class ModerationsBody(input: String, model: Option[ModerationModel] = None) 13 | 14 | object ModerationsBody { 15 | implicit val moderationsBodyWriter: SnakePickle.Writer[ModerationsBody] = SnakePickle.macroW[ModerationsBody] 16 | } 17 | sealed abstract class ModerationModel(val value: String) 18 | object ModerationModel { 19 | implicit val moderationsBodyWriter: SnakePickle.ReadWriter[ModerationModel] = SnakePickle 20 | .readwriter[ujson.Value] 21 | .bimap[ModerationModel]( 22 | model => SnakePickle.writeJs(model.value), 23 | jsonValue => 24 | SnakePickle.read[ujson.Value](jsonValue) match { 25 | case Str(value) => 26 | byModerationModelValue.getOrElse(value, CustomModerationModel(value)) 27 | case e => throw new Exception(s"Could not deserialize: $e") 28 | } 29 | ) 30 | 31 | case object TextModerationStable extends ModerationModel("text-moderation-stable") 32 | 33 | case object TextModerationLatest extends ModerationModel("text-moderation-latest") 34 | 35 | case class CustomModerationModel(customModerationModel: String) extends ModerationModel(customModerationModel) 36 | 37 | val values: Set[ModerationModel] = Set(TextModerationStable, TextModerationLatest) 38 | 39 | private val byModerationModelValue = values.map(model => model.value -> model).toMap 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/moderations/ModerationsResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.moderations 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.moderations.ModerationsRequestBody.ModerationModel 5 | 6 | object ModerationsResponseData { 7 | case class CategoryScores( 8 | sexual: Double, 9 | hate: Double, 10 | violence: Double, 11 | `self-harm`: Double, 12 | `sexual/minors`: Double, 13 | `hate/threatening`: Double, 14 | `violence/graphic`: Double 15 | ) 16 | 17 | object CategoryScores { 18 | implicit val categoryScoresInfoReader: SnakePickle.Reader[CategoryScores] = SnakePickle.macroR[CategoryScores] 19 | } 20 | 21 | case class Categories( 22 | sexual: Boolean, 23 | hate: Boolean, 24 | violence: Boolean, 25 | `self-harm`: Boolean, 26 | `sexual/minors`: Boolean, 27 | `hate/threatening`: Boolean, 28 | `violence/graphic`: Boolean 29 | ) 30 | object Categories { 31 | implicit val categoriesInfoReader: SnakePickle.Reader[Categories] = SnakePickle.macroR[Categories] 32 | } 33 | case class Result( 34 | flagged: Boolean, 35 | categories: Categories, 36 | categoryScores: CategoryScores 37 | ) 38 | 39 | object Result { 40 | implicit val resultsInfoReader: SnakePickle.Reader[Result] = SnakePickle.macroR[Result] 41 | } 42 | case class ModerationData( 43 | id: String, 44 | model: ModerationModel, 45 | results: Seq[Result] 46 | ) 47 | object ModerationData { 48 | implicit val morderationInfoReader: SnakePickle.Reader[ModerationData] = SnakePickle.macroR[ModerationData] 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/threads/QueryParameters.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.threads 2 | 3 | /** @param limit 4 | * Defaults to 20 A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. 
5 | * @param order 6 | * Defaults to desc Sort order by the created_at timestamp of the objects. asc for ascending order and desc for descending order. 7 | * @param after 8 | * A cursor for use in pagination. after is an object ID that defines your place in the list. For instance, if you make a list request 9 | * and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the 10 | * list. 11 | * @param before 12 | * A cursor for use in pagination. before is an object ID that defines your place in the list. For instance, if you make a list request 13 | * and receive 100 objects, ending with obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of 14 | * the list. 15 | */ 16 | case class QueryParameters( 17 | limit: Option[Int] = None, 18 | order: Option[String] = None, 19 | after: Option[String] = None, 20 | before: Option[String] = None 21 | ) { 22 | 23 | def toMap: Map[String, String] = { 24 | val queryParams = limit.map(_.toString).map("limit" -> _) ++ 25 | order.map("order" -> _) ++ 26 | after.map("after" -> _) ++ 27 | before.map("before" -> _) 28 | queryParams.toMap 29 | } 30 | } 31 | 32 | object QueryParameters { 33 | val empty: QueryParameters = QueryParameters(None, None, None, None) 34 | } 35 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/threads/ThreadsRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.threads 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.completions.chat.message.ToolResources 5 | import sttp.openai.requests.threads.messages.ThreadMessagesRequestBody.CreateMessage 6 | 7 | object ThreadsRequestBody { 8 | 9 | /** @param messages 10 | * A list of messages to start the thread with. 11 | * @param metadata 12 | * Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information about the object 13 | * in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters long. 14 | * 15 | * For more information please visit: [[https://platform.openai.com/docs/api-reference/threads/createThread]] 16 | */ 17 | case class CreateThreadBody( 18 | messages: Option[Seq[CreateMessage]] = None, 19 | toolResources: Option[ToolResources] = None, 20 | metadata: Option[Map[String, String]] = None 21 | ) 22 | 23 | object CreateThreadBody { 24 | implicit val completionBodyW: SnakePickle.Writer[CreateThreadBody] = SnakePickle.macroW[CreateThreadBody] 25 | } 26 | 27 | } 28 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/threads/ThreadsResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.threads 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object ThreadsResponseData { 6 | 7 | /** @param id 8 | * string The identifier, which can be referenced in API endpoints. 9 | * 10 | * @param object 11 | * string The object type, which is always thread. 12 | * 13 | * @param createdAt 14 | * integer The Unix timestamp (in seconds) for when the thread was created. 15 | * 16 | * @param metadata 17 | * map Set of 16 key-value pairs that can be attached to an object. 
This can be useful for storing additional information about the 18 | * object in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters long. For 19 | * more information please visit: [[https://platform.openai.com/docs/api-reference/threads/object]] 20 | */ 21 | case class ThreadData( 22 | id: String, 23 | `object`: String = "thread", 24 | createdAt: Option[Int] = None, 25 | metadata: Map[String, String] = Map.empty 26 | ) 27 | 28 | object ThreadData { 29 | implicit val threadDataR: SnakePickle.Reader[ThreadData] = SnakePickle.macroR[ThreadData] 30 | } 31 | 32 | /** @param id 33 | * @param `object` 34 | * thread.deleted 35 | * @param deleted 36 | */ 37 | case class DeleteThreadResponse( 38 | id: String, 39 | `object`: String, 40 | deleted: Boolean 41 | ) 42 | 43 | object DeleteThreadResponse { 44 | implicit val deleteThreadResponseReadWriter: SnakePickle.ReadWriter[DeleteThreadResponse] = SnakePickle.macroRW[DeleteThreadResponse] 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/threads/messages/ThreadMessagesRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.threads.messages 2 | 3 | import sttp.openai.json.SnakePickle 4 | import sttp.openai.requests.completions.chat.message.Attachment 5 | 6 | object ThreadMessagesRequestBody { 7 | 8 | /** @param role 9 | * string Required The role of the entity that is creating the message. Currently only user is supported. 10 | * @param content 11 | * string Required The content of the message. 12 | * @param attachments 13 | * A list of files attached to the message, and the tools they were added to. 14 | * @param metadata 15 | * map Optional Set of 16 key-value pairs that can be attached to an object. This can be useful for storing additional information 16 | * about the object in a structured format. Keys can be a maximum of 64 characters long and values can be a maxium of 512 characters 17 | * long. 18 | * 19 | * For more information please visit: [[https://platform.openai.com/docs/api-reference/messages/createMessage]] 20 | */ 21 | case class CreateMessage( 22 | role: String, 23 | content: String, 24 | attachments: Option[Seq[Attachment]] = None, 25 | metadata: Option[Map[String, String]] = None 26 | ) 27 | 28 | object CreateMessage { 29 | implicit val completionBodyW: SnakePickle.Writer[CreateMessage] = SnakePickle.macroW[CreateMessage] 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/upload/UploadRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.upload 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** Represents the request body for uploading a file. 6 | * 7 | * @param filename 8 | * The name of the file to upload. 9 | * @param purpose 10 | * The intended purpose of the uploaded file. 11 | * @param bytes 12 | * The number of bytes in the file you are uploading. 13 | * @param mimeType 14 | * The MIME type of the file. 15 | * 16 | * This must fall within the supported MIME types for your file purpose. See the supported MIME types for assistants and vision. 
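*
* Example (an illustrative sketch; the file name, byte count, purpose and MIME type are assumptions):
* {{{
*   val body = UploadRequestBody(
*     filename = "training_data.jsonl",
*     purpose = "fine-tune",
*     bytes = 2147483,
*     mimeType = "text/jsonl"
*   )
* }}}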
17 | */ 18 | case class UploadRequestBody( 19 | filename: String, 20 | purpose: String, 21 | bytes: Int, 22 | mimeType: String 23 | ) 24 | 25 | object UploadRequestBody { 26 | implicit val uploadRequestBodyW: SnakePickle.Writer[UploadRequestBody] = SnakePickle.macroW[UploadRequestBody] 27 | } 28 | 29 | /** @param partIds 30 | * The ordered list of Part IDs. 31 | * @param md5 32 | * The optional md5 checksum for the file contents to verify if the bytes uploaded matches what you expect. 33 | */ 34 | case class CompleteUploadRequestBody( 35 | partIds: Seq[String], 36 | md5: Option[String] 37 | ) 38 | 39 | object CompleteUploadRequestBody { 40 | implicit val completeUploadRequestBodyW: SnakePickle.Writer[CompleteUploadRequestBody] = SnakePickle.macroW[CompleteUploadRequestBody] 41 | } 42 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/upload/UploadResponse.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.upload 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | /** Represents the response for an upload request. 6 | * 7 | * @param id 8 | * The Upload unique identifier, which can be referenced in API endpoints. 9 | * @param `object` 10 | * The object type, which is always "upload". 11 | * @param bytes 12 | * The intended number of bytes to be uploaded. 13 | * @param createdAt 14 | * The Unix timestamp (in seconds) for when the Upload was created. 15 | * @param filename 16 | * The name of the file to be uploaded. 17 | * @param purpose 18 | * The intended purpose of the file. Please refer here for acceptable values. 19 | * @param status 20 | * The status of the Upload. 21 | * @param expiresAt 22 | * The Unix timestamp (in seconds) for when the Upload will expire. 23 | * @param file 24 | * The File object represents a document that has been uploaded to OpenAI. 25 | */ 26 | case class UploadResponse( 27 | id: String, 28 | `object`: String = "upload", 29 | bytes: Int, 30 | createdAt: Int, 31 | filename: String, 32 | purpose: String, 33 | status: String, 34 | expiresAt: Int, 35 | file: Option[FileMetadata] 36 | ) 37 | 38 | object UploadResponse { 39 | implicit val uploadResponseR: SnakePickle.Reader[UploadResponse] = SnakePickle.macroR[UploadResponse] 40 | } 41 | 42 | case class FileMetadata( 43 | id: String, 44 | `object`: String, 45 | bytes: Int, 46 | createdAt: Int, 47 | filename: String, 48 | purpose: String 49 | ) 50 | 51 | object FileMetadata { 52 | implicit val fileR: SnakePickle.Reader[FileMetadata] = SnakePickle.macroR[FileMetadata] 53 | } 54 | 55 | /** Represents the response for an upload part. 56 | * 57 | * @param id 58 | * The upload Part unique identifier, which can be referenced in API endpoints. 59 | * @param createdAt 60 | * The Unix timestamp (in seconds) for when the Part was created. 61 | * @param uploadId 62 | * The ID of the Upload object that this Part was added to. 63 | * @param `object` 64 | * The object type, which is always upload.part. 
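*
* Example of reading OpenAI's snake_case payload into this type (an illustrative sketch; the identifiers are hypothetical):
* {{{
*   val json = """{"id": "part_def456", "created_at": 1719186911, "upload_id": "upload_abc123", "object": "upload.part"}"""
*   val part = SnakePickle.read[UploadPartResponse](json)
* }}}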
65 | */ 66 | case class UploadPartResponse( 67 | id: String, 68 | createdAt: Int, 69 | uploadId: String, 70 | `object`: String = "upload.part" 71 | ) 72 | 73 | object UploadPartResponse { 74 | implicit val uploadPartResponseR: SnakePickle.Reader[UploadPartResponse] = SnakePickle.macroR[UploadPartResponse] 75 | } 76 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/vectorstore/ExpiresAfter.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.vectorstore 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.{Obj, Value} 5 | 6 | /** Represents the expiration policy for a vector store. 7 | * 8 | * @param anchor 9 | * Required. Anchor timestamp after which the expiration policy applies. Supported anchors: last_active_at. 10 | * @param days 11 | * Required. The number of days after the anchor time that the vector store will expire. 12 | */ 13 | case class ExpiresAfter(anchor: String, days: Int) 14 | object ExpiresAfter { 15 | 16 | implicit val expiresAfterRW: SnakePickle.ReadWriter[ExpiresAfter] = SnakePickle 17 | .readwriter[Value] 18 | .bimap[ExpiresAfter]( 19 | ea => Obj("anchor" -> ea.anchor, "days" -> ea.days), 20 | json => ExpiresAfter(json("anchor").str, json("days").num.toInt) 21 | ) 22 | } 23 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.vectorstore 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object VectorStoreRequestBody { 6 | 7 | /** Represents options for creating vector store. 8 | * 9 | * @param fileIds 10 | * Optional. A list of File IDs that the vector store should use. Useful for tools like file_search that can access files. 11 | * @param name 12 | * Optional. The name of the vector store. 13 | * @param expiresAfter 14 | * Optional. The expiration policy for a vector store. 15 | * @param metadata 16 | * Optional. Set of 16 key-value pairs that can be attached to an object. Useful for storing additional information about the object in 17 | * a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. 18 | */ 19 | case class CreateVectorStoreBody( 20 | name: Option[String] = None, 21 | fileIds: Option[Seq[String]] = None, 22 | expiresAfter: Option[ExpiresAfter] = None, 23 | metadata: Option[Map[String, String]] = None 24 | ) 25 | 26 | object CreateVectorStoreBody { 27 | implicit val createVectorStoreBodyW: SnakePickle.Writer[CreateVectorStoreBody] = SnakePickle.macroW[CreateVectorStoreBody] 28 | } 29 | 30 | /** Represents options for modifying vector store. 31 | * @param name 32 | * Optional. The name of the vector store. 33 | * @param expiresAfter 34 | * Optional. The expiration policy for a vector store. 35 | * @param metadata 36 | * Optional. Set of 16 key-value pairs that can be attached to an object. Useful for storing additional information about the object in 37 | * a structured format. Keys can be a maximum of 64 characters long and values can be a maximum of 512 characters long. 
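*
* Example (an illustrative sketch; the name, expiration and metadata values are assumptions):
* {{{
*   val body = ModifyVectorStoreBody(
*     name = Some("Support FAQ"),
*     expiresAfter = Some(ExpiresAfter(anchor = "last_active_at", days = 7)),
*     metadata = Some(Map("project" -> "docs-bot"))
*   )
* }}}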
38 | */ 39 | case class ModifyVectorStoreBody( 40 | name: Option[String] = None, 41 | expiresAfter: Option[ExpiresAfter] = None, 42 | metadata: Option[Map[String, String]] = None 43 | ) 44 | 45 | object ModifyVectorStoreBody { 46 | implicit val modifyVectorStoreBody: SnakePickle.Writer[ModifyVectorStoreBody] = SnakePickle.macroW[ModifyVectorStoreBody] 47 | } 48 | 49 | } 50 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/vectorstore/VectorStoreResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.vectorstore 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Value 5 | 6 | object VectorStoreResponseData { 7 | 8 | /** Represents a vector store object. 9 | * 10 | * @param id 11 | * The identifier, which can be referenced in API endpoints. 12 | * @param `object` 13 | * The object type, which is always vector_store. 14 | * @param createdAt 15 | * The Unix timestamp (in seconds) for when the vector store was created. 16 | * @param name 17 | * The name of the vector store. 18 | * @param usageBytes 19 | * The total number of bytes used by the files in the vector store. 20 | * @param fileCounts 21 | * Object containing file count properties. 22 | * @param status 23 | * The status of the vector store. 24 | * @param expiresAfter 25 | * The expiration policy for a vector store. 26 | * @param metadata 27 | * Set of key-value pairs that can be attached to an object. 28 | */ 29 | case class VectorStore( 30 | id: String, 31 | `object`: String = "vector_store", 32 | createdAt: Int, 33 | name: String, 34 | usageBytes: Int, 35 | fileCounts: FileCounts, 36 | status: StoreStatus, 37 | expiresAfter: Option[ExpiresAfter] = None, 38 | expiresAt: Option[Int] = None, 39 | lastActiveAt: Option[Int] = None, 40 | lastUsedAt: Option[Int] = None, 41 | metadata: Map[String, String] = Map.empty 42 | ) 43 | 44 | object VectorStore { 45 | implicit val vectorStoreR: SnakePickle.Reader[VectorStore] = SnakePickle.macroR[VectorStore] 46 | } 47 | 48 | /** Describes number of files in different statuses. 49 | * 50 | * @param inProgress 51 | * The number of files currently in progress. 52 | * @param completed 53 | * The number of files that have been completed successfully. 54 | * @param failed 55 | * The number of files that have failed. 56 | * @param cancelled 57 | * The number of files that have been cancelled. 58 | * @param total 59 | * The total number of files. 60 | */ 61 | case class FileCounts( 62 | inProgress: Int, 63 | completed: Int, 64 | failed: Int, 65 | cancelled: Int, 66 | total: Int 67 | ) 68 | 69 | object FileCounts { 70 | implicit val fileCountsR: SnakePickle.Reader[FileCounts] = SnakePickle.macroR[FileCounts] 71 | } 72 | 73 | sealed trait StoreStatus 74 | case object InProgress extends StoreStatus 75 | case object Completed extends StoreStatus 76 | case object Expired extends StoreStatus 77 | 78 | object StoreStatus { 79 | implicit val storeStatusR: SnakePickle.Reader[StoreStatus] = SnakePickle 80 | .reader[Value] 81 | .map(json => 82 | json.str match { 83 | case "in_progress" => InProgress 84 | case "completed" => Completed 85 | case "expired" => Expired 86 | } 87 | ) 88 | } 89 | 90 | /** @param object 91 | * Always "list" 92 | * @param data 93 | * A list of vector store objects. 
94 | * @param firstId 95 | * Id of first object 96 | * @param lastId 97 | * Id of last object 98 | * @param hasMore 99 | * Denotes if there are more object available 100 | */ 101 | case class ListVectorStoresResponse( 102 | `object`: String = "list", 103 | data: Seq[VectorStore], 104 | firstId: String, 105 | lastId: String, 106 | hasMore: Boolean 107 | ) 108 | 109 | object ListVectorStoresResponse { 110 | implicit val listVectorStoresResponseR: SnakePickle.Reader[ListVectorStoresResponse] = SnakePickle.macroR[ListVectorStoresResponse] 111 | } 112 | 113 | /** @param id 114 | * Id of deleted object 115 | * @param `object` 116 | * vector_store.deleted 117 | * @param deleted 118 | * boolean describing whether or not operation was successful For more information please visit: 119 | * [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistant]] 120 | */ 121 | case class DeleteVectorStoreResponse( 122 | id: String, 123 | `object`: String, 124 | deleted: Boolean 125 | ) 126 | 127 | object DeleteVectorStoreResponse { 128 | implicit val deleteVectorStoreResponseR: SnakePickle.Reader[DeleteVectorStoreResponse] = SnakePickle.macroR[DeleteVectorStoreResponse] 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/vectorstore/file/FileStatus.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.vectorstore.file 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Value 5 | 6 | sealed trait FileStatus 7 | case object InProgress extends FileStatus 8 | case object Completed extends FileStatus 9 | case object Failed extends FileStatus 10 | case object Cancelled extends FileStatus 11 | 12 | object FileStatus { 13 | implicit val expiresAfterRW: SnakePickle.ReadWriter[FileStatus] = SnakePickle 14 | .readwriter[Value] 15 | .bimap[FileStatus]( 16 | { 17 | case InProgress => "in_progress" 18 | case Completed => "completed" 19 | case Failed => "failed" 20 | case Cancelled => "cancelled" 21 | }, 22 | json => 23 | json.str match { 24 | case "in_progress" => InProgress 25 | case "completed" => Completed 26 | case "failed" => Failed 27 | case "cancelled" => Cancelled 28 | } 29 | ) 30 | } 31 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileRequestBody.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.vectorstore.file 2 | 3 | import sttp.openai.json.SnakePickle 4 | 5 | object VectorStoreFileRequestBody { 6 | 7 | /** Create a vector store file by attaching a File to a vector store. 8 | * 9 | * @param fileId 10 | * A File ID that the vector store should use. Useful for tools like file_search that can access files. 11 | */ 12 | case class CreateVectorStoreFileBody( 13 | fileId: String 14 | ) 15 | 16 | object CreateVectorStoreFileBody { 17 | implicit val createVectorStoreFileBodyR: SnakePickle.Writer[CreateVectorStoreFileBody] = SnakePickle.macroW[CreateVectorStoreFileBody] 18 | } 19 | 20 | /** Represents options for listing objects with pagination and filtering. 21 | * 22 | * @param limit 23 | * Defaults to 20 A limit on the number of objects to be returned. Limit can range between 1 and 100, and the default is 20. 24 | * @param order 25 | * Defaults to desc Sort order by the created_at timestamp of the objects. asc for ascending order and desc for descending order. 
26 | * @param after 27 | * A cursor for use in pagination. after is an object ID that defines your place in the list. For instance, if you make a list request 28 | * and receive 100 objects, ending with obj_foo, your subsequent call can include after=obj_foo in order to fetch the next page of the 29 | * list. 30 | * @param before 31 | * A cursor for use in pagination. before is an object ID that defines your place in the list. For instance, if you make a list request 32 | * and receive 100 objects, ending with obj_foo, your subsequent call can include before=obj_foo in order to fetch the previous page of 33 | * the list. 34 | * @param filter 35 | * Optional. Filter by file status. Possible values are "in_progress", "completed", "failed", "cancelled". 36 | */ 37 | case class ListVectorStoreFilesBody( 38 | limit: Int = 20, 39 | order: String = "desc", 40 | after: Option[String] = None, 41 | before: Option[String] = None, 42 | filter: Option[FileStatus] = None 43 | ) { 44 | def toMap: Map[String, String] = { 45 | val map = Map("limit" -> limit.toString, "order" -> order) 46 | map ++ 47 | after.map("after" -> _) ++ 48 | before.map("before" -> _) ++ 49 | filter.map("filter" -> _.toString) 50 | } 51 | } 52 | 53 | object ListVectorStoreFilesBody { 54 | implicit val listVectorStoreFilesBodyR: SnakePickle.Writer[ListVectorStoreFilesBody] = SnakePickle.macroW[ListVectorStoreFilesBody] 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /core/src/main/scala/sttp/openai/requests/vectorstore/file/VectorStoreFileResponseData.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.vectorstore.file 2 | 3 | import sttp.openai.json.SnakePickle 4 | import ujson.Value 5 | 6 | object VectorStoreFileResponseData { 7 | 8 | /** Represents a vector store file. 9 | * 10 | * @param id 11 | * The identifier, which can be referenced in API endpoints. 12 | * @param object 13 | * The object type, which is always vector_store.file. 14 | * @param usageBytes 15 | * The total vector store usage in bytes. Note that this may be different from the original file size. 16 | * @param createdAt 17 | * The Unix timestamp (in seconds) for when the vector store file was created. 18 | * @param vectorStoreId 19 | * The ID of the vector store that the File is attached to. 20 | * @param status 21 | * The status of the vector store file. Possible values are "in_progress", "completed", "cancelled", or "failed". The status 22 | * "completed" indicates that the vector store file is ready for use. 23 | * @param lastError 24 | * The last error associated with this vector store file, or null if there are no errors. 25 | */ 26 | case class VectorStoreFile( 27 | id: String, 28 | `object`: String, 29 | usageBytes: Int, 30 | createdAt: Int, 31 | vectorStoreId: String, 32 | status: FileStatus, 33 | lastError: Option[LastError] = None 34 | ) 35 | 36 | object VectorStoreFile { 37 | implicit val vectorStoreFileR: SnakePickle.Reader[VectorStoreFile] = SnakePickle.macroR[VectorStoreFile] 38 | } 39 | 40 | /** Represents the last error associated with a vector store file. 41 | * 42 | * @param code 43 | * The error code. Possible values are "server_error" or "rate_limit_exceeded". 44 | * @param message 45 | * A human-readable description of the error. 
46 | */ 47 | case class LastError(code: ErrorCode, message: String) 48 | 49 | object LastError { 50 | implicit val lastErrorR: SnakePickle.Reader[LastError] = SnakePickle.macroR[LastError] 51 | } 52 | 53 | sealed trait ErrorCode 54 | case object ServerError extends ErrorCode 55 | case object RateLimitExceeded extends ErrorCode 56 | 57 | object ErrorCode { 58 | implicit val errorCodeR: SnakePickle.Reader[ErrorCode] = SnakePickle 59 | .reader[Value] 60 | .map(json => 61 | json.str match { 62 | case "server_error" => ServerError 63 | case "rate_limit_exceeded" => RateLimitExceeded 64 | } 65 | ) 66 | } 67 | 68 | /** @param object 69 | * Always "list" 70 | * @param data 71 | * A list of vector store file objects. 72 | * @param firstId 73 | * Id of first object 74 | * @param lastId 75 | * Id of last object 76 | * @param hasMore 77 | * Denotes if there are more object available 78 | */ 79 | case class ListVectorStoreFilesResponse( 80 | `object`: String = "list", 81 | data: Seq[VectorStoreFile], 82 | firstId: String, 83 | lastId: String, 84 | hasMore: Boolean 85 | ) 86 | 87 | object ListVectorStoreFilesResponse { 88 | implicit val listVectorStoreFilesResponseR: SnakePickle.Reader[ListVectorStoreFilesResponse] = 89 | SnakePickle.macroR[ListVectorStoreFilesResponse] 90 | } 91 | 92 | /** @param id 93 | * Id of deleted object 94 | * @param `object` 95 | * vector_store.file.deleted 96 | * @param deleted 97 | * boolean describing whether or not operation was successful For more information please visit: 98 | * [[https://platform.openai.com/docs/api-reference/assistants/deleteAssistant]] 99 | */ 100 | case class DeleteVectorStoreFileResponse( 101 | id: String, 102 | `object`: String, 103 | deleted: Boolean 104 | ) 105 | 106 | object DeleteVectorStoreFileResponse { 107 | implicit val deleteVectorStoreFileResponseR: SnakePickle.Reader[DeleteVectorStoreFileResponse] = 108 | SnakePickle.macroR[DeleteVectorStoreFileResponse] 109 | } 110 | 111 | } 112 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/client/SyncClientSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.client 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.client4._ 7 | import sttp.client4.testing.ResponseStub 8 | import sttp.model.StatusCode 9 | import sttp.model.StatusCode._ 10 | import sttp.openai.OpenAIExceptions.OpenAIException 11 | import sttp.openai.{CustomizeOpenAIRequest, OpenAISyncClient} 12 | import sttp.openai.fixtures.ErrorFixture 13 | import sttp.openai.requests.models.ModelsResponseData._ 14 | 15 | import java.util.concurrent.atomic.AtomicReference 16 | 17 | class SyncClientSpec extends AnyFlatSpec with Matchers with EitherValues { 18 | for ((statusCode, expectedError) <- ErrorFixture.testData) 19 | s"Service response with status code: $statusCode" should s"return properly deserialized ${expectedError.getClass.getSimpleName}" in { 20 | // given 21 | val syncBackendStub = DefaultSyncBackend.stub.whenAnyRequest.thenRespondAdjust(ErrorFixture.errorResponse, statusCode) 22 | val syncClient = OpenAISyncClient(authToken = "test-token", backend = syncBackendStub) 23 | 24 | // when 25 | val caught = intercept[OpenAIException](syncClient.getModels) 26 | 27 | // then 28 | caught.getClass shouldBe expectedError.getClass: Unit 29 | caught.message shouldBe expectedError.message: Unit 30 | caught.cause.getClass 
shouldBe expectedError.cause.getClass: Unit 31 | caught.code shouldBe expectedError.code: Unit 32 | caught.param shouldBe expectedError.param: Unit 33 | caught.`type` shouldBe expectedError.`type` 34 | } 35 | 36 | "Fetching models with successful response" should "return properly deserialized list of available models" in { 37 | // given 38 | val modelsResponse = sttp.openai.fixtures.ModelsGetResponse.singleModelResponse 39 | val syncBackendStub = DefaultSyncBackend.stub.whenAnyRequest.thenRespondAdjust(modelsResponse, Ok) 40 | val syncClient = OpenAISyncClient(authToken = "test-token", backend = syncBackendStub) 41 | val deserializedModels = ModelsResponse( 42 | `object` = "list", 43 | data = Seq( 44 | ModelData( 45 | id = "babbage", 46 | `object` = "model", 47 | created = 1649358449, 48 | ownedBy = "openai", 49 | permission = Seq( 50 | ModelPermission( 51 | id = "modelperm-49FUp5v084tBB49tC4z8LPH5", 52 | `object` = "model_permission", 53 | created = 1669085501, 54 | allowCreateEngine = false, 55 | allowSampling = true, 56 | allowLogprobs = true, 57 | allowSearchIndices = false, 58 | allowView = true, 59 | allowFineTuning = false, 60 | organization = "*", 61 | group = None, 62 | isBlocking = false 63 | ) 64 | ), 65 | root = "babbage", 66 | parent = None 67 | ) 68 | ) 69 | ) 70 | 71 | // when & then 72 | syncClient.getModels shouldBe deserializedModels 73 | } 74 | 75 | "Customizing the request" should "be additive" in { 76 | // given 77 | val capturedRequest = new AtomicReference[GenericRequest[_, _]](null) 78 | val syncBackendStub = DefaultSyncBackend.stub.whenAnyRequest.thenRespondF { request => 79 | capturedRequest.set(request) 80 | ResponseStub.adjust(sttp.openai.fixtures.ModelsGetResponse.singleModelResponse, StatusCode.Ok) 81 | } 82 | val syncClient = OpenAISyncClient(authToken = "test-token", backend = syncBackendStub) 83 | 84 | // when 85 | syncClient 86 | .customizeRequest(new CustomizeOpenAIRequest { 87 | override def apply[A](request: Request[Either[OpenAIException, A]]): Request[Either[OpenAIException, A]] = 88 | request.header("X-Test", "test") 89 | }) 90 | .customizeRequest(new CustomizeOpenAIRequest { 91 | override def apply[A](request: Request[Either[OpenAIException, A]]): Request[Either[OpenAIException, A]] = 92 | request.header("X-Test-2", "test-2") 93 | }) 94 | .getModels: Unit 95 | 96 | // then 97 | capturedRequest.get().headers.find(_.is("X-Test")).map(_.value) shouldBe Some("test"): Unit 98 | capturedRequest.get().headers.find(_.is("X-Test-2")).map(_.value) shouldBe Some("test-2") 99 | } 100 | } 101 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/AdminFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | import sttp.openai.requests.admin.{AdminApiKeyResponse, Owner} 4 | 5 | object AdminFixture { 6 | 7 | val jsonRequest: String = 8 | """{ 9 | | "name": "api_key_name" 10 | |}""".stripMargin 11 | 12 | val jsonResponse: String = 13 | """{ 14 | | "object": "organization.admin_api_key", 15 | | "id": "key_xyz", 16 | | "name": "New Admin Key", 17 | | "redacted_value": "sk-admin...xyz", 18 | | "created_at": 1711471533, 19 | | "owner": { 20 | | "type": "user", 21 | | "object": "organization.user", 22 | | "id": "user_123", 23 | | "name": "John Doe", 24 | | "created_at": 1711471533, 25 | | "role": "owner" 26 | | }, 27 | | "value": "sk-admin-1234abcd" 28 | |}""".stripMargin 29 | 30 | val jsonListResponse: String = 31 | s"""{ 
32 | | "object": "list", 33 | | "data": [$jsonResponse], 34 | | "first_id": "key_abc", 35 | | "last_id": "key_abc", 36 | | "has_more": false 37 | |}""".stripMargin 38 | 39 | val jsonDeleteResponse: String = 40 | """{ 41 | | "id": "key_abc", 42 | | "object": "organization.admin_api_key.deleted", 43 | | "deleted": true 44 | |}""".stripMargin 45 | 46 | val adminApiKeyResponse: AdminApiKeyResponse = AdminApiKeyResponse( 47 | id = "key_xyz", 48 | name = "New Admin Key", 49 | redactedValue = "sk-admin...xyz", 50 | createdAt = 1711471533, 51 | owner = Owner( 52 | `type` = "user", 53 | `object` = "organization.user", 54 | id = "user_123", 55 | name = "John Doe", 56 | createdAt = 1711471533, 57 | role = "owner" 58 | ), 59 | value = Some("sk-admin-1234abcd") 60 | ) 61 | 62 | } 63 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/AudioFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object AudioFixture { 4 | val jsonResponse: String = """{ 5 | | "text": "Imagine the wildest idea that you've ever had, and you're curious about how it might scale to something that's a 100, a 1,000 times bigger. This is a place where you can get to do that." 6 | |}""".stripMargin 7 | 8 | val jsonCreateSpeechRequest: String = """{ 9 | | "model": "tts-1", 10 | | "input": "Hello, my name is John.", 11 | | "voice": "alloy", 12 | | "response_format": "mp3", 13 | | "speed": 1.0 14 | |}""".stripMargin 15 | } 16 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/BatchFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | import sttp.openai.requests.batch.{BatchResponse, RequestCounts} 4 | 5 | object BatchFixture { 6 | 7 | val jsonCreateBatchRequest: String = """{ 8 | | "input_file_id": "file-id", 9 | | "endpoint": "/v1/chat/completions", 10 | | "completion_window": "24h", 11 | | "metadata": { 12 | | "key1": "value1", 13 | | "key2": "value2" 14 | | } 15 | |}""".stripMargin 16 | 17 | val jsonCreateBatchResponse: String = """{ 18 | | "id": "batch_abc123", 19 | | "object": "batch", 20 | | "endpoint": "/v1/completions", 21 | | "errors": null, 22 | | "input_file_id": "file-abc123", 23 | | "completion_window": "24h", 24 | | "status": "completed", 25 | | "output_file_id": "file-cvaTdG", 26 | | "error_file_id": "file-HOWS94", 27 | | "created_at": 1711471533, 28 | | "in_progress_at": 1711471538, 29 | | "expires_at": 1711557933, 30 | | "finalizing_at": 1711493133, 31 | | "completed_at": 1711493163, 32 | | "failed_at": null, 33 | | "expired_at": null, 34 | | "cancelling_at": null, 35 | | "cancelled_at": null, 36 | | "request_counts": { 37 | | "total": 100, 38 | | "completed": 95, 39 | | "failed": 5 40 | | }, 41 | | "metadata": { 42 | | "customer_id": "user_123456789", 43 | | "batch_description": "Nightly eval job" 44 | | } 45 | |}""".stripMargin 46 | 47 | val jsonListBatchResponse: String = s"""{ 48 | | "object": "list", 49 | | "data": [$jsonCreateBatchResponse], 50 | | "first_id": "ftckpt_zc4Q7MP6XxulcVzj4MZdwsAB", 51 | | "last_id": "ftckpt_enQCFmOTGj3syEpYVhBRLTSy", 52 | | "has_more": true 53 | |}""".stripMargin 54 | 55 | val batchResponse: BatchResponse = BatchResponse( 56 | id = "batch_abc123", 57 | endpoint = "/v1/completions", 58 | errors = None, 59 | inputFileId = "file-abc123", 60 | completionWindow = "24h", 61 | status 
= "completed", 62 | outputFileId = Some("file-cvaTdG"), 63 | errorFileId = Some("file-HOWS94"), 64 | createdAt = 1711471533, 65 | inProgressAt = Some(1711471538), 66 | expiresAt = Some(1711557933), 67 | finalizingAt = Some(1711493133), 68 | completedAt = Some(1711493163), 69 | failedAt = None, 70 | expiredAt = None, 71 | cancellingAt = None, 72 | cancelledAt = None, 73 | requestCounts = Some(RequestCounts(total = 100, completed = 95, failed = 5)), 74 | metadata = Some(Map("customer_id" -> "user_123456789", "batch_description" -> "Nightly eval job")) 75 | ) 76 | 77 | } 78 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/EmbeddingsFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object EmbeddingsFixture { 4 | val jsonCreateEmbeddingsResponse = """{ 5 | | "object": "list", 6 | | "data": [ 7 | | { 8 | | "object": "embedding", 9 | | "index": 0, 10 | | "embedding": [ 11 | | 0.0023064255, 12 | | -0.009327292, 13 | | 0.015797347, 14 | | -0.0077780345, 15 | | -0.0046922187 16 | | ] 17 | | } 18 | | ], 19 | | "model": "text-embedding-ada-002", 20 | | "usage": { 21 | | "prompt_tokens": 8, 22 | | "total_tokens": 8 23 | | } 24 | |}""".stripMargin 25 | } 26 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/ErrorFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | import sttp.client4.ResponseException.UnexpectedStatusCode 4 | import sttp.client4.testing.ResponseStub 5 | import sttp.model.StatusCode 6 | import sttp.model.StatusCode._ 7 | import sttp.openai.OpenAIExceptions.OpenAIException 8 | import sttp.openai.OpenAIExceptions.OpenAIException._ 9 | 10 | object ErrorFixture { 11 | private val (message, errorType, param, code) = ("Some error message.", "error_type", "null", "invalid_api_key") 12 | 13 | val errorResponse = 14 | s""" 15 | |{ 16 | | "error": { 17 | | "message": "$message", 18 | | "type": "$errorType", 19 | | "param": $param, 20 | | "code": "$code" 21 | | } 22 | |}""".stripMargin 23 | 24 | val testData: Seq[(StatusCode, OpenAIException)] = List( 25 | ( 26 | TooManyRequests, 27 | new RateLimitException( 28 | Some(message), 29 | Some(errorType), 30 | None, 31 | Some(code), 32 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, TooManyRequests)) 33 | ) 34 | ), 35 | ( 36 | BadRequest, 37 | new InvalidRequestException( 38 | Some(message), 39 | Some(errorType), 40 | None, 41 | Some(code), 42 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, BadRequest)) 43 | ) 44 | ), 45 | ( 46 | NotFound, 47 | new InvalidRequestException( 48 | Some(message), 49 | Some(errorType), 50 | None, 51 | Some(code), 52 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, NotFound)) 53 | ) 54 | ), 55 | ( 56 | UnsupportedMediaType, 57 | new InvalidRequestException( 58 | Some(message), 59 | Some(errorType), 60 | None, 61 | Some(code), 62 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, UnsupportedMediaType)) 63 | ) 64 | ), 65 | ( 66 | Unauthorized, 67 | new AuthenticationException( 68 | Some(message), 69 | Some(errorType), 70 | None, 71 | Some(code), 72 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, Unauthorized)) 73 | ) 74 | ), 75 | ( 76 | Forbidden, 77 | new PermissionException( 78 | 
Some(message), 79 | Some(errorType), 80 | None, 81 | Some(code), 82 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, Forbidden)) 83 | ) 84 | ), 85 | ( 86 | Conflict, 87 | new TryAgain( 88 | Some(message), 89 | Some(errorType), 90 | None, 91 | Some(code), 92 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, Conflict)) 93 | ) 94 | ), 95 | ( 96 | ServiceUnavailable, 97 | new ServiceUnavailableException( 98 | Some(message), 99 | Some(errorType), 100 | None, 101 | Some(code), 102 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, ServiceUnavailable)) 103 | ) 104 | ), 105 | ( 106 | Gone, 107 | new APIException( 108 | Some(message), 109 | Some(errorType), 110 | None, 111 | Some(code), 112 | UnexpectedStatusCode(errorResponse, ResponseStub.adjust(errorResponse, Gone)) 113 | ) 114 | ) 115 | ) 116 | } 117 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/FilesResponse.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object FilesResponse { 4 | val listFilesJsonResponse: String = """{ 5 | | "object": "list", 6 | | "data": [ 7 | | { 8 | | "object": "file", 9 | | "id": "file-tralala", 10 | | "purpose": "fine-tune", 11 | | "filename": "example.jsonl", 12 | | "bytes": 44, 13 | | "created_at": 1681375533, 14 | | "status": "processed", 15 | | "status_details": null 16 | | } 17 | | ] 18 | |}""".stripMargin 19 | 20 | val singleFileJsonResponse: String = """{ 21 | | "object": "file", 22 | | "id": "file-tralala", 23 | | "purpose": "fine-tune", 24 | | "filename": "example.jsonl", 25 | | "bytes": 44, 26 | | "created_at": 1681375533, 27 | | "status": "uploaded", 28 | | "status_details": null 29 | |}""".stripMargin 30 | 31 | val deleteFileJsonResponse: String = """{ 32 | | "object": "file", 33 | | "id": "file-tralala", 34 | | "deleted": true 35 | |}""".stripMargin 36 | 37 | val retrieveFileJsonResponse: String = """{ 38 | | "object": "file", 39 | | "id": "file-tralala", 40 | | "purpose": "fine-tune", 41 | | "filename": "example.jsonl", 42 | | "bytes": 44, 43 | | "created_at": 1681375533, 44 | | "status": "processed", 45 | | "status_details": null 46 | |}""".stripMargin 47 | } 48 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/ImageCreationFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object ImageCreationFixture { 4 | val jsonRequest: String = """{ 5 | |"prompt": "cute fish", 6 | |"model": "dall-e-3", 7 | |"n": 1, 8 | |"size": "1024x1024", 9 | |"response_format": "url", 10 | |"user": "user1" 11 | |}""".stripMargin 12 | 13 | val jsonRequestDalle2: String = """{ 14 | |"prompt": "cute fish", 15 | |"model": "dall-e-2", 16 | |"n": 1, 17 | |"size": "1024x1024", 18 | |"response_format": "url", 19 | |"user": "user1" 20 | |}""".stripMargin 21 | 22 | val jsonResponse: String = """{ 23 | |"created": 1681893694, 24 | |"data": [ 25 | | { 26 | | "url": "https://generated.image.url" 27 | | } 28 | |] 29 | |}""".stripMargin 30 | } 31 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/JsonSchemaFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object JsonSchemaFixture { 4 | 5 | val 
stringSchema: String = 6 | """{ 7 | | "type": "json_schema", 8 | | "json_schema": { 9 | | "name": "testString", 10 | | "strict": true, 11 | | "schema": { 12 | | "type": "string" 13 | | } 14 | | } 15 | |}""".stripMargin 16 | 17 | val numberSchema: String = 18 | """{ 19 | | "type": "json_schema", 20 | | "json_schema": { 21 | | "name": "testNumber", 22 | | "strict": true, 23 | | "schema": { 24 | | "type": "number" 25 | | } 26 | | } 27 | |}""".stripMargin 28 | 29 | val objectSchema: String = 30 | """{ 31 | | "type": "json_schema", 32 | | "json_schema": { 33 | | "name": "testObject", 34 | | "strict": true, 35 | | "schema": { 36 | | "additionalProperties": false, 37 | | "required": [ 38 | | "foo", 39 | | "bar" 40 | | ], 41 | | "type": "object", 42 | | "properties": { 43 | | "foo": { 44 | | "type": "string" 45 | | }, 46 | | "bar": { 47 | | "type": "number" 48 | | } 49 | | } 50 | | } 51 | | } 52 | |}""".stripMargin 53 | 54 | val arraySchema: String = 55 | """{ 56 | | "type": "json_schema", 57 | | "json_schema": { 58 | | "name": "testArray", 59 | | "strict": true, 60 | | "schema": { 61 | | "type": "array", 62 | | "items": { 63 | | "type": "string" 64 | | } 65 | | } 66 | | } 67 | |}""".stripMargin 68 | 69 | } 70 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/ModelsGetResponse.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object ModelsGetResponse { 4 | val responseJson: String = 5 | """{ 6 | | "object":"list", 7 | | "data":[ 8 | | { 9 | | "id":"babbage", 10 | | "object":"model", 11 | | "created":1649358449, 12 | | "owned_by":"openai", 13 | | "permission":[ 14 | | { 15 | | "id":"modelperm-49FUp5v084tBB49tC4z8LPH5", 16 | | "object":"model_permission", 17 | | "created":1669085501, 18 | | "allow_create_engine":false, 19 | | "allow_sampling":true, 20 | | "allow_logprobs":true, 21 | | "allow_search_indices":false, 22 | | "allow_view":true, 23 | | "allow_fine_tuning":false, 24 | | "organization":"*", 25 | | "group":null, 26 | | "is_blocking":false 27 | | } 28 | | ], 29 | | "root":"babbage", 30 | | "parent":null 31 | | }, 32 | | { 33 | | "id":"davinci", 34 | | "object":"model", 35 | | "created":1649359874, 36 | | "owned_by":"openai", 37 | | "permission":[ 38 | | { 39 | | "id":"modelperm-U6ZwlyAd0LyMk4rcMdz33Yc3", 40 | | "object":"model_permission", 41 | | "created":1669066355, 42 | | "allow_create_engine":false, 43 | | "allow_sampling":true, 44 | | "allow_logprobs":true, 45 | | "allow_search_indices":false, 46 | | "allow_view":true, 47 | | "allow_fine_tuning":false, 48 | | "organization":"*", 49 | | "group":null, 50 | | "is_blocking":false 51 | | } 52 | | ], 53 | | "root":"davinci", 54 | | "parent":null 55 | | } 56 | | ] 57 | |}""".stripMargin 58 | 59 | val singleModelResponse = """{ 60 | | "object":"list", 61 | | "data":[ 62 | | { 63 | | "id":"babbage", 64 | | "object":"model", 65 | | "created":1649358449, 66 | | "owned_by":"openai", 67 | | "permission":[ 68 | | { 69 | | "id":"modelperm-49FUp5v084tBB49tC4z8LPH5", 70 | | "object":"model_permission", 71 | | "created":1669085501, 72 | | "allow_create_engine":false, 73 | | "allow_sampling":true, 74 | | "allow_logprobs":true, 75 | | "allow_search_indices":false, 76 | | "allow_view":true, 77 | | "allow_fine_tuning":false, 78 | | "organization":"*", 79 | | "group":null, 80 | | "is_blocking":false 81 | | } 82 | | ], 83 | | "root":"babbage", 84 | | "parent":null 85 | | } 86 | | ] 87 | |}""".stripMargin 88 | 
} 89 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/ModerationsFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object ModerationsFixture { 4 | 5 | val jsonCreateModerationResponse = """{ 6 | | "id": "modr-5MWoLO", 7 | | "model": "text-moderation-stable", 8 | | "results": [ 9 | | { 10 | | "categories": { 11 | | "hate": false, 12 | | "hate/threatening": true, 13 | | "self-harm": false, 14 | | "sexual": false, 15 | | "sexual/minors": false, 16 | | "violence": true, 17 | | "violence/graphic": false 18 | | }, 19 | | "category_scores": { 20 | | "hate": 0.22714105248451233, 21 | | "hate/threatening": 0.4132447838783264, 22 | | "self-harm": 0.005232391878962517, 23 | | "sexual": 0.01407341007143259, 24 | | "sexual/minors": 0.0038522258400917053, 25 | | "violence": 0.9223177433013916, 26 | | "violence/graphic": 0.036865197122097015 27 | | }, 28 | | "flagged": true 29 | | } 30 | | ] 31 | |} 32 | |""".stripMargin 33 | 34 | val jsonCreateModerationResponseUnknownModel = """{ 35 | | "id": "modr-5MWoL1", 36 | | "model": "text-moderation-007", 37 | | "results": [ 38 | | { 39 | | "categories": { 40 | | "hate": false, 41 | | "hate/threatening": true, 42 | | "self-harm": false, 43 | | "sexual": false, 44 | | "sexual/minors": false, 45 | | "violence": true, 46 | | "violence/graphic": false 47 | | }, 48 | | "category_scores": { 49 | | "hate": 0.22714105248451233, 50 | | "hate/threatening": 0.4132447838783264, 51 | | "self-harm": 0.005232391878962517, 52 | | "sexual": 0.01407341007143259, 53 | | "sexual/minors": 0.0038522258400917053, 54 | | "violence": 0.9223177433013916, 55 | | "violence/graphic": 0.036865197122097015 56 | | }, 57 | | "flagged": true 58 | | } 59 | | ] 60 | |} 61 | |""".stripMargin 62 | } 63 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/ThreadsFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object ThreadsFixture { 4 | val jsonCreateEmptyThreadRequest: String = """{} 5 | |""".stripMargin 6 | 7 | val jsonCreateThreadWithMessagesRequestNoAttachments: String = """{ 8 | | "messages": [{ 9 | | "role": "user", 10 | | "content": "Hello, what is AI?" 11 | | }, { 12 | | "role": "user", 13 | | "content": "How does AI work? Explain it in simple terms." 14 | | }] 15 | | }""".stripMargin 16 | 17 | val jsonCreateThreadWithMessagesRequest: String = """{ 18 | | "messages": [{ 19 | | "role": "user", 20 | | "content": "Hello, what is AI?", 21 | | "attachments": [ 22 | | { 23 | | "file_id" : "file-abc123", 24 | | "tools": [ 25 | | { "type": "code_interpreter" }, 26 | | { "type": "file_search" } 27 | | ] 28 | | } 29 | | ] 30 | | }, { 31 | | "role": "user", 32 | | "content": "How does AI work? Explain it in simple terms." 33 | | }] 34 | | }""".stripMargin 35 | 36 | val jsonCreateThreadWithMessagesAndMetadataRequest: String = """{ 37 | | "messages": [{ 38 | | "role": "user", 39 | | "content": "Hello, what is AI?", 40 | | "attachments": [ 41 | | { 42 | | "file_id" : "file-abc456", 43 | | "tools": [ 44 | | { "type": "code_interpreter" } 45 | | ] 46 | | } 47 | | ] 48 | | }, { 49 | | "role": "user", 50 | | "content": "How does AI work? Explain it in simple terms." 
51 | | }], 52 | | "metadata": { 53 | | "modified": "true", 54 | | "user": "abc123" 55 | | } 56 | |}""".stripMargin 57 | 58 | val jsonCreateEmptyThreadResponse: String = """{ 59 | | "id": "thread_abc123", 60 | | "object": "thread", 61 | | "created_at": 1699012949, 62 | | "metadata": {} 63 | |}""".stripMargin 64 | 65 | val jsonCreateThreadWithMessagesAndMetadataResponse: String = """{ 66 | | "id": "thread_abc123", 67 | | "object": "thread", 68 | | "created_at": 1699014083, 69 | | "metadata": { 70 | | "modified": "true", 71 | | "user": "abc123" 72 | | } 73 | |}""".stripMargin 74 | 75 | val jsonDeleteThreadResponse: String = """{ 76 | | "id": "thread_abc123", 77 | | "object": "thread.deleted", 78 | | "deleted": true 79 | |} 80 | |""".stripMargin 81 | 82 | } 83 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/ToolFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object ToolFixture { 4 | 5 | val jsonToolCall: String = 6 | """{ 7 | | "description": "Books a flight for a passenger with full details", 8 | | "name": "book_flight", 9 | | "parameters": { 10 | | "additionalProperties": false, 11 | | "required": [ 12 | | "passenger", 13 | | "departureCity", 14 | | "destinationCity" 15 | | ], 16 | | "$schema": "http://json-schema.org/draft/2020-12/schema#", 17 | | "$defs": { 18 | | "Passenger": { 19 | | "additionalProperties": false, 20 | | "required": [ 21 | | "name", 22 | | "age" 23 | | ], 24 | | "title": "Passenger", 25 | | "type": "object", 26 | | "properties": { 27 | | "name": { 28 | | "type": "string" 29 | | }, 30 | | "age": { 31 | | "type": "integer", 32 | | "format": "int32" 33 | | } 34 | | } 35 | | } 36 | | }, 37 | | "title": "FlightDetails", 38 | | "type": "object", 39 | | "properties": { 40 | | "passenger": { 41 | | "$ref": "#/$defs/Passenger" 42 | | }, 43 | | "departureCity": { 44 | | "type": "string" 45 | | }, 46 | | "destinationCity": { 47 | | "type": "string" 48 | | } 49 | | } 50 | | } 51 | |}""".stripMargin 52 | 53 | val jsonToolCallStrictTrue: String = 54 | """{ 55 | | "description": "Return greeting", 56 | | "name": "greet", 57 | | "parameters": { 58 | | "type": "object" 59 | | }, 60 | | "strict": true 61 | |}""".stripMargin 62 | 63 | val jsonSchematizedToolCallStrictTrue: String = 64 | """{ 65 | | "description": "Books a flight for a passenger with full details", 66 | | "name": "book_flight", 67 | | "parameters": { 68 | | "additionalProperties": false, 69 | | "required": [ 70 | | "passenger", 71 | | "departureCity", 72 | | "destinationCity" 73 | | ], 74 | | "$schema": "http://json-schema.org/draft/2020-12/schema#", 75 | | "$defs": { 76 | | "Passenger": { 77 | | "additionalProperties": false, 78 | | "required": [ 79 | | "name", 80 | | "age" 81 | | ], 82 | | "title": "Passenger", 83 | | "type": "object", 84 | | "properties": { 85 | | "name": { 86 | | "type": "string" 87 | | }, 88 | | "age": { 89 | | "type": "integer", 90 | | "format": "int32" 91 | | } 92 | | } 93 | | } 94 | | }, 95 | | "title": "FlightDetails", 96 | | "type": "object", 97 | | "properties": { 98 | | "passenger": { 99 | | "$ref": "#/$defs/Passenger" 100 | | }, 101 | | "departureCity": { 102 | | "type": "string" 103 | | }, 104 | | "destinationCity": { 105 | | "type": "string" 106 | | } 107 | | } 108 | | }, 109 | | "strict": true 110 | |}""".stripMargin 111 | 112 | } 113 | -------------------------------------------------------------------------------- 
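For reference, a minimal usage sketch of the upload request bodies defined in core/src/main/scala/sttp/openai/requests/upload/UploadRequestBody.scala. It assumes, as the fixture-based specs suggest, that the SnakePickle writers emit snake_case field names, drop None fields and unwrap Some values; the object name UploadSerializationSketch is illustrative and not part of the repository, and the values are copied from the jsonCreateUpload / jsonCompleteUpload fixtures shown in the next file:

import sttp.openai.json.SnakePickle
import sttp.openai.requests.upload.{CompleteUploadRequestBody, UploadRequestBody}

object UploadSerializationSketch extends App {
  // Should serialize to the shape of UploadFixture.jsonCreateUpload:
  // {"filename": "file-name", "purpose": "file-purpose", "bytes": 123, "mime_type": "file/mime-type"}
  val createUpload = UploadRequestBody(
    filename = "file-name",
    purpose = "file-purpose",
    bytes = 123,
    mimeType = "file/mime-type"
  )
  println(SnakePickle.writeJs(createUpload))

  // Should serialize to the shape of UploadFixture.jsonCompleteUpload:
  // {"part_ids": ["part_abc123", "part_def456"], "md5": "md5-checksum"}
  val completeUpload = CompleteUploadRequestBody(
    partIds = Seq("part_abc123", "part_def456"),
    md5 = Some("md5-checksum")
  )
  println(SnakePickle.writeJs(completeUpload))
}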
/core/src/test/scala/sttp/openai/fixtures/UploadFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object UploadFixture { 4 | 5 | val jsonCreateUpload: String = 6 | """{ 7 | | "filename": "file-name", 8 | | "purpose": "file-purpose", 9 | | "bytes": 123, 10 | | "mime_type": "file/mime-type" 11 | |}""".stripMargin 12 | 13 | val jsonCompleteUpload: String = 14 | """{ 15 | | "part_ids": ["part_abc123", "part_def456"], 16 | | "md5": "md5-checksum" 17 | |}""".stripMargin 18 | 19 | val jsonUpdateResponse: String = 20 | """{ 21 | | "id": "upload_abc123", 22 | | "object": "upload", 23 | | "bytes": 1147483648, 24 | | "created_at": 1719184911, 25 | | "filename": "training_examples.jsonl", 26 | | "purpose": "fine-tune", 27 | | "status": "completed", 28 | | "expires_at": 1719127296, 29 | | "file": { 30 | | "id": "file-xyz321", 31 | | "object": "file", 32 | | "bytes": 1147483648, 33 | | "created_at": 1719186911, 34 | | "filename": "training_examples.jsonl", 35 | | "purpose": "fine-tune" 36 | | } 37 | |}""".stripMargin 38 | 39 | val jsonUploadPartResponse: String = 40 | """{ 41 | | "id": "part_def456", 42 | | "object": "upload.part", 43 | | "created_at": 1719186911, 44 | | "upload_id": "upload_abc123" 45 | |}""".stripMargin 46 | 47 | } 48 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/fixtures/VectorStoreFileFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object VectorStoreFileFixture { 4 | 5 | val jsonCreateRequest: String = 6 | """{ 7 | | "file_id": "file_1" 8 | |}""".stripMargin 9 | 10 | val jsonListRequest: String = 11 | """{ 12 | | "limit": 30, 13 | | "order": "asc", 14 | | "after": "111", 15 | | "before": "222", 16 | | "filter": "in_progress" 17 | |}""".stripMargin 18 | 19 | val jsonObject: String = 20 | """{ 21 | | "id": "vsf_1", 22 | | "object": "vector_store.file", 23 | | "usage_bytes": 123456, 24 | | "created_at": 1698107661, 25 | | "vector_store_id": "vs_1", 26 | | "status": "completed", 27 | | "last_error": null 28 | |}""".stripMargin 29 | 30 | val jsonObjectWithLastError: String = 31 | """{ 32 | | "id": "vsf_1", 33 | | "object": "vector_store.file", 34 | | "usage_bytes": 123456, 35 | | "created_at": 1698107661, 36 | | "vector_store_id": "vs_1", 37 | | "status": "completed", 38 | | "last_error": { 39 | | "code": "server_error", 40 | | "message": "Failed" 41 | | } 42 | |}""".stripMargin 43 | 44 | val jsonList: String = 45 | """{ 46 | | "object": "list", 47 | | "data": [ 48 | | { 49 | | "id": "vsf_1", 50 | | "object": "vector_store.file", 51 | | "usage_bytes" : 123456, 52 | | "status": "in_progress", 53 | | "created_at": 1698107661, 54 | | "vector_store_id": "vs_1" 55 | | }, 56 | | { 57 | | "id": "vsf_2", 58 | | "object": "vector_store.file", 59 | | "usage_bytes" : 1234567, 60 | | "status": "completed", 61 | | "created_at": 1698107661, 62 | | "vector_store_id": "vs_1", 63 | | "last_error": { 64 | | "code": "rate_limit_exceeded", 65 | | "message": "Failed2" 66 | | } 67 | | } 68 | | ], 69 | | "first_id": "vsf_1", 70 | | "last_id": "vsf_2", 71 | | "has_more": true 72 | |}""".stripMargin 73 | 74 | val jsonDelete: String = 75 | """{ 76 | | "id": "file_abc123", 77 | | "object": "vector_store.file.deleted", 78 | | "deleted": true 79 | |}""".stripMargin 80 | } 81 | -------------------------------------------------------------------------------- 
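Similarly, a minimal sketch of building a vector store creation request with the CreateVectorStoreBody and ExpiresAfter classes defined in core/src/main/scala/sttp/openai/requests/vectorstore. The object name VectorStoreSerializationSketch is illustrative only, and the anchor/days/name values are taken verbatim from the jsonCreateWithExpiresRequest fixture in the file below, not from a real API call:

import sttp.openai.json.SnakePickle
import sttp.openai.requests.vectorstore.ExpiresAfter
import sttp.openai.requests.vectorstore.VectorStoreRequestBody.CreateVectorStoreBody

object VectorStoreSerializationSketch extends App {
  // Optional fields left as None are expected to be omitted from the serialized JSON.
  val createVectorStore = CreateVectorStoreBody(
    name = Some("vs_1"),
    fileIds = Some(Seq("file_1", "file_2")),
    expiresAfter = Some(ExpiresAfter(anchor = "11111", days = 2))
  )

  // Expected shape (matching VectorStoreFixture.jsonCreateWithExpiresRequest):
  // {"file_ids": ["file_1", "file_2"], "name": "vs_1", "expires_after": {"anchor": "11111", "days": 2}}
  println(SnakePickle.writeJs(createVectorStore))
}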
/core/src/test/scala/sttp/openai/fixtures/VectorStoreFixture.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.fixtures 2 | 3 | object VectorStoreFixture { 4 | 5 | val jsonCreateRequest: String = 6 | """{ 7 | | "file_ids": ["file_1", "file_2"], 8 | | "name": "vs_1" 9 | |}""".stripMargin 10 | 11 | val jsonCreateWithExpiresRequest: String = 12 | """{ 13 | | "file_ids": ["file_1", "file_2"], 14 | | "name": "vs_1", 15 | | "expires_after": { 16 | | "anchor": "11111", 17 | | "days": 2 18 | | } 19 | |}""".stripMargin 20 | 21 | val jsonModify: String = 22 | """{ 23 | | "name": "vs_3", 24 | | "expires_after": { 25 | | "anchor": "2322", 26 | | "days": 5 27 | | } 28 | |}""".stripMargin 29 | 30 | val jsonObject: String = 31 | """{ 32 | | "id": "vs_1", 33 | | "object": "vector_store", 34 | | "created_at": 1698107661, 35 | | "usage_bytes": 123456, 36 | | "last_active_at": 1698107661, 37 | | "name": "test_vs", 38 | | "status": "in_progress", 39 | | "expires_at": 1698107651, 40 | | "file_counts": { 41 | | "in_progress": 0, 42 | | "completed": 1, 43 | | "cancelled": 2, 44 | | "failed": 1, 45 | | "total": 4 46 | | }, 47 | | "metadata": {}, 48 | | "last_used_at": 1698107681 49 | |}""".stripMargin 50 | 51 | val jsonList: String = 52 | """{ 53 | |"object": "list", 54 | | "data": [ 55 | | { 56 | | "id": "vs_abc123", 57 | | "object": "vector_store", 58 | | "created_at": 1699061776, 59 | | "name": "Support FAQ", 60 | | "usage_bytes": 139920, 61 | | "status": "completed", 62 | | "file_counts": { 63 | | "in_progress": 0, 64 | | "completed": 3, 65 | | "failed": 0, 66 | | "cancelled": 0, 67 | | "total": 3 68 | | } 69 | | }, 70 | | { 71 | | "id": "vs_abc456", 72 | | "object": "vector_store", 73 | | "created_at": 1699061776, 74 | | "name": "Support FAQ v2", 75 | | "usage_bytes": 139921, 76 | | "status": "in_progress", 77 | | "file_counts": { 78 | | "in_progress": 1, 79 | | "completed": 2, 80 | | "failed": 2, 81 | | "cancelled": 1, 82 | | "total": 6 83 | | } 84 | | } 85 | | ], 86 | | "first_id": "vs_abc123", 87 | | "last_id": "vs_abc456", 88 | | "has_more": false 89 | |}""".stripMargin 90 | 91 | val jsonDelete: String = 92 | """{ 93 | | "id": "vs_abc123", 94 | | "object": "vector_store.deleted", 95 | | "deleted": true 96 | |}""".stripMargin 97 | } 98 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/BatchDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures.BatchFixture 7 | import sttp.openai.json.SnakePickle 8 | import sttp.openai.requests.batch.{BatchRequestBody, BatchResponse, ListBatchResponse} 9 | import sttp.openai.utils.JsonUtils 10 | 11 | class BatchDataSpec extends AnyFlatSpec with Matchers with EitherValues { 12 | 13 | "Given create batch request as case class" should "be properly serialized to Json" in { 14 | // given 15 | val givenRequest = BatchRequestBody( 16 | inputFileId = "file-id", 17 | endpoint = "/v1/chat/completions", 18 | completionWindow = "24h", 19 | metadata = Some(Map("key1" -> "value1", "key2" -> "value2")) 20 | ) 21 | val jsonRequest: ujson.Value = ujson.read(BatchFixture.jsonCreateBatchRequest) 22 | // when 23 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 24 | // then 25 | serializedJson 
shouldBe jsonRequest 26 | } 27 | 28 | "Given create batch response as Json" should "be properly deserialized to case class" in { 29 | // given 30 | val jsonResponse = BatchFixture.jsonCreateBatchResponse 31 | val expectedResponse: BatchResponse = BatchFixture.batchResponse 32 | // when 33 | val deserializedJsonResponse: Either[Exception, BatchResponse] = 34 | JsonUtils.deserializeJsonSnake[BatchResponse].apply(jsonResponse) 35 | // then 36 | deserializedJsonResponse.value shouldBe expectedResponse 37 | } 38 | 39 | "Given list batch response as Json" should "be properly deserialized to case class" in { 40 | // given 41 | val jsonResponse = BatchFixture.jsonListBatchResponse 42 | val expectedResponse: ListBatchResponse = ListBatchResponse( 43 | data = Seq(BatchFixture.batchResponse), 44 | hasMore = true, 45 | firstId = "ftckpt_zc4Q7MP6XxulcVzj4MZdwsAB", 46 | lastId = "ftckpt_enQCFmOTGj3syEpYVhBRLTSy" 47 | ) 48 | // when 49 | val deserializedJsonResponse: Either[Exception, ListBatchResponse] = 50 | JsonUtils.deserializeJsonSnake[ListBatchResponse].apply(jsonResponse) 51 | 52 | // then 53 | deserializedJsonResponse.value shouldBe expectedResponse 54 | } 55 | 56 | } 57 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/admin/AdminApiKeyDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.admin 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures.AdminFixture 7 | import sttp.openai.json.SnakePickle 8 | import sttp.openai.utils.JsonUtils 9 | 10 | class AdminApiKeyDataSpec extends AnyFlatSpec with Matchers with EitherValues { 11 | "Given create admin api key request as case class" should "be properly serialized to Json" in { 12 | // given 13 | val givenRequest = AdminApiKeyRequestBody( 14 | name = "api_key_name" 15 | ) 16 | val jsonRequest: ujson.Value = ujson.read(AdminFixture.jsonRequest) 17 | // when 18 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 19 | // then 20 | serializedJson shouldBe jsonRequest 21 | } 22 | 23 | "Given create admin api key response as Json" should "be properly deserialized to case class" in { 24 | // given 25 | val jsonResponse = AdminFixture.jsonResponse 26 | val expectedResponse: AdminApiKeyResponse = AdminFixture.adminApiKeyResponse 27 | // when 28 | val deserializedJsonResponse: Either[Exception, AdminApiKeyResponse] = 29 | JsonUtils.deserializeJsonSnake[AdminApiKeyResponse].apply(jsonResponse) 30 | // then 31 | deserializedJsonResponse.value shouldBe expectedResponse 32 | } 33 | 34 | "Given list admin api key response as Json" should "be properly deserialized to case class" in { 35 | // given 36 | val jsonResponse = AdminFixture.jsonListResponse 37 | val expectedResponse: ListAdminApiKeyResponse = ListAdminApiKeyResponse( 38 | data = Seq(AdminFixture.adminApiKeyResponse), 39 | hasMore = false, 40 | firstId = "key_abc", 41 | lastId = "key_abc" 42 | ) 43 | // when 44 | val deserializedJsonResponse: Either[Exception, ListAdminApiKeyResponse] = 45 | JsonUtils.deserializeJsonSnake[ListAdminApiKeyResponse].apply(jsonResponse) 46 | // then 47 | deserializedJsonResponse.value shouldBe expectedResponse 48 | } 49 | 50 | "Given delete admin api key response as Json" should "be properly deserialized to case class" in { 51 | // given 52 | val jsonResponse = AdminFixture.jsonDeleteResponse 53 | 
val expectedResponse: DeleteAdminApiKeyResponse = DeleteAdminApiKeyResponse( 54 | id = "key_abc", 55 | deleted = true 56 | ) 57 | // when 58 | val deserializedJsonResponse: Either[Exception, DeleteAdminApiKeyResponse] = 59 | JsonUtils.deserializeJsonSnake[DeleteAdminApiKeyResponse].apply(jsonResponse) 60 | // then 61 | deserializedJsonResponse.value shouldBe expectedResponse 62 | } 63 | 64 | } 65 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/audio/AudioCreationDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.audio 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.utils.JsonUtils 8 | 9 | class AudioCreationDataSpec extends AnyFlatSpec with Matchers with EitherValues { 10 | "Given audio generation response as Json" should "be properly deserialized to case class" in { 11 | import sttp.openai.requests.audio.AudioResponseData.AudioResponse 12 | import sttp.openai.requests.audio.AudioResponseData.AudioResponse._ 13 | 14 | // given 15 | val jsonResponse = fixtures.AudioFixture.jsonResponse 16 | 17 | val expectedResponse = AudioResponse( 18 | "Imagine the wildest idea that you've ever had, and you're curious about how it might scale to something that's a 100, a 1,000 times bigger. This is a place where you can get to do that." 19 | ) 20 | 21 | // when 22 | val deserializedJsonResponse = JsonUtils.deserializeJsonSnake.apply(jsonResponse) 23 | 24 | // then 25 | deserializedJsonResponse.value shouldBe expectedResponse 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/audio/speech/SpeechDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.audio.speech 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures.AudioFixture 7 | import sttp.openai.json.SnakePickle 8 | import sttp.openai.requests.audio.speech.SpeechModel.TTS1 9 | 10 | class SpeechDataSpec extends AnyFlatSpec with Matchers with EitherValues { 11 | 12 | "Given create fine tuning job request as case class" should "be properly serialized to Json" in { 13 | // given 14 | val givenRequest = SpeechRequestBody( 15 | model = TTS1, 16 | input = "Hello, my name is John.", 17 | voice = Voice.Alloy, 18 | responseFormat = Some(ResponseFormat.Mp3), 19 | speed = Some(1.0f) 20 | ) 21 | val jsonRequest: ujson.Value = ujson.read(AudioFixture.jsonCreateSpeechRequest) 22 | // when 23 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 24 | // then 25 | serializedJson shouldBe jsonRequest 26 | } 27 | 28 | } 29 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/completions/chat/ChatChunkDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.requests.completions.Stop.SingleStop 8 | import 
sttp.openai.utils.ChatCompletionFixtures._ 9 | import sttp.openai.utils.JsonUtils 10 | 11 | class ChatChunkDataSpec extends AnyFlatSpec with Matchers with EitherValues { 12 | 13 | "Given chat chunk completions response as Json" should "be properly deserialized to case class" in { 14 | import ChatChunkRequestResponseData.ChatChunkResponse._ 15 | import ChatChunkRequestResponseData._ 16 | 17 | // given 18 | val jsonResponse = fixtures.ChatChunkFixture.jsonResponse 19 | 20 | val choices = Seq( 21 | Choices( 22 | delta = Delta( 23 | content = Some("...") 24 | ), 25 | finishReason = None, 26 | index = 0 27 | ), 28 | Choices( 29 | delta = Delta( 30 | role = Some(Role.Assistant), 31 | content = Some(" Hi"), 32 | toolCalls = toolCalls 33 | ), 34 | finishReason = Some("stop"), 35 | index = 1 36 | ) 37 | ) 38 | 39 | val expectedResponse: ChatChunkResponse = ChatChunkResponse( 40 | id = "chatcmpl-76FxnKOjnPkDVYTAQ1wK8iUNFJPvR", 41 | `object` = "chat.completion", 42 | created = 1681725687, 43 | model = "gpt-3.5-turbo-0301", 44 | choices = choices, 45 | systemFingerprint = Some("systemFingerprint") 46 | ) 47 | 48 | // when 49 | val givenResponse: Either[Exception, ChatChunkResponse] = JsonUtils.deserializeJsonSnake.apply(jsonResponse) 50 | 51 | // then 52 | givenResponse.value shouldBe expectedResponse 53 | } 54 | 55 | "Given completions request with streaming enabled as case class" should "be properly serialized to Json" in { 56 | import ChatRequestBody._ 57 | import sttp.openai.requests.completions.chat.message._ 58 | 59 | // given 60 | val givenRequest = ChatRequestBody.ChatBody( 61 | messages = messages, 62 | model = ChatCompletionModel.GPT35Turbo, 63 | frequencyPenalty = Some(0), 64 | maxTokens = Some(7), 65 | n = Some(1), 66 | presencePenalty = Some(0), 67 | temperature = Some(1), 68 | topP = Some(1), 69 | tools = Some(tools), 70 | responseFormat = Some(ResponseFormat.Text), 71 | toolChoice = Some(ToolChoice.ToolAuto), 72 | stop = Some(SingleStop("\n")), 73 | user = Some("testUser") 74 | ) 75 | 76 | val jsonRequest = ujson.read(fixtures.ChatChunkFixture.jsonRequest) 77 | 78 | // when 79 | val serializedJson = ChatBody.withStreaming(givenRequest) 80 | 81 | // then 82 | serializedJson shouldBe jsonRequest 83 | } 84 | } 85 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/completions/chat/JsonSchemaSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import scala.collection.immutable.ListMap 7 | import sttp.apispec.{Schema, SchemaType} 8 | import sttp.openai.fixtures 9 | import sttp.openai.json.SnakePickle 10 | import sttp.openai.requests.completions.chat.ChatRequestBody.ResponseFormat.JsonSchema 11 | 12 | class JsonSchemaSpec extends AnyFlatSpec with Matchers with EitherValues { 13 | "Given string JSON schema" should "be properly serialized to Json" in { 14 | val schema = Schema(SchemaType.String) 15 | 16 | val jsonStringSchema = ujson.read(fixtures.JsonSchemaFixture.stringSchema) 17 | 18 | val serializedSchema = SnakePickle.writeJs(JsonSchema("testString", true, schema)) 19 | 20 | serializedSchema shouldBe jsonStringSchema 21 | } 22 | 23 | "Given number JSON schema" should "be properly serialized to Json" in { 24 | val schema = Schema(SchemaType.Number) 25 | 26 | val jsonNumberSchema = 
ujson.read(fixtures.JsonSchemaFixture.numberSchema) 27 | 28 | val serializedSchema = SnakePickle.writeJs(JsonSchema("testNumber", true, schema)) 29 | 30 | serializedSchema shouldBe jsonNumberSchema 31 | } 32 | 33 | "Given object JSON schema" should "be properly serialized to Json" in { 34 | val schema = Schema(SchemaType.Object) 35 | .copy(properties = ListMap("foo" -> Schema(SchemaType.String), "bar" -> Schema(SchemaType.Number))) 36 | 37 | val jsonObjectSchema = ujson.read(fixtures.JsonSchemaFixture.objectSchema) 38 | 39 | val serializedSchema = SnakePickle.writeJs(JsonSchema("testObject", true, schema)) 40 | 41 | serializedSchema shouldBe jsonObjectSchema 42 | } 43 | 44 | "Given array JSON schema" should "be properly serialized to Json" in { 45 | val schema = Schema(SchemaType.Array).copy(items = Some(Schema(SchemaType.String))) 46 | 47 | val jsonArraySchema = ujson.read(fixtures.JsonSchemaFixture.arraySchema) 48 | 49 | val serializedSchema = SnakePickle.writeJs(JsonSchema("testArray", true, schema)) 50 | 51 | serializedSchema shouldBe jsonArraySchema 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/completions/chat/ToolSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.completions.chat 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures.ToolFixture 7 | import sttp.openai.json.SnakePickle 8 | import sttp.openai.requests.completions.chat.message.Tool.SchematizedFunctionTool 9 | 10 | class ToolSpec extends AnyFlatSpec with Matchers with EitherValues { 11 | case class Passenger(name: String, age: Int) 12 | 13 | case class FlightDetails(passenger: Passenger, departureCity: String, destinationCity: String) 14 | 15 | "Given SchematizedToolCall" should "be properly serialized to Json" in { 16 | import sttp.tapir.generic.auto._ 17 | // given 18 | val functionTool = 19 | SchematizedFunctionTool[FlightDetails](name = "book_flight", description = "Books a flight for a passenger with full details") 20 | val expectedJson = ujson.read(ToolFixture.jsonToolCall) 21 | // when 22 | val serializedToolCall = SnakePickle.writeJs(functionTool) 23 | // then 24 | serializedToolCall shouldBe expectedJson 25 | } 26 | 27 | "Given FunctionTool with strict flag" should "serialize and deserialize properly" in { 28 | import sttp.openai.requests.completions.chat.message.Tool.FunctionTool 29 | // given 30 | val funcTool = FunctionTool( 31 | description = "Return greeting", 32 | name = "greet", 33 | parameters = Map("type" -> ujson.Str("object")), 34 | strict = Some(true) 35 | ) 36 | 37 | val expectedJson = ujson.read(ToolFixture.jsonToolCallStrictTrue) 38 | 39 | // when 40 | val serialized = SnakePickle.writeJs(funcTool) 41 | serialized shouldBe expectedJson 42 | 43 | // and deserialization 44 | val deserialized = SnakePickle.read[FunctionTool](expectedJson) 45 | deserialized shouldBe funcTool 46 | } 47 | 48 | "Given SchematizedFunctionTool with strict flag" should "serialize and deserialize properly" in { 49 | import sttp.tapir.generic.auto._ 50 | val tool = SchematizedFunctionTool[FlightDetails]( 51 | name = "book_flight", 52 | description = "Books a flight for a passenger with full details", 53 | strict = Some(true) 54 | ) 55 | 56 | val expectedJson = ujson.read(ToolFixture.jsonSchematizedToolCallStrictTrue) 57 | 58 | val serialized = 
SnakePickle.writeJs(tool) 59 | serialized shouldBe expectedJson 60 | 61 | val deserialized = SnakePickle.read[sttp.openai.requests.completions.chat.message.Tool.SchematizedFunctionTool](expectedJson) 62 | // Ensure strict flag is preserved 63 | deserialized.strict should contain(true) 64 | // Ensure serialisation after deserialization matches expected JSON (round-trip) 65 | SnakePickle.writeJs(deserialized) shouldBe expectedJson 66 | } 67 | } 68 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/embeddings/EmbeddingsDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.embeddings 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.requests.embeddings.EmbeddingsRequestBody.EmbeddingsModel 8 | import sttp.openai.requests.embeddings.EmbeddingsResponseBody._ 9 | import sttp.openai.utils.JsonUtils 10 | 11 | class EmbeddingsDataSpec extends AnyFlatSpec with Matchers with EitherValues { 12 | "Given list files response as Json" should "be properly deserialized to case class" in { 13 | // given 14 | val listFilesResponse = fixtures.EmbeddingsFixture.jsonCreateEmbeddingsResponse 15 | val expectedResponse = EmbeddingResponse( 16 | `object` = "list", 17 | data = Seq( 18 | EmbeddingData( 19 | `object` = "embedding", 20 | index = 0, 21 | embedding = Seq( 22 | 0.0023064255, -0.009327292, 0.015797347, -0.0077780345, -0.0046922187 23 | ) 24 | ) 25 | ), 26 | model = EmbeddingsModel.TextEmbeddingAda002, 27 | usage = Usage( 28 | promptTokens = 8, 29 | totalTokens = 8 30 | ) 31 | ) 32 | // when 33 | val givenResponse: Either[Exception, EmbeddingResponse] = 34 | JsonUtils.deserializeJsonSnake[EmbeddingResponse].apply(listFilesResponse) 35 | 36 | // then 37 | givenResponse.value shouldBe expectedResponse 38 | } 39 | 40 | } 41 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/files/FilesResponseDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.files 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.requests.files.FilesResponseData.FilesResponse._ 8 | import sttp.openai.requests.files.FilesResponseData.{DeletedFileData, FileData, FilesResponse} 9 | import sttp.openai.utils.JsonUtils 10 | 11 | class FilesResponseDataSpec extends AnyFlatSpec with Matchers with EitherValues { 12 | "Given list files response as Json" should "be properly deserialized to case class" in { 13 | // given 14 | val listFilesResponse = fixtures.FilesResponse.listFilesJsonResponse 15 | val expectedResponse = FilesResponse( 16 | `object` = "list", 17 | Seq( 18 | FileData( 19 | `object` = "file", 20 | id = "file-tralala", 21 | purpose = "fine-tune", 22 | filename = "example.jsonl", 23 | bytes = 44, 24 | createdAt = 1681375533, 25 | status = "processed", 26 | statusDetails = None 27 | ) 28 | ) 29 | ) 30 | 31 | // when 32 | val givenResponse: Either[Exception, FilesResponse] = JsonUtils.deserializeJsonSnake.apply(listFilesResponse) 33 | 34 | // then 35 | givenResponse.value shouldBe expectedResponse 36 | } 37 | 38 | "Given upload file response as Json" should "be 
properly deserialized to case class" in { 39 | // given 40 | val singleFileJsonResponse = fixtures.FilesResponse.singleFileJsonResponse 41 | val expectedResponse = 42 | FileData( 43 | `object` = "file", 44 | id = "file-tralala", 45 | purpose = "fine-tune", 46 | filename = "example.jsonl", 47 | bytes = 44, 48 | createdAt = 1681375533, 49 | status = "uploaded", 50 | statusDetails = None 51 | ) 52 | 53 | // when 54 | val givenResponse: Either[Exception, FileData] = JsonUtils.deserializeJsonSnake[FileData].apply(singleFileJsonResponse) 55 | 56 | // then 57 | givenResponse.value shouldBe expectedResponse 58 | } 59 | 60 | "Given delete file response as Json" should "be properly deserialized to case class" in { 61 | // given 62 | val listFilesResponse = fixtures.FilesResponse.deleteFileJsonResponse 63 | val expectedResponse = DeletedFileData( 64 | `object` = "file", 65 | id = "file-tralala", 66 | deleted = true 67 | ) 68 | 69 | // when 70 | val givenResponse: Either[Exception, DeletedFileData] = 71 | JsonUtils.deserializeJsonSnake[DeletedFileData].apply(listFilesResponse) 72 | 73 | // then 74 | givenResponse.value shouldBe expectedResponse 75 | } 76 | 77 | "Given retrieve file response as Json" should "be properly deserialized to case class" in { 78 | // given 79 | val retrieveFileJsonResponse = fixtures.FilesResponse.retrieveFileJsonResponse 80 | val expectedResponse = FileData( 81 | `object` = "file", 82 | id = "file-tralala", 83 | purpose = "fine-tune", 84 | filename = "example.jsonl", 85 | bytes = 44, 86 | createdAt = 1681375533, 87 | status = "processed", 88 | statusDetails = None 89 | ) 90 | 91 | // when 92 | val givenResponse: Either[Exception, FileData] = JsonUtils.deserializeJsonSnake[FileData].apply(retrieveFileJsonResponse) 93 | 94 | // then 95 | givenResponse.value shouldBe expectedResponse 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/images/creation/ImageCreationDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.images.creation 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.json.SnakePickle 8 | import sttp.openai.requests.images.{ResponseFormat, Size} 9 | import sttp.openai.utils.JsonUtils 10 | class ImageCreationDataSpec extends AnyFlatSpec with Matchers with EitherValues { 11 | 12 | "Given image generation response as Json" should "be properly deserialized to case class" in { 13 | import sttp.openai.requests.images.ImageResponseData.ImageResponse._ 14 | import sttp.openai.requests.images.ImageResponseData._ 15 | 16 | // given 17 | val jsonResponse = fixtures.ImageCreationFixture.jsonResponse 18 | 19 | val generatedImageData = Seq( 20 | GeneratedImageData("https://generated.image.url") 21 | ) 22 | 23 | val expectedResponse: ImageResponse = ImageResponse( 24 | created = 1681893694, 25 | data = generatedImageData 26 | ) 27 | // when 28 | val givenResponse = JsonUtils.deserializeJsonSnake.apply(jsonResponse) 29 | 30 | // then 31 | givenResponse.value shouldBe expectedResponse 32 | } 33 | 34 | "Given create image request as case class" should "be properly serialized to Json" in { 35 | import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody._ 36 | import sttp.openai.requests.images.creation.ImageCreationRequestBody._ 37 | 38 | // given 39 | 
val givenRequest: ImageCreationBody = ImageCreationBody( 40 | prompt = "cute fish", 41 | "dall-e-3", 42 | Some(1), 43 | size = Some(Size.Custom("1024x1024")), 44 | Some(ResponseFormat.Custom("url")), 45 | Some("user1") 46 | ) 47 | 48 | val jsonRequest = ujson.read(fixtures.ImageCreationFixture.jsonRequest) 49 | 50 | // when 51 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 52 | 53 | // then 54 | serializedJson shouldBe jsonRequest 55 | } 56 | 57 | "Given create image request as case class created with enum values" should "be properly serialized to Json" in { 58 | import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody._ 59 | import sttp.openai.requests.images.creation.ImageCreationRequestBody._ 60 | 61 | // given 62 | val givenRequest: ImageCreationBody = ImageCreationBody( 63 | "cute fish", 64 | "dall-e-3", 65 | Some(1), 66 | Some(Size.Large), 67 | Some(ResponseFormat.URL), 68 | Some("user1") 69 | ) 70 | 71 | val jsonRequest = ujson.read(fixtures.ImageCreationFixture.jsonRequest) 72 | 73 | // when 74 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 75 | 76 | // then 77 | serializedJson shouldBe jsonRequest 78 | } 79 | 80 | "Given create image request as case class with different model" should "be properly serialized to Json" in { 81 | import sttp.openai.requests.images.creation.ImageCreationRequestBody.ImageCreationBody._ 82 | import sttp.openai.requests.images.creation.ImageCreationRequestBody._ 83 | 84 | // given 85 | val givenRequest: ImageCreationBody = ImageCreationBody( 86 | "cute fish", 87 | "dall-e-2", 88 | Some(1), 89 | Some(Size.Large), 90 | Some(ResponseFormat.URL), 91 | Some("user1") 92 | ) 93 | 94 | val jsonRequest = ujson.read(fixtures.ImageCreationFixture.jsonRequestDalle2) 95 | 96 | // when 97 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 98 | 99 | // then 100 | serializedJson shouldBe jsonRequest 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/models/ModelsGetResponseDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.models 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.requests.models.ModelsResponseData.ModelsResponse._ 8 | import sttp.openai.requests.models.ModelsResponseData.{DeletedModelData, ModelData, ModelPermission, ModelsResponse} 9 | import sttp.openai.utils.JsonUtils 10 | 11 | class ModelsGetResponseDataSpec extends AnyFlatSpec with Matchers with EitherValues { 12 | 13 | "Given deleted model response as Json" should "be properly deserialized to case class" in { 14 | import ModelsResponseData.DeletedModelData._ 15 | // given 16 | val response: String = """{ 17 | | "id": "ft:gpt-4o-mini:acemeco:suffix:abc123", 18 | | "object": "model", 19 | | "deleted": true 20 | |}""".stripMargin 21 | val expectedResponse: DeletedModelData = DeletedModelData( 22 | id = "ft:gpt-4o-mini:acemeco:suffix:abc123", 23 | `object` = "model", 24 | deleted = true 25 | ) 26 | // when 27 | val givenResponse: Either[Exception, DeletedModelData] = JsonUtils.deserializeJsonSnake[DeletedModelData].apply(response) 28 | // then 29 | givenResponse.value shouldBe expectedResponse 30 | } 31 | 32 | "Given models response as Json" should "be properly deserialized to case class" in { 33 | 
34 | // given 35 | val response: String = fixtures.ModelsGetResponse.responseJson 36 | 37 | val babbagePermission: Seq[ModelPermission] = Seq( 38 | ModelPermission( 39 | id = "modelperm-49FUp5v084tBB49tC4z8LPH5", 40 | `object` = "model_permission", 41 | created = 1669085501, 42 | allowCreateEngine = false, 43 | allowSampling = true, 44 | allowLogprobs = true, 45 | allowSearchIndices = false, 46 | allowView = true, 47 | allowFineTuning = false, 48 | organization = "*", 49 | group = None, 50 | isBlocking = false 51 | ) 52 | ) 53 | 54 | val davinciPermission: Seq[ModelPermission] = 55 | Seq( 56 | ModelPermission( 57 | id = "modelperm-U6ZwlyAd0LyMk4rcMdz33Yc3", 58 | `object` = "model_permission", 59 | created = 1669066355, 60 | allowCreateEngine = false, 61 | allowSampling = true, 62 | allowLogprobs = true, 63 | allowSearchIndices = false, 64 | allowView = true, 65 | allowFineTuning = false, 66 | organization = "*", 67 | group = None, 68 | isBlocking = false 69 | ) 70 | ) 71 | 72 | val serializedData: Seq[ModelData] = Seq( 73 | ModelData( 74 | id = "babbage", 75 | `object` = "model", 76 | created = 1649358449, 77 | ownedBy = "openai", 78 | permission = babbagePermission, 79 | root = "babbage", 80 | parent = None 81 | ), 82 | ModelData( 83 | id = "davinci", 84 | `object` = "model", 85 | created = 1649359874, 86 | ownedBy = "openai", 87 | permission = davinciPermission, 88 | root = "davinci", 89 | parent = None 90 | ) 91 | ) 92 | 93 | val expectedResponse: ModelsResponse = ModelsResponse(`object` = "list", data = serializedData) 94 | // when 95 | 96 | val givenResponse: Either[Exception, ModelsResponse] = JsonUtils.deserializeJsonSnake.apply(response) 97 | 98 | // then 99 | givenResponse.value shouldBe expectedResponse 100 | } 101 | } 102 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/moderations/ModerationsDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.moderations 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures 7 | import sttp.openai.requests.moderations.ModerationsRequestBody.ModerationModel 8 | import sttp.openai.requests.moderations.ModerationsResponseData._ 9 | import sttp.openai.utils.JsonUtils 10 | 11 | class ModerationsDataSpec extends AnyFlatSpec with Matchers with EitherValues { 12 | "Given create moderation response as Json" should "be properly deserialized to case class" in { 13 | // given 14 | val createModerationResponse = fixtures.ModerationsFixture.jsonCreateModerationResponse 15 | val expectedResponse = ModerationData( 16 | id = "modr-5MWoLO", 17 | model = ModerationModel.TextModerationStable, 18 | results = Seq( 19 | Result( 20 | flagged = true, 21 | categories = Categories( 22 | sexual = false, 23 | hate = false, 24 | violence = true, 25 | `self-harm` = false, 26 | `sexual/minors` = false, 27 | `hate/threatening` = true, 28 | `violence/graphic` = false 29 | ), 30 | categoryScores = CategoryScores( 31 | sexual = 0.01407341007143259, 32 | hate = 0.22714105248451233, 33 | violence = 0.9223177433013916, 34 | `self-harm` = 0.005232391878962517, 35 | `sexual/minors` = 0.0038522258400917053, 36 | `hate/threatening` = 0.4132447838783264, 37 | `violence/graphic` = 0.036865197122097015 38 | ) 39 | ) 40 | ) 41 | ) 42 | // when 43 | val givenResponse: Either[Exception, ModerationData] = 44 | 
JsonUtils.deserializeJsonSnake[ModerationData].apply(createModerationResponse) 45 | 46 | // then 47 | givenResponse.value shouldBe expectedResponse 48 | } 49 | "Unknown moderation model in create response" should "be deserialized to case class" in { 50 | // given 51 | val createModerationResponse = fixtures.ModerationsFixture.jsonCreateModerationResponseUnknownModel 52 | // when 53 | val givenResponse: Either[Exception, ModerationData] = 54 | JsonUtils.deserializeJsonSnake[ModerationData].apply(createModerationResponse) 55 | 56 | // then 57 | givenResponse.value.model shouldBe ModerationModel.CustomModerationModel("text-moderation-007") 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/requests/upload/UploadDataSpec.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.requests.upload 2 | 3 | import org.scalatest.EitherValues 4 | import org.scalatest.flatspec.AnyFlatSpec 5 | import org.scalatest.matchers.should.Matchers 6 | import sttp.openai.fixtures.UploadFixture 7 | import sttp.openai.json.SnakePickle 8 | import sttp.openai.utils.JsonUtils 9 | 10 | class UploadDataSpec extends AnyFlatSpec with Matchers with EitherValues { 11 | 12 | "Given upload request body as case class" should "be properly serialized to Json" in { 13 | // given 14 | val givenRequest = UploadRequestBody( 15 | filename = "file-name", 16 | purpose = "file-purpose", 17 | bytes = 123, 18 | mimeType = "file/mime-type" 19 | ) 20 | val jsonRequest: ujson.Value = ujson.read(UploadFixture.jsonCreateUpload) 21 | // when 22 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 23 | // then 24 | serializedJson shouldBe jsonRequest 25 | } 26 | 27 | "Given complete upload request body as case class" should "be properly serialized to Json" in { 28 | // given 29 | val givenRequest = CompleteUploadRequestBody( 30 | partIds = Seq("part_abc123", "part_def456"), 31 | md5 = Some("md5-checksum") 32 | ) 33 | val jsonRequest: ujson.Value = ujson.read(UploadFixture.jsonCompleteUpload) 34 | // when 35 | val serializedJson: ujson.Value = SnakePickle.writeJs(givenRequest) 36 | // then 37 | serializedJson shouldBe jsonRequest 38 | } 39 | 40 | "Given upload response as Json" should "be properly deserialized to case class" in { 41 | // given 42 | val jsonResponse = UploadFixture.jsonUpdateResponse 43 | val expectedResponse: UploadResponse = UploadResponse( 44 | id = "upload_abc123", 45 | bytes = 1147483648, 46 | createdAt = 1719184911, 47 | filename = "training_examples.jsonl", 48 | purpose = "fine-tune", 49 | status = "completed", 50 | expiresAt = 1719127296, 51 | file = Some( 52 | FileMetadata( 53 | id = "file-xyz321", 54 | bytes = 1147483648, 55 | createdAt = 1719186911, 56 | filename = "training_examples.jsonl", 57 | purpose = "fine-tune", 58 | `object` = "file" 59 | ) 60 | ) 61 | ) 62 | // when 63 | val deserializedJsonResponse: Either[Exception, UploadResponse] = 64 | JsonUtils.deserializeJsonSnake[UploadResponse].apply(jsonResponse) 65 | // then 66 | deserializedJsonResponse.value shouldBe expectedResponse 67 | } 68 | 69 | "Given upload part response as Json" should "be properly deserialized to case class" in { 70 | // given 71 | val jsonResponse = UploadFixture.jsonUploadPartResponse 72 | val expectedResponse: UploadPartResponse = UploadPartResponse( 73 | id = "part_def456", 74 | createdAt = 1719186911, 75 | uploadId = "upload_abc123" 76 | ) 77 | // when 78 | val 
deserializedJsonResponse: Either[Exception, UploadPartResponse] = 79 | JsonUtils.deserializeJsonSnake[UploadPartResponse].apply(jsonResponse) 80 | // then 81 | deserializedJsonResponse.value shouldBe expectedResponse 82 | } 83 | 84 | } 85 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/utils/ChatCompletionFixtures.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.utils 2 | 3 | import sttp.openai.requests.completions.chat.ToolCall.FunctionToolCall 4 | import sttp.openai.requests.completions.chat.message.Tool.FunctionTool 5 | import sttp.openai.requests.completions.chat.{FunctionCall, ToolCall} 6 | import sttp.openai.requests.completions.chat.message._ 7 | import ujson._ 8 | 9 | object ChatCompletionFixtures { 10 | def messages: Seq[Message] = systemMessages ++ userMessages ++ assistantMessages ++ toolMessages 11 | 12 | def systemMessages: Seq[Message.SystemMessage] = 13 | Seq(Message.SystemMessage("Hello!"), Message.SystemMessage("Hello!", Some("User"))) 14 | 15 | def userMessages: Seq[Message.UserMessage] = { 16 | val parts = Seq( 17 | Content.TextContentPart("Hello!"), 18 | Content.ImageContentPart(Content.ImageUrl("https://i.imgur.com/2tj5rQE.jpg")) 19 | ) 20 | val arrayMessage = Message.UserMessage(Content.ArrayContent(parts)) 21 | val stringMessage = Message.UserMessage(Content.TextContent("Hello!"), Some("User")) 22 | 23 | Seq(stringMessage, arrayMessage) 24 | } 25 | 26 | def assistantMessages: Seq[Message.AssistantMessage] = 27 | Seq( 28 | Message.AssistantMessage("Hello!", Some("User"), toolCalls), 29 | Message.AssistantMessage("Hello!", Some("User")), 30 | Message.AssistantMessage("Hello!") 31 | ) 32 | 33 | def toolMessages: Seq[Message.ToolMessage] = 34 | Seq( 35 | Message.ToolMessage("Hello!", "tool_call_id_1"), 36 | Message.ToolMessage("Hello!", "tool_call_id_2") 37 | ) 38 | 39 | def tools: Seq[Tool] = { 40 | val function = FunctionTool( 41 | description = "Random description", 42 | name = "Random name", 43 | parameters = Map( 44 | "type" -> Str("function"), 45 | "properties" -> Obj( 46 | "location" -> Obj( 47 | "type" -> "string", 48 | "description" -> "The city and state e.g. 
San Francisco, CA" 49 | ) 50 | ), 51 | "required" -> Arr("location") 52 | ) 53 | ) 54 | 55 | Seq(function) 56 | } 57 | 58 | def toolCalls: Seq[ToolCall] = 59 | Seq( 60 | FunctionToolCall( 61 | None, 62 | FunctionCall( 63 | arguments = "args" 64 | ) 65 | ), 66 | FunctionToolCall( 67 | Some("tool_id_2"), 68 | FunctionCall( 69 | arguments = "args", 70 | name = Some("Fish") 71 | ) 72 | ) 73 | ) 74 | } 75 | -------------------------------------------------------------------------------- /core/src/test/scala/sttp/openai/utils/JsonUtils.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.utils 2 | 3 | import sttp.client4.{IsOption, JsonInput} 4 | import sttp.openai.json.SnakePickle.{read, write} 5 | import sttp.openai.json.SttpUpickleApiExtension.upickleApi 6 | 7 | object JsonUtils { 8 | def compactJson(json: String): String = write(read[ujson.Value](json)) 9 | 10 | def deserializeJsonSnake[B: upickleApi.Reader: IsOption]: String => Either[Exception, B] = { (s: String) => 11 | try 12 | Right(upickleApi.read[B](JsonInput.sanitize[B].apply(s))) 13 | catch { 14 | case e: Exception => Left(e) 15 | case t: Throwable => 16 | // in ScalaJS, ArrayIndexOutOfBoundsException exceptions are wrapped in org.scalajs.linker.runtime.UndefinedBehaviorError 17 | t.getCause match { 18 | case e: ArrayIndexOutOfBoundsException => Left(e) 19 | case _ => throw t 20 | } 21 | } 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /examples/src/main/resources/logback.xml: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | %date [%thread] %-5level %logger{36} - %msg%n 6 | 7 | 8 | 9 | 10 | 11 | 12 | -------------------------------------------------------------------------------- /examples/src/main/scala/examples/ChatProxy.scala: -------------------------------------------------------------------------------- 1 | //> using dep com.softwaremill.sttp.openai::ox:0.3.5 2 | //> using dep com.softwaremill.sttp.tapir::tapir-netty-server-sync:1.11.33 3 | //> using dep com.softwaremill.sttp.client4::ox:4.0.8 4 | //> using dep ch.qos.logback:logback-classic:1.5.18 5 | 6 | // remember to set the OPENAI_KEY env variable! 7 | // run with: OPENAI_KEY=... 
scala-cli run ChatProxy.scala 8 | 9 | // test by connecting to ws://localhost:8080/chat using a WebSocket client 10 | 11 | package examples 12 | 13 | import org.slf4j.{Logger, LoggerFactory} 14 | import ox.* 15 | import ox.either.orThrow 16 | import sttp.client4.{DefaultSyncBackend, SyncBackend} 17 | import sttp.openai.OpenAI 18 | import sttp.openai.requests.completions.chat.ChatRequestBody.{ChatBody, ChatCompletionModel} 19 | import sttp.openai.requests.completions.chat.message.{Content, Message} 20 | import sttp.openai.streaming.ox.* 21 | import sttp.tapir.* 22 | import sttp.tapir.CodecFormat.* 23 | import sttp.tapir.server.netty.sync.{NettySyncServer, OxStreams} 24 | import ox.flow.Flow 25 | 26 | // 27 | 28 | val logger: Logger = LoggerFactory.getLogger("ChatProxy") 29 | 30 | // model for sending & receiving chat messages to/from the end-user 31 | case class ChatMessage(message: String) 32 | given Codec[String, ChatMessage, TextPlain] = Codec.string.map(ChatMessage(_))(_.message) 33 | 34 | // the description of the endpoint, that will be exposed: GET /chat -> WS(consuming & producing ChatMessage-s) 35 | val chatProxyEndpoint = infallibleEndpoint.get 36 | .in("chat") 37 | .out(webSocketBody[ChatMessage, TextPlain, ChatMessage, TextPlain](OxStreams)) 38 | 39 | def chat(sttpBackend: SyncBackend, openAI: OpenAI): OxStreams.Pipe[ChatMessage, ChatMessage] = 40 | // the OxStreams.Pipe converts a flow of *incoming* messages (sent by the end-user over the web socket), to a flow 41 | // of *outgoing* messages (sent to the end-user over the web socket)t 42 | incoming => 43 | // we're returning an *outgoing* flow where we can freely emit elements (in our case - incremental chat responses) 44 | Flow.usingEmit { emit => 45 | incoming 46 | // main processing loop: receives messages from the WS and queries OpenAI with the chat's history 47 | .mapStateful(() => Vector.empty[Message]) { (history, nextMessage) => 48 | val nextHistory = history.apply() :+ Message.UserMessage(content = Content.TextContent(nextMessage.message)) 49 | 50 | // querying OpenAI with the entire chat history, as each request is stateless 51 | val chatRequestBody: ChatBody = ChatBody( 52 | model = ChatCompletionModel.GPT4oMini, 53 | messages = nextHistory 54 | ) 55 | 56 | // requesting a streaming completion, so that we can get back to the user as the answer is being generated 57 | val chatCompletionFlow = openAI 58 | .createStreamedChatCompletion(chatRequestBody) 59 | .send(sttpBackend) 60 | .body 61 | .orThrow // there might be an OpenAI HTTP-error 62 | 63 | // extracting the response increments 64 | val responseList = chatCompletionFlow 65 | .map(_.orThrow.choices.head.delta.content) 66 | .collect { case Some(msg) => ChatMessage(msg) } 67 | .tap(emit.apply) // emitting each to the *outgoing* flow 68 | .runToList() // accumulating all repsonses so they become part of the history for the next request 69 | 70 | val entireResponse = responseList.map(_.message).mkString 71 | val nextNextHistory = nextHistory :+ Message.AssistantMessage(content = entireResponse) 72 | 73 | (() => nextNextHistory, ()) 74 | } 75 | // when the outer flow is run, running the incoming flow as well; it doesn't produce any meaningful results 76 | // (apart from emitting responses to the outer flow), so discarding its result 77 | .runDrain() 78 | } 79 | 80 | object ChatProxy extends OxApp: 81 | override def run(args: Vector[String])(using Ox): ExitCode = 82 | val openAI = new OpenAI(System.getenv("OPENAI_KEY")) 83 | val sttpBackend = 
useCloseableInScope(DefaultSyncBackend()) 84 | val chatProxyServerEndpoint = chatProxyEndpoint.handleSuccess(_ => chat(sttpBackend, openAI)) 85 | val binding = NettySyncServer().addEndpoint(chatProxyServerEndpoint).start() 86 | logger.info(s"Server started at ${binding.hostName}:${binding.port}") 87 | never 88 | -------------------------------------------------------------------------------- /examples/src/main/scala/examples/StrictStructuredFunctionCallingExample.scala: -------------------------------------------------------------------------------- 1 | package examples 2 | 3 | /** Manual playground for the strict structured outputs for function calling feature (`strict = true`). 4 | * 5 | * It sends the instruction "Say hello" but constrains the model with a JSON-Schema that requires the reply to be a number. When 6 | * `strict = true` is set, the schema wins and the model must produce something that validates – hence we expect a number, not the word 7 | * "hello". 8 | * 9 | * Run from the project root folder with: OPENAI_API_KEY=… sbt "examples3/runMain examples.StrictStructuredFunctionCallingExample" 10 | */ 11 | object StrictStructuredFunctionCallingExample extends App { 12 | import ujson.{Arr, Bool, Obj, Str} 13 | import sttp.client4.{DefaultSyncBackend, SyncBackend} 14 | import sttp.openai.OpenAI 15 | import sttp.openai.requests.completions.chat.ChatRequestBody.{ChatBody, ChatCompletionModel, ResponseFormat} 16 | import sttp.openai.requests.completions.chat.message.{Content, Message, ToolChoice} 17 | import sttp.openai.requests.completions.chat.message.Tool.FunctionTool 18 | 19 | val apiKey = sys.env.getOrElse("OPENAI_API_KEY", sys.error("OPENAI_API_KEY env variable not set")) 20 | 21 | val getNumberTool = FunctionTool( 22 | description = "Convert given text to upper-case", 23 | name = "uppercase_text", 24 | parameters = Map( 25 | "type" -> Str("object"), 26 | "properties" -> Obj( 27 | "text" -> Obj("type" -> Str("number")) 28 | ), 29 | "required" -> Arr(Str("text")), 30 | "additionalProperties" -> Bool(false) 31 | ), 32 | strict = Some(true) 33 | ) 34 | 35 | val chatBody = ChatBody( 36 | model = ChatCompletionModel.GPT4oMini, 37 | messages = Seq(Message.UserMessage(Content.TextContent("Please uppercase the word 'hello'"))), 38 | tools = Some(Seq(getNumberTool)), 39 | toolChoice = Some(ToolChoice.ToolFunction("uppercase_text")) 40 | ) 41 | 42 | val backend: SyncBackend = DefaultSyncBackend() 43 | val openAI = new OpenAI(apiKey) 44 | 45 | println("Sending request …") 46 | val responseEither = openAI.createChatCompletion(chatBody).send(backend).body 47 | 48 | responseEither match { 49 | case Left(err) => 50 | System.err.println(s"OpenAI returned an error: $err") 51 | case Right(resp) => 52 | val choice = resp.choices.head 53 | 54 | val maybeArgsRaw: Option[String] = choice.message.toolCalls.collectFirst { 55 | case sttp.openai.requests.completions.chat.ToolCall.FunctionToolCall(_, fn) => fn.arguments 56 | } 57 | 58 | maybeArgsRaw match { 59 | case Some(jsonStr) => 60 | println(s"Function call arguments: $jsonStr") 61 | val parsed = ujson.read(jsonStr) 62 | val maybeNum = parsed.obj.get("text").flatMap(_.numOpt) 63 | 64 | maybeNum match { 65 | case Some(n) => println(s"Success, numeric value provided: $n") 66 | case None => println("Failure: arguments didn't contain numeric 'text' field.") 67 | } 68 | 69 | case None => 70 | println("Model did not return a function call. 
Full message: ") 71 | println(choice.message) 72 | } 73 | } 74 | 75 | backend.close() 76 | } 77 | -------------------------------------------------------------------------------- /project/Dependencies.scala: -------------------------------------------------------------------------------- 1 | import sbt.* 2 | 3 | object Dependencies { 4 | 5 | object V { 6 | val scalaTest = "3.2.19" 7 | val scalaTestCats = "1.6.0" 8 | 9 | val sttpApispec = "0.11.9" 10 | val sttpClient = "4.0.8" 11 | val pekkoStreams = "1.1.3" 12 | val akkaStreams = "2.6.20" 13 | val tapir = "1.11.33" 14 | val uPickle = "4.2.1" 15 | } 16 | 17 | object Libraries { 18 | 19 | val scalaTest = "org.scalatest" %% "scalatest" % V.scalaTest % Test 20 | 21 | val sttpApispec = Seq( 22 | "com.softwaremill.sttp.apispec" %% "apispec-model" % V.sttpApispec, 23 | "com.softwaremill.sttp.apispec" %% "jsonschema-circe" % V.sttpApispec 24 | ) 25 | 26 | val sttpClient = Seq( 27 | "com.softwaremill.sttp.client4" %% "core" % V.sttpClient, 28 | "com.softwaremill.sttp.client4" %% "upickle" % V.sttpClient 29 | ) 30 | 31 | val sttpClientFs2 = Seq( 32 | "com.softwaremill.sttp.client4" %% "fs2" % V.sttpClient, 33 | "org.typelevel" %% "cats-effect-testing-scalatest" % V.scalaTestCats % Test 34 | ) 35 | 36 | val sttpClientZio = "com.softwaremill.sttp.client4" %% "zio" % V.sttpClient 37 | 38 | val sttpClientPekko = Seq( 39 | "com.softwaremill.sttp.client4" %% "pekko-http-backend" % V.sttpClient, 40 | "org.apache.pekko" %% "pekko-stream" % V.pekkoStreams 41 | ) 42 | 43 | val sttpClientAkka = Seq( 44 | "com.softwaremill.sttp.client4" %% "akka-http-backend" % V.sttpClient, 45 | "com.typesafe.akka" %% "akka-stream" % V.akkaStreams 46 | ) 47 | 48 | val sttpClientOx = Seq( 49 | "com.softwaremill.sttp.client4" %% "ox" % V.sttpClient 50 | ) 51 | 52 | val tapirApispecDocs = "com.softwaremill.sttp.tapir" %% "tapir-apispec-docs" % V.tapir 53 | 54 | val uJsonCirce = "com.lihaoyi" %% "ujson-circe" % V.uPickle 55 | 56 | val uPickle = "com.lihaoyi" %% "upickle" % V.uPickle 57 | 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /project/build.properties: -------------------------------------------------------------------------------- 1 | sbt.version=1.11.1 2 | -------------------------------------------------------------------------------- /project/plugins.sbt: -------------------------------------------------------------------------------- 1 | val sbtSoftwareMillVersion = "2.0.26" 2 | addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-common" % sbtSoftwareMillVersion) 3 | addSbtPlugin("com.softwaremill.sbt-softwaremill" % "sbt-softwaremill-publish" % sbtSoftwareMillVersion) 4 | addSbtPlugin("com.eed3si9n" % "sbt-projectmatrix" % "0.11.0") 5 | addSbtPlugin("org.scalameta" % "sbt-mdoc" % "2.7.1") 6 | -------------------------------------------------------------------------------- /streaming/akka/src/main/scala/sttp/openai/streaming/akka/package.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.streaming 2 | 3 | import _root_.akka.stream.scaladsl.{Flow, Source} 4 | import _root_.akka.util.ByteString 5 | import sttp.capabilities.akka.AkkaStreams 6 | import sttp.client4.StreamRequest 7 | import sttp.client4.akkahttp.AkkaHttpServerSentEvents 8 | import sttp.model.ResponseMetadata 9 | import sttp.model.sse.ServerSentEvent 10 | import sttp.openai.OpenAI 11 | import sttp.openai.OpenAIExceptions.OpenAIException 12 | import 
sttp.openai.json.SttpUpickleApiExtension.deserializeJsonSnake 13 | import sttp.openai.requests.audio.speech.SpeechRequestBody 14 | import sttp.openai.requests.completions.chat.ChatChunkRequestResponseData.ChatChunkResponse 15 | import sttp.openai.requests.completions.chat.ChatRequestBody.ChatBody 16 | 17 | package object akka { 18 | import ChatChunkResponse.DoneEvent 19 | 20 | implicit class extension(val client: OpenAI) { 21 | 22 | /** Generates audio from the input text. 23 | * 24 | * [[https://platform.openai.com/docs/api-reference/audio/createSpeech]] 25 | * 26 | * @param requestBody 27 | * Request body that will be used to create a speech. 28 | * 29 | * @return 30 | * The audio file content. 31 | */ 32 | def createSpeech(requestBody: SpeechRequestBody): StreamRequest[Either[OpenAIException, Source[ByteString, Any]], AkkaStreams] = 33 | client.createSpeechAsBinaryStream(AkkaStreams, requestBody) 34 | 35 | /** Creates and streams a model response as chunk objects for the given chat conversation defined in chatBody. The request will complete 36 | * and the connection close only once the source is fully consumed. 37 | * 38 | * [[https://platform.openai.com/docs/api-reference/chat/create]] 39 | * 40 | * @param chatBody 41 | * Chat request body. 42 | */ 43 | def createStreamedChatCompletion( 44 | chatBody: ChatBody 45 | ): StreamRequest[Either[OpenAIException, Source[ChatChunkResponse, Any]], AkkaStreams] = { 46 | val request = client 47 | .createChatCompletionAsBinaryStream(AkkaStreams, chatBody) 48 | 49 | request.response(request.response.mapWithMetadata(mapEventToResponse)) 50 | } 51 | } 52 | 53 | private def mapEventToResponse( 54 | response: Either[OpenAIException, Source[ByteString, Any]], 55 | metadata: ResponseMetadata 56 | ): Either[OpenAIException, Source[ChatChunkResponse, Any]] = 57 | response.map( 58 | _.via(AkkaHttpServerSentEvents.parse) 59 | .via(deserializeEvent(metadata)) 60 | ) 61 | 62 | private def deserializeEvent(metadata: ResponseMetadata): Flow[ServerSentEvent, ChatChunkResponse, Any] = 63 | Flow[ServerSentEvent] 64 | .takeWhile(_ != DoneEvent) 65 | .collect { case ServerSentEvent(Some(data), _, _, _) => 66 | deserializeJsonSnake[ChatChunkResponse].apply(data, metadata) match { 67 | case Left(exception) => throw exception 68 | case Right(value) => value 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /streaming/fs2/src/main/scala/sttp/openai/streaming/fs2/package.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.streaming 2 | 3 | import _root_.fs2.{Pipe, RaiseThrowable, Stream} 4 | import sttp.capabilities.fs2.Fs2Streams 5 | import sttp.client4.StreamRequest 6 | import sttp.client4.impl.fs2.Fs2ServerSentEvents 7 | import sttp.model.ResponseMetadata 8 | import sttp.model.sse.ServerSentEvent 9 | import sttp.openai.OpenAI 10 | import sttp.openai.OpenAIExceptions.OpenAIException 11 | import sttp.openai.json.SttpUpickleApiExtension.deserializeJsonSnake 12 | import sttp.openai.requests.audio.speech.SpeechRequestBody 13 | import sttp.openai.requests.completions.chat.ChatChunkRequestResponseData.ChatChunkResponse 14 | import sttp.openai.requests.completions.chat.ChatRequestBody.ChatBody 15 | 16 | package object fs2 { 17 | import ChatChunkResponse.DoneEvent 18 | 19 | implicit class extension(val client: OpenAI) { 20 | 21 | /** Generates audio from the input text. 
22 | * 23 | * [[https://platform.openai.com/docs/api-reference/audio/createSpeech]] 24 | * 25 | * @param requestBody 26 | * Request body that will be used to create a speech. 27 | * 28 | * @return 29 | * The audio file content. 30 | */ 31 | def createSpeech[F[_]: RaiseThrowable]( 32 | requestBody: SpeechRequestBody 33 | ): StreamRequest[Either[OpenAIException, Stream[F, Byte]], Fs2Streams[F]] = 34 | client.createSpeechAsBinaryStream(Fs2Streams[F], requestBody) 35 | 36 | /** Creates and streams a model response as chunk objects for the given chat conversation defined in chatBody. The request will complete 37 | * and the connection close only once the source is fully consumed. 38 | * 39 | * [[https://platform.openai.com/docs/api-reference/chat/create]] 40 | * 41 | * @param chatBody 42 | * Chat request body. 43 | */ 44 | def createStreamedChatCompletion[F[_]: RaiseThrowable]( 45 | chatBody: ChatBody 46 | ): StreamRequest[Either[OpenAIException, Stream[F, ChatChunkResponse]], Fs2Streams[F]] = { 47 | val request = client 48 | .createChatCompletionAsBinaryStream(Fs2Streams[F], chatBody) 49 | 50 | request.response(request.response.mapWithMetadata(mapEventToResponse[F])) 51 | } 52 | } 53 | 54 | private def mapEventToResponse[F[_]: RaiseThrowable]( 55 | response: Either[OpenAIException, Stream[F, Byte]], 56 | metadata: ResponseMetadata 57 | ): Either[OpenAIException, Stream[F, ChatChunkResponse]] = 58 | response.map( 59 | _.through(Fs2ServerSentEvents.parse) 60 | .through(deserializeEvent(metadata)) 61 | .rethrow 62 | ) 63 | 64 | private def deserializeEvent[F[_]](metadata: ResponseMetadata): Pipe[F, ServerSentEvent, Either[Exception, ChatChunkResponse]] = 65 | _.takeWhile(_ != DoneEvent) 66 | .collect { case ServerSentEvent(Some(data), _, _, _) => 67 | deserializeJsonSnake[ChatChunkResponse].apply(data, metadata) 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /streaming/ox/src/main/scala/sttp/openai/streaming/ox/oxStreaming.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.streaming.ox 2 | 3 | import ox.flow.Flow 4 | import sttp.client4.Request 5 | import sttp.client4.impl.ox.sse.OxServerSentEvents 6 | import sttp.model.ResponseMetadata 7 | import sttp.model.sse.ServerSentEvent 8 | import sttp.openai.OpenAI 9 | import sttp.openai.OpenAIExceptions.OpenAIException 10 | import sttp.openai.json.SttpUpickleApiExtension.deserializeJsonSnake 11 | import sttp.openai.requests.completions.chat.ChatChunkRequestResponseData.ChatChunkResponse 12 | import sttp.openai.requests.completions.chat.ChatChunkRequestResponseData.ChatChunkResponse.DoneEvent 13 | import sttp.openai.requests.completions.chat.ChatRequestBody.ChatBody 14 | 15 | import java.io.InputStream 16 | 17 | extension (client: OpenAI) 18 | /** Creates and streams a model response as chunk objects for the given chat conversation defined in chatBody. 19 | * 20 | * The request will complete and the connection close only once the returned [[Flow]] is fully consumed. 21 | * 22 | * [[https://platform.openai.com/docs/api-reference/chat/create]] 23 | * 24 | * @param chatBody 25 | * Chat request body. 
26 | */ 27 | def createStreamedChatCompletion( 28 | chatBody: ChatBody 29 | ): Request[Either[OpenAIException, Flow[Either[Exception, ChatChunkResponse]]]] = 30 | val request = client 31 | .createChatCompletionAsInputStream(chatBody) 32 | 33 | request.response(request.response.mapWithMetadata(mapEventToResponse)) 34 | 35 | private def mapEventToResponse( 36 | response: Either[OpenAIException, InputStream], 37 | metadata: ResponseMetadata 38 | ): Either[OpenAIException, Flow[Either[Exception, ChatChunkResponse]]] = 39 | response.map(s => 40 | OxServerSentEvents 41 | .parse(s) 42 | .takeWhile(_ != DoneEvent) 43 | .collect { case ServerSentEvent(Some(data), _, _, _) => 44 | deserializeJsonSnake[ChatChunkResponse].apply(data, metadata) 45 | } 46 | ) 47 | -------------------------------------------------------------------------------- /streaming/pekko/src/main/scala/sttp/openai/streaming/pekko/package.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.streaming 2 | 3 | import org.apache.pekko.stream.scaladsl.{Flow, Source} 4 | import org.apache.pekko.util.ByteString 5 | import sttp.capabilities.pekko.PekkoStreams 6 | import sttp.client4.StreamRequest 7 | import sttp.client4.pekkohttp.PekkoHttpServerSentEvents 8 | import sttp.model.ResponseMetadata 9 | import sttp.model.sse.ServerSentEvent 10 | import sttp.openai.OpenAI 11 | import sttp.openai.OpenAIExceptions.OpenAIException 12 | import sttp.openai.json.SttpUpickleApiExtension.deserializeJsonSnake 13 | import sttp.openai.requests.audio.speech.SpeechRequestBody 14 | import sttp.openai.requests.completions.chat.ChatChunkRequestResponseData.ChatChunkResponse 15 | import sttp.openai.requests.completions.chat.ChatRequestBody.ChatBody 16 | 17 | package object pekko { 18 | import ChatChunkResponse.DoneEvent 19 | 20 | implicit class extension(val client: OpenAI) { 21 | 22 | /** Generates audio from the input text. 23 | * 24 | * [[https://platform.openai.com/docs/api-reference/audio/createSpeech]] 25 | * 26 | * @param requestBody 27 | * Request body that will be used to create a speech. 28 | * 29 | * @return 30 | * The audio file content. 31 | */ 32 | def createSpeech(requestBody: SpeechRequestBody): StreamRequest[Either[OpenAIException, Source[ByteString, Any]], PekkoStreams] = 33 | client.createSpeechAsBinaryStream(PekkoStreams, requestBody) 34 | 35 | /** Creates and streams a model response as chunk objects for the given chat conversation defined in chatBody. The request will complete 36 | * and the connection close only once the source is fully consumed. 37 | * 38 | * [[https://platform.openai.com/docs/api-reference/chat/create]] 39 | * 40 | * @param chatBody 41 | * Chat request body. 
42 | */ 43 | def createStreamedChatCompletion( 44 | chatBody: ChatBody 45 | ): StreamRequest[Either[OpenAIException, Source[ChatChunkResponse, Any]], PekkoStreams] = { 46 | val request = client 47 | .createChatCompletionAsBinaryStream(PekkoStreams, chatBody) 48 | 49 | request.response(request.response.mapWithMetadata(mapEventToResponse)) 50 | } 51 | } 52 | 53 | private def mapEventToResponse( 54 | response: Either[OpenAIException, Source[ByteString, Any]], 55 | metadata: ResponseMetadata 56 | ): Either[OpenAIException, Source[ChatChunkResponse, Any]] = 57 | response.map( 58 | _.via(PekkoHttpServerSentEvents.parse) 59 | .via(deserializeEvent(metadata)) 60 | ) 61 | 62 | private def deserializeEvent(metadata: ResponseMetadata): Flow[ServerSentEvent, ChatChunkResponse, Any] = 63 | Flow[ServerSentEvent] 64 | .takeWhile(_ != DoneEvent) 65 | .collect { case ServerSentEvent(Some(data), _, _, _) => 66 | deserializeJsonSnake[ChatChunkResponse].apply(data, metadata) match { 67 | case Left(exception) => throw exception 68 | case Right(value) => value 69 | } 70 | } 71 | } 72 | -------------------------------------------------------------------------------- /streaming/zio/src/main/scala/sttp/openai/streaming/zio/package.scala: -------------------------------------------------------------------------------- 1 | package sttp.openai.streaming 2 | 3 | import _root_.zio.ZIO 4 | import _root_.zio.stream._ 5 | import sttp.capabilities.zio.ZioStreams 6 | import sttp.client4.StreamRequest 7 | import sttp.client4.impl.zio.ZioServerSentEvents 8 | import sttp.model.ResponseMetadata 9 | import sttp.model.sse.ServerSentEvent 10 | import sttp.openai.OpenAI 11 | import sttp.openai.OpenAIExceptions.OpenAIException 12 | import sttp.openai.json.SttpUpickleApiExtension.deserializeJsonSnake 13 | import sttp.openai.requests.audio.speech.SpeechRequestBody 14 | import sttp.openai.requests.completions.chat.ChatChunkRequestResponseData.ChatChunkResponse 15 | import sttp.openai.requests.completions.chat.ChatRequestBody.ChatBody 16 | 17 | package object zio { 18 | import ChatChunkResponse.DoneEvent 19 | 20 | implicit class extension(val client: OpenAI) { 21 | 22 | /** Generates audio from the input text. 23 | * 24 | * [[https://platform.openai.com/docs/api-reference/audio/createSpeech]] 25 | * 26 | * @param requestBody 27 | * Request body that will be used to create a speech. 28 | * 29 | * @return 30 | * The audio file content. 31 | */ 32 | def createSpeech(requestBody: SpeechRequestBody): StreamRequest[Either[OpenAIException, Stream[Throwable, Byte]], ZioStreams] = 33 | client.createSpeechAsBinaryStream(ZioStreams, requestBody) 34 | 35 | /** Creates and streams a model response as chunk objects for the given chat conversation defined in chatBody. The request will complete 36 | * and the connection close only once the source is fully consumed. 37 | * 38 | * [[https://platform.openai.com/docs/api-reference/chat/create]] 39 | * 40 | * @param chatBody 41 | * Chat request body. 
42 | */ 43 | def createStreamedChatCompletion( 44 | chatBody: ChatBody 45 | ): StreamRequest[Either[OpenAIException, Stream[Throwable, ChatChunkResponse]], ZioStreams] = { 46 | val request = client 47 | .createChatCompletionAsBinaryStream(ZioStreams, chatBody) 48 | request.response(request.response.mapWithMetadata(mapEventToResponse)) 49 | } 50 | } 51 | 52 | private def mapEventToResponse( 53 | response: Either[OpenAIException, Stream[Throwable, Byte]], 54 | metadata: ResponseMetadata 55 | ): Either[OpenAIException, Stream[Throwable, ChatChunkResponse]] = 56 | response.map( 57 | _.viaFunction(ZioServerSentEvents.parse) 58 | .viaFunction(deserializeEvent(metadata)) 59 | ) 60 | 61 | private def deserializeEvent(metadata: ResponseMetadata): ZioStreams.Pipe[ServerSentEvent, ChatChunkResponse] = 62 | _.takeWhile(_ != DoneEvent) 63 | .collectZIO { case ServerSentEvent(Some(data), _, _, _) => 64 | ZIO.fromEither(deserializeJsonSnake[ChatChunkResponse].apply(data, metadata)) 65 | } 66 | } 67 | --------------------------------------------------------------------------------
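Usage sketch (not part of the repository): a minimal example of how the zio streaming extension defined above might be consumed. The HttpClientZioBackend.scoped() constructor, the OPENAI_KEY environment variable and the GPT4oMini model are assumptions chosen for illustration; any ZioStreams-capable sttp backend and chat model should work the same way.

// Hedged usage sketch for the zio streaming extension above - not repository code.
import sttp.client4.httpclient.zio.HttpClientZioBackend
import sttp.openai.OpenAI
import sttp.openai.requests.completions.chat.ChatRequestBody.{ChatBody, ChatCompletionModel}
import sttp.openai.requests.completions.chat.message.{Content, Message}
import sttp.openai.streaming.zio._
import zio._

object StreamedChatSketch extends ZIOAppDefault {

  // a single-message conversation; the model choice is an assumption for illustration
  private val chatBody = ChatBody(
    model = ChatCompletionModel.GPT4oMini,
    messages = Seq(Message.UserMessage(Content.TextContent("Tell me a short joke")))
  )

  override def run =
    ZIO.scoped {
      for {
        backend <- HttpClientZioBackend.scoped() // ZioStreams-capable backend (assumed constructor)
        openAI = new OpenAI(sys.env.getOrElse("OPENAI_KEY", "")) // env variable name is an assumption
        response <- openAI.createStreamedChatCompletion(chatBody).send(backend)
        _ <- response.body match {
          case Left(openAIError) => ZIO.fail(openAIError) // HTTP-level error returned by OpenAI
          case Right(chunks) =>
            // print each incremental delta as it arrives; consuming the stream fully
            // completes the request and closes the connection
            chunks
              .map(_.choices.headOption.flatMap(_.delta.content).getOrElse(""))
              .foreach(Console.print(_))
        }
      } yield ()
    }
}

As the scaladoc above notes, the request only completes and the connection only closes once the returned stream is fully consumed, which is why the sketch drains it with foreach.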