├── Demos
│   ├── Chat
│   │   └── Chat
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── chat.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── ChatApp.swift
│   │       ├── ChatMessage.swift
│   │       ├── AppLogger.swift
│   │       ├── AppConstants.swift
│   │       ├── ChatManager.swift
│   │       ├── ChatInputView.swift
│   │       ├── ChatBubble.swift
│   │       └── ChatDataLoader.swift
│   ├── Trivia
│   │   └── Trivia
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── trivia.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── TriviaApp.swift
│   │       ├── AppLogger.swift
│   │       ├── AppConstants.swift
│   │       ├── TriviaCardData.swift
│   │       ├── TriviaManager.swift
│   │       ├── TriviaView.swift
│   │       ├── TriviaAnswerPicker.swift
│   │       ├── TriviaDataLoader.swift
│   │       ├── TriviaFormView.swift
│   │       └── TriviaCardView.swift
│   ├── PuLIDDemo
│   │   └── PuLIDDemo
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── pulid.imageset
│   │       │   │   ├── pulid.png
│   │       │   │   └── Contents.json
│   │       │   ├── AccentColor.colorset
│   │       │   │   └── Contents.json
│   │       │   └── AppIcon.appiconset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── Info.plist
│   │       ├── PuLIDDemoApp.swift
│   │       ├── AppConstants.swift
│   │       └── Ripple.metal
│   ├── Stickers
│   │   └── Stickers
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── sticker.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── StickersApp.swift
│   │       ├── AppLogger.swift
│   │       ├── AppConstants.swift
│   │       ├── StickerLoadingView.swift
│   │       ├── StickerImageView.swift
│   │       ├── StickerInputView.swift
│   │       ├── StickerDataLoader.swift
│   │       └── StickerManager.swift
│   ├── Classifier
│   │   └── Classifier
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── classify.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── ClassifierApp.swift
│   │       ├── AppLogger.swift
│   │       ├── CameraControlsView.swift
│   │       ├── AppConstants.swift
│   │       ├── CameraView.swift
│   │       ├── ClassifierManager.swift
│   │       ├── CameraFrameManager.swift
│   │       ├── ClassifierDataLoader.swift
│   │       └── CameraDataLoader.swift
│   ├── FilmFinder
│   │   └── FilmFinder
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── icon.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── Info.plist
│   │       ├── GetStartedTip.swift
│   │       ├── FilmFinderApp.swift
│   │       ├── AppConstants.swift
│   │       ├── Movie.swift
│   │       └── Ripple.metal
│   ├── Transcriber
│   │   └── Transcriber
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── transcribe.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── TranscriberApp.swift
│   │       ├── ModelContext+Extensions.swift
│   │       ├── AppLogger.swift
│   │       ├── NoRecordingsView.swift
│   │       ├── TranscriberDataLoader.swift
│   │       ├── AudioRecording.swift
│   │       ├── AppConstants.swift
│   │       ├── FileUtils.swift
│   │       ├── TranscribedAudioRecording.swift
│   │       ├── RecordingRowView.swift
│   │       ├── AudioRecorder.swift
│   │       ├── TranscriberManager.swift
│   │       └── AudioFileWriter.swift
│   ├── Translator
│   │   └── Translator
│   │       ├── Assets.xcassets
│   │       │   ├── Contents.json
│   │       │   ├── AppIcon.appiconset
│   │       │   │   ├── translate.png
│   │       │   │   └── Contents.json
│   │       │   └── AccentColor.colorset
│   │       │       └── Contents.json
│   │       ├── Preview Content
│   │       │   └── Preview Assets.xcassets
│   │       │       └── Contents.json
│   │       ├── TranslatorApp.swift
│   │       ├── AppLogger.swift
│   │       ├── AppConstants.swift
│   │       ├── TranslationDataLoader.swift
│   │       ├── TranslateView.swift
│   │       ├── BottomTranslateView.swift
│   │       └── TopTranslateView.swift
│   ├── AIColorPalette
│   │   ├── AIColorPalette
│   │   │   ├── Assets.xcassets
│   │   │   │   ├── Contents.json
│   │   │   │   ├── palm.imageset
│   │   │   │   │   ├── palm.jpg
│   │   │   │   │   └── Contents.json
│   │   │   │   ├── AccentColor.colorset
│   │   │   │   │   └── Contents.json
│   │   │   │   └── AppIcon.appiconset
│   │   │   │       └── Contents.json
│   │   │   ├── Preview Content
│   │   │   │   └── Preview Assets.xcassets
│   │   │   │       └── Contents.json
│   │   │   ├── AIColorPaletteApp.swift
│   │   │   ├── ColorData.swift
│   │   │   ├── Ripple.metal
│   │   │   └── AIProxyIntegration.swift
│   │   └── README.md
│   └── EmojiPuzzleMaker
│       └── EmojiPuzzleMaker
│           ├── Assets.xcassets
│           │   ├── Contents.json
│           │   ├── AccentColor.colorset
│           │   │   └── Contents.json
│           │   └── AppIcon.appiconset
│           │       └── Contents.json
│           ├── Preview Content
│           │   └── Preview Assets.xcassets
│           │       └── Contents.json
│           └── EmojiPuzzleMakerApp.swift
├── AIProxyFal
│   └── AIProxyFal
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── fal.imageset
│       │   │   ├── fal.png
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyFalApp.swift
│       ├── AppConstants.swift
│       └── ContentView.swift
├── AIProxyDeepL
│   └── AIProxyDeepL
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── deepl.imageset
│       │   │   ├── deepl.png
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyDeepLApp.swift
│       ├── AppConstants.swift
│       ├── ContentView.swift
│       └── TranslationView.swift
├── AIProxyGroq
│   └── AIProxyGroq
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── groq.imageset
│       │   │   ├── groq.png
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyGroqApp.swift
│       ├── AppConstants.swift
│       ├── ContentView.swift
│       ├── ChatView.swift
│       └── StreamingChatView.swift
├── AIProxyGemini
│   └── AIProxyGemini
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── icon.imageset
│       │   │   ├── gemini.png
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyGeminiApp.swift
│       ├── AppConstants.swift
│       └── ContentView.swift
├── AIProxyOpenAI
│   └── AIProxyOpenAI
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── openai.imageset
│       │   │   ├── openai.png
│       │   │   └── Contents.json
│       │   ├── surfer.imageset
│       │   │   ├── surfer.jpeg
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyOpenAIApp.swift
│       ├── AppConstants.swift
│       ├── ContentView.swift
│       ├── ChatView.swift
│       ├── StreamingChatView.swift
│       └── DalleView.swift
├── AIProxyAnthropic
│   └── AIProxyAnthropic
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── climber.imageset
│       │   │   ├── climber.jpg
│       │   │   └── Contents.json
│       │   ├── anthropic.imageset
│       │   │   ├── anthropic.jpeg
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyAnthropicApp.swift
│       ├── AppConstants.swift
│       └── ContentView.swift
├── AIProxyReplicate
│   └── AIProxyReplicate
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── replicate.imageset
│       │   │   ├── replicate.jpeg
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyReplicateApp.swift
│       ├── AppConstants.swift
│       ├── ContentView.swift
│       └── ImageGenView.swift
├── AIProxyTogetherAI
│   └── AIProxyTogetherAI
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── togetherai.imageset
│       │   │   ├── togetherai.png
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyTogetherAIApp.swift
│       ├── AppConstants.swift
│       ├── ContentView.swift
│       ├── ChatView.swift
│       └── StreamingChatView.swift
├── AIProxyStabilityAI
│   └── AIProxyStabilityAI
│       ├── Assets.xcassets
│       │   ├── Contents.json
│       │   ├── stability.imageset
│       │   │   ├── stability.png
│       │   │   └── Contents.json
│       │   ├── AccentColor.colorset
│       │   │   └── Contents.json
│       │   └── AppIcon.appiconset
│       │       └── Contents.json
│       ├── Preview Content
│       │   └── Preview Assets.xcassets
│       │       └── Contents.json
│       ├── AIProxyStabilityAIApp.swift
│       ├── AppConstants.swift
│       ├── ContentView.swift
│       └── ImageGenView.swift
├── .gitignore
└── README.md
/Demos/Chat/Chat/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/EmojiPuzzleMaker/EmojiPuzzleMaker/Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/Assets.xcassets/fal.imageset/fal.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyFal/AIProxyFal/Assets.xcassets/fal.imageset/fal.png
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/Assets.xcassets/AppIcon.appiconset/chat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/Chat/Chat/Assets.xcassets/AppIcon.appiconset/chat.png
--------------------------------------------------------------------------------
/Demos/EmojiPuzzleMaker/EmojiPuzzleMaker/Preview Content/Preview Assets.xcassets/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "info" : {
3 | "author" : "xcode",
4 | "version" : 1
5 | }
6 | }
7 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/Assets.xcassets/groq.imageset/groq.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyGroq/AIProxyGroq/Assets.xcassets/groq.imageset/groq.png
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/Assets.xcassets/deepl.imageset/deepl.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyDeepL/AIProxyDeepL/Assets.xcassets/deepl.imageset/deepl.png
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Assets.xcassets/pulid.imageset/pulid.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/PuLIDDemo/PuLIDDemo/Assets.xcassets/pulid.imageset/pulid.png
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/Assets.xcassets/AppIcon.appiconset/trivia.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/Trivia/Trivia/Assets.xcassets/AppIcon.appiconset/trivia.png
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/Assets.xcassets/icon.imageset/gemini.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyGemini/AIProxyGemini/Assets.xcassets/icon.imageset/gemini.png
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/openai.imageset/openai.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/openai.imageset/openai.png
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/surfer.imageset/surfer.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/surfer.imageset/surfer.jpeg
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Assets.xcassets/AppIcon.appiconset/icon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/FilmFinder/FilmFinder/Assets.xcassets/AppIcon.appiconset/icon.png
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/Assets.xcassets/AppIcon.appiconset/sticker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/Stickers/Stickers/Assets.xcassets/AppIcon.appiconset/sticker.png
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Assets.xcassets/palm.imageset/palm.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/AIColorPalette/AIColorPalette/Assets.xcassets/palm.imageset/palm.jpg
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/Assets.xcassets/AppIcon.appiconset/classify.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/Classifier/Classifier/Assets.xcassets/AppIcon.appiconset/classify.png
--------------------------------------------------------------------------------
/Demos/Translator/Translator/Assets.xcassets/AppIcon.appiconset/translate.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/Translator/Translator/Assets.xcassets/AppIcon.appiconset/translate.png
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/climber.imageset/climber.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/climber.imageset/climber.jpg
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/Assets.xcassets/AppIcon.appiconset/transcribe.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/Demos/Transcriber/Transcriber/Assets.xcassets/AppIcon.appiconset/transcribe.png
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/anthropic.imageset/anthropic.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/anthropic.imageset/anthropic.jpeg
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/Assets.xcassets/replicate.imageset/replicate.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyReplicate/AIProxyReplicate/Assets.xcassets/replicate.imageset/replicate.jpeg
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/Assets.xcassets/stability.imageset/stability.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyStabilityAI/AIProxyStabilityAI/Assets.xcassets/stability.imageset/stability.png
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/Assets.xcassets/togetherai.imageset/togetherai.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/toddham/AIProxyBootstrap/HEAD/AIProxyTogetherAI/AIProxyTogetherAI/Assets.xcassets/togetherai.imageset/togetherai.png
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Info.plist:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/Demos/EmojiPuzzleMaker/EmojiPuzzleMaker/Assets.xcassets/AccentColor.colorset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "colors" : [
3 | {
4 | "idiom" : "universal"
5 | }
6 | ],
7 | "info" : {
8 | "author" : "xcode",
9 | "version" : 1
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TriviaApp.swift
3 | // Trivia
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct TriviaApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | TriviaView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/Demos/EmojiPuzzleMaker/EmojiPuzzleMaker/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | }
8 | ],
9 | "info" : {
10 | "author" : "xcode",
11 | "version" : 1
12 | }
13 | }
14 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "chat.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | }
9 | ],
10 | "info" : {
11 | "author" : "xcode",
12 | "version" : 1
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/TranslatorApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranslatorApp.swift
3 | // Translator
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct TranslatorApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | TranslateView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "trivia.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | }
9 | ],
10 | "info" : {
11 | "author" : "xcode",
12 | "version" : 1
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "sticker.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | }
9 | ],
10 | "info" : {
11 | "author" : "xcode",
12 | "version" : 1
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/AIProxyFalApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyFalApp.swift
3 | // AIProxyFal
4 | //
5 | // Created by Todd Hamilton on 6/13/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyFalApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "classify.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | }
9 | ],
10 | "info" : {
11 | "author" : "xcode",
12 | "version" : 1
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/PuLIDDemoApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // PuLIDDemoApp.swift
3 | // PuLIDDemo
4 | //
5 | // Created by Todd Hamilton on 9/26/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct PuLIDDemoApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "transcribe.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | }
9 | ],
10 | "info" : {
11 | "author" : "xcode",
12 | "version" : 1
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "translate.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | }
9 | ],
10 | "info" : {
11 | "author" : "xcode",
12 | "version" : 1
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/AIProxyGroqApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyGroqApp.swift
3 | // AIProxyGroq
4 | //
5 | // Created by Todd Hamilton on 10/1/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyGroqApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/AIProxyDeepLApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyDeepLApp.swift
3 | // AIProxyDeepL
4 | //
5 | // Created by Todd Hamilton on 8/14/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyDeepLApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/AIProxyOpenAIApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyOpenAIApp.swift
3 | // AIProxyOpenAI
4 | //
5 | // Created by Todd Hamilton on 6/14/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyOpenAIApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/AIProxyGeminiApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyGeminiApp.swift
3 | // AIProxyGemini
4 | //
5 | // Created by Todd Hamilton on 10/18/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyGeminiApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/AIColorPaletteApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIColorPaletteApp.swift
3 | // AIColorPalette
4 | //
5 | // Created by Todd Hamilton on 6/20/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIColorPaletteApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/AIProxyAnthropicApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyAnthropicApp.swift
3 | // AIProxyAnthropic
4 | //
5 | // Created by Todd Hamilton on 6/17/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyAnthropicApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/AIProxyReplicateApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyReplicateApp.swift
3 | // AIProxyReplicate
4 | //
5 | // Created by Todd Hamilton on 6/13/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyReplicateApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/AIProxyTogetherAIApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyTogetherAIApp.swift
3 | // AIProxyTogetherAI
4 | //
5 | // Created by Todd Hamilton on 8/18/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyTogetherAIApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Demos/EmojiPuzzleMaker/EmojiPuzzleMaker/EmojiPuzzleMakerApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // EmojiPuzzleMakerApp.swift
3 | // EmojiPuzzleMaker
4 | //
5 | // Created by Todd Hamilton on 8/1/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct EmojiPuzzleMakerApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/AIProxyStabilityAIApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyStabilityAIApp.swift
3 | // AIProxyStabilityAI
4 | //
5 | // Created by Todd Hamilton on 8/13/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | struct AIProxyStabilityAIApp: App {
12 | var body: some Scene {
13 | WindowGroup {
14 | ContentView()
15 | }
16 | }
17 | }
18 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/ChatApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatApp.swift
3 | // Chat
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | @MainActor
12 | struct ChatApp: App {
13 |
14 | @State private var chatManager = ChatManager()
15 |
16 | var body: some Scene {
17 | WindowGroup {
18 | ChatView(chatManager: chatManager)
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
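Note: ChatManager.swift is listed in the tree but not reproduced in this excerpt, and the ChatView referenced above has no source shown here. As a rough, hypothetical sketch (type names and shapes assumed, not the project's actual implementation), the pattern of creating an @Observable manager with @State and injecting it into the root view looks like this:

import SwiftUI
import Observation

// Hypothetical sketch only: the real ChatManager and ChatView live in
// files that are not shown in this excerpt.
@MainActor
@Observable
final class ChatManager {
    var messages: [String] = []

    func send(_ text: String) {
        messages.append(text)
        // A real implementation would forward the message to the AI backend here.
    }
}

struct ChatView: View {
    let chatManager: ChatManager
    @State private var draft = ""

    var body: some View {
        VStack {
            List(chatManager.messages, id: \.self) { Text($0) }
            TextField("Message", text: $draft)
                .onSubmit {
                    chatManager.send(draft)
                    draft = ""
                }
        }
        .padding()
    }
}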
/Demos/Stickers/Stickers/StickersApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StickersApp.swift
3 | // Stickers
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | @MainActor
12 | struct StickersApp: App {
13 |
14 | @State var stickerManager = StickerManager()
15 |
16 | var body: some Scene {
17 | WindowGroup {
18 | StickerView(stickerManager: stickerManager)
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/Assets.xcassets/fal.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "fal.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/Assets.xcassets/groq.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "groq.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/GetStartedTip.swift:
--------------------------------------------------------------------------------
1 | //
2 | // GetStartedTip.swift
3 | // FilmFinder
4 | //
5 | // Created by Todd Hamilton on 10/31/24.
6 | //
7 |
8 | import SwiftUI
9 | import TipKit
10 |
11 | // Tooltip for first time users
12 | struct GetStartedTip: Tip {
13 | var title: Text {
14 | Text("Get movie recommendations")
15 | }
16 | var message: Text? {
17 | Text("Drag the circle to choose a genre.")
18 | }
19 | }
20 |
--------------------------------------------------------------------------------
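GetStartedTip conforms to TipKit's Tip protocol with a title and message. A minimal sketch of how such a tip is typically surfaced from a view, using the standard .popoverTip modifier (the host view below is hypothetical; the project's actual attachment point is in files not shown here):

import SwiftUI
import TipKit

struct GenreCircleExample: View {   // hypothetical host view, not from this repo
    private let tip = GetStartedTip()

    var body: some View {
        Circle()
            .frame(width: 80, height: 80)
            // Anchors the tip as a popover on this view until the user
            // dismisses it or TipKit invalidates it.
            .popoverTip(tip)
    }
}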
/AIProxyDeepL/AIProxyDeepL/Assets.xcassets/deepl.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "deepl.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/Assets.xcassets/icon.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "gemini.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/openai.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "openai.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Assets.xcassets/pulid.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "pulid.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/Assets.xcassets/surfer.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "surfer.jpeg",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Assets.xcassets/palm.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "palm.jpg",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/TranscriberApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranscriberApp.swift
3 | // Transcriber
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | @MainActor
12 | struct TranscriberApp: App {
13 |
14 | @State var transcriberManager = TranscriberManager()
15 |
16 | var body: some Scene {
17 | WindowGroup {
18 | TranscriberView(transcriberManager: transcriberManager)
19 | }
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/climber.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "climber.jpg",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/Assets.xcassets/anthropic.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "anthropic.jpeg",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/Assets.xcassets/replicate.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "replicate.jpeg",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/Assets.xcassets/stability.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "stability.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/Assets.xcassets/togetherai.imageset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "togetherai.png",
5 | "idiom" : "universal",
6 | "scale" : "1x"
7 | },
8 | {
9 | "idiom" : "universal",
10 | "scale" : "2x"
11 | },
12 | {
13 | "idiom" : "universal",
14 | "scale" : "3x"
15 | }
16 | ],
17 | "info" : {
18 | "author" : "xcode",
19 | "version" : 1
20 | }
21 | }
22 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/FilmFinderApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FilmFinderApp.swift
3 | // FilmFinder
4 | //
5 | // Created by Todd Hamilton on 10/30/24.
6 | //
7 |
8 | import SwiftUI
9 | import TipKit
10 |
11 | @main
12 | struct FilmFinderApp: App {
13 | var body: some Scene {
14 | WindowGroup {
15 | ContentView()
16 | }
17 | }
18 |
19 | init() {
20 | /// Load and configure the state of all the tips of the app
21 | try? Tips.configure()
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/ColorData.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ColorData.swift
3 | // AIColorPalette
4 | //
5 | // Created by Todd Hamilton on 6/21/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | // Define the structure for the JSON data
11 | struct ColorData: Codable {
12 | let red: Double
13 | let green: Double
14 | let blue: Double
15 |
16 | enum CodingKeys: String, CodingKey {
17 | case red
18 | case green
19 | case blue
20 | }
21 | }
22 |
23 | struct Colors: Codable {
24 | let colors: [ColorData]
25 | }
26 |
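A minimal decoding sketch (not part of the project source), assuming OpenAI returns a payload shaped like the `Colors` model above; the sample JSON string is hypothetical and in the demo would come from the chat completion response:

import Foundation
import SwiftUI

// Hypothetical sample payload for illustration only.
let sampleJSON = #"{"colors":[{"red":0.1,"green":0.5,"blue":0.9}]}"#

func swiftUIColors(fromJSON json: String) throws -> [Color] {
    // Decode the JSON into the Codable models defined above, then map to SwiftUI colors.
    let decoded = try JSONDecoder().decode(Colors.self, from: Data(json.utf8))
    return decoded.colors.map { Color(red: $0.red, green: $0.green, blue: $0.blue) }
}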
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/ClassifierApp.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ClassifierApp.swift
3 | // Classifier
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @main
11 | @MainActor
12 | struct ClassifierApp: App {
13 |
14 | @State private var cameraFrameManager = CameraFrameManager()
15 | @State private var classifierManager = ClassifierManager()
16 |
17 | var body: some Scene {
18 | WindowGroup {
19 | ClassifierView(cameraFrameManager: cameraFrameManager,
20 | classifierManager: classifierManager)
21 | }
22 | }
23 | }
24 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/ChatMessage.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatMessage.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 |
10 | /// Data model to represent a chat message
11 | struct ChatMessage: Identifiable, Equatable {
12 | /// Unique identifier
13 | let id = UUID()
14 |
15 | /// The body of the chat message
16 | var text: String
17 |
18 | /// True if the message originates from the user, false if it originates from OpenAI
19 | let isUser: Bool
20 |
21 | /// Indicates that we are waiting for the first bit of message content from OpenAI
22 | var isWaitingForFirstText = false
23 | }
24 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/ModelContext+Extensions.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ModelContext+Extensions.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftData
10 |
11 | extension ModelContext {
12 |
13 | /// Deletes all models in `AppConstants.swiftDataModels` from SwiftData.
14 | /// Use this during development to return to a clean slate.
15 | func reset() {
16 | do {
17 | for model in AppConstants.swiftDataModels {
18 | try self.delete(model: model)
19 | }
20 | } catch {
21 | AppLogger.error("Failed to reset swift data for all models. Error: \(error.localizedDescription)")
22 | }
23 | }
24 | }
25 |
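A usage sketch, assuming the main-actor context of the SwiftData container defined in the Transcriber's AppConstants:

// Development-only: wipe all persisted SwiftData models, e.g. from a debug action on the main actor.
AppConstants.swiftDataContainer.mainContext.reset()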
--------------------------------------------------------------------------------
/Demos/Chat/Chat/AppLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppLogger.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import OSLog
10 |
11 | /// Log levels available:
12 | ///
13 | /// AppLogger.debug
14 | /// AppLogger.info
15 | /// AppLogger.warning
16 | /// AppLogger.error
17 | /// AppLogger.critical
18 | ///
19 | /// Flip on metadata logging in Xcode's console to show which source line the log occurred from.
20 | ///
21 | /// See my reddit post for video instructions:
22 | /// https://www.reddit.com/r/SwiftUI/comments/15lsdtk/how_to_use_the_oslog_logger/
23 | let AppLogger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "UnknownApp",
24 | category: "AIProxyBootstrapChat")
25 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/AppLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppLogger.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import OSLog
10 |
11 | /// Log levels available:
12 | ///
13 | /// AppLogger.debug
14 | /// AppLogger.info
15 | /// AppLogger.warning
16 | /// AppLogger.error
17 | /// AppLogger.critical
18 | ///
19 | /// Flip on metadata logging in Xcode's console to show which source line the log occurred from.
20 | ///
21 | /// See my reddit post for video instructions:
22 | /// https://www.reddit.com/r/SwiftUI/comments/15lsdtk/how_to_use_the_oslog_logger/
23 | let AppLogger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "UnknownApp",
24 | category: "AIProxyBootstrapTrivia")
25 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/AppLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppLogger.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import OSLog
10 |
11 | /// Log levels available:
12 | ///
13 | /// AppLogger.debug
14 | /// AppLogger.info
15 | /// AppLogger.warning
16 | /// AppLogger.error
17 | /// AppLogger.critical
18 | ///
19 | /// Flip on metadata logging in Xcode's console to show which source line the log occurred from.
20 | ///
21 | /// See my reddit post for video instructions:
22 | /// https://www.reddit.com/r/SwiftUI/comments/15lsdtk/how_to_use_the_oslog_logger/
23 | let AppLogger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "UnknownApp",
24 | category: "AIProxyBootstrapStickers")
25 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/AppLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppLogger.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import OSLog
10 |
11 | /// Log levels available:
12 | ///
13 | /// AppLogger.debug
14 | /// AppLogger.info
15 | /// AppLogger.warning
16 | /// AppLogger.error
17 | /// AppLogger.critical
18 | ///
19 | /// Flip on metadata logging in Xcode's console to show which source line the log occurred from.
20 | ///
21 | /// See my reddit post for video instructions:
22 | /// https://www.reddit.com/r/SwiftUI/comments/15lsdtk/how_to_use_the_oslog_logger/
23 | let AppLogger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "UnknownApp",
24 | category: "AIProxyBootstrapClassifier")
25 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/AppLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppLogger.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import OSLog
10 |
11 | /// Log levels available:
12 | ///
13 | /// AppLogger.debug
14 | /// AppLogger.info
15 | /// AppLogger.warning
16 | /// AppLogger.error
17 | /// AppLogger.critical
18 | ///
19 | /// Flip on metadata logging in Xcode's console to show which source line the log occurred from.
20 | ///
21 | /// See my reddit post for video instructions:
22 | /// https://www.reddit.com/r/SwiftUI/comments/15lsdtk/how_to_use_the_oslog_logger/
23 | let AppLogger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "UnknownApp",
24 | category: "AIProxyBootstrapTranscriber")
25 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/AppLogger.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppLogger.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import OSLog
10 |
11 | /// Log levels available:
12 | ///
13 | /// AppLogger.debug
14 | /// AppLogger.info
15 | /// AppLogger.warning
16 | /// AppLogger.error
17 | /// AppLogger.critical
18 | ///
19 | /// Flip on metadata logging in Xcode's console to show which source line the log occurred from.
20 | ///
21 | /// See my reddit post for video instructions:
22 | /// https://www.reddit.com/r/SwiftUI/comments/15lsdtk/how_to_use_the_oslog_logger/
23 | let AppLogger = Logger(subsystem: Bundle.main.bundleIdentifier ?? "UnknownApp",
24 | category: "AIProxyBootstrapTranslator")
25 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyFal
4 | //
5 | // Created by Todd Hamilton on 9/17/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let falService = AIProxy.falDirectService(
19 | unprotectedAPIKey: "your-fal-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let falService = AIProxy.falService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyGroq
4 | //
5 | // Created by Todd Hamilton on 10/1/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let groqService = AIProxy.groqDirectService(
19 | unprotectedAPIKey: "your-groq-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let groqService = AIProxy.groqService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyOpenAI
4 | //
5 | // Created by Todd Hamilton on 6/14/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let openAIService = AIProxy.openAIDirectService(
19 | unprotectedAPIKey: "your-openai-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let openAIService = AIProxy.openAIService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
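Once one of the service definitions above is uncommented, openAIService can be called from any async context. A minimal sketch, following the same chat-completion pattern used in the demo data loaders elsewhere in this repo (the model name here is only an example):

func askOpenAI(_ question: String) async throws -> String? {
    // Send a single user message and return the assistant's reply, if any.
    let response = try await openAIService.chatCompletionRequest(body: .init(
        model: "gpt-4o",
        messages: [.user(content: .text(question))]
    ))
    return response.choices.first?.message.content
}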
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyGemini
4 | //
5 | // Created by Todd Hamilton on 10/18/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let geminiService = AIProxy.geminiDirectService(
19 | unprotectedAPIKey: "your-gemini-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let geminiService = AIProxy.geminiService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "appearances" : [
10 | {
11 | "appearance" : "luminosity",
12 | "value" : "dark"
13 | }
14 | ],
15 | "idiom" : "universal",
16 | "platform" : "ios",
17 | "size" : "1024x1024"
18 | },
19 | {
20 | "appearances" : [
21 | {
22 | "appearance" : "luminosity",
23 | "value" : "tinted"
24 | }
25 | ],
26 | "idiom" : "universal",
27 | "platform" : "ios",
28 | "size" : "1024x1024"
29 | }
30 | ],
31 | "info" : {
32 | "author" : "xcode",
33 | "version" : 1
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // PuLIDDemo
4 | //
5 | // Created by Todd Hamilton on 9/28/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let replicateService = AIProxy.replicateDirectService(
19 | unprotectedAPIKey: "your-replicate-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let replicateService = AIProxy.replicateService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "appearances" : [
10 | {
11 | "appearance" : "luminosity",
12 | "value" : "dark"
13 | }
14 | ],
15 | "idiom" : "universal",
16 | "platform" : "ios",
17 | "size" : "1024x1024"
18 | },
19 | {
20 | "appearances" : [
21 | {
22 | "appearance" : "luminosity",
23 | "value" : "tinted"
24 | }
25 | ],
26 | "idiom" : "universal",
27 | "platform" : "ios",
28 | "size" : "1024x1024"
29 | }
30 | ],
31 | "info" : {
32 | "author" : "xcode",
33 | "version" : 1
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "idiom" : "universal",
5 | "platform" : "ios",
6 | "size" : "1024x1024"
7 | },
8 | {
9 | "appearances" : [
10 | {
11 | "appearance" : "luminosity",
12 | "value" : "dark"
13 | }
14 | ],
15 | "idiom" : "universal",
16 | "platform" : "ios",
17 | "size" : "1024x1024"
18 | },
19 | {
20 | "appearances" : [
21 | {
22 | "appearance" : "luminosity",
23 | "value" : "tinted"
24 | }
25 | ],
26 | "idiom" : "universal",
27 | "platform" : "ios",
28 | "size" : "1024x1024"
29 | }
30 | ],
31 | "info" : {
32 | "author" : "xcode",
33 | "version" : 1
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyAnthropic
4 | //
5 | // Created by Todd Hamilton on 8/14/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let anthropicService = AIProxy.anthropicDirectService(
19 | unprotectedAPIKey: "your-anthropic-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let anthropicService = AIProxy.anthropicService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyDeepL
4 | //
5 | // Created by Todd Hamilton on 8/14/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let deepLService = AIProxy.deepLDirectService(
19 | unprotectedAPIKey: "your-deepL-key",
20 | accountType: .free
21 | )
22 |
23 | /* Uncomment for all other production use cases */
24 | //let deepLService = AIProxy.deepLService(
25 | // partialKey: "partial-key-from-your-developer-dashboard",
26 | // serviceURL: "service-url-from-your-developer-dashboard"
27 | //)
28 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyReplicate
4 | //
5 | // Created by Todd Hamilton on 6/13/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let replicateService = AIProxy.replicateDirectService(
19 | unprotectedAPIKey: "your-replicate-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let replicateService = AIProxy.replicateService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyStabilityAI
4 | //
5 | // Created by Todd Hamilton on 8/13/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let stabilityService = AIProxy.stabilityAIDirectService(
19 | unprotectedAPIKey: "your-stability-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let stabilityService = AIProxy.stabilityAIService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyTogetherAI
4 | //
5 | // Created by Todd Hamilton on 8/18/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 | """
15 | )
16 |
17 | /* Uncomment for BYOK use cases */
18 | let togetherAIService = AIProxy.togetherAIDirectService(
19 | unprotectedAPIKey: "your-togetherAI-key"
20 | )
21 |
22 | /* Uncomment for all other production use cases */
23 | //let togetherAIService = AIProxy.togetherAIService(
24 | // partialKey: "partial-key-from-your-developer-dashboard",
25 | // serviceURL: "service-url-from-your-developer-dashboard"
26 | //)
27 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Assets.xcassets/AppIcon.appiconset/Contents.json:
--------------------------------------------------------------------------------
1 | {
2 | "images" : [
3 | {
4 | "filename" : "icon.png",
5 | "idiom" : "universal",
6 | "platform" : "ios",
7 | "size" : "1024x1024"
8 | },
9 | {
10 | "appearances" : [
11 | {
12 | "appearance" : "luminosity",
13 | "value" : "dark"
14 | }
15 | ],
16 | "idiom" : "universal",
17 | "platform" : "ios",
18 | "size" : "1024x1024"
19 | },
20 | {
21 | "appearances" : [
22 | {
23 | "appearance" : "luminosity",
24 | "value" : "tinted"
25 | }
26 | ],
27 | "idiom" : "universal",
28 | "platform" : "ios",
29 | "size" : "1024x1024"
30 | }
31 | ],
32 | "info" : {
33 | "author" : "xcode",
34 | "version" : 1
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AIProxy
9 |
10 | enum AppConstants {
11 | #error(
12 | """
13 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
14 | Please see https://www.aiproxy.pro/docs/integration-guide.html
15 | """
16 | )
17 |
18 | /* Uncomment for BYOK use cases */
19 | static let openAIService = AIProxy.openAIDirectService(
20 | unprotectedAPIKey: "your-openai-key"
21 | )
22 |
23 | /* Uncomment for all other production use cases */
24 | // static let openAIService = AIProxy.openAIService(
25 | // partialKey: "partial-key-from-your-developer-dashboard",
26 | // serviceURL: "service-url-from-your-developer-dashboard"
27 | // )
28 | }
29 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AIProxy
9 |
10 | enum AppConstants {
11 | #error(
12 | """
13 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
14 | Please see https://www.aiproxy.pro/docs/integration-guide.html
15 | """
16 | )
17 |
18 | /* Uncomment for BYOK use cases */
19 | static let openAIService = AIProxy.openAIDirectService(
20 | unprotectedAPIKey: "your-openai-key"
21 | )
22 |
23 | /* Uncomment for all other production use cases */
24 | // static let openAIService = AIProxy.openAIService(
25 | // partialKey: "partial-key-from-your-developer-dashboard",
26 | // serviceURL: "service-url-from-your-developer-dashboard"
27 | // )
28 | }
29 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AIProxy
9 |
10 | enum AppConstants {
11 | #error(
12 | """
13 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
14 | Please see https://www.aiproxy.pro/docs/integration-guide.html
15 | """
16 | )
17 |
18 | /* Uncomment for BYOK use cases */
19 | static let openAIService = AIProxy.openAIDirectService(
20 | unprotectedAPIKey: "your-openai-key"
21 | )
22 |
23 | /* Uncomment for all other production use cases */
24 | // static let openAIService = AIProxy.openAIService(
25 | // partialKey: "partial-key-from-your-developer-dashboard",
26 | // serviceURL: "service-url-from-your-developer-dashboard"
27 | // )
28 | }
29 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AIProxy
9 |
10 | enum AppConstants {
11 | #error(
12 | """
13 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
14 | Please see https://www.aiproxy.pro/docs/integration-guide.html
15 | """
16 | )
17 |
18 | /* Uncomment for BYOK use cases */
19 | static let openAIService = AIProxy.openAIDirectService(
20 | unprotectedAPIKey: "your-openai-key"
21 | )
22 |
23 | /* Uncomment for all other production use cases */
24 | // static let openAIService = AIProxy.openAIService(
25 | // partialKey: "partial-key-from-your-developer-dashboard",
26 | // serviceURL: "service-url-from-your-developer-dashboard"
27 | // )
28 |
29 | }
30 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/NoRecordingsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // NoRecordingsView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct NoRecordingsView: View {
11 | var body: some View {
12 | VStack{
13 | Image(systemName: "waveform")
14 | .font(.largeTitle)
15 | .foregroundColor(.secondary)
16 | .padding(.bottom, 8)
17 |
18 | Text("No recordings")
19 | .font(.headline)
20 | Text("Tap the record button below to start transcribing.")
21 | .multilineTextAlignment(.center)
22 | .frame(maxWidth:240)
23 | .foregroundColor(.secondary)
24 | .font(.subheadline)
25 | }
26 | .frame(maxHeight:.infinity)
27 | .foregroundColor(.primary)
28 | .padding(.bottom, 48)
29 | }
30 | }
31 |
32 | #Preview {
33 | NoRecordingsView()
34 | }
35 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // FilmFinder
4 | //
5 | // Created by Todd Hamilton on 11/4/24.
6 | //
7 |
8 | import AIProxy
9 |
10 | #error(
11 | """
12 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
13 | Please see https://www.aiproxy.pro/docs/integration-guide.html
14 |
15 | You will also need a read access token from TMDB:
16 | https://developer.themoviedb.org/docs/getting-started
17 | """
18 | )
19 |
20 | /* Uncomment for BYOK use cases */
21 | let groqService = AIProxy.groqDirectService(
22 | unprotectedAPIKey: "your-groq-key"
23 | )
24 |
25 | /* Uncomment for all other production use cases */
26 | //let groqService = AIProxy.groqService(
27 | // partialKey: "partial-key-from-your-developer-dashboard",
28 | // serviceURL: "service-url-from-your-developer-dashboard"
29 | //)
30 |
31 | let tmdb = "api-read-access-token-from-tmdb"
32 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/CameraControlsView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraControlsView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | struct CameraControlsView: View {
12 |
13 | let shutterButtonAction: () -> Void
14 |
15 | var body: some View {
16 |
17 | Button(action: shutterButtonAction) {
18 | ZStack{
19 | Circle()
20 | .fill(.clear)
21 | .stroke(.mint, lineWidth: 4)
22 | .frame(width:72, height: 72)
23 | Circle()
24 | .fill(.mint.gradient)
25 | .frame(width:60, height: 60)
26 | Image(systemName: "camera")
27 | .font(.title2)
28 | .fontWeight(.semibold)
29 | .foregroundColor(.black.opacity(0.4))
30 | }
31 | }
32 | .buttonStyle(.plain)
33 | }
34 | }
35 |
36 | #Preview {
37 | CameraControlsView(shutterButtonAction: {})
38 | }
39 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftData
10 | import AIProxy
11 |
12 | enum AppConstants {
13 |
14 | static let videoSampleQueue = DispatchQueue(label: "com.AIProxyBootstrap.videoSampleQueue")
15 |
16 | #error(
17 | """
18 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
19 | Please see https://www.aiproxy.pro/docs/integration-guide.html
20 | """
21 | )
22 |
23 | /* Uncomment for BYOK use cases */
24 | static let openAIService = AIProxy.openAIDirectService(
25 | unprotectedAPIKey: "your-openai-key"
26 | )
27 |
28 | /* Uncomment for all other production use cases */
29 | // static let openAIService = AIProxy.openAIService(
30 | // partialKey: "partial-key-from-your-developer-dashboard",
31 | // serviceURL: "service-url-from-your-developer-dashboard"
32 | // )
33 | }
34 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/TranscriberDataLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranscriberDataLoader.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import AIProxy
10 |
11 | /// Interfaces with OpenAI to convert a recording into a transcript
12 | final actor TranscriberDataLoader {
13 |
14 | /// Run the OpenAI transcriber on an audio recording
15 | /// - Parameter recording: the audio recording to transcribe
16 | /// - Returns: a transcript of the recording created by OpenAI's Whisper model
17 | func run(onRecording recording: AudioRecording) async -> String {
18 | do {
19 | let requestBody = OpenAICreateTranscriptionRequestBody(
20 | file: try Data(contentsOf: recording.localUrl),
21 | model: "whisper-1"
22 | )
23 | let response = try await AppConstants.openAIService.createTranscriptionRequest(body: requestBody)
24 | return response.text
25 | } catch {
26 | AppLogger.error("Could not get transcript from OpenAI: \(error.localizedDescription)")
27 | return "Transcription Error"
28 | }
29 | }
30 | }
31 |
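A usage sketch, assuming `recordingURL` is a hypothetical URL to an m4a file already saved in the app's Documents directory (for example, one produced by AudioRecorder):

// Wrap the saved file in the SwiftData model and ask OpenAI for a transcript.
let recording = AudioRecording(localUrl: recordingURL, duration: "0:05")
let transcript = await TranscriberDataLoader().run(onRecording: recording)
// `transcript` is either the Whisper transcript or the fallback string "Transcription Error".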
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/StickerLoadingView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StickerLoadingView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | @MainActor
12 | struct StickerLoadingView: View {
13 |
14 | /// Loading text to display while long requests to OpenAI are fulfilled
15 | @State private var currentLoadState = "Hold tight"
16 |
17 | var body: some View {
18 | VStack(spacing:16){
19 | ProgressView()
20 | .controlSize(.extraLarge)
21 | .tint(.white)
22 | Text(currentLoadState)
23 | .transition(.move(edge: .bottom))
24 | .font(.system(size: 20, weight: .semibold, design: .rounded))
25 | .foregroundColor(.black.opacity(0.28))
26 | }
27 | .frame(maxWidth:.infinity, maxHeight:.infinity)
28 | .onAppear {
29 | Task {
30 | try await Task.sleep(for: .seconds(4))
31 | currentLoadState = "Generating sticker"
32 | try await Task.sleep(for: .seconds(4))
33 | currentLoadState = "Finalizing"
34 | }
35 | }
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/CameraView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct CameraView: View {
11 |
12 | /// The camera frame image to display
13 | var image: CGImage?
14 |
15 | private let label = Text("frame")
16 |
17 | var body: some View {
18 | GeometryReader { geo in
19 | VStack {
20 | if let image = image {
21 | Image(image, scale: 0.5, orientation: .up, label: label)
22 | .resizable()
23 | .scaledToFill()
24 | .frame(maxWidth:geo.size.width, maxHeight: geo.size.width)
25 | .clipShape(RoundedRectangle(cornerRadius: 14))
26 | .padding()
27 |
28 | } else {
29 | Color.black
30 | .frame(maxWidth:geo.size.width, maxHeight: geo.size.width)
31 | .clipShape(RoundedRectangle(cornerRadius: 14))
32 | .padding()
33 | }
34 | }
35 | }
36 | }
37 | }
38 |
39 | #Preview {
40 | CameraView()
41 | }
42 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaCardData.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TriviaCardData.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | @MainActor
12 | @Observable
13 | /// UI model for TriviaCardView
14 | final class TriviaCardData: Identifiable {
15 |
16 | /// Position of the card, with 0 meaning that the card is on top and 1 meaning directly below the top card, etc.
17 | let position: Int
18 |
19 | /// Data model for the card contents
20 | var triviaQuestionModel: TriviaQuestionModel?
21 |
22 | /// Networker to load card contents from OpenAI
23 | private let triviaFetcher: TriviaDataLoader
24 |
25 | /// Creates a UI model for TriviaCardView
26 | /// - Parameters:
27 | /// - triviaFetcher: Loads card contents from OpenAI
28 | /// - position: Position of the card, with 0 being the top of the stack
29 | init(triviaFetcher: TriviaDataLoader, position: Int) {
30 | self.triviaFetcher = triviaFetcher
31 | self.position = position
32 | }
33 |
34 | /// Loads the trivia question's data model asynchronously
35 | func load() async {
36 | self.triviaQuestionModel = try! await self.triviaFetcher.getNextQuestion()
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/README.md:
--------------------------------------------------------------------------------
1 | ## Example projects
2 |
3 | ### AIColorPalette
4 |
5 | #### About
6 |
7 | AIColorPalette generates a color palette from a photo in your camera roll. This project uses [AIProxy](https://www.aiproxy.pro) to secure your OpenAI key.
8 |
9 | #### Features demonstrated
10 |
11 | - Shows off new iOS 18 SwiftUI effects
12 | - Calls the OpenAI chat completion endpoint
13 | - Submits a photo as the chat completion request body
14 | - Compels OpenAI to return valid JSON in the chat completion response
15 |
16 | #### Minimum requirements
17 |
18 | The minimum deployment target for this sample app is iOS 18.0, as it uses beta UI effects.
19 |
20 | You'll need:
21 |
22 | - macOS Sonoma 14.5 or higher
23 | - Xcode Beta 16.0 or higher
24 |
25 | #### How to run with your own AIProxy settings
26 |
27 | - Set the `AIPROXY_DEVICE_CHECK_BYPASS` environment variable in your Xcode build settings.
28 | Refer to the [README](https://github.com/lzell/AIProxySwift?tab=readme-ov-file#adding-this-package-as-a-dependency-to-your-xcode-project)
29 | for instructions on adding an env variable to your Xcode project.
30 |
31 | - Replace the `partialKey` placeholder value in `AIColorPalette/AIProxyIntegration.swift` with the
32 | value provided to you in the AIProxy dashboard when you submit your OpenAI key
33 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/AudioRecording.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioRecording.swift
3 | // Transcriber
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftData
10 |
11 | /// Encapsulates a recording. The `localUrl` is the location on disk of the raw audio file (an m4a).
12 | @Model
13 | final class AudioRecording {
14 | @Attribute(.unique) let localUrl: URL
15 | let duration: String
16 |
17 | init(localUrl: URL, duration: String) {
18 | self.localUrl = localUrl
19 | self.duration = duration
20 | }
21 |
22 | var resolvedURL: URL? {
23 | // There is a little nuance here. Every time you build and run the app, the Apple sandbox path changes.
24 | // We first try to find the associated file at the spot where we stored it, but if it's not there,
25 | // we construct a new URL based on the current Apple sandbox.
26 | if (FileManager.default.fileExists(atPath: self.localUrl.path)) {
27 | return self.localUrl
28 | } else {
29 | let resolvedURL = FileUtils.getDocumentsURL().appending(component: self.localUrl.lastPathComponent)
30 | if FileManager.default.fileExists(atPath: resolvedURL.path) {
31 | return resolvedURL
32 | }
33 | }
34 | return nil
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/StickerImageView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StickerImageView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | /// Holds a sticker image.
12 | /// The sticker animates into view with a scale effect, and then floats in the Y-axis.
13 | struct StickerImageView: View {
14 |
15 | /// The sticker as UIImage
16 | let uiImage: UIImage
17 |
18 | @State private var floating = false
19 | @State private var showSticker = false
20 | private let floatingAnimation = Animation.easeInOut(duration: 2.0).repeatForever(autoreverses: true)
21 |
22 | var body: some View {
23 | Image(uiImage: uiImage)
24 | .resizable()
25 | .scaledToFit()
26 | .cornerRadius(14)
27 | .shadow(color:.black.opacity(0.28), radius: 8, x:0, y:4)
28 | .padding()
29 | .offset(y:floating ? 8.0 : -8.0)
30 | .animation(floatingAnimation, value: floating)
31 | .scaleEffect(showSticker ? 1.0 : 0.5)
32 | .animation(.bouncy, value: showSticker)
33 | .onAppear{
34 | withAnimation(.bouncy){
35 | floating = true
36 | showSticker = true
37 | }
38 | }
39 | }
40 | }
41 |
42 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/TranslationDataLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranslationDataLoader.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 |
10 | private let prompt = "The response is an exact translation from English to Spanish. You don't respond with any English."
11 |
12 | /// Interfaces with OpenAI to translate input text from english to spanish
13 | struct TranslationDataLoader {
14 | private init() {
15 | fatalError("TranslationDataLoader is a namespace only")
16 | }
17 |
18 | /// Translate `input` from english to spanish
19 | /// - Parameter input: the english input
20 | /// - Returns: the spanish translation
21 | static func run(on input: String) async -> String {
22 | do {
23 | let response = try await AppConstants.openAIService.chatCompletionRequest(body: .init(
24 | model: "gpt-4o",
25 | messages: [
26 | .system(content: .text(prompt)),
27 | .user(content: .text(input))
28 | ]
29 | ))
30 | if let text = response.choices.first?.message.content {
31 | return text
32 | }
33 | } catch {
34 | AppLogger.error("Could not translate using gpt4o: \(error)")
35 | }
36 | return "Translation failed!"
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/AIProxyGemini/AIProxyGemini/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyGemini
4 | //
5 | // Created by Todd Hamilton on 10/18/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 | VStack(spacing:24){
14 | VStack{
15 | Image("icon")
16 | .resizable()
17 | .scaledToFit()
18 | .frame(width: 100)
19 | .cornerRadius(14)
20 | .foregroundColor(.primary)
21 | Text("Gemini")
22 | .bold()
23 | .font(.largeTitle)
24 | Text("AIProxy Sample")
25 | .font(.subheadline)
26 | .foregroundColor(.secondary)
27 | }
28 | .frame(maxWidth:.infinity,alignment:.center)
29 |
30 | VStack{
31 | NavigationLink("Text Generation",destination: TextGenerationView())
32 | }
33 | .bold()
34 | .controlSize(.large)
35 | .tint(.teal)
36 | .buttonStyle(.bordered)
37 | }
38 | }
39 | }
40 | }
41 |
42 | #Preview {
43 | ContentView()
44 | }
45 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/AppConstants.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AppConstants.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftData
10 | import AIProxy
11 |
12 | /// Use this actor for audio work
13 | @globalActor actor AudioActor {
14 | static let shared = AudioActor()
15 | }
16 |
17 | enum AppConstants {
18 |
19 | static let swiftDataModels: [any PersistentModel.Type] = [AudioRecording.self, TranscribedAudioRecording.self]
20 | static let swiftDataContainer = try! ModelContainer(for: AudioRecording.self, TranscribedAudioRecording.self)
21 |
22 | static let audioSampleQueue = DispatchQueue(label: "com.AIProxyBootstrap.audioSampleQueue")
23 |
24 | #error(
25 | """
26 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
27 | Please see https://www.aiproxy.pro/docs/integration-guide.html
28 | """
29 | )
30 |
31 | /* Uncomment for BYOK use cases */
32 | static let openAIService = AIProxy.openAIDirectService(
33 | unprotectedAPIKey: "your-openai-key"
34 | )
35 |
36 | /* Uncomment for all other production use cases */
37 | // static let openAIService = AIProxy.openAIService(
38 | // partialKey: "partial-key-from-your-developer-dashboard",
39 | // serviceURL: "service-url-from-your-developer-dashboard"
40 | // )
41 | }
42 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/ClassifierManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ClassifierManager.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import SwiftUI
9 |
10 | @MainActor
11 | @Observable
12 | final class ClassifierManager {
13 | /// The description of a plant. Descriptions are generated by OpenAI
14 | private(set) var plantDescription: String?
15 |
16 | /// The image of a plant. This camera image is supplied by the user
17 | private(set) var image: CGImage?
18 |
19 | /// A wikipedia URL for the user to learn more about the identified plant. This URL is generated by OpenAI
20 | private(set) var wikipediaURL: URL?
21 |
22 | /// Loads data from OpenAI
23 | private let classifierDataLoader = ClassifierDataLoader()
24 |
25 | /// Identify a plant based on a passed in image
26 | /// - Parameter image: a camera frame that the user took of a plant in their surroundings
27 | func identify(_ image: CGImage) {
28 | self.image = image
29 | Task {
30 | let (description, wikipediaURL) = try await classifierDataLoader.identify(fromImage: image)
31 | self.plantDescription = description
32 | self.wikipediaURL = wikipediaURL
33 | }
34 | }
35 |
36 | /// Reset all previously classified state
37 | func reset() {
38 | self.plantDescription = nil
39 | self.image = nil
40 | self.wikipediaURL = nil
41 | }
42 | }
43 |
--------------------------------------------------------------------------------
/AIProxyFal/AIProxyFal/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyFal
4 | //
5 | // Created by Todd Hamilton on 6/13/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 |
14 | VStack(spacing:24){
15 | VStack{
16 | Image("fal")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 100)
20 | .cornerRadius(14)
21 | .foregroundColor(.primary)
22 | Text("Fal")
23 | .bold()
24 | .font(.largeTitle)
25 | Text("AIProxy Sample")
26 | .font(.subheadline)
27 | .foregroundColor(.secondary)
28 | }
29 | .frame(maxWidth:.infinity,alignment:.center)
30 |
31 | VStack{
32 | NavigationLink("Text to Image with FastSDXL",destination: TextToImageView())
33 | .bold()
34 | .controlSize(.large)
35 | .tint(.indigo)
36 | .buttonStyle(.bordered)
37 | }
38 | }
39 | }
40 | }
41 | }
42 |
43 | #Preview {
44 | ContentView()
45 | }
46 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/CameraFrameManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraFrameManager.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AVFoundation
9 | import Foundation
10 | import SwiftUI
11 |
12 | @MainActor
13 | @Observable
14 | final class CameraFrameManager {
15 |
16 | /// The most recent camera frame of the back-facing built-in camera
17 | private(set) var cameraFrameImage: CGImage?
18 | private let cameraDataLoader = CameraDataLoader()
19 |
20 | init() {
21 | self.checkPermission() { [weak self] granted in
22 | if granted {
23 | self?.startCapturingCameraFrames()
24 | }
25 | }
26 | }
27 |
28 | private func startCapturingCameraFrames() {
29 | Task {
30 | let stream = await self.cameraDataLoader.imageStream()
31 | for await image in stream {
32 | self.cameraFrameImage = image
33 | }
34 | }
35 | }
36 |
37 | private func checkPermission(checkComplete: @escaping (Bool) -> Void) {
38 | switch AVCaptureDevice.authorizationStatus(for: .video) {
39 | case .authorized:
40 | checkComplete(true)
41 | case .notDetermined:
42 | AVCaptureDevice.requestAccess(for: .video) { granted in
43 | checkComplete(granted)
44 | }
45 | default:
46 | checkComplete(false)
47 | }
48 | }
49 | }
50 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/FileUtils.swift:
--------------------------------------------------------------------------------
1 | //
2 | // FileUtils.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 |
10 |
11 | struct FileUtils {
12 | private init() {
13 | fatalError("FileUtils is a namespace only")
14 | }
15 |
16 | static func getDocumentsURL() -> URL {
17 | guard let documentsUrl = FileManager.default.urls(
18 | for: .documentDirectory, in: .userDomainMask).first
19 | else {
20 | fatalError("Could could not find the Documents directory")
21 | }
22 | return documentsUrl
23 | }
24 |
25 | static func getFileURL() -> URL {
26 | let documentsUrl = self.getDocumentsURL()
27 | let isoFormatter = ISO8601DateFormatter()
28 | isoFormatter.formatOptions = [.withFullDate, .withTime, .withColonSeparatorInTime]
29 | isoFormatter.timeZone = .current
30 |
31 | var dateString = isoFormatter.string(from: Date())
32 | dateString = dateString.replacingOccurrences(of: ":", with: ".")
33 | let filename = "AIProxyBootstrap-\(dateString).m4a"
34 |
35 | return documentsUrl.appendingPathComponent(filename)
36 | }
37 |
38 | static func deleteFile(at url: URL) {
39 | do {
40 | try FileManager.default.removeItem(at: url)
41 | } catch {
42 | AppLogger.info("Could not find file to delete at \(url)")
43 | }
44 | }
45 | }
46 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyReplicate
4 | //
5 | // Created by Todd Hamilton on 6/13/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 | VStack(spacing:24){
14 | VStack{
15 | Image("replicate")
16 | .resizable()
17 | .scaledToFit()
18 | .frame(width: 100)
19 | .cornerRadius(14)
20 | .foregroundColor(.primary)
21 | Text("Replicate")
22 | .bold()
23 | .font(.largeTitle)
24 | Text("AIProxy Samples")
25 | .font(.subheadline)
26 | .foregroundColor(.secondary)
27 | }
28 | .frame(maxWidth:.infinity,alignment:.center)
29 |
30 | VStack{
31 | NavigationLink("Generate Image Example",destination: ImageGenView())
32 | .bold()
33 | .controlSize(.large)
34 | .tint(.pink)
35 | .buttonStyle(.bordered)
36 | }
37 | }
38 | }
39 | }
40 | }
41 |
42 | #Preview {
43 | ContentView()
44 | }
45 |
46 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyDeepL
4 | //
5 | // Created by Todd Hamilton on 8/14/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 |
14 | VStack(spacing:48){
15 | VStack{
16 | Image("deepl")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 100)
20 | .cornerRadius(14)
21 | .foregroundColor(.primary)
22 | Text("DeepL")
23 | .bold()
24 | .font(.largeTitle)
25 | Text("AIProxy Sample")
26 | .font(.subheadline)
27 | .foregroundColor(.secondary)
28 | }
29 | .frame(maxWidth:.infinity,alignment:.center)
30 |
31 | VStack{
32 | NavigationLink("Translation Example",destination: TranslationView())
33 | .bold()
34 | .controlSize(.large)
35 | .tint(.blue)
36 | .buttonStyle(.bordered)
37 |
38 | }
39 | }
40 | }
41 | }
42 | }
43 |
44 | #Preview {
45 | ContentView()
46 | }
47 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyGroq
4 | //
5 | // Created by Todd Hamilton on 10/1/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 | VStack(spacing:24){
14 | VStack{
15 | Image("groq")
16 | .resizable()
17 | .scaledToFit()
18 | .frame(width: 100)
19 | .cornerRadius(14)
20 | .foregroundColor(.primary)
21 | Text("Groq")
22 | .bold()
23 | .font(.largeTitle)
24 | Text("AIProxy Sample")
25 | .font(.subheadline)
26 | .foregroundColor(.secondary)
27 | }
28 | .frame(maxWidth:.infinity,alignment:.center)
29 |
30 | VStack{
31 | NavigationLink("Chat Completion",destination: ChatView())
32 | NavigationLink("Streaming Chat Completion",destination: StreamingChatView())
33 | }
34 | .bold()
35 | .controlSize(.large)
36 | .tint(.red)
37 | .buttonStyle(.bordered)
38 | }
39 | }
40 | }
41 | }
42 |
43 | #Preview {
44 | ContentView()
45 | }
46 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyStabilityAI
4 | //
5 | // Created by Todd Hamilton on 8/13/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 |
14 | VStack(spacing:48){
15 | VStack{
16 | Image("stability")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 100)
20 | .cornerRadius(14)
21 | .foregroundColor(.primary)
22 | Text("Stability.ai")
23 | .bold()
24 | .font(.largeTitle)
25 | Text("AIProxy Sample")
26 | .font(.subheadline)
27 | .foregroundColor(.secondary)
28 | }
29 | .frame(maxWidth:.infinity,alignment:.center)
30 |
31 | VStack{
32 | NavigationLink("Generate Image Example",destination: ImageGenView())
33 | .bold()
34 | .controlSize(.large)
35 | .tint(.indigo)
36 | .buttonStyle(.bordered)
37 | }
38 | }
39 | }
40 | }
41 | }
42 |
43 | #Preview {
44 | ContentView()
45 | }
46 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/TranscribedAudioRecording.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranscribedAudioRecording.swift
3 | // Transcriber
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AVFoundation
9 | import Foundation
10 | import SwiftData
11 |
12 | /// Encapsulates a transcribed audio recording
13 | @Model
14 | final class TranscribedAudioRecording {
15 | @Relationship(deleteRule: .cascade) var audioRecording: AudioRecording
16 | let transcript: String
17 | let createdAt: Date
18 | @Transient var player: AVAudioPlayer?
19 |
20 | init(audioRecording: AudioRecording, transcript: String, createdAt: Date) {
21 | self.audioRecording = audioRecording
22 | self.transcript = transcript
23 | self.createdAt = createdAt
24 | }
25 |
26 | func play() {
27 | guard let resolvedURL = self.audioRecording.resolvedURL else {
28 | AppLogger.error("The audio recording model does not have an associated audio file")
29 | return
30 | }
31 | AppLogger.info("Playing file at \(resolvedURL), which exists? \(FileManager.default.fileExists(atPath: resolvedURL.path))")
32 |
33 | Task.detached {
34 | do {
35 | try AVAudioSession.sharedInstance().setCategory(.playback)
36 | self.player = try AVAudioPlayer(contentsOf: resolvedURL)
37 | self.player?.play()
38 | } catch {
39 | AppLogger.error("Could not play audio file. Error: \(error.localizedDescription)")
40 | }
41 | }
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/TranslateView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranslateView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | @MainActor
11 | struct TranslateView: View {
12 |
13 | @State private var newText:String = ""
14 | @State private var translatedText:String = ""
15 | @State private var processing:Bool = false
16 |
17 | private let prompt = "The response is an exact translation from english to spanish. You don't respond with any english."
18 |
19 | var body: some View {
20 | ZStack{
21 | Color(.systemGroupedBackground)
22 | .ignoresSafeArea()
23 |
24 | VStack{
25 | TopTranslateView(
26 | newText: $newText,
27 | translatedText: $translatedText,
28 | translate: { self.translate() }
29 | )
30 | BottomTranslateView(
31 | processing: $processing,
32 | translatedText: $translatedText
33 | )
34 | }
35 | .padding()
36 | }
37 | }
38 |
39 | func translate(){
40 | withAnimation(.smooth){
41 | processing = true
42 | }
43 | Task {
44 | translatedText = await TranslationDataLoader.run(on: self.newText)
45 | withAnimation(.smooth){
46 | processing = false
47 | }
48 | }
49 | }
50 | }
51 |
52 | #Preview {
53 | TranslateView()
54 | }
55 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/StickerInputView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StickerInputView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | /// The user enters a sticker prompt using this view.
12 | struct StickerInputView: View {
13 |
14 | enum FocusedField {
15 | case currentPrompt
16 | }
17 |
18 | /// Bind to a UI model's property for that property to change as the user enters text,
19 | /// and for programmatic changes to the UI model's property to be reflected in this view
20 | @Binding var currentPrompt: String
21 | @FocusState private var focusedField: FocusedField?
22 |
23 | var body: some View {
24 | VStack(spacing:8){
25 | Text("Describe your sticker below")
26 | .frame(maxWidth: .infinity, alignment: .topLeading)
27 | .font(.system(size: 20, weight: .bold, design: .rounded))
28 | .foregroundColor(.black.opacity(0.28))
29 | TextField("type here...", text: $currentPrompt, axis: .vertical)
30 | .focused($focusedField, equals: .currentPrompt)
31 | .frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .topLeading)
32 | .font(.system(size: 36, weight: .bold, design: .rounded))
33 | .textFieldStyle(.plain)
34 | .foregroundColor(.black.opacity(0.75))
35 | .onAppear {
36 | focusedField = .currentPrompt
37 | }
38 | }
39 | .padding()
40 | }
41 | }
42 |
43 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Movie.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Movie.swift
3 | // FilmFinder
4 | //
5 | // Created by Todd Hamilton on 10/29/24.
6 | //
7 |
8 | import Foundation
9 |
10 | struct Recommendation: Codable {
11 | let title: String
12 | }
13 |
14 | // Define the structs to match the JSON structure
15 | struct MovieResponse: Codable {
16 | let page: Int
17 | let results: [Movie]
18 | let totalPages: Int
19 | let totalResults: Int
20 |
21 | // Map JSON keys to Swift property names if they differ
22 | enum CodingKeys: String, CodingKey {
23 | case page, results
24 | case totalPages = "total_pages"
25 | case totalResults = "total_results"
26 | }
27 | }
28 |
29 | struct Movie: Codable {
30 | let adult: Bool
31 | let backdropPath: String?
32 | let genreIds: [Int]
33 | let id: Int
34 | let originalLanguage: String
35 | let originalTitle: String
36 | let overview: String
37 | let popularity: Double
38 | let posterPath: String?
39 | let releaseDate: String
40 | let title: String
41 | let video: Bool
42 | let voteAverage: Double
43 | let voteCount: Int
44 |
45 | enum CodingKeys: String, CodingKey {
46 | case adult
47 | case backdropPath = "backdrop_path"
48 | case genreIds = "genre_ids"
49 | case id
50 | case originalLanguage = "original_language"
51 | case originalTitle = "original_title"
52 | case overview, popularity
53 | case posterPath = "poster_path"
54 | case releaseDate = "release_date"
55 | case title, video
56 | case voteAverage = "vote_average"
57 | case voteCount = "vote_count"
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyOpenAI
4 | //
5 | // Created by Todd Hamilton on 6/14/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 |
14 | VStack(spacing:48){
15 | VStack{
16 | Image("openai")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 100)
20 | .cornerRadius(14)
21 | .foregroundColor(.primary)
22 | Text("OpenAI")
23 | .bold()
24 | .font(.largeTitle)
25 | Text("AIProxy Sample")
26 | .font(.subheadline)
27 | .foregroundColor(.secondary)
28 | }
29 | .frame(maxWidth:.infinity,alignment:.center)
30 |
31 | VStack{
32 | NavigationLink("Chat Example",destination: ChatView())
33 | NavigationLink("Streaming Chat Example",destination: ChatView())
34 | NavigationLink("Multi-Modal Chat Example",destination: MultiModalChatView())
35 | NavigationLink("DALLE Example",destination: DalleView())
36 | NavigationLink("Text-to-Speech Example",destination: TextToSpeechView())
37 | }
38 | .bold()
39 | .controlSize(.large)
40 | .buttonStyle(.bordered)
41 | .tint(.purple)
42 | }
43 | }
44 | }
45 | }
46 |
47 | #Preview {
48 | ContentView()
49 | }
50 |
--------------------------------------------------------------------------------
/Demos/FilmFinder/FilmFinder/Ripple.metal:
--------------------------------------------------------------------------------
1 | //
2 | // Ripple.metal
3 | // FilmFinder
4 | //
5 | // Created by Todd Hamilton on 6/21/24.
6 | //
7 |
8 | #include <metal_stdlib>
9 | #include <SwiftUI/SwiftUI_Metal.h>
10 | using namespace metal;
11 |
12 | [[ stitchable ]]
13 | half4 Ripple(
14 | float2 position,
15 | SwiftUI::Layer layer,
16 | float2 origin,
17 | float time,
18 | float amplitude,
19 | float frequency,
20 | float decay,
21 | float speed
22 | ) {
23 | // The distance of the current pixel position from `origin`.
24 | float distance = length(position - origin);
25 | // The amount of time it takes for the ripple to arrive at the current pixel position.
26 | float delay = distance / speed;
27 |
28 | // Adjust for delay, clamp to 0.
29 | time -= delay;
30 | time = max(0.0, time);
31 |
32 | // The ripple is a sine wave that Metal scales by an exponential decay
33 | // function.
34 | float rippleAmount = amplitude * sin(frequency * time) * exp(-decay * time);
35 |
36 | // A vector of length `amplitude` that points away from position.
37 | float2 n = normalize(position - origin);
38 |
39 | // Scale `n` by the ripple amount at the current pixel position and add it
40 | // to the current pixel position.
41 | //
42 | // This new position moves toward or away from `origin` based on the
43 | // sign and magnitude of `rippleAmount`.
44 | float2 newPosition = position + rippleAmount * n;
45 |
46 | // Sample the layer at the new position.
47 | half4 color = layer.sample(newPosition);
48 |
49 | // Lighten or darken the color based on the ripple amount and its alpha
50 | // component.
51 | color.rgb += 0.3 * (rippleAmount / amplitude) * color.a;
52 |
53 | return color;
54 | }
55 |
56 |
57 |
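Note: a `[[ stitchable ]]` shader with this signature is applied from SwiftUI via `layerEffect`; the system supplies `position` and `layer`, and the remaining arguments are passed in declaration order. The snippet below is a hypothetical usage sketch, not code from the demo apps: the modifier name and the parameter values are illustrative only.

import SwiftUI

// Hypothetical sketch: drive the Ripple shader from a view modifier.
// `origin` is typically a touch location; `elapsedTime` is advanced by a
// keyframe or TimelineView animation in the host view.
struct RippleModifier: ViewModifier {
    var origin: CGPoint
    var elapsedTime: Double

    func body(content: Content) -> some View {
        content.layerEffect(
            ShaderLibrary.Ripple(
                .float2(origin),      // origin
                .float(elapsedTime),  // time
                .float(12),           // amplitude
                .float(15),           // frequency
                .float(8),            // decay
                .float(1200)          // speed
            ),
            maxSampleOffset: CGSize(width: 12, height: 12)
        )
    }
}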
--------------------------------------------------------------------------------
/Demos/PuLIDDemo/PuLIDDemo/Ripple.metal:
--------------------------------------------------------------------------------
1 | //
2 | // Ripple.metal
3 | // AIColorPalette
4 | //
5 | // Created by Todd Hamilton on 6/21/24.
6 | //
7 |
8 | #include <metal_stdlib>
9 | #include <SwiftUI/SwiftUI_Metal.h>
10 | using namespace metal;
11 |
12 | [[ stitchable ]]
13 | half4 Ripple(
14 | float2 position,
15 | SwiftUI::Layer layer,
16 | float2 origin,
17 | float time,
18 | float amplitude,
19 | float frequency,
20 | float decay,
21 | float speed
22 | ) {
23 | // The distance of the current pixel position from `origin`.
24 | float distance = length(position - origin);
25 | // The amount of time it takes for the ripple to arrive at the current pixel position.
26 | float delay = distance / speed;
27 |
28 | // Adjust for delay, clamp to 0.
29 | time -= delay;
30 | time = max(0.0, time);
31 |
32 | // The ripple is a sine wave that Metal scales by an exponential decay
33 | // function.
34 | float rippleAmount = amplitude * sin(frequency * time) * exp(-decay * time);
35 |
36 | // A vector of length `amplitude` that points away from position.
37 | float2 n = normalize(position - origin);
38 |
39 | // Scale `n` by the ripple amount at the current pixel position and add it
40 | // to the current pixel position.
41 | //
42 | // This new position moves toward or away from `origin` based on the
43 | // sign and magnitude of `rippleAmount`.
44 | float2 newPosition = position + rippleAmount * n;
45 |
46 | // Sample the layer at the new position.
47 | half4 color = layer.sample(newPosition);
48 |
49 | // Lighten or darken the color based on the ripple amount and its alpha
50 | // component.
51 | color.rgb += 0.3 * (rippleAmount / amplitude) * color.a;
52 |
53 | return color;
54 | }
55 |
56 |
57 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/Ripple.metal:
--------------------------------------------------------------------------------
1 | //
2 | // Ripple.metal
3 | // AIColorPalette
4 | //
5 | // Created by Todd Hamilton on 6/21/24.
6 | //
7 |
8 | #include <metal_stdlib>
9 | #include <SwiftUI/SwiftUI_Metal.h>
10 | using namespace metal;
11 |
12 | [[ stitchable ]]
13 | half4 Ripple(
14 | float2 position,
15 | SwiftUI::Layer layer,
16 | float2 origin,
17 | float time,
18 | float amplitude,
19 | float frequency,
20 | float decay,
21 | float speed
22 | ) {
23 | // The distance of the current pixel position from `origin`.
24 | float distance = length(position - origin);
25 | // The amount of time it takes for the ripple to arrive at the current pixel position.
26 | float delay = distance / speed;
27 |
28 | // Adjust for delay, clamp to 0.
29 | time -= delay;
30 | time = max(0.0, time);
31 |
32 | // The ripple is a sine wave that Metal scales by an exponential decay
33 | // function.
34 | float rippleAmount = amplitude * sin(frequency * time) * exp(-decay * time);
35 |
36 | // A vector of length `amplitude` that points away from position.
37 | float2 n = normalize(position - origin);
38 |
39 | // Scale `n` by the ripple amount at the current pixel position and add it
40 | // to the current pixel position.
41 | //
42 | // This new position moves toward or away from `origin` based on the
43 | // sign and magnitude of `rippleAmount`.
44 | float2 newPosition = position + rippleAmount * n;
45 |
46 | // Sample the layer at the new position.
47 | half4 color = layer.sample(newPosition);
48 |
49 | // Lighten or darken the color based on the ripple amount and its alpha
50 | // component.
51 | color.rgb += 0.3 * (rippleAmount / amplitude) * color.a;
52 |
53 | return color;
54 | }
55 |
56 |
57 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/BottomTranslateView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // BottomTranslateView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct BottomTranslateView: View {
11 |
12 | @Binding var processing:Bool
13 | @Binding var translatedText:String
14 |
15 | var body: some View {
16 | VStack{
17 |
18 | VStack(alignment:.leading, spacing:8){
19 | Text("Spanish")
20 | .font(.callout)
21 | .foregroundColor(.secondary)
22 | if processing {
23 | ProgressView()
24 | .frame(maxWidth: .infinity, maxHeight:.infinity)
25 | } else{
26 | Text(translatedText)
27 | .font(.title2)
28 | }
29 | }
30 | .frame(maxWidth: .infinity, maxHeight:.infinity, alignment:.topLeading)
31 |
32 |
33 | HStack(spacing:0){
34 | Button(){
35 | /// copy result
36 | } label:{
37 | Image(systemName: "square.on.square")
38 | .font(.title2)
39 | }
40 | .frame(width:44, height:44)
41 | }
42 | .frame(maxWidth: .infinity, alignment:.leading)
43 | }
44 | .frame(maxWidth: .infinity, maxHeight:.infinity)
45 | .padding(16)
46 | .background(
47 | RoundedRectangle(cornerRadius: 14, style: .continuous)
48 | .fill(Color(.tertiarySystemBackground))
49 | .shadow(color:.black.opacity(0.14), radius: 1)
50 | )
51 | }
52 | }
53 |
54 |
55 | #Preview {
56 | BottomTranslateView(processing: .constant(false), translatedText: .constant(""))
57 | }
58 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/RecordingRowView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // RecordingRowView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 |
11 | struct RecordingRowView: View {
12 |
13 | let recording: TranscribedAudioRecording
14 | @State private var startAnimation = false
15 |
16 | var body: some View {
17 | HStack(spacing:0){
18 | Text(recording.transcript)
19 | .font(.body)
20 |
21 | Spacer()
22 |
23 | Button{
24 | recording.play()
25 | }label:{
26 | HStack(spacing:6){
27 | Image(systemName: "play.circle.fill")
28 | .font(.system(size: 15, weight:.semibold, design: .rounded))
29 | Text("\(recording.audioRecording.duration)s")
30 | .font(.system(size: 11, weight: .regular, design: .monospaced))
31 | }
32 | }
33 | .buttonStyle(TranscriptionButtonStyle())
34 | }
35 | .padding(.vertical, 8)
36 | .opacity(startAnimation ? 1 : 0)
37 | .offset(y:startAnimation ? 0 : -10)
38 | .onAppear{
39 | withAnimation(.smooth.delay(0.2)){
40 | startAnimation = true
41 | }
42 | }
43 | }
44 | }
45 |
46 | #Preview {
47 | RecordingRowView(recording: previewRecording())
48 | .padding()
49 | }
50 |
51 | private func previewRecording() -> TranscribedAudioRecording {
52 | let audioRecording = AudioRecording(localUrl: URL(fileURLWithPath: "/dev/null"),
53 | duration: "1.2s")
54 | return TranscribedAudioRecording(
55 | audioRecording: audioRecording,
56 | transcript: "hello world",
57 | createdAt: Date()
58 | )
59 | }
60 |
61 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/AudioRecorder.swift:
--------------------------------------------------------------------------------
1 | //
2 | // Manager.swift
3 | // OpenAIExperiment
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import AVFoundation
9 | import Foundation
10 |
11 | @AudioActor
12 | final class AudioRecorder {
13 | private var microphoneSampleVendor: MicrophoneSampleVendor?
14 | private var audioFileWriter: AudioFileWriter?
15 |
16 | nonisolated init() {}
17 |
18 | /// Start recording an audio file
19 | /// - Returns: true if the audio recorder was able to start recording, false otherwise
20 | func start() -> Bool {
21 | do {
22 | self.microphoneSampleVendor = try MicrophoneSampleVendor()
23 | } catch {
24 | AppLogger.error("Could not create a MicrophoneSampleVendor: \(error)")
25 | return false
26 | }
27 |
28 | do {
29 | self.audioFileWriter = try AudioFileWriter(fileURL: FileUtils.getFileURL())
30 | } catch {
31 | AppLogger.error("Could not create an audio file writer: \(error)")
32 | return false
33 | }
34 |
35 | self.microphoneSampleVendor?.start(onSample: { [weak self] sampleBuffer in
36 | self?.audioFileWriter?.append(sample: sampleBuffer)
37 | })
38 | return true
39 | }
40 |
41 | /// Returns the recording created between calls to `startRecording` and `stopRecording`
42 | func stopRecording(duration: String) async -> AudioRecording? {
43 | guard let fileWriter = self.audioFileWriter,
44 | let sampleVendor = self.microphoneSampleVendor else
45 | {
46 | AppLogger.warning("Expected audio dependencies to be set")
47 | return nil
48 | }
49 | sampleVendor.stop()
50 | let url = await fileWriter.finishWriting()
51 | return AudioRecording(localUrl: url, duration: duration)
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/AIProxyAnthropic/AIProxyAnthropic/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyAnthropic
4 | //
5 | // Created by Todd Hamilton on 6/17/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 |
14 | VStack(spacing:24){
15 | VStack{
16 | Image("anthropic")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 100)
20 | .cornerRadius(14)
21 | .foregroundColor(.primary)
22 | Text("Anthropic")
23 | .bold()
24 | .font(.largeTitle)
25 | Text("AIProxy Sample")
26 | .font(.subheadline)
27 | .foregroundColor(.secondary)
28 | }
29 | .frame(maxWidth:.infinity,alignment:.center)
30 |
31 | VStack{
32 | NavigationLink("Message Request Example",destination: MessageRequestView())
33 | .bold()
34 | .controlSize(.large)
35 | .tint(.brown)
36 | .buttonStyle(.bordered)
37 | NavigationLink("Vision Example",destination: VisionView())
38 | .bold()
39 | .controlSize(.large)
40 | .tint(.brown)
41 | .buttonStyle(.bordered)
42 | NavigationLink("Tools Example",destination: ToolsView())
43 | .bold()
44 | .controlSize(.large)
45 | .tint(.brown)
46 | .buttonStyle(.bordered)
47 | }
48 | }
49 | }
50 | }
51 | }
52 |
53 | #Preview {
54 | ContentView()
55 | }
56 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/ContentView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ContentView.swift
3 | // AIProxyTogetherAI
4 | //
5 | // Created by Todd Hamilton on 8/18/24.
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct ContentView: View {
11 | var body: some View {
12 | NavigationStack{
13 |
14 | VStack(spacing:48){
15 | VStack{
16 | Image("togetherai")
17 | .resizable()
18 | .scaledToFit()
19 | .frame(width: 100)
20 | .cornerRadius(14)
21 | .foregroundColor(.primary)
22 | Text("Together AI")
23 | .bold()
24 | .font(.largeTitle)
25 | Text("AIProxy Sample")
26 | .font(.subheadline)
27 | .foregroundColor(.secondary)
28 | }
29 | .frame(maxWidth:.infinity,alignment:.center)
30 |
31 | VStack{
32 | NavigationLink("Chat Example",destination: ChatView())
33 | .bold()
34 | .controlSize(.large)
35 | .tint(.blue)
36 | .buttonStyle(.bordered)
37 | NavigationLink("Streaming Chat Example",destination: StreamingChatView())
38 | .bold()
39 | .controlSize(.large)
40 | .tint(.blue)
41 | .buttonStyle(.bordered)
42 | NavigationLink("JSON Response",destination: JSONResponseView())
43 | .bold()
44 | .controlSize(.large)
45 | .tint(.blue)
46 | .buttonStyle(.bordered)
47 |
48 | }
49 | }
50 | }
51 | }
52 | }
53 |
54 | #Preview {
55 | ContentView()
56 | }
57 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/ChatManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatManager.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | @MainActor
12 | @Observable
13 | final class ChatManager {
14 |
15 | /// Messages sent from the user or received from OpenAI
16 | var messages = [ChatMessage]()
17 |
18 | /// Returns true if OpenAI is still streaming a response back to us
19 | var isProcessing: Bool {
20 | return self.streamTask != nil
21 | }
22 |
23 | /// Task that encapsulates OpenAI's streaming response.
24 | /// Cancel this to interrupt OpenAI's response.
25 | private var streamTask: Task<Void, Never>? = nil
26 | private let chatDataLoader = ChatDataLoader()
27 |
28 | /// Send a new message to OpenAI and start streaming OpenAI's response
29 | func send(message: ChatMessage) {
30 | self.messages.append(message)
31 | self.setupStreamingTask(withPrompt: message.text)
32 | }
33 |
34 | /// Stop the streaming response from OpenAI
35 | func stop() {
36 | self.streamTask?.cancel()
37 | self.streamTask = nil
38 | }
39 |
40 | private func setupStreamingTask(withPrompt prompt: String) {
41 | self.messages.append(ChatMessage(text: "", isUser: false, isWaitingForFirstText: true))
42 | self.streamTask = Task { [weak self] in
43 | guard let this = self else { return }
44 | do {
45 | let responseStream = try await this.chatDataLoader.addToConversation(prompt)
46 | for try await responseText in responseStream {
47 | if var last = this.messages.popLast() {
48 | last.isWaitingForFirstText = false
49 | last.text += responseText
50 | this.messages.append(last)
51 | }
52 | }
53 | this.streamTask = nil
54 | } catch {
55 | AppLogger.error("Received an unexpected error from OpenAI streaming: \(error)")
56 | }
57 | }
58 | }
59 | }
60 |
--------------------------------------------------------------------------------
/Demos/Translator/Translator/TopTranslateView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TopTranslateView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct TopTranslateView: View {
11 |
12 | @Binding var newText:String
13 | @Binding var translatedText:String
14 | @State private var showButton: Bool = false
15 | var translate: () -> Void
16 |
17 | var body: some View {
18 | VStack(alignment:.leading){
19 | Text("English")
20 | .font(.callout)
21 | .foregroundColor(.secondary)
22 | TextField("Type something...", text: $newText, axis: .vertical)
23 | .font(.title2)
24 | .lineLimit(...2)
25 | .textFieldStyle(.plain)
26 | .frame(maxHeight: .infinity, alignment:.topLeading)
27 | .onChange(of: newText) { _, newValue in
28 | withAnimation(.bouncy){
29 | if !newValue.isEmpty {
30 | showButton = true
31 | } else {
32 | showButton = false
33 | }
34 | }
35 | }
36 |
37 | if showButton {
38 | HStack(alignment:.bottom){
39 | Button{
40 | newText = ""
41 | translatedText = ""
42 | showButton = false
43 | } label:{
44 | Text("Clear")
45 | }
46 |
47 | Spacer()
48 |
49 | Button{
50 | self.translate()
51 | }label:{
52 | HStack(spacing:4){
53 | Text("Translate")
54 | Image(systemName: "arrow.forward")
55 | }
56 | }
57 | .buttonStyle(TranslateButton())
58 | }
59 | .transition(.opacity)
60 | }
61 | }
62 | .frame(maxWidth: .infinity)
63 | .padding(16)
64 | .background(
65 | RoundedRectangle(cornerRadius: 14, style: .continuous)
66 | .fill(Color(.tertiarySystemBackground))
67 | .shadow(color:.black.opacity(0.14), radius: 1)
68 | )
69 | }
70 | }
71 |
72 | #Preview {
73 | TopTranslateView(newText: .constant(""), translatedText: .constant(""), translate: {})
74 | }
75 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/ChatInputView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatInputView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | /// A view for the user to enter chat messages
11 | struct ChatInputView: View {
12 |
13 | private enum FocusedField {
14 | case newMessageText
15 | }
16 |
17 | /// Is a streaming chat response in progress
18 | let isStreamingResponse: Bool
19 |
20 | /// Callback invoked when the user taps the submit button or presses return
21 | var didSubmit: (String) -> Void
22 |
23 | /// Callback invoked when the user taps on the stop button
24 | var didTapStop: () -> Void
25 |
26 | /// State to collect new text messages
27 | @State private var newMessageText: String = ""
28 | @FocusState private var focusedField: FocusedField?
29 |
30 | var body: some View {
31 | HStack(spacing:0){
32 | chatInputTextField
33 | actionButton
34 | }
35 | .padding(8)
36 | }
37 |
38 | private var chatInputTextField: some View {
39 | TextField("Type a message", text: $newMessageText, axis: .vertical)
40 | .focused($focusedField, equals: .newMessageText)
41 | .lineLimit(5)
42 | .padding(.horizontal, 16)
43 | .padding(.vertical, 10)
44 | .background(
45 | RoundedRectangle(cornerRadius:30)
46 | .fill(Color(.tertiarySystemGroupedBackground))
47 | .stroke(.separator)
48 | )
49 | .onAppear {
50 | focusedField = .newMessageText
51 | }
52 | .onSubmit {
53 | didSubmit(newMessageText)
54 | newMessageText = ""
55 | }
56 | }
57 |
58 | private var actionButton: some View {
59 | Button {
60 | if isStreamingResponse {
61 | didTapStop()
62 | } else {
63 | didSubmit(newMessageText)
64 | newMessageText = ""
65 | }
66 | } label:{
67 | Image(systemName: isStreamingResponse ? "stop.circle.fill" : "arrow.up.circle.fill")
68 | .font(.title)
69 | .foregroundColor((isStreamingResponse || !newMessageText.isEmpty) ? .primary : .secondary)
70 | .frame(width:40, height:40)
71 | }
72 | .contentTransition(.symbolEffect(.replace))
73 | .padding(.horizontal, 8)
74 | }
75 | }
76 |
77 | #Preview {
78 | ChatInputView(isStreamingResponse: false, didSubmit: { _ in }, didTapStop: { })
79 | }
80 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TriviaManager.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | @MainActor
12 | @Observable
13 | final class TriviaManager {
14 |
15 | /// The topic of trivia
16 | let topic: String
17 |
18 | /// The number of cards for the user to solve
19 | let numCards: Int
20 |
21 | /// All trivia cards
22 | let triviaCards: [TriviaCardData]
23 |
24 | /// Observable of remaining cards that the user hasn't yet solved
25 | var remainingCards: [TriviaCardData] {
26 | return Array(self.triviaCards.suffix(from: self.currentCardIndex))
27 | }
28 |
29 | /// Number of questions that were answered correctly on the first guess
30 | var numCorrectOnFirstGuess: Int {
31 | return self.guessTracker.filter { $0.value == 1 }.count
32 | }
33 |
34 | private var currentCardIndex: Int
35 | private let triviaDataLoader: TriviaDataLoader
36 |
37 | /// Tracks number of guesses before the right answer was reached.
38 | /// The key is the question, the value is the number of guesses
39 | private var guessTracker = [TriviaQuestionModel: Int]()
40 |
41 | /// Creates a UI model for the TriviaView view
42 | /// - Parameters:
43 | /// - topic: The topic of trivia
44 | /// - numCards: The number of cards to display in the UI
45 | init(topic: String, numCards: Int) {
46 | let triviaFetcher = TriviaDataLoader(topic: topic)
47 | self.topic = topic
48 | self.numCards = numCards
49 | self.currentCardIndex = 0
50 | self.triviaDataLoader = triviaFetcher
51 | self.triviaCards = (0..
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/ClassifierDataLoader.swift:
--------------------------------------------------------------------------------
24 | (String, URL?) {
25 |
26 | guard let localURL = image.openAILocalURLEncoding() else {
27 | throw ClassifierDataLoaderError.couldNotCreateImageURL
28 | }
29 |
30 | let prompt = "What kind of plant is this and provide the wikipedia link for it, not in markdown"
31 | let response = try await AppConstants.openAIService.chatCompletionRequest(body: .init(
32 | model: "gpt-4o",
33 | messages: [
34 | .user(
35 | content: .parts(
36 | [
37 | .text(prompt),
38 | .imageURL(localURL, detail: .auto)
39 | ]
40 | )
41 | )
42 | ]
43 | ))
44 | let choices = response.choices
45 | guard let text = choices.first?.message.content else {
46 | throw ClassifierDataLoaderError.couldNotIdentifyPlant
47 | }
48 |
49 | return extractDescriptionAndWikipediaURL(text)
50 | }
51 | }
52 |
53 |
54 | // Assumes that the Wikipedia link is at the end of the input `text`
55 | private func extractDescriptionAndWikipediaURL(_ text: String) -> (String, URL?) {
56 | var mutableText = text
57 | let re = Regex {
58 | TryCapture {
59 | /https?:\/\/[^.]*\.wikipedia\.org[^\b]+$/
60 | } transform: {
61 | URL(string: String($0))
62 | }
63 | }
64 |
65 | var matchingURL: URL? = nil
66 | mutableText.replace(re, maxReplacements: 1) { matchingURL = $0.1; return "" }
67 | return (mutableText, matchingURL)
68 | }
69 |
70 | private extension CGImage {
71 | func openAILocalURLEncoding() -> URL? {
72 | if let data = UIImage(cgImage: self).jpegData(compressionQuality: 0.4) {
73 | let base64String = data.base64EncodedString()
74 | if let url = URL(string: "data:image/jpeg;base64,\(base64String)") {
75 | return url
76 | }
77 | }
78 | return nil
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Starter apps for AIProxy
2 |
3 | Use these apps as a jumping-off point to build your own experiences with AIProxy. Sample apps are organized by service (e.g. OpenAI, Anthropic, etc.). Each sample app has a placeholder for your AIProxy constants (see AppConstants.swift). The apps all use [AIProxySwift](https://github.com/lzell/AIProxySwift) to make the API calls.
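
The `AppConstants.swift` placeholder follows roughly the pattern below. This is a sketch rather than the exact file shipped with each app: the constant names and any extra values vary per service, and the partial key and service URL come from your AIProxy dashboard.

```swift
import AIProxy

enum AppConstants {
    // Replace with the snippet from your AIProxy dashboard
    static let openAIService = AIProxy.openAIService(
        partialKey: "partial-key-from-your-developer-dashboard",
        serviceURL: "service-url-from-your-developer-dashboard"
    )
}
```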
4 |
5 | ### Instructions to build and run
6 |
7 | 1. Watch [the AIProxy bootstrap walkthrough video](https://www.youtube.com/watch?v=ohsN9awCzw4)
8 | 2. Replace the constants in `AppConstants.swift` files with the snippet you receive from the AIProxy dashboard in step 1
10 | 3. Change the bundle identifier of the sample app to match the App ID you created in step 1
11 | 4. Add an AIPROXY_DEVICE_CHECK_BYPASS environment variable to Xcode. This is necessary for the iOS simulator to communicate with the AIProxy backend. Type **cmd-shift-comma** to open the "Edit Schemes" menu, select Run in the sidebar, then select Arguments from the top nav. In the "Environment Variables" section, add a variable named AIPROXY_DEVICE_CHECK_BYPASS with the value displayed on the key details screen of the AIProxy dashboard.
11 |
12 | ### Quickstart apps
13 |
14 | - **AIProxyAnthropic** - An Anthropic app that generates a message.
15 | - **AIProxyDeepL** - A DeepL app that translates input text to Spanish.
16 | - **AIProxyOpenAI** - An OpenAI app with chat, DALLE, and vision.
17 | - **AIProxyReplicate** - A Replicate app with Stable Diffusion XL.
18 | - **AIProxyGroq** - A Groq app with chat completion and streaming chat completion examples.
19 | - **AIProxyStabilityAI** - A Stability AI app that generates an image.
20 | - **AIProxyTogetherAI** - A Together AI app with examples for chat, streaming chat, and JSON response.
21 |
22 | ### Playground apps
23 |
24 | - **FilmFinder** - A movie recommendation app that uses Groq and TMDB (requires Xcode 16).
25 | - **PuLIDDemo** - An image generator app that uses PuLID on Replicate (requires Xcode 16).
26 | - **AIColorPalette** - An OpenAI color palette generator that uses an image as input (requires Xcode 16).
27 | - **Chat** - A basic chat app and interface built on OpenAI. Includes streaming responses and the ability to stop the stream.
28 | - **Image Classifier** - An OpenAI image classification app that identifies plants and provides a link to Wikipedia.
29 | - **Transcriber** - An OpenAI app that transcribes audio recorded using the device microphone.
30 | - **Translator** - A simple English to Spanish translation app with text to speech using OpenAI.
31 | - **Trivia Game** - A trivia game that uses GPT to generate multiple choice questions from a JSON response.
32 | - **Stickers** - An OpenAI app that turns a prompt into a kawaii style sticker and extracts the foreground/background using Vision.
33 | - **EmojiPuzzleMaker** - Generate emoji puzzles using Anthropic's Claude 3.5 Sonnet API.
34 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // QuizView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct TriviaView: View {
11 |
12 | @State private var triviaManager: TriviaManager?
13 |
14 | var body: some View {
15 | ZStack{
16 |
17 | Rectangle()
18 | .fill(Color(.secondarySystemBackground))
19 | .ignoresSafeArea()
20 |
21 | if let triviaManager = self.triviaManager {
22 | ZStack{
23 |
24 | if triviaManager.remainingCards.count == 0 {
25 | VStack{
26 | Text("You answered")
27 | .font(.system(size: 15, weight:.bold, design: .rounded))
28 | .foregroundColor(.secondary)
29 | Text("\(triviaManager.numCorrectOnFirstGuess) out \(triviaManager.numCards) correctly.")
30 | .font(.system(size: 24, weight:.semibold, design: .rounded))
31 | .multilineTextAlignment(.center)
32 | Button("Play again") {
33 | self.triviaManager = nil
34 | }
35 | .fontDesign(.rounded)
36 | .fontWeight(.bold)
37 | .controlSize(.large)
38 | .buttonStyle(.borderedProminent)
39 | .padding(.top, 8)
40 | }
41 | }
42 |
43 |
44 | VStack(spacing:24){
45 | Text(triviaManager.topic)
46 | .font(.system(size: 24, weight:.bold, design: .rounded))
47 | .fontWeight(.bold)
48 | ZStack{
49 | ForEach(triviaManager.remainingCards) { triviaCard in
50 | let cardPosition = triviaCard.position - (triviaManager.remainingCards.first?.position ?? 0)
51 | TriviaCardView(triviaCardData: triviaCard, triviaManager: $triviaManager)
52 | .zIndex(1 - Double(cardPosition))
53 | .offset(y: CGFloat(cardPosition) * 25)
54 | .scaleEffect(1.0 - (CGFloat(cardPosition) * 0.05))
55 | }
56 | }
57 | Spacer()
58 | }
59 | .padding()
60 | }
61 |
62 | } else {
63 | TriviaFormView(triviaManager: $triviaManager)
64 | }
65 | }
66 | }
67 | }
68 |
69 | #Preview {
70 | TriviaView()
71 | }
72 |
73 |
74 |
75 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/StickerDataLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StickerDataLoader.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import Vision
10 | import UIKit
11 | import AIProxy
12 |
13 | final actor StickerDataLoader {
14 | /// Creates a sticker from a given `prompt` using OpenAI's APIs
15 | /// On simulator, the sticker has an opaque background because the Vision framework is not available.
16 | /// On device, the sticker has a transparent background
17 | ///
18 | /// - Parameter prompt: The user-entered prompt
19 | /// - Returns: A sticker as a UIImage if we were able to get one from OpenAI, or nil otherwise
20 | func create(fromPrompt prompt: String) async throws -> UIImage? {
21 | let requestBody = OpenAICreateImageRequestBody(
22 | prompt: "cute design of a " + prompt + " kawaii sticker. nothing in the bg. white bg.",
23 | model: "dall-e-3"
24 | )
25 | let response = try await AppConstants.openAIService.createImageRequest(body: requestBody)
26 | print(response.data.first?.url ?? "")
27 |
28 | guard let url = response.data.first?.url, let data = try? Data(contentsOf: url) else {
29 | AppLogger.error("OpenAI returned a sticker imageURL that we could not fetch")
30 | return nil
31 | }
32 |
33 | guard let img = UIImage(data: data) else {
34 | AppLogger.error("Could not create a UIImage from the imageURL provided by OpenAI")
35 | return nil
36 | }
37 | return img.extractForegroundWithVision() ?? img
38 | }
39 | }
40 |
41 |
42 | private extension UIImage {
43 |
44 | convenience init?(pixelBuffer: CVPixelBuffer) {
45 | let ciImage = CIImage(cvPixelBuffer: pixelBuffer)
46 | let context = CIContext(options: nil)
47 | guard let cgImage = context.createCGImage(ciImage, from: ciImage.extent) else {
48 | return nil
49 | }
50 | self.init(cgImage: cgImage)
51 | }
52 |
53 | func extractForegroundWithVision() -> UIImage? {
54 | guard let cgImage = self.cgImage else { return nil }
55 | let request = VNGenerateForegroundInstanceMaskRequest()
56 | let handler = VNImageRequestHandler(cgImage: cgImage)
57 | do {
58 | try handler.perform([request])
59 | guard let result = request.results?.first else { return nil }
60 |
61 | let foregroundPixelBuffer = try result.generateMaskedImage(
62 | ofInstances: result.allInstances,
63 | from: handler,
64 | croppedToInstancesExtent: false
65 | )
66 |
67 | if let foregroundImage = UIImage(pixelBuffer: foregroundPixelBuffer) {
68 | return foregroundImage
69 | }
70 | } catch {
71 | AppLogger.info("Could not use Vision to cut the sticker out. Perhaps you are running on simulator?")
72 | }
73 | return nil
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/Demos/Chat/Chat/ChatDataLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatDataLoader.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import AIProxy
10 |
11 | enum ChatDataLoaderError: Error {
12 | case busy
13 | }
14 |
15 | /// Asynchronously sends prompts to OpenAI and streams back the response
16 | final actor ChatDataLoader {
17 | private var streamingResponseAccumulator: String?
18 |
19 | /// All chat messages, including user queries and openai responses.
20 | /// The full history of the chat is sent with each request to openai to provide an ongoing conversation with memory.
21 | private var messages = [OpenAIChatCompletionMessage]()
22 |
23 | /// Add a user message to the conversation and stream back the openai response
24 | func addToConversation(_ prompt: String) async throws -> AsyncThrowingStream<String, Error> {
25 | guard streamingResponseAccumulator == nil else {
26 | throw ChatDataLoaderError.busy
27 | }
28 | self.streamingResponseAccumulator = ""
29 |
30 | self.messages.append(.user(content: .text(prompt)))
31 | let requestBody = OpenAIChatCompletionRequestBody(
32 | model: "gpt-4o-mini",
33 | messages: self.messages
34 | )
35 | let stream = try await AppConstants.openAIService.streamingChatCompletionRequest(body: requestBody)
36 |
37 | return AsyncThrowingStream { continuation in
38 | let task = Task {
39 | for try await result in stream {
40 | guard let choice = result.choices.first,
41 | let content = choice.delta.content else
42 | {
43 | self.addAccumulatedResponseToMessageHistory()
44 | continuation.finish()
45 | return
46 | }
47 |
48 | self.addToResponseAccumulator(text: content)
49 | continuation.yield(content)
50 | }
51 | }
52 |
53 | continuation.onTermination = { @Sendable termination in
54 | task.cancel()
55 | if case .cancelled = termination {
56 | Task {
57 | await self.addAccumulatedResponseToMessageHistory()
58 | }
59 | }
60 | }
61 | }
62 | }
63 |
64 |
65 | private func addAccumulatedResponseToMessageHistory() {
66 | if let accumulator = self.streamingResponseAccumulator {
67 | self.messages.append(.assistant(content: .text(accumulator)))
68 | self.streamingResponseAccumulator = nil
69 | }
70 | }
71 |
72 | private func addToResponseAccumulator(text: String) {
73 | if let accumulator = self.streamingResponseAccumulator {
74 | self.streamingResponseAccumulator = accumulator + text
75 | } else {
76 | self.streamingResponseAccumulator = text
77 | }
78 | }
79 | }
80 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/TranscriberManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranscriberManager.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 | import SwiftData
11 |
12 | @MainActor
13 | @Observable
14 | final class TranscriberManager {
15 |
16 | private(set) var isRecording = false
17 | private let audioRecorder = AudioRecorder()
18 | private let transcriber = TranscriberDataLoader()
19 | private let modelContext: ModelContext
20 | var recordings = [TranscribedAudioRecording]()
21 |
22 | init() {
23 | let context = AppConstants.swiftDataContainer.mainContext
24 | self.recordings = fetchPersistedRecordings(context)
25 | self.modelContext = context
26 | }
27 |
28 | /// This pollutes the manager a bit.
29 | /// I wrote up a better way to do this here: https://stackoverflow.com/a/77772091/143447
30 | /// - Parameter newValue: The value to set `isRecording` to
31 | private func setIsRecording(_ newValue: Bool) {
32 | withAnimation(.smooth(duration: 0.75)) {
33 | self.isRecording = newValue
34 | }
35 | }
36 |
37 | /// Start recording an audio file
38 | func startRecording() async {
39 | self.setIsRecording(await self.audioRecorder.start())
40 | if !self.isRecording {
41 | AppLogger.error("Could not start the audio recorder")
42 | }
43 | }
44 |
45 | /// Stop recording the audio file and transcribe it to text with Whisper
46 | /// - Parameter duration: Annotate the audio file with this duration.
47 | func stopRecording(duration: String) async {
48 | if let recording = await self.audioRecorder.stopRecording(duration: duration) {
49 | let transcript = await self.transcriber.run(onRecording: recording)
50 | let transcribed = TranscribedAudioRecording(audioRecording: recording, transcript: transcript, createdAt: Date())
51 | self.modelContext.insert(transcribed)
52 | self.recordings = fetchPersistedRecordings(self.modelContext)
53 | }
54 | self.setIsRecording(false)
55 | }
56 |
57 | /// Removes a recording from persistent storage and deletes the associated audio file from disk
58 | /// - Parameter index: the index in `recordings` to delete
59 | func deleteRecording(at index: Int) {
60 | FileUtils.deleteFile(at: self.recordings[index].audioRecording.localUrl)
61 | self.modelContext.delete(self.recordings[index])
62 | self.recordings = fetchPersistedRecordings(self.modelContext)
63 | }
64 | }
65 |
66 | private func fetchPersistedRecordings(_ modelContext: ModelContext) -> [TranscribedAudioRecording] {
67 | do {
68 | let descriptor = FetchDescriptor<TranscribedAudioRecording>(
69 | sortBy: [SortDescriptor(\TranscribedAudioRecording.createdAt, order: .reverse)]
70 | )
71 | return try modelContext.fetch(descriptor)
72 | } catch {
73 | AppLogger.error("Could not fetch audio recordings with SwiftData")
74 | return []
75 | }
76 | }
77 |
--------------------------------------------------------------------------------
/Demos/Classifier/Classifier/CameraDataLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | // CameraFrameHandler.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import AVFoundation
9 | import CoreImage
10 |
11 | /// Vends camera frames from the built-in back camera.
12 | final actor CameraDataLoader {
13 | private let sampleBufferDelegate = CameraFrameSampleBufferDelegate()
14 | private let captureSession = AVCaptureSession()
15 |
16 | /// Streams images of the camera frame.
17 | /// Use the returned stream in a `for await` loop.
18 | func imageStream() -> AsyncStream<CGImage> {
19 | self.setupCaptureSession()
20 | self.captureSession.startRunning()
21 | return AsyncStream { [weak self] continuation in
22 | self?.sampleBufferDelegate.didReceiveImage = { image in
23 | continuation.yield(image)
24 | }
25 | }
26 | }
27 |
28 | private func setupCaptureSession() {
29 | let videoOutput = AVCaptureVideoDataOutput()
30 | guard let videoDevice = AVCaptureDevice.default(.builtInDualWideCamera,for: .video, position: .back) else { return }
31 | guard let videoDeviceInput = try? AVCaptureDeviceInput(device: videoDevice) else { return }
32 | guard captureSession.canAddInput(videoDeviceInput) else { return }
33 | captureSession.addInput(videoDeviceInput)
34 |
35 | videoOutput.setSampleBufferDelegate(
36 | sampleBufferDelegate,
37 | queue: AppConstants.videoSampleQueue
38 | )
39 | captureSession.addOutput(videoOutput)
40 |
41 | videoOutput.connection(with: .video)?.videoRotationAngle = 90
42 | }
43 | }
44 |
45 |
46 | private final class CameraFrameSampleBufferDelegate: NSObject, AVCaptureVideoDataOutputSampleBufferDelegate {
47 | private let coreImageContext = CIContext()
48 | var didReceiveImage: ((CGImage) -> Void)?
49 |
50 | /// Delegate implementation for AVCaptureVideoDataOutputSampleBufferDelegate conformance
51 | func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
52 | dispatchPrecondition(condition: .onQueue(AppConstants.videoSampleQueue))
53 | guard let cgImage = self.imageFromSampleBuffer(sampleBuffer: sampleBuffer) else {
54 | AppLogger.info("Could not convert a sample buffer from the camera into a CGImage")
55 | return
56 | }
57 |
58 | self.didReceiveImage?(cgImage)
59 | }
60 |
61 | private func imageFromSampleBuffer(sampleBuffer: CMSampleBuffer) -> CGImage? {
62 | guard let imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
63 | AppLogger.info("Could not get an image buffer from CMSampleBuffer")
64 | return nil
65 | }
66 |
67 | let ciImage = CIImage(cvPixelBuffer: imageBuffer)
68 | guard let cgImage = self.coreImageContext.createCGImage(ciImage, from: ciImage.extent) else {
69 | AppLogger.info("Could not create a CGImage using a core image context")
70 | return nil
71 | }
72 |
73 | return cgImage
74 | }
75 | }
76 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/ChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatView.swift
3 | // AIProxyOpenAI
4 | //
5 | // Created by Todd Hamilton on 6/14/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct ChatView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var result = ""
15 | @State private var isLoading = false
16 | @State private var showingAlert = false
17 |
18 | func generate() async throws {
19 | isLoading = true
20 | defer { isLoading = false }
21 | do {
22 | let response = try await openAIService.chatCompletionRequest(body: .init(
23 | model: "gpt-4o",
24 | messages: [.user(content: .text(prompt))]
25 | ))
26 | result = response.choices.first?.message.content ?? ""
27 | showingAlert = true
28 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
29 | print("Received non-200 status code: \(statusCode) with response body: \(String(describing: responseBody))")
30 | } catch {
31 | print(error.localizedDescription)
32 | }
33 | }
34 |
35 | var body: some View {
36 | VStack {
37 | VStack{
38 | ContentUnavailableView(
39 | "Generate Text",
40 | systemImage: "doc.plaintext.fill",
41 | description: Text("Write a prompt below")
42 | )
43 | }
44 | .alert(isPresented: $showingAlert) {
45 | Alert(
46 | title: Text("Result"),
47 | message: Text(result),
48 | dismissButton: .default(Text("Close"))
49 | )
50 | }
51 |
52 | Spacer()
53 |
54 | VStack(spacing:12){
55 | TextField("Type a prompt", text:$prompt)
56 | .submitLabel(.go)
57 | .padding(12)
58 | .background(Color(.systemBackground))
59 | .cornerRadius(8)
60 | .shadow(color:.primary, radius: 1)
61 | .onSubmit {
62 | Task{ try await generate() }
63 | }
64 | Button{
65 | Task{ try await generate() }
66 | }label:{
67 | if isLoading {
68 | ProgressView()
69 | .controlSize(.regular)
70 | .frame(maxWidth:.infinity)
71 | } else {
72 | Text("Generate Text")
73 | .bold()
74 | .frame(maxWidth:.infinity)
75 | }
76 | }
77 | .controlSize(.large)
78 | .buttonStyle(.borderedProminent)
79 | .disabled(isLoading)
80 | }
81 | }
82 | .padding()
83 | .navigationTitle("Chat Completion")
84 | .navigationBarTitleDisplayMode(.inline)
85 | }
86 | }
87 |
88 | #Preview {
89 | ChatView()
90 | }
91 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaAnswerPicker.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TriviaAnswerPicker.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | struct TriviaAnswerPicker: View {
12 | /// Data model that holds the trivia question, potential answers, and correct answer index
13 | let questionModel: TriviaQuestionModel
14 |
15 | /// This question position in the stack of trivia cards
16 | let questionNumber: Int
17 |
18 | /// Number of questions in the stack of trivia cards
19 | let questionOf: Int
20 |
21 | /// The argument passed to this closure is the guessed answer index for comparison with `questionModel.correctAnswerIndex`
22 | let didTapAnswer: (Int) -> Void
23 |
24 |
25 | var body: some View {
26 | VStack(alignment:.leading, spacing:36) {
27 |
28 | VStack(alignment:.leading, spacing:16){
29 | Text("Question \(questionNumber) of \(questionOf)")
30 | .font(.system(size: 15, weight:.medium, design: .rounded))
31 | .foregroundColor(.secondary)
32 |
33 | Text(questionModel.question)
34 | .font(.system(size: 20, weight:.medium, design: .rounded))
35 | .fixedSize(horizontal: false, vertical: true)
36 | }
37 | .frame(maxWidth: .infinity, alignment:.leading)
38 |
39 | VStack(alignment:.leading, spacing:8){
40 | ForEach(questionModel.labeledAnswers) { labeledAnswer in
41 | Text(labeledAnswer.text)
42 | .fixedSize(horizontal: false, vertical: true)
43 | }
44 | }
45 | .font(.system(size: 17, weight:.medium, design: .rounded))
46 | .frame(maxWidth: .infinity, alignment:.leading)
47 |
48 | VStack{
49 | HStack(spacing:8) {
50 | CardButton(systemImageName: "a.circle.fill", tint: .blue) {
51 | didTapAnswer(0)
52 | }
53 |
54 | CardButton(systemImageName: "b.circle.fill", tint: .mint) {
55 | didTapAnswer(1)
56 | }
57 | }
58 | HStack {
59 | CardButton(systemImageName: "c.circle.fill", tint: .green) {
60 | didTapAnswer(2)
61 | }
62 |
63 | CardButton(systemImageName: "d.circle.fill", tint: .indigo) {
64 | didTapAnswer(3)
65 | }
66 | }
67 | }
68 | .buttonStyle(.bordered)
69 | .font(.title)
70 | .frame(maxWidth:.infinity, alignment:.leading)
71 | }
72 | .padding(16)
73 | }
74 | }
75 |
76 | private struct CardButton: View {
77 | let systemImageName: String
78 | let tint: Color
79 | let action: () -> Void
80 |
81 | var body: some View {
82 | Button(action: action) {
83 | Image(systemName: systemImageName)
84 | .frame(maxWidth: .infinity)
85 | }
86 | .tint(tint)
87 | .controlSize(.large)
88 | }
89 | }
90 |
--------------------------------------------------------------------------------
/Demos/AIColorPalette/AIColorPalette/AIProxyIntegration.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AIProxyIntegration.swift
3 | // AIColorPalette
4 | //
5 | // Created by Todd Hamilton on 6/20/24.
6 | //
7 |
8 | import AIProxy // The AIProxy SPM package is found at https://github.com/lzell/AIProxySwift
9 | import Foundation
10 | import UIKit
11 |
12 | #error(
13 | """
14 | Uncomment one of the methods below. To build and run on device you must follow the AIProxy integration guide.
15 | Please see https://www.aiproxy.pro/docs/integration-guide.html")
16 | """
17 | )
18 |
19 | /* Uncomment for BYOK use cases */
20 | let openAIService = AIProxy.openAIDirectService(
21 | unprotectedAPIKey: "your-openai-key"
22 | )
23 |
24 | /* Uncomment for all other production use cases */
25 | //let openAIService = AIProxy.openAIService(
26 | // partialKey: "partial-key-from-your-developer-dashboard",
27 | // serviceURL: "service-url-from-your-developer-dashboard"
28 | //)
29 |
30 | struct AIProxyIntegration {
31 |
32 | static func getColorPalette(forImage image: UIImage) async -> String? {
33 | let message = "generate a color palette based on the provided image, return 4 colors in valid JSON, nothing else. Here's an example of the JSON format: 'colors': [{red: 0.85, green: 0.85, blue: 0.85}, {red: 0.85, green: 0.85, blue: 0.85}, {red: 0.85, green: 0.85, blue: 0.85}, {red: 0.85, green: 0.85, blue: 0.85}, {red: 0.85, green: 0.85, blue: 0.85}]."
34 |
35 | let localURL = createOpenAILocalURL(forImage: image)!
36 | do {
37 | let response = try await openAIService.chatCompletionRequest(body: .init(
38 | model: "gpt-4o",
39 | messages: [
40 | .system(
41 | content: .text(message)
42 | ),
43 | .user(
44 | content: .parts(
45 | [
46 | .imageURL(localURL, detail: .auto)
47 | ]
48 | )
49 | )
50 | ],
51 | responseFormat: .jsonObject
52 | ))
53 | return response.choices.first?.message.content
54 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
55 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
56 | } catch {
57 | print(error.localizedDescription)
58 | }
59 | return nil
60 | }
61 |
62 | private init() {
63 | fatalError("This type is not intended to be instantiated")
64 | }
65 | }
66 |
67 | func createOpenAILocalURL(forImage image: UIImage) -> URL? {
68 | // Attempt to get JPEG data from the UIImage
69 | guard let jpegData = image.jpegData(compressionQuality: 0.4) else {
70 | return nil
71 | }
72 |
73 | // Encode the JPEG data to a base64 string
74 | let base64String = jpegData.base64EncodedString()
75 |
76 | // Create the data URL string
77 | let urlString = "data:image/jpeg;base64,\(base64String)"
78 |
79 | // Return the URL constructed from the data URL string
80 | return URL(string: urlString)
81 | }
82 |
--------------------------------------------------------------------------------
/Demos/Transcriber/Transcriber/AudioFileWriter.swift:
--------------------------------------------------------------------------------
1 | //
2 | // AudioFileWriter.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import AVFoundation
10 |
11 | /// One of the following errors will be thrown at initialization if the microphone vendor can't vend samples.
12 | enum AudioFileWriterError: Error {
13 | case couldNotWriteToDestinationURL
14 | case couldNotCreateAudioInput
15 | }
16 |
17 |
18 | /// Writes an m4a file out of audio sample buffers.
19 | /// Samples passed to the `append` method will be written to the m4a file between calls to `init()` and `finishWriting()`.
20 | /// Create one instance of AudioFileWriter for each audio file that you'd like to write.
21 | @AudioActor
22 | final class AudioFileWriter {
23 | /// The location to write the audio file to
24 | let fileURL: URL
25 |
26 | private let assetWriter: AVAssetWriter
27 | private let microphoneWriter: AVAssetWriterInput
28 | private let audioSettings: [String: Any] = [
29 | AVFormatIDKey: kAudioFormatMPEG4AAC,
30 | AVSampleRateKey: 48_000,
31 | AVNumberOfChannelsKey: 2,
32 | AVEncoderAudioQualityKey: AVAudioQuality.high.rawValue
33 | ]
34 |
35 | private var isWriting = false
36 |
37 | /// Throws one of `AudioFileWriterError` if we can't initialize the AVFoundation dependencies
38 | /// - Parameter fileURL: The location to write the audio file to
39 | init(fileURL: URL) throws {
40 | self.fileURL = fileURL
41 | do {
42 | self.assetWriter = try AVAssetWriter(outputURL: fileURL, fileType: .m4a)
43 | } catch {
44 | throw AudioFileWriterError.couldNotWriteToDestinationURL
45 | }
46 |
47 | self.microphoneWriter = AVAssetWriterInput(mediaType: .audio, outputSettings: self.audioSettings)
48 | self.microphoneWriter.expectsMediaDataInRealTime = true
49 |
50 | if self.assetWriter.canAdd(self.microphoneWriter) {
51 | self.assetWriter.add(self.microphoneWriter)
52 | } else {
53 | throw AudioFileWriterError.couldNotCreateAudioInput
54 | }
55 | }
56 |
57 | /// Append a sample buffer to the audio file
58 | /// - Parameter sample: A core media sample buffer. See the `MicrophoneSampleVendor` file for an example of how to source these.
59 | func append(sample: CMSampleBuffer) {
60 | if !self.isWriting {
61 | self.assetWriter.startWriting()
62 | self.assetWriter.startSession(atSourceTime: sample.presentationTimeStamp)
63 | self.isWriting = true
64 | }
65 | if self.microphoneWriter.isReadyForMoreMediaData {
66 | self.microphoneWriter.append(sample)
67 | } else {
68 | AppLogger.warning("The AudioFileWriter is not ready for more audio data")
69 | }
70 | }
71 |
72 | /// Finishes writing the file to disk
73 | /// - Returns: URL location of the m4a file on disk
74 | func finishWriting() async -> URL {
75 | self.microphoneWriter.markAsFinished()
76 | await self.assetWriter.finishWriting()
77 | self.isWriting = false
78 | return self.fileURL
79 | }
80 | }
81 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/ChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatView.swift
3 | // AIProxyGroq
4 | //
5 | // Created by Todd Hamilton on 10/1/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct ChatView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var result = ""
15 | @State private var isLoading = false
16 | @State private var showingAlert = false
17 |
18 | func generate() async throws {
19 | isLoading = true
20 | defer { isLoading = false }
21 | do {
22 | let response = try await groqService.chatCompletionRequest(body: .init(
23 |                 messages: [.user(content: prompt)],
24 | model: "mixtral-8x7b-32768"
25 | ))
26 | print(response.choices.first?.message.content ?? "")
27 | result = response.choices.first?.message.content ?? ""
28 | showingAlert = true
29 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
30 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
31 | } catch {
32 | print(error.localizedDescription)
33 | }
34 | }
35 |
36 | var body: some View {
37 | VStack {
38 | VStack{
39 | ContentUnavailableView(
40 | "Generate Text",
41 | systemImage: "doc.plaintext.fill",
42 | description: Text("Write a prompt below")
43 | )
44 | }
45 | .alert(isPresented: $showingAlert) {
46 | Alert(
47 | title: Text("Result"),
48 | message: Text(result),
49 | dismissButton: .default(Text("Close"))
50 | )
51 | }
52 |
53 | Spacer()
54 |
55 | VStack(spacing:12){
56 | TextField("Type a prompt", text:$prompt)
57 | .submitLabel(.go)
58 | .padding(12)
59 | .background(Color(.systemBackground))
60 | .cornerRadius(8)
61 | .shadow(color:.primary, radius: 1)
62 | .onSubmit {
63 | Task{ try await generate() }
64 | }
65 | Button{
66 | Task{ try await generate() }
67 | }label:{
68 | if isLoading {
69 | ProgressView()
70 | .controlSize(.regular)
71 | .frame(maxWidth:.infinity)
72 | } else {
73 | Text("Generate Text")
74 | .bold()
75 | .frame(maxWidth:.infinity)
76 | }
77 | }
78 | .controlSize(.large)
79 | .buttonStyle(.borderedProminent)
80 |             .disabled(isLoading)
81 | }
82 | }
83 | .padding()
84 | .navigationTitle("Chat Completion")
85 | .navigationBarTitleDisplayMode(.inline)
86 | }
87 | }
88 |
89 |
90 | #Preview {
91 | ChatView()
92 | }
93 |
--------------------------------------------------------------------------------
/AIProxyDeepL/AIProxyDeepL/TranslationView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TranslationView.swift
3 | // AIProxyDeepL
4 | //
5 | // Created by Todd Hamilton on 8/14/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct TranslationView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var result = ""
15 | @State private var isLoading = false
16 | @State private var showingAlert = false
17 |
18 | func generate() async throws {
19 | isLoading = true
20 | defer { isLoading = false }
21 | do {
22 | let body = DeepLTranslateRequestBody(targetLang: "ES", text: [prompt])
23 | let response = try await deepLService.translateRequest(body: body)
24 | // Do something with `response.translations`
25 | result = response.translations.first?.text ?? ""
26 | showingAlert = true
27 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
28 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
29 | } catch {
30 | print("Could not create translation: \(error.localizedDescription)")
31 | }
32 | }
33 |
34 | var body: some View {
35 | VStack {
36 | VStack{
37 | ContentUnavailableView(
38 | "Translate to Spanish",
39 | systemImage: "captions.bubble.fill",
40 | description: Text("Write text you want to translate below")
41 | )
42 | }
43 | .alert(isPresented: $showingAlert) {
44 | Alert(
45 | title: Text("Result"),
46 | message: Text(result),
47 | dismissButton: .default(Text("Close"))
48 | )
49 | }
50 |
51 | Spacer()
52 |
53 | VStack(spacing:12){
54 | TextField("Type your text here", text:$prompt)
55 | .submitLabel(.go)
56 | .padding(12)
57 | .background(Color(.systemBackground))
58 | .cornerRadius(8)
59 | .shadow(color:.primary, radius: 1)
60 | .onSubmit {
61 | Task{ try await generate() }
62 | }
63 | Button{
64 | Task{ try await generate() }
65 | }label:{
66 | if isLoading {
67 | ProgressView()
68 | .controlSize(.regular)
69 | .frame(maxWidth:.infinity)
70 | } else {
71 | Text("Translate")
72 | .bold()
73 | .frame(maxWidth:.infinity)
74 | }
75 | }
76 | .controlSize(.large)
77 | .buttonStyle(.borderedProminent)
78 |             .disabled(isLoading)
79 | }
80 | }
81 | .padding()
82 | .navigationTitle("Translate Example")
83 | .navigationBarTitleDisplayMode(.inline)
84 | }
85 | }
86 |
87 | #Preview {
88 | TranslationView()
89 | }
90 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/ChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ChatView.swift
3 | // AIProxyTogetherAI
4 | //
5 | // Created by Todd Hamilton on 8/18/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct ChatView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var result = ""
15 | @State private var isLoading = false
16 | @State private var showingAlert = false
17 |
18 | func generate() async throws {
19 | isLoading = true
20 | defer { isLoading = false }
21 | do {
22 | let requestBody = TogetherAIChatCompletionRequestBody(
23 | messages: [TogetherAIMessage(content: prompt, role: .user)],
24 | model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
25 | )
26 | let response = try await togetherAIService.chatCompletionRequest(body: requestBody)
27 | print(response.choices.first?.message.content ?? "")
28 | result = response.choices.first?.message.content ?? ""
29 | showingAlert = true
30 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
31 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
32 | } catch {
33 | print("Could not create TogetherAI chat completion: \(error.localizedDescription)")
34 | }
35 | }
36 |
37 | var body: some View {
38 | VStack {
39 | VStack{
40 | ContentUnavailableView(
41 | "Generate Text",
42 | systemImage: "doc.plaintext.fill",
43 | description: Text("Write a prompt below")
44 | )
45 | }
46 | .alert(isPresented: $showingAlert) {
47 | Alert(
48 | title: Text("Result"),
49 | message: Text(result),
50 | dismissButton: .default(Text("Close"))
51 | )
52 | }
53 |
54 | Spacer()
55 |
56 | VStack(spacing:12){
57 | TextField("Type a prompt", text:$prompt)
58 | .submitLabel(.go)
59 | .padding(12)
60 | .background(.white)
61 | .cornerRadius(8)
62 | .shadow(radius: 1)
63 | .onSubmit {
64 | Task{ try await generate() }
65 | }
66 | Button{
67 | Task{ try await generate() }
68 | }label:{
69 | if isLoading {
70 | ProgressView()
71 | .controlSize(.regular)
72 | .frame(maxWidth:.infinity)
73 | } else {
74 | Text("Generate Text")
75 | .bold()
76 | .frame(maxWidth:.infinity)
77 | }
78 | }
79 | .controlSize(.large)
80 | .buttonStyle(.borderedProminent)
81 |             .disabled(isLoading)
82 | }
83 | }
84 | .padding()
85 | .navigationTitle("Chat Example")
86 | .navigationBarTitleDisplayMode(.inline)
87 | }
88 | }
89 |
90 | #Preview {
91 | ChatView()
92 | }
93 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaDataLoader.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  TriviaDataLoader.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import AIProxy
10 |
11 | // It's important to add the 'produce JSON' instruction to the system prompt.
12 | // See the note at https://platform.openai.com/docs/api-reference/chat/create#chat-create-response_format
13 | private let prompt = """
14 | You are a trivia bot that produces JSON. You ask hard questions with four possible answers. Specify the index of the correct answer in the key `correct_answer_index`. Example response:
15 | { "question": "xyz", "answers": ["a", "b", "c", "d"], "correct_answer_index": 2 }
16 | """
17 |
18 | /// Loads trivia data from openai
19 | final actor TriviaDataLoader {
20 | /// The topic of trivia
21 | let topic: String
22 |
23 | /// Create the TriviaDataLoader responsible for fetching trivia data from OpenAI
24 | /// - Parameter topic: The topic of trivia
25 | init(topic: String) {
26 | self.topic = topic
27 | }
28 |
29 | /// We store past questions, and send them back to openai on subsequent requests.
30 | /// This prevents chat from asking the same questions
31 | private var pastQuestions = [String]()
32 |
33 | deinit {
34 |         AppLogger.debug("TriviaDataLoader is being freed")
35 | }
36 |
37 |
38 | /// Fetches the next trivia question from OpenAI over the network
39 | /// - Returns: A TriviaQuestionModel containing one question and multiple choice answers
40 | func getNextQuestion() async throws -> TriviaQuestionModel {
41 |         var systemPrompt = prompt
42 |         if self.pastQuestions.count > 0 {
43 |             let pastQuestionsList = self.pastQuestions.joined(separator: "\n\n")
44 |             systemPrompt += "\nDo not repeat any of these questions: \(pastQuestionsList)"
45 |         }
46 |
47 |         let requestBody = OpenAIChatCompletionRequestBody(
48 |             model: "gpt-4o",
49 |             messages: [
50 |                 .system(content: .text(systemPrompt)),
51 |                 .user(content: .text("Ask me a question about: \(topic)"))
52 | ],
53 | responseFormat: .jsonObject
54 | )
55 | let response = try await AppConstants.openAIService.chatCompletionRequest(body: requestBody)
56 |
57 | guard let text = response.choices.first?.message.content else {
58 | throw TriviaFetcherError.couldNotFetchQuestion
59 | }
60 |
61 | let decoder = JSONDecoder()
62 | decoder.keyDecodingStrategy = .convertFromSnakeCase
63 |
64 | AppLogger.info("Received from openai: \(text)")
65 |         let model = try decoder.decode(TriviaQuestionModel.self, from: Data(text.utf8))
66 | self.pastQuestions.append(model.question)
67 | return model
68 | }
69 | }
70 |
71 | enum TriviaFetcherError: Error {
72 | case couldNotFetchQuestion
73 | }
74 |
75 | struct TriviaQuestionModel: Decodable, Hashable {
76 |
77 | struct LabeledAnswer: Identifiable {
78 | let id = UUID()
79 | let text: String
80 | }
81 |
82 | let question: String
83 | let answers: [String]
84 | let correctAnswerIndex: Int
85 |
86 | var labeledAnswers: [LabeledAnswer] {
87 | return zip(["A", "B", "C", "D"], self.answers).map {
88 | LabeledAnswer(text: "\($0). \($1)")
89 | }
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/AIProxyGroq/AIProxyGroq/StreamingChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StreamingChatView.swift
3 | // AIProxyGroq
4 | //
5 | // Created by Todd Hamilton on 10/1/24.
6 | //
7 |
8 |
9 | import SwiftUI
10 | import AIProxy
11 |
12 | struct StreamingChatView: View {
13 |
14 | @State private var prompt = ""
15 | @State private var result = ""
16 | @State private var isLoading = false
17 | @State private var showingAlert = false
18 |
19 | func generate() async throws {
20 | isLoading = true
21 | defer { isLoading = false }
22 | do {
23 | let stream = try await groqService.streamingChatCompletionRequest(body: .init(
24 |             messages: [.user(content: prompt)],
25 | model: "mixtral-8x7b-32768"
26 | )
27 | )
28 | for try await chunk in stream {
29 | print(chunk.choices.first?.delta.content ?? "")
30 | }
31 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
32 | print("Received \(statusCode) status code with response body: \(responseBody)")
33 | } catch {
34 | print(error.localizedDescription)
35 | }
36 | }
37 |
38 | var body: some View {
39 | VStack {
40 | VStack{
41 | ContentUnavailableView(
42 | "Generate Text",
43 | systemImage: "doc.plaintext.fill",
44 | description: Text("Write a prompt below")
45 | )
46 | }
47 | .alert(isPresented: $showingAlert) {
48 | Alert(
49 | title: Text("Result"),
50 | message: Text("View the streaming response in the Xcode console."),
51 | dismissButton: .default(Text("Close"))
52 | )
53 | }
54 |
55 | Spacer()
56 |
57 | VStack(spacing:12){
58 | TextField("Type a prompt", text:$prompt)
59 | .submitLabel(.go)
60 | .padding(12)
61 | .background(Color(.systemBackground))
62 | .cornerRadius(8)
63 | .shadow(color:.primary, radius: 1)
64 | .onSubmit {
65 | showingAlert = true
66 | Task{ try await generate() }
67 | }
68 | Button{
69 | showingAlert = true
70 | Task{ try await generate() }
71 | }label:{
72 | if isLoading {
73 | ProgressView()
74 | .controlSize(.regular)
75 | .frame(maxWidth:.infinity)
76 | } else {
77 | Text("Generate Text")
78 | .bold()
79 | .frame(maxWidth:.infinity)
80 | }
81 | }
82 | .controlSize(.large)
83 | .buttonStyle(.borderedProminent)
84 |             .disabled(isLoading)
85 | }
86 | }
87 | .padding()
88 | .navigationTitle("Streaming Chat Completion")
89 | .navigationBarTitleDisplayMode(.inline)
90 | }
91 | }
92 |
93 | #Preview {
94 |     StreamingChatView()
95 | }
96 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/StreamingChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StreamingChatView.swift
3 | // AIProxyOpenAI
4 | //
5 | // Created by Todd Hamilton on 8/13/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct StreamingChatView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var result = ""
15 | @State private var isLoading = false
16 | @State private var showingAlert = false
17 |
18 | func generate() async throws {
19 |
20 | let requestBody = OpenAIChatCompletionRequestBody(
21 | model: "gpt-4o",
22 | messages: [.user(content: .text(prompt))]
23 | )
24 | isLoading = true
25 | defer { isLoading = false }
26 | do {
27 | let stream = try await openAIService.streamingChatCompletionRequest(body: requestBody)
28 | for try await chunk in stream {
29 | print(chunk.choices.first?.delta.content ?? "")
30 | }
31 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
32 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
33 | } catch {
34 | print(error.localizedDescription)
35 | }
36 | }
37 |
38 | var body: some View {
39 | VStack {
40 | VStack{
41 | ContentUnavailableView(
42 | "Generate Text",
43 | systemImage: "doc.plaintext.fill",
44 | description: Text("Write a prompt below")
45 | )
46 | }
47 | .alert(isPresented: $showingAlert) {
48 | Alert(
49 | title: Text("Result"),
50 | message: Text("View the streaming response in the Xcode console."),
51 | dismissButton: .default(Text("Close"))
52 | )
53 | }
54 |
55 | Spacer()
56 |
57 | VStack(spacing:12){
58 | TextField("Type a prompt", text:$prompt)
59 | .submitLabel(.go)
60 | .padding(12)
61 | .background(Color(.systemBackground))
62 | .cornerRadius(8)
63 | .shadow(color:.primary, radius: 1)
64 | .onSubmit {
65 | Task{ try await generate() }
66 | }
67 | Button{
68 | showingAlert = true
69 | Task{ try await generate() }
70 | }label:{
71 | if isLoading {
72 | ProgressView()
73 | .controlSize(.regular)
74 | .frame(maxWidth:.infinity)
75 | } else {
76 | Text("Generate Text")
77 | .bold()
78 | .frame(maxWidth:.infinity)
79 | }
80 | }
81 | .controlSize(.large)
82 | .buttonStyle(.borderedProminent)
83 |             .disabled(isLoading)
84 | }
85 | }
86 | .padding()
87 | .navigationTitle("Streaming Chat Completion")
88 | .navigationBarTitleDisplayMode(.inline)
89 | }
90 | }
91 |
92 | #Preview {
93 | StreamingChatView()
94 | }
95 |
--------------------------------------------------------------------------------
/AIProxyTogetherAI/AIProxyTogetherAI/StreamingChatView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StreamingChatView.swift
3 | // AIProxyTogetherAI
4 | //
5 | // Created by Todd Hamilton on 8/18/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct StreamingChatView: View {
12 | @State private var prompt = ""
13 | @State private var result = ""
14 | @State private var isLoading = false
15 | @State private var showingAlert = false
16 |
17 | func generate() async throws {
18 | isLoading = true
19 | defer { isLoading = false }
20 | do {
21 | let requestBody = TogetherAIChatCompletionRequestBody(
22 | messages: [TogetherAIMessage(content: prompt, role: .user)],
23 | model: "meta-llama/Meta-Llama-3.1-8B-Instruct-Turbo"
24 | )
25 | let stream = try await togetherAIService.streamingChatCompletionRequest(body: requestBody)
26 | for try await chunk in stream {
27 | print(chunk.choices.first?.delta.content ?? "")
28 | }
29 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
30 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
31 | } catch {
32 | print("Could not create TogetherAI streaming chat completion: \(error.localizedDescription)")
33 | }
34 | }
35 |
36 | var body: some View {
37 | VStack {
38 | VStack{
39 | ContentUnavailableView(
40 | "Generate Text",
41 | systemImage: "doc.plaintext.fill",
42 | description: Text("Write a prompt below")
43 | )
44 | }
45 | .alert(isPresented: $showingAlert) {
46 | Alert(
47 | title: Text("Result"),
48 | message: Text("View streaming response in the Xcode console."),
49 | dismissButton: .default(Text("Close"))
50 | )
51 | }
52 |
53 | Spacer()
54 |
55 | VStack(spacing:12){
56 | TextField("Type a prompt", text:$prompt)
57 | .submitLabel(.go)
58 | .padding(12)
59 | .background(.white)
60 | .cornerRadius(8)
61 | .shadow(radius: 1)
62 | .onSubmit {
63 | Task{ try await generate() }
64 | }
65 | Button{
66 | showingAlert = true
67 | Task{ try await generate() }
68 | }label:{
69 | if isLoading {
70 | ProgressView()
71 | .controlSize(.regular)
72 | .frame(maxWidth:.infinity)
73 | } else {
74 | Text("Generate Text")
75 | .bold()
76 | .frame(maxWidth:.infinity)
77 | }
78 | }
79 | .controlSize(.large)
80 | .buttonStyle(.borderedProminent)
81 |             .disabled(isLoading)
82 | }
83 | }
84 | .padding()
85 | .navigationTitle("Streaming Chat Example")
86 | .navigationBarTitleDisplayMode(.inline)
87 | }
88 | }
89 |
90 | #Preview {
91 | StreamingChatView()
92 | }
93 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaFormView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // TriviaFormView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import SwiftUI
9 |
10 | struct TriviaFormView:View{
11 |
12 | enum FocusedField {
13 | case topic
14 | }
15 |
16 | /// Topic entered by the user in a SwiftUI text field
17 | @State private var topic = ""
18 | @Binding var triviaManager: TriviaManager?
19 | @FocusState private var focusedField: FocusedField?
20 |
21 | var body: some View{
22 |
23 | VStack(spacing:24){
24 | VStack{
25 | ZStack{
26 | Image(systemName: "doc.questionmark.fill")
27 | .foregroundColor(.blue)
28 | .rotationEffect(.degrees(-15))
29 | Image(systemName: "doc.questionmark.fill")
30 | .foregroundColor(.teal)
31 | .rotationEffect(.degrees(10))
32 | Image(systemName: "doc.questionmark")
33 | .foregroundColor(.white)
34 | Image(systemName: "doc.questionmark.fill")
35 | .overlay {
36 | LinearGradient(
37 | colors: [.orange, .red, .purple],
38 | startPoint: .topLeading,
39 | endPoint: .bottomTrailing
40 | )
41 | .mask(
42 | Image(systemName: "doc.questionmark.fill")
43 | .font(.system(size: 72))
44 | )
45 | }
46 | }
47 | .font(.system(size: 72))
48 | .padding(.vertical, 8)
49 |
50 | Text("Trivia Generator")
51 | .font(.system(size: 36, weight:.bold, design: .rounded))
52 | .multilineTextAlignment(.center)
53 | Text("Type a trivia theme below")
54 | .font(.system(size: 17, weight:.medium, design: .rounded))
55 | .foregroundColor(.secondary)
56 | }
57 |
58 | VStack{
59 | TextField("Ex. 80's movies...", text: $topic, axis: .vertical)
60 | .focused($focusedField, equals: .topic)
61 | .font(.system(size: 17, weight:.medium, design: .rounded))
62 | .lineLimit(...3)
63 | .textFieldStyle(.plain)
64 | .padding()
65 | .background(.white)
66 | .cornerRadius(8)
67 | .overlay(
68 | RoundedRectangle(cornerRadius: 8)
69 | .fill(.clear)
70 | .stroke(.separator)
71 | )
72 | .onAppear {
73 | focusedField = .topic
74 | }
75 | Button{
76 | withAnimation(){
77 | triviaManager = TriviaManager(topic: topic, numCards: 5)
78 | }
79 | }label:{
80 | Label("Generate", systemImage: "sparkles")
81 | .frame(maxWidth:.infinity)
82 | .font(.system(size: 17, weight:.bold, design: .rounded))
83 | .fontWeight(.bold)
84 | }
85 | .buttonStyle(.borderedProminent)
86 | .controlSize(.large)
87 | }
88 | }
89 | .padding()
90 | }
91 | }
92 |
--------------------------------------------------------------------------------
/Demos/Stickers/Stickers/StickerManager.swift:
--------------------------------------------------------------------------------
1 | //
2 | // StickerManager.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Lou Zell
6 | //
7 |
8 | import Foundation
9 | import UIKit
10 | import SwiftUI
11 |
12 | /// The default message to display in the result view of the sticker experience.
13 | private let defaultUserMessage = "Tap on the image to copy to your clipboard."
14 |
15 | @MainActor
16 | @Observable
17 | final class StickerManager {
18 |
19 | /// The user-entered prompt
20 | var prompt: String = ""
21 |
22 | /// The current background color to use for the view
23 | var currentColor: Color = .teal
24 |
25 | /// The generated sticker as a UIImage
26 | private(set) var image: UIImage?
27 |
28 | /// A flag to indicate that the sticker is being generated and we are waiting on I/O from OpenAI
29 | private(set) var isProcessing = false
30 |
31 | /// The user message to display along with the generated sticker
32 | private(set) var userMessage = defaultUserMessage
33 |
34 | /// The set of potential background colors for the view
35 | private var bgColors: Set = [.teal, .mint, .indigo, .red, .pink, .purple, .orange, .brown, .blue, .cyan, .green, .yellow, .gray]
36 |
37 | /// A few examples to get the user's wheels turning
38 | private let placeholderExamples = [
39 | "a cactus wearing a sombrero...",
40 | "a hedgehog riding a motorcycle...",
41 | "a kangaroo holding a basketball..."
42 | ]
43 | private var placeholderIndex = 0
44 |
45 | private let stickerDataLoader = StickerDataLoader()
46 |
47 | /// Changes the user message briefly away from the default text.
48 | /// After two seconds, the user message reverts to the default message
49 | func flashUserMessage(_ message: String) {
50 | withAnimation(.bouncy) {
51 | userMessage = message
52 | }
53 | Task { [weak self] in
54 | try await Task.sleep(for: .seconds(2))
55 | withAnimation(.bouncy) { [weak self] in
56 | self?.userMessage = defaultUserMessage
57 | }
58 | }
59 | }
60 |
61 | /// Creates a sticker from the current prompt stored in `self.prompt`
62 | func createSticker() {
63 | guard !self.isProcessing else {
64 | AppLogger.info("Already creating a sticker. Please wait")
65 | return
66 | }
67 |
68 | let prompt = self.prompt
69 | guard prompt.count > 0 else {
70 | AppLogger.error("Trying to submit a sticker without a prompt. This is a programmer error")
71 | return
72 | }
73 |
74 | self.isProcessing = true
75 |         Task {
76 |             defer { self.isProcessing = false }
77 |             self.image = try? await stickerDataLoader.create(fromPrompt: prompt)
78 |         }
79 | }
80 |
81 | /// Change the placeholder prompt
82 | func nextPlaceholder() {
83 | self.placeholderIndex = (self.placeholderIndex + 1) % self.placeholderExamples.count
84 | self.prompt = self.placeholderExamples[self.placeholderIndex]
85 | }
86 |
87 | /// Returns to the starting point of the sticker experience, e.g. where no sticker is in the UI
88 | func startOver() {
89 | self.image = nil
90 | self.nextPlaceholder()
91 | self.currentColor = self.bgColors.randomElement()!
92 | }
93 |
94 | /// Regenerate a sticker using the same prompt
95 | func regenerate() {
96 | self.image = nil
97 | self.createSticker()
98 | self.currentColor = self.bgColors.randomElement()!
99 | }
100 | }
101 |
102 |
--------------------------------------------------------------------------------
/Demos/Trivia/Trivia/TriviaCardView.swift:
--------------------------------------------------------------------------------
1 | //
2 | //  TriviaCardView.swift
3 | // AIProxyBootstrap
4 | //
5 | // Created by Todd Hamilton
6 | //
7 |
8 | import Foundation
9 | import SwiftUI
10 |
11 | @MainActor
12 | struct TriviaCardView: View {
13 |
14 | let triviaCardData: TriviaCardData
15 | @Binding var triviaManager: TriviaManager?
16 | @State var attempts: Int = 0
17 | @State var isCorrect = false
18 |
19 | private var questionNumber: Int {
20 | triviaCardData.position + 1
21 | }
22 |
23 | private var totalQuestions: Int {
24 | triviaManager?.triviaCards.count ?? 0
25 | }
26 |
27 | var body: some View{
28 | ZStack {
29 | if let model = triviaCardData.triviaQuestionModel {
30 | ZStack {
31 | TriviaAnswerPicker(
32 | questionModel: model,
33 | questionNumber: questionNumber,
34 | questionOf: totalQuestions
35 | ) { guessIndex in
36 | checkAnswer(forQuestion: model, withGuessedIndex: guessIndex)
37 | }
38 |
39 | if self.isCorrect {
40 | Rectangle()
41 | .fill(.black.opacity(0.4))
42 |                         .frame(maxWidth: .infinity, maxHeight: .infinity)
43 | .transition(.opacity)
44 | Image(systemName: "checkmark.circle.fill")
45 | .font(.system(size: 64))
46 | .foregroundColor(.green)
47 | .background(.white)
48 | .clipShape(Circle())
49 | .transition(.scale(0.5).combined(with: .opacity))
50 | }
51 | }
52 | } else {
53 | VStack(spacing:16) {
54 | ProgressView()
55 | Text("Generating questions")
56 | .font(.system(size: 15, weight:.regular, design:.rounded))
57 | .foregroundColor(.secondary)
58 | }
59 | .frame(maxHeight:.infinity)
60 | }
61 | }
62 | .frame(maxWidth: .infinity, maxHeight:480, alignment:.top)
63 | .background(Color(.systemBackground))
64 | .cornerRadius(14)
65 | .shadow(color: .black.opacity(0.14), radius: 1, x: 0, y: 1)
66 | .modifier(Shake(animatableData: CGFloat(attempts)))
67 | }
68 |
69 |
70 | private func checkAnswer(forQuestion question: TriviaQuestionModel, withGuessedIndex guessedIndex: Int) {
71 | triviaManager?.trackGuess(ofQuestion: question)
72 | if (question.correctAnswerIndex == guessedIndex) {
73 | withAnimation(.bouncy){
74 | isCorrect = true
75 | }
76 | Task {
77 | try await Task.sleep(for: .seconds(1))
78 | withAnimation(.bouncy) {
79 | triviaManager?.progress()
80 | }
81 | }
82 | } else {
83 | withAnimation(.default) {
84 | attempts += 1
85 | }
86 | }
87 | }
88 | }
89 |
90 |
91 | private struct Shake: GeometryEffect {
92 | var amount: CGFloat = 10
93 | var shakesPerUnit = 3
94 | var animatableData: CGFloat
95 |
96 | func effectValue(size: CGSize) -> ProjectionTransform {
97 | ProjectionTransform(CGAffineTransform(translationX:
98 | amount * sin(animatableData * .pi * CGFloat(shakesPerUnit)),
99 | y: 0))
100 | }
101 | }
102 |
--------------------------------------------------------------------------------
/AIProxyReplicate/AIProxyReplicate/ImageGenView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageGenView.swift
3 | // AIProxyReplicate
4 | //
5 | // Created by Todd Hamilton on 6/13/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct ImageGenView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var imageUrl: URL?
15 | @State private var isLoading = false
16 |
17 | func generate() async throws {
18 | isLoading = true // Start loading
19 | defer { isLoading = false }
20 | do {
21 | let input = ReplicateSDXLInputSchema(
22 | prompt: prompt
23 | )
24 | let output = try await replicateService.createSDXLImage(
25 | input: input
26 | )
27 | print("Done creating SDXL image: ", output.first ?? "")
28 | imageUrl = output.first
29 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
30 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
31 | } catch {
32 | print("Could not create SDXL image: \(error.localizedDescription)")
33 | }
34 | }
35 |
36 | var body: some View {
37 | VStack{
38 |
39 | VStack{
40 | if (imageUrl != nil) {
41 | AsyncImage(url: imageUrl) { phase in
42 | if let image = phase.image {
43 | image
44 | .resizable()
45 | .aspectRatio(contentMode: .fit)
46 | } else if phase.error != nil {
47 | Text("Failed to load image")
48 | .foregroundColor(.red)
49 | } else {
50 | ProgressView()
51 | }
52 | }
53 | } else{
54 | ContentUnavailableView(
55 | "Generate an image",
56 | systemImage: "photo.fill",
57 | description: Text("Write a prompt below")
58 | )
59 | }
60 | }
61 | .frame(maxHeight: .infinity)
62 |
63 | Spacer()
64 |
65 | VStack(spacing:12){
66 | TextField("Type a prompt", text:$prompt)
67 | .submitLabel(.go)
68 | .padding(12)
69 | .background(.white)
70 | .cornerRadius(8)
71 | .shadow(radius: 1)
72 | .onSubmit {
73 | Task{ try await generate() }
74 | }
75 | Button{
76 | Task{ try await generate() }
77 | }label:{
78 | if isLoading {
79 | ProgressView()
80 | .controlSize(.regular)
81 | .frame(maxWidth:.infinity)
82 | } else {
83 |                     Text("Generate Image")
84 | .bold()
85 | .frame(maxWidth:.infinity)
86 | }
87 | }
88 | .controlSize(.large)
89 | .buttonStyle(.borderedProminent)
90 |             .disabled(isLoading)
91 | }
92 | }
93 | .padding()
94 | .navigationTitle("Generate Image")
95 | .navigationBarTitleDisplayMode(.inline)
96 | }
97 | }
98 |
99 | #Preview {
100 | ImageGenView()
101 | }
102 |
--------------------------------------------------------------------------------
/AIProxyStabilityAI/AIProxyStabilityAI/ImageGenView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // ImageGenView.swift
3 | // AIProxyStabilityAI
4 | //
5 | // Created by Todd Hamilton on 8/13/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct ImageGenView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var image: UIImage? = nil
15 | @State private var isLoading = false
16 |
17 | func generate() async throws {
18 | isLoading = true // Start loading
19 | defer { isLoading = false }
20 | do {
21 | let body = StabilityAIUltraRequestBody(prompt: prompt)
22 |
23 | // This demo is of text-to-image, which only requires a prompt
24 | // To use image-to-image the following parameters are required:
25 | // prompt - text to generate the image from
26 | // image - the image to use as the starting point for the generation
27 | // strength - controls how much influence the image parameter has on the output image
28 | // mode - must be set to image-to-image
29 | // Learn more: https://platform.stability.ai/docs/api-reference#tag/Generate
30 |
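            // A hypothetical image-to-image call might look like the sketch below.
            // The field names follow the Stability API docs linked above; the exact
            // property names on the AIProxy request body may differ, so treat this
            // as illustrative only (`startingImageData` is a placeholder):
            //
            //   let i2iBody = StabilityAIUltraRequestBody(
            //       prompt: prompt,              // text to guide the generation
            //       image: startingImageData,    // Data of the source image
            //       mode: "image-to-image",
            //       strength: 0.6                // 0...1, per the Stability docs above
            //   )
            //   let i2iResponse = try await stabilityService.ultraRequest(body: i2iBody)
            //   image = UIImage(data: i2iResponse.imageData)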
31 | let response = try await stabilityService.ultraRequest(body: body)
32 | image = UIImage(data: response.imageData)
33 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
34 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
35 | } catch {
36 | print(error.localizedDescription)
37 | }
38 | }
39 |
40 | var body: some View {
41 | VStack{
42 |
43 | VStack{
44 |                 if let image {
45 |                     Image(uiImage: image)
46 | .resizable()
47 | .aspectRatio(contentMode: .fit)
48 | .frame(maxHeight: UIScreen.main.bounds.width)
49 | } else{
50 | ContentUnavailableView(
51 | "Generate an image",
52 | systemImage: "photo.fill",
53 | description: Text("Write a prompt below")
54 | )
55 | }
56 | }
57 | .frame(maxHeight: .infinity)
58 |
59 | Spacer()
60 |
61 | VStack(spacing:12){
62 | TextField("Type a prompt", text:$prompt)
63 | .submitLabel(.go)
64 | .padding(12)
65 | .background(Color(.systemBackground))
66 | .cornerRadius(8)
67 | .shadow(color:.primary, radius: 1)
68 | .onSubmit {
69 | Task{ try await generate() }
70 | }
71 | Button{
72 | Task{ try await generate() }
73 | }label:{
74 | if isLoading {
75 | ProgressView()
76 | .controlSize(.regular)
77 | .frame(maxWidth:.infinity)
78 | } else {
79 | Text("Generate Image")
80 | .bold()
81 | .frame(maxWidth:.infinity)
82 | }
83 | }
84 | .controlSize(.large)
85 | .buttonStyle(.borderedProminent)
86 |             .disabled(isLoading)
87 | }
88 | }
89 | .padding()
90 | .navigationTitle("Generate Image")
91 | .navigationBarTitleDisplayMode(.inline)
92 | }
93 | }
94 |
95 | #Preview {
96 | ImageGenView()
97 | }
98 |
--------------------------------------------------------------------------------
/AIProxyOpenAI/AIProxyOpenAI/DalleView.swift:
--------------------------------------------------------------------------------
1 | //
2 | // DalleView.swift
3 | // AIProxyOpenAI
4 | //
5 | // Created by Todd Hamilton on 8/13/24.
6 | //
7 |
8 | import SwiftUI
9 | import AIProxy
10 |
11 | struct DalleView: View {
12 |
13 | @State private var prompt = ""
14 | @State private var imageUrl: String?
15 | @State private var isLoading = false
16 |
17 | func generate() async throws {
18 | isLoading = true // Start loading
19 | defer { isLoading = false }
20 | do {
21 | let requestBody = OpenAICreateImageRequestBody(
22 | prompt: prompt,
23 | model: "dall-e-3"
24 | )
25 | let response = try await openAIService.createImageRequest(body: requestBody)
26 |             imageUrl = response.data.first?.url?.absoluteString
27 | // print(response.data.first?.url ?? "")
28 | } catch AIProxyError.unsuccessfulRequest(let statusCode, let responseBody) {
29 | print("Received non-200 status code: \(statusCode) with response body: \(responseBody)")
30 | } catch {
31 | print(error.localizedDescription)
32 | }
33 | }
34 |
35 | var body: some View {
36 | VStack{
37 |
38 | VStack{
39 |                 if let imageUrl {
40 |                     AsyncImage(url: URL(string: imageUrl)) { phase in
41 | if let image = phase.image {
42 | image
43 | .resizable()
44 | .aspectRatio(contentMode: .fit)
45 | } else if phase.error != nil {
46 | Text("Failed to load image")
47 | .foregroundColor(.red)
48 | } else {
49 | ProgressView()
50 | }
51 | }
52 | } else{
53 | ContentUnavailableView(
54 | "Generate an image",
55 | systemImage: "photo.fill",
56 | description: Text("Write a prompt below")
57 | )
58 | }
59 | }
60 | .frame(maxHeight: .infinity)
61 |
62 | Spacer()
63 |
64 | VStack(spacing:12){
65 | TextField("Type a prompt", text:$prompt)
66 | .submitLabel(.go)
67 | .padding(12)
68 | .background(Color(.systemBackground))
69 | .cornerRadius(8)
70 | .shadow(color:.primary, radius: 1)
71 | .onSubmit {
72 | Task{ try await generate() }
73 | }
74 | Button{
75 | Task{ try await generate() }
76 | }label:{
77 | if isLoading {
78 | ProgressView()
79 | .controlSize(.regular)
80 | .frame(maxWidth:.infinity)
81 | } else {
82 |                     Text("Generate Image")
83 | .bold()
84 | .frame(maxWidth:.infinity)
85 | }
86 | }
87 | .controlSize(.large)
88 | .buttonStyle(.borderedProminent)
89 |             .disabled(isLoading)
90 | }
91 | }
92 | .padding()
93 | .navigationTitle("Generate Image")
94 | .navigationBarTitleDisplayMode(.inline)
95 | }
96 | }
97 |
98 | #Preview {
99 | DalleView()
100 | }
101 |
--------------------------------------------------------------------------------