├── .gitignore ├── .babelrc.json ├── .DS_Store ├── .eslintignore ├── tsconfig.json ├── index.ts ├── src ├── config.ts ├── util.ts ├── enum.ts ├── types.ts ├── client.ts └── class.ts ├── rollup.config.mjs ├── package.json └── README.md /.gitignore: -------------------------------------------------------------------------------- 1 | dist/ 2 | node_modules/ -------------------------------------------------------------------------------- /.babelrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "presets": ["@babel/preset-env"] 3 | } 4 | -------------------------------------------------------------------------------- /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/omniinfer/javascript-sdk/HEAD/.DS_Store -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | test/** 2 | lib/** 3 | types/** 4 | rollup.config.mjs 5 | rollup.config.js 6 | src/examples/** -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "module": "ES6", 4 | "strict": true, 5 | "declaration": true, 6 | "esModuleInterop": true, 7 | "moduleResolution": "Node", 8 | "outDir": "./dist", 9 | }, 10 | "include": ["src/**/*", "index.ts"], 11 | "exclude": ["src/examples"] 12 | } 13 | -------------------------------------------------------------------------------- /index.ts: -------------------------------------------------------------------------------- 1 | export { 2 | setOmniinferKey, 3 | getModels, 4 | img2img, 5 | txt2Img, 6 | txt2ImgSync, 7 | img2imgSync, 8 | upscale, 9 | upscaleSync, 10 | } from "./src/client"; 11 | 12 | export { OmniinferSDK } from "./src/class"; 13 | 14 | export { 15 | Txt2ImgRequest, 16 | Txt2ImgResponse, 17 | Img2imgRequest, 18 | Img2imgResponse, 19 | GetModelsResponse, 20 | SyncConfig, 21 | UpscaleResponse, 22 | UpscalseRequest, 23 | } from "./src/types"; 24 | 25 | export { ControlNetPreprocessor, ControlNetMode, ModelType } from "./src/enum"; 26 | -------------------------------------------------------------------------------- /src/config.ts: -------------------------------------------------------------------------------- 1 | import { OmniinferConfig } from "./types"; 2 | import axios from "axios"; 3 | 4 | export const Omniinfer_Config: OmniinferConfig = { 5 | BASE_URL: "https://api.omniinfer.io", 6 | key: undefined, 7 | }; 8 | 9 | export const Omniinfer_axiosInstance = axios.create({ 10 | baseURL: Omniinfer_Config.BASE_URL, 11 | headers: { 12 | "Content-Type": "application/json", 13 | "X-Omni-Source": "Omniinfer", 14 | ...(Omniinfer_Config.key ? 
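      // key is undefined at module load; Set_Omniinfer_axiosInstance_Key stores it and sets the default X-Omni-Key header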
{ "X-Omni-Key": Omniinfer_Config.key } : {}), 15 | }, 16 | }); 17 | 18 | export const Set_Omniinfer_axiosInstance_Key = (key: string) => { 19 | Omniinfer_Config.key = key; 20 | Omniinfer_axiosInstance.defaults.headers["X-Omni-Key"] = key; 21 | }; 22 | -------------------------------------------------------------------------------- /rollup.config.mjs: -------------------------------------------------------------------------------- 1 | import resolve from "@rollup/plugin-node-resolve"; 2 | import babel from "@rollup/plugin-babel"; 3 | import commonjs from "@rollup/plugin-commonjs"; 4 | import typescript from "@rollup/plugin-typescript"; 5 | import nodePolyfills from "rollup-plugin-polyfill-node"; 6 | import json from "@rollup/plugin-json"; 7 | import pkg from "./package.json" assert { type: "json" }; 8 | 9 | export default { 10 | input: "index.ts", 11 | output: { 12 | name: "@omniinfer/sdk", 13 | file: pkg.browser, 14 | format: "umd", 15 | sourcemap: true, 16 | }, 17 | plugins: [ 18 | resolve(), 19 | commonjs(), 20 | typescript(), 21 | json(), 22 | nodePolyfills(), 23 | babel({ babelHelpers: "bundled" }), 24 | ], 25 | external: ['axios'] 26 | }; 27 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "omniinfer-sdk", 3 | "browser": "dist/index.js", 4 | "version": "1.0.5", 5 | "keywords": [ 6 | "omniinfer", 7 | "sdk", 8 | "stable diffusion api", 9 | "typescript", 10 | "es" 11 | ], 12 | "description": "omniinfer api sdk ", 13 | "main": "dist/index", 14 | "files": [ 15 | "dist" 16 | ], 17 | "scripts": { 18 | "build": "rollup --config" 19 | }, 20 | "author": "", 21 | "license": "ISC", 22 | "devDependencies": { 23 | "@babel/core": "^7.22.9", 24 | "@babel/preset-env": "^7.22.9", 25 | "@rollup/plugin-babel": "^6.0.3", 26 | "@rollup/plugin-commonjs": "^25.0.3", 27 | "@rollup/plugin-json": "^6.0.0", 28 | "@rollup/plugin-node-resolve": "^15.1.0", 29 | "@rollup/plugin-typescript": "^11.1.2", 30 | "eslint": "^8.46.0", 31 | "rollup": "^3.27.1", 32 | "rollup-plugin-polyfill-node": "^0.12.0", 33 | "typescript": "^5.1.6" 34 | }, 35 | "dependencies": { 36 | "axios": "^1.4.0" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | import { Lora } from "./types"; 3 | 4 | export function isNodeEnvironment() { 5 | return typeof window === "undefined"; 6 | } 7 | 8 | export function generateLoraString(params: Array | undefined) { 9 | if (!Array.isArray(params) || params.length === 0) { 10 | return []; 11 | } 12 | return params.map((item) => { 13 | return ``; 14 | }); 15 | } 16 | 17 | export function addLoraPrompt(array: string[], prompt: string) { 18 | if (!Array.isArray(array) || array.length === 0) { 19 | return prompt; 20 | } 21 | array.forEach((str) => { 22 | if (!prompt.includes(str)) { 23 | prompt = prompt + str; 24 | } 25 | }); 26 | return prompt; 27 | } 28 | 29 | export function readImgtoBase64(url: string): Promise { 30 | return axios.get(url, { responseType: "arraybuffer" }).then((response) => { 31 | if (isNodeEnvironment()) { 32 | const buffer = Buffer.from(response.data); 33 | return `data:${response.headers["content-type"]};base64,${buffer.toString( 34 | "base64" 35 | )}`; 36 | } else { 37 | // For browsers 38 | const base64 = btoa( 39 | new Uint8Array(response.data).reduce( 40 | (data, 
38 |       const base64 = btoa(
39 |         new Uint8Array(response.data).reduce(
40 |           (data, byte) => data + String.fromCharCode(byte),
41 |           ""
42 |         )
43 |       );
44 |       return `data:${response.headers["content-type"]};base64,${base64}`;
45 |     }
46 |   });
47 | }
--------------------------------------------------------------------------------
/src/enum.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright (c) Omniinfer
3 |  */
4 | 
5 | export const ERROR_BAD_REQUEST = "Bad Request";
6 | export const ERROR_UNAUTHORIZED = "Unauthorized";
7 | export const ERROR_FORBIDDEN = "Forbidden";
8 | export const ERROR_NOT_FOUND = "Not Found";
9 | export const ERROR_METHOD_NOT_ALLOWED = "Method Not Allowed";
10 | export const ERROR_SERVER_ERROR = "Internal Server Error";
11 | export const ERROR_GENERATE_IMG_FAILED = "Generate Image Failed";
12 | 
13 | export const ControlNetPreprocessor = {
14 |   NULL: "none",
15 |   CANNY: "canny",
16 |   DEPTH: "depth",
17 |   DEPTH_LERES: "depth_leres",
18 |   DEPTH_LERES_PLUS_PLUS: "depth_leres++",
19 |   HED: "hed",
20 |   HED_SAFE: "hed_safe",
21 |   MEDIAPIPE_FACE: "mediapipe_face",
22 |   MLSD: "mlsd",
23 |   NORMAL_MAP: "normal_map",
24 |   OPENPOSE: "openpose",
25 |   OPENPOSE_HAND: "openpose_hand",
26 |   OPENPOSE_FACE: "openpose_face",
27 |   OPENPOSE_FACEONLY: "openpose_faceonly",
28 |   OPENPOSE_FULL: "openpose_full",
29 |   CLIP_VISION: "clip_vision",
30 |   COLOR: "color",
31 |   PIDINET: "pidinet",
32 |   PIDINET_SAFE: "pidinet_safe",
33 |   PIDINET_SKETCH: "pidinet_sketch",
34 |   PIDINET_SCRIBBLE: "pidinet_scribble",
35 |   SCRIBBLE_XDOG: "scribble_xdog",
36 |   SCRIBBLE_HED: "scribble_hed",
37 |   SEGMENTATION: "segmentation",
38 |   THRESHOLD: "threshold",
39 |   DEPTH_ZOE: "depth_zoe",
40 |   NORMAL_BAE: "normal_bae",
41 |   ONEFORMER_COCO: "oneformer_coco",
42 |   ONEFORMER_ADE20K: "oneformer_ade20k",
43 |   LINEART: "lineart",
44 |   LINEART_COARSE: "lineart_coarse",
45 |   LINEART_ANIME: "lineart_anime",
46 |   LINEART_STANDARD: "lineart_standard",
47 |   SHUFFLE: "shuffle",
48 |   TILE_RESAMPLE: "tile_resample",
49 |   INVERT: "invert",
50 |   LINEART_ANIME_DENOISE: "lineart_anime_denoise",
51 |   REFERENCE_ONLY: "reference_only",
52 |   REFERENCE_ADAIN: "reference_adain",
53 |   REFERENCE_ADAIN_PLUS_ATTN: "reference_adain+attn",
54 |   INPAINT: "inpaint",
55 |   INPAINT_ONLY: "inpaint_only",
56 |   INPAINT_ONLY_PLUS_LAMA: "inpaint_only+lama",
57 |   TILE_COLORFIX: "tile_colorfix",
58 |   TILE_COLORFIX_PLUS_SHARP: "tile_colorfix+sharp",
59 | };
60 | 
61 | export const ControlNetMode = {
62 |   BALANCED: 0,
63 |   PROMPT_IMPORTANCE: 1,
64 |   CONTROLNET_IMPORTANCE: 2,
65 | };
66 | 
67 | export const ModelType = {
68 |   CHECKPOINT: "checkpoint",
69 |   LORA: "lora",
70 |   VAE: "vae",
71 |   CONTROLNET: "controlnet",
72 |   TEXT_INVERSION: "text_inversion",
73 | };
74 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Omniinfer JS SDK
2 | 
3 | This SDK is based on the official [API documentation](https://docs.omniinfer.io/).
4 | 
5 | **Join our Discord server for help:**
6 | 
7 | [![](https://dcbadge.vercel.app/api/server/nzqq8UScpx)](https://discord.gg/nzqq8UScpx)
8 | 
9 | ## Installation [via npm](https://www.npmjs.com/package/omniinfer-sdk)
10 | 
11 | ```bash
12 | npm i omniinfer-sdk
13 | ```
14 | 
15 | ## Quick Start
16 | 
17 | **To get an API key, see [https://docs.omniinfer.io/get-started](https://docs.omniinfer.io/get-started/).**
18 | 
19 | **We offer two ways to use the SDK.**
20 | 
21 | ### 1. Call the exported functions
22 | 
23 | ```javascript
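// set your API key once; the SDK sends it as the X-Omni-Key header on every request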
24 | import { txt2ImgSync, setOmniinferKey } from "omniinfer-sdk";
25 | 
26 | setOmniinferKey("your api key");
27 | 
28 | txt2ImgSync({
29 |   model_name: "",
30 |   prompt: "a girl",
31 | })
32 |   .then((res) => {
33 |     console.log("imgs", res);
34 |   })
35 |   .catch((err) => {
36 |     console.log(err);
37 |   });
38 | ```
39 | 
40 | ### 2. Use the `OmniinferSDK` class
41 | 
42 | ```javascript
43 | import { OmniinferSDK } from "omniinfer-sdk";
44 | 
45 | const sdk = new OmniinferSDK("your api key");
46 | 
47 | sdk
48 |   .txt2ImgSync(params)
49 |   .then((res) => {
50 |     console.log("imgs", res);
51 |   })
52 |   .catch((err) => {
53 |     alert(err);
54 |   });
55 | ```
56 | 
57 | ## Examples [SDK Online DEMO](https://stackblitz.com/edit/stackblitz-starters-1pddy4?file=pages%2Findex.js)
58 | 
59 | ### Function list
60 | 
61 | - setOmniinferKey
62 | - getModels
63 | - img2img
64 | - txt2Img
65 | - txt2ImgSync
66 | - img2imgSync
67 | - upscale
68 | - upscaleSync
69 | 
70 | ### Usage in React
71 | 
72 | ```javascript
73 | import * as React from 'react';
74 | import { txt2ImgSync } from 'omniinfer-sdk';
75 | import './style.css';
76 | 
77 | const { useState, useCallback } = React;
78 | 
79 | export default function App() {
80 |   const [imgs, setImgs] = useState([]);
81 |   const [loading, setLoading] = useState(false);
82 |   const generateImg = useCallback(() => {
83 |     setLoading(true);
84 |     txt2ImgSync({
85 |       model_name: '',
86 |       prompt: 'a girl',
87 |     })
88 |       .then((res) => {
89 |         setImgs(res);
90 |         setLoading(false);
91 |       })
92 |       .catch((err) => {
93 |         console.log(err);
94 |         setLoading(false);
95 |       });
96 |   }, []);
97 |   return (
98 |     <div>
99 |       <div>Text to image</div>
100 |       <button
101 |         type="button"
102 |         disabled={loading}
103 |         onClick={generateImg}
104 |       >
105 |         {loading ? 'generating...' : 'generate'}
106 |       </button>
107 |       <div>
108 |         {imgs.map((one) => (
109 |           <img
110 |             key={one}
111 |             src={one}
112 |             alt=""
113 |             style={{
114 |               width: 200,
115 |             }}
116 |           />
117 |         ))}
118 |       </div>
119 |     </div>
120 |   );
121 | }
122 | ```
123 | 
124 | ### Model Search
125 | 
126 | When you use the models interface, check `dependency_status` and `download_status`: **a model is only available when both are 1.**
127 | 
128 | We recommend keeping the model list in memory, e.g. in a Redux store.
129 | 
130 | ```javascript
131 | getModels().then((res) => {
132 |   console.log(res.models.slice(0, 100));
133 | });
134 | ```
135 | 
136 | ### Lora Usage
137 | 
138 | ```javascript
139 | txt2ImgSync({
140 |   model_name: "majicmixRealistic_v2.safetensors",
141 |   prompt:
142 |     "Best quality, masterpiece, ultra high res, (photorealistic:1.4), raw photo, 1girl, offshoulder, in the dark, deep shadow, low key, cold light",
143 |   negative_prompt:
144 |     "ng_deepnegative_v1_75t, badhandv4 (worst quality:2), (low quality:2), (normal quality:2), lowres, bad anatomy, bad hands, normal quality, ((monochrome)), ((grayscale))",
145 |   sampler_name: "DPM++ 2M Karras",
146 |   lora: [
147 |     {
148 |       sd_name: "film",
149 |       weight: 0.4,
150 |     },
151 |   ],
152 | }).then((res) => {
153 |   console.log(res);
154 | });
155 | ```
156 | 
157 | ### ControlNet QRCode
158 | 
159 | ```javascript
160 | txt2ImgSync({
161 |   prompt:
162 |     "a beautiful butterfly in colorful flowers, best quality, best details, masterpiece",
163 |   model_name: "",
164 |   steps: 30,
165 |   controlnet_units: [
166 |     {
167 |       input_image: imgbase64,
168 |       module: ControlNetPreprocessor.NULL,
169 |       control_mode: ControlNetMode.BALANCED,
170 |       model: "control_v1p_sd15_qrcode_monster_v2",
171 |       weight: 2.0,
172 |     },
173 |   ],
174 | }).then((res) => {
175 |   console.log(res);
176 | });
177 | ```
178 | 
179 | ### Upscale
180 | 
181 | ```javascript
182 | upscaleSync({
183 |   image: base64String,
184 |   resize_mode: 0,
185 |   upscaling_resize: 2,
186 | })
187 |   .then((res) => {
188 |     if (res) {
189 |       setImg(res[0]);
190 |     }
191 |     setLoading(false);
192 |   })
193 |   .catch((err) => {
194 |     alert(err);
195 |   });
196 | ```
197 | 
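198 | ### Img2img
199 | 
200 | `img2imgSync` follows the same call pattern. A minimal sketch, assuming `base64String` holds your source image (the other values are only examples):
201 | 
202 | ```javascript
203 | img2imgSync({
204 |   model_name: "",
205 |   prompt: "a girl",
206 |   init_images: [base64String],
207 |   denoising_strength: 0.6,
208 | }).then((res) => {
209 |   console.log(res);
210 | });
211 | ```
212 | 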
--------------------------------------------------------------------------------
/src/types.ts:
--------------------------------------------------------------------------------
1 | /**
2 |  * Copyright (c) Omniinfer
3 |  *
4 |  * TypeScript type definitions for Omniinfer
5 |  */
6 | 
7 | export type OmniinferKey = string | undefined;
8 | 
9 | export interface OmniinferConfig {
10 |   BASE_URL: string;
11 |   key: OmniinferKey;
12 | }
13 | 
14 | // Request Code Enum
15 | export enum RequestCode {
16 |   SUCCESS = 0,
17 | }
18 | 
19 | // getModels dependency status
20 | export enum ModelStatus {
21 |   READY = 1,
22 |   UNREADY = 0,
23 | }
24 | 
25 | export enum ModelType {
26 |   Checkpoint = "checkpoint",
27 |   Lora = "lora",
28 | }
29 | 
30 | export type Model = {
31 |   name: string;
32 |   hash: string;
33 |   sd_name: string;
34 |   third_source: string | undefined;
35 |   download_status: ModelStatus;
36 |   download_name: string;
37 |   dependency_status: ModelStatus;
38 |   type: ModelType;
39 |   civitai_link: string | undefined;
40 |   civitai_model_id: number | undefined;
41 |   civitai_version_id: number | undefined;
42 |   civitai_nsfw: boolean | undefined;
43 |   civitai_download_url: string | undefined;
44 |   civitai_tags: string | undefined;
45 |   civitai_download_count: number | undefined;
46 |   civitai_favorite_count: number | undefined;
47 |   civitai_comment_count: number | undefined;
48 |   civitai_rating_count: number | undefined;
49 |   civitai_rating: number | undefined;
50 |   omni_used_count: number | undefined;
51 |   civitai_image_url: string | undefined;
52 |   civitai_image_nsfw: string | undefined;
53 |   civitai_origin_image_url: string | undefined;
54 |   civitai_image_prompt: string | undefined;
55 |   civitai_image_negative_prompt: string | undefined;
56 |   civitai_image_sampler_name: string | undefined;
57 |   civitai_image_height: number | undefined;
58 |   civitai_image_width: number | undefined;
59 |   civitai_image_steps: number | undefined;
60 |   civitai_image_cfg_scale: number | undefined;
61 |   civitai_image_seed: number | undefined;
62 | };
63 | 
64 | export type GetModelsResponse = {
65 |   code: RequestCode;
66 |   msg: string;
67 |   data: {
68 |     models: Array<Model>;
69 |   };
70 | };
71 | 
72 | export type Lora = {
73 |   sd_name: string;
74 |   weight: number;
75 | };
76 | 
77 | export type ControlnetUnit = {
78 |   model: string;
79 |   weight: number | undefined;
80 |   control_mode: 0 | 1 | 2;
81 |   module: string;
82 |   input_image: string;
83 |   mask?: string | undefined;
84 |   resize_mode?: number | undefined;
85 |   lowvram?: boolean | undefined;
86 |   processor_res?: number | undefined;
87 |   threshold_a?: number | undefined;
88 |   threshold_b?: number | undefined;
89 |   guidance_start?: number | undefined;
90 |   guidance_end?: number | undefined;
91 |   pixel_perfect?: boolean | undefined;
92 |   [key: string]: string | number | undefined | boolean;
93 | };
94 | 
95 | export type Txt2ImgRequest = {
96 |   model_name: string;
97 |   prompt: string;
98 |   negative_prompt?: string | undefined;
99 |   sampler_name?: string | undefined;
100 |   steps?: number | undefined;
101 |   cfg_scale?: number | undefined;
102 |   seed?: number | undefined;
103 |   width?: number | undefined;
104 |   height?: number | undefined;
105 |   n_iter?: number | undefined;
106 |   batch_size?: number | undefined;
107 |   lora?: Array<Lora> | undefined;
108 |   controlnet_units?: Array<ControlnetUnit> | undefined;
109 |   sd_vae?: string | undefined;
110 |   clip_skip?: number | undefined;
111 |   hr_upscaler?: string | undefined;
112 |   hr_scale?: number | undefined;
113 |   hr_resize_x?: number | undefined;
114 |   hr_resize_y?: number | undefined;
115 |   restore_faces?: boolean | undefined;
116 |   enable_hr?: boolean | undefined;
117 |   [key: string]: any;
118 | };
119 | 
120 | export type Txt2ImgResponse = {
121 |   code: RequestCode;
122 |   msg: string;
123 |   data: {
124 |     task_id: string;
125 |   };
126 | };
127 | 
128 | export type SyncConfig = {
129 |   // wait time between progress polls, default 1000 ms
130 |   interval?: number;
131 |   // img result format, "base64" or "url", default url
132 |   img_type?: "base64" | "url";
133 | };
134 | 
135 | export type Img2imgRequest = {
136 |   model_name: string;
137 |   prompt: string;
138 |   negative_prompt?: string | undefined;
139 |   sampler_name?: string | undefined;
140 |   steps?: number | undefined;
141 |   cfg_scale?: number | undefined;
142 |   seed?: number | undefined;
143 |   width?: number | undefined;
144 |   height?: number | undefined;
145 |   n_iter?: number | undefined;
146 |   batch_size?: number | undefined;
147 |   restore_faces?: boolean | undefined;
148 |   denoising_strength?: number | undefined;
149 |   init_images: Array<string>;
150 |   sd_vae?: string | undefined;
151 |   clip_skip?: number | undefined;
152 |   mask?: string | undefined;
153 |   resize_mode?: number | undefined;
154 |   image_cfg_scale?: number | undefined;
155 |   mask_blur?: number | undefined;
156 |   inpainting_fill?: number | undefined;
157 |   inpaint_full_res?: number | undefined;
158 |   inpaint_full_res_padding?: number | undefined;
159 |   inpainting_mask_invert?: number | undefined;
160 |   initial_noise_multiplier?: number | undefined;
161 |   lora?: Array<Lora> | undefined;
162 | };
163 | 
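// like txt2img, img2img returns only a task_id; results are fetched by polling the progress endpoint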
164 | export type Img2imgResponse = {
165 |   code: RequestCode;
166 |   msg: string;
167 |   data: {
168 |     task_id: string;
169 |   };
170 | };
171 | 
172 | export type UpscaleBaseAttributes = {
173 |   image: string;
174 |   resize_mode: 0 | 1;
175 |   upscaler_1?: string | undefined;
176 |   upscaler_2?: string | undefined;
177 |   upscaling_crop?: boolean | undefined;
178 |   extras_upscaler_2_visibility?: number | undefined;
179 |   gfpgan_visibility?: number | undefined;
180 |   codeformer_visibility?: number | undefined;
181 |   codeformer_weight?: string | undefined;
182 |   [key: string]: number | string | undefined | boolean;
183 | };
184 | 
185 | type ResizeMode1Attributes = UpscaleBaseAttributes & {
186 |   resize_mode: 1;
187 |   upscaling_resize_w: number;
188 |   upscaling_resize_h: number;
189 | };
190 | 
191 | type ResizeMode0Attributes = UpscaleBaseAttributes & {
192 |   resize_mode: 0;
193 |   upscaling_resize: number;
194 | };
195 | 
196 | export type UpscalseRequest = ResizeMode1Attributes | ResizeMode0Attributes;
197 | 
198 | export type UpscaleResponse = {
199 |   code: RequestCode;
200 |   msg: string;
201 |   data: {
202 |     task_id: string;
203 |   };
204 | };
205 | 
206 | export type ProgressRequest = {
207 |   task_id: string;
208 | };
209 | 
210 | export type ProgressResponse = {
211 |   code: RequestCode;
212 |   msg: string;
213 |   data: {
214 |     status: number;
215 |     progress: number;
216 |     eta_relative: number;
217 |     imgs: Array<string>;
218 |     info: string | undefined;
219 |     failed_reason: string | undefined;
220 |     current_images?: string | undefined | null | Array<string>;
221 |   };
222 | };
223 | 
--------------------------------------------------------------------------------
/src/client.ts:
--------------------------------------------------------------------------------
1 | import { ERROR_GENERATE_IMG_FAILED } from "./enum";
2 | import {
3 |   GetModelsResponse,
4 |   Img2imgRequest,
5 |   ProgressRequest,
6 |   ProgressResponse,
7 |   RequestCode,
8 |   SyncConfig,
9 |   Txt2ImgRequest,
10 |   Txt2ImgResponse,
11 |   UpscaleResponse,
12 |   UpscalseRequest,
13 | } from "./types";
14 | import { addLoraPrompt, generateLoraString, readImgtoBase64 } from "./util";
15 | import {
16 |   Omniinfer_axiosInstance,
17 |   Set_Omniinfer_axiosInstance_Key,
18 | } from "./config";
19 | 
20 | export function setOmniinferKey(key: string) {
21 |   Set_Omniinfer_axiosInstance_Key(key);
22 | }
23 | 
24 | export function httpFetch({
25 |   url = "",
26 |   method = "GET",
27 |   data = undefined,
28 |   query = undefined,
29 | }: {
30 |   url: string;
31 |   method?: string;
32 |   data?: Record<string, any> | undefined;
33 |   query?: Record<string, any> | undefined;
34 | }) {
35 |   return Omniinfer_axiosInstance({
36 |     url: url,
37 |     method: method,
38 |     data: data,
39 |     params: query,
40 |   })
41 |     .then((response) => response.data)
42 |     .catch((error) => {
43 |       throw new Error(error.response ? error.response.data : error.message);
44 |     });
45 | }
46 | 
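// GET /v2/models returns every model the API knows about; per the README, only use entries whose download_status and dependency_status are both 1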
47 | export function getModels() {
48 |   return httpFetch({
49 |     url: "/v2/models",
50 |   }).then((res: GetModelsResponse) => {
51 |     if (res.code !== RequestCode.SUCCESS) {
52 |       throw new Error(res.msg);
53 |     }
54 |     return res.data;
55 |   });
56 | }
57 | 
58 | export function txt2Img(params: Txt2ImgRequest) {
59 |   return httpFetch({
60 |     url: "/v2/txt2img",
61 |     method: "POST",
62 |     data: {
63 |       ...params,
64 |       prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
65 |     },
66 |   }).then((res: Txt2ImgResponse) => {
67 |     if (res.code !== RequestCode.SUCCESS) {
68 |       throw new Error(res.msg);
69 |     }
70 |     return res.data;
71 |   });
72 | }
73 | 
74 | export function img2img(params: Img2imgRequest) {
75 |   return httpFetch({
76 |     url: "/v2/img2img",
77 |     method: "POST",
78 |     data: {
79 |       ...params,
80 |       prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
81 |     },
82 |   }).then((res: Txt2ImgResponse) => {
83 |     if (res.code !== RequestCode.SUCCESS) {
84 |       throw new Error(res.msg);
85 |     }
86 |     return res.data;
87 |   });
88 | }
89 | 
90 | export function upscale(params: UpscalseRequest) {
91 |   return httpFetch({
92 |     url: "/v2/upscale",
93 |     method: "POST",
94 |     data: {
95 |       ...params,
96 |       upscaler_1: params.upscaler_1 ?? "R-ESRGAN 4x+",
97 |       upscaler_2: params.upscaler_2 ?? "R-ESRGAN 4x+",
98 |     },
99 |   }).then((res: UpscaleResponse) => {
100 |     if (res.code !== RequestCode.SUCCESS) {
101 |       throw new Error(res.msg);
102 |     }
103 |     return res.data;
104 |   });
105 | }
106 | 
107 | export function progress(params: ProgressRequest) {
108 |   return httpFetch({
109 |     url: "/v2/progress",
110 |     method: "GET",
111 |     query: {
112 |       ...params,
113 |     },
114 |   }).then((res: ProgressResponse) => {
115 |     if (res.code !== RequestCode.SUCCESS) {
116 |       throw new Error(res.msg);
117 |     }
118 |     return res.data;
119 |   });
120 | }
121 | 
122 | export function txt2ImgSync(
123 |   params: Txt2ImgRequest,
124 |   config?: SyncConfig
125 | ): Promise<any> {
126 |   return new Promise((resolve, reject) => {
127 |     txt2Img({
128 |       ...params,
129 |       prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
130 |     })
131 |       .then((res) => {
132 |         if (res && res.task_id) {
133 |           const timer = setInterval(async () => {
134 |             try {
135 |               const progressResult = await progress({ task_id: res.task_id });
136 |               if (progressResult && progressResult.status === 2) { // status 2: task finished
137 |                 clearInterval(timer);
138 |                 let imgs = progressResult.imgs;
139 |                 if (config?.img_type === "base64") {
140 |                   imgs = await Promise.all(
141 |                     progressResult.imgs.map((url) => readImgtoBase64(url))
142 |                   );
143 |                 }
144 |                 resolve(imgs);
145 |               } else if (
146 |                 progressResult &&
147 |                 (progressResult.status === 3 || progressResult.status === 4) // status 3 / 4: task failed
148 |               ) {
149 |                 clearInterval(timer);
150 |                 reject(
151 |                   new Error(
152 |                     progressResult.failed_reason ?? ERROR_GENERATE_IMG_FAILED
153 |                   )
154 |                 );
155 |               }
156 |             } catch (error) {
157 |               clearInterval(timer);
158 |               reject(error);
159 |             }
160 |           }, config?.interval ?? 1000);
161 |         } else {
162 |           reject(new Error("Failed to start the task."));
163 |         }
164 |       })
165 |       .catch(reject);
166 |   });
167 | }
168 | 
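// img2imgSync: same submit-then-poll flow as txt2ImgSync, against /v2/img2img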
169 | export function img2imgSync(
170 |   params: Img2imgRequest,
171 |   config?: SyncConfig
172 | ): Promise<any> {
173 |   return new Promise((resolve, reject) => {
174 |     img2img({
175 |       ...params,
176 |       prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
177 |     })
178 |       .then((res) => {
179 |         if (res && res.task_id) {
180 |           const timer = setInterval(async () => {
181 |             try {
182 |               const progressResult = await progress({ task_id: res.task_id });
183 |               if (progressResult && progressResult.status === 2) {
184 |                 clearInterval(timer);
185 |                 let imgs = progressResult.imgs;
186 |                 if (config?.img_type === "base64") {
187 |                   imgs = await Promise.all(
188 |                     progressResult.imgs.map((url) => readImgtoBase64(url))
189 |                   );
190 |                 }
191 |                 resolve(imgs);
192 |               } else if (
193 |                 progressResult &&
194 |                 (progressResult.status === 3 || progressResult.status === 4)
195 |               ) {
196 |                 clearInterval(timer);
197 |                 reject(
198 |                   new Error(
199 |                     progressResult.failed_reason ?? ERROR_GENERATE_IMG_FAILED
200 |                   )
201 |                 );
202 |               }
203 |             } catch (error) {
204 |               clearInterval(timer);
205 |               reject(error);
206 |             }
207 |           }, config?.interval ?? 1000);
208 |         } else {
209 |           reject(new Error("Failed to start the task."));
210 |         }
211 |       })
212 |       .catch(reject);
213 |   });
214 | }
215 | 
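// upscaleSync falls back to "R-ESRGAN 4x+" for both upscalers when the caller does not set them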
216 | export function upscaleSync(params: UpscalseRequest, config?: SyncConfig) {
217 |   return new Promise((resolve, reject) => {
218 |     upscale({
219 |       ...params,
220 |       upscaler_1: params.upscaler_1 ?? "R-ESRGAN 4x+",
221 |       upscaler_2: params.upscaler_2 ?? "R-ESRGAN 4x+",
222 |     })
223 |       .then((res) => {
224 |         if (res && res.task_id) {
225 |           const timer = setInterval(async () => {
226 |             try {
227 |               const progressResult = await progress({ task_id: res.task_id });
228 |               if (progressResult && progressResult.status === 2) {
229 |                 clearInterval(timer);
230 |                 let imgs = progressResult.imgs;
231 |                 if (config?.img_type === "base64") {
232 |                   imgs = await Promise.all(
233 |                     progressResult.imgs.map((url) => readImgtoBase64(url))
234 |                   );
235 |                 }
236 |                 resolve(imgs);
237 |               } else if (
238 |                 progressResult &&
239 |                 (progressResult.status === 3 || progressResult.status === 4)
240 |               ) {
241 |                 clearInterval(timer);
242 |                 reject(
243 |                   new Error(
244 |                     progressResult.failed_reason ?? ERROR_GENERATE_IMG_FAILED
245 |                   )
246 |                 );
247 |               }
248 |             } catch (error) {
249 |               clearInterval(timer);
250 |               reject(error);
251 |             }
252 |           }, config?.interval ?? 1000);
253 |         } else {
254 |           reject(new Error("Failed to start the task."));
255 |         }
256 |       })
257 |       .catch(reject);
258 |   });
259 | }
260 | 
--------------------------------------------------------------------------------
/src/class.ts:
--------------------------------------------------------------------------------
1 | import axios from "axios";
2 | import {
3 |   GetModelsResponse,
4 |   Img2imgRequest,
5 |   ProgressRequest,
6 |   ProgressResponse,
7 |   RequestCode,
8 |   SyncConfig,
9 |   Txt2ImgRequest,
10 |   Txt2ImgResponse,
11 |   UpscaleResponse,
12 |   UpscalseRequest,
13 | } from "./types";
14 | import { addLoraPrompt, generateLoraString, readImgtoBase64 } from "./util";
15 | import { ERROR_GENERATE_IMG_FAILED } from "./enum";
16 | import {
17 |   Omniinfer_axiosInstance,
18 |   Set_Omniinfer_axiosInstance_Key,
19 | } from "./config";
20 | 
21 | export class OmniinferSDK {
22 |   constructor(key: string) {
23 |     Set_Omniinfer_axiosInstance_Key(key);
24 |   }
25 | 
26 |   httpFetch({
27 |     url = "",
28 |     method = "GET",
29 |     data = undefined,
30 |     query = undefined,
31 |   }: {
32 |     url: string;
33 |     method?: string;
34 |     data?: Record<string, any> | undefined;
35 |     query?: Record<string, any> | undefined;
36 |   }) {
37 |     return Omniinfer_axiosInstance({
38 |       url: url,
39 |       method: method,
40 |       data: data,
41 |       params: query,
42 |     })
43 |       .then((response) => response.data)
44 |       .catch((error) => {
45 |         throw new Error(error.response ? error.response.data : error.message);
46 |       });
47 |   }
48 | 
49 |   getModels() {
50 |     return this.httpFetch({
51 |       url: "/v2/models",
52 |     }).then((res: GetModelsResponse) => {
53 |       if (res.code !== RequestCode.SUCCESS) {
54 |         throw new Error(res.msg);
55 |       }
56 |       return res.data;
57 |     });
58 |   }
59 | 
60 |   txt2Img(params: Txt2ImgRequest) {
61 |     return this.httpFetch({
62 |       url: "/v2/txt2img",
63 |       method: "POST",
64 |       data: {
65 |         ...params,
66 |         prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
67 |       },
68 |     }).then((res: Txt2ImgResponse) => {
69 |       if (res.code !== RequestCode.SUCCESS) {
70 |         throw new Error(res.msg);
71 |       }
72 |       return res.data;
73 |     });
74 |   }
75 | 
76 |   img2img(params: Img2imgRequest) {
77 |     return this.httpFetch({
78 |       url: "/v2/img2img",
79 |       method: "POST",
80 |       data: {
81 |         ...params,
82 |         prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
83 |       },
84 |     }).then((res: Txt2ImgResponse) => {
85 |       if (res.code !== RequestCode.SUCCESS) {
86 |         throw new Error(res.msg);
87 |       }
88 |       return res.data;
89 |     });
90 |   }
91 | 
92 |   progress(params: ProgressRequest) {
93 |     return this.httpFetch({
94 |       url: "/v2/progress",
95 |       method: "GET",
96 |       query: {
97 |         ...params,
98 |       },
99 |     }).then((res: ProgressResponse) => {
100 |       if (res.code !== RequestCode.SUCCESS) {
101 |         throw new Error(res.msg);
102 |       }
103 |       return res.data;
104 |     });
105 |   }
106 | 
107 |   txt2ImgSync(params: Txt2ImgRequest, config?: SyncConfig): Promise<any> {
108 |     return new Promise((resolve, reject) => {
109 |       this.txt2Img({
110 |         ...params,
111 |         prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
112 |       })
113 |         .then((res) => {
114 |           if (res && res.task_id) {
115 |             const timer = setInterval(async () => {
116 |               try {
117 |                 const progressResult = await this.progress({
118 |                   task_id: res.task_id,
119 |                 });
120 |                 if (progressResult && progressResult.status === 2) {
121 |                   clearInterval(timer);
122 |                   let imgs = progressResult.imgs;
123 |                   if (config?.img_type === "base64") {
124 |                     imgs = await Promise.all(
125 |                       progressResult.imgs.map((url) => readImgtoBase64(url))
126 |                     );
127 |                   }
128 |                   resolve(imgs);
129 |                 } else if (
130 |                   progressResult &&
131 |                   (progressResult.status === 3 || progressResult.status === 4)
132 |                 ) {
133 |                   clearInterval(timer);
134 |                   reject(
135 |                     new Error(
136 |                       progressResult.failed_reason ?? ERROR_GENERATE_IMG_FAILED
137 |                     )
138 |                   );
139 |                 }
140 |               } catch (error) {
141 |                 clearInterval(timer);
142 |                 reject(error);
143 |               }
144 |             }, config?.interval ?? 1000);
145 |           } else {
146 |             reject(new Error("Failed to start the task."));
147 |           }
148 |         })
149 |         .catch(reject);
150 |     });
151 |   }
152 | 
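  // the remaining *Sync methods mirror the standalone client functions and reuse the same polling loop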
153 |   img2imgSync(params: Img2imgRequest, config?: SyncConfig): Promise<any> {
154 |     return new Promise((resolve, reject) => {
155 |       this.img2img({
156 |         ...params,
157 |         prompt: addLoraPrompt(generateLoraString(params.lora), params.prompt),
158 |       })
159 |         .then((res) => {
160 |           if (res && res.task_id) {
161 |             const timer = setInterval(async () => {
162 |               try {
163 |                 const progressResult = await this.progress({
164 |                   task_id: res.task_id,
165 |                 });
166 |                 if (progressResult && progressResult.status === 2) {
167 |                   clearInterval(timer);
168 |                   let imgs = progressResult.imgs;
169 |                   if (config?.img_type === "base64") {
170 |                     imgs = await Promise.all(
171 |                       progressResult.imgs.map((url) => readImgtoBase64(url))
172 |                     );
173 |                   }
174 |                   resolve(imgs);
175 |                 } else if (
176 |                   progressResult &&
177 |                   (progressResult.status === 3 || progressResult.status === 4)
178 |                 ) {
179 |                   clearInterval(timer);
180 |                   reject(
181 |                     new Error(
182 |                       progressResult.failed_reason ?? ERROR_GENERATE_IMG_FAILED
183 |                     )
184 |                   );
185 |                 }
186 |               } catch (error) {
187 |                 clearInterval(timer);
188 |                 reject(error);
189 |               }
190 |             }, config?.interval ?? 1000);
191 |           } else {
192 |             reject(new Error("Failed to start the task."));
193 |           }
194 |         })
195 |         .catch(reject);
196 |     });
197 |   }
198 | 
199 |   upscale(params: UpscalseRequest) {
200 |     return this.httpFetch({
201 |       url: "/v2/upscale",
202 |       method: "POST",
203 |       data: {
204 |         ...params,
205 |         upscaler_1: params.upscaler_1 ?? "R-ESRGAN 4x+",
206 |         upscaler_2: params.upscaler_2 ?? "R-ESRGAN 4x+",
207 |       },
208 |     }).then((res: UpscaleResponse) => {
209 |       if (res.code !== RequestCode.SUCCESS) {
210 |         throw new Error(res.msg);
211 |       }
212 |       return res.data;
213 |     });
214 |   }
215 | 
216 |   upscaleSync(params: UpscalseRequest, config?: SyncConfig) {
217 |     return new Promise((resolve, reject) => {
218 |       this.upscale({
219 |         ...params,
220 |         upscaler_1: params.upscaler_1 ?? "R-ESRGAN 4x+",
221 |         upscaler_2: params.upscaler_2 ?? "R-ESRGAN 4x+",
222 |       })
223 |         .then((res) => {
224 |           if (res && res.task_id) {
225 |             const timer = setInterval(async () => {
226 |               try {
227 |                 const progressResult = await this.progress({
228 |                   task_id: res.task_id,
229 |                 });
230 |                 if (progressResult && progressResult.status === 2) {
231 |                   clearInterval(timer);
232 |                   let imgs = progressResult.imgs;
233 |                   if (config?.img_type === "base64") {
234 |                     imgs = await Promise.all(
235 |                       progressResult.imgs.map((url) => readImgtoBase64(url))
236 |                     );
237 |                   }
238 |                   resolve(imgs);
239 |                 } else if (
240 |                   progressResult &&
241 |                   (progressResult.status === 3 || progressResult.status === 4)
242 |                 ) {
243 |                   clearInterval(timer);
244 |                   reject(
245 |                     new Error(
246 |                       progressResult.failed_reason ?? ERROR_GENERATE_IMG_FAILED
247 |                     )
248 |                   );
249 |                 }
250 |               } catch (error) {
251 |                 clearInterval(timer);
252 |                 reject(error);
253 |               }
254 |             }, config?.interval ??
1000); 255 | } else { 256 | reject(new Error("Failed to start the task.")); 257 | } 258 | }) 259 | .catch(reject); 260 | }); 261 | } 262 | } 263 | --------------------------------------------------------------------------------