Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
File size: 1,434 Bytes
ef22617 c4b02b2 ef22617 c4b02b2 ef22617 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 |
import { HfInference } from "@huggingface/inference"
import { getValidNumber } from "./getValidNumber.mts"
import { generateSeed } from "./generateSeed.mts"
// Shared Hugging Face Inference API client for this module.
// NOTE(review): assumes VC_HF_API_TOKEN is set in the deployment environment — verify config.
const hf = new HfInference(process.env.VC_HF_API_TOKEN)
export async function generateImage(options: {
positivePrompt: string;
negativePrompt: string;
seed?: number;
width?: number;
height?: number;
nbSteps?: number;
}) {
const positivePrompt = options?.positivePrompt || ""
if (!positivePrompt) {
throw new Error("missing prompt")
}
const negativePrompt = options?.negativePrompt || ""
// we treat 0 as meaning "random seed"
const seed = (options?.seed ? options.seed : 0) || generateSeed()
const width = getValidNumber(options?.width, 256, 1024, 512)
const height = getValidNumber(options?.height, 256, 1024, 512)
const nbSteps = getValidNumber(options?.nbSteps, 5, 50, 25)
const blob = await hf.textToImage({
inputs: [
positivePrompt,
"bautiful",
"award winning",
"intricate details",
"high resolution"
].filter(word => word)
.join(", "),
model: "stabilityai/stable-diffusion-2-1",
parameters: {
negative_prompt: [
negativePrompt,
"blurry",
// "artificial",
// "cropped",
"low quality",
"ugly"
].filter(word => word)
.join(", ")
}
})
const buffer = Buffer.from(await blob.arrayBuffer())
return buffer
} |