Commit c4b02b2
Parent(s): 44fe180

ok, I think it works now

Files changed:
- src/config.mts +1 -1
- src/index.mts +21 -8
- src/initFolders.mts +5 -1
- src/production/generateVideo.mts +4 -1
- src/production/renderPipeline.mts +20 -0
- src/production/renderScene.mts +1 -1
- src/types.mts +3 -1
- src/utils/generateImage.mts +7 -3
- src/utils/generateImageSDXL.mts +4 -1
- src/utils/generateImageSDXL360.mts +4 -1
- src/utils/hashRequest.mts +23 -0
- src/utils/loadRenderedSceneFromCache.mts +8 -6
- src/utils/moveVideoFromTmpToCompleted.mts +2 -0
- src/utils/parseRenderRequest.mts +10 -3
- src/utils/saveRenderedSceneToCache.mts +16 -3
src/config.mts
CHANGED

@@ -13,7 +13,7 @@ export const completedFilesDirFilePath = path.join(filesDirPath, "completed")
 
 // this is a semi-persistent storage (we want to renew it from time to time)
 export const cacheDirPath = path.join(storagePath, "cache")
-export const renderedDirFilePath = path.join(
+export const renderedDirFilePath = path.join(cacheDirPath, "rendered")
 
 export const shotFormatVersion = 1
 export const sequenceFormatVersion = 1
src/index.mts
CHANGED

@@ -51,14 +51,22 @@ app.post("/render", async (req, res) => {
     return
   }
 
-
-
-
-
-
-
-
-
+  if (request.cache === "use") {
+    console.log("client requested to use the cache")
+    try {
+      const cached = await loadRenderedSceneFromCache(request)
+      const cachedJson = JSON.stringify(cached)
+      console.log(`request ${request} is in cache!`)
+      res.status(200)
+      res.write(cachedJson)
+      res.end()
+      return
+    } catch (err) {
+      console.log("request not found in cache: "+ err)
+      // move along
+    }
+  } else if (request.cache === "renew") {
+    console.log("client requested to renew the cache")
   }
 
   let response: RenderedScene = {

@@ -112,12 +120,16 @@ app.get("/render/:renderId", async (req, res) => {
   }
 
   try {
+    // we still try to search for it in the cache
    const cached = await loadRenderedSceneFromCache(undefined, renderId)
     const cachedJson = JSON.stringify(cached)
+    console.log(`request ${renderId} is already in cache, so we return that`)
     res.status(200)
     res.write(cachedJson)
     res.end()
+    return
   } catch (err) {
+    // console.log("renderId not found in cache: "+ err)
     // move along
   }
 

@@ -130,6 +142,7 @@ app.get("/render/:renderId", async (req, res) => {
     segments: []
   }
 
+  // console.log("going to render the scene!")
   try {
     response = await getRenderedScene(renderId)
   } catch (err) {
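For context, here is a minimal sketch of how a client might exercise the new cache field against the POST /render endpoint. The host/port, the fetch-based client and the example values are assumptions for illustration, not part of this commit; only the endpoint and the request fields come from the diffs.

// hypothetical client, assuming the Express server from src/index.mts listens on localhost:3000
async function requestRender() {
  const body = {
    prompt: "a castle on a hill",
    nbFrames: 1,               // 1 frame = still image, >1 = video
    nbSteps: 25,
    width: 512,
    height: 512,
    seed: 0,                   // 0 is treated as "pick a random seed" later in the pipeline
    projection: "cartesian",
    cache: "use",              // new CacheMode: "use" | "renew" | "ignore"
    // segmentation and actionnables omitted for brevity
  }

  const res = await fetch("http://localhost:3000/render", {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(body),
  })

  // with cache: "use", a request whose hashable fields were already rendered
  // is answered straight from the on-disk cache instead of being re-rendered
  return res.json()
}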
src/initFolders.mts
CHANGED

@@ -4,7 +4,9 @@ import {
   completedMetadataDirFilePath,
   filesDirPath,
   pendingFilesDirFilePath,
-  completedFilesDirFilePath
+  completedFilesDirFilePath,
+  cacheDirPath,
+  renderedDirFilePath
 } from "./config.mts"
 import { createDirIfNeeded } from "./utils/createDirIfNeeded.mts"
 

@@ -16,4 +18,6 @@ export const initFolders = () => {
   createDirIfNeeded(filesDirPath)
   createDirIfNeeded(pendingFilesDirFilePath)
   createDirIfNeeded(completedFilesDirFilePath)
+  createDirIfNeeded(cacheDirPath)
+  createDirIfNeeded(renderedDirFilePath)
 }
src/production/generateVideo.mts
CHANGED

@@ -32,7 +32,10 @@ export const generateVideo = async (prompt: string, options?: {
 
 
   try {
-
+
+    // we treat 0 as meaning "random seed"
+    const seed = (options?.seed ? options.seed : 0) || generateSeed()
+
     const nbFrames = options?.nbFrames || 24 // we can go up to 48 frames, but then upscaling quill require too much memory!
     const nbSteps = options?.nbSteps || 35
 
src/production/renderPipeline.mts
CHANGED

@@ -7,6 +7,7 @@ import { renderImageSegmentation } from "./renderImageSegmentation.mts"
 import { renderVideoSegmentation } from "./renderVideoSegmentation.mts"
 import { upscaleImage } from "../utils/upscaleImage.mts"
 import { renderImageUpscaling } from "./renderImageUpscaling.mts"
+import { saveRenderedSceneToCache } from "../utils/saveRenderedSceneToCache.mts"
 
 export async function renderPipeline(request: RenderRequest, response: RenderedScene) {
   const isVideo = request?.nbFrames > 1

@@ -67,4 +68,23 @@ export async function renderPipeline(request: RenderRequest, response: RenderedScene) {
 
   response.status = "completed"
   response.error = ""
+
+  if (!request.cache || request.cache === "ignore") {
+    console.log("client asked to not use the cache in the rendering pipeline")
+    return
+  }
+
+  console.log("client asked this for cache: "+request.cache)
+
+  try {
+    // since the request is now completed we cache it
+    await saveRenderedSceneToCache(request, response)
+    console.log("successfully saved to cache")
+
+    // we don't really need to remove it from the in-memory cache
+    // (the cache queue in src/production/renderScene.mts)
+    // since this cache queue has already an automatic pruning
+  } catch (err) {
+    console.error(`failed to save to cache, but no big deal: ${err}`)
+  }
 }
src/production/renderScene.mts
CHANGED

@@ -8,7 +8,7 @@ import { getValidBoolean } from "../utils/getValidBoolean.mts"
 
 const cache: Record<string, RenderedScene> = {}
 const cacheQueue: string[] = []
-const maxCacheSize =
+const maxCacheSize = 2000
 
 export async function renderScene(request: RenderRequest): Promise<RenderedScene> {
   // const key = getCacheKey(scene)
src/types.mts
CHANGED

@@ -271,6 +271,8 @@ export type Video = VideoSequence & {
 
 export type ProjectionMode = 'cartesian' | 'spherical'
 
+export type CacheMode = "use" | "renew" | "ignore"
+
 export interface RenderRequest {
   prompt: string
 

@@ -300,7 +302,7 @@ export interface RenderRequest {
 
   projection: ProjectionMode
 
-
+  cache: CacheMode
 }
 
 export interface ImageSegmentationRequest {
src/utils/generateImage.mts
CHANGED

@@ -1,6 +1,7 @@
 import { HfInference } from "@huggingface/inference"
-
-import {
+
+import { getValidNumber } from "./getValidNumber.mts"
+import { generateSeed } from "./generateSeed.mts"
 
 const hf = new HfInference(process.env.VC_HF_API_TOKEN)
 

@@ -18,7 +19,10 @@ export async function generateImage(options: {
     throw new Error("missing prompt")
   }
   const negativePrompt = options?.negativePrompt || ""
-
+
+  // we treat 0 as meaning "random seed"
+  const seed = (options?.seed ? options.seed : 0) || generateSeed()
+
   const width = getValidNumber(options?.width, 256, 1024, 512)
   const height = getValidNumber(options?.height, 256, 1024, 512)
   const nbSteps = getValidNumber(options?.nbSteps, 5, 50, 25)
src/utils/generateImageSDXL.mts
CHANGED

@@ -25,7 +25,10 @@ export async function generateImageSDXLAsBase64(options: {
     throw new Error("missing prompt")
   }
   const negativePrompt = options?.negativePrompt || ""
-
+
+  // we treat 0 as meaning "random seed"
+  const seed = (options?.seed ? options.seed : 0) || generateSeed()
+
   const width = getValidNumber(options?.width, 256, 1024, 512)
   const height = getValidNumber(options?.height, 256, 1024, 512)
   const nbSteps = getValidNumber(options?.nbSteps, 5, 100, 20)
src/utils/generateImageSDXL360.mts
CHANGED

@@ -25,7 +25,10 @@ export async function generateImageSDXL360AsBase64(options: {
     throw new Error("missing prompt")
   }
   const negativePrompt = options?.negativePrompt || ""
-
+
+  // we treat 0 as meaning "random seed"
+  const seed = (options?.seed ? options.seed : 0) || generateSeed()
+
   const width = getValidNumber(options?.width, 256, 1024, 512)
   const height = getValidNumber(options?.height, 256, 1024, 512)
   const nbSteps = getValidNumber(options?.nbSteps, 5, 100, 20)
src/utils/hashRequest.mts
ADDED

@@ -0,0 +1,23 @@
+import { RenderRequest } from "../types.mts"
+import { computeSha256 } from "./computeSha256.mts"
+
+export function hashRequest(request: RenderRequest) {
+
+  // we ignore the commands associated to cache and stuff
+  const hashable = {
+    prompt: request.prompt,
+    segmentation: request.segmentation,
+    actionnables: request.actionnables,
+    nbFrames: request.actionnables,
+    nbSteps: request.actionnables,
+    seed: request.actionnables,
+    width: request.actionnables,
+    height: request.actionnables,
+    projection: request.actionnables,
+  }
+
+  const requestJson = JSON.stringify(hashable)
+  const hash = computeSha256(requestJson)
+
+  return hash
+}
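To show how this hash is used by the rest of the commit, here is a small hypothetical helper (cacheFileNameFor is not part of the commit) that reproduces the file-naming convention written by saveRenderedSceneToCache and matched by loadRenderedSceneFromCache:

import { RenderRequest, RenderedScene } from "../types.mts"
import { hashRequest } from "./hashRequest.mts"

// hypothetical helper: the file name the cache layer writes and later looks up
export function cacheFileNameFor(request: RenderRequest, scene: RenderedScene): string {
  const hash = hashRequest(request)                  // sha256 hex digest of the hashable fields
  return `hash_${hash}_id_${scene.renderId}.json`    // e.g. "hash_3fa2..._id_abc123.json" (made-up values)
}

Two requests that produce the same hash therefore resolve to the same cached scene, since lookups match on the hash_${hash} part of the file name.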
src/utils/loadRenderedSceneFromCache.mts
CHANGED

@@ -3,7 +3,7 @@ import path from "node:path"
 
 import { RenderRequest, RenderedScene } from "../types.mts"
 import { renderedDirFilePath } from "../config.mts"
-import {
+import { hashRequest } from "./hashRequest.mts"
 
 export async function loadRenderedSceneFromCache(request?: RenderRequest, id?: string): Promise<RenderedScene> {
 

@@ -11,8 +11,8 @@ export async function loadRenderedSceneFromCache(request?: RenderRequest, id?: string): Promise<RenderedScene> {
 
   if (request?.prompt) {
     try {
-
-      const hash =
+      // note: this hashing function ignores the commands associated to cache and stuff
+      const hash = hashRequest(request)
       pattern = `hash_${hash}`
     } catch (err) {
     }

@@ -24,10 +24,12 @@ export async function loadRenderedSceneFromCache(request?: RenderRequest, id?: string): Promise<RenderedScene> {
     throw new Error("invalid request or id")
   }
 
-
+  // console.log("pattern to find: " + pattern)
 
-
+  for (const cachedFile of await fs.readdir(renderedDirFilePath)) {
+    // console.log("evaluating " + cachedFile)
     if (cachedFile.includes(pattern)) {
+      // console.log("matched with " + cachedFile)
       const cacheFilePath = path.join(renderedDirFilePath, cachedFile)
 
       const scene = JSON.parse(

@@ -46,5 +48,5 @@ export async function loadRenderedSceneFromCache(request?: RenderRequest, id?: string): Promise<RenderedScene> {
     }
   }
 
-  throw new Error(
+  throw new Error(`couldn't find a cache file for pattern ${pattern}`)
 }
src/utils/moveVideoFromTmpToCompleted.mts
CHANGED

@@ -1,5 +1,7 @@
 import path from "node:path"
+
 import tmpDir from "temp-dir"
+
 import { completedFilesDirFilePath } from "../config.mts"
 import { moveFile } from "./moveFile.mts"
 
src/utils/parseRenderRequest.mts
CHANGED

@@ -5,13 +5,18 @@ import { getValidNumber } from "./getValidNumber.mts"
 
 export function parseRenderRequest(request: RenderRequest) {
 
+  console.log("parseRenderRequest: "+JSON.stringify(request, null, 2))
   try {
     request.nbFrames = getValidNumber(request.nbFrames, 1, 24, 16)
 
     const isVideo = request?.nbFrames === 1
 
-    //
-
+    // note that we accept a seed of 0
+    // (this ensure we are able to cache the whote request by signing it)
+    request.seed = getValidNumber(request.seed, 0, 2147483647, 0)
+
+    // but obviously we will treat 0 as the random seed at a later stage
+
     request.nbSteps = getValidNumber(request.nbSteps, 5, 50, 10)
 
     if (isVideo) {

@@ -22,9 +27,11 @@ export function parseRenderRequest(request: RenderRequest) {
       request.height = getValidNumber(request.height, 256, 720, 320)
     }
 
-    request.
+    request.cache = request?.cache || "ignore"
   } catch (err) {
     console.error(`failed to parse the render request: ${err}`)
   }
+
+  console.log("parsed request: "+JSON.stringify(request, null, 2))
   return request
 }
src/utils/saveRenderedSceneToCache.mts
CHANGED

@@ -4,18 +4,19 @@ import path from "node:path"
 import { RenderRequest, RenderedScene } from "../types.mts"
 import { renderedDirFilePath } from "../config.mts"
 
-import {
+import { hashRequest } from "./hashRequest.mts"
 
 export async function saveRenderedSceneToCache(
   request: RenderRequest,
   scene: RenderedScene
 ): Promise<RenderedScene> {
+  // console.log("saveRenderedSceneToCache")
   if (scene.status !== "completed") {
     throw new Error("sorry, it only makes sense to cache a *completed* scene, not a pending or failed one.")
   }
 
-
-  const hash =
+  //note: this hashing function ignores the commands associated to cache and stuff
+  const hash = hashRequest(request)
   const id = scene.renderId
 
   const cacheFileName = `hash_${hash}_id_${id}.json`

@@ -23,7 +24,19 @@ export async function saveRenderedSceneToCache(
 
   const renderedSceneJson = JSON.stringify(scene)
 
+  /*
+  console.log({
+    request,
+    hash,
+    id,
+    cacheFileName,
+    cacheFilePath,
+    scene
+  })
+  */
+
   await fs.writeFile(cacheFilePath, renderedSceneJson, "utf8")
+  console.log(`saved result to cache`)
 
   return scene
 }
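Taken together, the pieces added in this commit form a simple save-then-load cycle. The sketch below is a hypothetical harness, not code from the repository; the import paths and the way the functions are wired together are assumptions based on the diffs above.

import { RenderRequest, RenderedScene } from "./src/types.mts"
import { parseRenderRequest } from "./src/utils/parseRenderRequest.mts"
import { saveRenderedSceneToCache } from "./src/utils/saveRenderedSceneToCache.mts"
import { loadRenderedSceneFromCache } from "./src/utils/loadRenderedSceneFromCache.mts"

async function demoCacheRoundTrip(rawRequest: RenderRequest, completedScene: RenderedScene) {
  // clamps numeric fields, accepts a seed of 0, and defaults cache to "ignore"
  const request = parseRenderRequest(rawRequest)

  // the render pipeline only persists *completed* scenes, and only when cache is not "ignore"
  if (request.cache && request.cache !== "ignore") {
    await saveRenderedSceneToCache(request, completedScene)
  }

  // a later request with the same hashable fields (or a lookup by renderId)
  // can then be served from the on-disk cache
  return loadRenderedSceneFromCache(request)
}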