try qwen3
src/App.svelte
CHANGED
@@ -18,6 +18,7 @@
   let fluxClient: GradioClient | null = $state(null);
   let joyCaptionClient: GradioClient | null = $state(null);
   let zephyrClient: GradioClient | null = $state(null);
+  let qwenClient: GradioClient | null = $state(null);
 
   // Navigation state
   let activeTab: TabId = $state('scanner');
@@ -111,6 +112,11 @@
         opts
       );
 
+      qwenClient = await gradioClient.Client.connect(
+        "Qwen/Qwen3-Demo",
+        opts
+      );
+
       authStore.setBannerMessage("");
     } catch (err) {
       console.error(err);
@@ -135,6 +141,7 @@
       {fluxClient}
       {joyCaptionClient}
       {zephyrClient}
+      {qwenClient}
     />
   {:else if activeTab === 'encounters'}
    <Encounters />
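The new qwenClient is wired up the same way as the existing clients. A minimal standalone sketch of that connection with @gradio/client (assuming opts is the same options object, e.g. an hf_token, that App.svelte already passes to the other Client.connect calls):

import { Client } from "@gradio/client";

// Hypothetical helper mirroring the connect call added above; `opts` stands in
// for whatever shared connection options App.svelte passes to the other Spaces.
async function connectQwen(opts?: Parameters<typeof Client.connect>[1]) {
  // Resolves once the Space's API config has been fetched.
  return Client.connect("Qwen/Qwen3-Demo", opts);
}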
src/lib/components/Pages/Scanner.svelte
CHANGED
@@ -6,17 +6,19 @@
     fluxClient: GradioClient | null;
     joyCaptionClient: GradioClient | null;
     zephyrClient: GradioClient | null;
+    qwenClient: GradioClient | null;
   }
 
-  let { fluxClient, joyCaptionClient, zephyrClient }: Props = $props();
+  let { fluxClient, joyCaptionClient, zephyrClient, qwenClient }: Props = $props();
 </script>
 
 <div class="scanner-page">
-  {#if fluxClient && joyCaptionClient && zephyrClient}
+  {#if fluxClient && joyCaptionClient && zephyrClient && qwenClient}
     <PicletGenerator
       {fluxClient}
       {joyCaptionClient}
       {zephyrClient}
+      {qwenClient}
     />
   {:else}
    <div class="loading-state">
src/lib/components/PicletGenerator/PicletGenerator.svelte
CHANGED
@@ -11,7 +11,7 @@
 
   interface Props extends PicletGeneratorProps {}
 
-  let { joyCaptionClient, zephyrClient, fluxClient }: Props = $props();
+  let { joyCaptionClient, zephyrClient, fluxClient, qwenClient }: Props = $props();
 
   let state: PicletWorkflowState = $state({
     currentStep: 'upload',
@@ -25,31 +25,6 @@
     isProcessing: false
   });
 
-  // Custom prompt for joy-caption-alpha-two to generate everything in one step
-  const MONSTER_GENERATION_PROMPT = `Based on this image create a Pokémon-style monster that transforms the object into an imaginative creature. The monster should clearly be inspired by the object's appearance but reimagined as a living monster.
-
-Guidelines:
-- Take the object's key visual elements (colors, shapes, materials) incorporating all of them into a single creature design
-- Add eyes (can be glowing, mechanical, multiple, etc.) positioned where they make sense
-- Include limbs (legs, arms, wings, tentacles) that grow from or replace parts of the object
-- Add a mouth, beak, or feeding apparatus if appropriate
-- Add creature elements like tail, fins, claws, horns, etc where fitting
-
-Include:
-- A creative name that hints at the original object
-- Physical description showing how the object becomes a creature
-- Personality traits based on the object's purpose
-
-Format your response as:
-\`\`\`
-# Object Caption
-{object description, also assess how rare the object is, the rarer the object the stronger the monster}
-# Monster Name
-{monster name}
-## Monster Visual Description
-{ensure the creature uses all the unique attributes of the object}
-\`\`\``;
-
   const IMAGE_GENERATION_PROMPT = (concept: string) => `Extract ONLY the visual appearance from this monster concept and describe it in one concise sentence:
 "${concept}"
 
@@ -108,18 +83,26 @@ Focus on: colors, body shape, eyes, limbs, mouth, and key visual features. Omit
     state.isProcessing = true;
 
     try {
-      // Step 1: Generate
+      // Step 1: Generate detailed object description with joy-caption
       await captionImage();
       await new Promise(resolve => setTimeout(resolve, 100)); // Small delay for state update
 
-      // Step 2: Generate monster
+      // Step 2: Generate free-form monster concept with qwen3
+      await generateConcept();
+      await new Promise(resolve => setTimeout(resolve, 100)); // Small delay for state update
+
+      // Step 3: Generate structured monster stats based on both caption and concept
       await generateStats();
       await new Promise(resolve => setTimeout(resolve, 100)); // Small delay for state update
 
-      // Step
+      // Step 4: Generate image prompt with qwen3
+      await generateImagePrompt();
+      await new Promise(resolve => setTimeout(resolve, 100)); // Small delay for state update
+
+      // Step 5: Generate monster image
      await generateMonsterImage();
 
-      // Step
+      // Step 6: Auto-save the piclet
      await autoSavePiclet();
 
      state.currentStep = 'complete';
@@ -179,68 +162,227 @@ Focus on: colors, body shape, eyes, limbs, mouth, and key visual features. Omit
       const output = await joyCaptionClient.predict("/stream_chat", [
         state.userImage, // input_image
         "Descriptive", // caption_type
-        "
+        "long", // caption_length
         [], // extra_options
         "", // name_input
-
+        "" // custom_prompt (empty for default descriptive captioning)
       ]);
 
-      const [
-      //
+      const [, caption] = output.data;
+      // Store the detailed object description
       state.imageCaption = caption;
-
-      console.log('Monster concept generated:', caption);
+      console.log('Detailed object description generated:', caption);
     } catch (error) {
       handleAPIError(error);
     }
   }
 
+  async function generateConcept() {
+    state.currentStep = 'conceptualizing';
+
+    if (!qwenClient || !state.imageCaption) {
+      throw new Error('Qwen service not available or no image caption provided');
+    }
+
+    const conceptPrompt = `Based on this detailed object description, create a Pokémon-style monster that transforms the object into an imaginative creature. The monster should clearly be inspired by the object's appearance but reimagined as a living monster.
+
+Object description: "${state.imageCaption}"
+
+Guidelines:
+- Take the object's key visual elements (colors, shapes, materials) incorporating all of them into a single creature design
+- Add eyes (can be glowing, mechanical, multiple, etc.) positioned where they make sense
+- Include limbs (legs, arms, wings, tentacles) that grow from or replace parts of the object
+- Add a mouth, beak, or feeding apparatus if appropriate
+- Add creature elements like tail, fins, claws, horns, etc where fitting
+
+Format your response exactly as follows:
+
+# Object Rarity
+{Assess how rare the object is based on real-world availability and value. Rare objects give strong monsters while common objects give weak ones. Use: common, uncommon, rare, or legendary}
+
+# Monster Name
+{Creative name that hints at the original object}
+
+## Monster Visual Description
+{Detailed physical description showing how the object becomes a creature. Ensure the creature uses all the unique attributes of the object. Include colors, shapes, materials, eyes, limbs, mouth, and distinctive features. This section should be comprehensive as it will be used for both stats generation and image creation.}`;
+
+    try {
+      // Create the required state structure based on qwen.html
+      const defaultState = {
+        "conversation_contexts": {},
+        "conversations": [],
+        "conversation_id": "",
+      };
+
+      // Create default settings based on qwen.html with max thinking tokens
+      const defaultSettings = {
+        "model": "qwen3-235b-a22b",
+        "sys_prompt": "You are a creative monster designer specializing in transforming everyday objects into imaginative Pokémon-style creatures. Follow the exact format provided and create detailed, engaging descriptions that bring these monsters to life.",
+        "thinking_budget": 38
+      };
+
+      // Create thinking button state
+      const thinkingBtnState = {
+        "enable_thinking": true
+      };
+
+      console.log('Generating monster concept with qwen3...');
+
+      // Call the add_message function (fn_index 13)
+      const output = await qwenClient.predict(13, [
+        conceptPrompt, // input_value
+        defaultSettings, // settings_form_value
+        thinkingBtnState, // thinking_btn_state_value
+        defaultState // state_value
+      ]);
+
+      console.log('Qwen3 concept response:', output);
+
+      // Extract the response text from the output
+      let responseText = "";
+      if (output && output.data && Array.isArray(output.data)) {
+        // The chatbot response is at index 5 in the outputs array
+        const chatbotUpdate = output.data[5];
+
+        if (chatbotUpdate && chatbotUpdate.value && Array.isArray(chatbotUpdate.value)) {
+          const chatHistory = chatbotUpdate.value;
+
+          if (chatHistory.length > 0) {
+            // Get the last message (assistant's response)
+            const lastMessage = chatHistory[chatHistory.length - 1];
+
+            if (lastMessage && lastMessage.content && Array.isArray(lastMessage.content)) {
+              // Extract text content from the message
+              const textContents = lastMessage.content
+                .filter((item: any) => item.type === "text")
+                .map((item: any) => item.content)
+                .join("\n");
+              responseText = textContents || "Response received but no text content found";
+            } else if (lastMessage && lastMessage.role === "assistant") {
+              // Fallback - if content structure is different
+              responseText = JSON.stringify(lastMessage, null, 2);
+            }
+          }
+        }
+      }
+
+      if (!responseText || responseText.trim() === '') {
+        throw new Error('Failed to generate monster concept');
+      }
+
+      state.picletConcept = responseText;
+      console.log('Monster concept generated:', responseText);
+    } catch (error) {
+      handleAPIError(error);
+    }
+  }
 
-
-
-    state.currentStep = 'generating';
+  async function generateImagePrompt() {
+    state.currentStep = 'promptCrafting';
 
-    if (!
-      throw new Error('
+    if (!qwenClient || !state.picletConcept || !state.imageCaption) {
+      throw new Error('Qwen service not available or no concept/caption available for prompt generation');
     }
 
-    // Extract the
     const visualDescMatch = state.picletConcept.match(/## Monster Visual Description\s*\n([\s\S]*?)(?=##|$)/);
+    // Extract the Monster Visual Description from the structured concept
 
     if (visualDescMatch && visualDescMatch[1]) {
       state.imagePrompt = visualDescMatch[1].trim();
       console.log('Extracted visual description for image generation:', state.imagePrompt);
-
-
-
+      return; // Skip qwen3 call since we have the description
+    }
+
+    // Fallback: if format parsing fails, use qwen3 to extract visual description
+    const imagePromptPrompt = `Based on this monster concept, extract ONLY the visual description for image generation:
+
+MONSTER CONCEPT:
+"""
+${state.picletConcept}
+"""
+
+Create a concise visual description (1-3 sentences, max 100 words). Focus only on colors, shapes, materials, eyes, limbs, mouth, and distinctive features. Omit all non-visual information like abilities and backstory.`;
+
+    try {
+      // Create the required state structure based on qwen.html
+      const defaultState = {
+        "conversation_contexts": {},
+        "conversations": [],
+        "conversation_id": "",
+      };
 
-
-    const
+      // Create default settings based on qwen.html
+      const defaultSettings = {
+        "model": "qwen3-235b-a22b",
+        "sys_prompt": "You are an expert at creating concise visual descriptions for image generation. Extract ONLY visual appearance details and describe them in 1-2 sentences (max 50 words). Focus on colors, shape, eyes, limbs, and distinctive features. Omit all non-visual information like abilities, personality, or backstory.",
+        "thinking_budget": 38
+      };
+
+      // Create thinking button state
+      const thinkingBtnState = {
+        "enable_thinking": true
+      };
 
-    console.log('
+      console.log('Generating image prompt with qwen3...');
 
-
-
-
-
-
-
-
-
-
-
-
+      // Call the add_message function (fn_index 13)
+      const output = await qwenClient.predict(13, [
+        imagePromptPrompt, // input_value
+        defaultSettings, // settings_form_value
+        thinkingBtnState, // thinking_btn_state_value
+        defaultState // state_value
+      ]);
+
+      console.log('Qwen3 image prompt response:', output);
+
+      // Extract the response text from the output using the same pattern as generateConcept
+      let responseText = "";
+      if (output && output.data && Array.isArray(output.data)) {
+        // The chatbot response is at index 5 in the outputs array
+        const chatbotUpdate = output.data[5];
 
-
-
-
+        if (chatbotUpdate && chatbotUpdate.value && Array.isArray(chatbotUpdate.value)) {
+          const chatHistory = chatbotUpdate.value;
+
+          if (chatHistory.length > 0) {
+            // Get the last message (assistant's response)
+            const lastMessage = chatHistory[chatHistory.length - 1];
+
+            if (lastMessage && lastMessage.content && Array.isArray(lastMessage.content)) {
+              // Extract text content from the message
+              const textContents = lastMessage.content
+                .filter((item: any) => item.type === "text")
+                .map((item: any) => item.content)
+                .join("\n");
+              responseText = textContents || "Response received but no text content found";
+            } else if (lastMessage && lastMessage.role === "assistant") {
+              // Fallback - if content structure is different
+              responseText = JSON.stringify(lastMessage, null, 2);
+            }
+          }
+        }
       }
+
+      if (!responseText || responseText.trim() === '') {
+        throw new Error('Failed to generate image prompt');
+      }
+
+      state.imagePrompt = responseText.trim();
+      console.log('Image prompt generated:', state.imagePrompt);
+    } catch (error) {
+      handleAPIError(error);
     }
+  }
+
+  async function generateMonsterImage() {
+    state.currentStep = 'generating';
 
-    if (!state.imagePrompt || state.
-      throw new Error('
+    if (!fluxClient || !state.imagePrompt || !state.picletStats) {
+      throw new Error('Image generation service not available or no prompt/stats');
     }
 
+    // The image prompt should already be generated by generateImagePrompt() in the workflow
+
     // Get tier for image quality enhancement
     const tier = state.picletStats.tier || 'medium';
     const tierDescriptions = {
@@ -299,36 +441,32 @@ Focus on: colors, body shape, eyes, limbs, mouth, and key visual features. Omit
   async function generateStats() {
     state.currentStep = 'statsGenerating';
 
-    if (!state.picletConcept) {
-      throw new Error('
+    if (!qwenClient || !state.picletConcept || !state.imageCaption) {
+      throw new Error('Qwen service not available or no concept/caption available for stats generation');
     }
 
     // Default tier (will be set from the generated stats)
     let tier: 'low' | 'medium' | 'high' | 'legendary' = 'medium';
 
-    // Extract
-    const
-    const objectDescription = objectMatch ? objectMatch[1].trim() : '';
-
-    // Extract monster name from the Monster Name section - similar to object extraction
-    const monsterNameMatch = state.picletConcept.match(/# Monster Name\s*\n([\s\S]*?)(?=^## |$)/m);
+    // Extract monster name and rarity from the structured concept
+    const monsterNameMatch = state.picletConcept.match(/# Monster Name\s*\n([\s\S]*?)(?=^##|$)/m);
     const monsterName = monsterNameMatch ? monsterNameMatch[1].trim() : 'Unknown Monster';
 
-
-
-    console.log('Extracted monster name:', monsterName);
+    const rarityMatch = state.picletConcept.match(/# Object Rarity\s*\n([\s\S]*?)(?=^#)/m);
+    const objectRarity = rarityMatch ? rarityMatch[1].trim().toLowerCase() : 'common';
 
-    // Create stats prompt
-    const statsPrompt = `Based on this monster concept, generate a JSON object with battle stats and abilities:
+    // Create stats prompt using both the detailed object description and monster concept
+    const statsPrompt = `Based on this detailed object description and monster concept, generate a JSON object with battle stats and abilities:
+
+ORIGINAL OBJECT DESCRIPTION:
+"${state.imageCaption}"
+
+MONSTER CONCEPT:
 "${state.picletConcept}"
 
-The
+The object rarity has been assessed as: ${objectRarity}
 
-
-• COMMON: Everyday items everyone has (stationery, grass, rocks, basic furniture, common tools)
-• UNCOMMON: Items that cost money but are widely available (electronics, appliances, vehicles, branded items)
-• RARE: Expensive or specialized items (luxury goods, professional equipment, gold jewelry, antiques)
-• LEGENDARY: Priceless or one-of-a-kind items (crown jewels, world wonders, famous artifacts, masterpiece art)
+Use this rarity level to determine appropriate stats:
 
 Next, determine the monster's type based on its concept and appearance. Choose the most appropriate type from these options:
 • BEAST: Vertebrate wildlife — mammals, birds, reptiles. Raw physicality, instincts, region-based variants.
@@ -347,6 +485,7 @@ The output should be formatted as a JSON instance that conforms to the JSON sche
 \`\`\`json
 {
   "properties": {
+    "name": {"type": "string", "description": "Creative name for the monster that hints at the original object"},
     "rarity": {"type": "string", "enum": ["common", "uncommon", "rare", "legendary"], "description": "Rarity of the original object based on real-world availability and value"},
     "picletType": {"type": "string", "enum": ["beast", "bug", "aquatic", "flora", "mineral", "space", "machina", "structure", "culture", "cuisine"], "description": "The type that best matches this monster's concept, appearance, and nature"},
     "height": {"type": "number", "minimum": 0.1, "maximum": 50.0, "description": "Height of the piclet in meters (e.g., 1.2, 0.5, 10.0)"},
@@ -366,7 +505,7 @@ The output should be formatted as a JSON instance that conforms to the JSON sche
     "specialActionName": {"type": "string", "description": "Name of the monster's ultimate move (one use per battle)"},
     "specialActionDescription": {"type": "string", "description": "Describe this powerful finishing move and its dramatic effects in battle"}
   },
-  "required": ["rarity", "picletType", "height", "weight", "HP", "defence", "attack", "speed", "monsterLore", "specialPassiveTraitDescription", "attackActionName", "attackActionDescription", "buffActionName", "buffActionDescription", "debuffActionName", "debuffActionDescription", "specialActionName", "specialActionDescription"]
+  "required": ["name", "rarity", "picletType", "height", "weight", "HP", "defence", "attack", "speed", "monsterLore", "specialPassiveTraitDescription", "attackActionName", "attackActionDescription", "buffActionName", "buffActionDescription", "debuffActionName", "debuffActionDescription", "specialActionName", "specialActionDescription"]
 }
 \`\`\`
 
@@ -378,24 +517,72 @@ Base the HP, defence, attack, and speed stats on the rarity level:
 
 Write your response within \`\`\`json\`\`\``;
 
-
-
-    console.log('Generating monster stats from concept');
+    console.log('Generating monster stats with qwen3');
 
     try {
-
-
-
-
-
-
-
-
-
+      // Create the required state structure based on qwen.html
+      const defaultState = {
+        "conversation_contexts": {},
+        "conversations": [],
+        "conversation_id": "",
+      };
+
+      // Create default settings based on qwen.html
+      const defaultSettings = {
+        "model": "qwen3-235b-a22b",
+        "sys_prompt": "You are a game designer specializing in monster stats and abilities. You must ONLY output valid JSON that matches the provided schema exactly. Do not include any text before or after the JSON. Do not include null values in your JSON response. Your entire response should be wrapped in a ```json``` code block.",
+        "thinking_budget": 38
+      };
+
+      // Create thinking button state
+      const thinkingBtnState = {
+        "enable_thinking": true
+      };
+
+      // Call the add_message function (fn_index 13)
+      const output = await qwenClient.predict(13, [
+        statsPrompt, // input_value
+        defaultSettings, // settings_form_value
+        thinkingBtnState, // thinking_btn_state_value
+        defaultState // state_value
      ]);
 
-      console.log('
-
+      console.log('Qwen3 stats response:', output);
+
+      // Extract the response text from the output using the same pattern as generateConcept
+      let responseText = "";
+      if (output && output.data && Array.isArray(output.data)) {
+        // The chatbot response is at index 5 in the outputs array
+        const chatbotUpdate = output.data[5];
+
+        if (chatbotUpdate && chatbotUpdate.value && Array.isArray(chatbotUpdate.value)) {
+          const chatHistory = chatbotUpdate.value;
+
+          if (chatHistory.length > 0) {
+            // Get the last message (assistant's response)
+            const lastMessage = chatHistory[chatHistory.length - 1];
+
+            if (lastMessage && lastMessage.content && Array.isArray(lastMessage.content)) {
+              // Extract text content from the message
+              const textContents = lastMessage.content
+                .filter((item: any) => item.type === "text")
+                .map((item: any) => item.content)
+                .join("\n");
+              responseText = textContents || "Response received but no text content found";
+            } else if (lastMessage && lastMessage.role === "assistant") {
+              // Fallback - if content structure is different
+              responseText = JSON.stringify(lastMessage, null, 2);
+            }
+          }
+        }
+      }
+
+      if (!responseText || responseText.trim() === '') {
+        throw new Error('Failed to generate monster stats');
+      }
+
+      console.log('Stats output:', responseText);
+      let jsonString = responseText;
 
       // Extract JSON from the response (remove markdown if present)
       let cleanJson = jsonString;
@@ -419,7 +606,7 @@ Write your response within \`\`\`json\`\`\``;
       const parsedStats = JSON.parse(cleanJson.trim());
 
       // Remove any extra fields not in our schema
-      const allowedFields = ['rarity', 'picletType', 'height', 'weight', 'HP', 'defence', 'attack', 'speed',
+      const allowedFields = ['name', 'rarity', 'picletType', 'height', 'weight', 'HP', 'defence', 'attack', 'speed',
        'monsterLore', 'specialPassiveTraitDescription', 'attackActionName', 'attackActionDescription',
        'buffActionName', 'buffActionDescription', 'debuffActionName', 'debuffActionDescription',
        'specialActionName', 'specialActionDescription', 'boostActionName', 'boostActionDescription',
@@ -442,8 +629,11 @@ Write your response within \`\`\`json\`\`\``;
         tier = tierMap[parsedStats.rarity.toLowerCase()] || 'medium';
       }
 
-      // Add the
-
+      // Add the description and tier that we extracted/mapped
+      // Use the name from the structured concept, or fall back to JSON response
+      if (!parsedStats.name) {
+        parsedStats.name = monsterName;
+      }
      parsedStats.description = parsedStats.monsterLore || 'A mysterious creature with unknown origins.';
      parsedStats.tier = tier;
 
@@ -577,9 +767,13 @@ Write your response within \`\`\`json\`\`\``;
       <div class="spinner"></div>
       <p class="processing-text">
         {#if state.currentStep === 'captioning'}
-
+          Analyzing your image...
+        {:else if state.currentStep === 'conceptualizing'}
+          Creating monster concept...
         {:else if state.currentStep === 'statsGenerating'}
           Generating battle stats...
+        {:else if state.currentStep === 'promptCrafting'}
+          Creating image prompt...
         {:else if state.currentStep === 'generating'}
           Generating your Piclet...
         {/if}
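generateConcept, generateImagePrompt and generateStats all repeat the same qwen3 call-and-parse pattern. A rough sketch (not part of this commit) of how it could be factored into one helper, assuming exactly what the diff assumes: fn_index 13 is the add_message endpoint and the chatbot update arrives at output.data[5] with messages whose content is an array of { type, content } parts:

// Minimal structural type; in the app this would be the GradioClient used above.
type QwenLikeClient = {
  predict: (endpoint: number | string, payload: unknown[]) => Promise<{ data?: unknown[] }>;
};

async function askQwen(qwenClient: QwenLikeClient, prompt: string, sysPrompt: string): Promise<string> {
  // Same payload shape the three functions build inline (based on qwen.html).
  const defaultState = { conversation_contexts: {}, conversations: [], conversation_id: "" };
  const defaultSettings = { model: "qwen3-235b-a22b", sys_prompt: sysPrompt, thinking_budget: 38 };
  const thinkingBtnState = { enable_thinking: true };

  const output = await qwenClient.predict(13, [
    prompt,           // input_value
    defaultSettings,  // settings_form_value
    thinkingBtnState, // thinking_btn_state_value
    defaultState      // state_value
  ]);

  // The chatbot update is expected at index 5 of the outputs array.
  const chatbotUpdate = output.data?.[5] as { value?: unknown[] } | undefined;
  const history = Array.isArray(chatbotUpdate?.value) ? (chatbotUpdate!.value as any[]) : [];
  const last = history[history.length - 1];

  if (last?.content && Array.isArray(last.content)) {
    return last.content
      .filter((item: any) => item.type === "text")
      .map((item: any) => item.content)
      .join("\n");
  }
  // Same fallback as the inline versions: serialize the message if the structure differs.
  return last?.role === "assistant" ? JSON.stringify(last, null, 2) : "";
}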
src/lib/types/index.ts
CHANGED
@@ -100,6 +100,7 @@ export interface PicletGeneratorProps {
   joyCaptionClient: GradioClient | null;
   zephyrClient: GradioClient | null;
   fluxClient: GradioClient | null;
+  qwenClient: GradioClient | null;
 }
 
 // Piclet Stats Types
|