app.py (changed)
```diff
@@ -52,17 +52,18 @@ class UnifiedAISystem:
 
         # CPU-specific configuration
         cpu_features = cpuinfo.get_cpu_info()['flags']
-
+        config_properties = {}
         if 'avx512' in cpu_features:
-
+            config_properties["ENFORCE_BF16"] = "YES"
         elif 'avx2' in cpu_features:
-
+            config_properties["INFERENCE_PRECISION_HINT"] = "f32"
 
-        # Initialize Mistral model
+        # Initialize Mistral model with updated configuration
         self.mistral_pipe = openvino_genai.LLMPipeline(
             "mistral-ov",
             device="CPU",
-
+            PERFORMANCE_HINT="THROUGHPUT",
+            **config_properties
         )
 
         # Initialize Whisper for audio processing
@@ -229,8 +230,8 @@ class UnifiedAISystem:
             output = self.internvl_pipe.generate(prompt, image=image_tensor, max_new_tokens=100)
             self.internvl_pipe.finish_chat()
 
-            # output is
-            return output
+            # Ensure output is string
+            return str(output)
 
         except Exception as e:
             return f"❌ Error: {str(e)}"
@@ -894,5 +895,4 @@ with gr.Blocks(css=css, title="Unified EDU Assistant") as demo:
     )
 
 if __name__ == "__main__":
-    demo.launch(share=True, debug=True, show_api=False)
-
+    demo.launch(share=True, debug=True, show_api=False)
```
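The first hunk builds OpenVINO plugin properties from the host CPU's feature flags and forwards them to the LLM pipeline. The sketch below restates that logic as a standalone helper, assuming the py-cpuinfo and openvino_genai packages and a local "mistral-ov" model directory; build_mistral_pipeline is a hypothetical name. Unlike the diff's exact-membership check, it matches any avx512* flag, since py-cpuinfo usually reports flags such as 'avx512f' and 'avx512bw' rather than a bare 'avx512'. The ENFORCE_BF16 and PERFORMANCE_HINT property names are carried over from the diff and may vary across OpenVINO releases (newer versions typically express the bf16 intent via INFERENCE_PRECISION_HINT).

```python
# Minimal sketch of the CPU-aware initialization; not the app's actual module layout.
import cpuinfo            # py-cpuinfo
import openvino_genai


def build_mistral_pipeline(model_dir: str = "mistral-ov") -> openvino_genai.LLMPipeline:
    """Create a CPU LLMPipeline with precision properties chosen from CPU flags."""
    flags = cpuinfo.get_cpu_info().get("flags", [])

    config_properties = {}
    if any(f.startswith("avx512") for f in flags):
        # AVX-512 hosts: request bf16 execution (property name taken from the diff;
        # newer OpenVINO releases may expect INFERENCE_PRECISION_HINT="bf16" instead).
        config_properties["ENFORCE_BF16"] = "YES"
    elif "avx2" in flags:
        # AVX2-only hosts: keep inference in f32.
        config_properties["INFERENCE_PRECISION_HINT"] = "f32"

    # Keyword arguments beyond the device are forwarded to the plugin as properties.
    return openvino_genai.LLMPipeline(
        model_dir,
        device="CPU",
        PERFORMANCE_HINT="THROUGHPUT",
        **config_properties,
    )
```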
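The second hunk coerces the InternVL pipeline's result to a string before returning it to the UI. As a hedged illustration of why: openvino_genai generate() calls may hand back a decoded-results object rather than a plain str, so normalizing the type in one place keeps the Gradio handler simple. normalize_output below is a hypothetical helper, not part of the app.

```python
def normalize_output(output) -> str:
    """Return pipeline output as plain text for the Gradio UI.

    Plain strings pass through unchanged; result objects are stringified,
    relying on their __str__ to yield the generated text, as str(output)
    does in the diff.
    """
    return output if isinstance(output, str) else str(output)
```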