Update app.py
app.py CHANGED

@@ -10,7 +10,7 @@ subprocess.run('pip install mistral_inference mamba-ssm --no-build-isolation', e
 subprocess.run('pip install causal-conv1d --no-build-isolation', env={'CAUSAL_CONV1D_SKIP_CUDA_BUILD': "TRUE"}, shell=True)

 # Import after installation
-from mistral_inference.
+from mistral_inference.mamba import Mamba
 from mistral_inference.generate import generate
 from mistral_common.tokens.tokenizers.mistral import MistralTokenizer
 from mistral_common.protocol.instruct.messages import UserMessage, AssistantMessage
@@ -28,7 +28,7 @@ MODEL_PATH = str(mistral_models_path)

 # Load model and tokenizer
 tokenizer = MistralTokenizer.from_file(os.path.join(MODEL_PATH, "tokenizer.model.v3"))
-model =
+model = Mamba.from_folder(MODEL_PATH)


 @spaces.GPU()
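For context, the two completed lines plug the Mamba-specific gaps in the usual mistral_inference loading/generation flow. The sketch below is not part of this diff: the ChatCompletionRequest import path, the generate() keyword arguments, the eos_id/decode accessor chain, and the placeholder model path are assumptions based on the mistral_inference and mistral_common documentation, not on anything shown in app.py here.

# Sketch only (assumption): how app.py presumably uses the pieces this diff completes.
from mistral_inference.mamba import Mamba
from mistral_inference.generate import generate
from mistral_common.tokens.tokenizers.mistral import MistralTokenizer
from mistral_common.protocol.instruct.messages import UserMessage
from mistral_common.protocol.instruct.request import ChatCompletionRequest  # assumed import path

MODEL_PATH = "mistral_models/mamba-codestral-7B-v0.1"  # placeholder; app.py derives it from mistral_models_path

tokenizer = MistralTokenizer.from_file(f"{MODEL_PATH}/tokenizer.model.v3")
model = Mamba.from_folder(MODEL_PATH)

def chat(prompt: str) -> str:
    # Tokenize a single-turn instruct request with the v3 tokenizer loaded above.
    request = ChatCompletionRequest(messages=[UserMessage(content=prompt)])
    tokens = tokenizer.encode_chat_completion(request).tokens
    # Run generation; whether generate() accepts a Mamba model directly is
    # assumed from the import this diff keeps (it does not switch to a
    # Mamba-specific generate helper).
    out_tokens, _ = generate(
        [tokens],
        model,
        max_tokens=256,
        temperature=0.7,
        eos_id=tokenizer.instruct_tokenizer.tokenizer.eos_id,  # assumed accessor chain
    )
    return tokenizer.instruct_tokenizer.tokenizer.decode(out_tokens[0])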