	Update app.py
app.py CHANGED
@@ -172,16 +172,16 @@ retriever = db.as_retriever(
 
 
 quantization_config = BitsAndBytesConfig(
-    …
-    …
-    …
-    …
+    load_in_4bit=True,
+    bnb_4bit_compute_dtype=torch.bfloat16,
+    bnb_4bit_quant_type="nf4",
+    bnb_4bit_use_double_quant=True
 )
 
 
 
 
-model_id = "…
+model_id = "Daemontatox/MawaredT3"
 tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 model = AutoModelForCausalLM.from_pretrained(
@@ -280,11 +280,11 @@ def clear_chat():
     return [], ""
 
 # Gradio Interface
-with gr.Blocks(theme=…
+with gr.Blocks(theme='lone17/kotaemon') as iface:
     gr.Image("Image.jpg" , width=1200 , height=300 ,show_label=False, show_download_button=False)
-    gr.Markdown("# Mawared HR Assistant")
+    gr.Markdown("# Mawared HR Assistant 2.5.1")
     gr.Markdown('### Instructions')
-    gr.Markdown("…
+    gr.Markdown("Ask a question about MawaredHR and get a detailed answer. If you get an error, try again with the same prompt; it's an API issue and we are working on it.")
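For context, the new 4-bit settings only take effect if the quantization_config object is handed to the AutoModelForCausalLM.from_pretrained call that this hunk truncates. A minimal sketch of the likely wiring, assuming the call passes quantization_config and device_map="auto" (neither keyword is visible in the diff):

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# 4-bit NF4 quantization settings introduced by this commit
quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # store weights in 4-bit
    bnb_4bit_compute_dtype=torch.bfloat16,  # run matmuls in bfloat16
    bnb_4bit_quant_type="nf4",              # NormalFloat4 quantization
    bnb_4bit_use_double_quant=True,         # also quantize the quantization constants
)

model_id = "Daemontatox/MawaredT3"
tokenizer = AutoTokenizer.from_pretrained(model_id)

model = AutoModelForCausalLM.from_pretrained(
    model_id,
    quantization_config=quantization_config,  # assumption: the truncated call passes the config here
    device_map="auto",                        # assumption: let accelerate place layers automatically
)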
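The second hunk only touches the header of the Gradio UI. A rough sketch of how those lines sit inside the Blocks context after this commit; the chat widgets and the launch call are assumptions, since they fall outside the hunk:

import gradio as gr

with gr.Blocks(theme='lone17/kotaemon') as iface:
    # Banner image and headers shown at the top of the app
    gr.Image("Image.jpg", width=1200, height=300,
             show_label=False, show_download_button=False)
    gr.Markdown("# Mawared HR Assistant 2.5.1")
    gr.Markdown('### Instructions')
    gr.Markdown("Ask a question about MawaredHR and get a detailed answer. "
                "If you get an error, try again with the same prompt; "
                "it's an API issue and we are working on it.")
    # chatbot, textbox, and clear_chat wiring continue below this hunk (assumption)

iface.launch()  # assumption: the Space starts the Blocks app this way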
