	Update app.py
app.py CHANGED
    
@@ -3,12 +3,14 @@ from huggingface_hub import InferenceClient
 import spaces
 import torch
 import os
-
+import platform
+print(platform.python_version())
 model = ""
 duration = 1
 print(f"Is CUDA available: {torch.cuda.is_available()}")
 print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")
 
+
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
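
For reference, a minimal standalone sketch of the startup diagnostics app.py prints after this commit. The if-guard around the CUDA device-name call is an addition in this sketch only (the committed file calls it unconditionally), so the sketch also runs on CPU-only hardware.

import platform

import torch

# Report the interpreter version the Space is running on.
print(f"Python version: {platform.python_version()}")

# Report GPU availability. torch.cuda.get_device_name() raises when no CUDA
# device is present, so the sketch guards it; the committed app.py does not.
print(f"Is CUDA available: {torch.cuda.is_available()}")
if torch.cuda.is_available():
    print(f"CUDA device: {torch.cuda.get_device_name(torch.cuda.current_device())}")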