	Update app.py
app.py CHANGED
@@ -36,6 +36,14 @@ pipe = ConsistentIDStableDiffusionPipeline.from_pretrained(
 ).to(device)
 
 ### Load other pretrained models
+@spaces.GPU
+def load_model(queue, bise_net_cp_path):
+    device = torch.device('cuda')
+    bise_net = BiSeNet(n_classes = 19)
+    bise_net.to(device)
+    bise_net.load_state_dict(torch.load(bise_net_cp_path))
+    bise_net.eval()
+    queue.put(bise_net)
 ## BiSenet
 bise_net_cp_path = hf_hub_download(repo_id="JackAILab/ConsistentID", filename="face_parsing.pth", repo_type="model")
 # Create a queue to share data between processes
@@ -58,15 +66,6 @@ pipe.load_ConsistentID_model(
 pipe.scheduler = EulerDiscreteScheduler.from_config(pipe.scheduler.config)
 
 
-@spaces.GPU
-def load_model(queue, bise_net_cp_path):
-    device = torch.device('cuda')
-    bise_net = BiSeNet(n_classes = 19)
-    bise_net.to(device)
-    bise_net.load_state_dict(torch.load(bise_net_cp_path))
-    bise_net.eval()
-    queue.put(bise_net)
-
 @spaces.GPU
 def process(inputImage,prompt,negative_prompt):
 

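For context, the relocated load_model helper initializes the BiSeNet face-parsing model on the GPU and hands it back through a queue rather than returning it, which matches the "Create a queue to share data between processes" comment kept in the surrounding context. Below is a minimal sketch of how such a loader could be driven; the BiSeNet import path and the consumer wiring are assumptions, since the rest of app.py is not shown in these hunks.

    # Sketch only: the import path and the consumer code are assumptions;
    # load_model itself mirrors the function moved in this commit (decorator omitted).
    import torch
    from multiprocessing import Queue
    from huggingface_hub import hf_hub_download
    from model import BiSeNet  # assumed import path for the BiSeNet definition

    def load_model(queue, bise_net_cp_path):
        device = torch.device('cuda')
        bise_net = BiSeNet(n_classes=19)
        bise_net.to(device)
        bise_net.load_state_dict(torch.load(bise_net_cp_path))
        bise_net.eval()
        queue.put(bise_net)

    # Download the face-parsing checkpoint, as in the diff's context lines.
    bise_net_cp_path = hf_hub_download(
        repo_id="JackAILab/ConsistentID",
        filename="face_parsing.pth",
        repo_type="model",
    )

    queue = Queue()
    load_model(queue, bise_net_cp_path)  # on the Space, @spaces.GPU wraps this call
    bise_net = queue.get()               # retrieve the initialized model from the queue

If the loader is instead launched in a separate worker process, a torch.multiprocessing queue would be needed, and CUDA tensors shared that way require the producer process to stay alive while the consumer still uses them.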