# import streamlit as st
# import torch
# import torchvision.transforms as T
# from PIL import Image
#
# # Assuming the necessary packages (featup, clip, etc.) are installed and accessible
# from featup.util import norm, unnorm
# from featup.plotting import plot_feats
#
# # Setup - ensure the repository content is accessible in the environment
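# # One way to satisfy this (an assumption, not stated in the original notes) is to
# # install FeatUp directly from its GitHub repository:
# #   pip install git+https://github.com/mhamilton723/FeatUp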
#
# # Streamlit UI
# st.title("Feature Upsampling Demo")
#
# # File uploader
# uploaded_file = st.file_uploader("Choose an image...", type=["png", "jpg", "jpeg"])
# if uploaded_file is not None:
#     image = Image.open(uploaded_file).convert("RGB")
#
#     # Image preprocessing
#     input_size = 224
#     transform = T.Compose([
#         T.Resize(input_size),
#         T.CenterCrop((input_size, input_size)),
#         T.ToTensor(),
#         norm
#     ])
#
#     image_tensor = transform(image).unsqueeze(0).cuda()  # assumes a CUDA device is available
#
#     # Model selection
#     model_option = st.selectbox(
#         'Choose a model for feature upsampling',
#         ('dino16', 'dinov2', 'clip', 'resnet50')
#     )
#
#     if st.button('Upsample Features'):
#         # Load the selected model
#         upsampler = torch.hub.load("mhamilton723/FeatUp", model_option).cuda()
#         hr_feats = upsampler(image_tensor)
#         lr_feats = upsampler.model(image_tensor)
#
#         # Plotting: plot_feats targets a regular (non-Streamlit) display, so this step
#         # will likely need customization; one possible alternative is sketched below.
#         plot_feats(unnorm(image_tensor)[0], lr_feats[0], hr_feats[0])
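#
# #         A minimal sketch (an assumption, not part of the original demo) of one way
# #         to show the results in Streamlit without plot_feats: collapse each feature
# #         map's channel dimension, rescale it to [0, 1], and pass the arrays to
# #         st.image. The helper name `to_image` is hypothetical.
#         def to_image(feats):
#             fmap = feats.mean(dim=0)              # (C, H, W) -> (H, W)
#             fmap = fmap - fmap.min()              # shift so the minimum is 0
#             return (fmap / (fmap.max() + 1e-8)).detach().cpu().numpy()
#
#         col_lr, col_hr = st.columns(2)
#         col_lr.image(to_image(lr_feats[0]), caption="Low-res features")
#         col_hr.image(to_image(hr_feats[0]), caption="Upsampled features")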


import streamlit as st
import torch

def check_gpu_status():
    # Check if CUDA (GPU support) is available in PyTorch
    cuda_available = torch.cuda.is_available()
    gpu_count = torch.cuda.device_count()
    gpu_name = torch.cuda.get_device_name(0) if cuda_available else "Not Available"

    return cuda_available, gpu_count, gpu_name

# Streamlit page configuration
st.title("PyTorch GPU Availability Test")

# Checking the GPU status
cuda_available, gpu_count, gpu_name = check_gpu_status()

# Displaying the results
if cuda_available:
    st.success(f"GPU is available! 🎉")
    st.info(f"Number of GPUs available: {gpu_count}")
    st.info(f"GPU Name: {gpu_name}")
else:
    st.error("GPU is not available. 😢")