rahul5035 committed · Commit 65298d5 · verified · 1 Parent(s): b2a1fcb

Create app.py

Files changed (1)
app.py +57 -0
app.py ADDED
@@ -0,0 +1,57 @@
+ import streamlit as st
+ import subprocess
+ from PIL import Image
+ import requests
+ from io import BytesIO
+ from transformers import AutoModel, AutoTokenizer
+ import torch
+
+ torch.cuda.empty_cache()
+
+ # Load the model and tokenizer on CPU
+ model = AutoModel.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5-int4', trust_remote_code=True).to('cpu')
+ tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5-int4', trust_remote_code=True)
+ model.eval()
+
+ # Title of the app
+ st.title("Streamlit App with Image URL and Prompts")
+
+ # Text area for image URL
+ image_url = st.text_area("Enter Image URL:")
+
+ # Text area for system prompt input
+ system_prompt = st.text_area("Enter System Prompt:")
+
+ # Text area for user prompt input
+ question = st.text_area("Enter User Prompt:")
+
+ # Button to submit and display the image
+ if st.button("Submit"):
+     if image_url:
+         try:
+             subprocess.run(['wget', image_url, '-O', 'flowchart.png'], check=True)  # check=True raises if the download fails
+             response = requests.get(image_url)
+             img = Image.open(BytesIO(response.content))
+             st.image(img, caption="Image from URL")
+         except Exception as e:
+             st.error(f"Error loading image. Please submit another image URL with a .png or .jpg extension: {e}")
+     else:
+         st.warning("Please enter an image URL.")
+
+     # Run the model on the downloaded image with the supplied prompts
+     if system_prompt and question:
+         image = Image.open('flowchart.png').convert('RGB')
+         msgs = [{'role': 'user', 'content': question}]
+
+         res = model.chat(
+             image=image,
+             msgs=msgs,
+             tokenizer=tokenizer,
+             sampling=True,  # if sampling=False, beam search is used by default
+             temperature=0.7,
+             system_prompt=system_prompt  # pass the system prompt if needed
+         )
+
+         st.text_area("Output:", value=res, height=200)
+     else:
+         st.warning("Please enter both system prompt and user prompt.")
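
A quick way to sanity-check the model outside the Streamlit UI is to call the same chat API directly. The sketch below mirrors app.py's loading code and model.chat() call; the script name, the local image path flowchart.png, and the example prompts are placeholders for illustration, not part of the commit:

# sanity_check.py - minimal sketch mirroring app.py's inference path
# (assumes the int4 checkpoint loads on CPU as in app.py and that a local test image exists)
from PIL import Image
from transformers import AutoModel, AutoTokenizer

# Same checkpoint and CPU placement as app.py
model = AutoModel.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5-int4', trust_remote_code=True).to('cpu')
tokenizer = AutoTokenizer.from_pretrained('openbmb/MiniCPM-Llama3-V-2_5-int4', trust_remote_code=True)
model.eval()

image = Image.open('flowchart.png').convert('RGB')  # placeholder image path
msgs = [{'role': 'user', 'content': 'Describe this flowchart.'}]  # placeholder user prompt

res = model.chat(
    image=image,
    msgs=msgs,
    tokenizer=tokenizer,
    sampling=True,
    temperature=0.7,
    system_prompt='You are a helpful assistant.'  # placeholder system prompt
)
print(res)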