prateekbh commited on
Commit
fc81c90
·
verified ·
1 Parent(s): dd1124f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -7
app.py CHANGED
@@ -61,8 +61,8 @@ def getProductDetails(history, image):
61
  do_sample=True,
62
  seed=rand_val,
63
  )
64
- system_prompt="you're a helpful e-commerce marketting assitant"
65
- prompt="Write me a poem"
66
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
67
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
68
  output = ""
@@ -76,15 +76,11 @@ def getProductDetails(history, image):
76
 
77
  @torch.no_grad()
78
  def getImageDescription(image):
79
- message = "Generate a product title for the image"
80
  gr.Info('Starting...' + message)
81
  stop = StopOnTokens()
82
  messages = [{"role": "system", "content": "You are a helpful assistant."}]
83
 
84
- # for user_msg, assistant_msg in history:
85
- # messages.append({"role": "user", "content": user_msg})
86
- # messages.append({"role": "assistant", "content": assistant_msg})
87
-
88
  if len(messages) == 1:
89
  message = f" <image>{message}"
90
 
 
61
  do_sample=True,
62
  seed=rand_val,
63
  )
64
+ system_prompt="You're a helpful e-commerce marketing assistant."
65
+ prompt="Our product description is as follows: " + product_description + ". Please write four product title options for this art product."
66
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
67
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
68
  output = ""
 
76
 
77
  @torch.no_grad()
78
  def getImageDescription(image):
79
+ message = "Generate an e-commerce product description for the image"
80
  gr.Info('Starting...' + message)
81
  stop = StopOnTokens()
82
  messages = [{"role": "system", "content": "You are a helpful assistant."}]
83
 
 
 
 
 
84
  if len(messages) == 1:
85
  message = f" <image>{message}"
86