levalencia committed on
Commit
c6e2c6b
·
1 Parent(s): 5daf42e

multiple files

Browse files
Files changed (1) hide show
  1. app.py +13 -6
app.py CHANGED
@@ -35,17 +35,23 @@ def generate_response(
35
  max_tokens: int,
36
  temperature: float,
37
  top_p: float,
38
- file=None
39
  ) -> Iterator[str]:
40
  """Generate streaming response from the model"""
41
  client = get_client()
42
 
43
  # Process file if uploaded
44
- file_content = process_file(file) if file else ""
 
 
 
 
 
 
 
45
 
46
- # If there's file content, append it to the message
47
- if file_content:
48
- message = f"File content:\n{file_content}\n\nUser message:\n{message}"
49
 
50
  messages = [{"role": "system", "content": system_message}]
51
 
@@ -118,7 +124,8 @@ def main():
118
  uploaded_file = st.file_uploader(
119
  "Upload File (optional)",
120
  type=['txt', 'py', 'md', 'swift', 'java', 'js', 'ts', 'rb', 'go',
121
- 'php', 'c', 'cpp', 'h', 'hpp', 'cs', 'html', 'css', 'kt']
 
122
  )
123
 
124
  # Display chat messages
 
35
  max_tokens: int,
36
  temperature: float,
37
  top_p: float,
38
+ files=None
39
  ) -> Iterator[str]:
40
  """Generate streaming response from the model"""
41
  client = get_client()
42
 
43
  # Process file if uploaded
44
+ # Process multiple files if uploaded
45
+ all_content = ""
46
+ if files:
47
+ file_contents = [process_file(file) for file in files]
48
+ all_content = "\n\n".join([
49
+ f"File {i+1} content:\n{content}"
50
+ for i, content in enumerate(file_contents)
51
+ ])
52
 
53
+ if all_content:
54
+ message = f"{all_content}\n\nUser message:\n{message}"
 
55
 
56
  messages = [{"role": "system", "content": system_message}]
57
 
 
124
  uploaded_file = st.file_uploader(
125
  "Upload File (optional)",
126
  type=['txt', 'py', 'md', 'swift', 'java', 'js', 'ts', 'rb', 'go',
127
+ 'php', 'c', 'cpp', 'h', 'hpp', 'cs', 'html', 'css', 'kt'],
128
+ accept_multiple_files=True # Add this parameter
129
  )
130
 
131
  # Display chat messages