import os
import re
import json
import base64
import requests
import torch
import nest_asyncio
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
from sentence_transformers import SentenceTransformer, models
import gradio as gr

# Apply nest_asyncio to allow async operations in the notebook/Spaces runtime.
nest_asyncio.apply()

# Read API tokens from the environment (set these as Space secrets).
HF_TOKEN = os.environ.get("HF_TOKEN")
GITHUB_TOKEN = os.environ.get("GITHUB_TOKEN")
############################################
# GitHub API Functions
############################################

def extract_repo_info(github_url: str):
    """Parse a GitHub URL into an (owner, repo) pair."""
    pattern = r"github\.com/([^/]+)/([^/]+)"
    match = re.search(pattern, github_url)
    if match:
        owner = match.group(1)
        # Strip only a trailing ".git" suffix, not every ".git" substring.
        repo = re.sub(r'\.git$', '', match.group(2))
        return owner, repo
    else:
        raise ValueError("Invalid GitHub URL provided.")
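# Example (illustrative):
#   extract_repo_info("https://github.com/huggingface/transformers.git")
#   returns ("huggingface", "transformers").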
def get_repo_metadata(owner: str, repo: str):
    headers = {'Authorization': f'token {GITHUB_TOKEN}'}
    repo_url = f"https://api.github.com/repos/{owner}/{repo}"
    response = requests.get(repo_url, headers=headers)
    return response.json()

def get_repo_tree(owner: str, repo: str, branch: str):
    headers = {'Authorization': f'token {GITHUB_TOKEN}'}
    tree_url = f"https://api.github.com/repos/{owner}/{repo}/git/trees/{branch}?recursive=1"
    response = requests.get(tree_url, headers=headers)
    data = response.json()
    print("Repo Tree Data:", json.dumps(data, indent=2))
    return data
def get_file_content(owner: str, repo: str, file_path: str):
    headers = {'Authorization': f'token {GITHUB_TOKEN}'}
    content_url = f"https://api.github.com/repos/{owner}/{repo}/contents/{file_path}"
    response = requests.get(content_url, headers=headers)
    data = response.json()
    # The contents API returns file bodies base64-encoded in the "content" field.
    if 'content' in data:
        return base64.b64decode(data['content']).decode('utf-8')
    else:
        return None
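# Example (illustrative; assumes GITHUB_TOKEN is set and the file exists):
#   owner, repo = extract_repo_info("https://github.com/pallets/flask")
#   readme = get_file_content(owner, repo, "README.md")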
############################################
# Embedding Functions
############################################

def preprocess_text(text: str) -> str:
    """Trim the text and collapse runs of whitespace."""
    cleaned_text = text.strip()
    cleaned_text = re.sub(r'\s+', ' ', cleaned_text)
    return cleaned_text

def load_embedding_model(model_name: str = 'huggingface/CodeBERTa-small-v1') -> SentenceTransformer:
    """Wrap a code-aware transformer with mean pooling to get sentence embeddings."""
    transformer_model = models.Transformer(model_name)
    pooling_model = models.Pooling(transformer_model.get_word_embedding_dimension(),
                                   pooling_mode_mean_tokens=True)
    model = SentenceTransformer(modules=[transformer_model, pooling_model])
    return model

def generate_embedding(text: str, model_name: str = 'huggingface/CodeBERTa-small-v1') -> list:
    processed_text = preprocess_text(text)
    model = load_embedding_model(model_name)
    embedding = model.encode(processed_text)
    # encode() returns a NumPy array; convert it to match the declared return type.
    return embedding.tolist()
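# Note: these embedding helpers are not wired into the chat flow below. A
# minimal sketch of how they could be used for similarity search (illustrative):
#   from sentence_transformers import util
#   emb_a = generate_embedding("def add(a, b): return a + b")
#   emb_b = generate_embedding("def sum_two(x, y): return x + y")
#   score = util.cos_sim(emb_a, emb_b)  # cosine similarity; higher = more similar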
############################################
# LLM Integration Functions
############################################

def is_detailed_query(query: str) -> bool:
    keywords = ["detail", "detailed", "thorough", "in depth", "comprehensive", "extensive"]
    return any(keyword in query.lower() for keyword in keywords)

def generate_prompt(query: str, context_snippets: list) -> str:
    context = "\n\n".join(context_snippets)
    if is_detailed_query(query):
        instruction = "Provide an extremely detailed and thorough explanation of at least 500 words."
    else:
        instruction = "Answer concisely."
    prompt = (
        f"Below is some context from a GitHub repository:\n\n"
        f"{context}\n\n"
        f"Based on the above, {instruction}\n{query}\n"
        f"Answer:"
    )
    return prompt
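# Example (illustrative):
#   generate_prompt("Explain this file in detail", ["def add(a, b): return a + b"])
# embeds the snippet as context, selects the 500-word instruction (the query
# contains "detail"), and ends the prompt with "Answer:" for easy splitting.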
# Cache loaded pipelines so the model is not reloaded on every query.
_text_gen_cache = {}

def get_llm_response(prompt: str, model_name: str = "meta-llama/Llama-2-7b-chat-hf", max_new_tokens: int = None) -> str:
    if max_new_tokens is None:
        max_new_tokens = 1024 if is_detailed_query(prompt) else 256
    if model_name not in _text_gen_cache:
        torch.cuda.empty_cache()
        # Load tokenizer and model with authentication using the 'token' parameter.
        tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True, token=HF_TOKEN)
        model = AutoModelForCausalLM.from_pretrained(
            model_name,
            device_map="auto",
            use_safetensors=False,
            trust_remote_code=True,
            torch_dtype=torch.float16,
            token=HF_TOKEN
        )
        _text_gen_cache[model_name] = pipeline("text-generation", model=model, tokenizer=tokenizer)
    text_gen = _text_gen_cache[model_name]
    outputs = text_gen(prompt, max_new_tokens=max_new_tokens, do_sample=True, temperature=0.7)
    full_response = outputs[0]['generated_text']
    # The pipeline echoes the prompt, so keep only the text after the "Answer:" marker.
    marker = "Answer:"
    if marker in full_response:
        answer = full_response.split(marker, 1)[1].strip()
    else:
        answer = full_response.strip()
    return answer
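# Note: meta-llama/Llama-2-7b-chat-hf is gated on the Hugging Face Hub, so
# HF_TOKEN must belong to an account that has accepted the model license.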
############################################
# Gradio Interface Functions
############################################

def load_repo_contents(github_url: str):
    """Return the list of file paths in the repository, or an error string."""
    try:
        owner, repo = extract_repo_info(github_url)
    except Exception as e:
        return f"Error: {str(e)}"
    repo_data = get_repo_metadata(owner, repo)
    default_branch = repo_data.get("default_branch", "main")
    tree_data = get_repo_tree(owner, repo, default_branch)
    if "tree" not in tree_data:
        return "Error: Could not fetch repository tree."
    # "blob" entries are files; "tree" entries are directories.
    file_list = [item["path"] for item in tree_data["tree"] if item["type"] == "blob"]
    return file_list

def get_file_content_for_choice(github_url: str, file_path: str):
    """Return (content, file_path) for the selected file, or an error string.

    The dropdown passes the selected file path directly, so no index lookup
    (and no extra tree fetch) is needed here.
    """
    try:
        owner, repo = extract_repo_info(github_url)
    except Exception as e:
        return str(e)
    content = get_file_content(owner, repo, file_path)
    if content is None:
        return "Error: Could not fetch file content."
    return content, file_path

def chat_with_file(github_url: str, file_path: str, user_query: str):
    result = get_file_content_for_choice(github_url, file_path)
    if isinstance(result, str):
        return result  # Error message
    file_content, selected_file = result
    preprocessed = preprocess_text(file_content)
    context_snippet = preprocessed[:1000]  # use first 1000 characters as context
    prompt = generate_prompt(user_query, [context_snippet])
    llm_response = get_llm_response(prompt)
    return f"File: {selected_file}\n\nLLM Response:\n{llm_response}"
############################################
# Gradio Interface Setup
############################################

with gr.Blocks() as demo:
    gr.Markdown("# RepoChat - Chat with Repository Files")
    with gr.Row():
        with gr.Column(scale=1):
            gr.Markdown("### Repository Information")
            github_url_input = gr.Textbox(label="GitHub Repository URL",
                                          placeholder="https://github.com/username/repository")
            load_repo_btn = gr.Button("Load Repository Contents")
            file_dropdown = gr.Dropdown(label="Select a File", choices=[], interactive=True)
            repo_content_output = gr.Textbox(label="File Content", interactive=False, lines=10)
        with gr.Column(scale=2):
            gr.Markdown("### Chat Interface")
            chat_query_input = gr.Textbox(label="Your Query", placeholder="Type your query here")
            chat_output = gr.Textbox(label="Chatbot Response", interactive=False, lines=10)
            chat_btn = gr.Button("Send Query")

    # When clicking "Load Repository Contents", populate the file dropdown.
    # Returning a plain list would only set the dropdown's value, so use
    # gr.update to replace its choices.
    def update_file_dropdown(github_url):
        files = load_repo_contents(github_url)
        if isinstance(files, str):  # an error message rather than a file list
            return gr.update(choices=[], value=None)
        return gr.update(choices=files, value=None)

    load_repo_btn.click(fn=update_file_dropdown, inputs=[github_url_input], outputs=[file_dropdown])

    # When the file selection changes, display that file's content. The
    # dropdown's value is the selected file path string.
    def update_repo_content(github_url, file_path):
        if not file_path:
            return "No file selected."
        result = get_file_content_for_choice(github_url, file_path)
        if isinstance(result, str):
            return result  # Error message
        content, _ = result
        return content

    file_dropdown.change(fn=update_repo_content, inputs=[github_url_input, file_dropdown], outputs=[repo_content_output])

    # When sending a chat query, run it against the selected file.
    def process_chat(github_url, file_path, chat_query):
        if not file_path:
            return "No file selected."
        return chat_with_file(github_url, file_path, chat_query)

    chat_btn.click(fn=process_chat, inputs=[github_url_input, file_dropdown, chat_query_input], outputs=[chat_output])

demo.launch()