Update main.py
main.py
CHANGED
@@ -4,10 +4,7 @@ import numpy as np # Provides support for large, multi-dimensional arrays and m
 from wiki import search as search_wikipedia # Import the search function from the wiki module and rename it
 from concurrent.futures import ThreadPoolExecutor # Import ThreadPoolExecutor for concurrent execution
 from llm_handler import send_to_llm # Import the send_to_llm function from the llm_handler module
-from params import OUTPUT_FILE_PATH, NUM_WORKERS
-
-# Set the provider for the language model to "local-model"
-PROVIDER = "local-model"
+from params import OUTPUT_FILE_PATH, NUM_WORKERS # Import constants from the params module
 
 # Import system messages from the system_messages module
 from system_messages import (
@@ -39,8 +36,7 @@ async def generate_data(
     topic_selected,
     system_message_generation,
     system_message_selected,
-    output_file_path,
-    llm_provider
+    output_file_path
 ):
     # Fetch Wikipedia content for the selected topic
     wikipedia_info = search_wikipedia(topic_selected)
@@ -58,7 +54,7 @@ async def generate_data(
     msg_list = [msg_context, {"role": "user", "content": f"Generate a question based on the SUBJECT_AREA: {topic_selected}"}]
 
     # Send to LLM for question generation
-    question, _ = send_to_llm(
+    question, _ = send_to_llm(msg_list)
 
     # Prepare message list for LLM to generate the answer
     msg_list_answer = [
@@ -67,7 +63,7 @@ async def generate_data(
     ]
 
     # Send to LLM for answer generation
-    answer, _ = send_to_llm(
+    answer, _ = send_to_llm(msg_list_answer)
 
     # Prepare data for output (excluding usage information)
     data = {
@@ -101,8 +97,7 @@ def main():
                 topic_selected,
                 system_message_generation,
                 system_message_selected,
-                OUTPUT_FILE_PATH,
-                PROVIDER
+                OUTPUT_FILE_PATH
             )
         )
 
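With this change, `send_to_llm` is called with only the message list, which suggests the provider is now resolved inside `llm_handler` rather than passed in from `main.py`. A minimal sketch of what that handler could look like, assuming an OpenAI-compatible local endpoint and a `(text, usage)` return value; the client setup, base URL, and model name below are illustrative guesses, not part of this commit:

# llm_handler.py -- illustrative sketch only; the real module is not shown in this commit.
# Assumes the provider removed from main.py ("local-model") is now configured here and
# served through an OpenAI-compatible endpoint, and that send_to_llm returns (text, usage).
from openai import OpenAI

client = OpenAI(base_url="http://localhost:1234/v1", api_key="not-needed")  # hypothetical local server

def send_to_llm(msg_list):
    # Forward the chat messages to the local model and return the reply text plus usage stats.
    response = client.chat.completions.create(
        model="local-model",
        messages=msg_list,
    )
    return response.choices[0].message.content, response.usage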
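The last hunk only shows the trimmed argument list inside `main()`; the surrounding dispatch code is not visible in the diff. Given the `ThreadPoolExecutor` and `NUM_WORKERS` imports, one plausible wiring is sketched below; the topic list, system-message placeholders, and the `asyncio.run` bridge into worker threads are assumptions for illustration only:

# Illustrative sketch of the call site after this change -- not the actual main().
import asyncio
from concurrent.futures import ThreadPoolExecutor

NUM_WORKERS = 4                                      # placeholder; the real value comes from params
OUTPUT_FILE_PATH = "dataset.jsonl"                   # placeholder; the real value comes from params
TOPICS = ["Graph theory", "Photosynthesis"]          # placeholder topics
SYSTEM_MESSAGE_GENERATION = "Generate a question."   # placeholder system message
SYSTEM_MESSAGE_SELECTED = "Answer the question."     # placeholder system message

def main():
    with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
        futures = [
            executor.submit(
                asyncio.run,        # run the async generate_data coroutine in a worker thread
                generate_data(
                    topic,
                    SYSTEM_MESSAGE_GENERATION,
                    SYSTEM_MESSAGE_SELECTED,
                    OUTPUT_FILE_PATH,
                ),
            )
            for topic in TOPICS
        ]
        for future in futures:
            future.result()         # re-raise any exception from a worker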