import os
import json
import pandas as pd
from datetime import date
import gradio as gr
import autogen
from autogen.cache import Cache
from finrobot.utils import get_current_date
from finrobot.data_source import FinnHubUtils, YFinanceUtils
from dotenv import load_dotenv
# Load environment variables from .env file
load_dotenv()
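
# The script expects its credentials in a local .env file read by load_dotenv().
# A minimal example of the layout (key names inferred from the os.getenv calls
# below; values are placeholders, not real keys):
#
#   OPENAI_API_KEY=sk-...
#   FINNHUB_API_KEY=your_finnhub_key
#   FMP_API_KEY=your_fmp_key
#   SEC_API_KEY=your_sec_api_key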

# Utility functions
def save_output(data: pd.DataFrame, tag: str, save_path: str = None) -> None:
    if save_path:
        data.to_csv(save_path)
        print(f"{tag} saved to {save_path}")

def get_current_date():
    # Note: shadows the get_current_date imported from finrobot.utils above.
    return date.today().strftime("%Y-%m-%d")

def register_keys():
    keys = {
        "FINNHUB_API_KEY": os.getenv("FINNHUB_API_KEY"),
        "FMP_API_KEY": os.getenv("FMP_API_KEY"),
        "SEC_API_KEY": os.getenv("SEC_API_KEY"),
    }
    for key, value in keys.items():
        if value:
            os.environ[key] = value

def read_response_from_md(filename):
    with open(filename, "r") as file:
        content = file.read()
    return content

def save_to_md(content, filename):
    with open(filename, "w") as file:  # Use write mode to overwrite the file
        file.write(content + "\n")
    print(f"Content saved to {filename}")

# Initialize LLM configuration
config_list = [
    {
        "model": "gpt-4o",
        "api_key": os.getenv("OPENAI_API_KEY"),
    }
]
llm_config = {"config_list": config_list, "timeout": 120, "temperature": 0}

# Register the Finnhub/FMP/SEC API keys from the environment
register_keys()

# Define agents
analyst = autogen.AssistantAgent(
    name="Market_Analyst",
    system_message=(
        "As a Market Analyst, one must possess strong analytical and problem-solving "
        "abilities, collect necessary financial information and aggregate it based on "
        "the client's requirements. For coding tasks, only use the functions you have "
        "been provided with. Reply TERMINATE when the task is done."
    ),
    llm_config=llm_config,
)
user_proxy = autogen.UserProxyAgent(
    name="User_Proxy",
    is_termination_msg=lambda x: x.get("content", "") and x.get("content", "").strip().endswith("TERMINATE"),
    human_input_mode="NEVER",
    max_consecutive_auto_reply=10,
    code_execution_config={
        "work_dir": "coding",
        "use_docker": False,
    },
)
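
# The Market_Analyst (LLM) proposes tool calls and the analysis; the User_Proxy
# executes them locally in ./coding (no Docker), auto-replying up to 10 times
# and stopping once a reply ends with "TERMINATE".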

# Register tools
from finrobot.toolkits import register_toolkits

tools = [
    {
        "function": FinnHubUtils.get_company_profile,
        "name": "get_company_profile",
        "description": "get a company's profile information",
    },
    {
        "function": FinnHubUtils.get_company_news,
        "name": "get_company_news",
        "description": "retrieve market news related to designated company",
    },
    {
        "function": FinnHubUtils.get_basic_financials,
        "name": "get_financial_basics",
        "description": "get latest financial basics for a designated company",
    },
    {
        "function": YFinanceUtils.get_stock_data,
        "name": "get_stock_data",
        "description": "retrieve stock price data for designated ticker symbol",
    },
]
register_toolkits(tools, analyst, user_proxy)
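
# register_toolkits comes from finrobot and, following the usual autogen tool
# pattern, presumably registers each function above so the Market_Analyst can
# request the calls and the User_Proxy executes them.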

def save_response_to_json(response, filename):
    response_dict = {
        "chat_id": response.chat_id,
        "chat_history": response.chat_history,
        "summary": response.summary,
        "cost": response.cost,
        "human_input": response.human_input,
    }
    with open(filename, "w") as file:
        file.write(json.dumps(response_dict, indent=4))
    print(f"Response saved to {filename}")

# Function to initiate chat and save response
def initiate_chat_and_save_response(analyst, user_proxy, company):
    today_date = get_current_date()
    json_filename = f"result_{company}_{today_date}.json"
    md_filename = f"result_{company}_{today_date}.md"
    # Check if the Markdown file already exists
    if os.path.exists(md_filename):
        return read_response_from_md(md_filename)
    with Cache.disk() as cache:
        response = user_proxy.initiate_chat(
            analyst,
            message=(
                f"Use all the tools provided to retrieve information available for {company} "
                f"upon {get_current_date()}. Analyze the positive developments and potential "
                f"concerns of {company} with 2-4 most important factors respectively and keep "
                "them concise. Most factors should be inferred from company related news. "
                f"Then make a rough prediction (e.g. up/down by %) of the {company} stock "
                "price movement for next week. Provide a summary analysis to support your "
                "prediction."
            ),
            cache=cache,
        )
    save_response_to_json(response, json_filename)
    return json.dumps(response.chat_history, indent=4)

def filter_user_content(chat_history):
    # Parse chat_history (a JSON string) into Python objects
    chat_history_dict = json.loads(chat_history)
    # Find the content the user needs: the first user-role message containing "###"
    for entry in chat_history_dict:
        if entry['role'] == 'user' and "###" in entry['content']:
            return entry['content']
    return "No relevant content found."

# Use the filtering helper inside analyze_company
def analyze_company(company):
    if company:
        company = company.upper()
        today_date = get_current_date()
        md_filename = f"result_{company}_{today_date}.md"
        # Check if the Markdown file already exists
        if os.path.exists(md_filename):
            return read_response_from_md(md_filename)
        content = initiate_chat_and_save_response(analyst, user_proxy, company)
        # Filter out the relevant user content
        filtered_content = filter_user_content(content)
        save_to_md(filtered_content, md_filename)  # Save only the filtered content
        return filtered_content

# Custom CSS styles
custom_css = """
h1, h2, h3, h4, h5, h6 {
    font-family: 'Arial', sans-serif;
    font-weight: bold;
}

body {
    font-family: 'Arial', sans-serif;
}

.gradio-container {
    max-width: 800px;
    margin: auto;
    padding: 20px;
    border: 1px solid #ccc;
    border-radius: 10px;
    box-shadow: 0 0 10px rgba(0, 0, 0, 0.1);
}

textarea, input, .btn-primary {
    font-size: 16px !important;
    padding: 10px !important;
    border-radius: 5px !important;
}

#component-0 > .wrap > .block.markdown-block > .markdown {
    font-size: 24px !important;
    line-height: 1.8 !important;
}
"""

# Gradio interface
iface = gr.Interface(
    fn=analyze_company,
    inputs=gr.Textbox(lines=1, placeholder="Enter a company name or stock code"),
    outputs=gr.Markdown(label="Trade-Helper"),
    title="Trade-Helper",
    description="Enter a company name or stock code to get an AI-powered analysis and forecast.",
    css=custom_css,
    allow_flagging="never",
)

if __name__ == "__main__":
    iface.launch(share=True)
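
# Note: share=True also requests a temporary public gradio.live link alongside
# the local URL; call iface.launch() without it for a local-only run.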