from fastapi import FastAPI, HTTPException
from typing import List
import requests
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline
from datetime import datetime, timedelta
import numpy as np
from dotenv import load_dotenv
import os
app = FastAPI()
# Load FinBERT model and tokenizer
model_name = "ProsusAI/finbert"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
sentiment_analyzer = pipeline('sentiment-analysis', model=model, tokenizer=tokenizer)
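# FinBERT assigns one of three labels ("positive", "negative", "neutral"); the
# sentiment-analysis pipeline returns a list of dicts of the form
# {"label": ..., "score": ...}, where "score" is the model's confidence in that label.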
# Load environment variables
load_dotenv()
api_key = os.getenv('NEWS_API_KEY')

def fetch_stock_news(company: str, days: int = 2):
    """Fetch recent English-language news articles about a company from NewsAPI."""
    today = datetime.now().date()
    from_date = (today - timedelta(days=days)).strftime('%Y-%m-%d')
    to_date = today.strftime('%Y-%m-%d')
    query = f"{company} stock OR {company} shares"
    # Let requests build the query string so the search terms are URL-encoded.
    params = {
        'q': query,
        'from': from_date,
        'to': to_date,
        'language': 'en',
        'sortBy': 'publishedAt',
        'apiKey': api_key,
    }
    response = requests.get('https://newsapi.org/v2/everything', params=params)
    news_data = response.json()
    if news_data['status'] != 'ok':
        raise HTTPException(status_code=400, detail=news_data.get('message', 'Unknown error'))
    return news_data['articles']

def analyze_sentiment(articles):
    """Score each article with FinBERT and attach a signed sentiment score."""
    results = []
    for article in articles:
        # Some articles come back without a description; fall back to the title alone.
        description = article.get('description') or ''
        text = f"{article['title']}. {description}".strip()
        sentiment = sentiment_analyzer(text)[0]
        # Signed score: positive -> +score, negative -> -score, neutral -> 0.
        if sentiment['label'] == 'positive':
            score = sentiment['score']
        elif sentiment['label'] == 'negative':
            score = -sentiment['score']
        else:
            score = 0.0
        results.append({
            'title': article['title'],
            'description': description,
            'sentiment_score': score
        })
    return results
@app.get("/")
def home():
return {"message":"welcome to stock sentiment analysis"}
@app.get("/sentiment/{company}", response_model=List[dict])
def get_sentiment(company: str):
articles = fetch_stock_news(company)
sentiments = analyze_sentiment(articles)
return sentiments
@app.get("/top_articles/{company}", response_model=List[dict])
def get_top_articles(company: str):
articles = fetch_stock_news(company)
sentiments = analyze_sentiment(articles)
sorted_articles = sorted(sentiments, key=lambda x: abs(x['sentiment_score']), reverse=True)[:5]
return sorted_articles
@app.get("/average_sentiment/{company}")
def get_average_sentiment(company: str):
articles = fetch_stock_news(company)
sentiments = analyze_sentiment(articles)
scores = [article['sentiment_score'] for article in sentiments]
average_score = np.mean(scores)
return {"average_sentiment_score": average_score}