import os
from transformers import AutoTokenizer, AutoModelForCausalLM
import aiohttp

HUGGINGFACE_API_KEY = os.getenv("HUGGINGFACE_API_KEY")

def load_model(model_name):
    # Load the tokenizer and model locally (only needed for offline inference).
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(model_name)  # Use AutoModelForCausalLM for Llama
    return tokenizer, model
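
# Hedged sketch (not in the original script): how the locally loaded model could be
# used for generation instead of the hosted Inference API. The generation parameters
# below are illustrative assumptions, not values taken from the original code.
def generate_locally(model_name, prompt, max_new_tokens=128):
    tokenizer, model = load_model(model_name)
    inputs = tokenizer(prompt, return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)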

async def process_text(model_name, text):
    # Query the hosted Inference API directly; the model loaded by load_model()
    # is not needed here, so skip the expensive local download.
    prompt = f"Given the following company description, extract key products, geographies, and important keywords:\n\n{text}\n\nProducts, geographies, and keywords:"

    async with aiohttp.ClientSession() as session:
        async with session.post(f"https://api-inference.huggingface.co/models/{model_name}", 
                                headers={"Authorization": f"Bearer {HUGGINGFACE_API_KEY}"},
                                json={"inputs": prompt}) as response:
            result = await response.json()
            # The Inference API returns either a list of generations or a dict
            # (e.g. an error payload); handle both shapes defensively.
            if isinstance(result, list) and len(result) > 0:
                return result[0].get('generated_text', '').strip()
            elif isinstance(result, dict):
                return result.get('generated_text', '').strip()
            else:
                return str(result)
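
# Minimal usage sketch (assumed, not part of the original script): run process_text
# on a sample company description. The model name and sample text below are
# placeholders; substitute any text-generation model hosted on the Inference API.
if __name__ == "__main__":
    import asyncio

    sample_description = (
        "Acme Corp manufactures industrial sensors and sells them across "
        "North America and Western Europe."
    )
    extracted = asyncio.run(process_text("meta-llama/Llama-2-7b-hf", sample_description))
    print(extracted)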