abhinavyadav11 commited on
Commit
d90922d
·
verified ·
1 Parent(s): 53e715c

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -101
app.py DELETED
@@ -1,101 +0,0 @@
1
- import streamlit as st
2
- import pandas as pd
3
- import uuid
4
- import chromadb
5
- from langchain_groq import ChatGroq
6
- from langchain_community.document_loaders import WebBaseLoader
7
- from langchain_core.prompts import PromptTemplate
8
- from langchain_core.output_parsers import JsonOutputParser
9
- from dotenv import load_dotenv
10
- import os
11
-
12
# Load environment variables. NOTE(review): the .env file is expected at the
# project root (outside the virtualenv directory), as the original comment said.
load_dotenv()

# Groq-hosted chat model used for both job extraction and email drafting.
# temperature=0 keeps the JSON extraction step deterministic.
llm = ChatGroq(
    temperature=0,
    api_key=os.getenv("API_KEY"),
    model_name="llama-3.1-70b-versatile",
)

# Page header for the Streamlit app.
st.title("Cold Email Generator with LangChain")
24
-
25
# --- Portfolio ingestion ---------------------------------------------------
# Upload the portfolio CSV (expected columns: "Techstack", "Links" — TODO
# confirm against the actual file) and index it in a persistent Chroma
# collection so job skills can be matched to portfolio links later.
uploaded_file = st.file_uploader("Upload your portfolio CSV file", type=["csv"])
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    st.write("Portfolio Data:")
    st.dataframe(df)

    # Persistent vector store on local disk under ./vectorstore
    client = chromadb.PersistentClient('vectorstore')
    collection = client.get_or_create_collection(name="my_portfolio.csv")

    # Ingest only once: a non-empty collection means the data is already there.
    if not collection.count():
        for _, row in df.iterrows():
            # Chroma's add() expects parallel lists for documents, metadatas
            # and ids; the original passed a bare string and a bare dict,
            # violating the one-entry-per-id contract.
            collection.add(
                documents=[row["Techstack"]],
                metadatas=[{"links": row["Links"]}],
                ids=[str(uuid.uuid4())],
            )
        st.success("Portfolio data added to the vectorstore!")
42
-
43
# --- Job scraping & email generation ---------------------------------------
url = st.text_input("Enter the job posting URL:")
if url:
    if not uploaded_file:
        # `collection` is only created after a portfolio CSV is ingested
        # above; without this guard, entering a URL first raises NameError.
        st.warning("Please upload your portfolio CSV before entering a URL.")
        st.stop()

    # Scrape the job posting text.
    loader = WebBaseLoader(url)
    docs = loader.load()
    if not docs:
        # Originally an unguarded .pop() — IndexError on an empty load.
        st.error("Could not load any content from that URL.")
        st.stop()
    page_data = docs.pop().page_content
    st.write("Scraped Job Data:")
    st.text(page_data)

    # Ask the LLM to extract structured job details as JSON.
    prompt_extract = PromptTemplate.from_template(
        """
        ### SCRAPED TEXT FROM WEBSITE:
        {page_data}
        ### INSTRUCTION:
        The scraped text is from the career's page of a website.
        Your job is to extract the job postings and return them in JSON format containing the
        following keys: `role`, `experience`, `skills` and `description`.
        Only return the valid JSON.
        ### VALID JSON (NO PREAMBLE):
        """
    )
    chain_extract = prompt_extract | llm
    res = chain_extract.invoke(input={'page_data': page_data})
    json_parser = JsonOutputParser()
    job = json_parser.parse(res.content)
    st.write("Extracted Job Details:")
    st.json(job)

    # The model is asked for "job postings" (plural) and may return a list;
    # use the first posting so the key lookups below are safe.
    if isinstance(job, list):
        job = job[0]

    # Query the portfolio for links relevant to the job's skills.
    links = collection.query(query_texts=job['skills'], n_results=2).get('metadatas', [])
    st.write("Relevant Portfolio Links:")
    st.write(links)

    # Draft the cold email from the job details plus matched portfolio links.
    prompt_email = PromptTemplate.from_template(
        """
        ### JOB DESCRIPTION:
        {job_description}

        ### INSTRUCTION:
        You are Mohan, a business development executive at AtliQ. AtliQ is an AI & Software Consulting company dedicated to facilitating
        the seamless integration of business processes through automated tools.
        Over our experience, we have empowered numerous enterprises with tailored solutions, fostering scalability,
        process optimization, cost reduction, and heightened overall efficiency.
        Your job is to write a cold email to the client regarding the job mentioned above describing the capability of AtliQ
        in fulfilling their needs.
        Also add the most relevant ones from the following links to showcase Atliq's portfolio: {link_list}
        Remember you are Mohan, BDE at AtliQ.
        Do not provide a preamble.
        ### EMAIL (NO PREAMBLE):

        """
    )
    chain_email = prompt_email | llm
    email_res = chain_email.invoke({"job_description": str(job), "link_list": links})
    st.write("Generated Cold Email:")
    st.text(email_res.content)