abhinavyadav11 commited on
Commit
f7a54e7
·
verified ·
1 Parent(s): 6ebbff9

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -96
app.py DELETED
@@ -1,96 +0,0 @@
1
"""Cold Email Generator — Streamlit app.

Flow: upload a portfolio CSV -> index it in a local Chroma vector store ->
scrape a job posting URL -> extract structured job details with an LLM ->
query the portfolio for relevant links -> generate a tailored cold email.
"""
import os
import uuid

import chromadb
import pandas as pd
import streamlit as st
from langchain_community.document_loaders import WebBaseLoader
from langchain_core.output_parsers import JsonOutputParser
from langchain_core.prompts import PromptTemplate
from langchain_groq import ChatGroq

# Initialize LLM.
# SECURITY: the original committed a live Groq API key in source. A key that
# has appeared in a commit must be revoked; read it from the environment
# (or st.secrets) instead of hard-coding it.
llm = ChatGroq(
    temperature=0,
    api_key=os.environ.get("GROQ_API_KEY", ""),
    model_name="llama-3.1-70b-versatile",
)

# Streamlit App
st.title("Cold Email Generator with LangChain")

# --- Portfolio upload & indexing -------------------------------------------
# `collection` stays None until a CSV is uploaded; the URL branch below
# guards on it (the original raised NameError if a URL came first).
collection = None
uploaded_file = st.file_uploader("Upload your portfolio CSV file", type=["csv"])
if uploaded_file:
    df = pd.read_csv(uploaded_file)
    st.write("Portfolio Data:")
    st.dataframe(df)

    # Persist portfolio rows into a local Chroma vector store on disk.
    client = chromadb.PersistentClient('vectorstore')
    collection = client.get_or_create_collection(name="my_portfolio.csv")

    # Only index once; a persistent collection survives reruns.
    if not collection.count():
        for _, row in df.iterrows():
            # Chroma expects parallel LISTS for documents/metadatas/ids.
            # The original passed a bare string and dict next to a
            # one-element ids list, which chromadb rejects.
            collection.add(
                documents=[row["Techstack"]],
                metadatas=[{"links": row["Links"]}],
                ids=[str(uuid.uuid4())],
            )
        st.success("Portfolio data added to the vectorstore!")

# --- Job scraping, extraction & email generation ----------------------------
url = st.text_input("Enter the job posting URL:")
if url:
    if collection is None:
        # Guard: portfolio must be indexed before we can match links.
        st.warning("Please upload your portfolio CSV before entering a URL.")
        st.stop()

    loader = WebBaseLoader(url)
    docs = loader.load()
    if not docs:
        # The original called .pop() unconditionally -> IndexError on
        # an empty load (bad URL, blocked scrape).
        st.error("Could not load any content from that URL.")
        st.stop()
    page_data = docs.pop().page_content
    st.write("Scraped Job Data:")
    st.text(page_data)

    # Extract Job Details as strict JSON via the LLM.
    prompt_extract = PromptTemplate.from_template(
        """
        ### SCRAPED TEXT FROM WEBSITE:
        {page_data}
        ### INSTRUCTION:
        The scraped text is from the career's page of a website.
        Your job is to extract the job postings and return them in JSON format containing the
        following keys: `role`, `experience`, `skills` and `description`.
        Only return the valid JSON.
        ### VALID JSON (NO PREAMBLE):
        """
    )

    chain_extract = prompt_extract | llm
    res = chain_extract.invoke(input={'page_data': page_data})
    json_parser = JsonOutputParser()
    # NOTE(review): parse() raises OutputParserException if the model
    # returns non-JSON; surfacing that to the user is a possible follow-up.
    job = json_parser.parse(res.content)
    st.write("Extracted Job Details:")
    st.json(job)

    # Query Portfolio Links matching the extracted skills.
    links = collection.query(query_texts=job['skills'], n_results=2).get('metadatas', [])
    st.write("Relevant Portfolio Links:")
    st.write(links)

    # Generate Cold Email grounded in the job description + portfolio links.
    prompt_email = PromptTemplate.from_template(
        """
        ### JOB DESCRIPTION:
        {job_description}

        ### INSTRUCTION:
        You are Mohan, a business development executive at AtliQ. AtliQ is an AI & Software Consulting company dedicated to facilitating
        the seamless integration of business processes through automated tools.
        Over our experience, we have empowered numerous enterprises with tailored solutions, fostering scalability,
        process optimization, cost reduction, and heightened overall efficiency.
        Your job is to write a cold email to the client regarding the job mentioned above describing the capability of AtliQ
        in fulfilling their needs.
        Also add the most relevant ones from the following links to showcase Atliq's portfolio: {link_list}
        Remember you are Mohan, BDE at AtliQ.
        Do not provide a preamble.
        ### EMAIL (NO PREAMBLE):

        """
    )

    chain_email = prompt_email | llm
    email_res = chain_email.invoke({"job_description": str(job), "link_list": links})
    st.write("Generated Cold Email:")
    st.text(email_res.content)