Update app.py
app.py
CHANGED
@@ -1,9 +1,11 @@
 import os
 import json
 import time
 from datetime import datetime
 from typing import List, Dict, Any, Optional, Union
-from pydantic import BaseModel, Field, EmailStr,
 from fastapi import FastAPI, HTTPException, Query, Depends, Request
 from fastapi.responses import JSONResponse, Response
 from fastapi.middleware.cors import CORSMiddleware

@@ -29,7 +31,7 @@ SMARTLEAD_BASE_URL = "https://server.smartlead.ai/api/v1"
 # Initialize FastAPI app
 app = FastAPI(
     title="Smartlead API - Complete Integration",
-    version="2.
     description="Comprehensive FastAPI wrapper for Smartlead email automation platform",
     docs_url="/docs",
     redoc_url="/redoc"

@@ -82,16 +84,17 @@ class LeadInput(BaseModel):
     linkedin_profile: Optional[str] = Field(None, description="Lead's LinkedIn profile URL")
     company_url: Optional[str] = Field(None, description="Company website URL")

-    @
     def validate_custom_fields(cls, v):
         if v is not None and len(v) > 20:
             raise ValueError('Custom fields cannot exceed 20 fields')
         return v

-    @
     def validate_phone_number(cls, v):
         if v is not None:
-            # Convert to string if it's an integer
             return str(v)
         return v

@@ -143,8 +146,8 @@ class Campaign(BaseModel):
     updated_at: datetime
     status: str
     name: str
-    track_settings: Union[str, List[Any]]
-    scheduler_cron_value: Optional[Union[str, Dict[str, Any]]] = None
     min_time_btwn_emails: int
     max_leads_per_day: int
     stop_lead_settings: str

@@ -152,7 +155,7 @@ class Campaign(BaseModel):
     client_id: Optional[int] = None
     enable_ai_esp_matching: bool
     send_as_plain_text: bool
-    follow_up_percentage: Optional[Union[str, int]] = None

 class CampaignListResponse(BaseModel):
     campaigns: List[Campaign]

@@ -255,6 +258,11 @@ class MessageHistoryRequest(BaseModel):
     bcc: Optional[str] = Field(None, description="BCC recipients")
     add_signature: bool = Field(True, description="Whether to add signature")

 # ============================================================================
 # HELPER FUNCTIONS
 # ============================================================================

@@ -373,62 +381,40 @@ async def get_campaign_leads(campaign_id: int, offset: int = 0, limit: int = 100
     params = {"offset": offset, "limit": limit}
     return await call_smartlead_api("GET", f"campaigns/{campaign_id}/leads", params=params)

 @app.post("/campaigns/{campaign_id}/leads", response_model=Dict[str, Any], tags=["Leads"])
 async def add_leads_to_campaign(campaign_id: int, request: AddLeadsRequest):
     """Add leads to a campaign by ID with personalized welcome and closing messages"""
-    request_data = request.dict()

-
-
         lead_cleaned = {k: v for k, v in lead.items() if v is not None and v != ""}

-        # Generate personalized welcome and closing messages using LLM
         try:
             personalized_messages = await generate_welcome_closing_messages(lead_cleaned)
-
-            # Initialize custom_fields if it doesn't exist
             if "custom_fields" not in lead_cleaned:
                 lead_cleaned["custom_fields"] = {}
-
-
-            if personalized_messages.get("welcome_message"):
-                lead_cleaned["custom_fields"]["Welcome_Message"] = personalized_messages["welcome_message"]
-            if personalized_messages.get("closing_message"):
-                lead_cleaned["custom_fields"]["Closing_Message"] = personalized_messages["closing_message"]
-
         except Exception as e:
-            print(f"Error generating
-            # Continue with template messages if LLM fails
             template_messages = generate_template_welcome_closing_messages(lead_cleaned)
             if "custom_fields" not in lead_cleaned:
                 lead_cleaned["custom_fields"] = {}
-
-
-            if template_messages.get("closing_message"):
-                lead_cleaned["custom_fields"]["Closing_Message"] = template_messages["closing_message"]
-
-        # Clean up custom_fields - remove None values and empty strings
-        if "custom_fields" in lead_cleaned:
-            custom_fields = lead_cleaned["custom_fields"]
-            if custom_fields:
-                custom_fields_cleaned = {k: v for k, v in custom_fields.items() if v is not None and v != ""}
-                if custom_fields_cleaned:
-                    lead_cleaned["custom_fields"] = custom_fields_cleaned
-                else:
-                    lead_cleaned.pop("custom_fields", None)
-            else:
-                lead_cleaned.pop("custom_fields", None)

-
-
-
-
-
-
-
-
-
-

     return await call_smartlead_api("POST", f"campaigns/{campaign_id}/leads", data=request_data)

@@ -438,6 +424,35 @@ async def add_bulk_leads(campaign_id: int, leads: List[LeadInput]):
     request = AddLeadsRequest(lead_list=leads)
     return await add_leads_to_campaign(campaign_id, request)

 @app.post("/campaigns/{campaign_id}/leads/{lead_id}/resume", response_model=Dict[str, Any], tags=["Leads"])
 async def resume_lead_by_campaign_id(campaign_id: int, lead_id: int, request: ResumeLeadRequest):
     """Resume Lead By Campaign ID"""

@@ -565,18 +580,23 @@ async def save_campaign_sequences(campaign_id: int, request: SaveSequencesReques
     """Save Campaign Sequence"""
     return await call_smartlead_api("POST", f"campaigns/{campaign_id}/sequences", data=request.dict())

 @app.post("/campaigns/{campaign_id}/sequences/generate", response_model=Dict[str, Any], tags=["Sequences"])
 async def generate_campaign_sequences(campaign_id: int, request: GenerateSequencesRequest):
-    """Generate
     job_description = request.job_description
-
     save_request = SaveSequencesRequest(sequences=generated_sequences)
     result = await call_smartlead_api("POST", f"campaigns/{campaign_id}/sequences", data=save_request.dict())

     return {
         "ok": True,
-        "message": "
-        "generated_sequences": [seq for seq in generated_sequences],
         "save_result": result
     }

@@ -747,136 +767,102 @@ async def generate_welcome_closing_messages(lead_data: Dict[str, Any]) -> Dict[s
             return generate_template_welcome_closing_messages(lead_data)

         llm = ChatOpenAI(
-            model="gpt-
             temperature=0.7,
             openai_api_key=openai_api_key
         )
         str_llm = llm.with_structured_output(structure)

-        # Extract relevant information from lead data
         first_name = lead_data.get("first_name", "")
-        last_name = lead_data.get("last_name", "")
         company_name = lead_data.get("company_name", "")
-        location = lead_data.get("location", "")
         title = lead_data.get("custom_fields", {}).get("Title", "")
-        linkedin_profile = lead_data.get("linkedin_profile", "")
-
-        # Create a summary of the candidate's background
-        candidate_info = f"""
-        Name: {first_name}
-        Company: {company_name}
-        Location: {location}
-        Title: {title}
-        LinkedIn: {linkedin_profile}
-        """

-

-
-1. A personalized welcome message (2-3 sentences)
-2. A personalized closing message (1-2 sentences)
-
-Requirements:
-- Professional but friendly tone
-- Reference their specific background/company/role when possible
-- Keep messages concise and engaging
-- Make them feel valued and understood
-
-IMPORTANT: Respond with ONLY valid JSON. No additional text."""

         prompt_template = ChatPromptTemplate.from_messages([
             ("system", system_prompt),
-            ("human", "Generate
         ])

         messages = prompt_template.format_messages(candidate_info=candidate_info)
         response = await str_llm.ainvoke(messages)

-
-
-
-
-            "closing_message": response.closing_message
-        }
-
-        except Exception as parse_error:
-            print(f"JSON parsing failed for welcome/closing messages: {parse_error}")
-            return generate_template_welcome_closing_messages(lead_data)
-
     except Exception as e:
         print(f"Error generating welcome/closing messages with LLM: {str(e)}")
         return generate_template_welcome_closing_messages(lead_data)

 def generate_template_welcome_closing_messages(lead_data: Dict[str, Any]) -> Dict[str, str]:
     """Generate template-based welcome and closing messages as fallback"""

-
-    company_name = lead_data.get("company_name", "")
-    title = lead_data.get("custom_fields", {}).get("Title", "")
-
-    # Personalized welcome message
-    if first_name and company_name:
-        welcome_message = f"Hi {first_name}, I came across your profile and was impressed by your work at {company_name}."
-    elif first_name:
-        welcome_message = f"Hi {first_name}, I came across your profile and was impressed by your background."
-    elif company_name:
-        welcome_message = f"Hi there, I came across your profile and was impressed by your work at {company_name}."
-    else:
-        welcome_message = "Hi there, I came across your profile and was impressed by your background."
-
-    # Personalized closing message
-    if first_name:
-        closing_message = f"Looking forward to connecting with you, {first_name}!"
-    else:
-        closing_message = "Looking forward to connecting with you!"
-
-    return {
-        "welcome_message": welcome_message,
-        "closing_message": closing_message
-    }
-
-async def generate_sequences_with_llm(job_description: str) -> List[CampaignSequence]:
-    class email_seq(BaseModel):
         subject: str = Field(description="Subject line for the email")
-        body: str = Field(description="Body of the email")
-    class structure(BaseModel):
-        introduction: email_seq = Field(description="Email sequence for sequence 1 asking for consent and interest in the role")
-        email_sequence_2: email_seq = Field(description="Email sequence for sequence 2 following up on updates and next steps")
-        email_sequence_3: email_seq = Field(description="Email sequence for sequence 3 Another variant on following up on updates and next steps")


-
-    """Generate email sequences using LangChain and OpenAI based on job description"""
-
     if not LANGCHAIN_AVAILABLE:
-        return

     try:
         openai_api_key = os.getenv("OPENAI_API_KEY")
         if not openai_api_key:
             print("Warning: OPENAI_API_KEY not set. Using template sequences.")
-            return

-
-
-
-
-
-

-
-
-Generate ONLY the subject lines and email body content for 3 professional email sequences.
-Write the email on behalf of Ali Taghikhani, CEO SRN
-In the templates use placeholders like, Welcome_Message, Closing_Message, first_name, company_name,Title.

-
-
 2. OUTREACH (Day 3): Provide detailed job information
 3. FOLLOW-UP (Day 5): Follow up on updates and next steps
-
-Requirements:
 - First sequence will only ask about the consent and interest in the role, no other information is needed.
 - Second and third sequences are follow-ups (no subject line needed) in the 3rd sequence try providing some information about the role and the company.
 - All emails should be HTML formatted with proper <br> tags

@@ -884,287 +870,102 @@ async def generate_sequences_with_llm(job_description: str) -> List[CampaignSequ
 - Include clear call-to-actions
 - Focus on building consent and trust

-
-
-
-

-
             ("system", system_prompt),
-            ("human", "Generate email
         ])

-
-
-
-
-
-
-
-        # Sequence 1: Introduction with A/B variants
-        if hasattr(response, 'introduction') and response.introduction:
-            # Check if body is a string or dict
-            intro_body = response.introduction.body
-            if isinstance(intro_body, dict):
-                # If it's a dict, extract the content
-                intro_body = str(intro_body)
-
-            sequences.append(CampaignSequence(
-                seq_number=1,
-                seq_delay_details=SeqDelayDetails(delay_in_days=1),
-                seq_variants=[
-                    SeqVariant(
-                        subject=response.introduction.subject,
-                        email_body=intro_body,
-                        variant_label="A"
-                    )
-                ]
-            ))
-
-        # Sequence 2: Outreach
-        if hasattr(response, 'email_sequence_2') and response.email_sequence_2:
-            seq2_body = response.email_sequence_2.body
-            if isinstance(seq2_body, dict):
-                seq2_body = str(seq2_body)
-
-            sequences.append(CampaignSequence(
-                seq_number=2,
-                seq_delay_details=SeqDelayDetails(delay_in_days=3),
-                subject="",
-                email_body=seq2_body
-            ))
-
-        # Sequence 3: Follow-up
-        if hasattr(response, 'email_sequence_3') and response.email_sequence_3:
-            seq3_body = response.email_sequence_3.body
-            if isinstance(seq3_body, dict):
-                seq3_body = str(seq3_body)
-
-            sequences.append(CampaignSequence(
-                seq_number=3,
-                seq_delay_details=SeqDelayDetails(delay_in_days=5),
-                subject="",
-                email_body=seq3_body
-            ))
-
-        # Fill with templates if needed
-        while len(sequences) < 3:
-            if len(sequences) == 0:
-                sequences.append(CampaignSequence(
-                    seq_number=1,
-                    seq_delay_details=SeqDelayDetails(delay_in_days=1),
-                    seq_variants=[
-                        SeqVariant(
-                            subject=f"Quick question about {job_description}",
-                            email_body=f"""<p>Hi there,<br><br>
-I came across your profile and noticed your experience in {job_description}.
-I'm reaching out because we have some exciting opportunities that might be a great fit for your background.<br><br>
-Before I share more details, I wanted to ask: Are you currently open to exploring new opportunities in this space?<br><br>
-Would you be interested in hearing more about the roles we have available?<br><br>
-Best regards,<br>
-[Your Name]</p>""",
-                            variant_label="A"
-                        ),
-                        SeqVariant(
-                            subject=f"Interested in {job_description} opportunities?",
-                            email_body=f"""<p>Hello,<br><br>
-I hope this message finds you well. I'm a recruiter specializing in {job_description} positions.<br><br>
-I'd love to connect and share some opportunities that align with your expertise.
-Are you currently open to exploring new roles in this space?<br><br>
-If so, I can send you specific details about the positions we have available.<br><br>
-Thanks,<br>
-[Your Name]</p>""",
-                            variant_label="B"
-                        )
-                    ]
-                ))
-            elif len(sequences) == 1:
-                sequences.append(CampaignSequence(
-                    seq_number=2,
-                    seq_delay_details=SeqDelayDetails(delay_in_days=3),
-                    subject="",
-                    email_body=f"""<p>Hi,<br><br>
-Thanks for your interest! Here are more details about the {job_description} opportunities:<br><br>
-<strong>Role Details:</strong><br>
-• [Specific responsibilities]<br>
-• [Required skills and experience]<br>
-• [Team and company information]<br><br>
-<strong>Benefits:</strong><br>
-• [Compensation and benefits]<br>
-• [Growth opportunities]<br>
-• [Work environment]<br><br>
-Would you be interested in a quick call to discuss this role in more detail?<br><br>
-Best regards,<br>
-[Your Name]</p>"""
-                ))
-            elif len(sequences) == 2:
-                sequences.append(CampaignSequence(
-                    seq_number=3,
-                    seq_delay_details=SeqDelayDetails(delay_in_days=5),
-                    subject="",
-                    email_body=f"""<p>Hi,<br><br>
-Just wanted to follow up on the {job_description} opportunity I shared.<br><br>
-Have you had a chance to review the information? I'd love to hear your thoughts and answer any questions.<br><br>
-If you're interested, I can help schedule next steps. If not, no worries at all!<br><br>
-Thanks for your time!<br>
-[Your Name]</p>"""
-                ))
-
-        return sequences
-
-    except Exception as parse_error:
-        print(f"JSON parsing failed: {parse_error}")
-        return await generate_template_sequences(job_description)
-
-    except Exception as e:
-        print(f"Error generating sequences with LLM: {str(e)}")
-        return await generate_template_sequences(job_description)

-
-
-
-    sequences = []
-
-    # Sequence 1: Introduction with A/B variants
-    if "sequence1_variant_a" in content and "sequence1_variant_b" in content:
-        variants = []

-
-
-
-
-
-
-
-
-
-
-            subject=var_b.get("subject", f"Interested in {job_description} opportunities?"),
-            email_body=var_b.get("body", ""),
-            variant_label="B"
-        ))
-
-        sequences.append(CampaignSequence(
-            seq_number=1,
-            seq_delay_details=SeqDelayDetails(delay_in_days=1),
-            seq_variants=variants
-        ))
-
-    # Sequence 2: Outreach
-    if "sequence2" in content:
-        seq2_body = content["sequence2"].get("body", "")
-        sequences.append(CampaignSequence(
-            seq_number=2,
-            seq_delay_details=SeqDelayDetails(delay_in_days=3),
-            subject="",
-            email_body=seq2_body
-        ))
-
-    # Sequence 3: Follow-up
-    if "sequence3" in content:
-        seq3_body = content["sequence3"].get("body", "")
-        sequences.append(CampaignSequence(
-            seq_number=3,
-            seq_delay_details=SeqDelayDetails(delay_in_days=5),
-            subject="",
-            email_body=seq3_body
-        ))
-
-    # Fill with templates if needed
-    while len(sequences) < 3:
-        if len(sequences) == 0:
-            sequences.append(CampaignSequence(
                 seq_number=1,
                 seq_delay_details=SeqDelayDetails(delay_in_days=1),
-                seq_variants=[
-
-
-
-
-
-
-Would you be interested in hearing more about the roles we have available?<br><br>
-Best regards,<br>
-[Your Name]</p>""",
-                    variant_label="A"
-                ),
-                SeqVariant(
-                    subject=f"Interested in {job_description} opportunities?",
-                    email_body=f"""<p>Hello,<br><br>
-I hope this message finds you well. I'm a recruiter specializing in {job_description} positions.<br><br>
-I'd love to connect and share some opportunities that align with your expertise.
-Are you currently open to exploring new roles in this space?<br><br>
-If so, I can send you specific details about the positions we have available.<br><br>
-Thanks,<br>
-[Your Name]</p>""",
-                    variant_label="B"
-                )
-                ]
-            ))
-        elif len(sequences) == 1:
-            sequences.append(CampaignSequence(
                 seq_number=2,
                 seq_delay_details=SeqDelayDetails(delay_in_days=3),
-                subject="",
-                email_body=
-
-
-• [Specific responsibilities]<br>
-• [Required skills and experience]<br>
-• [Team and company information]<br><br>
-<strong>Benefits:</strong><br>
-• [Compensation and benefits]<br>
-• [Growth opportunities]<br>
-• [Work environment]<br><br>
-Would you be interested in a quick call to discuss this role in more detail?<br><br>
-Best regards,<br>
-[Your Name]</p>"""
-            ))
-        elif len(sequences) == 2:
-            sequences.append(CampaignSequence(
                 seq_number=3,
                 seq_delay_details=SeqDelayDetails(delay_in_days=5),
-                subject="",
-                email_body=
-
-
-
-
-
-
-    return sequences

-
-    """Generate template-based sequences as fallback"""

     sequences = [
         CampaignSequence(
             seq_number=1,
             seq_delay_details=SeqDelayDetails(delay_in_days=1),
             seq_variants=[
                 SeqVariant(
-                    subject=f"
-                    email_body=
-I came across your profile and noticed your experience in {job_description}.
-I'm reaching out because we have some exciting opportunities that might be a great fit for your background.<br><br>
-Before I share more details, I wanted to ask: Are you currently open to exploring new opportunities in this space?<br><br>
-Would you be interested in hearing more about the roles we have available?<br><br>
-Best regards,<br>
-[Your Name]</p>""",
                     variant_label="A"
-                ),
-                SeqVariant(
-                    subject=f"Interested in {job_description} opportunities?",
-                    email_body=f"""<p>Hello,<br><br>
-I hope this message finds you well. I'm a recruiter specializing in {job_description} positions.<br><br>
-I'd love to connect and share some opportunities that align with your expertise.
-Are you currently open to exploring new roles in this space?<br><br>
-If so, I can send you specific details about the positions we have available.<br><br>
-Thanks,<br>
-[Your Name]</p>""",
-                    variant_label="B"
                 )
             ]
         ),

@@ -1172,33 +973,15 @@ Thanks,<br>
             seq_number=2,
             seq_delay_details=SeqDelayDetails(delay_in_days=3),
             subject="",
-            email_body=
-Thanks for your interest! Here are more details about the {job_description} opportunities:<br><br>
-<strong>Role Details:</strong><br>
-• [Specific responsibilities]<br>
-• [Required skills and experience]<br>
-• [Team and company information]<br><br>
-<strong>Benefits:</strong><br>
-• [Compensation and benefits]<br>
-• [Growth opportunities]<br>
-• [Work environment]<br><br>
-Would you be interested in a quick call to discuss this role in more detail?<br><br>
-Best regards,<br>
-[Your Name]</p>"""
         ),
-
             seq_number=3,
             seq_delay_details=SeqDelayDetails(delay_in_days=5),
             subject="",
-            email_body=
-Just wanted to follow up on the {job_description} opportunity I shared.<br><br>
-Have you had a chance to review the information? I'd love to hear your thoughts and answer any questions.<br><br>
-If you're interested, I can help schedule next steps. If not, no worries at all!<br><br>
-Thanks for your time!<br>
-[Your Name]</p>"""
         )
     ]
-
     return sequences

 # ============================================================================

@@ -1213,16 +996,12 @@ class RateLimiter:

     def is_allowed(self) -> bool:
         now = time.time()
-        # Remove old requests outside the window
         self.requests = [req_time for req_time in self.requests if now - req_time < self.window_seconds]
-
         if len(self.requests) >= self.max_requests:
             return False
-
         self.requests.append(now)
         return True

-# Global rate limiter instance
 rate_limiter = RateLimiter(max_requests=10, window_seconds=2)

 @app.middleware("http")

@@ -1231,13 +1010,8 @@ async def rate_limit_middleware(request: Request, call_next):
     if not rate_limiter.is_allowed():
         return JSONResponse(
             status_code=429,
-            content={
-                "error": "Rate limit exceeded",
-                "message": "Too many requests. Please wait before making another request.",
-                "retry_after": 2
-            }
         )
-
     response = await call_next(request)
     return response

@@ -1250,12 +1024,7 @@ async def http_exception_handler(request: Request, exc: HTTPException):
     """Custom HTTP exception handler"""
     return JSONResponse(
         status_code=exc.status_code,
-        content={
-            "error": True,
-            "message": exc.detail,
-            "status_code": exc.status_code,
-            "timestamp": datetime.now().isoformat()
-        }
     )

 @app.exception_handler(Exception)

@@ -1264,10 +1033,9 @@ async def general_exception_handler(request: Request, exc: Exception):
     return JSONResponse(
         status_code=500,
         content={
-            "error": True,
             "message": "Internal server error",
-            "detail": str(exc) if os.getenv("DEBUG"
-            "timestamp": datetime.now().isoformat()
         }
     )

@@ -1281,68 +1049,11 @@ def custom_openapi():

     openapi_schema = get_openapi(
         title="Smartlead API - Complete Integration",
-        version="2.
-        description=""
-# Smartlead API - Complete Integration
-
-A comprehensive FastAPI wrapper for the Smartlead email automation platform.
-
-## Features
-- **Campaign Management**: Create, update, and manage email campaigns
-- **Lead Management**: Add, update, and manage leads across campaigns with AI-powered personalization
-- **Sequence Management**: Create and manage email sequences with AI generation
-- **Webhook Management**: Set up webhooks for real-time notifications
-- **Analytics**: Get detailed campaign analytics and statistics
-- **Email Account Management**: Manage email accounts and warmup
-- **Client Management**: Handle client accounts and permissions
-
-## AI-Powered Personalization
-When adding leads to campaigns, the API automatically generates personalized welcome and closing messages using LLM (Language Model) based on candidate details. These messages are added to the custom_fields as:
-- `Welcome_Message`: Personalized greeting based on candidate's background
-- `Closing_Message`: Personalized closing statement
-
-## Lead Schema
-The lead schema supports the following structure:
-```json
-{
-  "lead_list": [
-    {
-      "first_name": "Cristiano",
-      "last_name": "Ronaldo",
-      "email": "[email protected]",
-      "phone_number": "0239392029",
-      "company_name": "Manchester United",
-      "website": "mufc.com",
-      "location": "London",
-      "custom_fields": {
-        "Title": "Regional Manager",
-        "First_Line": "Loved your recent post about remote work on Linkedin"
-      },
-      "linkedin_profile": "http://www.linkedin.com/in/cristianoronaldo",
-      "company_url": "mufc.com"
-    }
-  ],
-  "settings": {
-    "ignore_global_block_list": true,
-    "ignore_unsubscribe_list": true,
-    "ignore_duplicate_leads_in_other_campaign": false
-  }
-}
-```
-
-## Authentication
-All requests require a Smartlead API key passed as a query parameter: `?api_key=YOUR_API_KEY`
-
-## Rate Limits
-- 10 requests per 2 seconds (enforced automatically)
-
-## Base URL
-- Smartlead API: `https://server.smartlead.ai/api/v1`
-        """,
         routes=app.routes,
     )

-    # Add custom tags
     openapi_schema["tags"] = [
         {"name": "Campaigns", "description": "Campaign management operations"},
         {"name": "Leads", "description": "Lead management operations"},

@@ -1367,16 +1078,11 @@ app.openapi = custom_openapi
 if __name__ == "__main__":
     import uvicorn

-    print("
-    print(f"API Documentation: http://localhost:8000/docs")
-    print(f"📖 ReDoc Documentation: http://localhost:8000/redoc")
-    print(f"Smartlead Base URL: {SMARTLEAD_BASE_URL}")
-    print(f"⚡ Rate Limit: 10 requests per 2 seconds")
-
     uvicorn.run(
-        "
         host="0.0.0.0",
         port=8000,
         reload=True,
         log_level="info"
-    )
@@ -1,9 +1,11 @@
 import os
+import re
 import json
 import time
+import asyncio
 from datetime import datetime
 from typing import List, Dict, Any, Optional, Union
+from pydantic import BaseModel, Field, EmailStr, field_validator
 from fastapi import FastAPI, HTTPException, Query, Depends, Request
 from fastapi.responses import JSONResponse, Response
 from fastapi.middleware.cors import CORSMiddleware

@@ -29,7 +31,7 @@ SMARTLEAD_BASE_URL = "https://server.smartlead.ai/api/v1"
 # Initialize FastAPI app
 app = FastAPI(
     title="Smartlead API - Complete Integration",
+    version="2.1.0",
     description="Comprehensive FastAPI wrapper for Smartlead email automation platform",
     docs_url="/docs",
     redoc_url="/redoc"

@@ -82,16 +84,17 @@ class LeadInput(BaseModel):
     linkedin_profile: Optional[str] = Field(None, description="Lead's LinkedIn profile URL")
     company_url: Optional[str] = Field(None, description="Company website URL")

+    @field_validator('custom_fields')
+    @classmethod
     def validate_custom_fields(cls, v):
         if v is not None and len(v) > 20:
             raise ValueError('Custom fields cannot exceed 20 fields')
         return v

+    @field_validator('phone_number')
+    @classmethod
     def validate_phone_number(cls, v):
         if v is not None:
             return str(v)
         return v

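The two `LeadInput` validators above now use Pydantic v2's `@field_validator` / `@classmethod` stacking. A minimal, self-contained sketch of the same pattern; the `Lead` model, its field types, and the sample value below are illustrative, not the app's actual class:

```python
# Minimal sketch of the validator style used in app.py; the Lead model is hypothetical.
from typing import Any, Dict, Optional, Union
from pydantic import BaseModel, field_validator

class Lead(BaseModel):
    phone_number: Optional[Union[str, int]] = None
    custom_fields: Optional[Dict[str, Any]] = None

    @field_validator('custom_fields')
    @classmethod
    def validate_custom_fields(cls, v):
        # Reject payloads with more than 20 custom fields, as in LeadInput.
        if v is not None and len(v) > 20:
            raise ValueError('Custom fields cannot exceed 20 fields')
        return v

    @field_validator('phone_number')
    @classmethod
    def validate_phone_number(cls, v):
        # Coerce integers to strings so downstream payloads always carry text.
        if v is not None:
            return str(v)
        return v

print(Lead(phone_number=239392029).phone_number)  # -> "239392029" (str)
```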
@@ -143,8 +146,8 @@ class Campaign(BaseModel):
     updated_at: datetime
     status: str
     name: str
+    track_settings: Union[str, List[Any]]
+    scheduler_cron_value: Optional[Union[str, Dict[str, Any]]] = None
     min_time_btwn_emails: int
     max_leads_per_day: int
     stop_lead_settings: str

@@ -152,7 +155,7 @@ class Campaign(BaseModel):
     client_id: Optional[int] = None
     enable_ai_esp_matching: bool
     send_as_plain_text: bool
+    follow_up_percentage: Optional[Union[str, int]] = None

 class CampaignListResponse(BaseModel):
     campaigns: List[Campaign]

@@ -255,6 +258,11 @@ class MessageHistoryRequest(BaseModel):
     bcc: Optional[str] = Field(None, description="BCC recipients")
     add_signature: bool = Field(True, description="Whether to add signature")

+class AddLeadsAndSequencesRequest(BaseModel):
+    lead_list: List[LeadInput] = Field(..., max_items=100, description="List of leads to add (maximum 100 leads)")
+    settings: Optional[LeadSettings] = Field(None, description="Settings for lead processing")
+    job_description: str = Field(..., description="Job description to generate sequences for")
+
 # ============================================================================
 # HELPER FUNCTIONS
 # ============================================================================

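For orientation, a request body matching the new `AddLeadsAndSequencesRequest` model could look like the sketch below. The lead values mirror the example lead schema documented in the app's OpenAPI description (where the email is shown redacted), and the job description is only a placeholder:

```python
# Illustrative AddLeadsAndSequencesRequest payload; all values are examples only.
example_payload = {
    "lead_list": [
        {
            "first_name": "Cristiano",
            "last_name": "Ronaldo",
            "email": "[email protected]",  # redacted in the source docs; substitute a real address
            "phone_number": "0239392029",
            "company_name": "Manchester United",
            "website": "mufc.com",
            "location": "London",
            "custom_fields": {"Title": "Regional Manager"},
            "linkedin_profile": "http://www.linkedin.com/in/cristianoronaldo",
            "company_url": "mufc.com",
        }
    ],
    "settings": {
        "ignore_global_block_list": True,
        "ignore_unsubscribe_list": True,
        "ignore_duplicate_leads_in_other_campaign": False,
    },
    "job_description": "Senior Backend Engineer",  # placeholder free-text description
}
```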
@@ -373,62 +381,40 @@ async def get_campaign_leads(campaign_id: int, offset: int = 0, limit: int = 100
     params = {"offset": offset, "limit": limit}
     return await call_smartlead_api("GET", f"campaigns/{campaign_id}/leads", params=params)

+# *** MODIFIED: add_leads_to_campaign now uses asyncio.gather for performance ***
 @app.post("/campaigns/{campaign_id}/leads", response_model=Dict[str, Any], tags=["Leads"])
 async def add_leads_to_campaign(campaign_id: int, request: AddLeadsRequest):
     """Add leads to a campaign by ID with personalized welcome and closing messages"""

+    async def process_lead(lead: Dict[str, Any]) -> Dict[str, Any]:
+        """Inner function to process a single lead."""
         lead_cleaned = {k: v for k, v in lead.items() if v is not None and v != ""}

         try:
             personalized_messages = await generate_welcome_closing_messages(lead_cleaned)
             if "custom_fields" not in lead_cleaned:
                 lead_cleaned["custom_fields"] = {}
+            lead_cleaned["custom_fields"]["Welcome_Message"] = personalized_messages.get("welcome_message", "")
+            lead_cleaned["custom_fields"]["Closing_Message"] = personalized_messages.get("closing_message", "")
         except Exception as e:
+            print(f"Error generating AI messages for {lead.get('email')}: {e}. Falling back to template.")
             template_messages = generate_template_welcome_closing_messages(lead_cleaned)
             if "custom_fields" not in lead_cleaned:
                 lead_cleaned["custom_fields"] = {}
+            lead_cleaned["custom_fields"]["Welcome_Message"] = template_messages["welcome_message"]
+            lead_cleaned["custom_fields"]["Closing_Message"] = template_messages["closing_message"]

+        return lead_cleaned
+
+    # Create a list of concurrent tasks for AI processing
+    tasks = [process_lead(lead.dict()) for lead in request.lead_list]
+    processed_leads = await asyncio.gather(*tasks)
+
+    # Prepare the final request data for Smartlead
+    request_data = {
+        "lead_list": processed_leads,
+        "settings": request.settings.dict() if request.settings else LeadSettings().dict()
+    }

     return await call_smartlead_api("POST", f"campaigns/{campaign_id}/leads", data=request_data)

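The rewritten `add_leads_to_campaign` above fans the per-lead personalization out with `asyncio.gather`, so the LLM calls for different leads overlap instead of running one after another. A stripped-down sketch of that pattern, with a stand-in `enrich` coroutine in place of the real `process_lead`:

```python
# Sketch of the asyncio.gather fan-out used above; enrich() stands in for process_lead().
import asyncio

async def enrich(lead: dict) -> dict:
    await asyncio.sleep(0.1)  # pretend this is an LLM call
    return {**lead, "custom_fields": {"Welcome_Message": f"Hi {lead['first_name']}"}}

async def main() -> None:
    leads = [{"first_name": "Ada"}, {"first_name": "Grace"}, {"first_name": "Alan"}]
    # One coroutine per lead; gather runs them concurrently and preserves input order.
    enriched = await asyncio.gather(*(enrich(lead) for lead in leads))
    print([lead["custom_fields"]["Welcome_Message"] for lead in enriched])

asyncio.run(main())  # finishes in roughly 0.1s rather than 0.3s, since the waits overlap
```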
@@ -438,6 +424,35 @@ async def add_bulk_leads(campaign_id: int, leads: List[LeadInput]):
     request = AddLeadsRequest(lead_list=leads)
     return await add_leads_to_campaign(campaign_id, request)

+@app.post("/campaigns/{campaign_id}/leads-and-sequences", response_model=Dict[str, Any], tags=["Leads"])
+async def add_leads_and_generate_sequences(campaign_id: int, request: AddLeadsAndSequencesRequest):
+    """Add leads to campaign and immediately generate informed sequences using their data"""
+
+    # Step 1: Add leads with personalized messages
+    leads_request = AddLeadsRequest(lead_list=request.lead_list, settings=request.settings)
+    leads_result = await add_leads_to_campaign(campaign_id, leads_request)
+
+    # Step 2: Generate informed sequences using the newly added leads
+    try:
+        generated_sequences = await generate_sequences_with_llm(request.job_description, campaign_id)
+        save_request = SaveSequencesRequest(sequences=generated_sequences)
+        sequences_result = await call_smartlead_api("POST", f"campaigns/{campaign_id}/sequences", data=save_request.dict())
+
+    except Exception as e:
+        print(f"Error generating sequences after adding leads: {str(e)}")
+        # Fallback to generic sequences
+        generated_sequences = await generate_sequences_with_llm(request.job_description)
+        save_request = SaveSequencesRequest(sequences=generated_sequences)
+        sequences_result = await call_smartlead_api("POST", f"campaigns/{campaign_id}/sequences", data=save_request.dict())
+
+    return {
+        "ok": True,
+        "message": "Leads added and informed sequences generated successfully",
+        "leads_result": leads_result,
+        "sequences_result": sequences_result,
+        "generated_sequences": [seq.dict() for seq in generated_sequences]
+    }
+
 @app.post("/campaigns/{campaign_id}/leads/{lead_id}/resume", response_model=Dict[str, Any], tags=["Leads"])
 async def resume_lead_by_campaign_id(campaign_id: int, lead_id: int, request: ResumeLeadRequest):
     """Resume Lead By Campaign ID"""

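Assuming the service runs locally on port 8000 and that the Smartlead key is supplied as the `api_key` query parameter (as the app's API docs describe), a call to the new `/leads-and-sequences` endpoint might look like this sketch; the campaign ID and payload are illustrative:

```python
# Hypothetical client call to POST /campaigns/{campaign_id}/leads-and-sequences.
import httpx

campaign_id = 12345  # assumed campaign ID
payload = {
    "lead_list": [{"first_name": "Ada", "email": "[email protected]", "company_name": "Example Corp"}],
    "settings": {"ignore_global_block_list": True},
    "job_description": "Senior Backend Engineer",
}

resp = httpx.post(
    f"http://localhost:8000/campaigns/{campaign_id}/leads-and-sequences",
    params={"api_key": "YOUR_SMARTLEAD_API_KEY"},
    json=payload,
    timeout=120.0,  # lead personalization plus sequence generation can take a while
)
resp.raise_for_status()
print(resp.json()["message"])
```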
@@ -565,18 +580,23 @@ async def save_campaign_sequences(campaign_id: int, request: SaveSequencesReques
     """Save Campaign Sequence"""
     return await call_smartlead_api("POST", f"campaigns/{campaign_id}/sequences", data=request.dict())

+# *** MODIFIED: generate_campaign_sequences now uses the corrected AI function ***
 @app.post("/campaigns/{campaign_id}/sequences/generate", response_model=Dict[str, Any], tags=["Sequences"])
 async def generate_campaign_sequences(campaign_id: int, request: GenerateSequencesRequest):
+    """Generate a campaign sequence template using AI that leverages personalized custom fields."""
     job_description = request.job_description
+
+    # Generate the smart template
+    generated_sequences = await generate_sequences_with_llm(job_description, campaign_id)
+
+    # Save the template to the campaign
     save_request = SaveSequencesRequest(sequences=generated_sequences)
     result = await call_smartlead_api("POST", f"campaigns/{campaign_id}/sequences", data=save_request.dict())

     return {
         "ok": True,
+        "message": "Sequence template generated and saved successfully. It will use personalized fields for each lead.",
+        "generated_sequences": [seq.dict() for seq in generated_sequences],
         "save_result": result
     }

@@ -747,136 +767,102 @@ async def generate_welcome_closing_messages(lead_data: Dict[str, Any]) -> Dict[s
             return generate_template_welcome_closing_messages(lead_data)

         llm = ChatOpenAI(
+            model="gpt-4o",
             temperature=0.7,
             openai_api_key=openai_api_key
         )
         str_llm = llm.with_structured_output(structure)

         first_name = lead_data.get("first_name", "")
         company_name = lead_data.get("company_name", "")
         title = lead_data.get("custom_fields", {}).get("Title", "")

+        candidate_info = f"Name: {first_name}, Company: {company_name}, Title: {title}"

+        system_prompt = """You are an expert recruiter creating personalized messages. Generate a 2-sentence welcome message and a 1-sentence closing message. Be professional and friendly and sound like a real human recruiter. Reference their background. Respond with ONLY valid JSON."""

         prompt_template = ChatPromptTemplate.from_messages([
             ("system", system_prompt),
+            ("human", "Generate messages for this candidate: {candidate_info}")
         ])

         messages = prompt_template.format_messages(candidate_info=candidate_info)
         response = await str_llm.ainvoke(messages)

+        return {
+            "welcome_message": response.welcome_message,
+            "closing_message": response.closing_message
+        }
     except Exception as e:
         print(f"Error generating welcome/closing messages with LLM: {str(e)}")
         return generate_template_welcome_closing_messages(lead_data)

 def generate_template_welcome_closing_messages(lead_data: Dict[str, Any]) -> Dict[str, str]:
     """Generate template-based welcome and closing messages as fallback"""
+    first_name = lead_data.get("first_name", "there")
+    welcome_message = f"Hi {first_name}, I came across your profile and was impressed by your background."
+    closing_message = f"Looking forward to connecting with you, {first_name}!"
+    return {"welcome_message": welcome_message, "closing_message": closing_message}
+
+# *** MODIFIED: generate_sequences_with_llm now creates a smart template ***
+async def generate_sequences_with_llm(job_description: str, campaign_id: Optional[int] = None) -> List[CampaignSequence]:
+    """Generate an email sequence template using LangChain and OpenAI, optionally informed by campaign lead data."""

+    class EmailContent(BaseModel):
         subject: str = Field(description="Subject line for the email")
+        body: str = Field(description="Body of the email, using placeholders")

+    class SequenceStructure(BaseModel):
+        introduction: EmailContent
+        follow_up_1: EmailContent
+        follow_up_2: EmailContent

     if not LANGCHAIN_AVAILABLE:
+        return generate_template_sequences(job_description)

     try:
         openai_api_key = os.getenv("OPENAI_API_KEY")
         if not openai_api_key:
             print("Warning: OPENAI_API_KEY not set. Using template sequences.")
+            return generate_template_sequences(job_description)

+        # If campaign_id is provided, fetch lead data to inform the template
+        lead_context = ""
+        if campaign_id:
+            try:
+                leads_response = await call_smartlead_api("GET", f"campaigns/{campaign_id}/leads", params={"limit": 10})
+                campaign_leads = leads_response.get("leads", []) if isinstance(leads_response, dict) else leads_response
+
+                if campaign_leads:
+                    # Sample lead data to inform template generation
+                    sample_leads = campaign_leads[:3]
+                    lead_info = []
+                    for lead in sample_leads:
+                        custom_fields = lead.get("custom_fields", {})
+                        lead_info.append({
+                            "first_name": lead.get("first_name", ""),
+                            "company": lead.get("company_name", ""),
+                            "title": custom_fields.get("Title", ""),
+                            "welcome_msg": custom_fields.get("Welcome_Message", ""),
+                            "closing_msg": custom_fields.get("Closing_Message", "")
+                        })
+
+                    lead_context = f"\n\nCampaign Lead Context (sample of {len(campaign_leads)} leads):\n{json.dumps(lead_info, indent=2)}"
+
+            except Exception as e:
+                print(f"Could not fetch lead data for campaign {campaign_id}: {e}")

+        llm = ChatOpenAI(model="gpt-4o", temperature=0.7, openai_api_key=openai_api_key)
+        structured_llm = llm.with_structured_output(SequenceStructure)

+        system_prompt = """You are an expert email sequence template generator for recruitment campaigns on behalf of 'Ali Taghikhani, CEO SRN'.

+Your task is to generate a 3-step email sequence template for a given job description.
+Email Sequence Structure:
+1. INTRODUCTION (Day 1): Ask for consent and interest in the role, In the starting use the welcome message placeholder after the salutation, and in the end use closing message template along with the name and title of sender
 2. OUTREACH (Day 3): Provide detailed job information
 3. FOLLOW-UP (Day 5): Follow up on updates and next steps
+Requirements:
 - First sequence will only ask about the consent and interest in the role, no other information is needed.
 - Second and third sequences are follow-ups (no subject line needed) in the 3rd sequence try providing some information about the role and the company.
 - All emails should be HTML formatted with proper <br> tags

@@ -884,287 +870,102 @@ async def generate_sequences_with_llm(job_description: str) -> List[CampaignSequ
 - Include clear call-to-actions
 - Focus on building consent and trust

+**CRITICAL FORMATTING RULES:**
+1. **PLACEHOLDER FORMAT IS ESSENTIAL:** You MUST use double curly braces for all placeholders.
+   - **CORRECT:** `{{first_name}}`
+   - **INCORRECT:** `{first_name}` or `[first_name]` or `<first_name>`
+2. **REQUIRED PLACEHOLDERS:** You MUST include `{{Welcome_Message}}` and `{{Closing_Message}}` in the first email. You should also use `{{first_name}}` in follow-ups.
+3. **FIRST EMAIL STRUCTURE:** The first email's body MUST begin with `{{Welcome_Message}}` and end with `{{Closing_Message}}`.
+4. **SIGNATURE:** End EVERY email body with `<br><br>Best regards,<br>Ali Taghikhani<br>CEO, SRN`.
+5. **EXAMPLE BODY:**
+```html
+{{Welcome_Message}}<br><br>I saw your profile and was impressed. We have an opening for a Senior Engineer that seems like a great fit.<br><br>{{Closing_Message}}<br><br>Best regards,<br>Ali Taghikhani<br>CEO, SRN
+```
+Always try to start the message with the salutation except for the first email.
+If lead context is provided, use it to make the templates more relevant.
+Respond with ONLY a valid JSON object matching the required structure.
+"""

+        prompt = ChatPromptTemplate.from_messages([
             ("system", system_prompt),
+            ("human", "Generate the 3-step email sequence template for this job description: {job_description}{lead_context}")
         ])

+        # By using .partial, we tell LangChain to treat the Smartlead placeholders as literals
+        # and not expect them as input variables. This is the correct way to handle this.
+        partial_prompt = prompt.partial(
+            first_name="",
+            company_name="",
+            **{"Welcome_Message": "", "Closing_Message": "", "Title": ""}
+        )

+        chain = partial_prompt | structured_llm
+        response = await chain.ainvoke({"job_description": job_description, "lead_context": lead_context})

+        # Post-process the AI's response to enforce double curly braces.
+        # This is a robust way to fix the AI's tendency to use single braces.
+        def fix_braces(text: str) -> str:
+            if not text:
+                return ""
+            # This regex finds all occurrences of `{...}` that are not `{{...}}`
+            # and replaces them with `{{...}}`.
+            return re.sub(r'{([^{}\n]+)}', r'{{\1}}', text)
+
+        sequences = [
+            CampaignSequence(
                 seq_number=1,
                 seq_delay_details=SeqDelayDetails(delay_in_days=1),
+                seq_variants=[SeqVariant(
+                    subject=fix_braces(response.introduction.subject),
+                    email_body=fix_braces(response.introduction.body),
+                    variant_label="A"
+                )]
+            ),
+            CampaignSequence(
                 seq_number=2,
                 seq_delay_details=SeqDelayDetails(delay_in_days=3),
+                subject="",  # Same thread
+                email_body=fix_braces(response.follow_up_1.body)
+            ),
+            CampaignSequence(
                 seq_number=3,
                 seq_delay_details=SeqDelayDetails(delay_in_days=5),
+                subject="",  # Same thread
+                email_body=fix_braces(response.follow_up_2.body)
+            )
+        ]
+        return sequences
+
+    except Exception as e:
+        print(f"Error generating sequences with LLM: {str(e)}. Falling back to template.")
+        return generate_template_sequences(job_description)
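The `fix_braces` helper above rewrites single-brace placeholders returned by the model into Smartlead-style double braces. A standalone check of the same regex shows the effect, and also why it should only be applied to text that is not already double-braced:

```python
import re

def fix_braces(text: str) -> str:
    # Same regex as in generate_sequences_with_llm above.
    if not text:
        return ""
    return re.sub(r'{([^{}\n]+)}', r'{{\1}}', text)

print(fix_braces("Hi {first_name}, welcome!"))
# -> Hi {{first_name}}, welcome!

# Text that already uses double braces gains an extra pair, because the inner
# "{first_name}" still matches the pattern:
print(fix_braces("{{first_name}}"))
# -> {{{first_name}}}
```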
+def generate_template_sequences(job_description: str) -> List[CampaignSequence]:
+    """Generate template-based sequences as fallback, using correct placeholders."""

+    # This is the corrected structure for the first email
+    first_email_body = f"""<p>{{{{custom.Welcome_Message}}}}<br><br>
+I'm reaching out because we have some exciting opportunities for a {job_description} that might be a great fit for your background. Are you currently open to exploring new roles?<br><br>
+{{{{custom.Closing_Message}}}}<br><br>
+Best regards,<br>Ali Taghikhani<br>CEO, SRN</p>"""
+
+    follow_up_1_body = f"""<p>Hi {{{{first_name}}}},<br><br>
+Just wanted to follow up on my previous email regarding the {job_description} role. I'd love to hear your thoughts when you have a moment.<br><br>
+Best regards,<br>Ali Taghikhani<br>CEO, SRN</p>"""
+
+    follow_up_2_body = f"""<p>Hi {{{{first_name}}}},<br><br>
+Checking in one last time about the {job_description} opportunity. If the timing isn't right, no worries at all. Otherwise, I look forward to hearing from you.<br><br>
+Best regards,<br>Ali Taghikhani<br>CEO, SRN</p>"""
+
     sequences = [
         CampaignSequence(
             seq_number=1,
             seq_delay_details=SeqDelayDetails(delay_in_days=1),
             seq_variants=[
                 SeqVariant(
+                    subject=f"Regarding a {job_description} opportunity",
+                    email_body=first_email_body,
                     variant_label="A"
                 )
             ]
         ),

@@ -1172,33 +973,15 @@ Thanks,<br>
             seq_number=2,
             seq_delay_details=SeqDelayDetails(delay_in_days=3),
             subject="",
+            email_body=follow_up_1_body
         ),
+        CampaignSequence(
             seq_number=3,
             seq_delay_details=SeqDelayDetails(delay_in_days=5),
             subject="",
+            email_body=follow_up_2_body
         )
     ]
     return sequences

 # ============================================================================

@@ -1213,16 +996,12 @@ class RateLimiter:

     def is_allowed(self) -> bool:
         now = time.time()
         self.requests = [req_time for req_time in self.requests if now - req_time < self.window_seconds]
         if len(self.requests) >= self.max_requests:
             return False
         self.requests.append(now)
         return True

 rate_limiter = RateLimiter(max_requests=10, window_seconds=2)

 @app.middleware("http")

@@ -1231,13 +1010,8 @@ async def rate_limit_middleware(request: Request, call_next):
     if not rate_limiter.is_allowed():
         return JSONResponse(
             status_code=429,
+            content={"error": "Rate limit exceeded"}
         )
     response = await call_next(request)
     return response

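The `RateLimiter` shown above keeps a sliding window of request timestamps, and the middleware answers with HTTP 429 once more than 10 requests arrive within 2 seconds. A compact standalone sketch of the same check (the constructor is inferred from the attributes the class uses, so treat it as an approximation):

```python
import time

class SlidingWindowLimiter:
    # Approximation of app.py's RateLimiter: keep timestamps, drop the stale ones.
    def __init__(self, max_requests: int, window_seconds: float) -> None:
        self.max_requests = max_requests
        self.window_seconds = window_seconds
        self.requests: list[float] = []

    def is_allowed(self) -> bool:
        now = time.time()
        self.requests = [t for t in self.requests if now - t < self.window_seconds]
        if len(self.requests) >= self.max_requests:
            return False
        self.requests.append(now)
        return True

limiter = SlidingWindowLimiter(max_requests=10, window_seconds=2)
print([limiter.is_allowed() for _ in range(12)])
# -> ten True values followed by two False, until the 2-second window slides
```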
@@ -1250,12 +1024,7 @@ async def http_exception_handler(request: Request, exc: HTTPException):
     """Custom HTTP exception handler"""
     return JSONResponse(
         status_code=exc.status_code,
+        content={"error": True, "message": exc.detail}
     )

 @app.exception_handler(Exception)

@@ -1264,10 +1033,9 @@ async def general_exception_handler(request: Request, exc: Exception):
     return JSONResponse(
         status_code=500,
         content={
+            "error": True,
             "message": "Internal server error",
+            "detail": str(exc) if os.getenv("DEBUG") else None
         }
     )

@@ -1281,68 +1049,11 @@ def custom_openapi():

     openapi_schema = get_openapi(
         title="Smartlead API - Complete Integration",
+        version="2.1.0",
+        description="A comprehensive FastAPI wrapper for the Smartlead email automation platform.",
         routes=app.routes,
     )

     openapi_schema["tags"] = [
         {"name": "Campaigns", "description": "Campaign management operations"},
         {"name": "Leads", "description": "Lead management operations"},

@@ -1367,16 +1078,11 @@ app.openapi = custom_openapi
 if __name__ == "__main__":
     import uvicorn

+    print("Starting Smartlead API - Complete Integration")
     uvicorn.run(
+        "__main__:app",
         host="0.0.0.0",
         port=8000,
         reload=True,
         log_level="info"
+    )