The Problem
On Monday you tested the 3 prompts in ChatGPT. You saw how research → personalization → follow-up works. But copy-pasting investor names into ChatGPT 200 times? That's not a fundraising strategy. Your team spends hours researching portfolio companies, writing custom intros, and tracking follow-ups in spreadsheets. Meanwhile, competitors with automated pipelines are reaching 10x more investors with better personalization.
See It Work
Watch the 3 prompts chain together automatically. This is what you'll build.
The Code
Three levels: start simple, add reliability, then scale to production. Pick where you are.
Level 1: Simple API Calls
Good for: 0-50 investors/week | Setup time: 30 minutes
# Simple API Calls (0-50 investors/week)
import json
import os
from typing import Dict, List

import openai
import requests

# API keys (set these as environment variables)
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
CRUNCHBASE_API_KEY = os.environ.get("CRUNCHBASE_API_KEY")
LINKEDIN_API_KEY = os.environ.get("LINKEDIN_API_KEY")

client = openai.OpenAI(api_key=OPENAI_API_KEY)


def enrich_investor_data(investor_name: str, firm: str) -> Dict:
    """Step 1: Research and enrich investor data"""
    # Get Crunchbase data
    crunchbase_url = f"https://api.crunchbase.com/api/v4/entities/people/{investor_name}"
    headers = {"X-cb-user-key": CRUNCHBASE_API_KEY}
    cb_response = requests.get(crunchbase_url, headers=headers)
    cb_data = cb_response.json()

    # Get LinkedIn activity (simplified; a real implementation uses the LinkedIn API,
    # since the model cannot browse LinkedIn on its own)
    linkedin_prompt = f"""Find recent LinkedIn activity for {investor_name} at {firm}.
Focus on: posts about investments, industry trends, portfolio company mentions.
Return as JSON with: recent_posts (list), topics_of_interest (list)."""

    linkedin_response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": linkedin_prompt}],
        temperature=0.3
    )
    linkedin_data = json.loads(linkedin_response.choices[0].message.content)

    # Assemble the enriched profile
    enriched_data = {
        "investor_name": investor_name,
        "firm": firm,
        "title": cb_data.get("properties", {}).get("title"),
        "investment_focus": cb_data.get("properties", {}).get("investor_type", []),
        "recent_investments": cb_data.get("investments", [])[:5],
        "linkedin_activity": linkedin_data.get("recent_posts", []),
        "contact_info": {
            "email": cb_data.get("properties", {}).get("email"),
            "linkedin": cb_data.get("properties", {}).get("linkedin_url")
        }
    }
    return enriched_data


def generate_personalized_email(investor_data: Dict, startup_info: Dict) -> Dict:
    """Step 2: Generate personalized intro email"""
    personalization_prompt = f"""Write a personalized cold email to this investor:

Investor: {investor_data['investor_name']} at {investor_data['firm']}
Recent activity: {investor_data['linkedin_activity']}
Portfolio: {investor_data['recent_investments']}

Our startup:
- Product: {startup_info['product']}
- Stage: {startup_info['stage']}
- Metrics: {startup_info['metrics']}
- Ask: {startup_info['ask']}

Requirements:
1. Reference specific portfolio company or recent post
2. Show clear product-market fit with their thesis
3. Include 1-2 key metrics
4. Keep under 150 words
5. End with specific call-to-action

Output as JSON with: subject_line, email_body, personalization_elements (list)."""

    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": personalization_prompt}],
        temperature=0.7
    )
    email_data = json.loads(response.choices[0].message.content)
    return email_data


def create_follow_up_sequence(investor_data: Dict, initial_email: Dict) -> List[Dict]:
    """Step 3: Build automated follow-up sequence"""
    sequence_prompt = f"""Create a 3-email follow-up sequence (4 touches total, counting the initial email) for this investor outreach:

Initial email sent:
Subject: {initial_email['subject_line']}
Body: {initial_email['email_body']}

Investor context: {investor_data['investment_focus']}

Create follow-ups with:
1. Day 4: Value-add (share relevant content)
2. Day 8: Social proof (new customer/milestone)
3. Day 12: Humble final attempt

For each email include: day, type, subject, preview (first 50 chars), key_element.
Output as JSON array."""

    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": sequence_prompt}],
        temperature=0.7
    )
    sequence = json.loads(response.choices[0].message.content)
    return sequence


# Usage
startup_info = {
    "product": "B2B SaaS platform for supply chain optimization",
    "stage": "Series A",
    "metrics": "$2M ARR, 40% YoY growth",
    "ask": "Raising $5M Series A"
}

# Run the pipeline
investor_data = enrich_investor_data("Sarah Chen", "Accel Partners")
email = generate_personalized_email(investor_data, startup_info)
sequence = create_follow_up_sequence(investor_data, email)

print(f"Generated email: {email['subject_line']}")
print(f"Follow-up sequence: {len(sequence)} emails")
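One fragile spot in the Level 1 code: the json.loads calls assume the model returns bare JSON, but GPT-4 often wraps its output in markdown code fences. A minimal defensive parser you could drop in (a hypothetical helper, not part of the original pipeline):

import json
import re

def parse_llm_json(raw: str):
    """Parse JSON from an LLM reply, tolerating markdown code fences."""
    # Strip ```json ... ``` fences if the model added them
    fenced = re.search(r"```(?:json)?\s*(.*?)```", raw, re.DOTALL)
    if fenced:
        raw = fenced.group(1)
    return json.loads(raw.strip())

# Usage: email_data = parse_llm_json(response.choices[0].message.content)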
Level 2: With Error Handling & CRM Integration
Good for: 50-200 investors/week | Setup time: 2 hours
// With Error Handling & CRM Integration (50-200 investors/week)
import Anthropic from '@anthropic-ai/sdk';
import axios from 'axios';
import { Client as HubSpotClient } from '@hubspot/api-client';

interface InvestorData {
  name: string;
  firm: string;
  email: string;
  enrichedData?: any;
  fitScore?: number;
}

interface EmailResult {
  subject: string;
  body: string;
  personalizationScore: number;
}

class InvestorOutreachPipeline {
  private anthropic: Anthropic;
  private hubspot: HubSpotClient;
  private crunchbaseApiKey: string;

  constructor() {
    this.anthropic = new Anthropic({
      apiKey: process.env.ANTHROPIC_API_KEY!,
    });
    this.hubspot = new HubSpotClient({
      accessToken: process.env.HUBSPOT_API_KEY!,
    });
    this.crunchbaseApiKey = process.env.CRUNCHBASE_API_KEY!;
  }

  async enrichInvestorWithRetry(
    investor: InvestorData,
    maxRetries: number = 3
  ): Promise<InvestorData> {
    let lastError: Error | null = null;

    for (let attempt = 0; attempt < maxRetries; attempt++) {
      try {
        // Crunchbase enrichment
        const cbResponse = await axios.get(
          `https://api.crunchbase.com/api/v4/entities/people/${investor.name}`,
          {
            headers: { 'X-cb-user-key': this.crunchbaseApiKey },
            timeout: 10000,
          }
        );

        // Apollo.io enrichment for contact info
        const apolloResponse = await axios.post(
          'https://api.apollo.io/v1/people/match',
          {
            first_name: investor.name.split(' ')[0],
            last_name: investor.name.split(' ')[1],
            organization_name: investor.firm,
          },
          {
            headers: { 'X-Api-Key': process.env.APOLLO_API_KEY! },
            timeout: 10000,
          }
        );

        // AI-powered fit scoring
        const fitScoreResponse = await this.anthropic.messages.create({
          model: 'claude-3-5-sonnet-20241022',
          max_tokens: 512,
          messages: [
            {
              role: 'user',
              content: `Analyze investor fit (0-10 scale):
Investor: ${JSON.stringify(cbResponse.data)}
Our startup: B2B SaaS, supply chain, Series A, $2M ARR
Output JSON: {fit_score: number, reasoning: string, key_overlaps: string[]}`,
            },
          ],
        });

        const content = fitScoreResponse.content[0];
        if (content.type !== 'text') throw new Error('Invalid response');
        const fitData = JSON.parse(content.text);

        investor.enrichedData = {
          crunchbase: cbResponse.data,
          apollo: apolloResponse.data,
          fitAnalysis: fitData,
        };
        investor.fitScore = fitData.fit_score;

        // Save to HubSpot CRM
        await this.saveToHubSpot(investor);

        return investor;
      } catch (error) {
        lastError = error as Error;
        console.error(`Attempt ${attempt + 1} failed:`, error);
        if (attempt < maxRetries - 1) {
          // Exponential backoff
          await new Promise((resolve) =>
            setTimeout(resolve, Math.pow(2, attempt) * 1000)
          );
        }
      }
    }

    throw lastError || new Error('Enrichment failed');
  }

  async generatePersonalizedEmail(
    investor: InvestorData,
    startupInfo: any
  ): Promise<EmailResult> {
    const response = await this.anthropic.messages.create({
      model: 'claude-3-5-sonnet-20241022',
      max_tokens: 1024,
      messages: [
        {
          role: 'user',
          content: `Write personalized cold email:
Investor: ${JSON.stringify(investor.enrichedData)}
Startup: ${JSON.stringify(startupInfo)}

Requirements:
- Reference specific portfolio company or recent activity
- Tie to their investment thesis
- Include 2 key metrics
- Under 150 words
- Specific CTA

Output JSON: {subject: string, body: string, personalization_elements: string[], tone: string}`,
        },
      ],
    });

    const content = response.content[0];
    if (content.type !== 'text') throw new Error('Invalid response');
    const emailData = JSON.parse(content.text);

    // Calculate personalization score
    const score = this.calculatePersonalizationScore(emailData);

    return {
      subject: emailData.subject,
      body: emailData.body,
      personalizationScore: score,
    };
  }

  private calculatePersonalizationScore(emailData: any): number {
    let score = 0;
    if (emailData.personalization_elements.length >= 3) score += 40;
    if (emailData.body.includes('portfolio')) score += 20;
    if (emailData.body.match(/\d+%/)) score += 20; // Has metrics
    if (emailData.body.length < 600) score += 20; // Concise
    return score;
  }

  private async saveToHubSpot(investor: InvestorData): Promise<void> {
    try {
      await this.hubspot.crm.contacts.basicApi.create({
        properties: {
          email: investor.email,
          firstname: investor.name.split(' ')[0],
          lastname: investor.name.split(' ')[1],
          company: investor.firm,
          hs_lead_status: 'NEW',
          investor_fit_score: investor.fitScore?.toString(),
        },
      });
    } catch (error) {
      console.error('HubSpot save failed:', error);
      // Don't throw; continue the pipeline even if the CRM save fails
    }
  }

  async runPipeline(investors: InvestorData[]): Promise<void> {
    const startupInfo = {
      product: 'B2B SaaS supply chain optimization',
      stage: 'Series A',
      metrics: '$2M ARR, 40% YoY growth',
    };

    for (const investor of investors) {
      try {
        console.log(`Processing ${investor.name}...`);

        // Step 1: Enrich
        const enriched = await this.enrichInvestorWithRetry(investor);

        // Skip low-fit investors
        if (enriched.fitScore! < 6) {
          console.log(`Skipping ${investor.name} (fit score: ${enriched.fitScore})`);
          continue;
        }

        // Step 2: Generate email
        const email = await this.generatePersonalizedEmail(
          enriched,
          startupInfo
        );
        console.log(`Generated email for ${investor.name}`);
        console.log(`Personalization score: ${email.personalizationScore}`);

        // Step 3: Queue for sending (implement with SendGrid/Mailgun)
        // await this.queueEmail(investor, email);
      } catch (error) {
        console.error(`Failed to process ${investor.name}:`, error);
        // Continue with next investor
      }
    }
  }
}

// Usage
const pipeline = new InvestorOutreachPipeline();
const investors: InvestorData[] = [
  { name: 'Sarah Chen', firm: 'Accel Partners', email: 'schen@accel.com' },
  { name: 'Mike Johnson', firm: 'Sequoia Capital', email: 'mjohnson@sequoia.com' },
];
pipeline.runPipeline(investors);
Level 3: Production Pattern with LangGraph
Good for: 200-1,000 investors/week | Setup time: 1 day
# Production Pattern with LangGraph (200-1,000 investors/week)
import json
from datetime import datetime, timedelta
from typing import TypedDict, List, Dict

import openai
import requests
from langgraph.graph import StateGraph, END

client = openai.OpenAI()


class OutreachState(TypedDict):
    investor: Dict
    enriched_data: Dict
    fit_score: float
    email: Dict
    sequence: List[Dict]
    sent_emails: List[Dict]
    responses: List[Dict]
    status: str  # 'enriching', 'generating', 'sending', 'nurturing', 'complete'
    retry_count: int


def enrich_node(state: OutreachState) -> OutreachState:
    """Enrich investor data from multiple sources"""
    investor = state['investor']

    # Parallel API calls
    crunchbase_data = fetch_crunchbase(investor['name'], investor['firm'])
    apollo_data = fetch_apollo(investor['name'], investor['firm'])
    linkedin_data = fetch_linkedin_activity(investor['linkedin_url'])

    # AI-powered fit analysis
    fit_prompt = f"""Analyze investor fit (0-10):

Investor data:
- Crunchbase: {crunchbase_data}
- Recent activity: {linkedin_data}

Our startup: B2B SaaS, supply chain, Series A, $2M ARR, 40% YoY

Output JSON: {{fit_score: float, reasoning: str, key_overlaps: list}}"""

    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": fit_prompt}]
    )
    fit_data = json.loads(response.choices[0].message.content)

    state['enriched_data'] = {
        'crunchbase': crunchbase_data,
        'apollo': apollo_data,
        'linkedin': linkedin_data,
        'fit_analysis': fit_data
    }
    state['fit_score'] = fit_data['fit_score']
    state['status'] = 'enriched'
    return state


def check_fit_score(state: OutreachState) -> str:
    """Route based on fit score"""
    if state['fit_score'] >= 7:
        return "high_fit"
    elif state['fit_score'] >= 5:
        return "medium_fit"
    else:
        return "low_fit"


def generate_email_node(state: OutreachState) -> OutreachState:
    """Generate personalized email"""
    email_prompt = f"""Write personalized cold email:

Investor: {state['enriched_data']}
Fit analysis: {state['enriched_data']['fit_analysis']}

Requirements:
- Reference specific portfolio company or recent post
- Tie to investment thesis
- Include 2 metrics
- Under 150 words
- Specific CTA

Output JSON: {{subject: str, body: str, personalization_elements: list}}"""

    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": email_prompt}],
        temperature=0.7
    )
    state['email'] = json.loads(response.choices[0].message.content)
    state['status'] = 'email_generated'
    return state


def create_sequence_node(state: OutreachState) -> OutreachState:
    """Create follow-up sequence"""
    sequence_prompt = f"""Create a 3-email follow-up sequence (4 touches total, counting the initial email):

Initial email: {state['email']}
Investor context: {state['enriched_data']['fit_analysis']}

Sequence:
1. Day 4: Value-add (share content)
2. Day 8: Social proof (milestone)
3. Day 12: Humble final attempt

Output JSON array with: day, type, subject, body, preview, trigger_conditions"""

    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": sequence_prompt}]
    )
    state['sequence'] = json.loads(response.choices[0].message.content)
    state['status'] = 'sequence_created'
    return state


def send_email_node(state: OutreachState) -> OutreachState:
    """Send initial email via SendGrid"""
    send_result = send_via_sendgrid(
        to_email=state['investor']['email'],
        subject=state['email']['subject'],
        body=state['email']['body'],
        tracking_enabled=True
    )

    state['sent_emails'].append({
        'email_id': send_result['message_id'],
        'sent_at': datetime.now().isoformat(),
        'type': 'initial',
        'status': 'sent'
    })
    state['status'] = 'email_sent'
    return state


def monitor_engagement_node(state: OutreachState) -> OutreachState:
    """Monitor email engagement (opens, clicks)"""
    # Check SendGrid webhook events
    events = check_sendgrid_events(state['sent_emails'][-1]['email_id'])

    if events.get('opened'):
        # Email opened: schedule follow-up
        schedule_follow_up(state, days=2)
    if events.get('clicked'):
        # Link clicked: send case study immediately
        send_case_study(state['investor']['email'])

    state['status'] = 'monitoring'
    return state


def check_response(state: OutreachState) -> str:
    """Check if investor responded"""
    # Check inbox for replies (via Gmail API or IMAP)
    replies = check_for_replies(state['investor']['email'])
    if replies:
        state['responses'] = replies
        return "responded"

    # Check if all follow-ups sent
    if len(state['sent_emails']) >= 4:
        return "sequence_complete"

    # Check if next follow-up is due
    last_sent = datetime.fromisoformat(state['sent_emails'][-1]['sent_at'])
    next_followup_day = state['sequence'][len(state['sent_emails']) - 1]['day']
    if datetime.now() >= last_sent + timedelta(days=next_followup_day):
        return "send_next"

    return "wait"


def send_followup_node(state: OutreachState) -> OutreachState:
    """Send next email in sequence"""
    next_email = state['sequence'][len(state['sent_emails']) - 1]

    send_result = send_via_sendgrid(
        to_email=state['investor']['email'],
        subject=next_email['subject'],
        body=next_email['body'],
        tracking_enabled=True
    )

    state['sent_emails'].append({
        'email_id': send_result['message_id'],
        'sent_at': datetime.now().isoformat(),
        'type': next_email['type'],
        'status': 'sent'
    })
    return state


def nurture_node(state: OutreachState) -> OutreachState:
    """Move to long-term nurture campaign"""
    # Add to nurture list (monthly newsletter, relevant content)
    add_to_nurture_campaign(state['investor']['email'])
    state['status'] = 'nurturing'
    return state


# Build the graph
def build_outreach_graph():
    graph = StateGraph(OutreachState)

    # Add nodes
    graph.add_node("enrich", enrich_node)
    graph.add_node("generate_email", generate_email_node)
    graph.add_node("create_sequence", create_sequence_node)
    graph.add_node("send_email", send_email_node)
    graph.add_node("monitor_engagement", monitor_engagement_node)
    graph.add_node("send_followup", send_followup_node)
    graph.add_node("nurture", nurture_node)

    # Add edges
    graph.set_entry_point("enrich")
    graph.add_conditional_edges(
        "enrich",
        check_fit_score,
        {
            "high_fit": "generate_email",
            "medium_fit": "generate_email",
            "low_fit": "nurture"
        }
    )
    graph.add_edge("generate_email", "create_sequence")
    graph.add_edge("create_sequence", "send_email")
    graph.add_edge("send_email", "monitor_engagement")
    graph.add_conditional_edges(
        "monitor_engagement",
        check_response,
        {
            "responded": END,
            "send_next": "send_followup",
            "sequence_complete": "nurture",
            # Looping back on "wait" works for a demo; in production, pause here
            # (scheduler or checkpointer) instead of spinning inside the graph.
            "wait": "monitor_engagement"
        }
    )
    graph.add_edge("send_followup", "monitor_engagement")
    graph.add_edge("nurture", END)

    return graph.compile()


# Helper functions (implement with real APIs)
def fetch_crunchbase(name, firm):
    # Implement Crunchbase API call
    pass

def fetch_apollo(name, firm):
    # Implement Apollo.io API call
    pass

def fetch_linkedin_activity(linkedin_url):
    # Implement LinkedIn scraping or API
    pass

def send_via_sendgrid(to_email, subject, body, tracking_enabled):
    # Implement SendGrid API call
    pass

def check_sendgrid_events(message_id):
    # Check SendGrid webhook events
    pass

def check_for_replies(investor_email):
    # Check Gmail API or IMAP for replies
    pass

def schedule_follow_up(state, days):
    # Schedule the next follow-up (task queue or cron)
    pass

def send_case_study(email):
    # Send a case study email immediately
    pass

def add_to_nurture_campaign(email):
    # Add to HubSpot nurture list
    pass


# Usage
outreach_graph = build_outreach_graph()

initial_state = {
    "investor": {
        "name": "Sarah Chen",
        "firm": "Accel Partners",
        "email": "schen@accel.com",
        "linkedin_url": "linkedin.com/in/sarahchen-accel"
    },
    "enriched_data": {},
    "fit_score": 0.0,
    "email": {},
    "sequence": [],
    "sent_emails": [],
    "responses": [],
    "status": "pending",
    "retry_count": 0
}

result = outreach_graph.invoke(initial_state)
print(f"Outreach status: {result['status']}")
print(f"Emails sent: {len(result['sent_emails'])}")
When to Level Up
Start: Simple API Calls
0-50 investors/week
- Sequential API calls to Crunchbase, LinkedIn, OpenAI
- Basic fit scoring (0-10 scale; see the sketch after this list)
- Manual email sending via Gmail
- Spreadsheet tracking of outreach
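The Level 1 code above stops at enrichment; a basic 0-10 fit score is one more prompt on top of the enriched profile. A minimal sketch that reuses the client, investor_data, and startup_info from Level 1 (the scoring rubric is an assumption; adjust it to your thesis):

def score_investor_fit(investor_data: dict, startup_info: dict) -> dict:
    """Ask the model for a 0-10 fit score against our startup profile."""
    fit_prompt = f"""Score investor fit from 0-10:

Investor focus: {investor_data['investment_focus']}
Recent investments: {investor_data['recent_investments']}

Our startup: {startup_info['product']}, {startup_info['stage']}, {startup_info['metrics']}

Output JSON: {{"fit_score": number, "reasoning": string}}"""

    response = client.chat.completions.create(
        model="gpt-4",
        messages=[{"role": "user", "content": fit_prompt}],
        temperature=0.2
    )
    return json.loads(response.choices[0].message.content)

# Usage: skip outreach below a threshold
fit = score_investor_fit(investor_data, startup_info)
if fit["fit_score"] < 6:
    print(f"Skipping: {fit['reasoning']}")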
Scale: Add Reliability & CRM
50-200 investors/week
- Automatic retries with exponential backoff
- HubSpot/Salesforce CRM integration for tracking
- Parallel API calls (enrich multiple investors simultaneously; sketch after this list)
- Email queue with SendGrid/Mailgun
- Engagement tracking (opens, clicks)
- Personalization scoring (skip low-quality emails)
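The Level 2 example processes investors one at a time; parallel enrichment is mostly a matter of batching the API calls with bounded concurrency. A minimal Python sketch using asyncio and httpx (assumed dependency; enrich_one stands in for whatever enrichment calls you use, and CRUNCHBASE_API_KEY is the same environment variable as in Level 1):

import asyncio
import httpx

async def enrich_one(client: httpx.AsyncClient, investor: dict) -> dict:
    """Fetch one investor's Crunchbase record; extend with Apollo/LinkedIn as needed."""
    resp = await client.get(
        f"https://api.crunchbase.com/api/v4/entities/people/{investor['name']}",
        headers={"X-cb-user-key": CRUNCHBASE_API_KEY},
        timeout=10.0,
    )
    return {**investor, "crunchbase": resp.json()}

async def enrich_batch(investors: list[dict], concurrency: int = 5) -> list[dict]:
    """Enrich a batch of investors with bounded concurrency."""
    semaphore = asyncio.Semaphore(concurrency)  # don't hammer the APIs
    async with httpx.AsyncClient() as client:
        async def guarded(inv):
            async with semaphore:
                return await enrich_one(client, inv)
        return await asyncio.gather(*(guarded(inv) for inv in investors))

# Usage: enriched = asyncio.run(enrich_batch(investor_list))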
Production: Framework & Automation
200-1,000 investors/week
- LangGraph for complex workflows with conditional routing
- Automated follow-up sequences (4-email cadence)
- Real-time engagement monitoring (opens → trigger next email)
- Response detection (stop sequence when investor replies)
- A/B testing of subject lines and email copy (see the sketch after this list)
- Long-term nurture campaigns for low-fit investors
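A/B testing at this stage does not need a framework: deterministic variant assignment plus a conversion log is enough to compare subject lines. A hedged sketch (the variant copy and the reply-rate report are illustrative; pull the results list from your CRM):

import hashlib

SUBJECT_VARIANTS = {
    "A": "Quick question about your supply chain thesis",
    "B": "$2M ARR, 40% YoY: supply chain SaaS raising Series A",
}

def assign_variant(investor_email: str) -> str:
    """Deterministically assign a variant so retries never flip the bucket."""
    digest = hashlib.sha256(investor_email.encode()).hexdigest()
    return "A" if int(digest, 16) % 2 == 0 else "B"

def report(results: list[dict]) -> None:
    """results: [{'variant': 'A', 'replied': True}, ...] exported from your CRM."""
    for variant in SUBJECT_VARIANTS:
        sent = [r for r in results if r["variant"] == variant]
        if not sent:
            continue
        replies = sum(r["replied"] for r in sent)
        print(f"Variant {variant}: {replies}/{len(sent)} replies "
              f"({100 * replies / len(sent):.1f}%)")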
Enterprise: Multi-Agent System
1,000+ investors/week
- Multiple specialized agents (research, personalization, follow-up, response handling)
- Multi-channel outreach (email, LinkedIn InMail, Twitter DM)
- Predictive fit scoring with ML models trained on your successful deals (sketch after this list)
- Real-time dashboard (Grafana) showing pipeline health
- Distributed processing (Kubernetes + Redis queues)
- Advanced analytics (which personalization tactics convert best)
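Once you have a few hundred outcomes, the LLM fit score can be replaced or blended with a model trained on your own pipeline history. A minimal sketch with scikit-learn logistic regression (assumed dependency; the feature set and the toy training rows are illustrative, not real data):

import numpy as np
from sklearn.linear_model import LogisticRegression

# Features per investor: [llm_fit_score, portfolio_overlaps, check_size_match,
#                         recent_activity_count, is_lead_investor]
X_train = np.array([
    [8.0, 3, 1, 12, 1],
    [4.0, 0, 0, 2, 0],
    [7.0, 2, 1, 5, 1],
    [5.0, 1, 0, 1, 0],
])
y_train = np.array([1, 0, 1, 0])  # 1 = took a meeting / invested, 0 = no response

model = LogisticRegression()
model.fit(X_train, y_train)

def predicted_fit(features: list[float]) -> float:
    """Probability this investor converts, based on your historical deals."""
    return float(model.predict_proba(np.array([features]))[:, 1][0])

# Usage: prioritize outreach by predicted conversion probability
print(f"Predicted conversion: {predicted_fit([7.5, 2, 1, 8, 1]):.2f}")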
Fundraising-Specific Gotchas
The code examples above work. But investor outreach has unique challenges you need to handle.
Email Deliverability & Spam Filters
Sending 100+ cold emails per day will get you flagged as spam. You need proper SPF/DKIM/DMARC setup, domain warm-up, and engagement-based throttling. Use dedicated email infrastructure (SendGrid/Mailgun) and warm up your domain gradually (start with 10 emails/day, increase 20% weekly).
# Email deliverability best practices
import os
from datetime import datetime

import sendgrid
from sendgrid.helpers.mail import Mail, Header, TrackingSettings, ClickTracking, OpenTracking


class DeliverabilityManager:
    def __init__(self):
        self.sg = sendgrid.SendGridAPIClient(api_key=os.environ.get('SENDGRID_API_KEY'))
        # In practice, load these from persistent storage
        self.start_date = datetime(2025, 1, 1)  # the day you started warming the domain
        self.daily_limit = self.get_current_daily_limit()
        self.sent_today = self.get_sent_count_today()

    def can_send_email(self) -> bool:
        """Check if we're within daily sending limits"""
        return self.sent_today < self.daily_limit

    def get_current_daily_limit(self) -> int:
        """Gradual warm-up schedule"""
        days_since_start = (datetime.now() - self.start_date).days
        if days_since_start < 7:
            return 10   # Week 1: 10 emails/day
        elif days_since_start < 14:
            return 25   # Week 2: 25 emails/day
        elif days_since_start < 21:
            return 50   # Week 3: 50 emails/day
        else:
            return 100  # Week 4+: 100 emails/day

    def get_sent_count_today(self) -> int:
        """Look up today's send count (from your database or SendGrid stats)"""
        return 0

    def send_with_tracking(self, to_email, subject, body):
        """Send with proper headers and tracking"""
        message = Mail(
            from_email='founders@yourstartup.com',
            to_emails=to_email,
            subject=subject,
            html_content=body
        )

        # Enable click/open tracking
        message.tracking_settings = TrackingSettings(
            click_tracking=ClickTracking(enable=True),
            open_tracking=OpenTracking(enable=True)
        )

        # Custom headers to improve deliverability
        message.add_header(Header('X-Priority', '3'))
        message.add_header(Header('X-MSMail-Priority', 'Normal'))

        response = self.sg.send(message)
        self.sent_today += 1
        return response
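The warm-up schedule above only helps if SPF, DKIM, and DMARC records are in place first. A quick way to sanity-check them is a DNS lookup; here is a sketch using dnspython (assumed dependency; yourstartup.com and the DKIM selector are placeholders, so substitute your own domain and provider's selector):

import dns.resolver

def check_email_dns(domain: str, dkim_selector: str = "s1") -> dict:
    """Look up SPF, DKIM, and DMARC TXT records for a sending domain."""
    def txt_records(name: str) -> list[str]:
        try:
            return [r.to_text().strip('"') for r in dns.resolver.resolve(name, "TXT")]
        except (dns.resolver.NoAnswer, dns.resolver.NXDOMAIN):
            return []

    spf = [r for r in txt_records(domain) if r.startswith("v=spf1")]
    dkim = txt_records(f"{dkim_selector}._domainkey.{domain}")
    dmarc = [r for r in txt_records(f"_dmarc.{domain}") if r.startswith("v=DMARC1")]

    return {"spf": bool(spf), "dkim": bool(dkim), "dmarc": bool(dmarc)}

# Usage
print(check_email_dns("yourstartup.com"))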
LinkedIn Rate Limits & Account Bans
LinkedIn aggressively rate-limits automated activity. Sending 50+ connection requests per day will get your account restricted. Use LinkedIn's official Sales Navigator API (not scraping), respect rate limits (20-30 actions/day max), and add random delays between actions (30-90 seconds).
// LinkedIn automation with rate limiting
import axios from 'axios';
import { setTimeout } from 'timers/promises';

class LinkedInAutomation {
  private actionsToday: number = 0;
  private readonly MAX_ACTIONS_PER_DAY = 25;
  private readonly MIN_DELAY_MS = 30000; // 30 seconds
  private readonly MAX_DELAY_MS = 90000; // 90 seconds

  async sendConnectionRequest(
    profileUrl: string,
    personalizedNote: string
  ): Promise<void> {
    // Check daily limit
    if (this.actionsToday >= this.MAX_ACTIONS_PER_DAY) {
      throw new Error('Daily LinkedIn action limit reached');
    }

    // Random delay to avoid detection
    const delay =
      Math.random() * (this.MAX_DELAY_MS - this.MIN_DELAY_MS) + this.MIN_DELAY_MS;
    await setTimeout(delay);

    // Use Sales Navigator API (not scraping)
    await axios.post(
      'https://api.linkedin.com/v2/invitations',
      {
        invitee: { profileUrl },
        message: personalizedNote,
      },
      {
        headers: {
          Authorization: `Bearer ${process.env.LINKEDIN_ACCESS_TOKEN}`,
        },
      }
    );

    this.actionsToday++;
    console.log(
      `Sent connection request (${this.actionsToday}/${this.MAX_ACTIONS_PER_DAY} today)`
    );
  }

  resetDailyCounter(): void {
    // Call this at midnight
    this.actionsToday = 0;
  }
}
Stale Data in Crunchbase/Apollo
Investor data goes stale fast. Someone who was at Accel 6 months ago might have moved to a new firm. Always cross-reference multiple sources (Crunchbase + LinkedIn + firm website) and show a 'last updated' timestamp. Re-enrich investor data every 90 days.
# Cross-reference multiple data sources
import requests
from datetime import datetime, timedelta


def verify_investor_current_firm(investor_name: str, firm_name: str) -> dict:
    """Cross-check investor's current firm across multiple sources"""
    # Source 1: Crunchbase
    cb_data = fetch_crunchbase(investor_name)
    cb_firm = cb_data.get('current_organization')
    cb_updated = cb_data.get('last_updated')

    # Source 2: LinkedIn (via API or scraping)
    linkedin_data = fetch_linkedin(investor_name)
    linkedin_firm = linkedin_data.get('current_company')
    linkedin_updated = linkedin_data.get('profile_updated')

    # Source 3: Firm website team page
    firm_website = f"https://{firm_name.lower().replace(' ', '')}.com/team"
    firm_team_page = requests.get(firm_website).text
    on_firm_website = investor_name.lower() in firm_team_page.lower()

    # Determine most reliable source
    sources_agree = (cb_firm == firm_name and linkedin_firm == firm_name)
    data_is_fresh = (
        datetime.now() - datetime.fromisoformat(cb_updated) < timedelta(days=90)
    )

    return {
        'investor_name': investor_name,
        'claimed_firm': firm_name,
        'crunchbase_firm': cb_firm,
        'linkedin_firm': linkedin_firm,
        'on_firm_website': on_firm_website,
        'sources_agree': sources_agree,
        'data_is_fresh': data_is_fresh,
        'confidence': 'high' if (sources_agree and data_is_fresh) else 'low',
        'last_verified': datetime.now().isoformat()
    }


# Usage: Skip outreach if confidence is low
verification = verify_investor_current_firm('Sarah Chen', 'Accel Partners')
if verification['confidence'] == 'low':
    print(f"Skipping {verification['investor_name']} - stale data "
          f"(last verified: {verification['last_verified']})")
CAN-SPAM Compliance & Unsubscribe Handling
Cold emails to investors must comply with the CAN-SPAM Act. Include a physical address and a clear unsubscribe link, and honor unsubscribes within 10 business days. Track unsubscribes in your CRM and suppress future emails. Non-compliance can cost up to $46,517 per email (the statutory maximum, which the FTC adjusts annually).
# CAN-SPAM compliant email template
from datetime import datetime


def build_compliant_email(investor_email: str, subject: str, body: str) -> str:
    """Add required CAN-SPAM elements to email"""
    # Generate unique unsubscribe token
    unsubscribe_token = generate_unsubscribe_token(investor_email)
    unsubscribe_url = f"https://yourstartup.com/unsubscribe?token={unsubscribe_token}"

    # Add footer with required elements
    footer = f"""
    <hr>
    <p style="font-size: 11px; color: #666;">
        You're receiving this because you're a VC investor in B2B SaaS.<br>
        If you'd prefer not to receive these emails,
        <a href="{unsubscribe_url}">unsubscribe here</a>.<br><br>
        YourStartup Inc.<br>
        123 Main Street<br>
        San Francisco, CA 94105
    </p>
    """

    compliant_body = body + footer
    return compliant_body


# Unsubscribe handler
def handle_unsubscribe(token: str):
    """Process unsubscribe request"""
    investor_email = verify_unsubscribe_token(token)

    # Add to suppression list
    add_to_suppression_list(investor_email)

    # Remove from all active sequences
    stop_all_sequences(investor_email)

    # Update CRM
    update_crm_status(investor_email, status='unsubscribed')

    # Log for compliance
    log_unsubscribe(investor_email, timestamp=datetime.now())

    return {"status": "unsubscribed", "email": investor_email}


# Check suppression list before sending
def is_suppressed(email: str) -> bool:
    """Check if email is on suppression list"""
    suppression_list = load_suppression_list()
    return email in suppression_list
Handling Investor Responses & Auto-Reply Detection
You need to detect when an investor replies (vs. auto-reply) and immediately stop the follow-up sequence. Use Gmail API or IMAP to monitor inbox, parse email headers to detect auto-replies (X-Autoreply, Auto-Submitted headers), and use AI to classify responses (interested, not interested, ask me later).
// Detect and classify investor responses
import { gmail_v1, google } from 'googleapis';
import Anthropic from '@anthropic-ai/sdk';

interface EmailResponse {
  messageId: string;
  from: string;
  subject: string;
  body: string;
  isAutoReply: boolean;
  classification?: 'interested' | 'not_interested' | 'ask_later' | 'other';
}

class ResponseHandler {
  private gmail: gmail_v1.Gmail;
  private anthropic: Anthropic;

  constructor() {
    const auth = new google.auth.GoogleAuth({
      scopes: ['https://www.googleapis.com/auth/gmail.readonly'],
    });
    this.gmail = google.gmail({ version: 'v1', auth });
    this.anthropic = new Anthropic({
      apiKey: process.env.ANTHROPIC_API_KEY!,
    });
  }

  async checkForReplies(investorEmail: string): Promise<EmailResponse[]> {
    // Search for replies from investor
    const response = await this.gmail.users.messages.list({
      userId: 'me',
      q: `from:${investorEmail} is:unread`,
    });

    const messages = response.data.messages || [];
    const replies: EmailResponse[] = [];

    for (const message of messages) {
      const fullMessage = await this.gmail.users.messages.get({
        userId: 'me',
        id: message.id!,
        format: 'full',
      });

      const headers = fullMessage.data.payload?.headers || [];
      const body = this.extractBody(fullMessage.data);

      // Check if auto-reply
      const isAutoReply = this.isAutoReply(headers);

      if (!isAutoReply) {
        // Classify response with AI
        const classification = await this.classifyResponse(body);

        replies.push({
          messageId: message.id!,
          from: investorEmail,
          subject: this.getHeader(headers, 'Subject'),
          body,
          isAutoReply: false,
          classification,
        });

        // Stop follow-up sequence
        await this.stopSequence(investorEmail);
      }
    }

    return replies;
  }

  private isAutoReply(headers: any[]): boolean {
    // Check for auto-reply headers
    const autoReplyHeaders = [
      'X-Autoreply',
      'X-Autorespond',
      'Auto-Submitted',
      'X-Auto-Response-Suppress',
    ];
    return headers.some((header) =>
      autoReplyHeaders.includes(header.name || '')
    );
  }

  private async classifyResponse(
    body: string
  ): Promise<EmailResponse['classification']> {
    const response = await this.anthropic.messages.create({
      model: 'claude-3-5-sonnet-20241022',
      max_tokens: 256,
      messages: [
        {
          role: 'user',
          content: `Classify this investor response:
"${body}"

Categories:
- interested: wants to meet, asks for more info, positive signal
- not_interested: clear no, not a fit, not investing now
- ask_later: timing is off, check back in X months
- other: unclear or needs human review

Output JSON: {classification: string, confidence: float, reasoning: string}`,
        },
      ],
    });

    const content = response.content[0];
    if (content.type !== 'text') throw new Error('Invalid response');
    const result = JSON.parse(content.text);
    return result.classification;
  }

  private async stopSequence(investorEmail: string): Promise<void> {
    // Remove from all active email sequences
    // Update CRM with response received
    console.log(`Stopped sequence for ${investorEmail}`);
  }

  private getHeader(headers: any[], name: string): string {
    const header = headers.find((h) => h.name === name);
    return header?.value || '';
  }

  private extractBody(message: any): string {
    // Extract email body (simplified)
    const parts = message.payload?.parts || [];
    const textPart = parts.find((p: any) => p.mimeType === 'text/plain');
    return textPart?.body?.data
      ? Buffer.from(textPart.body.data, 'base64').toString()
      : '';
  }
}

// Usage
const handler = new ResponseHandler();
const replies = await handler.checkForReplies('schen@accel.com');
console.log(`Found ${replies.length} real replies (filtered out auto-replies)`);
Cost Calculator
Manual Process
Limitations:
- Can't scale beyond 10-15 investors/week
- Inconsistent personalization quality
- Manual follow-up tracking (things slip through cracks)
- No engagement analytics
Automated Pipeline
Benefits:
- Process 200 investors/week (20x manual capacity)
- Consistent personalization quality (AI-scored)
- Automated follow-up sequences (4-email cadence)
- Real-time engagement tracking (opens, clicks, replies)
- Predictive fit scoring (focus on high-probability investors)