Extract insights from newsletters you follow
Summarizes the latest issue from newsletters you follow
What you will receive
Newsletter Takeaway: Stratechery
just now
Latest from Stratechery: "Apple's Vision Pro and the Future of Computing" Key takeaway: The Vision Pro isn't competing with the Meta Quest—it's positioning for a future where spatial computing replaces the desktop. Why it matters: Apple is betting that the next 10-year platform shift starts now, and they want to own the high end from day one. Read the full issue →
How it works
1. Humrun fetches the latest issue from a newsletter archive
2. AI extracts the key takeaway and why it matters
3. You get a quick summary without reading the full issue
You configure
https://newsletter.example.com/archive
Substack, Buttondown, or any newsletter archive page
sk-...
For generating summaries
View Python code
import requests
from bs4 import BeautifulSoup
import feedparser
import os
# --- Configuration -------------------------------------------------------
NEWSLETTER_URL = os.environ.get("NEWSLETTER_URL")
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")

# Fail fast with a clear message (and non-zero exit code) instead of an
# AttributeError on .rstrip() when the env var is missing.
if not NEWSLETTER_URL:
    raise SystemExit("NEWSLETTER_URL environment variable is not set")

from urllib.parse import urljoin  # used for resolving relative article links

# --- Fetch the latest issue ----------------------------------------------
# Try RSS first: most newsletter platforms expose a feed at /feed or /rss,
# and the bare URL itself may already be a feed.
rss_urls = [
    NEWSLETTER_URL.rstrip("/") + "/feed",
    NEWSLETTER_URL.rstrip("/") + "/rss",
    NEWSLETTER_URL,
]
post = None
for rss_url in rss_urls:
    feed = feedparser.parse(rss_url)
    if feed.entries:
        post = feed.entries[0]
        break

if not post:
    # Fallback: scrape the archive page for the newest article link.
    response = requests.get(
        NEWSLETTER_URL,
        headers={"User-Agent": "Mozilla/5.0"},
        timeout=30,  # don't hang forever on an unresponsive host
    )
    response.raise_for_status()  # surface 4xx/5xx instead of parsing an error page
    soup = BeautifulSoup(response.text, "html.parser")
    # Selectors covering common archive layouts (Substack, Buttondown, blogs).
    links = soup.select("article a, .post-preview a, .post a, h2 a")
    if links:
        link = links[0].get("href", "")
        if not link.startswith("http"):
            # Resolve a relative href against the archive URL.
            link = urljoin(NEWSLETTER_URL, link)
        article_resp = requests.get(
            link, headers={"User-Agent": "Mozilla/5.0"}, timeout=30
        )
        article_resp.raise_for_status()
        article_soup = BeautifulSoup(article_resp.text, "html.parser")
        title_elem = article_soup.select_one("h1, .post-title")
        title = title_elem.get_text(strip=True) if title_elem else "Latest Issue"
        # Cap the body so the LLM prompt stays small.
        content = article_soup.get_text(strip=True, separator=" ")[:2500]
    else:
        # Non-zero exit so cron/schedulers report the failure instead of
        # silently succeeding with exit code 0.
        raise SystemExit("Could not find newsletter content")
else:
    title = post.get("title", "Latest Issue")
    content = post.get("summary", post.get("description", ""))[:2500]
    link = post.get("link", NEWSLETTER_URL)
# --- Generate takeaway ---------------------------------------------------
# Build a compact prompt from the issue's title and (truncated) content.
prompt = f"""Summarize this newsletter issue into a quick takeaway.
Title: {title}
Content: {content}
Format:
- Key takeaway (1-2 sentences, the main insight)
- Why it matters (1 sentence)
Keep it under 75 words total. Be specific, not generic."""

response = requests.post(
    "https://api.openai.com/v1/chat/completions",
    headers={"Authorization": f"Bearer {OPENAI_API_KEY}"},
    json={
        "model": "gpt-4o-mini",
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": 200,
    },
    timeout=60,  # LLM calls can be slow, but must not hang indefinitely
)
response.raise_for_status()  # turn 401/429/5xx into a clear error

data = response.json()
# Guard against an error payload (no "choices") so we fail with the API's
# message rather than an opaque KeyError.
if "choices" not in data:
    raise SystemExit(f"Unexpected OpenAI response: {data}")
takeaway = data["choices"][0]["message"]["content"]

print(f"Latest: {title}\n")
print(takeaway)
print(f"\nRead more: {link}")

Suggested schedule: Every day at 8 AM • Notifications: After every run