Fix feed OOM: lazy image loading, inView gating, WebKit memory tuning

- NoteContent: remove ImageGrid from inline mode — images now only load
  when note is inView (via mediaOnly), stopping runaway scrolling leak
- NoteCard: content-visibility:auto to skip layout/paint for off-screen
  cards; inView-gated media, NoteActions, NIP-05 verification
- useInView: new IntersectionObserver hook with 300px rootMargin
- useProfile: MAX_PROFILE_CONCURRENT=8 throttle with fetch queue
- useReplyCount/useZapCount/useReactions: enabled param, throttled queues
- feed.ts: MAX_FEED_SIZE 200→30, live sub disabled (pendingNotes pattern),
  250ms batch debounce on live events
- core.ts: MAX_CONCURRENT_FETCHES=25 global NDK cap, fetchWithTimeout
  uses subscribe+stop instead of fetchEvents (no zombie subscriptions)
- lib.rs: HardwareAccelerationPolicy::Never + CacheModel::DocumentViewer
- main.rs: WEBKIT_DISABLE_COMPOSITING_MODE=1 for Linux
- relay/db.rs: TTL eviction + 5000 event cap
- feedDiagnostics.ts: file-flushing diag log survives crashes
This commit is contained in:
Jure
2026-04-15 20:36:14 +02:00
parent 018ee0e0f3
commit 0894389fe0
20 changed files with 540 additions and 105 deletions

View File

@@ -38,6 +38,11 @@ Prerequisites: Node.js 20+, Rust stable, `@tauri-apps/cli`
3. Commit: `git commit -m "Bump to vX.Y.Z — <summary>"`
4. Tag: `git tag vX.Y.Z`
5. Push: `git push origin main vX.Y.Z`
6. Update AUR: in `/home/hoornet/projects/vega-aur/`, bump `pkgver=X.Y.Z` in `PKGBUILD`, then:
```bash
makepkg --printsrcinfo > .SRCINFO
git add PKGBUILD .SRCINFO && git commit -m "Bump to vX.Y.Z" && git push
```
CI triggers on the tag and builds all three platforms (Ubuntu, Windows, macOS ARM). All jobs must complete for `latest.json` to be assembled.

View File

@@ -27,6 +27,12 @@
{ "path": "**" }
]
},
{
"identifier": "fs:allow-write-text-file",
"allow": [
{ "path": "$HOME/vega-diag.log" }
]
},
"notification:default"
]
}

View File

@@ -441,13 +441,20 @@ pub fn run() {
{
let main_window = app.get_webview_window("main").unwrap();
main_window.with_webview(|webview| {
use webkit2gtk::{SettingsExt, WebViewExt};
use webkit2gtk::{CacheModel, SettingsExt, WebContextExt, WebViewExt};
let wv = webview.inner();
if let Some(settings) = wv.settings() {
settings.set_hardware_acceleration_policy(
webkit2gtk::HardwareAccelerationPolicy::Never,
);
}
// Minimize WebKit's in-memory content cache (decoded images, scripts, etc.)
// Default is WebBrowser which caches aggressively. DocumentViewer is the
// minimum: no back/forward page cache, smallest memory footprint.
// This is safe for Vega — it's a single-page app, never navigates between pages.
if let Some(ctx) = wv.context() {
ctx.set_cache_model(CacheModel::DocumentViewer);
}
}).ok();
}

View File

@@ -7,6 +7,12 @@ fn main() {
#[cfg(target_os = "linux")]
{
std::env::set_var("WEBKIT_DISABLE_DMABUF_RENDERER", "1");
// Required on Linux with large RAM/swap: WebKitGTK compositor pre-allocates
// ~25% of total virtual memory (RAM+swap) for its tile cache. On a 14GB RAM +
// 19GB swap system this is ~4 GB, filling all RAM and freezing the machine.
// Software rendering is slower but memory-safe. Fix: reduce swap or implement
// virtual scrolling (fewer compositor layers).
std::env::set_var("WEBKIT_DISABLE_COMPOSITING_MODE", "1");
}
vega_lib::run()

View File

@@ -3,6 +3,14 @@ use crate::relay::filter::Filter;
use rusqlite::{params, Connection};
use std::path::Path;
/// Keep at most this many text notes (kind 1) in the local relay cache.
/// Older notes beyond this limit are evicted on startup to bound memory usage.
const MAX_KIND1_EVENTS: usize = 5_000;
/// Delete text notes (kind 1) older than this many seconds on startup.
/// 7 days — remote relays have anything older.
const KIND1_TTL_SECS: i64 = 7 * 24 * 3600;
pub fn open_relay_db(data_dir: &Path) -> rusqlite::Result<Connection> {
std::fs::create_dir_all(data_dir).ok();
let path = data_dir.join("relay.db");
@@ -34,9 +42,53 @@ pub fn open_relay_db(data_dir: &Path) -> rusqlite::Result<Connection> {
CREATE INDEX IF NOT EXISTS idx_tags_name_value ON event_tags(tag_name, tag_value);
CREATE INDEX IF NOT EXISTS idx_tags_event ON event_tags(event_id);",
)?;
evict_old_events(&conn)?;
Ok(conn)
}
/// Remove stale text notes on startup to keep the relay cache bounded.
///
/// Two passes:
/// 1. Delete all kind-1 events older than KIND1_TTL_SECS (7 days).
/// 2. If more than MAX_KIND1_EVENTS remain, delete the oldest ones beyond that cap.
///
/// Other kinds (profiles, contact lists, etc.) are not evicted — they are
/// replaceable/parameterized-replaceable and stay small by design.
fn evict_old_events(conn: &Connection) -> rusqlite::Result<()> {
    use std::time::{SystemTime, UNIX_EPOCH};

    // Current unix time; a clock before the epoch degrades to 0, which
    // simply means the TTL pass deletes nothing.
    let now_secs = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .map_or(0, |d| d.as_secs() as i64);

    // Pass 1: TTL — drop kind-1 events past the 7-day cutoff.
    conn.execute(
        "DELETE FROM events WHERE kind = 1 AND created_at < ?1",
        params![now_secs - KIND1_TTL_SECS],
    )?;

    // Pass 2: count cap — keep only the newest MAX_KIND1_EVENTS kind-1 events.
    let remaining: i64 =
        conn.query_row("SELECT COUNT(*) FROM events WHERE kind = 1", [], |row| row.get(0))?;
    if remaining > MAX_KIND1_EVENTS as i64 {
        conn.execute(
            "DELETE FROM events WHERE kind = 1 AND id NOT IN (
            SELECT id FROM events WHERE kind = 1
            ORDER BY created_at DESC LIMIT ?1
        )",
            params![MAX_KIND1_EVENTS as i64],
        )?;
    }
    Ok(())
}
/// Store an event. Returns true if the event was newly inserted, false if it already existed.
/// Handles replaceable (kind 0/3/10000-19999) and parameterized replaceable (30000-39999) events.
pub fn store_event(conn: &Connection, event: &Event, raw: &str) -> rusqlite::Result<bool> {

View File

@@ -4,7 +4,7 @@ import { useUserStore } from "../../stores/user";
import { useMuteStore } from "../../stores/mute";
import { useUIStore } from "../../stores/ui";
import { useWoTStore } from "../../stores/wot";
import { fetchFollowFeed, getNDK, ensureConnected, batchFetchProfileAges } from "../../lib/nostr";
import { fetchFollowFeed, getNDK, ensureConnected } from "../../lib/nostr";
import { diagWrapFetch, logDiag } from "../../lib/feedDiagnostics";
import { detectScript, getEventLanguageTag, FILTER_SCRIPTS } from "../../lib/language";
import { NoteCard } from "./NoteCard";
@@ -26,6 +26,8 @@ function timeAgo(ts: number): string {
export function Feed() {
const notes = useFeedStore((s) => s.notes);
const pendingNotes = useFeedStore((s) => s.pendingNotes);
const flushPendingNotes = useFeedStore((s) => s.flushPendingNotes);
const loading = useFeedStore((s) => s.loading);
const error = useFeedStore((s) => s.error);
const connect = useFeedStore((s) => s.connect);
@@ -63,10 +65,7 @@ export function Feed() {
useEffect(() => {
// Show cached notes immediately, then fetch fresh ones once connected
loadCachedFeed();
connect().then(() => loadFeed().then(() => {
const pubkeys = [...new Set(useFeedStore.getState().notes.map((e) => e.pubkey))];
batchFetchProfileAges(pubkeys);
}));
connect().then(() => loadFeed());
}, []);
@@ -265,6 +264,16 @@ export function Feed() {
</div>
)}
{/* New notes banner — only shown on global tab */}
{tab === "global" && pendingNotes.length > 0 && (
<button
onClick={flushPendingNotes}
className="w-full py-2 text-[12px] text-accent border-b border-accent/20 bg-accent/5 hover:bg-accent/10 transition-colors"
>
{pendingNotes.length} new {pendingNotes.length === 1 ? "note" : "notes"} click to load
</button>
)}
{filteredNotes.map((event, index) =>
event.kind === 30023 ? (
<ArticleCard key={event.id} event={event} />

View File

@@ -17,9 +17,10 @@ interface NoteActionsProps {
event: NDKEvent;
onReplyToggle: () => void;
showReply: boolean;
enabled?: boolean;
}
export function NoteActions({ event, onReplyToggle, showReply }: NoteActionsProps) {
export function NoteActions({ event, onReplyToggle, showReply, enabled = true }: NoteActionsProps) {
const profile = useProfile(event.pubkey);
const name = profileName(profile, event.pubkey.slice(0, 8) + "…");
const avatar = typeof profile?.picture === "string" ? profile.picture : undefined;
@@ -27,12 +28,12 @@ export function NoteActions({ event, onReplyToggle, showReply }: NoteActionsProp
const { bookmarkedIds, addBookmark, removeBookmark } = useBookmarkStore();
const isBookmarked = bookmarkedIds.includes(event.id!);
const [reactionsData, addReaction] = useReactions(event.id);
const [reactionsData, addReaction] = useReactions(event.id, enabled);
const [reacting, setReacting] = useState(false);
const [showEmojiPicker, setShowEmojiPicker] = useState(false);
const [replyCount] = useReplyCount(event.id);
const [replyCount] = useReplyCount(event.id, enabled);
const [copied, setCopied] = useState(false);
const zapData = useZapCount(event.id);
const zapData = useZapCount(event.id, enabled);
const [showZap, setShowZap] = useState(false);
const [showQuote, setShowQuote] = useState(false);
const [reposting, setReposting] = useState(false);
@@ -227,10 +228,10 @@ export function NoteActions({ event, onReplyToggle, showReply }: NoteActionsProp
);
}
export function LoggedOutStats({ event }: { event: NDKEvent }) {
const [reactionsData] = useReactions(event.id);
const [replyCount] = useReplyCount(event.id);
const zapData = useZapCount(event.id);
export function LoggedOutStats({ event, enabled = true }: { event: NDKEvent; enabled?: boolean }) {
const [reactionsData] = useReactions(event.id, enabled);
const [replyCount] = useReplyCount(event.id, enabled);
const zapData = useZapCount(event.id, enabled);
const [copied, setCopied] = useState(false);
const handleShare = async () => {

View File

@@ -2,6 +2,7 @@ import { useState, useRef, useEffect, memo } from "react";
import { NDKEvent } from "@nostr-dev-kit/ndk";
import { useProfile } from "../../hooks/useProfile";
import { useNip05Verified } from "../../hooks/useNip05Verified";
import { useInView } from "../../hooks/useInView";
import { useUserStore } from "../../stores/user";
import { useMuteStore } from "../../stores/mute";
import { useUIStore } from "../../stores/ui";
@@ -27,12 +28,15 @@ function ParentAuthorName({ pubkey }: { pubkey: string }) {
}
export const NoteCard = memo(function NoteCard({ event, focused, onReplyInThread }: NoteCardProps) {
const cardRef = useRef<HTMLElement>(null);
const inView = useInView(cardRef);
const profile = useProfile(event.pubkey);
const rawName = profile?.displayName || profile?.name;
const name = (typeof rawName === "string" ? rawName : null) || shortenPubkey(event.pubkey);
const avatar = profile?.picture;
const nip05 = typeof profile?.nip05 === "string" ? profile.nip05 : null;
const verified = useNip05Verified(event.pubkey, nip05);
const verified = useNip05Verified(event.pubkey, nip05, inView);
const time = event.created_at ? timeAgo(event.created_at) : "";
const profileCreatedAt = getProfileAge(event.pubkey);
const isNewAccount = profileCreatedAt !== null && (Date.now() / 1000 - profileCreatedAt) < 60 * 24 * 3600;
@@ -56,7 +60,6 @@ export const NoteCard = memo(function NoteCard({ event, focused, onReplyInThread
const pTags = event.tags.filter((t) => t[0] === "p");
const parentAuthorPubkey = pTags.length > 0 ? pTags[pTags.length - 1][1] : null;
const cardRef = useRef<HTMLElement>(null);
useEffect(() => {
if (focused) cardRef.current?.scrollIntoView({ behavior: "smooth", block: "nearest" });
}, [focused]);
@@ -68,7 +71,7 @@ export const NoteCard = memo(function NoteCard({ event, focused, onReplyInThread
<article
ref={cardRef}
data-note-id={event.id}
className={`border-b border-border px-4 py-3 hover:bg-bg-hover transition-colors cursor-pointer group/card${focused ? " bg-accent/10 border-l-2 border-l-accent" : ""}`}
className={`border-b border-border px-4 py-3 hover:bg-bg-hover transition-colors cursor-pointer group/card [content-visibility:auto] [contain-intrinsic-size:auto_120px]${focused ? " bg-accent/10 border-l-2 border-l-accent" : ""}`}
onClick={(e) => {
// Don't navigate if clicking on interactive elements
const target = e.target as HTMLElement;
@@ -83,6 +86,8 @@ export const NoteCard = memo(function NoteCard({ event, focused, onReplyInThread
<img
src={avatar}
alt={`${name}'s avatar`}
width={36}
height={36}
className="w-9 h-9 rounded-sm object-cover bg-bg-raised ring-1 ring-transparent hover:ring-accent/40 transition-all"
loading="lazy"
onError={(e) => {
@@ -183,7 +188,7 @@ export const NoteCard = memo(function NoteCard({ event, focused, onReplyInThread
<div>
<NoteContent content={event.content} inline />
</div>
<NoteContent content={event.content} mediaOnly />
{inView && <NoteContent content={event.content} mediaOnly />}
{/* Poll options — kind 1068 */}
{event.kind === 1068 && <PollWidget event={event} />}
@@ -200,11 +205,12 @@ export const NoteCard = memo(function NoteCard({ event, focused, onReplyInThread
}
}}
showReply={showReply && !onReplyInThread}
enabled={inView}
/>
)}
{/* Stats visible when logged out */}
{!loggedIn && <LoggedOutStats event={event} />}
{!loggedIn && <LoggedOutStats event={event} enabled={inView} />}
{/* Inline reply box */}
{showReply && <InlineReplyBox event={event} name={name} />}

View File

@@ -164,13 +164,24 @@ export function NoteContent({ content, inline, mediaOnly }: NoteContentProps) {
const quoteIds: string[] = segments.filter((s) => s.type === "quote").map((s) => s.value);
const [lightboxIndex, setLightboxIndex] = useState<number | null>(null);
// --- Inline text + images (safe inside clickable wrapper) ---
// --- Inline text only (no images — images go in mediaOnly to allow inView gating) ---
if (inline) {
return (
<div>
<div className="note-content text-text text-[13px] break-words whitespace-pre-wrap leading-relaxed">
{renderTextSegments(segments, openHashtag, { resolveMentions: true })}
</div>
<div className="note-content text-text text-[13px] break-words whitespace-pre-wrap leading-relaxed">
{renderTextSegments(segments, openHashtag, { resolveMentions: true })}
</div>
);
}
// --- Media blocks only (rendered OUTSIDE the clickable wrapper, gated by inView) ---
// Images are included here so they only load when the note is near the viewport.
if (mediaOnly) {
const hasMedia = images.length > 0 || videos.length > 0 || audios.length > 0 || youtubes.length > 0
|| vimeos.length > 0 || spotifys.length > 0 || tidals.length > 0 || fountains.length > 0 || quoteIds.length > 0;
if (!hasMedia) return null;
return (
<div onClick={(e) => e.stopPropagation()}>
<ImageGrid images={images} onImageClick={setLightboxIndex} />
{lightboxIndex !== null && (
<ImageLightbox
@@ -180,18 +191,6 @@ export function NoteContent({ content, inline, mediaOnly }: NoteContentProps) {
onNavigate={setLightboxIndex}
/>
)}
</div>
);
}
// --- Media blocks only (rendered OUTSIDE the clickable wrapper) ---
if (mediaOnly) {
const hasMedia = videos.length > 0 || audios.length > 0 || youtubes.length > 0
|| vimeos.length > 0 || spotifys.length > 0 || tidals.length > 0 || fountains.length > 0 || quoteIds.length > 0;
if (!hasMedia) return null;
return (
<div onClick={(e) => e.stopPropagation()}>
<VideoBlock sources={videos} />
<AudioBlock sources={audios} />
{youtubes.map((seg, i) => <YouTubeCard key={`yt-${i}`} seg={seg} />)}

32
src/hooks/useInView.ts Normal file
View File

@@ -0,0 +1,32 @@
import { useEffect, useState } from "react";
/**
 * Returns true once the referenced element has entered the viewport.
 * Uses IntersectionObserver with a generous rootMargin so data fetches
 * start slightly before the card scrolls fully into view.
 *
 * Once visible, stays true — we never un-fetch engagement data.
 */
export function useInView(ref: React.RefObject<HTMLElement | null>, rootMargin = "300px"): boolean {
  const [inView, setInView] = useState(false);

  useEffect(() => {
    // Already seen — nothing left to observe.
    if (inView) return;
    const el = ref.current;
    if (!el) return;

    const io = new IntersectionObserver(
      (entries) => {
        if (entries[0].isIntersecting) {
          setInView(true);
          io.disconnect();
        }
      },
      { rootMargin },
    );
    io.observe(el);
    return () => io.disconnect();
  }, [ref, inView, rootMargin]);

  return inView;
}

View File

@@ -20,7 +20,7 @@ async function verifyNip05(pubkey: string, nip05: string): Promise<VerifyStatus>
}
}
export function useNip05Verified(pubkey: string, nip05: string | undefined): "valid" | "invalid" | "checking" | null {
export function useNip05Verified(pubkey: string, nip05: string | undefined, enabled = true): "valid" | "invalid" | "checking" | null {
const [status, setStatus] = useState<"valid" | "invalid" | "checking" | null>(() => {
if (!nip05) return null;
const cached = cache.get(pubkey);
@@ -30,6 +30,7 @@ export function useNip05Verified(pubkey: string, nip05: string | undefined): "va
useEffect(() => {
if (!nip05) { setStatus(null); return; }
if (!enabled) return;
const cached = cache.get(pubkey);
if (cached && Date.now() - cached.checkedAt < TTL) {
@@ -45,7 +46,7 @@ export function useNip05Verified(pubkey: string, nip05: string | undefined): "va
setStatus(result);
});
return () => { cancelled = true; };
}, [pubkey, nip05]);
}, [pubkey, nip05, enabled]);
return status;
}

View File

@@ -6,6 +6,20 @@ const PROFILE_CACHE_MAX = 500;
const profileCache = new Map<string, any>();
const pendingRequests = new Map<string, Promise<any>>();
// Hard cap on concurrent NDK profile fetches.
// Without this, rendering 200 cached notes triggers 200 simultaneous
// user.fetchProfile() calls (each creates an NDK subscription) → OOM.
let activeProfileFetches = 0;
const MAX_PROFILE_CONCURRENT = 8;
const profileFetchQueue: Array<() => void> = [];
// Drain the pending queue into any free slots under the concurrency cap.
function runNextProfileFetch() {
  while (activeProfileFetches < MAX_PROFILE_CONCURRENT && profileFetchQueue.length > 0) {
    profileFetchQueue.shift()!();
  }
}
function pruneProfileCache() {
if (profileCache.size > PROFILE_CACHE_MAX) {
// Drop oldest entries (Map preserves insertion order)
@@ -32,21 +46,33 @@ export function useProfile(pubkey: string) {
return;
}
// Kick off relay fetch (deduplicated across simultaneous callers)
// Kick off relay fetch (deduplicated + concurrency-throttled)
if (!pendingRequests.has(pubkey)) {
const request = fetchProfile(pubkey)
.then((p) => {
const result = p ?? null;
profileCache.set(pubkey, result);
pruneProfileCache();
pendingRequests.delete(pubkey);
if (result) dbSaveProfile(pubkey, JSON.stringify(result));
return result;
})
.catch(() => {
pendingRequests.delete(pubkey);
return null;
});
const request = new Promise<any>((resolve) => {
const doFetch = () => {
activeProfileFetches++;
fetchProfile(pubkey)
.then((p) => {
const result = p ?? null;
profileCache.set(pubkey, result);
pruneProfileCache();
if (result) dbSaveProfile(pubkey, JSON.stringify(result));
resolve(result);
})
.catch(() => resolve(null))
.finally(() => {
activeProfileFetches--;
pendingRequests.delete(pubkey);
runNextProfileFetch();
});
};
if (activeProfileFetches < MAX_PROFILE_CONCURRENT) {
doFetch();
} else {
profileFetchQueue.push(doFetch);
}
});
pendingRequests.set(pubkey, request);
}

View File

@@ -49,7 +49,7 @@ function throttledFetch(eventId: string, pubkey?: string): Promise<GroupedReacti
return promise;
}
export function useReactions(eventId: string): [GroupedReactions | null, (emoji: string) => void] {
export function useReactions(eventId: string, enabled = true): [GroupedReactions | null, (emoji: string) => void] {
const [data, setData] = useState<GroupedReactions | null>(() => cache.get(eventId) ?? null);
const pubkeyRef = useRef(useUserStore.getState().pubkey);
@@ -58,6 +58,7 @@ export function useReactions(eventId: string): [GroupedReactions | null, (emoji:
});
useEffect(() => {
if (!enabled) return;
if (cache.has(eventId)) {
setData(cache.get(eventId)!);
return;
@@ -70,7 +71,7 @@ export function useReactions(eventId: string): [GroupedReactions | null, (emoji:
}
});
return () => { cancelled = true; };
}, [eventId]);
}, [eventId, enabled]);
const addReaction = (emoji: string) => {
setData((prev) => {

View File

@@ -2,20 +2,63 @@ import { useEffect, useState } from "react";
import { fetchReplyCount } from "../lib/nostr";
const cache = new Map<string, number>();
const pending = new Map<string, Promise<number>>();
let activeCount = 0;
const MAX_CONCURRENT = 6;
const queue: Array<() => void> = [];
export function useReplyCount(eventId: string): [number | null, (delta: number) => void] {
// Called each time a fetch slot frees up: start the next queued fetch, if any.
// Each completion frees exactly one slot, so dequeuing a single task suffices.
function runNext() {
  if (activeCount < MAX_CONCURRENT && queue.length > 0) {
    queue.shift()!();
  }
}
// Deduplicated, concurrency-capped reply-count fetch.
// At most MAX_CONCURRENT requests run at once; extras wait in `queue`.
function throttledFetch(eventId: string): Promise<number> {
  const inFlight = pending.get(eventId);
  if (inFlight) return inFlight;

  const promise = new Promise<number>((resolve) => {
    const run = () => {
      activeCount++;
      fetchReplyCount(eventId)
        .then(resolve)
        .catch(() => resolve(0)) // fetch failure degrades to "0 replies"
        .finally(() => {
          activeCount--;
          pending.delete(eventId);
          runNext();
        });
    };
    if (activeCount >= MAX_CONCURRENT) {
      queue.push(run);
    } else {
      run();
    }
  });
  pending.set(eventId, promise);
  return promise;
}
export function useReplyCount(eventId: string, enabled = true): [number | null, (delta: number) => void] {
const [count, setCount] = useState<number | null>(() => cache.get(eventId) ?? null);
useEffect(() => {
if (!enabled) return;
if (cache.has(eventId)) {
setCount(cache.get(eventId)!);
return;
}
fetchReplyCount(eventId).then((n) => {
cache.set(eventId, n);
setCount(n);
let cancelled = false;
throttledFetch(eventId).then((n) => {
if (!cancelled) {
cache.set(eventId, n);
setCount(n);
}
});
}, [eventId]);
return () => { cancelled = true; };
}, [eventId, enabled]);
const adjust = (delta: number) => {
setCount((prev) => {

View File

@@ -4,20 +4,63 @@ import { fetchZapCount } from "../lib/nostr";
interface ZapData { count: number; totalSats: number; }
const cache = new Map<string, ZapData>();
const pending = new Map<string, Promise<ZapData>>();
let activeCount = 0;
const MAX_CONCURRENT = 4;
const queue: Array<() => void> = [];
export function useZapCount(eventId: string): ZapData | null {
// Called each time a zap-count slot frees up: start the next queued fetch.
// Each completion frees exactly one slot, so dequeuing a single task suffices.
function runNext() {
  if (activeCount < MAX_CONCURRENT && queue.length > 0) {
    queue.shift()!();
  }
}
// Deduplicated, concurrency-capped zap-count fetch.
// At most MAX_CONCURRENT requests run at once; extras wait in `queue`.
function throttledFetch(eventId: string): Promise<ZapData> {
  const inFlight = pending.get(eventId);
  if (inFlight) return inFlight;

  const promise = new Promise<ZapData>((resolve) => {
    const run = () => {
      activeCount++;
      fetchZapCount(eventId)
        .then(resolve)
        .catch(() => resolve({ count: 0, totalSats: 0 })) // failure → zero zaps
        .finally(() => {
          activeCount--;
          pending.delete(eventId);
          runNext();
        });
    };
    if (activeCount >= MAX_CONCURRENT) {
      queue.push(run);
    } else {
      run();
    }
  });
  pending.set(eventId, promise);
  return promise;
}
/**
 * Zap count + total sats for an event, fetched lazily.
 *
 * @param eventId  note id to look up
 * @param enabled  gate fetching (e.g. only when the card is in view); defaults to true
 * @returns cached/fetched ZapData, or null while unknown
 */
export function useZapCount(eventId: string, enabled = true): ZapData | null {
  const [data, setData] = useState<ZapData | null>(() => cache.get(eventId) ?? null);

  useEffect(() => {
    if (!enabled) return;
    if (cache.has(eventId)) {
      setData(cache.get(eventId)!);
      return;
    }
    let cancelled = false;
    throttledFetch(eventId).then((d) => {
      // Cache unconditionally: the network round-trip already happened, so
      // keep the result even if this component unmounted — the next mount
      // reuses it instead of re-fetching. Only the setState is gated.
      cache.set(eventId, d);
      if (!cancelled) setData(d);
    });
    return () => { cancelled = true; };
  }, [eventId, enabled]);

  return data;
}

View File

@@ -4,9 +4,14 @@
* Data stored in localStorage under "wrystr_feed_diag".
* View in console: JSON.parse(localStorage.getItem("wrystr_feed_diag"))
* Or open DevTools and call: window.__feedDiag()
*
 * File log: ~/vega-diag.log — written every 500ms, survives WebKit crashes and hard reboots.
* Inspect after crash: tail -100 ~/vega-diag.log | python3 -c "import sys,json;[print(json.dumps(json.loads(l),indent=2)) for l in sys.stdin]"
*/
import { getNDK } from "./nostr/core";
import { writeTextFile } from "@tauri-apps/plugin-fs";
import { homeDir } from "@tauri-apps/api/path";
import { getNDK, getActiveFetchCount } from "./nostr/core";
import { debug } from "./debug";
const isDev = import.meta.env.DEV;
@@ -14,6 +19,70 @@ const isDev = import.meta.env.DEV;
const DIAG_KEY = "wrystr_feed_diag";
const MAX_ENTRIES = 200;
// ─── Disk-based diagnostic log ────────────────────────────────────────────────
// Writes JSON-lines to ~/vega-diag.log every 500ms.
// Survives WebKit crashes and hard reboots — inspect after hang:
// tail -100 ~/vega-diag.log | python3 -c "import sys,json;[print(json.dumps(json.loads(l),indent=2)) for l in sys.stdin if l.strip()]"
const diagFileBuffer: string[] = [];
let diagFlushTimer: ReturnType<typeof setInterval> | null = null;
let diagLogPath: string | null = null;
// Resolve (once) the diag-log path. Falls back to /tmp when the Tauri
// homeDir API is unavailable; the result is memoized in `diagLogPath`.
async function getDiagLogPath(): Promise<string> {
  if (diagLogPath) return diagLogPath;
  try {
    diagLogPath = (await homeDir()) + "/vega-diag.log";
  } catch {
    diagLogPath = "/tmp/vega-diag.log";
  }
  return diagLogPath;
}
// Append all buffered JSON lines to the diag log file and empty the buffer.
// splice(0) atomically takes the lines, so entries pushed during the await
// stay queued for the next flush.
async function flushDiagBuffer() {
  if (diagFileBuffer.length === 0) return;
  const pendingLines = diagFileBuffer.splice(0);
  try {
    await writeTextFile(await getDiagLogPath(), pendingLines.join("\n") + "\n", { append: true });
  } catch { /* never crash the app on diag write failure */ }
}
/**
 * Start periodic disk flushing and memory snapshots.
 * Call once at app startup. Data written to ~/vega-diag.log every 500ms.
 * Idempotent — repeated calls are no-ops once the timer is running.
 */
export function startDiagFileFlusher() {
  if (diagFlushTimer) return;
  // Write a session-start marker so crash windows are delimited in the log
  const marker = { ts: Date.now(), t: "session_start", v: "vega-diag-v1" };
  diagFileBuffer.push(JSON.stringify(marker));
  // Flush immediately so data hits disk before any crash
  flushDiagBuffer();
  diagFlushTimer = setInterval(async () => {
    // Memory snapshot — performance.memory is non-standard (WebKit/Chromium);
    // -1 fields below mean it was unavailable in this engine
    const mem = (performance as unknown as { memory?: { usedJSHeapSize: number; totalJSHeapSize: number; jsHeapSizeLimit: number } }).memory;
    const ndk = getNDK();
    const relayCount = ndk.pool?.relays?.size ?? 0;
    const connectedRelays = Array.from(ndk.pool?.relays?.values() ?? []).filter((r) => r.connected).length;
    diagFileBuffer.push(JSON.stringify({
      ts: Date.now(),
      t: "mem",
      heapMb: mem ? Math.round(mem.usedJSHeapSize / 1048576) : -1,
      heapTotalMb: mem ? Math.round(mem.totalJSHeapSize / 1048576) : -1,
      heapLimitMb: mem ? Math.round(mem.jsHeapSizeLimit / 1048576) : -1,
      activeFetches: getActiveFetchCount(),
      relays: `${connectedRelays}/${relayCount}`,
    }));
    await flushDiagBuffer();
  }, 500); // 500ms — fast enough to capture pre-crash state
}
export interface DiagEntry {
ts: string; // ISO timestamp
action: string; // "global_fetch" | "follow_fetch" | "refresh_click" | "relay_state" | etc.
@@ -48,6 +117,9 @@ export function logDiag(entry: DiagEntry) {
log.push(entry);
saveLog(log);
// Also buffer to disk log (flushed every 500ms by startDiagFileFlusher)
diagFileBuffer.push(JSON.stringify({ ...entry, _ms: Date.now() }));
// Also log to console with color coding
const style = entry.error
? "color: #ff4444; font-weight: bold"

View File

@@ -18,23 +18,86 @@ export const FEED_TIMEOUT = 8000; // 8s for feed fetches
export const THREAD_TIMEOUT = 5000; // 5s per thread round-trip
export const SINGLE_TIMEOUT = 5000; // 5s for single event lookups
const EMPTY_SET = new Set<NDKEvent>();
// ─── Active fetch counter + concurrency semaphore ──────────────────
let _activeFetchCount = 0;
/** Number of in-flight fetchWithTimeout calls (subscriptions currently open). */
export function getActiveFetchCount(): number { return _activeFetchCount; }
/** Fetch events with a timeout — returns empty set if relay hangs. */
export async function fetchWithTimeout(
// Hard cap on concurrent NDK subscriptions.
// Without this, rendering 200 cached notes triggers 400+ simultaneous subscriptions
// (useReplyCount + useZapCount per note), each receiving events from 7+ relays → OOM.
const MAX_CONCURRENT_FETCHES = 25;
const _fetchQueue: Array<() => void> = [];
// Start queued fetches until the global concurrency cap is hit or the queue empties.
function _runNextFetch() {
  while (_activeFetchCount < MAX_CONCURRENT_FETCHES && _fetchQueue.length > 0) {
    _fetchQueue.shift()!();
  }
}
/**
 * Fetch events with explicit subscription lifecycle.
 *
 * IMPORTANT: Do NOT use instance.fetchEvents() here. fetchEvents() creates an
 * NDK subscription internally that we cannot cancel if the timeout fires first.
 * Abandoned subscriptions keep receiving relay data forever, leaking memory.
 *
 * This implementation uses subscribe() directly so we can call sub.stop() on
 * both EOSE and timeout — guaranteeing no zombie subscriptions.
 *
 * Concurrency is capped at MAX_CONCURRENT_FETCHES. Excess calls queue and
 * start as slots free up.
 */
export function fetchWithTimeout(
  instance: NDK,
  filter: NDKFilter,
  timeoutMs: number,
  relaySet?: NDKRelaySet,
): Promise<Set<NDKEvent>> {
  return new Promise((resolve) => {
    const start = () => {
      const events = new Set<NDKEvent>();
      let settled = false;
      // Declared (and nulled) BEFORE any handler is wired: finish() reads
      // `timer`, and an EOSE delivered before setTimeout() ran would otherwise
      // hit a temporal-dead-zone ReferenceError on a `const` timer.
      let timer: ReturnType<typeof setTimeout> | null = null;
      _activeFetchCount++;

      // Idempotent teardown: close the subscription, settle the promise,
      // release the concurrency slot, and kick the queue.
      const finish = () => {
        if (settled) return;
        settled = true;
        _activeFetchCount--;
        if (timer !== null) clearTimeout(timer);
        try { sub.stop(); } catch { /* ignore */ }
        resolve(events);
        _runNextFetch();
      };

      const sub = instance.subscribe(
        filter,
        {
          cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
          groupable: false, // Prevent NDK from batching/reusing subscriptions
          closeOnEose: true,
        },
        relaySet,
      );
      sub.on("event", (event: NDKEvent) => {
        if (!settled) events.add(event);
      });
      sub.on("eose", finish);

      timer = setTimeout(() => {
        debug.warn(`[Vega] Fetch timed out after ${timeoutMs}ms (collected ${events.size} events, queue: ${_fetchQueue.length})`);
        finish();
      }, timeoutMs);
    };

    if (_activeFetchCount < MAX_CONCURRENT_FETCHES) {
      start();
    } else {
      _fetchQueue.push(start);
    }
  });
}
export const RELAY_STORAGE_KEY = "wrystr_relays";
@@ -101,7 +164,10 @@ export function getNDK(): NDK {
if (!ndk) {
ndk = new NDK({
explicitRelayUrls: getStoredRelayUrls(),
outboxRelayUrls: OUTBOX_RELAYS,
// outboxRelayUrls intentionally omitted — enabling NDK's outbox model causes
// it to discover and connect to every event author's preferred relays, ballooning
// the relay pool from 7 to 40+ and flooding startLiveFeed() with a firehose of
// events from all those relays simultaneously → OOM crash.
});
ndkCreatedAt = Date.now();
}
@@ -121,12 +187,9 @@ export async function resetNDK(): Promise<void> {
const oldInstance = ndk;
const oldSigner = oldInstance?.signer ?? null;
// Preserve all relay URLs (stored + outbox-discovered) before resetting
const oldRelayUrls = oldInstance?.pool?.relays
? Array.from(oldInstance.pool.relays.keys()).map(normalizeRelayUrl)
: [];
// Only preserve the stored relay URLs — do NOT preserve outbox-discovered relays.
// Outbox-discovered relays are the source of the relay pool explosion (7 → 40+).
const storedUrls = getStoredRelayUrls();
const allUrls = [...new Set([...storedUrls, ...oldRelayUrls])];
// Disconnect all relays on old instance
if (oldInstance?.pool?.relays) {
@@ -135,10 +198,10 @@ export async function resetNDK(): Promise<void> {
}
}
// Create fresh instance with all known relay URLs
// Create fresh instance with only the stored relay URLs
ndk = new NDK({
explicitRelayUrls: allUrls,
outboxRelayUrls: OUTBOX_RELAYS,
explicitRelayUrls: storedUrls,
// outboxRelayUrls intentionally omitted — see getNDK() comment
});
ndkCreatedAt = Date.now();

View File

@@ -31,6 +31,10 @@ class ErrorBoundary extends Component<{ children: ReactNode }, { error: Error |
}
}
// Start disk diagnostics immediately — before any async work — so data
// reaches ~/vega-diag.log even if the app crashes in the first few seconds.
import("./lib/feedDiagnostics").then(({ startDiagFileFlusher }) => startDiagFileFlusher());
// Restore session — pubkey (read-only) or nsec via OS keychain
useUserStore.getState().restoreSession();

View File

@@ -4,25 +4,34 @@ import { connectToRelays, ensureConnected, resetNDK, fetchGlobalFeed, fetchBatch
import { seedReactionsCache } from "../hooks/useReactions";
import { useToastStore } from "./toast";
import { dbLoadFeed, dbSaveNotes } from "../lib/db";
import { diagWrapFetch, logDiag, startRelaySnapshots, getRelayStates } from "../lib/feedDiagnostics";
import { diagWrapFetch, logDiag, startRelaySnapshots, startDiagFileFlusher, getRelayStates } from "../lib/feedDiagnostics";
import { debug } from "../lib/debug";
// Local relay imports deferred to avoid circular dependency
// import { isLocalRelayEnabled, connectLocalRelay } from "../lib/localRelay";
const TRENDING_CACHE_KEY = "wrystr_trending_cache";
const TRENDING_TTL = 10 * 60 * 1000; // 10 minutes
const MAX_FEED_SIZE = 200;
const MAX_FEED_SIZE = 30;
// Live subscription handle — persists across store calls
let liveSub: NDKSubscription | null = null;
// Batch incoming live events — flush to state every 250ms instead of per-event.
// Without this, 8 relays × N events/s each trigger individual Zustand updates
// → individual React re-renders → cascading profile/image fetches → OOM.
let liveBatch: NDKEvent[] = [];
let liveBatchTimer: ReturnType<typeof setTimeout> | null = null;
export function isLiveSubActive(): boolean {
return liveSub !== null;
}
let saveTimer: ReturnType<typeof setTimeout> | null = null;
// Guards against React StrictMode double-invoke and concurrent calls
let connectCalled = false;
let checkInterval: ReturnType<typeof setInterval> | null = null;
interface FeedState {
notes: NDKEvent[];
pendingNotes: NDKEvent[];
loading: boolean;
connected: boolean;
error: string | null;
@@ -34,12 +43,14 @@ interface FeedState {
loadCachedFeed: () => Promise<void>;
loadFeed: () => Promise<void>;
startLiveFeed: () => void;
flushPendingNotes: () => void;
loadTrendingFeed: (force?: boolean) => Promise<void>;
setFocusedNoteIndex: (n: number) => void;
}
export const useFeedStore = create<FeedState>((set, get) => ({
notes: [],
pendingNotes: [],
loading: false,
connected: false,
error: null,
@@ -48,8 +59,22 @@ export const useFeedStore = create<FeedState>((set, get) => ({
trendingNotes: [],
trendingLoading: false,
setFocusedNoteIndex: (n: number) => set({ focusedNoteIndex: n }),
flushPendingNotes: () => {
const { pendingNotes, notes } = get();
if (pendingNotes.length === 0) return;
const existingIds = new Set(notes.map((n) => n.id));
const newEvents = pendingNotes.filter((e) => !existingIds.has(e.id));
const merged = [...newEvents, ...notes]
.sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0))
.slice(0, MAX_FEED_SIZE);
set({ notes: merged, pendingNotes: [], lastUpdated: { ...get().lastUpdated, global: Date.now() } });
},
connect: async () => {
// Guard: React StrictMode double-invokes effects — only connect once.
if (connectCalled) return;
connectCalled = true;
try {
set({ error: null });
const connectStart = performance.now();
@@ -86,6 +111,7 @@ export const useFeedStore = create<FeedState>((set, get) => ({
details: `Initial connection complete`,
});
startRelaySnapshots();
startDiagFileFlusher(); // writes ~/vega-diag.log every 500ms — survives crashes
// Monitor relay connectivity — check every 5s, reconnect if needed.
// Always call getNDK() fresh — instance may be replaced by resetNDK().
@@ -130,8 +156,13 @@ export const useFeedStore = create<FeedState>((set, get) => ({
}
}
};
setInterval(checkConnection, 5000);
// Store interval handle so it's never duplicated (guard above prevents this,
// but be defensive in case resetNDK restarts things)
if (checkInterval) clearInterval(checkInterval);
checkInterval = setInterval(checkConnection, 5000);
} catch (err) {
connectCalled = false; // allow retry on error
set({ error: `Connection failed: ${err}` });
}
},
@@ -170,8 +201,10 @@ export const useFeedStore = create<FeedState>((set, get) => ({
// Persist fresh notes to SQLite (fire-and-forget)
dbSaveNotes(fresh.map((e) => JSON.stringify(e.rawEvent())));
// Start live subscription after initial load
get().startLiveFeed();
// Live subscription disabled: NDK accumulates all incoming firehose events
// internally regardless of our pendingNotes cap, causing unbounded memory growth.
// Feed is manual-refresh only until NDK subscription memory is resolved.
// get().startLiveFeed();
} catch (err) {
set({ error: `Feed failed: ${err}`, loading: false });
}
@@ -199,22 +232,28 @@ export const useFeedStore = create<FeedState>((set, get) => ({
});
sub.on("event", (event: NDKEvent) => {
const current = get().notes;
// Deduplicate
if (current.some((n) => n.id === event.id)) return;
// Accumulate incoming events into pendingNotes — do NOT render them
// immediately. Rendering new notes triggers profile image loads which
// accumulate in WebKit's decoded image cache without eviction, causing
// unbounded memory growth. The user clicks "N new notes" to flush.
liveBatch.push(event);
const updated = [event, ...current]
.sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0))
.slice(0, MAX_FEED_SIZE);
set({ notes: updated, lastUpdated: { ...get().lastUpdated, global: Date.now() } });
if (!liveBatchTimer) {
liveBatchTimer = setTimeout(() => {
liveBatchTimer = null;
const batch = liveBatch;
liveBatch = [];
// Debounced save to SQLite — batch saves every 5s
if (!saveTimer) {
saveTimer = setTimeout(() => {
saveTimer = null;
const toSave = get().notes.slice(0, 20);
dbSaveNotes(toSave.map((e) => JSON.stringify(e.rawEvent())));
}, 5000);
const current = get().notes;
const pending = get().pendingNotes;
const existingIds = new Set([...current, ...pending].map((n) => n.id));
const newEvents = batch.filter((e) => !existingIds.has(e.id));
if (newEvents.length === 0) return;
// Cap pending at 100 to avoid unbounded accumulation
const updatedPending = [...newEvents, ...pending].slice(0, 100);
set({ pendingNotes: updatedPending });
}, 250);
}
});

View File

@@ -9,6 +9,26 @@ const host = process.env.TAURI_DEV_HOST;
export default defineConfig(async () => ({
plugins: [react(), tailwindcss()],
// Pre-bundle Tauri API modules so Vite never triggers a full-reload
// mid-session when it detects a "new" dependency.
optimizeDeps: {
include: ["@tauri-apps/api/path"],
},
build: {
rollupOptions: {
output: {
manualChunks: {
// NDK is the largest dependency (~300kB) — split it out so the
// main bundle parses faster at startup and the chunk can be cached.
"ndk": ["@nostr-dev-kit/ndk"],
// React runtime — stable across releases, cache-friendly.
"vendor": ["react", "react-dom"],
},
},
},
},
// Vite options tailored for Tauri development and only applied in `tauri dev` or `tauri build`
//
// 1. prevent Vite from obscuring rust errors