mirror of
https://github.com/hoornet/vega.git
synced 2026-05-10 22:29:11 -07:00
Fix relay connectivity: remove aggressive liveness probe, add fetch timeouts
The liveness probe in ensureConnected was causing a death spiral — it force-disconnected working relays when a 3s probe timed out, then resetNDK killed new connections before they could establish. Now ensureConnected trusts relay.connected and only reconnects when zero relays are connected. All fetchEvents calls have timeouts (5-10s) so nothing hangs. resetNDK kept as background-only recovery in the connection monitor after 30s of continuous failure. Also adds feed diagnostics (feedDiagnostics.ts) for tracking fetch timing, event freshness, and relay states via console helpers.
This commit is contained in:
209
src/lib/feedDiagnostics.ts
Normal file
209
src/lib/feedDiagnostics.ts
Normal file
@@ -0,0 +1,209 @@
|
||||
/**
|
||||
* Feed diagnostics logger.
|
||||
* Tracks every feed fetch with relay states, event freshness, timing.
|
||||
* Data stored in localStorage under "wrystr_feed_diag".
|
||||
* View in console: JSON.parse(localStorage.getItem("wrystr_feed_diag"))
|
||||
* Or open DevTools and call: window.__feedDiag()
|
||||
*/
|
||||
|
||||
import { getNDK } from "./nostr/core";
|
||||
|
||||
// localStorage key the diagnostics log is persisted under.
const DIAG_KEY = "wrystr_feed_diag";
// Cap on persisted entries — saveLog keeps only the most recent this many.
const MAX_ENTRIES = 200;
|
||||
|
||||
/**
 * One diagnostics record: a single feed action together with its timing,
 * result statistics and the relay states observed when it ran.
 * All fields except `ts` and `action` are optional — entries carry only
 * what the logging call site measured.
 */
export interface DiagEntry {
  ts: string; // ISO timestamp
  action: string; // "global_fetch" | "follow_fetch" | "refresh_click" | "relay_state" | etc.
  durationMs?: number;
  eventsReturned?: number;
  newestEventAge?: number; // seconds since newest event was created
  oldestEventAge?: number; // seconds since oldest event was created
  medianEventAge?: number;
  relayStates?: Record<string, { connected: boolean; status: number }>;
  error?: string;
  details?: string;
}
|
||||
|
||||
function getLog(): DiagEntry[] {
|
||||
try {
|
||||
return JSON.parse(localStorage.getItem(DIAG_KEY) || "[]");
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
function saveLog(entries: DiagEntry[]) {
|
||||
localStorage.setItem(DIAG_KEY, JSON.stringify(entries.slice(-MAX_ENTRIES)));
|
||||
}
|
||||
|
||||
export function logDiag(entry: DiagEntry) {
|
||||
const log = getLog();
|
||||
log.push(entry);
|
||||
saveLog(log);
|
||||
|
||||
// Also log to console with color coding
|
||||
const style = entry.error
|
||||
? "color: #ff4444; font-weight: bold"
|
||||
: entry.newestEventAge && entry.newestEventAge > 300
|
||||
? "color: #ffaa00; font-weight: bold"
|
||||
: "color: #44aa44";
|
||||
|
||||
console.log(
|
||||
`%c[FeedDiag] ${entry.action}`,
|
||||
style,
|
||||
entry.durationMs != null ? `${entry.durationMs}ms` : "",
|
||||
entry.eventsReturned != null ? `${entry.eventsReturned} events` : "",
|
||||
entry.newestEventAge != null ? `newest: ${formatAge(entry.newestEventAge)}` : "",
|
||||
entry.error || "",
|
||||
entry.details || "",
|
||||
);
|
||||
}
|
||||
|
||||
function formatAge(seconds: number): string {
|
||||
if (seconds < 60) return `${Math.round(seconds)}s`;
|
||||
if (seconds < 3600) return `${Math.round(seconds / 60)}m`;
|
||||
return `${(seconds / 3600).toFixed(1)}h`;
|
||||
}
|
||||
|
||||
export function getRelayStates(): Record<string, { connected: boolean; status: number }> {
|
||||
const ndk = getNDK();
|
||||
const states: Record<string, { connected: boolean; status: number }> = {};
|
||||
for (const [url, relay] of ndk.pool?.relays?.entries() ?? []) {
|
||||
states[url] = {
|
||||
connected: relay.connected,
|
||||
status: (relay as unknown as { status: number }).status ?? -1,
|
||||
};
|
||||
}
|
||||
return states;
|
||||
}
|
||||
|
||||
export function computeEventAges(events: { created_at?: number }[]): {
|
||||
newest: number;
|
||||
oldest: number;
|
||||
median: number;
|
||||
} | null {
|
||||
const now = Math.floor(Date.now() / 1000);
|
||||
const ages = events
|
||||
.map((e) => (e.created_at ? now - e.created_at : null))
|
||||
.filter((a): a is number => a !== null)
|
||||
.sort((a, b) => a - b);
|
||||
|
||||
if (ages.length === 0) return null;
|
||||
return {
|
||||
newest: ages[0],
|
||||
oldest: ages[ages.length - 1],
|
||||
median: ages[Math.floor(ages.length / 2)],
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Periodic relay health snapshot — logs relay states every 60s.
|
||||
*/
|
||||
let snapshotInterval: ReturnType<typeof setInterval> | null = null;
|
||||
|
||||
export function startRelaySnapshots() {
|
||||
if (snapshotInterval) return;
|
||||
snapshotInterval = setInterval(() => {
|
||||
const states = getRelayStates();
|
||||
const connectedCount = Object.values(states).filter((s) => s.connected).length;
|
||||
const totalCount = Object.keys(states).length;
|
||||
|
||||
// Only log if something interesting (not all connected)
|
||||
if (connectedCount < totalCount || totalCount === 0) {
|
||||
logDiag({
|
||||
ts: new Date().toISOString(),
|
||||
action: "relay_snapshot",
|
||||
relayStates: states,
|
||||
details: `${connectedCount}/${totalCount} connected`,
|
||||
});
|
||||
}
|
||||
}, 60_000);
|
||||
}
|
||||
|
||||
/**
|
||||
* Wrap a fetch function with diagnostics.
|
||||
*/
|
||||
export async function diagWrapFetch<T extends { created_at?: number }[]>(
|
||||
action: string,
|
||||
fetchFn: () => Promise<T>,
|
||||
): Promise<T> {
|
||||
const start = performance.now();
|
||||
const relaysBefore = getRelayStates();
|
||||
|
||||
try {
|
||||
const result = await fetchFn();
|
||||
const durationMs = Math.round(performance.now() - start);
|
||||
const ages = computeEventAges(result);
|
||||
|
||||
logDiag({
|
||||
ts: new Date().toISOString(),
|
||||
action,
|
||||
durationMs,
|
||||
eventsReturned: result.length,
|
||||
newestEventAge: ages?.newest,
|
||||
oldestEventAge: ages?.oldest,
|
||||
medianEventAge: ages?.median,
|
||||
relayStates: relaysBefore,
|
||||
});
|
||||
|
||||
// Warn if results seem stale
|
||||
if (ages && ages.newest > 600) {
|
||||
logDiag({
|
||||
ts: new Date().toISOString(),
|
||||
action: `${action}_STALE_WARNING`,
|
||||
details: `Newest event is ${formatAge(ages.newest)} old! Median: ${formatAge(ages.median)}. This suggests relays returned cached/old data.`,
|
||||
relayStates: relaysBefore,
|
||||
});
|
||||
}
|
||||
|
||||
// Warn if zero results
|
||||
if (result.length === 0) {
|
||||
logDiag({
|
||||
ts: new Date().toISOString(),
|
||||
action: `${action}_EMPTY_WARNING`,
|
||||
details: "Zero events returned from relays",
|
||||
relayStates: relaysBefore,
|
||||
});
|
||||
}
|
||||
|
||||
return result;
|
||||
} catch (err) {
|
||||
const durationMs = Math.round(performance.now() - start);
|
||||
logDiag({
|
||||
ts: new Date().toISOString(),
|
||||
action,
|
||||
durationMs,
|
||||
error: String(err),
|
||||
relayStates: relaysBefore,
|
||||
});
|
||||
throw err;
|
||||
}
|
||||
}
|
||||
|
||||
// Expose diagnostics globally for easy console access
|
||||
if (typeof window !== "undefined") {
|
||||
(window as unknown as Record<string, unknown>).__feedDiag = () => {
|
||||
const log = getLog();
|
||||
console.table(log.map((e) => ({
|
||||
time: e.ts.slice(11, 19),
|
||||
action: e.action,
|
||||
ms: e.durationMs,
|
||||
events: e.eventsReturned,
|
||||
newestAge: e.newestEventAge != null ? formatAge(e.newestEventAge) : "",
|
||||
error: e.error || "",
|
||||
details: e.details || "",
|
||||
})));
|
||||
return log;
|
||||
};
|
||||
|
||||
(window as unknown as Record<string, unknown>).__feedDiagRelays = () => {
|
||||
const states = getRelayStates();
|
||||
console.table(states);
|
||||
return states;
|
||||
};
|
||||
|
||||
(window as unknown as Record<string, unknown>).__feedDiagClear = () => {
|
||||
localStorage.removeItem(DIAG_KEY);
|
||||
console.log("Feed diagnostics cleared");
|
||||
};
|
||||
}
|
||||
@@ -1,4 +1,36 @@
|
||||
import NDK, { NDKRelay } from "@nostr-dev-kit/ndk";
|
||||
import NDK, { NDKEvent, NDKFilter, NDKRelay, NDKRelaySet, NDKSubscriptionCacheUsage } from "@nostr-dev-kit/ndk";
|
||||
|
||||
// ─── Fetch timeout helper ───────────────────────────────────────────
|
||||
|
||||
/** Race a promise against a timeout. Returns fallback on timeout. */
|
||||
export function withTimeout<T>(promise: Promise<T>, ms: number, fallback: T): Promise<T> {
|
||||
return Promise.race([
|
||||
promise,
|
||||
new Promise<T>((resolve) => setTimeout(() => {
|
||||
console.warn(`[Wrystr] Fetch timed out after ${ms}ms`);
|
||||
resolve(fallback);
|
||||
}, ms)),
|
||||
]);
|
||||
}
|
||||
|
||||
export const FEED_TIMEOUT = 8000; // 8s for feed fetches
export const THREAD_TIMEOUT = 10000; // 10s per thread round-trip
export const SINGLE_TIMEOUT = 5000; // 5s for single event lookups

// Shared fallback returned by timed-out fetches. NOTE(review): this is a
// single shared mutable Set — if any caller ever mutates the returned
// fallback, the mutation leaks into every later timeout result. Confirm
// all callers treat fetch results as read-only.
const EMPTY_SET = new Set<NDKEvent>();
|
||||
|
||||
/** Fetch events with a timeout — returns empty set if relay hangs. */
|
||||
export async function fetchWithTimeout(
|
||||
instance: NDK,
|
||||
filter: NDKFilter,
|
||||
timeoutMs: number,
|
||||
relaySet?: NDKRelaySet,
|
||||
): Promise<Set<NDKEvent>> {
|
||||
const promise = relaySet
|
||||
? instance.fetchEvents(filter, { cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY }, relaySet)
|
||||
: instance.fetchEvents(filter, { cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY });
|
||||
return withTimeout(promise, timeoutMs, EMPTY_SET);
|
||||
}
|
||||
|
||||
// Persistence key for the user's configured relay URLs.
export const RELAY_STORAGE_KEY = "wrystr_relays";
|
||||
|
||||
@@ -31,6 +63,41 @@ export function getNDK(): NDK {
|
||||
return ndk;
|
||||
}
|
||||
|
||||
/**
|
||||
* Destroy the current NDK instance and create a fresh one.
|
||||
* Preserves the signer (login state) but resets all relay connections.
|
||||
* Use as a last resort when relay connections are unrecoverable.
|
||||
*/
|
||||
export async function resetNDK(): Promise<void> {
|
||||
const oldInstance = ndk;
|
||||
const oldSigner = oldInstance?.signer ?? null;
|
||||
|
||||
// Disconnect all relays on old instance
|
||||
if (oldInstance?.pool?.relays) {
|
||||
for (const relay of oldInstance.pool.relays.values()) {
|
||||
try { relay.disconnect(); } catch { /* ignore */ }
|
||||
}
|
||||
}
|
||||
|
||||
// Create fresh instance
|
||||
ndk = new NDK({
|
||||
explicitRelayUrls: getStoredRelayUrls(),
|
||||
});
|
||||
|
||||
// Restore signer so user stays logged in
|
||||
if (oldSigner) {
|
||||
ndk.signer = oldSigner;
|
||||
}
|
||||
|
||||
// Connect fresh
|
||||
console.log("[Wrystr] NDK instance reset — connecting fresh relays");
|
||||
await ndk.connect();
|
||||
await waitForConnectedRelay(ndk, 5000);
|
||||
const relays = Array.from(ndk.pool?.relays?.values() ?? []);
|
||||
const connected = relays.filter((r) => r.connected).length;
|
||||
console.log(`[Wrystr] Fresh connection: ${connected}/${relays.length} relays connected`);
|
||||
}
|
||||
|
||||
export function addRelay(url: string): void {
|
||||
const instance = getNDK();
|
||||
const urls = getStoredRelayUrls();
|
||||
@@ -80,3 +147,32 @@ export async function connectToRelays(): Promise<void> {
|
||||
await instance.connect();
|
||||
await waitForConnectedRelay(instance);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure at least one relay is connected.
|
||||
* If relays report connected, trust them and return immediately.
|
||||
* Only reconnect if zero relays are connected — never force-disconnect working connections.
|
||||
*/
|
||||
export async function ensureConnected(): Promise<boolean> {
|
||||
const instance = getNDK();
|
||||
const relays = Array.from(instance.pool?.relays?.values() ?? []);
|
||||
const connectedCount = relays.filter((r) => r.connected).length;
|
||||
|
||||
if (connectedCount > 0) {
|
||||
return true; // Trust relay.connected — don't probe or disconnect
|
||||
}
|
||||
|
||||
console.warn(`[Wrystr] No relays connected (${relays.length} in pool) — attempting reconnect`);
|
||||
|
||||
try {
|
||||
await withTimeout(instance.connect(), 4000, undefined);
|
||||
await waitForConnectedRelay(instance, 3000);
|
||||
const after = Array.from(instance.pool?.relays?.values() ?? []);
|
||||
const nowConnected = after.some((r) => r.connected);
|
||||
console.log(`[Wrystr] Reconnect ${nowConnected ? "succeeded" : "failed"}`);
|
||||
return nowConnected;
|
||||
} catch {
|
||||
console.error("[Wrystr] Reconnect failed");
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
export { getNDK, connectToRelays, getStoredRelayUrls, addRelay, removeRelay } from "./core";
|
||||
export { getNDK, connectToRelays, ensureConnected, resetNDK, getStoredRelayUrls, addRelay, removeRelay, fetchWithTimeout, withTimeout, FEED_TIMEOUT, THREAD_TIMEOUT, SINGLE_TIMEOUT } from "./core";
|
||||
export { fetchGlobalFeed, fetchFollowFeed, fetchUserNotes, fetchUserNotesNIP65, fetchNoteById, fetchReplies, publishNote, publishReply, publishRepost, publishQuote, fetchHashtagFeed, fetchThreadEvents, fetchAncestors } from "./notes";
|
||||
export { publishProfile, publishContactList, fetchProfile, fetchFollowSuggestions, fetchMentions, fetchNewFollowers } from "./social";
|
||||
export { publishArticle, fetchArticle, fetchAuthorArticles, fetchArticleFeed, searchArticles, fetchByAddr } from "./articles";
|
||||
|
||||
@@ -1,49 +1,26 @@
|
||||
import { NDKEvent, NDKFilter, NDKKind, NDKRelaySet, NDKSubscriptionCacheUsage, nip19 } from "@nostr-dev-kit/ndk";
|
||||
import { getNDK, getStoredRelayUrls } from "./core";
|
||||
import { NDKEvent, NDKFilter, NDKKind, NDKRelaySet, nip19 } from "@nostr-dev-kit/ndk";
|
||||
import { getNDK, getStoredRelayUrls, fetchWithTimeout, withTimeout, FEED_TIMEOUT, THREAD_TIMEOUT, SINGLE_TIMEOUT } from "./core";
|
||||
import { fetchUserRelayList } from "./relays";
|
||||
|
||||
export async function fetchGlobalFeed(limit: number = 50): Promise<NDKEvent[]> {
|
||||
const instance = getNDK();
|
||||
|
||||
const filter: NDKFilter = {
|
||||
kinds: [NDKKind.Text],
|
||||
limit,
|
||||
};
|
||||
|
||||
const events = await instance.fetchEvents(filter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
|
||||
const filter: NDKFilter = { kinds: [NDKKind.Text], limit };
|
||||
const events = await fetchWithTimeout(instance, filter, FEED_TIMEOUT);
|
||||
return Array.from(events).sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0));
|
||||
}
|
||||
|
||||
export async function fetchFollowFeed(pubkeys: string[], limit = 80): Promise<NDKEvent[]> {
|
||||
if (pubkeys.length === 0) return [];
|
||||
const instance = getNDK();
|
||||
|
||||
const filter: NDKFilter = {
|
||||
kinds: [NDKKind.Text],
|
||||
authors: pubkeys,
|
||||
limit,
|
||||
};
|
||||
|
||||
const events = await instance.fetchEvents(filter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
|
||||
const filter: NDKFilter = { kinds: [NDKKind.Text], authors: pubkeys, limit };
|
||||
const events = await fetchWithTimeout(instance, filter, FEED_TIMEOUT);
|
||||
return Array.from(events).sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0));
|
||||
}
|
||||
|
||||
export async function fetchUserNotes(pubkey: string, limit = 30): Promise<NDKEvent[]> {
|
||||
const instance = getNDK();
|
||||
const filter: NDKFilter = {
|
||||
kinds: [NDKKind.Text],
|
||||
authors: [pubkey],
|
||||
limit,
|
||||
};
|
||||
const events = await instance.fetchEvents(filter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
const filter: NDKFilter = { kinds: [NDKKind.Text], authors: [pubkey], limit };
|
||||
const events = await fetchWithTimeout(instance, filter, FEED_TIMEOUT);
|
||||
return Array.from(events).sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0));
|
||||
}
|
||||
|
||||
@@ -51,11 +28,11 @@ export async function fetchUserNotesNIP65(pubkey: string, limit = 30): Promise<N
|
||||
const instance = getNDK();
|
||||
const filter: NDKFilter = { kinds: [NDKKind.Text], authors: [pubkey], limit };
|
||||
try {
|
||||
const relayList = await fetchUserRelayList(pubkey);
|
||||
const relayList = await withTimeout(fetchUserRelayList(pubkey), SINGLE_TIMEOUT, { read: [], write: [] });
|
||||
if (relayList.write.length > 0) {
|
||||
const merged = Array.from(new Set([...relayList.write, ...getStoredRelayUrls()]));
|
||||
const relaySet = NDKRelaySet.fromRelayUrls(merged, instance);
|
||||
const events = await instance.fetchEvents(filter, { cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY }, relaySet);
|
||||
const events = await fetchWithTimeout(instance, filter, FEED_TIMEOUT, relaySet);
|
||||
return Array.from(events).sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0));
|
||||
}
|
||||
} catch { /* fallthrough */ }
|
||||
@@ -65,21 +42,14 @@ export async function fetchUserNotesNIP65(pubkey: string, limit = 30): Promise<N
|
||||
export async function fetchNoteById(eventId: string): Promise<NDKEvent | null> {
|
||||
const instance = getNDK();
|
||||
const filter: NDKFilter = { ids: [eventId], limit: 1 };
|
||||
const events = await instance.fetchEvents(filter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
const events = await fetchWithTimeout(instance, filter, SINGLE_TIMEOUT);
|
||||
return Array.from(events)[0] ?? null;
|
||||
}
|
||||
|
||||
export async function fetchReplies(eventId: string): Promise<NDKEvent[]> {
|
||||
const instance = getNDK();
|
||||
const filter: NDKFilter = {
|
||||
kinds: [NDKKind.Text],
|
||||
"#e": [eventId],
|
||||
};
|
||||
const events = await instance.fetchEvents(filter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
const filter: NDKFilter = { kinds: [NDKKind.Text], "#e": [eventId] };
|
||||
const events = await fetchWithTimeout(instance, filter, FEED_TIMEOUT);
|
||||
return Array.from(events).sort((a, b) => (a.created_at ?? 0) - (b.created_at ?? 0));
|
||||
}
|
||||
|
||||
@@ -107,7 +77,6 @@ export async function publishReply(
|
||||
event.content = content;
|
||||
|
||||
if (rootEvent && rootEvent.id !== replyTo.id) {
|
||||
// Replying to a reply — emit both root and reply markers (NIP-10)
|
||||
const pTags = new Set([rootEvent.pubkey, replyTo.pubkey]);
|
||||
event.tags = [
|
||||
["e", rootEvent.id, "", "root"],
|
||||
@@ -115,7 +84,6 @@ export async function publishReply(
|
||||
...Array.from(pTags).map((p) => ["p", p]),
|
||||
];
|
||||
} else {
|
||||
// Replying directly to root
|
||||
event.tags = [
|
||||
["e", replyTo.id, "", "root"],
|
||||
["p", replyTo.pubkey],
|
||||
@@ -130,7 +98,7 @@ export async function publishRepost(event: NDKEvent): Promise<void> {
|
||||
if (!instance.signer) throw new Error("Not logged in");
|
||||
|
||||
const repost = new NDKEvent(instance);
|
||||
repost.kind = NDKKind.Repost; // kind 6
|
||||
repost.kind = NDKKind.Repost;
|
||||
repost.content = JSON.stringify(event.rawEvent());
|
||||
repost.tags = [
|
||||
["e", event.id!, "", "mention"],
|
||||
@@ -161,20 +129,16 @@ export async function fetchThreadEvents(rootId: string): Promise<NDKEvent[]> {
|
||||
|
||||
// Round-trip 1: all events tagging the root
|
||||
const directFilter: NDKFilter = { kinds: [NDKKind.Text], "#e": [rootId] };
|
||||
const directEvents = await instance.fetchEvents(directFilter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
const directEvents = await fetchWithTimeout(instance, directFilter, THREAD_TIMEOUT);
|
||||
|
||||
const allEvents = new Map<string, NDKEvent>();
|
||||
for (const e of directEvents) allEvents.set(e.id, e);
|
||||
|
||||
// Round-trip 2: replies to any event in the thread (catches deep replies that only tag parent)
|
||||
// Round-trip 2: replies to any event in the thread
|
||||
const knownIds = Array.from(allEvents.keys());
|
||||
if (knownIds.length > 0) {
|
||||
const deepFilter: NDKFilter = { kinds: [NDKKind.Text], "#e": knownIds };
|
||||
const deepEvents = await instance.fetchEvents(deepFilter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
const deepEvents = await fetchWithTimeout(instance, deepFilter, THREAD_TIMEOUT);
|
||||
for (const e of deepEvents) allEvents.set(e.id, e);
|
||||
}
|
||||
|
||||
@@ -189,7 +153,6 @@ export async function fetchAncestors(event: NDKEvent, maxDepth = 5): Promise<NDK
|
||||
const eTags = current.tags.filter((t) => t[0] === "e");
|
||||
if (eTags.length === 0) break;
|
||||
|
||||
// Walk up: prefer "reply" marker, then "root", then last e-tag
|
||||
const parentId =
|
||||
eTags.find((t) => t[3] === "reply")?.[1] ??
|
||||
eTags.find((t) => t[3] === "root")?.[1] ??
|
||||
@@ -198,7 +161,7 @@ export async function fetchAncestors(event: NDKEvent, maxDepth = 5): Promise<NDK
|
||||
if (!parentId) break;
|
||||
const parent = await fetchNoteById(parentId);
|
||||
if (!parent) break;
|
||||
ancestors.unshift(parent); // root-first order
|
||||
ancestors.unshift(parent);
|
||||
current = parent;
|
||||
}
|
||||
|
||||
@@ -207,13 +170,7 @@ export async function fetchAncestors(event: NDKEvent, maxDepth = 5): Promise<NDK
|
||||
|
||||
export async function fetchHashtagFeed(tag: string, limit = 100): Promise<NDKEvent[]> {
|
||||
const instance = getNDK();
|
||||
const filter: NDKFilter = {
|
||||
kinds: [NDKKind.Text],
|
||||
"#t": [tag.toLowerCase()],
|
||||
limit,
|
||||
};
|
||||
const events = await instance.fetchEvents(filter, {
|
||||
cacheUsage: NDKSubscriptionCacheUsage.ONLY_RELAY,
|
||||
});
|
||||
const filter: NDKFilter = { kinds: [NDKKind.Text], "#t": [tag.toLowerCase()], limit };
|
||||
const events = await fetchWithTimeout(instance, filter, FEED_TIMEOUT);
|
||||
return Array.from(events).sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0));
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user