Add SQLite note and profile cache (roadmap #3)

- Rust: rusqlite (bundled) with WAL mode; wrystr.db in app data dir
  - db_save_notes: upsert batch of raw event JSON, prune to 500 kind-1 notes
  - db_load_feed: return N most-recent kind-1 raws for instant startup display
  - db_save_profile / db_load_profile: cache NDKUserProfile JSON by pubkey
  - Falls back to in-memory SQLite if the on-disk open fails
- src/lib/db.ts: typed invoke wrappers; all errors silenced (cache is best-effort)
- feed store: loadCachedFeed() populates notes before relay connects;
  loadFeed() merges fresh+cached (so relay returning fewer notes doesn't
  erase cached ones), then saves fresh notes to SQLite
- useProfile: reads SQLite cache to show avatar/name instantly while
  relay request is in-flight; saves result to SQLite after relay responds
- Feed: calls loadCachedFeed() first → notes visible before relay connects

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
Jure
2026-03-10 17:53:00 +01:00
parent a8627b7305
commit e3ba3dbcee
7 changed files with 283 additions and 14 deletions

74
src-tauri/Cargo.lock generated
View File

@@ -8,6 +8,18 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
[[package]]
name = "ahash"
version = "0.8.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5a15f179cd60c4584b8a8c596927aadc462e27f2ca70c04e0071964a73ba7a75"
dependencies = [
"cfg-if",
"once_cell",
"version_check",
"zerocopy",
]
[[package]]
name = "aho-corasick"
version = "1.1.4"
@@ -862,6 +874,18 @@ dependencies = [
"pin-project-lite",
]
[[package]]
name = "fallible-iterator"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2acce4a10f12dc2fb14a218589d4f1f62ef011b2d0cc4b3cb1bba8e94da14649"
[[package]]
name = "fallible-streaming-iterator"
version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7360491ce676a36bf9bb3c56c1aa791658183a54d2744120f27285738d90465a"
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -1364,6 +1388,15 @@ version = "0.12.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
[[package]]
name = "hashbrown"
version = "0.14.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1"
dependencies = [
"ahash",
]
[[package]]
name = "hashbrown"
version = "0.15.5"
@@ -1379,6 +1412,15 @@ version = "0.16.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "841d1cc9bed7f9236f321df977030373f4a4163ae1a7dbfe1a51a2c1a51d9100"
[[package]]
name = "hashlink"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6ba4ff7128dee98c7dc9794b6a411377e1404dba1c97deb8d1a55297bd25d8af"
dependencies = [
"hashbrown 0.14.5",
]
[[package]]
name = "heck"
version = "0.4.1"
@@ -1884,6 +1926,17 @@ dependencies = [
"libc",
]
[[package]]
name = "libsqlite3-sys"
version = "0.30.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2e99fb7a497b1e3339bc746195567ed8d3e24945ecd636e3619d20b9de9e9149"
dependencies = [
"cc",
"pkg-config",
"vcpkg",
]
[[package]]
name = "linux-raw-sys"
version = "0.12.1"
@@ -2844,6 +2897,20 @@ dependencies = [
"web-sys",
]
[[package]]
name = "rusqlite"
version = "0.32.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7753b721174eb8ff87a9a0e799e2d7bc3749323e773db92e0984debb00019d6e"
dependencies = [
"bitflags 2.11.0",
"fallible-iterator",
"fallible-streaming-iterator",
"hashlink",
"libsqlite3-sys",
"smallvec",
]
[[package]]
name = "rustc_version"
version = "0.4.1"
@@ -4122,6 +4189,12 @@ dependencies = [
"wasm-bindgen",
]
[[package]]
name = "vcpkg"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426"
[[package]]
name = "version-compare"
version = "0.2.1"
@@ -4996,6 +5069,7 @@ name = "wrystr"
version = "0.1.1"
dependencies = [
"keyring",
"rusqlite",
"serde",
"serde_json",
"tauri",

View File

@@ -23,4 +23,5 @@ tauri-plugin-opener = "2"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
keyring = "3"
rusqlite = { version = "0.32", features = ["bundled"] }

View File

@@ -1,4 +1,9 @@
use keyring::Entry;
use rusqlite::{params, Connection, OptionalExtension};
use std::sync::Mutex;
use tauri::Manager;
// ── OS keychain ─────────────────────────────────────────────────────────────
const KEYRING_SERVICE: &str = "wrystr";
@@ -31,11 +36,131 @@ fn delete_nsec(pubkey: String) -> Result<(), String> {
}
}
// ── SQLite note/profile cache ────────────────────────────────────────────────
struct DbState(Mutex<Connection>);
/// Open (or create) the on-disk cache at `<data_dir>/wrystr.db`, enable WAL
/// journaling, and ensure the `notes`/`profiles` schema exists.
///
/// Returns the opened connection, or the first SQLite error encountered.
fn open_db(data_dir: std::path::PathBuf) -> rusqlite::Result<Connection> {
    // Best-effort: if creation fails, Connection::open will surface the real error.
    std::fs::create_dir_all(&data_dir).ok();
    let path = data_dir.join("wrystr.db");
    let conn = Connection::open(path)?;
    // `PRAGMA journal_mode=WAL` returns a result row; `pragma_update` handles
    // that correctly, whereas running it through `execute_batch` can fail with
    // "execute returned results" on some rusqlite versions — which would make
    // the caller silently fall back to an in-memory DB and lose persistence.
    conn.pragma_update(None, "journal_mode", "WAL")?;
    conn.execute_batch(
        "CREATE TABLE IF NOT EXISTS notes (
            id TEXT PRIMARY KEY,
            pubkey TEXT NOT NULL,
            created_at INTEGER NOT NULL,
            kind INTEGER NOT NULL,
            raw TEXT NOT NULL
        );
        CREATE INDEX IF NOT EXISTS idx_notes_created ON notes(created_at DESC);
        CREATE TABLE IF NOT EXISTS profiles (
            pubkey TEXT PRIMARY KEY,
            content TEXT NOT NULL,
            cached_at INTEGER NOT NULL
        );",
    )?;
    Ok(conn)
}
/// Upsert a batch of raw Nostr event JSON strings into the notes cache.
///
/// The whole batch runs inside a single transaction (atomic, and one fsync
/// instead of one per row). Entries that fail to parse as JSON are skipped
/// rather than aborting the batch — the cache is best-effort. After the
/// inserts, the kind-1 table is pruned to the most recent 500 entries.
#[tauri::command]
fn db_save_notes(state: tauri::State<DbState>, notes: Vec<String>) -> Result<(), String> {
    let mut conn = state.0.lock().map_err(|e| e.to_string())?;
    let tx = conn.transaction().map_err(|e| e.to_string())?;
    for raw in &notes {
        // Skip malformed entries instead of failing the entire batch.
        let Ok(v) = serde_json::from_str::<serde_json::Value>(raw) else {
            continue;
        };
        let id = v["id"].as_str().unwrap_or_default();
        let pubkey = v["pubkey"].as_str().unwrap_or_default();
        let created_at = v["created_at"].as_i64().unwrap_or(0);
        let kind = v["kind"].as_i64().unwrap_or(0);
        tx.execute(
            "INSERT OR REPLACE INTO notes (id, pubkey, created_at, kind, raw) VALUES (?1,?2,?3,?4,?5)",
            params![id, pubkey, created_at, kind, raw],
        )
        .map_err(|e| e.to_string())?;
    }
    // Keep only the most recent 500 kind-1 notes.
    tx.execute(
        "DELETE FROM notes WHERE kind=1 AND id NOT IN \
         (SELECT id FROM notes WHERE kind=1 ORDER BY created_at DESC LIMIT 500)",
        [],
    )
    .map_err(|e| e.to_string())?;
    tx.commit().map_err(|e| e.to_string())
}
/// Return up to `limit` recent kind-1 note JSONs, newest first.
#[tauri::command]
fn db_load_feed(state: tauri::State<DbState>, limit: u32) -> Result<Vec<String>, String> {
    let conn = state.0.lock().map_err(|e| e.to_string())?;
    let mut stmt = conn
        .prepare("SELECT raw FROM notes WHERE kind=1 ORDER BY created_at DESC LIMIT ?1")
        .map_err(|e| e.to_string())?;
    // Collect fallibly: the first row error short-circuits, matching the
    // behavior of pushing rows one at a time.
    stmt.query_map([limit], |row| row.get::<_, String>(0))
        .map_err(|e| e.to_string())?
        .collect::<Result<Vec<String>, _>>()
        .map_err(|e| e.to_string())
}
/// Cache a profile's JSON content (the NDKUserProfile object) keyed by pubkey.
/// Overwrites any previous entry for the same pubkey and records the cache time.
#[tauri::command]
fn db_save_profile(state: tauri::State<DbState>, pubkey: String, content: String) -> Result<(), String> {
    let conn = state.0.lock().map_err(|e| e.to_string())?;
    // Seconds since the Unix epoch; a pre-epoch system clock degrades to 0.
    let cached_at = std::time::SystemTime::UNIX_EPOCH
        .elapsed()
        .map(|d| d.as_secs() as i64)
        .unwrap_or(0);
    conn.execute(
        "INSERT OR REPLACE INTO profiles (pubkey, content, cached_at) VALUES (?1,?2,?3)",
        params![pubkey, content, cached_at],
    )
    .map(|_| ())
    .map_err(|e| e.to_string())
}
/// Load a cached profile JSON for `pubkey`. Returns `Ok(None)` if not cached.
///
/// Uses rusqlite's `OptionalExtension::optional()` instead of hand-matching
/// `QueryReturnedNoRows`, which is the idiomatic "zero or one row" pattern.
#[tauri::command]
fn db_load_profile(state: tauri::State<DbState>, pubkey: String) -> Result<Option<String>, String> {
    let conn = state.0.lock().map_err(|e| e.to_string())?;
    conn.query_row(
        "SELECT content FROM profiles WHERE pubkey=?1",
        [&pubkey],
        |row| row.get::<_, String>(0),
    )
    .optional()
    .map_err(|e| e.to_string())
}
// ── App entry ────────────────────────────────────────────────────────────────
/// Tauri application entry point: sets up the SQLite cache state, registers
/// all invokable commands, and starts the event loop.
#[cfg_attr(mobile, tauri::mobile_entry_point)]
pub fn run() {
    tauri::Builder::default()
        .plugin(tauri_plugin_opener::init())
        .setup(|app| {
            let data_dir = app.path().app_data_dir()?;
            // Fall back to in-memory DB if the on-disk open fails (e.g. permissions):
            // the app keeps working, it just loses cache persistence across restarts.
            let conn = open_db(data_dir)
                .unwrap_or_else(|_| Connection::open_in_memory().expect("in-memory SQLite"));
            app.manage(DbState(Mutex::new(conn)));
            Ok(())
        })
        // `invoke_handler` must be called exactly once — a later call replaces
        // the earlier handler, so every command is registered in this one list.
        // (The previous code called it twice; the first call was dead.)
        .invoke_handler(tauri::generate_handler![
            store_nsec,
            load_nsec,
            delete_nsec,
            db_save_notes,
            db_load_feed,
            db_save_profile,
            db_load_profile,
        ])
        .run(tauri::generate_context!())
        .expect("error while running tauri application");
}

View File

@@ -9,7 +9,7 @@ import { NDKEvent } from "@nostr-dev-kit/ndk";
type FeedTab = "global" | "following";
export function Feed() {
const { notes, loading, connected, error, connect, loadFeed } = useFeedStore();
const { notes, loading, connected, error, connect, loadCachedFeed, loadFeed } = useFeedStore();
const { loggedIn, follows } = useUserStore();
const [tab, setTab] = useState<FeedTab>("global");
@@ -17,6 +17,8 @@ export function Feed() {
const [followLoading, setFollowLoading] = useState(false);
useEffect(() => {
// Show cached notes immediately, then fetch fresh ones once connected
loadCachedFeed();
connect().then(() => loadFeed());
}, []);

View File

@@ -1,5 +1,6 @@
import { useEffect, useState } from "react";
import { fetchProfile } from "../lib/nostr";
import { dbLoadProfile, dbSaveProfile } from "../lib/db";
const profileCache = new Map<string, any>();
const pendingRequests = new Map<string, Promise<any>>();
@@ -18,20 +19,38 @@ export function useProfile(pubkey: string) {
return;
}
// Deduplicate requests for the same pubkey
// Kick off relay fetch (deduplicated across simultaneous callers)
if (!pendingRequests.has(pubkey)) {
const request = fetchProfile(pubkey).then((p) => {
profileCache.set(pubkey, p ?? null);
pendingRequests.delete(pubkey);
return p;
}).catch(() => {
pendingRequests.delete(pubkey);
return null;
});
const request = fetchProfile(pubkey)
.then((p) => {
const result = p ?? null;
profileCache.set(pubkey, result);
pendingRequests.delete(pubkey);
if (result) dbSaveProfile(pubkey, JSON.stringify(result));
return result;
})
.catch(() => {
pendingRequests.delete(pubkey);
return null;
});
pendingRequests.set(pubkey, request);
}
// Show SQLite cached profile immediately while the relay request is in-flight.
// `settled` prevents the stale cached value from overwriting a fresh relay result.
let settled = false;
dbLoadProfile(pubkey).then((cached) => {
if (!settled && cached && !profileCache.has(pubkey)) {
try {
setProfile(JSON.parse(cached));
} catch {
// Corrupt cache entry — ignore
}
}
});
pendingRequests.get(pubkey)!.then((p) => {
settled = true;
setProfile(p ?? null);
});
}, [pubkey]);

22
src/lib/db.ts Normal file
View File

@@ -0,0 +1,22 @@
import { invoke } from "@tauri-apps/api/core";
/** Upsert a batch of raw Nostr event JSON strings into the SQLite note cache. */
export function dbSaveNotes(notes: string[]): void {
  if (!notes.length) return;
  // Fire-and-forget: the cache is best-effort, so failures are swallowed.
  invoke("db_save_notes", { notes }).catch(() => {});
}
/** Load up to `limit` recent kind-1 note JSONs from cache (newest first). */
export async function dbLoadFeed(limit = 200): Promise<string[]> {
  try {
    return await invoke<string[]>("db_load_feed", { limit });
  } catch {
    // Cache failure is non-fatal — behave as an empty feed.
    return [];
  }
}
/** Cache a profile object (NDKUserProfile) for `pubkey`. Fire-and-forget. */
export function dbSaveProfile(pubkey: string, content: string): void {
  // Errors are deliberately silenced — the SQLite cache is best-effort.
  void invoke("db_save_profile", { pubkey, content }).catch(() => {});
}
/** Load a cached profile JSON for `pubkey`. Returns null if not cached. */
export async function dbLoadProfile(pubkey: string): Promise<string | null> {
  try {
    return await invoke<string | null>("db_load_profile", { pubkey });
  } catch {
    // Treat any cache failure as a miss.
    return null;
  }
}

View File

@@ -1,6 +1,7 @@
import { create } from "zustand";
import { NDKEvent } from "@nostr-dev-kit/ndk";
import { connectToRelays, fetchGlobalFeed } from "../lib/nostr";
import { connectToRelays, fetchGlobalFeed, getNDK } from "../lib/nostr";
import { dbLoadFeed, dbSaveNotes } from "../lib/db";
interface FeedState {
notes: NDKEvent[];
@@ -8,6 +9,7 @@ interface FeedState {
connected: boolean;
error: string | null;
connect: () => Promise<void>;
loadCachedFeed: () => Promise<void>;
loadFeed: () => Promise<void>;
}
@@ -27,12 +29,36 @@ export const useFeedStore = create<FeedState>((set, get) => ({
}
},
loadCachedFeed: async () => {
  try {
    const raws = await dbLoadFeed(200);
    if (!raws.length) return;
    // Rehydrate cached raw events into NDKEvent instances for display.
    const ndk = getNDK();
    set({ notes: raws.map((raw) => new NDKEvent(ndk, JSON.parse(raw))) });
  } catch {
    // Cache read failure is non-critical — the relay fetch still happens.
  }
},
loadFeed: async () => {
if (get().loading) return;
set({ loading: true, error: null });
try {
const notes = await fetchGlobalFeed(80);
set({ notes, loading: false });
const fresh = await fetchGlobalFeed(80);
// Merge with currently displayed notes so cached notes aren't lost
// if the relay returns fewer results than the cache had.
const freshIds = new Set(fresh.map((n) => n.id));
const kept = get().notes.filter((n) => !freshIds.has(n.id));
const merged = [...fresh, ...kept]
.sort((a, b) => (b.created_at ?? 0) - (a.created_at ?? 0))
.slice(0, 200);
set({ notes: merged, loading: false });
// Persist fresh notes to SQLite (fire-and-forget)
dbSaveNotes(fresh.map((e) => JSON.stringify(e.rawEvent())));
} catch (err) {
set({ error: `Feed failed: ${err}`, loading: false });
}