@@ -144,6 +153,20 @@
color: var(--text-muted);
}
+ .message-hash {
+ font-family: var(--font-mono, 'SF Mono', 'Fira Code', monospace);
+ font-size: 10px;
+ color: var(--text-muted);
+ opacity: 0.5;
+ cursor: default;
+ user-select: all;
+ transition: opacity var(--transition-fast);
+ }
+
+ .message-hash:hover {
+ opacity: 1;
+ }
+
.message-content {
padding: var(--space-sm) var(--space-md);
border-radius: var(--radius-lg);
@@ -262,7 +285,7 @@
overflow-y: auto;
}
- .ai-stats-bar {
+ .message-actions-bar {
display: flex;
align-items: center;
gap: var(--space-sm);
@@ -271,9 +294,21 @@
font-size: 11px;
color: var(--text-muted);
flex-wrap: wrap;
+ opacity: 0;
+ transition: opacity var(--transition-fast);
}
- .ai-stat-btn {
+ .message:hover .message-actions-bar,
+ .message-actions-bar:focus-within {
+ opacity: 1;
+ }
+
+ /* Always show for AI messages with stats */
+ .ai-message .message-actions-bar {
+ opacity: 1;
+ }
+
+ .msg-action-btn {
display: inline-flex;
align-items: center;
justify-content: center;
@@ -284,12 +319,12 @@
line-height: 1;
}
- .ai-stat-btn:hover {
+ .msg-action-btn:hover {
background: var(--bg-hover);
color: var(--text-primary);
}
- .ai-stat-btn.copied {
+ .msg-action-btn.copied {
color: var(--success);
}
@@ -345,10 +380,14 @@
diff --git a/client/src/services/api.js b/client/src/services/api.js
index 5d8084f..961370a 100644
--- a/client/src/services/api.js
+++ b/client/src/services/api.js
@@ -1,15 +1,24 @@
const API_BASE = '/api'
+// Global callback for 401 responses (set by app component to trigger auto-logout)
+let onUnauthorized = null
+
+export function setOnUnauthorized(callback) {
+ onUnauthorized = callback
+}
+
function getToken() {
return localStorage.getItem('token')
}
function authHeaders() {
+ // Keep auth header creation in one place so every request follows the same rule.
const token = getToken()
return token ? { Authorization: `Bearer ${token}` } : {}
}
async function request(method, path, body) {
+ // Most client API calls are JSON and share the same error handling path.
const opts = {
method,
headers: {
@@ -25,8 +34,15 @@ async function request(method, path, body) {
const res = await fetch(`${API_BASE}${path}`, opts)
if (!res.ok) {
+ // Auto-logout on 401 for any authenticated request (not login/register)
+ if (res.status === 401 && path !== '/auth/login' && path !== '/auth/register' && path !== '/auth/nostr/verify') {
+ if (onUnauthorized) onUnauthorized()
+ throw new Error('Session expired — please log in again')
+ }
const text = await res.text()
- throw new Error(text || `HTTP ${res.status}`)
+ const err = new Error(text || `HTTP ${res.status}`)
+ err.status = res.status
+ throw err
}
if (res.status === 204 || res.headers.get('content-length') === '0') {
@@ -65,6 +81,7 @@ export const api = {
createRoom: (data) => request('POST', '/rooms', data),
getRoom: (roomId) => request('GET', `/rooms/${roomId}`),
getMessages: (roomId, limit = 50, before) => {
+ // `before` supports paginating older messages without changing the base endpoint.
const params = new URLSearchParams({ limit: String(limit) })
if (before) params.set('before', before)
return request('GET', `/rooms/${roomId}/messages?${params}`)
@@ -72,6 +89,7 @@ export const api = {
joinRoom: (roomId) => request('POST', `/rooms/${roomId}/join`),
deleteRoom: (roomId) => request('DELETE', `/rooms/${roomId}`),
clearRoom: (roomId) => request('POST', `/rooms/${roomId}/clear`),
+ resolveMessageHash: (hash) => request('GET', `/messages/hash/${hash}`),
// Models
listModels: () => request('GET', '/models'),
@@ -95,9 +113,15 @@ export const api = {
// Invites
createInvite: (data) => request('POST', '/invites', data),
acceptInvite: (token) => request('POST', `/invites/${token}/accept`),
+ inviteByNostr: (data) => request('POST', '/invites/nostr', data),
+
+ // Nostr auth
+ nostrChallenge: () => request('GET', '/auth/nostr/challenge'),
+ nostrVerify: (data) => request('POST', '/auth/nostr/verify', data),
}
export function saveAuth(token, user) {
+ // Token + user stay together so the UI can repaint immediately on refresh.
localStorage.setItem('token', token)
localStorage.setItem('user', JSON.stringify(user))
}
diff --git a/client/src/services/avatar.js b/client/src/services/avatar.js
index 78f5b5e..be735ff 100644
--- a/client/src/services/avatar.js
+++ b/client/src/services/avatar.js
@@ -11,7 +11,13 @@
* @returns {string} Avatar URL
*/
export function getAvatarUrl(user, size = 64) {
- if (user?.avatar_url) return user.avatar_url
+ if (user?.avatar_url) {
+ // NOTE(review): both branches below return avatar_url unchanged — the http(s) check is currently a no-op; remove it or add the intended local-path handling
+ if (user.avatar_url.startsWith('http://') || user.avatar_url.startsWith('https://')) {
+ return user.avatar_url
+ }
+ return user.avatar_url
+ }
return avatarFromEmail(user?.email, size)
}
diff --git a/client/src/services/markdown.js b/client/src/services/markdown.js
index 91a4b9a..18ab183 100644
--- a/client/src/services/markdown.js
+++ b/client/src/services/markdown.js
@@ -1,6 +1,7 @@
import MarkdownIt from 'markdown-it'
import hljs from 'highlight.js'
+// One shared renderer keeps markdown output consistent everywhere messages appear.
const md = new MarkdownIt({
html: false,
linkify: true,
diff --git a/client/src/services/nostr.js b/client/src/services/nostr.js
new file mode 100644
index 0000000..3082053
--- /dev/null
+++ b/client/src/services/nostr.js
@@ -0,0 +1,69 @@
+/**
+ * NIP-07 browser extension helpers + relay profile fetch
+ */
+
+export function hasNostrExtension() {
+ return typeof window !== 'undefined' && !!window.nostr
+}
+
+export async function getPublicKey() {
+ return window.nostr.getPublicKey()
+}
+
+export async function signEvent(event) {
+ return window.nostr.signEvent(event)
+}
+
+const RELAYS = [
+ 'wss://relay.damus.io',
+ 'wss://relay.nostr.band',
+ 'wss://nos.lol',
+]
+
+/**
+ * Fetch a Nostr kind:0 profile from relays via WebSocket.
+ * Races multiple relays, returns first result.
+ * Returns { name, picture } or null on timeout/error.
+ */
+export function fetchNostrProfile(pubkeyHex, timeoutMs = 5000) {
+ return new Promise((resolve) => {
+ let resolved = false
+ const connections = []
+
+ const done = (result) => {
+ if (resolved) return
+ resolved = true
+ connections.forEach(ws => { try { ws.close() } catch {} })
+ resolve(result)
+ }
+
+ setTimeout(() => done(null), timeoutMs)
+
+ for (const relay of RELAYS) {
+ try {
+ const ws = new WebSocket(relay)
+ connections.push(ws)
+
+ ws.onopen = () => {
+ const subId = 'p_' + Math.random().toString(36).slice(2, 8)
+ ws.send(JSON.stringify(['REQ', subId, { kinds: [0], authors: [pubkeyHex], limit: 1 }]))
+ }
+
+ ws.onmessage = (msg) => {
+ try {
+ const data = JSON.parse(msg.data)
+ if (data[0] === 'EVENT' && data[2]?.kind === 0) {
+ const profile = JSON.parse(data[2].content)
+ done({
+ name: profile.name || profile.display_name || null,
+ picture: profile.picture || null,
+ })
+ }
+ } catch {}
+ }
+
+ ws.onerror = () => {}
+ } catch {}
+ }
+ })
+}
diff --git a/client/src/services/websocket.js b/client/src/services/websocket.js
index 1935130..9351a2c 100644
--- a/client/src/services/websocket.js
+++ b/client/src/services/websocket.js
@@ -10,6 +10,7 @@ class WebSocketManager {
this.reconnectDelay = 1000
this.maxReconnectDelay = 30000
this.token = null
+ // Track joined rooms so reconnect can restore live updates automatically.
this.subscribedRooms = new Set()
}
@@ -17,6 +18,7 @@ class WebSocketManager {
if (this.ws && this.ws.readyState === WebSocket.OPEN) return
this.token = token
+ this._authFailed = false
const protocol = window.location.protocol === 'https:' ? 'wss:' : 'ws:'
const host = window.location.host
@@ -25,6 +27,7 @@ class WebSocketManager {
this.ws.onopen = () => {
console.log('[WS] Connected')
this.reconnectDelay = 1000
+ this._authFailed = false
// Re-subscribe to all rooms we were watching
for (const roomId of this.subscribedRooms) {
@@ -46,8 +49,25 @@ class WebSocketManager {
}
this.ws.onclose = (event) => {
- console.log('[WS] Disconnected', event.code)
+ console.log('[WS] Disconnected, code:', event.code)
this.emit('disconnected')
+
+ // Code 1008 = Policy Violation (server rejected auth)
+ // Code 4401 = custom auth failure
+ // Also detect immediate close without open (HTTP 401 on upgrade)
+ if (event.code === 1008 || event.code === 4401 || !event.wasClean) {
+ // Two failed attempts in a row usually means the saved token is stale,
+ // so the app should stop reconnecting and send the user back to login.
+ // If we never successfully connected, the token is likely invalid
+ if (this._authFailed || (!event.wasClean && this.reconnectDelay > 4000)) {
+ console.warn('[WS] Auth appears invalid, stopping reconnect')
+ this.token = null
+ this.emit('auth_failed')
+ return
+ }
+ this._authFailed = true
+ }
+
if (this.token) {
this.scheduleReconnect()
}
@@ -61,6 +81,7 @@ class WebSocketManager {
disconnect() {
this.token = null
this.subscribedRooms.clear()
+ this.listeners.clear() // NOTE(review): this also drops app-level handlers registered once at startup — confirm they are re-registered on next login, or auth_failed/disconnected events will be lost
if (this.reconnectTimer) {
clearTimeout(this.reconnectTimer)
this.reconnectTimer = null
@@ -98,6 +119,7 @@ class WebSocketManager {
}
joinRoom(roomId) {
+ // Joining is idempotent: we keep the room in the set and let the server ignore duplicates.
this.subscribedRooms.add(roomId)
this.send({ type: 'join_room', room_id: roomId })
}
diff --git a/prod.sh b/prod.sh
index 6fd642c..8dde32d 100755
--- a/prod.sh
+++ b/prod.sh
@@ -38,12 +38,24 @@ check_env() {
source "$ROOT/server/.env"
set +a
+ SEARCH_PROVIDER="${SEARCH_PROVIDER:-tavily}"
+
if [ -z "$OPENROUTER_API_KEY" ] || [ "$OPENROUTER_API_KEY" = "your-openrouter-api-key-here" ]; then
echo -e "${RED}[prod] OPENROUTER_API_KEY not set in server/.env${NC}"
exit 1
fi
- if [ -z "$BRAVE_API_KEY" ] || [ "$BRAVE_API_KEY" = "your-brave-api-key-here" ]; then
- echo -e "${RED}[prod] BRAVE_API_KEY not set in server/.env${NC}"
+ if [ "$SEARCH_PROVIDER" = "tavily" ]; then
+ if [ -z "$TAVILY_API_KEY" ] || [ "$TAVILY_API_KEY" = "tvly-your-key-here" ]; then
+ echo -e "${RED}[prod] TAVILY_API_KEY not set in server/.env${NC}"
+ exit 1
+ fi
+ elif [ "$SEARCH_PROVIDER" = "brave" ]; then
+ if [ -z "$BRAVE_API_KEY" ] || [ "$BRAVE_API_KEY" = "your-brave-api-key-here" ]; then
+ echo -e "${RED}[prod] BRAVE_API_KEY not set in server/.env${NC}"
+ exit 1
+ fi
+ else
+ echo -e "${RED}[prod] SEARCH_PROVIDER must be 'tavily' or 'brave'${NC}"
exit 1
fi
if [ -z "$JWT_SECRET" ] || [ "$JWT_SECRET" = "dev-secret-change-me" ]; then
diff --git a/server/.env.example b/server/.env.example
index 7da919b..52c093e 100644
--- a/server/.env.example
+++ b/server/.env.example
@@ -11,7 +11,13 @@ JWT_SECRET=change-me-to-a-random-secret
# OpenRouter API
OPENROUTER_API_KEY=sk-or-v1-your-key-here
-# Brave Search API
+# Search provider: tavily or brave
+SEARCH_PROVIDER=tavily
+
+# Tavily Search API
+TAVILY_API_KEY=tvly-your-key-here
+
+# Brave Search API (optional unless SEARCH_PROVIDER=brave)
BRAVE_API_KEY=your-brave-api-key-here
# Production: path to built client files (default: ../client/dist)
diff --git a/server/Cargo.lock b/server/Cargo.lock
index a615c56..2714a0a 100644
--- a/server/Cargo.lock
+++ b/server/Cargo.lock
@@ -8,6 +8,16 @@ version = "2.0.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "320119579fcad9c21884f5c4861d16174d0e06250625266f50fe6898340abefa"
+[[package]]
+name = "aead"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d122413f284cf2d62fb1b7db97e02edb8cda96d769b16e443a4f6195e35662b0"
+dependencies = [
+ "crypto-common",
+ "generic-array",
+]
+
[[package]]
name = "ahash"
version = "0.8.12"
@@ -78,6 +88,12 @@ dependencies = [
"password-hash",
]
+[[package]]
+name = "arrayvec"
+version = "0.7.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7c02d123df017efcdfbd739ef81735b36c5ba83ec3c59c80a9d7ecc718f92e50"
+
[[package]]
name = "async-compression"
version = "0.4.41"
@@ -235,6 +251,40 @@ version = "1.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2af50177e190e07a26ab74f8b1efbfe2ef87da2116221318cb1c2e82baf7de06"
+[[package]]
+name = "bech32"
+version = "0.11.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "32637268377fc7b10a8c6d51de3e7fba1ce5dd371a96e342b34e6078db558e7f"
+
+[[package]]
+name = "bip39"
+version = "2.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "90dbd31c98227229239363921e60fcf5e558e43ec69094d46fc4996f08d1d5bc"
+dependencies = [
+ "bitcoin_hashes",
+ "serde",
+ "unicode-normalization",
+]
+
+[[package]]
+name = "bitcoin-io"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2dee39a0ee5b4095224a0cfc6bf4cc1baf0f9624b96b367e53b66d974e51d953"
+
+[[package]]
+name = "bitcoin_hashes"
+version = "0.14.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26ec84b80c482df901772e931a9a681e26a1b9ee2302edeff23cb30328745c8b"
+dependencies = [
+ "bitcoin-io",
+ "hex-conservative",
+ "serde",
+]
+
[[package]]
name = "bitflags"
version = "2.11.0"
@@ -262,6 +312,15 @@ dependencies = [
"generic-array",
]
+[[package]]
+name = "block-padding"
+version = "0.3.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a8894febbff9f758034a5b8e12d87918f56dfc64a8e1fe757d65e29041538d93"
+dependencies = [
+ "generic-array",
+]
+
[[package]]
name = "brotli"
version = "8.0.2"
@@ -301,6 +360,15 @@ version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e748733b7cbc798e1434b6ac524f0c1ff2ab456fe201501e6497c8417a4fc33"
+[[package]]
+name = "cbc"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "26b52a9543ae338f279b96b0b9fed9c8093744685043739079ce85cd58f289a6"
+dependencies = [
+ "cipher",
+]
+
[[package]]
name = "cc"
version = "1.2.56"
@@ -317,6 +385,30 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
+[[package]]
+name = "chacha20"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c3613f74bd2eac03dad61bd53dbe620703d4371614fe0bc3b9f04dd36fe4e818"
+dependencies = [
+ "cfg-if",
+ "cipher",
+ "cpufeatures",
+]
+
+[[package]]
+name = "chacha20poly1305"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "10cd79432192d1c0f4e1a0fef9527696cc039165d729fb41b3f4f4f354c2dc35"
+dependencies = [
+ "aead",
+ "chacha20",
+ "cipher",
+ "poly1305",
+ "zeroize",
+]
+
[[package]]
name = "chrono"
version = "0.4.44"
@@ -331,6 +423,17 @@ dependencies = [
"windows-link",
]
+[[package]]
+name = "cipher"
+version = "0.4.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "773f3b9af64447d2ce9850330c473515014aa235e6a783b02db81ff39e4a3dad"
+dependencies = [
+ "crypto-common",
+ "inout",
+ "zeroize",
+]
+
[[package]]
name = "compression-codecs"
version = "0.4.37"
@@ -436,6 +539,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "78c8292055d1c1df0cce5d180393dc8cce0abec0a7102adb6c7b1eef6016d60a"
dependencies = [
"generic-array",
+ "rand_core",
"typenum",
]
@@ -858,11 +962,13 @@ dependencies = [
"futures",
"jsonwebtoken",
"md-5",
+ "nostr",
"rand",
"reqwest",
"scraper",
"serde",
"serde_json",
+ "sha2",
"sqlx",
"tokio",
"tower 0.4.13",
@@ -970,6 +1076,15 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+[[package]]
+name = "hex-conservative"
+version = "0.2.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fda06d18ac606267c40c04e41b9947729bf8b9efe74bd4e82b61a5f26a510b9f"
+dependencies = [
+ "arrayvec",
+]
+
[[package]]
name = "hkdf"
version = "0.12.4"
@@ -1284,6 +1399,28 @@ dependencies = [
"serde_core",
]
+[[package]]
+name = "inout"
+version = "0.1.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "879f10e63c20629ecabbb64a8010319738c66a5cd0c29b02d63d272b03751d01"
+dependencies = [
+ "block-padding",
+ "generic-array",
+]
+
+[[package]]
+name = "instant"
+version = "0.1.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0242819d153cba4b4b05a5a8f2a7e9bbf97b6055b2a002b395c96b5ff3c0222"
+dependencies = [
+ "cfg-if",
+ "js-sys",
+ "wasm-bindgen",
+ "web-sys",
+]
+
[[package]]
name = "ipnet"
version = "2.12.0"
@@ -1563,6 +1700,30 @@ dependencies = [
"minimal-lexical",
]
+[[package]]
+name = "nostr"
+version = "0.44.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3aa5e3b6a278ed061835fe1ee293b71641e6bf8b401cfe4e1834bbf4ef0a34e1"
+dependencies = [
+ "base64 0.22.1",
+ "bech32",
+ "bip39",
+ "bitcoin_hashes",
+ "cbc",
+ "chacha20",
+ "chacha20poly1305",
+ "getrandom 0.2.17",
+ "hex",
+ "instant",
+ "scrypt",
+ "secp256k1",
+ "serde",
+ "serde_json",
+ "unicode-normalization",
+ "url",
+]
+
[[package]]
name = "nu-ansi-term"
version = "0.50.3"
@@ -1640,6 +1801,12 @@ version = "1.21.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d"
+[[package]]
+name = "opaque-debug"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c08d65885ee38876c4f86fa503fb49d7b507c2b62552df7c70b2fce627e06381"
+
[[package]]
name = "openssl"
version = "0.10.75"
@@ -1724,6 +1891,16 @@ version = "1.0.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "57c0d7b74b563b49d38dae00a0c37d4d6de9b432382b2892f0574ddcae73fd0a"
+[[package]]
+name = "pbkdf2"
+version = "0.12.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "f8ed6a7761f76e3b9f92dfb0a60a6a6477c61024b775147ff0973a02653abaf2"
+dependencies = [
+ "digest",
+ "hmac",
+]
+
[[package]]
name = "pem"
version = "3.0.6"
@@ -1846,6 +2023,17 @@ version = "0.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b4596b6d070b27117e987119b4dac604f3c58cfb0b191112e24771b2faeac1a6"
+[[package]]
+name = "poly1305"
+version = "0.8.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8159bd90725d2df49889a078b54f4f79e87f1f8a8444194cdca81d38f5393abf"
+dependencies = [
+ "cpufeatures",
+ "opaque-debug",
+ "universal-hash",
+]
+
[[package]]
name = "potential_utf"
version = "0.1.4"
@@ -2116,6 +2304,15 @@ version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f"
+[[package]]
+name = "salsa20"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "97a22f5af31f73a954c10289c93e8a50cc23d971e80ee446f1f6f7137a088213"
+dependencies = [
+ "cipher",
+]
+
[[package]]
name = "schannel"
version = "0.1.28"
@@ -2146,6 +2343,38 @@ dependencies = [
"tendril",
]
+[[package]]
+name = "scrypt"
+version = "0.11.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0516a385866c09368f0b5bcd1caff3366aace790fcd46e2bb032697bb172fd1f"
+dependencies = [
+ "password-hash",
+ "pbkdf2",
+ "salsa20",
+ "sha2",
+]
+
+[[package]]
+name = "secp256k1"
+version = "0.29.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "9465315bc9d4566e1724f0fffcbcc446268cb522e60f9a27bcded6b19c108113"
+dependencies = [
+ "rand",
+ "secp256k1-sys",
+ "serde",
+]
+
+[[package]]
+name = "secp256k1-sys"
+version = "0.10.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d4387882333d3aa8cb20530a17c69a3752e97837832f34f6dccc760e715001d9"
+dependencies = [
+ "cc",
+]
+
[[package]]
name = "security-framework"
version = "3.7.0"
@@ -3163,6 +3392,16 @@ version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "39ec24b3121d976906ece63c9daad25b85969647682eee313cb5779fdd69e14e"
+[[package]]
+name = "universal-hash"
+version = "0.5.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "fc1de2c688dc15305988b563c3854064043356019f97a4b46276fe734c4f07ea"
+dependencies = [
+ "crypto-common",
+ "subtle",
+]
+
[[package]]
name = "untrusted"
version = "0.9.0"
@@ -3179,6 +3418,7 @@ dependencies = [
"idna",
"percent-encoding",
"serde",
+ "serde_derive",
]
[[package]]
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 766e299..7067a3a 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -25,4 +25,6 @@ rand = "0.8"
async-trait = "0.1"
scraper = "0.22"
md-5 = "0.10"
+sha2 = "0.10"
base64 = "0.22"
+nostr = { version = "0.44", default-features = false, features = ["std"] }
diff --git a/server/README.md b/server/README.md
new file mode 100644
index 0000000..04ce0be
--- /dev/null
+++ b/server/README.md
@@ -0,0 +1,155 @@
+# GroupChat2 Server
+
+The server is a Rust/Axum application that provides authentication, room and message APIs, WebSocket chat delivery, AI response orchestration, file uploads, and SQLite-backed persistence for GroupChat2.
+
+## Stack
+
+- Rust 2021
+- Axum and Tokio
+- SQLx with SQLite
+- JWT auth and Argon2 password hashing
+- OpenRouter for AI completions
+- Tavily or Brave for web search tools
+
+## Responsibilities
+
+- Register and authenticate users
+- Support Nostr challenge/verify login
+- Manage rooms, members, invites, and profile updates
+- Persist messages, hashes, room settings, and AI metadata
+- Broadcast real-time events over WebSockets
+- Stream AI responses and tool usage events to connected clients
+- Store uploaded avatars and chat images
+- Serve built frontend assets in production
+
+## Requirements
+
+- Rust toolchain with `cargo`
+- A valid `OPENROUTER_API_KEY`
+- A search API key for either Tavily or Brave (matching `SEARCH_PROVIDER`)
+
+## Environment
+
+Copy `.env.example` to `.env` and fill in the values you need.
+
+```bash
+cp .env.example .env
+```
+
+Important variables:
+
+- `BIND_ADDR`: server bind address, default `0.0.0.0:3001`
+- `RUST_LOG`: log level, default `info`
+- `DATABASE_URL`: SQLite connection string, default `sqlite:chat.db?mode=rwc`
+- `JWT_SECRET`: JWT signing secret
+- `OPENROUTER_API_KEY`: required
+- `SEARCH_PROVIDER`: `tavily` or `brave`
+- `TAVILY_API_KEY`: required when `SEARCH_PROVIDER=tavily`
+- `BRAVE_API_KEY`: required when `SEARCH_PROVIDER=brave`
+- `STATIC_DIR`: optional path to built client assets for production serving
+
+## Commands
+
+Run the backend directly:
+
+```bash
+cargo run
+```
+
+Build a release binary:
+
+```bash
+cargo build --release
+```
+
+From the repo root, production build/run is also available through:
+
+```bash
+./prod.sh
+```
+
+## Startup Behavior
+
+On startup the server:
+
+- loads environment variables from `.env`
+- validates required AI/search configuration
+- creates a timestamped backup of the SQLite database if it already exists
+- keeps the 10 most recent backups
+- opens the SQLite database
+- applies SQL migrations embedded in the binary
+
+## API Surface
+
+Main routes exposed by the server:
+
+- `/api/auth/register`
+- `/api/auth/login`
+- `/api/auth/me`
+- `/api/auth/profile`
+- `/api/auth/avatar`
+- `/api/auth/nostr/challenge`
+- `/api/auth/nostr/verify`
+- `/api/rooms`
+- `/api/rooms/:room_id`
+- `/api/rooms/:room_id/messages`
+- `/api/rooms/:room_id/join`
+- `/api/rooms/:room_id/clear`
+- `/api/messages/hash/:hash`
+- `/api/models`
+- `/api/invites`
+- `/api/invites/:token/accept`
+- `/api/invites/nostr`
+- `/api/upload`
+- `/ws`
+- `/uploads/*`
+
+## Real-Time Flow
+
+WebSocket clients connect to `/ws` with a JWT token in the query string.
+
+Client messages include:
+
+- `join_room`
+- `typing`
+- `send_message`
+
+Server events include:
+
+- new messages
+- AI typing notifications
+- AI stream chunks and stream end markers
+- AI tool usage updates
+- user typing notifications
+- room deleted and room cleared events
+
+## Storage Layout
+
+```text
+server/
+|- migrations/ SQL schema and incremental changes
+|- src/ handlers, middleware, models, services
+|- uploads/ avatars and chat images
+|- backups/ automatic database backups
+|- chat.db SQLite database
+```
+
+Uploaded file behavior:
+
+- avatars are stored under `uploads/avatars/`
+- chat images are stored under `uploads/chat-images/`
+- avatar uploads are limited to 2 MB
+- chat image uploads are limited to 5 MB
+
+## AI Behavior
+
+When a user mentions the assistant or the room is configured to always respond, the server:
+
+- loads recent room history
+- includes image context for human-uploaded images when available
+- calls OpenRouter in streaming mode
+- executes tool calls for search or page fetches
+- broadcasts streaming output to the room
+- stores the final AI response with usage metadata
+
+For frontend behavior and local browser setup, see the [Client README](../client/README.md).
diff --git a/server/migrations/007_message_hash.sql b/server/migrations/007_message_hash.sql
new file mode 100644
index 0000000..93d7b94
--- /dev/null
+++ b/server/migrations/007_message_hash.sql
@@ -0,0 +1,5 @@
+-- Add SHA-256 integrity hash column to messages
+ALTER TABLE messages ADD COLUMN hash TEXT;
+
+-- Backfilling hashes for existing messages is done in Rust (see main.rs)
+-- because SQLite doesn't have a built-in SHA-256 function.
diff --git a/server/migrations/008_nostr.sql b/server/migrations/008_nostr.sql
new file mode 100644
index 0000000..84692e8
--- /dev/null
+++ b/server/migrations/008_nostr.sql
@@ -0,0 +1,2 @@
+ALTER TABLE users ADD COLUMN nostr_pubkey TEXT;
+CREATE UNIQUE INDEX IF NOT EXISTS idx_users_nostr_pubkey ON users(nostr_pubkey);
diff --git a/server/src/handlers/auth.rs b/server/src/handlers/auth.rs
index 6066bf2..54f1cd0 100644
--- a/server/src/handlers/auth.rs
+++ b/server/src/handlers/auth.rs
@@ -1,24 +1,36 @@
-use axum::{extract::State, http::StatusCode, Json};
use argon2::{
password_hash::{rand_core::OsRng, PasswordHash, PasswordHasher, PasswordVerifier, SaltString},
Argon2,
};
+use axum::{extract::State, http::StatusCode, Json};
use std::sync::Arc;
use uuid::Uuid;
use crate::{
middleware::auth::{create_token, AuthUser},
- models::{AuthResponse, LoginRequest, RegisterRequest, UserPublic},
+ models::{self, AuthResponse, LoginRequest, RegisterRequest, UserPublic},
AppState,
};
+/// Create a new password-based account and immediately return a JWT.
pub async fn register(
State(state): State<Arc<AppState>>,
Json(body): Json<RegisterRequest>,
) -> Result<Json<AuthResponse>, (StatusCode, String)> {
- // Check if email already exists
- let existing = sqlx::query_scalar::<_, String>("SELECT id FROM users WHERE email = ?")
- .bind(&body.email)
+ // Normalize email: trim whitespace and lowercase for consistent matching
+ let email = body.email.trim().to_lowercase();
+ let display_name = body.display_name.trim().to_string();
+
+ if email.is_empty() {
+ return Err((StatusCode::BAD_REQUEST, "Email is required".into()));
+ }
+ if display_name.is_empty() {
+ return Err((StatusCode::BAD_REQUEST, "Display name is required".into()));
+ }
+
+ // Check if email already exists (case-insensitive for safety with legacy data)
+ let existing = sqlx::query_scalar::<_, String>("SELECT id FROM users WHERE LOWER(email) = ?")
+ .bind(&email)
.fetch_optional(&state.db)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
@@ -36,35 +48,40 @@ pub async fn register(
sqlx::query("INSERT INTO users (id, email, display_name, password_hash) VALUES (?, ?, ?, ?)")
.bind(&user_id)
- .bind(&body.email)
- .bind(&body.display_name)
+ .bind(&email)
+ .bind(&display_name)
.bind(&password_hash)
.execute(&state.db)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
- let token = create_token(&user_id, &body.email, &body.display_name, &state.jwt_secret)
+ let token = create_token(&user_id, &email, &display_name, &state.jwt_secret)
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
Ok(Json(AuthResponse {
token,
user: UserPublic {
id: user_id,
- email: body.email,
- display_name: body.display_name,
+ email: models::public_email(&email),
+ display_name,
avatar_url: None,
+ nostr_pubkey: None,
},
}))
}
+/// Authenticate an existing password-based account and return a fresh JWT.
pub async fn login(
State(state): State<Arc<AppState>>,
Json(body): Json<LoginRequest>,
) -> Result<Json<AuthResponse>, (StatusCode, String)> {
+ // Normalize email: trim whitespace and lowercase for case-insensitive matching
+ let email = body.email.trim().to_lowercase();
+
let user = sqlx::query_as::<_, (String, String, String, String, Option<String>)>(
- "SELECT id, email, display_name, password_hash, avatar_url FROM users WHERE email = ?",
+ "SELECT id, email, display_name, password_hash, avatar_url FROM users WHERE LOWER(email) = ?",
)
- .bind(&body.email)
+ .bind(&email)
.fetch_optional(&state.db)
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
@@ -72,8 +89,8 @@ pub async fn login(
let (user_id, email, display_name, hash, avatar_url) = user;
- let parsed_hash = PasswordHash::new(&hash)
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ let parsed_hash =
+ PasswordHash::new(&hash).map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
Argon2::default()
.verify_password(body.password.as_bytes(), &parsed_hash)
@@ -86,29 +103,33 @@ pub async fn login(
token,
user: UserPublic {
id: user_id,
- email,
+ email: models::public_email(&email),
display_name,
avatar_url,
+ nostr_pubkey: None,
},
}))
}
+/// Return the caller's current public profile information.
pub async fn me(
auth: AuthUser,
State(state): State<Arc<AppState>>,
) -> Result<Json<UserPublic>, (StatusCode, String)> {
- let avatar_url: Option<String> =
- sqlx::query_scalar("SELECT avatar_url FROM users WHERE id = ?")
- .bind(&auth.user_id)
- .fetch_optional(&state.db)
- .await
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
- .flatten();
+ let row = sqlx::query_as::<_, (Option<String>, Option<String>)>(
+ "SELECT avatar_url, nostr_pubkey FROM users WHERE id = ?",
+ )
+ .bind(&auth.user_id)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
+ .unwrap_or((None, None));
Ok(Json(UserPublic {
id: auth.user_id,
- email: auth.email,
+ email: models::public_email(&auth.email),
display_name: auth.display_name,
- avatar_url,
+ avatar_url: row.0,
+ nostr_pubkey: row.1,
}))
}
diff --git a/server/src/handlers/invites.rs b/server/src/handlers/invites.rs
index f7e24c9..c1bf65f 100644
--- a/server/src/handlers/invites.rs
+++ b/server/src/handlers/invites.rs
@@ -9,10 +9,11 @@ use uuid::Uuid;
use crate::{
middleware::auth::AuthUser,
- models::CreateInviteRequest,
+ models::{CreateInviteRequest, NostrInviteRequest},
AppState,
};
+/// Response payload for a newly created invite link.
#[derive(serde::Serialize)]
pub struct InviteResponse {
pub id: String,
@@ -20,6 +21,7 @@ pub struct InviteResponse {
pub invite_url: String,
}
+/// Create a one-time invite token for a room member to share.
pub async fn create_invite(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -46,15 +48,17 @@ pub async fn create_invite(
.map(char::from)
.collect();
- sqlx::query("INSERT INTO invites (id, room_id, invited_by, email, token) VALUES (?, ?, ?, ?, ?)")
- .bind(&invite_id)
- .bind(&body.room_id)
- .bind(&auth.user_id)
- .bind(&body.email)
- .bind(&token)
- .execute(&state.db)
- .await
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ sqlx::query(
+ "INSERT INTO invites (id, room_id, invited_by, email, token) VALUES (?, ?, ?, ?, ?)",
+ )
+ .bind(&invite_id)
+ .bind(&body.room_id)
+ .bind(&auth.user_id)
+ .bind(&body.email)
+ .bind(&token)
+ .execute(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
Ok(Json(InviteResponse {
id: invite_id,
@@ -63,11 +67,13 @@ pub async fn create_invite(
}))
}
+/// Response payload returned after consuming an invite.
#[derive(serde::Serialize)]
pub struct AcceptInviteResponse {
pub room_id: String,
}
+/// Consume an invite token and add the caller to the room.
pub async fn accept_invite(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -89,13 +95,12 @@ pub async fn accept_invite(
}
// Verify room is not deleted
- let room_active = sqlx::query_scalar::<_, String>(
- "SELECT id FROM rooms WHERE id = ? AND deleted_at IS NULL",
- )
- .bind(&room_id)
- .fetch_optional(&state.db)
- .await
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ let room_active =
+ sqlx::query_scalar::<_, String>("SELECT id FROM rooms WHERE id = ? AND deleted_at IS NULL")
+ .bind(&room_id)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
if room_active.is_none() {
return Err((StatusCode::GONE, "This room has been deleted".into()));
@@ -118,3 +123,80 @@ pub async fn accept_invite(
Ok(Json(AcceptInviteResponse { room_id }))
}
+
+/// Result of a Nostr-based room invite attempt.
+#[derive(serde::Serialize)]
+pub struct NostrInviteResponse {
+ pub status: String,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub display_name: Option<String>,
+}
+
+/// Add a user to a room by their Nostr public key if they already have an account.
+pub async fn invite_by_nostr(
+ State(state): State<Arc<AppState>>,
+ auth: AuthUser,
+ Json(body): Json<NostrInviteRequest>,
+) -> Result<Json<NostrInviteResponse>, (StatusCode, String)> {
+ // Normalize pubkey: if it starts with "npub", decode bech32
+ let pubkey_hex = if body.nostr_pubkey.starts_with("npub") {
+ nostr::prelude::PublicKey::parse(&body.nostr_pubkey)
+ .map(|pk| pk.to_hex())
+ .map_err(|_| (StatusCode::BAD_REQUEST, "Invalid npub format".to_string()))?
+ } else {
+ // Validate it's valid hex
+ if body.nostr_pubkey.len() != 64
+ || !body.nostr_pubkey.chars().all(|c| c.is_ascii_hexdigit())
+ {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ "Invalid pubkey: must be 64-char hex or npub".to_string(),
+ ));
+ }
+ body.nostr_pubkey.clone()
+ };
+
+ // Verify inviter is a member of the room
+ let is_member = sqlx::query_scalar::<_, String>(
+ "SELECT user_id FROM room_members WHERE room_id = ? AND user_id = ?",
+ )
+ .bind(&body.room_id)
+ .bind(&auth.user_id)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ if is_member.is_none() {
+ return Err((StatusCode::FORBIDDEN, "Not a member of this room".into()));
+ }
+
+ // Lookup user by nostr_pubkey
+ let target_user = sqlx::query_as::<_, (String, String)>(
+ "SELECT id, display_name FROM users WHERE nostr_pubkey = ?",
+ )
+ .bind(&pubkey_hex)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ match target_user {
+ Some((user_id, display_name)) => {
+ // Add to room
+ sqlx::query("INSERT OR IGNORE INTO room_members (room_id, user_id) VALUES (?, ?)")
+ .bind(&body.room_id)
+ .bind(&user_id)
+ .execute(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ Ok(Json(NostrInviteResponse {
+ status: "added".to_string(),
+ display_name: Some(display_name),
+ }))
+ }
+ None => Ok(Json(NostrInviteResponse {
+ status: "not_found".to_string(),
+ display_name: None,
+ })),
+ }
+}
diff --git a/server/src/handlers/mod.rs b/server/src/handlers/mod.rs
index ff74873..cb34c9b 100644
--- a/server/src/handlers/mod.rs
+++ b/server/src/handlers/mod.rs
@@ -1,6 +1,12 @@
+//! HTTP and WebSocket entry points for the server.
+//!
+//! Each submodule exposes route handlers that Axum wires into the router in
+//! `main.rs`.
+
pub mod auth;
pub mod invites;
pub mod models;
+pub mod nostr_auth;
pub mod profile;
pub mod rooms;
pub mod upload;
diff --git a/server/src/handlers/models.rs b/server/src/handlers/models.rs
index 1b63a20..b48b6f0 100644
--- a/server/src/handlers/models.rs
+++ b/server/src/handlers/models.rs
@@ -1,19 +1,16 @@
-use axum::{
- extract::State,
- http::StatusCode,
- Json,
-};
+use axum::{extract::State, http::StatusCode, Json};
use serde::{Deserialize, Serialize};
use std::sync::Arc;
-use tokio::sync::OnceCell;
use std::time::{Duration, Instant};
use tokio::sync::Mutex;
+use tokio::sync::OnceCell;
use crate::AppState;
/// Cached model list with expiry.
static MODEL_CACHE: OnceCell> = OnceCell::const_new();
+/// Process-wide cache for the OpenRouter model catalog.
struct CachedModels {
models: Vec<ModelInfo>,
fetched_at: Instant,
@@ -21,6 +18,7 @@ struct CachedModels {
const CACHE_TTL: Duration = Duration::from_secs(60 * 30); // 30 minutes
+/// Model metadata exposed to the client for room creation and model selection.
#[derive(Debug, Clone, Serialize)]
pub struct ModelInfo {
pub id: String,
@@ -56,6 +54,10 @@ struct OpenRouterArchitecture {
input_modalities: Option<Vec<String>>,
}
+/// Fetch the model catalog directly from OpenRouter.
+///
+/// The result is normalized into the smaller `ModelInfo` shape that the client
+/// UI needs.
async fn fetch_models(api_key: &str) -> Result<Vec<ModelInfo>, String> {
let client = reqwest::Client::new();
@@ -82,7 +84,8 @@ async fn fetch_models(api_key: &str) -> Result, String> {
.into_iter()
.map(|m| {
let pricing = m.pricing.as_ref();
- let supports_vision = m.architecture
+ let supports_vision = m
+ .architecture
.as_ref()
.and_then(|a| a.input_modalities.as_ref())
.map(|mods| mods.iter().any(|m| m == "image"))
@@ -102,6 +105,7 @@ async fn fetch_models(api_key: &str) -> Result, String> {
Ok(models)
}
+/// Return the cached OpenRouter model list, refreshing it when the cache expires.
pub async fn list_models(
State(state): State<Arc<AppState>>,
) -> Result<Json<Vec<ModelInfo>>, (StatusCode, String)> {
diff --git a/server/src/handlers/nostr_auth.rs b/server/src/handlers/nostr_auth.rs
new file mode 100644
index 0000000..1bacdc6
--- /dev/null
+++ b/server/src/handlers/nostr_auth.rs
@@ -0,0 +1,182 @@
+use axum::{extract::State, http::StatusCode, Json};
+use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
+use nostr::prelude::*;
+use std::sync::Arc;
+use uuid::Uuid;
+
+use crate::{
+ middleware::auth::create_token,
+ models::{AuthResponse, NostrChallengeResponse, NostrVerifyRequest, UserPublic},
+ AppState,
+};
+
+/// Claims embedded in the short-lived challenge token used during Nostr login.
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+struct ChallengeClaims {
+ pub nonce: String,
+ pub exp: usize,
+}
+
+/// GET /api/auth/nostr/challenge — return a short-lived JWT containing a random nonce
+pub async fn challenge(
+ State(state): State<Arc<AppState>>,
+) -> Result<Json<NostrChallengeResponse>, (StatusCode, String)> {
+ // Generate 32 random bytes as hex nonce
+ let mut nonce_bytes = [0u8; 32];
+ use rand::RngCore;
+ rand::thread_rng().fill_bytes(&mut nonce_bytes);
+ let nonce = hex::encode(&nonce_bytes);
+
+ let exp = (chrono::Utc::now().timestamp() + 120) as usize; // 2 minutes
+
+ let claims = ChallengeClaims { nonce, exp };
+
+ let token = encode(
+ &Header::default(),
+ &claims,
+ &EncodingKey::from_secret(state.jwt_secret.as_bytes()),
+ )
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ Ok(Json(NostrChallengeResponse { challenge: token }))
+}
+
+/// Simple hex encoder (avoid adding the `hex` crate just for this)
+mod hex {
+ /// Convert raw bytes into a lowercase hexadecimal string.
+ pub fn encode(bytes: &[u8]) -> String {
+ bytes.iter().map(|b| format!("{:02x}", b)).collect()
+ }
+}
+
+/// POST /api/auth/nostr/verify — verify signed event, create/login user
+pub async fn verify(
+ State(state): State<Arc<AppState>>,
+ Json(body): Json<NostrVerifyRequest>,
+) -> Result<Json<AuthResponse>, (StatusCode, String)> {
+ // 1. Decode challenge JWT, verify not expired, extract nonce
+ let challenge_data = decode::<ChallengeClaims>(
+ &body.challenge,
+ &DecodingKey::from_secret(state.jwt_secret.as_bytes()),
+ &Validation::default(),
+ )
+ .map_err(|_| {
+ (
+ StatusCode::BAD_REQUEST,
+ "Invalid or expired challenge".to_string(),
+ )
+ })?;
+
+ let nonce = &challenge_data.claims.nonce;
+
+ // 2. Deserialize signed_event as nostr::Event
+ let event: Event = serde_json::from_str(&body.signed_event).map_err(|e| {
+ (
+ StatusCode::BAD_REQUEST,
+ format!("Invalid event JSON: {}", e),
+ )
+ })?;
+
+ // 3. Verify Schnorr signature
+ if !event.verify_signature() {
+ return Err((
+ StatusCode::UNAUTHORIZED,
+ "Invalid event signature".to_string(),
+ ));
+ }
+
+ // 4. Verify event.content == nonce
+ if event.content.as_str() != nonce.as_str() {
+ return Err((StatusCode::BAD_REQUEST, "Nonce mismatch".to_string()));
+ }
+
+ // 5. Verify event.created_at within 5 minutes
+ let now = chrono::Utc::now().timestamp() as u64;
+ let event_ts = event.created_at.as_secs();
+ if now.abs_diff(event_ts) > 300 {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ "Event timestamp too far off".to_string(),
+ ));
+ }
+
+ // 6. Extract pubkey hex
+ let pubkey_hex = event.pubkey.to_hex();
+
+ // 7. Lookup user by nostr_pubkey
+ let existing = sqlx::query_as::<_, (String, String, String, Option<String>)>(
+ "SELECT id, email, display_name, avatar_url FROM users WHERE nostr_pubkey = ?",
+ )
+ .bind(&pubkey_hex)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ let (user_id, email, display_name, avatar_url) = if let Some(user) = existing {
+ // Update avatar if provided and user doesn't have a custom one
+ if let Some(ref pic) = body.profile_picture {
+ if user.3.is_none() || user.3.as_deref() == Some("") {
+ let _ = sqlx::query("UPDATE users SET avatar_url = ? WHERE id = ?")
+ .bind(pic)
+ .bind(&user.0)
+ .execute(&state.db)
+ .await;
+ (user.0, user.1, user.2, Some(pic.clone()))
+ } else {
+ user
+ }
+ } else {
+ user
+ }
+ } else {
+ // Create new user
+ let user_id = Uuid::new_v4().to_string();
+ let sentinel_email = format!("nostr:{}", &pubkey_hex[..16]);
+ let display_name = body
+ .profile_name
+ .clone()
+ .filter(|n| !n.trim().is_empty())
+ .unwrap_or_else(|| {
+ let npub = PublicKey::from_hex(&pubkey_hex)
+ .map(|pk| pk.to_bech32().unwrap_or_default())
+ .unwrap_or_default();
+ if npub.len() > 8 {
+ format!("npub...{}", &npub[npub.len() - 8..])
+ } else {
+ format!("nostr-{}", &pubkey_hex[..8])
+ }
+ });
+
+ let avatar_url = body.profile_picture.clone();
+
+ sqlx::query(
+ "INSERT INTO users (id, email, display_name, password_hash, nostr_pubkey, avatar_url) VALUES (?, ?, ?, ?, ?, ?)",
+ )
+ .bind(&user_id)
+ .bind(&sentinel_email)
+ .bind(&display_name)
+ .bind("")
+ .bind(&pubkey_hex)
+ .bind(&avatar_url)
+ .execute(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ (user_id, sentinel_email, display_name, avatar_url)
+ };
+
+ // 8. Issue JWT, return AuthResponse
+ let token = create_token(&user_id, &email, &display_name, &state.jwt_secret)
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ Ok(Json(AuthResponse {
+ token,
+ user: UserPublic {
+ id: user_id,
+ email: crate::models::public_email(&email),
+ display_name,
+ avatar_url,
+ nostr_pubkey: Some(pubkey_hex),
+ },
+ }))
+}
diff --git a/server/src/handlers/profile.rs b/server/src/handlers/profile.rs
index a0d3206..8a55bb8 100644
--- a/server/src/handlers/profile.rs
+++ b/server/src/handlers/profile.rs
@@ -7,10 +7,11 @@ use std::sync::Arc;
use crate::{
middleware::auth::{create_token, AuthUser},
- models::{AuthResponse, UserPublic},
+ models::{self, AuthResponse, UserPublic},
AppState,
};
+/// Request body for profile updates.
#[derive(Debug, serde::Deserialize)]
pub struct UpdateProfileRequest {
pub display_name: Option<String>,
@@ -25,7 +26,10 @@ pub async fn update_profile(
let display_name = body.display_name.unwrap_or(auth.display_name.clone());
if display_name.trim().is_empty() {
- return Err((StatusCode::BAD_REQUEST, "Display name cannot be empty".into()));
+ return Err((
+ StatusCode::BAD_REQUEST,
+ "Display name cannot be empty".into(),
+ ));
}
sqlx::query("UPDATE users SET display_name = ? WHERE id = ?")
@@ -52,9 +56,10 @@ pub async fn update_profile(
token,
user: UserPublic {
id: auth.user_id,
- email: auth.email,
+ email: models::public_email(&auth.email),
display_name,
avatar_url,
+ nostr_pubkey: None,
},
}))
}
@@ -82,7 +87,12 @@ pub async fn upload_avatar(
"image/jpeg" | "image/jpg" => "jpg",
"image/gif" => "gif",
"image/webp" => "webp",
- _ => return Err((StatusCode::BAD_REQUEST, "Only PNG, JPG, GIF, and WebP images are allowed".into())),
+ _ => {
+ return Err((
+ StatusCode::BAD_REQUEST,
+ "Only PNG, JPG, GIF, and WebP images are allowed".into(),
+ ))
+ }
};
let data = field
@@ -129,16 +139,22 @@ pub async fn upload_avatar(
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
// Issue new token
- let token = create_token(&auth.user_id, &auth.email, &auth.display_name, &state.jwt_secret)
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ let token = create_token(
+ &auth.user_id,
+ &auth.email,
+ &auth.display_name,
+ &state.jwt_secret,
+ )
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
Ok(Json(AuthResponse {
token,
user: UserPublic {
id: auth.user_id,
- email: auth.email,
+ email: models::public_email(&auth.email),
display_name: auth.display_name,
avatar_url: Some(avatar_url),
+ nostr_pubkey: None,
},
}))
}
@@ -166,16 +182,22 @@ pub async fn delete_avatar(
.await
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
- let token = create_token(&auth.user_id, &auth.email, &auth.display_name, &state.jwt_secret)
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ let token = create_token(
+ &auth.user_id,
+ &auth.email,
+ &auth.display_name,
+ &state.jwt_secret,
+ )
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
Ok(Json(AuthResponse {
token,
user: UserPublic {
id: auth.user_id,
- email: auth.email,
+ email: models::public_email(&auth.email),
display_name: auth.display_name,
avatar_url: None,
+ nostr_pubkey: None,
},
}))
}
diff --git a/server/src/handlers/rooms.rs b/server/src/handlers/rooms.rs
index 0f89bb2..5150566 100644
--- a/server/src/handlers/rooms.rs
+++ b/server/src/handlers/rooms.rs
@@ -8,10 +8,13 @@ use uuid::Uuid;
use crate::{
middleware::auth::AuthUser,
- models::{CreateRoomRequest, MessagePayload, PaginationParams, Room, RoomResponse, UserPublic},
+ models::{
+ self, CreateRoomRequest, MessagePayload, PaginationParams, Room, RoomResponse, UserPublic,
+ },
AppState,
};
+/// Create a room, persist it, and add the creator as the first member.
pub async fn create_room(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -52,13 +55,15 @@ pub async fn create_room(
created_at: chrono::Utc::now().to_rfc3339(),
members: vec![UserPublic {
id: auth.user_id,
- email: auth.email,
+ email: models::public_email(&auth.email),
display_name: auth.display_name,
avatar_url: None,
+ nostr_pubkey: None,
}],
}))
}
+/// List all active rooms the caller belongs to, including current room members.
pub async fn list_rooms(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -73,8 +78,8 @@ pub async fn list_rooms(
let mut result = Vec::new();
for room in rooms {
- let members = sqlx::query_as::<_, (String, String, String, Option<String>)>(
- "SELECT u.id, u.email, u.display_name, u.avatar_url FROM users u JOIN room_members rm ON u.id = rm.user_id WHERE rm.room_id = ?",
+ let members = sqlx::query_as::<_, (String, String, String, Option<String>, Option<String>)>(
+ "SELECT u.id, u.email, u.display_name, u.avatar_url, u.nostr_pubkey FROM users u JOIN room_members rm ON u.id = rm.user_id WHERE rm.room_id = ?",
)
.bind(&room.id)
.fetch_all(&state.db)
@@ -92,12 +97,15 @@ pub async fn list_rooms(
created_at: room.created_at,
members: members
.into_iter()
- .map(|(id, email, display_name, avatar_url)| UserPublic {
- id,
- email,
- display_name,
- avatar_url,
- })
+ .map(
+ |(id, email, display_name, avatar_url, nostr_pubkey)| UserPublic {
+ id,
+ email: models::public_email(&email),
+ display_name,
+ avatar_url,
+ nostr_pubkey,
+ },
+ )
.collect(),
});
}
@@ -105,6 +113,7 @@ pub async fn list_rooms(
Ok(Json(result))
}
+/// Return details for a single room after verifying the caller is a member.
pub async fn get_room(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -131,8 +140,8 @@ pub async fn get_room(
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?
.ok_or((StatusCode::NOT_FOUND, "Room not found".into()))?;
- let members = sqlx::query_as::<_, (String, String, String, Option<String>)>(
- "SELECT u.id, u.email, u.display_name, u.avatar_url FROM users u JOIN room_members rm ON u.id = rm.user_id WHERE rm.room_id = ?",
+ let members = sqlx::query_as::<_, (String, String, String, Option<String>, Option<String>)>(
+ "SELECT u.id, u.email, u.display_name, u.avatar_url, u.nostr_pubkey FROM users u JOIN room_members rm ON u.id = rm.user_id WHERE rm.room_id = ?",
)
.bind(&room_id)
.fetch_all(&state.db)
@@ -150,16 +159,20 @@ pub async fn get_room(
created_at: room.created_at,
members: members
.into_iter()
- .map(|(id, email, display_name, avatar_url)| UserPublic {
- id,
- email,
- display_name,
- avatar_url,
- })
+ .map(
+ |(id, email, display_name, avatar_url, nostr_pubkey)| UserPublic {
+ id,
+ email: models::public_email(&email),
+ display_name,
+ avatar_url,
+ nostr_pubkey,
+ },
+ )
.collect(),
}))
}
+/// Return paginated message history for a room the caller can access.
pub async fn get_messages(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -180,10 +193,10 @@ pub async fn get_messages(
return Err((StatusCode::FORBIDDEN, "Not a member of this room".into()));
}
- // Query messages with user email via LEFT JOIN for Gravatar hash
+ // Query messages with user email + avatar_url via LEFT JOIN
let rows = if let Some(before) = ¶ms.before {
- sqlx::query_as::<_, (String, String, String, String, String, String, bool, String, Option<String>, Option<String>, Option<String>)>(
- "SELECT m.id, m.room_id, m.sender_id, m.sender_name, m.content, m.mentions, m.is_ai, m.created_at, m.ai_meta, m.image_url, u.email \
+ sqlx::query_as::<_, (String, String, String, String, String, String, bool, String, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)>(
+ "SELECT m.id, m.room_id, m.sender_id, m.sender_name, m.content, m.mentions, m.is_ai, m.created_at, m.ai_meta, m.image_url, u.email, u.avatar_url, m.hash \
FROM messages m LEFT JOIN users u ON m.sender_id = u.id \
WHERE m.room_id = ? AND m.created_at < ? ORDER BY m.created_at DESC LIMIT ?",
)
@@ -193,8 +206,8 @@ pub async fn get_messages(
.fetch_all(&state.db)
.await
} else {
- sqlx::query_as::<_, (String, String, String, String, String, String, bool, String, Option<String>, Option<String>, Option<String>)>(
- "SELECT m.id, m.room_id, m.sender_id, m.sender_name, m.content, m.mentions, m.is_ai, m.created_at, m.ai_meta, m.image_url, u.email \
+ sqlx::query_as::<_, (String, String, String, String, String, String, bool, String, Option<String>, Option<String>, Option<String>, Option<String>, Option<String>)>(
+ "SELECT m.id, m.room_id, m.sender_id, m.sender_name, m.content, m.mentions, m.is_ai, m.created_at, m.ai_meta, m.image_url, u.email, u.avatar_url, m.hash \
FROM messages m LEFT JOIN users u ON m.sender_id = u.id \
WHERE m.room_id = ? ORDER BY m.created_at DESC LIMIT ?",
)
@@ -205,46 +218,98 @@ pub async fn get_messages(
}
.map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ // The SQL query reads newest-first for efficient pagination, but clients
+ // render chat oldest-to-newest, so reverse the rows before serializing.
let payloads: Vec<MessagePayload> = rows
.into_iter()
.rev()
- .map(|(id, room_id, sender_id, sender_name, content, mentions, is_ai, created_at, ai_meta_str, image_url, email)| {
- let ai_meta = ai_meta_str
- .as_deref()
- .and_then(|s| serde_json::from_str::(s).ok());
- let avatar_hash = email
- .map(|e| crate::models::gravatar_hash(&e))
- .unwrap_or_default();
- MessagePayload {
+ .map(
+ |(
id,
room_id,
sender_id,
sender_name,
content,
- mentions: serde_json::from_str(&mentions).unwrap_or_default(),
+ mentions,
is_ai,
created_at,
- ai_meta,
- avatar_hash,
+ ai_meta_str,
image_url,
- }
- })
+ email,
+ avatar_url,
+ hash,
+ )| {
+ let ai_meta = ai_meta_str
+ .as_deref()
+ .and_then(|s| serde_json::from_str::(s).ok());
+ let avatar_hash = email
+ .map(|e| crate::models::gravatar_hash(&e))
+ .unwrap_or_default();
+ MessagePayload {
+ id,
+ room_id,
+ sender_id,
+ sender_name,
+ content,
+ mentions: serde_json::from_str(&mentions).unwrap_or_default(),
+ is_ai,
+ created_at,
+ ai_meta,
+ avatar_hash,
+ avatar_url,
+ image_url,
+ hash,
+ }
+ },
+ )
.collect();
Ok(Json(payloads))
}
+/// Resolve a stable message hash into the room that contains it.
+pub async fn resolve_message_hash(
+ State(state): State<Arc<AppState>>,
+ auth: AuthUser,
+ Path(hash): Path<String>,
+) -> Result<Json<serde_json::Value>, (StatusCode, String)> {
+ // Find the message by hash
+ let row = sqlx::query_as::<_, (String,)>(
+ "SELECT m.room_id FROM messages m \
+ JOIN room_members rm ON rm.room_id = m.room_id AND rm.user_id = ? \
+ JOIN rooms r ON r.id = m.room_id AND r.deleted_at IS NULL \
+ WHERE m.hash = ? LIMIT 1",
+ )
+ .bind(&auth.user_id)
+ .bind(&hash)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+
+ match row {
+ Some((room_id,)) => Ok(Json(
+ serde_json::json!({ "room_id": room_id, "hash": hash }),
+ )),
+ None => Err((
+ StatusCode::NOT_FOUND,
+ "Message not found or no access".into(),
+ )),
+ }
+}
+
+/// Add the caller to a room directly when they already know its ID.
pub async fn join_room(
State(state): State<Arc<AppState>>,
auth: AuthUser,
Path(room_id): Path,
) -> Result {
// Check room exists
- let room_exists = sqlx::query_scalar::<_, String>("SELECT id FROM rooms WHERE id = ? AND deleted_at IS NULL")
- .bind(&room_id)
- .fetch_optional(&state.db)
- .await
- .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
+ let room_exists =
+ sqlx::query_scalar::<_, String>("SELECT id FROM rooms WHERE id = ? AND deleted_at IS NULL")
+ .bind(&room_id)
+ .fetch_optional(&state.db)
+ .await
+ .map_err(|e| (StatusCode::INTERNAL_SERVER_ERROR, e.to_string()))?;
if room_exists.is_none() {
return Err((StatusCode::NOT_FOUND, "Room not found".into()));
@@ -260,6 +325,7 @@ pub async fn join_room(
Ok(StatusCode::OK)
}
+/// Soft-delete a room and broadcast the deletion event to connected members.
pub async fn delete_room(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -274,7 +340,10 @@ pub async fn delete_room(
.ok_or((StatusCode::NOT_FOUND, "Room not found".into()))?;
if room.created_by != auth.user_id {
- return Err((StatusCode::FORBIDDEN, "Only the room creator can delete this room".into()));
+ return Err((
+ StatusCode::FORBIDDEN,
+ "Only the room creator can delete this room".into(),
+ ));
}
// Soft-delete
@@ -295,6 +364,7 @@ pub async fn delete_room(
Ok(StatusCode::OK)
}
+/// Permanently remove all messages from a room without deleting the room itself.
pub async fn clear_room(
State(state): State<Arc<AppState>>,
auth: AuthUser,
@@ -309,7 +379,10 @@ pub async fn clear_room(
.ok_or((StatusCode::NOT_FOUND, "Room not found".into()))?;
if room.created_by != auth.user_id {
- return Err((StatusCode::FORBIDDEN, "Only the room creator can clear messages".into()));
+ return Err((
+ StatusCode::FORBIDDEN,
+ "Only the room creator can clear messages".into(),
+ ));
}
// Hard-delete all messages
diff --git a/server/src/handlers/upload.rs b/server/src/handlers/upload.rs
index 107a026..90f5900 100644
--- a/server/src/handlers/upload.rs
+++ b/server/src/handlers/upload.rs
@@ -1,18 +1,16 @@
-use axum::{
- extract::Multipart,
- http::StatusCode,
- Json,
-};
+use axum::{extract::Multipart, http::StatusCode, Json};
use serde::Serialize;
use uuid::Uuid;
use crate::middleware::auth::AuthUser;
+/// Response returned after a chat image upload succeeds.
#[derive(Serialize)]
pub struct UploadResponse {
pub url: String,
}
+/// Accept a multipart chat image upload and store it under `uploads/chat-images`.
pub async fn upload_chat_image(
_auth: AuthUser,
mut multipart: Multipart,
diff --git a/server/src/handlers/ws.rs b/server/src/handlers/ws.rs
index 08575c6..6221c14 100644
--- a/server/src/handlers/ws.rs
+++ b/server/src/handlers/ws.rs
@@ -1,3 +1,9 @@
+//! WebSocket workflow for live chat delivery and AI responses.
+//!
+//! This module does two jobs:
+//! - fan out database-backed room events to subscribed browser sockets
+//! - turn incoming user chat messages into stored messages and optional AI replies
+
use axum::{
extract::{
ws::{Message, WebSocket},
@@ -12,7 +18,7 @@ use uuid::Uuid;
use crate::{
middleware::auth::decode_token,
models::{BroadcastEvent, MessagePayload, WsClientMessage, WsServerMessage},
- services::{brave, fetch, openrouter},
+ services::{fetch, openrouter, search},
AppState,
};
@@ -24,6 +30,7 @@ pub struct WsQuery {
token: String,
}
+/// Upgrade an authenticated request into a WebSocket connection.
pub async fn ws_handler(
ws: WebSocketUpgrade,
State(state): State>,
@@ -37,10 +44,19 @@ pub async fn ws_handler(
}
};
- ws.on_upgrade(move |socket| handle_socket(socket, state, claims.sub, claims.display_name, claims.email))
+ ws.on_upgrade(move |socket| {
+ handle_socket(socket, state, claims.sub, claims.display_name, claims.email)
+ })
}
-async fn handle_socket(socket: WebSocket, state: Arc<AppState>, user_id: String, display_name: String, email: String) {
+/// Drive a single WebSocket connection until either the send or receive side ends.
+async fn handle_socket(
+ socket: WebSocket,
+ state: Arc,
+ user_id: String,
+ display_name: String,
+ email: String,
+) {
let (mut ws_tx, mut ws_rx) = socket.split();
let mut broadcast_rx = state.tx.subscribe();
@@ -50,7 +66,8 @@ async fn handle_socket(socket: WebSocket, state: Arc, user_id: String,
let rooms_clone = subscribed_rooms.clone();
- // Task: forward broadcast events to this client
+ // Task 1: forward room events from the shared broadcast channel into this
+ // specific socket, but only for rooms the browser subscribed to.
let mut send_task = tokio::spawn(async move {
loop {
match broadcast_rx.recv().await {
@@ -81,7 +98,8 @@ async fn handle_socket(socket: WebSocket, state: Arc, user_id: String,
let email_clone = email.clone();
let rooms_clone2 = subscribed_rooms.clone();
- // Task: receive messages from client
+ // Task 2: receive commands from the browser and translate them into
+ // database writes, broadcasts, or AI work.
let mut recv_task = tokio::spawn(async move {
while let Some(Ok(msg)) = ws_rx.next().await {
let text = match msg {
@@ -141,7 +159,7 @@ async fn handle_socket(socket: WebSocket, state: Arc, user_id: String,
}
});
- // Wait for either task to finish, then abort the other
+ // If either half of the connection ends, stop the companion task too.
tokio::select! {
_ = &mut send_task => recv_task.abort(),
_ = &mut recv_task => send_task.abort(),
@@ -150,6 +168,7 @@ async fn handle_socket(socket: WebSocket, state: Arc, user_id: String,
tracing::info!("WebSocket disconnected: {}", user_id);
}
+/// Persist a user message, broadcast it, and optionally generate an AI reply.
async fn handle_send_message(
state: &Arc<AppState>,
user_id: &str,
@@ -164,9 +183,12 @@ async fn handle_send_message(
let mentions_json = serde_json::to_string(mentions).unwrap_or_else(|_| "[]".to_string());
let now = chrono::Utc::now().to_rfc3339();
+ // Compute integrity hash from timestamp + content
+ let hash = crate::models::message_hash(&now, content);
+
// Store in database (with image_url)
let _ = sqlx::query(
- "INSERT INTO messages (id, room_id, sender_id, sender_name, content, mentions, is_ai, created_at, image_url) VALUES (?, ?, ?, ?, ?, ?, 0, ?, ?)",
+ "INSERT INTO messages (id, room_id, sender_id, sender_name, content, mentions, is_ai, created_at, image_url, hash) VALUES (?, ?, ?, ?, ?, ?, 0, ?, ?, ?)",
)
.bind(&msg_id)
.bind(room_id)
@@ -176,9 +198,20 @@ async fn handle_send_message(
.bind(&mentions_json)
.bind(&now)
.bind(image_url)
+ .bind(&hash)
.execute(&state.db)
.await;
+ // Look up the sender's custom avatar (if any) for the message payload
+ let avatar_url: Option<String> =
+ sqlx::query_scalar("SELECT avatar_url FROM users WHERE id = ?")
+ .bind(user_id)
+ .fetch_optional(&state.db)
+ .await
+ .ok()
+ .flatten()
+ .flatten();
+
// Broadcast human message
let payload = MessagePayload {
id: msg_id,
@@ -191,17 +224,18 @@ async fn handle_send_message(
created_at: now,
ai_meta: None,
avatar_hash: crate::models::gravatar_hash(email),
+ avatar_url,
image_url: image_url.map(String::from),
+ hash: Some(hash),
};
let _ = state.tx.send(BroadcastEvent {
room_id: room_id.to_string(),
- message: WsServerMessage::NewMessage {
- message: payload,
- },
+ message: WsServerMessage::NewMessage { message: payload },
});
- // Check if AI should respond
+ // The AI only replies when explicitly mentioned or when the room is set to
+ // auto-reply to every message.
let ai_user_id = "ai-assistant";
let should_respond = mentions.contains(&ai_user_id.to_string());
@@ -239,7 +273,8 @@ async fn handle_send_message(
.await
.unwrap_or_default();
- // Process history: encode images as base64 data URLs for OpenRouter
+ // OpenRouter accepts image inputs as data URLs, so local uploads need to be
+ // loaded from disk and encoded before they are sent upstream.
let mut history: Vec<(String, String, bool, Option<String>)> = Vec::new();
for (sender_name, msg_content, is_ai, msg_image_url) in recent_messages.into_iter().rev() {
let image_data_url = match &msg_image_url {
@@ -257,7 +292,8 @@ async fn handle_send_message(
// Pre-generate AI message ID so we can reference it in stream chunks
let ai_msg_id = Uuid::new_v4().to_string();
- // Call OpenRouter with tool loop — uses streaming for all rounds
+ // Run the AI in a loop because the model may first request tools, then need
+ // follow-up rounds after those tool results are added to history.
let mut total_prompt_tokens: u32 = 0;
let mut total_completion_tokens: u32 = 0;
let mut total_response_ms: u64 = 0;
@@ -298,16 +334,24 @@ async fn handle_send_message(
tracing::info!(
"AI requesting tool calls (round {}): {:?}",
round + 1,
- assistant_msg.tool_calls.as_ref().map(|tc| tc.iter().map(|t| &t.function.name).collect::<Vec<_>>())
+ assistant_msg
+ .tool_calls
+ .as_ref()
+ .map(|tc| tc.iter().map(|t| &t.function.name).collect::<Vec<_>>())
);
- // Add the assistant's tool-call message to history
+ // Preserve the assistant tool-call message so the next round
+ // has the same context the model produced.
let tool_calls = assistant_msg.tool_calls.clone().unwrap_or_default();
chat_history.push(assistant_msg);
- // Execute each tool call and add results
+ // Tool results are fed back into the conversation as
+ // synthetic `tool` messages, matching the upstream API.
for tool_call in &tool_calls {
- let tool_input = extract_tool_input(&tool_call.function.name, &tool_call.function.arguments);
+ let tool_input = extract_tool_input(
+ &tool_call.function.name,
+ &tool_call.function.arguments,
+ );
// Broadcast real-time tool usage event
let _ = state.tx.send(BroadcastEvent {
@@ -322,7 +366,9 @@ async fn handle_send_message(
let tool_result = execute_tool(
&tool_call.function.name,
&tool_call.function.arguments,
- &state.brave_api_key,
+ state.search_provider,
+ state.tavily_api_key.as_deref(),
+ state.brave_api_key.as_deref(),
)
.await;
@@ -345,7 +391,7 @@ async fn handle_send_message(
tool_call_id: Some(tool_call.id.clone()),
});
}
- // Continue to next round (tool loop)
+ // Ask the model to continue now that tool output exists.
continue 'tool_loop;
}
openrouter::StreamEvent::Done(stats) => {
@@ -365,9 +411,12 @@ async fn handle_send_message(
}
}
- // If we exhausted all rounds without a text response, note it
+ // Guardrail: if the model never produced final prose, store a clear fallback
+ // instead of leaving the client waiting indefinitely.
if ai_response.is_empty() && !had_error {
- ai_response = "*I used several tools but couldn't formulate a final response. Please try again.*".to_string();
+ ai_response =
+ "*I used several tools but couldn't formulate a final response. Please try again.*"
+ .to_string();
}
// Signal stream end so client can finalize rendering
@@ -402,8 +451,11 @@ async fn handle_send_message(
// Serialize ai_meta for database storage
let ai_meta_json = ai_meta.as_ref().and_then(|m| serde_json::to_string(m).ok());
+ // Compute integrity hash from timestamp + content
+ let ai_hash = crate::models::message_hash(&ai_now, &ai_response);
+
let _ = sqlx::query(
- "INSERT INTO messages (id, room_id, sender_id, sender_name, content, mentions, is_ai, created_at, ai_meta) VALUES (?, ?, ?, ?, ?, '[]', 1, ?, ?)",
+ "INSERT INTO messages (id, room_id, sender_id, sender_name, content, mentions, is_ai, created_at, ai_meta, hash) VALUES (?, ?, ?, ?, ?, '[]', 1, ?, ?, ?)",
)
.bind(&ai_msg_id)
.bind(room_id)
@@ -412,6 +464,7 @@ async fn handle_send_message(
.bind(&ai_response)
.bind(&ai_now)
.bind(&ai_meta_json)
+ .bind(&ai_hash)
.execute(&state.db)
.await;
@@ -427,7 +480,9 @@ async fn handle_send_message(
created_at: ai_now,
ai_meta,
avatar_hash: String::new(),
+ avatar_url: None,
image_url: None,
+ hash: Some(ai_hash),
};
let _ = state.tx.send(BroadcastEvent {
@@ -465,16 +520,22 @@ async fn encode_image_as_data_url(url: &str) -> Option {
fn extract_tool_input(tool_name: &str, arguments: &str) -> String {
let args: serde_json::Value = serde_json::from_str(arguments).unwrap_or_default();
match tool_name {
- "brave_search" => args["query"].as_str().unwrap_or("").to_string(),
+ "web_search" | "brave_search" => args["query"].as_str().unwrap_or("").to_string(),
"web_fetch" => args["url"].as_str().unwrap_or("").to_string(),
_ => arguments.to_string(),
}
}
/// Execute a tool call by name, returning the result as a string.
-async fn execute_tool(name: &str, arguments: &str, brave_api_key: &str) -> String {
+async fn execute_tool(
+ name: &str,
+ arguments: &str,
+ search_provider: search::SearchProvider,
+ tavily_api_key: Option<&str>,
+ brave_api_key: Option<&str>,
+) -> String {
match name {
- "brave_search" => {
+ "web_search" | "brave_search" => {
let args: serde_json::Value = serde_json::from_str(arguments).unwrap_or_default();
let query = args["query"].as_str().unwrap_or("").to_string();
let count = args["count"].as_u64().unwrap_or(5) as u8;
@@ -483,8 +544,16 @@ async fn execute_tool(name: &str, arguments: &str, brave_api_key: &str) -> Strin
return "Error: search query is required".into();
}
- match brave::search(&query, brave_api_key, count).await {
- Ok(results) => brave::format_results(&results),
+ match search::search(
+ search_provider,
+ &query,
+ tavily_api_key,
+ brave_api_key,
+ count,
+ )
+ .await
+ {
+ Ok(results) => search::format_results(&results),
Err(e) => format!("Search error: {}", e),
}
}
diff --git a/server/src/main.rs b/server/src/main.rs
index 8d0c5bf..06881dc 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -1,3 +1,12 @@
+//! Application bootstrap for the GroupChat server.
+//!
+//! This file is responsible for:
+//! - loading environment configuration
+//! - opening and migrating the SQLite database
+//! - constructing shared application state
+//! - registering HTTP/WebSocket routes
+//! - serving the SPA frontend in production
+
mod handlers;
mod middleware;
mod models;
@@ -14,6 +23,8 @@ use tower_http::cors::{Any, CorsLayer};
use tower_http::services::{ServeDir, ServeFile};
use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt};
+use crate::services::search::SearchProvider;
+
/// Extract the file path from a SQLite DATABASE_URL like "sqlite:chat.db?mode=rwc"
fn db_file_path(database_url: &str) -> Option {
let path = database_url.strip_prefix("sqlite:")?;
@@ -49,14 +60,8 @@ fn backup_database(database_url: &str) {
}
// Build timestamped backup filename: chat.db -> chat_2026-03-09_143022.db
- let stem = db_file
- .file_stem()
- .and_then(|s| s.to_str())
- .unwrap_or("db");
- let ext = db_file
- .extension()
- .and_then(|s| s.to_str())
- .unwrap_or("db");
+ let stem = db_file.file_stem().and_then(|s| s.to_str()).unwrap_or("db");
+ let ext = db_file.extension().and_then(|s| s.to_str()).unwrap_or("db");
let now = chrono::Local::now();
let backup_name = format!("{}_{}.{}", stem, now.format("%Y-%m-%d_%H%M%S"), ext);
@@ -80,11 +85,21 @@ fn backup_database(database_url: &str) {
let wal_path = format!("{}-wal", db_path);
let shm_path = format!("{}-shm", db_path);
if std::path::Path::new(&wal_path).exists() {
- let wal_backup = backup_dir.join(format!("{}_{}.{}-wal", stem, now.format("%Y-%m-%d_%H%M%S"), ext));
+ let wal_backup = backup_dir.join(format!(
+ "{}_{}.{}-wal",
+ stem,
+ now.format("%Y-%m-%d_%H%M%S"),
+ ext
+ ));
let _ = std::fs::copy(&wal_path, &wal_backup);
}
if std::path::Path::new(&shm_path).exists() {
- let shm_backup = backup_dir.join(format!("{}_{}.{}-shm", stem, now.format("%Y-%m-%d_%H%M%S"), ext));
+ let shm_backup = backup_dir.join(format!(
+ "{}_{}.{}-shm",
+ stem,
+ now.format("%Y-%m-%d_%H%M%S"),
+ ext
+ ));
let _ = std::fs::copy(&shm_path, &shm_backup);
}
@@ -117,25 +132,42 @@ fn prune_old_backups(backup_dir: &std::path::Path, stem: &str, keep: usize) {
let to_remove = backups.len() - keep;
for entry in backups.into_iter().take(to_remove) {
let path = entry.path();
- let name = path.file_name().unwrap_or_default().to_string_lossy().to_string();
+ let name = path
+ .file_name()
+ .unwrap_or_default()
+ .to_string_lossy()
+ .to_string();
if let Err(e) = std::fs::remove_file(&path) {
tracing::warn!("Failed to remove old backup {}: {}", name, e);
} else {
tracing::debug!("Pruned old backup: {}", name);
// Also remove associated WAL/SHM backups
- let wal = path.with_extension(format!("{}-wal", path.extension().unwrap_or_default().to_string_lossy()));
- let shm = path.with_extension(format!("{}-shm", path.extension().unwrap_or_default().to_string_lossy()));
+ let wal = path.with_extension(format!(
+ "{}-wal",
+ path.extension().unwrap_or_default().to_string_lossy()
+ ));
+ let shm = path.with_extension(format!(
+ "{}-shm",
+ path.extension().unwrap_or_default().to_string_lossy()
+ ));
let _ = std::fs::remove_file(&wal);
let _ = std::fs::remove_file(&shm);
}
}
}
+/// Shared state injected into every handler.
+///
+/// Axum stores this behind an `Arc`, so handlers can cheaply clone the pointer
+/// while all requests still talk to the same database pool, API keys, and
+/// broadcast channel.
pub struct AppState {
pub db: sqlx::SqlitePool,
pub jwt_secret: String,
pub openrouter_key: String,
- pub brave_api_key: String,
+ pub search_provider: SearchProvider,
+ pub tavily_api_key: Option,
+ pub brave_api_key: Option,
pub tx: broadcast::Sender,
}
@@ -150,10 +182,27 @@ async fn main() {
.with(tracing_subscriber::fmt::layer())
.init();
- let database_url = std::env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite:chat.db?mode=rwc".into());
+ // Load the runtime configuration needed to start the server.
+ let database_url =
+ std::env::var("DATABASE_URL").unwrap_or_else(|_| "sqlite:chat.db?mode=rwc".into());
let jwt_secret = std::env::var("JWT_SECRET").unwrap_or_else(|_| "dev-secret-change-me".into());
- let openrouter_key = std::env::var("OPENROUTER_API_KEY").expect("OPENROUTER_API_KEY must be set");
- let brave_api_key = std::env::var("BRAVE_API_KEY").expect("BRAVE_API_KEY must be set");
+ let openrouter_key =
+ std::env::var("OPENROUTER_API_KEY").expect("OPENROUTER_API_KEY must be set");
+ let search_provider =
+ SearchProvider::from_env(std::env::var("SEARCH_PROVIDER").ok().as_deref())
+ .unwrap_or_else(|e| panic!("{}", e));
+ let tavily_api_key = std::env::var("TAVILY_API_KEY").ok();
+ let brave_api_key = std::env::var("BRAVE_API_KEY").ok();
+
+ match search_provider {
+ SearchProvider::Tavily if tavily_api_key.as_deref().unwrap_or("").is_empty() => {
+ panic!("TAVILY_API_KEY must be set when SEARCH_PROVIDER=tavily");
+ }
+ SearchProvider::Brave if brave_api_key.as_deref().unwrap_or("").is_empty() => {
+ panic!("BRAVE_API_KEY must be set when SEARCH_PROVIDER=brave");
+ }
+ _ => {}
+ }
// Backup the database before connecting and running migrations
backup_database(&database_url);
@@ -164,7 +213,8 @@ async fn main() {
.await
.expect("Failed to connect to database");
- // Run migrations
+ // Run migrations in order. Each one is written so startup can safely try it
+ // again and skip work that already happened in an earlier run.
let migration_sql = include_str!("../migrations/001_init.sql");
sqlx::raw_sql(migration_sql)
.execute(&db)
@@ -221,14 +271,60 @@ async fn main() {
Err(e) => panic!("Failed to run migration 006: {}", e),
}
+ // Run migration 007 - SHA-256 integrity hash on messages
+ let migration_007 = include_str!("../migrations/007_message_hash.sql");
+ match sqlx::raw_sql(migration_007).execute(&db).await {
+ Ok(_) => {
+ tracing::info!("Migration 007 applied, backfilling hashes for existing messages...");
+ // Backfill hashes for all existing messages that don't have one
+ let rows = sqlx::query_as::<_, (String, String, String)>(
+ "SELECT id, created_at, content FROM messages WHERE hash IS NULL",
+ )
+ .fetch_all(&db)
+ .await
+ .unwrap_or_default();
+
+ let count = rows.len();
+ for (id, created_at, content) in rows {
+ let hash = models::message_hash(&created_at, &content);
+ let _ = sqlx::query("UPDATE messages SET hash = ? WHERE id = ?")
+ .bind(&hash)
+ .bind(&id)
+ .execute(&db)
+ .await;
+ }
+ if count > 0 {
+ tracing::info!("Backfilled hashes for {} existing messages", count);
+ }
+ }
+ Err(e) if e.to_string().contains("duplicate column") => {
+ tracing::debug!("Migration 007 already applied, skipping");
+ }
+ Err(e) => panic!("Failed to run migration 007: {}", e),
+ }
+
+ // Run migration 008 - nostr pubkey on users
+ let migration_008 = include_str!("../migrations/008_nostr.sql");
+ match sqlx::raw_sql(migration_008).execute(&db).await {
+ Ok(_) => tracing::info!("Migration 008 applied"),
+ Err(e) if e.to_string().contains("duplicate column") => {
+ tracing::debug!("Migration 008 already applied, skipping");
+ }
+ Err(e) => panic!("Failed to run migration 008: {}", e),
+ }
+
tracing::info!("Database initialized");
+ // WebSocket tasks subscribe to this channel to receive room events without
+ // polling the database.
let (tx, _rx) = broadcast::channel::(4096);
let state = Arc::new(AppState {
db,
jwt_secret,
openrouter_key,
+ search_provider,
+ tavily_api_key,
brave_api_key,
tx,
});
@@ -241,27 +337,61 @@ async fn main() {
// Serve static files from client dist in production
let static_dir = std::env::var("STATIC_DIR").unwrap_or_else(|_| "../client/dist".into());
+ // Keep API routes separate from the static-file fallback so `/api/*` and
+ // `/ws` requests never get mistaken for SPA routes.
let api_routes = Router::new()
// Auth routes
.route("/api/auth/register", post(handlers::auth::register))
.route("/api/auth/login", post(handlers::auth::login))
.route("/api/auth/me", get(handlers::auth::me))
+ // Nostr auth routes
+ .route(
+ "/api/auth/nostr/challenge",
+ get(handlers::nostr_auth::challenge),
+ )
+ .route("/api/auth/nostr/verify", post(handlers::nostr_auth::verify))
// Profile routes
.route("/api/auth/profile", put(handlers::profile::update_profile))
- .route("/api/auth/avatar", post(handlers::profile::upload_avatar).delete(handlers::profile::delete_avatar))
+ .route(
+ "/api/auth/avatar",
+ post(handlers::profile::upload_avatar).delete(handlers::profile::delete_avatar),
+ )
// Room routes
- .route("/api/rooms", get(handlers::rooms::list_rooms).post(handlers::rooms::create_room))
- .route("/api/rooms/:room_id", get(handlers::rooms::get_room).delete(handlers::rooms::delete_room))
- .route("/api/rooms/:room_id/messages", get(handlers::rooms::get_messages))
+ .route(
+ "/api/rooms",
+ get(handlers::rooms::list_rooms).post(handlers::rooms::create_room),
+ )
+ .route(
+ "/api/rooms/:room_id",
+ get(handlers::rooms::get_room).delete(handlers::rooms::delete_room),
+ )
+ .route(
+ "/api/rooms/:room_id/messages",
+ get(handlers::rooms::get_messages),
+ )
.route("/api/rooms/:room_id/join", post(handlers::rooms::join_room))
- .route("/api/rooms/:room_id/clear", post(handlers::rooms::clear_room))
+ .route(
+ "/api/rooms/:room_id/clear",
+ post(handlers::rooms::clear_room),
+ )
+ .route(
+ "/api/messages/hash/:hash",
+ get(handlers::rooms::resolve_message_hash),
+ )
// Upload (chat images)
.route("/api/upload", post(handlers::upload::upload_chat_image))
// Models
.route("/api/models", get(handlers::models::list_models))
// Invite routes
.route("/api/invites", post(handlers::invites::create_invite))
- .route("/api/invites/:token/accept", post(handlers::invites::accept_invite))
+ .route(
+ "/api/invites/:token/accept",
+ post(handlers::invites::accept_invite),
+ )
+ .route(
+ "/api/invites/nostr",
+ post(handlers::invites::invite_by_nostr),
+ )
// Uploaded files (avatars)
.nest_service("/uploads", ServeDir::new("uploads"))
// WebSocket
diff --git a/server/src/middleware/auth.rs b/server/src/middleware/auth.rs
index 37610e9..311742b 100644
--- a/server/src/middleware/auth.rs
+++ b/server/src/middleware/auth.rs
@@ -1,14 +1,11 @@
use async_trait::async_trait;
-use axum::{
- extract::FromRequestParts,
- http::request::Parts,
-};
+use axum::{extract::FromRequestParts, http::request::Parts};
use jsonwebtoken::{decode, encode, DecodingKey, EncodingKey, Header, Validation};
use std::sync::Arc;
use crate::{models::Claims, AppState};
-/// Extract authenticated user from JWT in Authorization header
+/// Authenticated user information extracted from the bearer token.
pub struct AuthUser {
pub user_id: String,
pub email: String,
@@ -19,7 +16,15 @@ pub struct AuthUser {
impl FromRequestParts> for AuthUser {
type Rejection = axum::http::StatusCode;
- async fn from_request_parts(parts: &mut Parts, state: &Arc) -> Result {
+ /// Read the `Authorization: Bearer ` header and decode the JWT.
+ ///
+ /// Axum runs this automatically for any handler parameter of type
+ /// `AuthUser`, which keeps individual handlers free from repeated token
+ /// parsing logic.
+ async fn from_request_parts(
+ parts: &mut Parts,
+ state: &Arc,
+ ) -> Result {
let auth_header = parts
.headers
.get("Authorization")
@@ -41,7 +46,16 @@ impl FromRequestParts> for AuthUser {
}
}
-pub fn create_token(user_id: &str, email: &str, display_name: &str, secret: &str) -> Result {
+/// Create a signed JWT for a logged-in user.
+///
+/// The token expires after seven days and carries the small amount of identity
+/// data the server wants available on every request.
+pub fn create_token(
+ user_id: &str,
+ email: &str,
+ display_name: &str,
+ secret: &str,
+) -> Result {
let expiration = chrono::Utc::now()
.checked_add_signed(chrono::Duration::days(7))
.unwrap()
@@ -61,6 +75,7 @@ pub fn create_token(user_id: &str, email: &str, display_name: &str, secret: &str
)
}
+/// Decode and validate a previously issued JWT.
pub fn decode_token(token: &str, secret: &str) -> Result {
let token_data = decode::(
token,
diff --git a/server/src/middleware/mod.rs b/server/src/middleware/mod.rs
index 0e4a05d..70723e0 100644
--- a/server/src/middleware/mod.rs
+++ b/server/src/middleware/mod.rs
@@ -1 +1,3 @@
+//! Reusable request-processing layers shared across handlers.
+
pub mod auth;
diff --git a/server/src/models/mod.rs b/server/src/models/mod.rs
index db08f06..7a272bf 100644
--- a/server/src/models/mod.rs
+++ b/server/src/models/mod.rs
@@ -1,7 +1,14 @@
+//! Core data structures shared across the server.
+//!
+//! This file intentionally mixes database row types, HTTP payloads, WebSocket
+//! payloads, and a few helper functions so the rest of the codebase can import
+//! common shapes from one place.
+
use serde::{Deserialize, Serialize};
// ── Database models ──
+/// Row from the `users` table.
#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow)]
pub struct User {
pub id: String,
@@ -11,6 +18,7 @@ pub struct User {
pub created_at: String,
}
+/// Row from the `rooms` table.
#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow)]
pub struct Room {
pub id: String,
@@ -24,6 +32,7 @@ pub struct Room {
pub deleted_at: Option,
}
+/// Row from the `messages` table.
#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow)]
pub struct Message {
pub id: String,
@@ -35,8 +44,10 @@ pub struct Message {
pub is_ai: bool,
pub created_at: String,
pub ai_meta: Option,
+ pub hash: Option,
}
+/// Row from the `invites` table.
#[derive(Debug, Clone, Serialize, Deserialize, sqlx::FromRow)]
pub struct Invite {
pub id: String,
@@ -50,6 +61,7 @@ pub struct Invite {
// ── API request/response types ──
+/// JSON body expected by the registration endpoint.
#[derive(Debug, Deserialize)]
pub struct RegisterRequest {
pub email: String,
@@ -57,18 +69,21 @@ pub struct RegisterRequest {
pub display_name: String,
}
+/// JSON body expected by the login endpoint.
#[derive(Debug, Deserialize)]
pub struct LoginRequest {
pub email: String,
pub password: String,
}
+/// Standard auth response returned after login, registration, or profile update.
#[derive(Debug, Serialize)]
pub struct AuthResponse {
pub token: String,
pub user: UserPublic,
}
+/// Public user data safe to return to any authenticated client.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserPublic {
pub id: String,
@@ -76,8 +91,11 @@ pub struct UserPublic {
pub display_name: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub avatar_url: Option,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub nostr_pubkey: Option,
}
+/// JSON body used when a user creates a new chat room.
#[derive(Debug, Deserialize)]
pub struct CreateRoomRequest {
pub name: String,
@@ -90,10 +108,10 @@ pub struct CreateRoomRequest {
pub ai_name: String,
}
+/// Pick a friendly default AI display name when the creator does not specify one.
fn default_ai_name() -> String {
let names = [
- "Nova", "Atlas", "Sage", "Echo", "Pixel",
- "Cosmo", "Ember", "Flux", "Lyra", "Onyx",
+ "Nova", "Atlas", "Sage", "Echo", "Pixel", "Cosmo", "Ember", "Flux", "Lyra", "Onyx",
];
let idx = std::time::SystemTime::now()
.duration_since(std::time::UNIX_EPOCH)
@@ -102,10 +120,12 @@ fn default_ai_name() -> String {
names[idx].to_string()
}
+/// Default prompt that defines the AI assistant's behavior inside a room.
fn default_system_prompt() -> String {
- "You are a helpful AI assistant participating in a group chat. Be conversational, helpful, and concise. You can see messages from all participants. When mentioned with @ai, respond helpfully.\n\nYou have access to tools:\n- **brave_search**: Search the web for current information. Use this when asked about recent events, news, facts you're unsure about, or anything that needs up-to-date information.\n- **web_fetch**: Fetch and read the content of a web page. Use this when a user shares a URL and wants you to read/summarize it, or when you need more details from a search result.\n\nUse tools proactively when they would help answer the question better. You don't need to ask permission to use them.".to_string()
+ "You are a helpful AI assistant participating in a group chat. Be conversational, helpful, and concise. You can see messages from all participants. When mentioned with @ai, respond helpfully.\n\nYou have access to tools:\n- **web_search**: Search the web for current information. Use this when asked about recent events, news, facts you're unsure about, or anything that needs up-to-date information.\n- **web_fetch**: Fetch and read the content of a web page. Use this when a user shares a URL and wants you to read/summarize it, or when you need more details from a search result.\n\nUse tools proactively when they would help answer the question better. You don't need to ask permission to use them.".to_string()
}
+/// Full room payload returned to the client, including current members.
#[derive(Debug, Serialize)]
pub struct RoomResponse {
pub id: String,
@@ -119,6 +139,7 @@ pub struct RoomResponse {
pub members: Vec,
}
+/// JSON body for an email-based room invite.
#[derive(Debug, Deserialize)]
pub struct CreateInviteRequest {
pub room_id: String,
@@ -127,6 +148,7 @@ pub struct CreateInviteRequest {
// ── WebSocket event types ──
+/// Messages the browser can send over the WebSocket connection.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum WsClientMessage {
@@ -145,17 +167,14 @@ pub enum WsClientMessage {
Typing { room_id: String },
}
+/// Messages the server can push to browsers over the WebSocket connection.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum WsServerMessage {
#[serde(rename = "new_message")]
- NewMessage {
- message: MessagePayload,
- },
+ NewMessage { message: MessagePayload },
#[serde(rename = "ai_typing")]
- AiTyping {
- room_id: String,
- },
+ AiTyping { room_id: String },
#[serde(rename = "user_typing")]
UserTyping {
room_id: String,
@@ -163,21 +182,13 @@ pub enum WsServerMessage {
display_name: String,
},
#[serde(rename = "error")]
- Error {
- message: String,
- },
+ Error { message: String },
#[serde(rename = "joined")]
- Joined {
- room_id: String,
- },
+ Joined { room_id: String },
#[serde(rename = "room_deleted")]
- RoomDeleted {
- room_id: String,
- },
+ RoomDeleted { room_id: String },
#[serde(rename = "room_cleared")]
- RoomCleared {
- room_id: String,
- },
+ RoomCleared { room_id: String },
#[serde(rename = "ai_tool_usage")]
AiToolUsage {
room_id: String,
@@ -191,12 +202,10 @@ pub enum WsServerMessage {
delta: String,
},
#[serde(rename = "ai_stream_end")]
- AiStreamEnd {
- room_id: String,
- message_id: String,
- },
+ AiStreamEnd { room_id: String, message_id: String },
}
+/// Message shape sent to clients for history loading and live updates.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MessagePayload {
pub id: String,
@@ -212,17 +221,31 @@ pub struct MessagePayload {
#[serde(default)]
pub avatar_hash: String,
#[serde(skip_serializing_if = "Option::is_none")]
+ pub avatar_url: Option,
+ #[serde(skip_serializing_if = "Option::is_none")]
pub image_url: Option,
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub hash: Option,
}
/// Compute Gravatar-compatible MD5 hash from an email address.
pub fn gravatar_hash(email: &str) -> String {
- use md5::{Md5, Digest};
+ use md5::{Digest, Md5};
let normalized = email.trim().to_lowercase();
let result = Md5::digest(normalized.as_bytes());
format!("{:x}", result)
}
+/// Compute SHA-256 integrity hash from created_at timestamp + message content.
+pub fn message_hash(created_at: &str, content: &str) -> String {
+ use sha2::{Digest, Sha256};
+ let mut hasher = Sha256::new();
+ hasher.update(created_at.as_bytes());
+ hasher.update(content.as_bytes());
+ format!("{:x}", hasher.finalize())
+}
+
+/// Usage and tool metadata captured for AI-generated messages.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AiMeta {
pub model: String,
@@ -234,6 +257,7 @@ pub struct AiMeta {
pub tool_results: Option>,
}
+/// One tool invocation performed while generating an AI answer.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ToolResult {
pub tool: String,
@@ -243,6 +267,7 @@ pub struct ToolResult {
// ── Broadcast event (internal channel) ──
+/// Internal event sent through a Tokio broadcast channel to WebSocket tasks.
#[derive(Debug, Clone)]
pub struct BroadcastEvent {
pub room_id: String,
@@ -251,9 +276,10 @@ pub struct BroadcastEvent {
// ── JWT Claims ──
+/// Claims stored inside the server-issued JWT.
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
- pub sub: String, // user_id
+ pub sub: String, // user_id
pub email: String,
pub display_name: String,
pub exp: usize,
@@ -261,6 +287,7 @@ pub struct Claims {
// ── Pagination ──
+/// Common pagination parameters for message history endpoints.
#[derive(Debug, Deserialize)]
pub struct PaginationParams {
#[serde(default = "default_limit")]
@@ -271,3 +298,36 @@ pub struct PaginationParams {
fn default_limit() -> i64 {
50
}
+
+/// Hide placeholder `nostr:*` emails from normal client responses.
+pub fn public_email(email: &str) -> String {
+ if email.starts_with("nostr:") {
+ String::new()
+ } else {
+ email.to_string()
+ }
+}
+
+// ── Nostr auth types ──
+
+/// Response returned by the Nostr challenge endpoint.
+#[derive(Debug, Serialize)]
+pub struct NostrChallengeResponse {
+ pub challenge: String,
+}
+
+/// JSON body sent by the client when proving Nostr ownership.
+#[derive(Debug, Deserialize)]
+pub struct NostrVerifyRequest {
+ pub signed_event: String,
+ pub challenge: String,
+ pub profile_name: Option,
+ pub profile_picture: Option,
+}
+
+/// JSON body for inviting an already-known Nostr user into a room.
+#[derive(Debug, Deserialize)]
+pub struct NostrInviteRequest {
+ pub room_id: String,
+ pub nostr_pubkey: String,
+}
diff --git a/server/src/services/brave.rs b/server/src/services/brave.rs
index fd431f9..c2eceea 100644
--- a/server/src/services/brave.rs
+++ b/server/src/services/brave.rs
@@ -1,7 +1,10 @@
use serde::Deserialize;
+use crate::services::search::SearchResult;
+
const BRAVE_SEARCH_URL: &str = "https://api.search.brave.com/res/v1/web/search";
+/// Partial Brave API response containing only the fields this app needs.
#[derive(Debug, Deserialize)]
struct BraveResponse {
web: Option,
@@ -23,22 +26,9 @@ struct BraveResult {
extra_snippets: Option>,
}
-/// A simplified search result for consumption by the AI.
-#[derive(Debug)]
-pub struct SearchResult {
- pub title: String,
- pub url: String,
- pub description: String,
- pub age: Option,
-}
-
/// Search the web using the Brave Search API.
/// Returns a list of simplified search results.
-pub async fn search(
- query: &str,
- api_key: &str,
- count: u8,
-) -> Result, String> {
+pub async fn search(query: &str, api_key: &str, count: u8) -> Result, String> {
let count = count.clamp(1, 10);
let client = reqwest::Client::new();
@@ -91,21 +81,3 @@ pub async fn search(
Ok(results)
}
-
-/// Format search results into a readable string for the AI to consume.
-pub fn format_results(results: &[SearchResult]) -> String {
- if results.is_empty() {
- return "No search results found.".to_string();
- }
-
- let mut output = String::new();
- for (i, r) in results.iter().enumerate() {
- output.push_str(&format!("{}. {}\n", i + 1, r.title));
- output.push_str(&format!(" URL: {}\n", r.url));
- if let Some(age) = &r.age {
- output.push_str(&format!(" Age: {}\n", age));
- }
- output.push_str(&format!(" {}\n\n", r.description));
- }
- output
-}
diff --git a/server/src/services/fetch.rs b/server/src/services/fetch.rs
index 0227156..81ff54e 100644
--- a/server/src/services/fetch.rs
+++ b/server/src/services/fetch.rs
@@ -19,9 +19,29 @@ const STRIP_TAGS: &[&str] = &[
/// Block-level tags that should produce newlines in text output.
const BLOCK_TAGS: &[&str] = &[
- "p", "div", "h1", "h2", "h3", "h4", "h5", "h6", "li", "br", "tr",
- "blockquote", "pre", "section", "article", "main", "header",
- "dt", "dd", "figcaption", "table", "thead", "tbody",
+ "p",
+ "div",
+ "h1",
+ "h2",
+ "h3",
+ "h4",
+ "h5",
+ "h6",
+ "li",
+ "br",
+ "tr",
+ "blockquote",
+ "pre",
+ "section",
+ "article",
+ "main",
+ "header",
+ "dt",
+ "dd",
+ "figcaption",
+ "table",
+ "thead",
+ "tbody",
];
/// Fetch a URL and extract its text content.
diff --git a/server/src/services/mod.rs b/server/src/services/mod.rs
index c9e0c75..07cb773 100644
--- a/server/src/services/mod.rs
+++ b/server/src/services/mod.rs
@@ -1,3 +1,11 @@
+//! Integrations with external systems used by the chat server.
+//!
+//! These modules wrap search providers, web page fetching, and the OpenRouter
+//! chat completion API so the rest of the application can call them with simple
+//! Rust types.
+
pub mod brave;
pub mod fetch;
pub mod openrouter;
+pub mod search;
+pub mod tavily;
diff --git a/server/src/services/openrouter.rs b/server/src/services/openrouter.rs
index a1f44a6..b10e7fd 100644
--- a/server/src/services/openrouter.rs
+++ b/server/src/services/openrouter.rs
@@ -149,13 +149,13 @@ pub struct CompletionStats {
pub response_ms: u64,
}
-/// Build the tool definitions for brave_search and web_fetch.
+/// Build the tool definitions for web_search and web_fetch.
pub fn build_tools() -> Vec {
vec![
Tool {
r#type: "function".into(),
function: ToolFunction {
- name: "brave_search".into(),
+ name: "web_search".into(),
description: "Search the web for current information. Use this when users ask about recent events, need factual data you're unsure about, or want up-to-date information.".into(),
parameters: serde_json::json!({
"type": "object",
@@ -235,7 +235,9 @@ pub async fn chat_completion_stream(
{
Ok(r) => r,
Err(e) => {
- let _ = tx.send(StreamEvent::Error(format!("Request failed: {}", e))).await;
+ let _ = tx
+ .send(StreamEvent::Error(format!("Request failed: {}", e)))
+ .await;
return;
}
};
@@ -243,7 +245,12 @@ pub async fn chat_completion_stream(
if !response.status().is_success() {
let status = response.status();
let body = response.text().await.unwrap_or_default();
- let _ = tx.send(StreamEvent::Error(format!("OpenRouter error {}: {}", status, body))).await;
+ let _ = tx
+ .send(StreamEvent::Error(format!(
+ "OpenRouter error {}: {}",
+ status, body
+ )))
+ .await;
return;
}
@@ -264,7 +271,9 @@ pub async fn chat_completion_stream(
let bytes = match chunk_result {
Ok(b) => b,
Err(e) => {
- let _ = tx.send(StreamEvent::Error(format!("Stream error: {}", e))).await;
+ let _ = tx
+ .send(StreamEvent::Error(format!("Stream error: {}", e)))
+ .await;
return;
}
};
@@ -338,7 +347,10 @@ pub async fn chat_completion_stream(
tool_call_accum[idx].function.name.push_str(name);
}
if let Some(args) = &func.arguments {
- tool_call_accum[idx].function.arguments.push_str(args);
+ tool_call_accum[idx]
+ .function
+ .arguments
+ .push_str(args);
}
}
}
@@ -373,7 +385,11 @@ pub async fn chat_completion_stream(
// AI requested tool calls
let assistant_msg = ChatMessage {
role: "assistant".into(),
- content: if full_content.is_empty() { None } else { Some(Content::Text(full_content)) },
+ content: if full_content.is_empty() {
+ None
+ } else {
+ Some(Content::Text(full_content))
+ },
tool_calls: Some(tool_call_accum),
tool_call_id: None,
};
@@ -420,7 +436,9 @@ pub fn build_chat_history(
Content::Parts(vec![
ContentPart::Text { text },
ContentPart::ImageUrl {
- image_url: ImageUrlData { url: data_url.clone() },
+ image_url: ImageUrlData {
+ url: data_url.clone(),
+ },
},
])
} else {
diff --git a/server/src/services/search.rs b/server/src/services/search.rs
new file mode 100644
index 0000000..ed62969
--- /dev/null
+++ b/server/src/services/search.rs
@@ -0,0 +1,90 @@
+use serde::{Deserialize, Serialize};
+
+use super::{brave, tavily};
+
+/// Which search backend the AI tool layer should call.
+#[derive(Debug, Clone, Copy, PartialEq, Eq)]
+pub enum SearchProvider {
+ Tavily,
+ Brave,
+}
+
+impl SearchProvider {
+ /// Parse the `SEARCH_PROVIDER` environment variable into a supported variant.
+ pub fn from_env(value: Option<&str>) -> Result {
+ match value
+ .unwrap_or("tavily")
+ .trim()
+ .to_ascii_lowercase()
+ .as_str()
+ {
+ "tavily" => Ok(Self::Tavily),
+ "brave" => Ok(Self::Brave),
+ other => Err(format!(
+ "Unsupported SEARCH_PROVIDER '{}'. Expected 'tavily' or 'brave'.",
+ other
+ )),
+ }
+ }
+
+ /// Return the environment variable name required by the selected provider.
+ pub fn required_key_name(self) -> &'static str {
+ match self {
+ Self::Tavily => "TAVILY_API_KEY",
+ Self::Brave => "BRAVE_API_KEY",
+ }
+ }
+}
+
+/// Normalized search result shape shared across providers.
+#[derive(Debug, Clone, Serialize, Deserialize)]
+pub struct SearchResult {
+ pub title: String,
+ pub url: String,
+ pub description: String,
+ pub age: Option,
+}
+
+/// Dispatch a search request to whichever provider the server is configured to use.
+pub async fn search(
+ provider: SearchProvider,
+ query: &str,
+ tavily_api_key: Option<&str>,
+ brave_api_key: Option<&str>,
+ count: u8,
+) -> Result, String> {
+ match provider {
+ SearchProvider::Tavily => {
+ let api_key = tavily_api_key
+ .filter(|key| !key.is_empty())
+ .ok_or_else(|| "TAVILY_API_KEY is not configured".to_string())?;
+ tavily::search(query, api_key, count).await
+ }
+ SearchProvider::Brave => {
+ let api_key = brave_api_key
+ .filter(|key| !key.is_empty())
+ .ok_or_else(|| "BRAVE_API_KEY is not configured".to_string())?;
+ brave::search(query, api_key, count).await
+ }
+ }
+}
+
+/// Turn search results into plain text the AI model can read as tool output.
+pub fn format_results(results: &[SearchResult]) -> String {
+ if results.is_empty() {
+ return "No search results found.".to_string();
+ }
+
+ let mut output = String::new();
+ for (i, r) in results.iter().enumerate() {
+ output.push_str(&format!("{}. {}\n", i + 1, r.title));
+ if !r.url.is_empty() {
+ output.push_str(&format!(" URL: {}\n", r.url));
+ }
+ if let Some(age) = &r.age {
+ output.push_str(&format!(" Age: {}\n", age));
+ }
+ output.push_str(&format!(" {}\n\n", r.description));
+ }
+ output
+}
diff --git a/server/src/services/tavily.rs b/server/src/services/tavily.rs
new file mode 100644
index 0000000..ff2102f
--- /dev/null
+++ b/server/src/services/tavily.rs
@@ -0,0 +1,90 @@
+use serde::Deserialize;
+
+use crate::services::search::SearchResult;
+
+const TAVILY_SEARCH_URL: &str = "https://api.tavily.com/search";
+
+#[derive(Debug, Deserialize)]
+struct TavilyResponse {
+ #[serde(default)]
+    answer: Option<String>,
+ #[serde(default)]
+    results: Vec<TavilyResult>,
+}
+
+#[derive(Debug, Deserialize)]
+struct TavilyResult {
+ title: String,
+ url: String,
+ #[serde(default)]
+ content: String,
+ #[serde(default)]
+ #[serde(alias = "publishedDate")]
+    published_date: Option<String>,
+}
+
+pub async fn search(query: &str, api_key: &str, count: u8) -> Result<Vec<SearchResult>, String> {
+ let max_results = count.clamp(1, 10);
+ let client = reqwest::Client::new();
+
+ let response = client
+ .post(TAVILY_SEARCH_URL)
+ .header("Authorization", format!("Bearer {}", api_key))
+ .header("Content-Type", "application/json")
+ .json(&serde_json::json!({
+ "query": query,
+ "topic": "general",
+ "search_depth": "advanced",
+ "include_answer": true,
+ "include_raw_content": false,
+ "max_results": max_results,
+ }))
+ .send()
+ .await
+ .map_err(|e| format!("Tavily search request failed: {}", e))?;
+
+ if !response.status().is_success() {
+ let status = response.status();
+ let body = response.text().await.unwrap_or_default();
+ return Err(format!("Tavily search error {}: {}", status, body));
+ }
+
+ let tavily_response: TavilyResponse = response
+ .json()
+ .await
+ .map_err(|e| format!("Failed to parse Tavily response: {}", e))?;
+
+ let answer = tavily_response.answer.unwrap_or_default();
+    let mut results: Vec<SearchResult> = tavily_response
+ .results
+ .into_iter()
+ .map(|result| SearchResult {
+ title: result.title,
+ url: result.url,
+ description: result.content,
+ age: result.published_date,
+ })
+ .collect();
+
+ if !answer.is_empty() {
+ if let Some(first) = results.first_mut() {
+ if first.description.is_empty() {
+ first.description = format!("AI summary: {}", answer);
+ } else {
+ first.description = format!(
+ "AI summary: {}\nSource excerpt: {}",
+ answer, first.description
+ );
+ }
+ } else {
+ results.push(SearchResult {
+ title: "AI Summary".to_string(),
+ url: String::new(),
+ description: answer,
+ age: None,
+ });
+ }
+ }
+
+ Ok(results)
+}
diff --git a/server/uploads/avatars/17fc5479-fa75-4a91-818a-06e6ea01e689.png b/server/uploads/avatars/17fc5479-fa75-4a91-818a-06e6ea01e689.png
new file mode 100644
index 0000000..cfe1e6f
Binary files /dev/null and b/server/uploads/avatars/17fc5479-fa75-4a91-818a-06e6ea01e689.png differ