feat(ui): add Button, Modal, Spinner, Toast, and Tooltip components with styles
All checks were successful
Build and Release / build-and-release (push) Successful in 13m12s

- Implemented Button component with various props for customization.
- Created Modal component with header, content, and footer subcomponents.
- Added Spinner component for loading indicators.
- Developed Toast component for displaying notifications.
- Introduced Tooltip component for contextual hints with keyboard shortcuts.
- Added corresponding CSS modules for styling each component.
- Updated index file to export new components.
- Configured TypeScript settings for the UI package.
This commit is contained in:
Bryan1029384756
2026-04-14 09:02:14 -05:00
parent 9ef839938e
commit b7a4cf4ce8
376 changed files with 52619 additions and 167641 deletions

View File

@@ -17,13 +17,16 @@ import type * as dms from "../dms.js";
import type * as files from "../files.js";
import type * as gifs from "../gifs.js";
import type * as invites from "../invites.js";
import type * as links from "../links.js";
import type * as members from "../members.js";
import type * as messages from "../messages.js";
import type * as polls from "../polls.js";
import type * as presence from "../presence.js";
import type * as reactions from "../reactions.js";
import type * as readState from "../readState.js";
import type * as recovery from "../recovery.js";
import type * as roles from "../roles.js";
import type * as savedMedia from "../savedMedia.js";
import type * as serverSettings from "../serverSettings.js";
import type * as storageUrl from "../storageUrl.js";
import type * as typing from "../typing.js";
@@ -46,13 +49,16 @@ declare const fullApi: ApiFromModules<{
files: typeof files;
gifs: typeof gifs;
invites: typeof invites;
links: typeof links;
members: typeof members;
messages: typeof messages;
polls: typeof polls;
presence: typeof presence;
reactions: typeof reactions;
readState: typeof readState;
recovery: typeof recovery;
roles: typeof roles;
savedMedia: typeof savedMedia;
serverSettings: typeof serverSettings;
storageUrl: typeof storageUrl;
typing: typeof typing;

View File

@@ -208,6 +208,7 @@ export const getPublicKeys = query({
aboutMe: v.optional(v.string()),
customStatus: v.optional(v.string()),
joinSoundUrl: v.optional(v.union(v.string(), v.null())),
accentColor: v.optional(v.string()),
})
),
handler: async (ctx) => {
@@ -232,6 +233,7 @@ export const getPublicKeys = query({
aboutMe: u.aboutMe,
customStatus: u.customStatus,
joinSoundUrl,
accentColor: u.accentColor,
});
}
return results;
@@ -248,6 +250,7 @@ export const updateProfile = mutation({
customStatus: v.optional(v.string()),
joinSoundStorageId: v.optional(v.id("_storage")),
removeJoinSound: v.optional(v.boolean()),
accentColor: v.optional(v.string()),
},
returns: v.null(),
handler: async (ctx, args) => {
@@ -258,6 +261,7 @@ export const updateProfile = mutation({
if (args.customStatus !== undefined) patch.customStatus = args.customStatus;
if (args.joinSoundStorageId !== undefined) patch.joinSoundStorageId = args.joinSoundStorageId;
if (args.removeJoinSound) patch.joinSoundStorageId = undefined;
if (args.accentColor !== undefined) patch.accentColor = args.accentColor;
await ctx.db.patch(args.userId, patch);
return null;
},

View File

@@ -1,6 +1,78 @@
import { query, mutation } from "./_generated/server";
import { v } from "convex/values";
/**
* Rotate the symmetric key for a DM channel. Inserts a brand-new
* versioned row for each participant — existing rows are left alone
* so previously-encrypted messages remain decryptable.
*
* The caller proves they're a DM participant by passing their own
* userId; the server cross-checks against `dmParticipants` for the
* channel. Every recipient userId in `entries` must also be a
* participant — no leaking keys to random users.
*
* The new rows are tagged with `maxExistingVersion + 1`.
*/
export const rotateDMKey = mutation({
  args: {
    channelId: v.id("channels"),
    initiatorUserId: v.id("userProfiles"),
    entries: v.array(
      v.object({
        userId: v.id("userProfiles"),
        encryptedKeyBundle: v.string(),
      }),
    ),
  },
  returns: v.object({ keyVersion: v.number() }),
  handler: async (ctx, args) => {
    const channel = await ctx.db.get(args.channelId);
    if (!channel) throw new Error("Channel not found");
    if (channel.type !== "dm") {
      throw new Error("rotateDMKey is only supported for DM channels");
    }
    // Verify every (initiator + entries) userId is in dmParticipants.
    const participants = await ctx.db
      .query("dmParticipants")
      .withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
      .collect();
    const participantSet = new Set(participants.map((p) => p.userId as string));
    if (!participantSet.has(args.initiatorUserId as unknown as string)) {
      throw new Error("Not a participant in this DM");
    }
    for (const entry of args.entries) {
      if (!participantSet.has(entry.userId as unknown as string)) {
        throw new Error("Target userId is not a participant in this DM");
      }
    }
    // New rows go one above the current max keyVersion. The reduce is
    // seeded at 1 (not 0): when no channelKeys rows exist yet, legacy
    // messages are implicitly tagged version 1, so the first rotation
    // must mint version 2 — seeding at 0 would mint version 1 and
    // collide with those legacy messages.
    const existing = await ctx.db
      .query("channelKeys")
      .withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
      .collect();
    const maxVersion = existing.reduce(
      (m, k) => (k.keyVersion > m ? k.keyVersion : m),
      1,
    );
    const newVersion = maxVersion + 1;
    for (const entry of args.entries) {
      await ctx.db.insert("channelKeys", {
        channelId: args.channelId,
        userId: entry.userId,
        encryptedKeyBundle: entry.encryptedKeyBundle,
        keyVersion: newVersion,
      });
    }
    return { keyVersion: newVersion };
  },
});
// Batch upsert encrypted key bundles
export const uploadKeys = mutation({
args: {

View File

@@ -12,12 +12,20 @@ export const list = query({
emojis.map(async (emoji) => {
const src = await getPublicStorageUrl(ctx, emoji.storageId);
const user = await ctx.db.get(emoji.uploadedBy);
let avatarUrl: string | null = null;
if (user?.avatarStorageId) {
avatarUrl = await getPublicStorageUrl(ctx, user.avatarStorageId);
}
return {
_id: emoji._id,
name: emoji.name,
src,
createdAt: emoji.createdAt,
animated: emoji.animated ?? false,
uploadedById: emoji.uploadedBy,
uploadedByUsername: user?.username || "Unknown",
uploadedByDisplayName: user?.displayName || null,
uploadedByAvatarUrl: avatarUrl,
};
})
);
@@ -30,6 +38,7 @@ export const upload = mutation({
userId: v.id("userProfiles"),
name: v.string(),
storageId: v.id("_storage"),
animated: v.optional(v.boolean()),
},
returns: v.null(),
handler: async (ctx, args) => {
@@ -64,6 +73,7 @@ export const upload = mutation({
name,
storageId: args.storageId,
uploadedBy: args.userId,
animated: args.animated ?? false,
createdAt: Date.now(),
});
@@ -71,6 +81,53 @@ export const upload = mutation({
},
});
/**
* Rename a custom emoji in place. Enforces the same name validation
* as `upload` and rejects collisions with other existing emojis so
* two rows can't end up sharing a shortcode.
*/
/**
 * Rename a custom emoji in place. Applies the same shortcode rules as
 * `upload` and refuses names already taken by a different emoji row,
 * so two rows can never share a shortcode.
 */
export const rename = mutation({
  args: {
    userId: v.id("userProfiles"),
    emojiId: v.id("customEmojis"),
    name: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Gate on the manage_channels permission, same as upload/remove.
    const userRoles = await getRolesForUser(ctx, args.userId);
    const allowed = userRoles.some(
      (r) => (r.permissions as Record<string, boolean>)?.["manage_channels"]
    );
    if (!allowed) {
      throw new Error("You don't have permission to manage emojis");
    }
    const emoji = await ctx.db.get(args.emojiId);
    if (!emoji) throw new Error("Emoji not found");
    const name = args.name.trim();
    if (!/^[a-zA-Z0-9_]+$/.test(name)) {
      throw new Error("Emoji name can only contain letters, numbers, and underscores");
    }
    if (name.length < 2 || name.length > 32) {
      throw new Error("Emoji name must be between 2 and 32 characters");
    }
    // Only probe for shortcode collisions when the name actually changes.
    if (name !== emoji.name) {
      const taken = await ctx.db
        .query("customEmojis")
        .withIndex("by_name", (q) => q.eq("name", name))
        .first();
      if (taken && taken._id !== args.emojiId) {
        throw new Error(`A custom emoji named "${name}" already exists`);
      }
    }
    await ctx.db.patch(args.emojiId, { name });
    return null;
  },
});
export const remove = mutation({
args: {
userId: v.id("userProfiles"),

View File

@@ -3,41 +3,265 @@
import { action } from "./_generated/server";
import { v } from "convex/values";
// Search GIFs via the Klipy API
/**
* GIF search action — backed by the Klipy GIF API (Tenor's
* Google-shutdown replacement). Klipy embeds the customer id in the
* URL path and returns results under `data.data[]` with sized
* variants under `file.{hd|md|sm}.gif.url`. We normalize the response
* into a flat `{results: [{id, title, url, previewUrl, width, height}]}`
* shape so callers don't have to know which provider is upstream.
*
* Reads `KLIPY_API_KEY` from the Convex environment. Falls back to
* the legacy `TENOR_API_KEY` env var so existing deployments keep
* working as soon as the old key is replaced with a Klipy customer
* id — no `convex env set` rename required.
*/
/** Provider-agnostic GIF record handed to the client. */
interface NormalizedGif {
  id: string;
  title: string;
  // Full-size GIF URL; previewUrl is the smaller variant for grid cells.
  url: string;
  previewUrl: string;
  // Pixel dimensions, present only when the upstream response has them.
  width?: number;
  height?: number;
}

/** Common response envelope shared by `search` and `trending`. */
interface GifSearchResponse {
  results: NormalizedGif[];
}
/**
* In-memory TTL cache for Klipy responses. Convex Node actions run on
* warm container instances, so this survives between invocations on
* the same worker until it's recycled. Best-effort only — a cold
* start will re-fetch upstream. Search queries live for 5 minutes,
* trending / categories for 30 minutes since they change slowly.
*/
/** A cached value plus the wall-clock time (ms epoch) it expires at. */
interface CacheEntry<T> {
  value: T;
  expiresAt: number;
}

const CACHE = new Map<string, CacheEntry<unknown>>();
const MAX_CACHE_SIZE = 200;
const SEARCH_TTL_MS = 5 * 60 * 1000;
const TRENDING_TTL_MS = 30 * 60 * 1000;
const CATEGORIES_TTL_MS = 30 * 60 * 1000;

/** Look up `key`, lazily purging the entry if its TTL has elapsed. */
function cacheGet<T>(key: string): T | null {
  const hit = CACHE.get(key);
  if (hit === undefined) return null;
  const fresh = hit.expiresAt >= Date.now();
  if (!fresh) {
    CACHE.delete(key);
    return null;
  }
  return hit.value as T;
}

/** Store `value` for `ttlMs`, evicting the oldest entry when full. */
function cacheSet<T>(key: string, value: T, ttlMs: number): void {
  if (CACHE.size >= MAX_CACHE_SIZE) {
    // Maps iterate in insertion order, so the first key is the oldest.
    for (const oldestKey of CACHE.keys()) {
      CACHE.delete(oldestKey);
      break;
    }
  }
  CACHE.set(key, { value, expiresAt: Date.now() + ttlMs });
}
/**
 * Flatten Klipy's nested item shape into NormalizedGif records.
 * Prefers the `md` rendition for the full URL and `sm` for the
 * preview (each falling back across the other sizes), and drops any
 * item that ends up without a usable URL.
 */
function normalizeGifItems(items: any[]): NormalizedGif[] {
  const out: NormalizedGif[] = [];
  items.forEach((item, idx) => {
    const file = item?.file ?? {};
    const full = file?.md?.gif ?? file?.sm?.gif ?? file?.hd?.gif ?? {};
    const preview = file?.sm?.gif ?? file?.md?.gif ?? file?.hd?.gif ?? {};
    const fullUrl: string = full?.url ?? item?.url ?? "";
    const previewUrl: string = preview?.url ?? fullUrl;
    out.push({
      // Prefer the stable slug; fall back to id, then the array index.
      id: String(item?.slug ?? item?.id ?? `${idx}`),
      title: String(item?.title ?? ""),
      url: fullUrl,
      previewUrl,
      width: typeof full?.width === "number" ? full.width : undefined,
      height: typeof full?.height === "number" ? full.height : undefined,
    });
  });
  return out.filter((gif) => !!gif.url);
}
export const search = action({
args: {
q: v.string(),
limit: v.optional(v.number()),
},
returns: v.any(),
handler: async (_ctx, args) => {
const apiKey = process.env.TENOR_API_KEY;
handler: async (_ctx, args): Promise<GifSearchResponse> => {
const apiKey = process.env.KLIPY_API_KEY || process.env.TENOR_API_KEY;
if (!apiKey) {
console.warn("TENOR_API_KEY missing");
console.warn("KLIPY_API_KEY missing");
return { results: [] };
}
const limit = args.limit || 8;
const url = `https://tenor.googleapis.com/v2/search?q=${encodeURIComponent(args.q)}&key=${apiKey}&limit=${limit}`;
const limit = Math.min(Math.max(args.limit ?? 24, 1), 50);
const query = args.q.trim().toLowerCase();
const cacheKey = `search:${query}:${limit}`;
const cached = cacheGet<GifSearchResponse>(cacheKey);
if (cached) return cached;
// Klipy customer id goes in the path; per-page caps the result
// set without a separate `limit` query param.
const url = `https://api.klipy.com/api/v1/${encodeURIComponent(apiKey)}/gifs/search?q=${encodeURIComponent(args.q)}&per_page=${limit}&page=1&locale=en`;
let response: Response;
try {
response = await fetch(url, {
headers: {
Accept: "application/json",
"User-Agent":
"Brycord/1.0 (+https://brycord.com) Klipy-Client",
},
});
} catch (err) {
console.error("Klipy fetch error:", err);
return { results: [] };
}
const response = await fetch(url);
if (!response.ok) {
console.error("Tenor API Error:", response.statusText);
console.error("Klipy API error:", response.status, response.statusText);
return { results: [] };
}
return await response.json();
const json = (await response.json()) as any;
const items: any[] = Array.isArray(json?.data?.data) ? json.data.data : [];
const results = normalizeGifItems(items);
const payload: GifSearchResponse = { results };
cacheSet(cacheKey, payload, SEARCH_TTL_MS);
return payload;
},
});
/**
 * Trending GIFs — backs the picker's home feed when the search box is
 * empty. Shares the normalized GifSearchResponse shape with `search`
 * so the client renders both through a single code path. Responses
 * are cached per limit for TRENDING_TTL_MS; every failure mode
 * degrades to an empty list instead of throwing.
 */
export const trending = action({
  args: {
    limit: v.optional(v.number()),
  },
  returns: v.any(),
  handler: async (_ctx, args): Promise<GifSearchResponse> => {
    const apiKey = process.env.KLIPY_API_KEY || process.env.TENOR_API_KEY;
    if (!apiKey) return { results: [] };
    const requested = args.limit ?? 24;
    const limit = Math.min(Math.max(requested, 1), 50);
    const cacheKey = `trending:${limit}`;
    const hit = cacheGet<GifSearchResponse>(cacheKey);
    if (hit) return hit;
    const url = `https://api.klipy.com/api/v1/${encodeURIComponent(apiKey)}/gifs/trending?per_page=${limit}&page=1&locale=en`;
    let response: Response;
    try {
      response = await fetch(url, {
        headers: {
          Accept: "application/json",
          "User-Agent": "Brycord/1.0 (+https://brycord.com) Klipy-Client",
        },
      });
    } catch (err) {
      console.error("Klipy trending fetch error:", err);
      return { results: [] };
    }
    if (!response.ok) {
      console.error(
        "Klipy trending API error:",
        response.status,
        response.statusText,
      );
      return { results: [] };
    }
    const body = (await response.json()) as any;
    const rawItems: any[] = Array.isArray(body?.data?.data)
      ? body.data.data
      : [];
    const payload: GifSearchResponse = { results: normalizeGifItems(rawItems) };
    cacheSet(cacheKey, payload, TRENDING_TTL_MS);
    return payload;
  },
});
/**
* Trending categories — Klipy exposes `/categories` returning a list
* of slugs the picker can show as quick-search chips. Normalized
* into `{categories: [{name, image, query}]}` so the consumer
* doesn't depend on the upstream shape.
*/
/** One quick-search chip: display name, thumbnail URL, and the search term it triggers. */
interface NormalizedCategory {
  name: string;
  image: string;
  query: string;
}
export const categories = action({
args: {},
returns: v.any(),
handler: async () => {
// Return static categories (same as the JSON file in backend)
// These are loaded from the frontend data file
return { categories: [] };
handler: async (): Promise<{ categories: NormalizedCategory[] }> => {
const apiKey = process.env.KLIPY_API_KEY || process.env.TENOR_API_KEY;
if (!apiKey) {
return { categories: [] };
}
const cacheKey = `categories`;
const cached = cacheGet<{ categories: NormalizedCategory[] }>(cacheKey);
if (cached) return cached;
const url = `https://api.klipy.com/api/v1/${encodeURIComponent(apiKey)}/gifs/categories?locale=en`;
let response: Response;
try {
response = await fetch(url, {
headers: {
Accept: "application/json",
"User-Agent":
"Brycord/1.0 (+https://brycord.com) Klipy-Client",
},
});
} catch (err) {
console.error("Klipy categories fetch error:", err);
return { categories: [] };
}
if (!response.ok) {
console.error(
"Klipy categories API error:",
response.status,
response.statusText,
);
return { categories: [] };
}
const json = (await response.json()) as any;
const items: any[] = Array.isArray(json?.data?.data)
? json.data.data
: Array.isArray(json?.data)
? json.data
: [];
const categories: NormalizedCategory[] = items
.map((item) => ({
name: String(item?.name ?? item?.title ?? ""),
image: String(item?.image ?? item?.preview ?? ""),
query: String(item?.query ?? item?.search_term ?? item?.name ?? ""),
}))
.filter((c) => !!c.query);
const payload = { categories };
cacheSet(cacheKey, payload, CATEGORIES_TTL_MS);
return payload;
},
});

116
convex/links.ts Normal file
View File

@@ -0,0 +1,116 @@
"use node";
import { action } from "./_generated/server";
import { v } from "convex/values";
export const fetchPreview = action({
  args: { url: v.string() },
  returns: v.union(
    v.object({
      url: v.string(),
      title: v.optional(v.string()),
      description: v.optional(v.string()),
      image: v.optional(v.string()),
      siteName: v.optional(v.string()),
    }),
    v.null(),
  ),
  /**
   * Fetch up to 512 KB of a page and scrape og:/twitter:/<title>
   * metadata for a link-preview card. Returns null on any failure
   * (invalid or internal URL, non-HTML, timeout, no usable tags).
   */
  handler: async (_ctx, args) => {
    try {
      // Validate URL + prevent loopback SSRF
      const u = new URL(args.url);
      if (u.protocol !== "http:" && u.protocol !== "https:") return null;
      // Reject literal loopback / link-local / RFC1918 hosts so this
      // action can't be aimed at internal services. NOTE(review):
      // this checks the literal hostname only — a public DNS name
      // that resolves to an internal address is not caught here.
      const host = u.hostname.toLowerCase();
      if (
        host === "localhost" ||
        host === "0.0.0.0" ||
        host === "::1" ||
        host === "[::1]" ||
        /^127\./.test(host) ||
        /^10\./.test(host) ||
        /^192\.168\./.test(host) ||
        /^169\.254\./.test(host) ||
        /^172\.(1[6-9]|2[0-9]|3[01])\./.test(host)
      ) {
        return null;
      }
      // 8s budget; `finally` guarantees the timer is cleared even
      // when fetch rejects (previously it leaked on fetch errors).
      const controller = new AbortController();
      const timeout = setTimeout(() => controller.abort(), 8000);
      let res: Response;
      try {
        res = await fetch(u.toString(), {
          method: "GET",
          headers: {
            // Discordbot User-Agent — a lot of sites (YouTube included)
            // only emit og: metadata when they recognise a known crawler,
            // and the generic Brycord UA gets routed to consent / interstitial
            // pages that never include the tags we're after.
            "User-Agent":
              "Mozilla/5.0 (compatible; Discordbot/2.0; +https://discordapp.com)",
            Accept:
              "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
            "Accept-Language": "en-US,en;q=0.9",
          },
          signal: controller.signal,
          redirect: "follow",
        });
      } finally {
        clearTimeout(timeout);
      }
      if (!res.ok) return null;
      const contentType = res.headers.get("content-type") || "";
      if (!contentType.includes("text/html")) return null;
      // Read up to 512 KB so giant pages don't DOS the action
      const reader = res.body?.getReader();
      if (!reader) return null;
      const chunks: Uint8Array[] = [];
      let total = 0;
      const MAX = 512 * 1024;
      while (total < MAX) {
        const { value, done } = await reader.read();
        if (done) break;
        if (value) {
          chunks.push(value);
          total += value.length;
        }
      }
      // Best-effort: stop the stream; some bodies are already closed.
      try { await reader.cancel(); } catch {}
      const merged = new Uint8Array(total);
      let offset = 0;
      for (const c of chunks) {
        merged.set(c, offset);
        offset += c.length;
      }
      const html = new TextDecoder("utf-8").decode(merged);
      // Parse OG / twitter / <title> tags with regex — no DOM in Node
      const pick = (re: RegExp): string | undefined => {
        const m = html.match(re);
        return m ? decodeEntities(m[1].trim()) : undefined;
      };
      const title =
        pick(/<meta[^>]+property=["']og:title["'][^>]+content=["']([^"']+)["']/i) ??
        pick(/<meta[^>]+name=["']twitter:title["'][^>]+content=["']([^"']+)["']/i) ??
        pick(/<title[^>]*>([^<]+)<\/title>/i);
      const description =
        pick(/<meta[^>]+property=["']og:description["'][^>]+content=["']([^"']+)["']/i) ??
        pick(/<meta[^>]+name=["']twitter:description["'][^>]+content=["']([^"']+)["']/i) ??
        pick(/<meta[^>]+name=["']description["'][^>]+content=["']([^"']+)["']/i);
      let image =
        pick(/<meta[^>]+property=["']og:image(?::secure_url)?["'][^>]+content=["']([^"']+)["']/i) ??
        pick(/<meta[^>]+name=["']twitter:image(?::src)?["'][^>]+content=["']([^"']+)["']/i);
      const siteName =
        pick(/<meta[^>]+property=["']og:site_name["'][^>]+content=["']([^"']+)["']/i);
      // Resolve relative image URLs
      if (image) {
        try {
          image = new URL(image, u).toString();
        } catch {}
      }
      if (!title && !description && !image) return null;
      return { url: u.toString(), title, description, image, siteName };
    } catch {
      return null;
    }
  },
});
/**
 * Minimal HTML entity decoder for scraped meta-tag content.
 *
 * `&amp;` is decoded LAST: decoding it first (as the previous
 * version did) turns already-escaped text like "&amp;lt;" into
 * "&lt;" and then into "<" — a double decode. Hex numeric entities
 * (&#x27;) are handled alongside decimal ones.
 */
function decodeEntities(s: string): string {
  return s
    .replace(/&lt;/g, "<")
    .replace(/&gt;/g, ">")
    .replace(/&quot;/g, '"')
    .replace(/&#39;/g, "'")
    .replace(/&nbsp;/g, " ")
    .replace(/&#x([0-9a-fA-F]+);/g, (_, n) => String.fromCodePoint(parseInt(n, 16)))
    .replace(/&#(\d+);/g, (_, n) => String.fromCodePoint(Number(n)))
    .replace(/&amp;/g, "&");
}

View File

@@ -4,6 +4,8 @@ import { v } from "convex/values";
import { getPublicStorageUrl } from "./storageUrl";
import { getRolesForUser } from "./roles";
const DEFAULT_ROLE_COLOR = "#99aab5";
async function enrichMessage(ctx: any, msg: any, userId?: any) {
const sender = await ctx.db.get(msg.senderId);
@@ -12,19 +14,112 @@ async function enrichMessage(ctx: any, msg: any, userId?: any) {
avatarUrl = await getPublicStorageUrl(ctx, sender.avatarStorageId);
}
// Highest-position role with a non-default colour — mirrors how
// Discord tints usernames in chat. The Owner role is deliberately
// skipped so owners fall through to the next non-default role's
// colour (per product decision: we don't want every owner's name
// to glow in the bootstrap pink). Default grey (`#99aab5`) is
// treated as "no colour" so regular users fall back to
// `--text-primary`.
let senderRoleColor: string | null = null;
try {
const senderRoleDocs = await ctx.db
.query("userRoles")
.withIndex("by_user", (q: any) => q.eq("userId", msg.senderId))
.collect();
let best: { position: number; color: string } | null = null;
for (const ur of senderRoleDocs) {
const role = await ctx.db.get(ur.roleId);
if (!role) continue;
if ((role as any).name === "Owner") continue;
const color = (role as any).color as string | undefined;
const position = (role as any).position ?? 0;
if (!color || color.toLowerCase() === DEFAULT_ROLE_COLOR) continue;
if (!best || position > best.position) {
best = { position, color };
}
}
senderRoleColor = best?.color ?? null;
} catch {
senderRoleColor = null;
}
const reactionDocs = await ctx.db
.query("messageReactions")
.withIndex("by_message", (q: any) => q.eq("messageId", msg._id))
.collect();
const reactions: Record<string, { count: number; me: boolean }> = {};
// Accumulate into a Map so we don't use emoji surrogates as object
// field names — Convex's return-value validator rejects non-ASCII
// field names, which is what caused "Field name 👍 has invalid
// character" errors on any channel with a unicode reaction.
//
// For each emoji we also collect a capped list of reactor profiles
// (up to MAX_REACTION_USERS) so the client can render the hover
// tooltip + the full reactions modal without a second round-trip.
// Reactor profiles are cached per-message so the same user picked
// for multiple emojis only costs one db lookup.
const MAX_REACTION_USERS = 100;
const profileCache = new Map<
string,
{ userId: string; username: string; displayName: string | null }
>();
const resolveProfile = async (reactorUserId: any) => {
const key = String(reactorUserId);
const cached = profileCache.get(key);
if (cached) return cached;
const profile = await ctx.db.get(reactorUserId);
const shaped = {
userId: key,
username: profile?.username || "Unknown",
displayName: profile?.displayName || null,
};
profileCache.set(key, shaped);
return shaped;
};
interface ReactionAccumulator {
count: number;
me: boolean;
users: Array<{
userId: string;
username: string;
displayName: string | null;
}>;
}
const reactionMap = new Map<string, ReactionAccumulator>();
for (const r of reactionDocs) {
const entry = (reactions[r.emoji] ??= { count: 0, me: false });
let entry = reactionMap.get(r.emoji);
if (!entry) {
entry = { count: 0, me: false, users: [] };
reactionMap.set(r.emoji, entry);
}
entry.count++;
if (userId && r.userId === userId) {
entry.me = true;
}
if (entry.users.length < MAX_REACTION_USERS) {
entry.users.push(await resolveProfile(r.userId));
}
}
const reactions: Array<{
emoji: string;
count: number;
me: boolean;
users: Array<{
userId: string;
username: string;
displayName: string | null;
}>;
}> = [];
reactionMap.forEach((info, emoji) => {
reactions.push({
emoji,
count: info.count,
me: info.me,
users: info.users,
});
});
let replyToUsername: string | null = null;
let replyToDisplayName: string | null = null;
@@ -58,7 +153,8 @@ async function enrichMessage(ctx: any, msg: any, userId?: any) {
displayName: sender?.displayName || null,
public_signing_key: sender?.publicSigningKey || "",
avatarUrl,
reactions: Object.keys(reactions).length > 0 ? reactions : null,
senderRoleColor,
reactions: reactions.length > 0 ? reactions : null,
replyToId: msg.replyTo || null,
replyToUsername,
replyToDisplayName,
@@ -92,6 +188,43 @@ export const list = query({
},
});
/**
 * Fetch the most recent messages for several channels in one query.
 * SearchPanel uses this to scan every readable channel without
 * mounting one useQuery hook per channel (React errors on hook-count
 * drift between renders).
 *
 * `perChannelLimit` is clamped to [1, 200] server-side so the payload
 * stays bounded no matter what the client asks for.
 */
export const searchScan = query({
  args: {
    channelIds: v.array(v.id("channels")),
    perChannelLimit: v.optional(v.number()),
    userId: v.optional(v.id("userProfiles")),
  },
  returns: v.any(),
  handler: async (ctx, args) => {
    const perChannel = Math.min(Math.max(args.perChannelLimit ?? 100, 1), 200);
    const results: Array<{ channelId: string; messages: any[] }> = [];
    for (const channelId of args.channelIds) {
      const recent = await ctx.db
        .query("messages")
        .withIndex("by_channel", (q) => q.eq("channelId", channelId))
        .order("desc")
        .take(perChannel);
      const messages = await Promise.all(
        recent.map((m) => enrichMessage(ctx, m, args.userId)),
      );
      results.push({ channelId, messages });
    }
    return results;
  },
});
export const send = mutation({
args: {
channelId: v.id("channels"),

358
convex/polls.ts Normal file
View File

@@ -0,0 +1,358 @@
import { mutation, query } from "./_generated/server";
import { v } from "convex/values";
// One selectable option; `id` is supplied by the caller of `create`
// and is what vote rows reference.
const pollOptionValidator = v.object({
  id: v.string(),
  text: v.string(),
});

// Full poll document shape as stored in the `polls` table.
const pollDocValidator = v.object({
  _id: v.id("polls"),
  _creationTime: v.number(),
  channelId: v.id("channels"),
  createdBy: v.id("userProfiles"),
  question: v.string(),
  options: v.array(pollOptionValidator),
  allowMultiple: v.boolean(),
  disclosed: v.boolean(),
  closed: v.boolean(),
  closesAt: v.optional(v.number()),
  createdAt: v.number(),
});

// One aggregated reaction row; `me` is true when the requesting user
// has reacted with this emoji.
const pollReactionValidator = v.object({
  emoji: v.string(),
  count: v.number(),
  me: v.boolean(),
});

// Return shape of `get`: the poll plus tallies and the caller's vote.
const pollResultsValidator = v.object({
  poll: pollDocValidator,
  totals: v.record(v.string(), v.number()),
  totalVotes: v.number(),
  myVote: v.union(v.array(v.string()), v.null()),
  // Aggregated as an array — arrays keep emoji out of object field
  // names (Convex's return-value validator rejects non-ASCII fields,
  // same issue we hit on messages.list).
  reactions: v.array(pollReactionValidator),
});
export const create = mutation({
  args: {
    channelId: v.id("channels"),
    createdBy: v.id("userProfiles"),
    question: v.string(),
    options: v.array(pollOptionValidator),
    allowMultiple: v.boolean(),
    disclosed: v.boolean(),
    closesAt: v.optional(v.number()),
  },
  returns: v.id("polls"),
  /**
   * Create a poll in a channel. Trims the question and option text,
   * drops blank options, and enforces 2–20 options with unique ids.
   */
  handler: async (ctx, args) => {
    const question = args.question.trim();
    if (question.length === 0) {
      throw new Error("Poll question cannot be empty");
    }
    if (question.length > 500) {
      throw new Error("Poll question is too long");
    }
    // Trim option text and silently drop options left empty.
    const cleanOptions: Array<{ id: string; text: string }> = [];
    for (const option of args.options) {
      const text = option.text.trim();
      if (text.length > 0) {
        cleanOptions.push({ id: option.id, text });
      }
    }
    if (cleanOptions.length < 2) {
      throw new Error("Polls need at least 2 options");
    }
    if (cleanOptions.length > 20) {
      throw new Error("Polls support at most 20 options");
    }
    // Option ids must be unique so vote diffing is unambiguous.
    const distinctIds = new Set(cleanOptions.map((o) => o.id));
    if (distinctIds.size !== cleanOptions.length) {
      throw new Error("Duplicate option id");
    }
    return await ctx.db.insert("polls", {
      channelId: args.channelId,
      createdBy: args.createdBy,
      question,
      options: cleanOptions,
      allowMultiple: args.allowMultiple,
      disclosed: args.disclosed,
      closed: false,
      closesAt: args.closesAt,
      createdAt: Date.now(),
    });
  },
});
/**
 * Cast or replace a user's vote. Submitted option ids are validated
 * against the poll and de-duplicated: previously ["a","a"] was stored
 * verbatim and `get` tallied +1 per occurrence, letting a voter
 * stuff the ballot on multi-choice polls. One row per
 * (pollId, userId) — re-voting overwrites the earlier selection.
 */
export const vote = mutation({
  args: {
    pollId: v.id("polls"),
    userId: v.id("userProfiles"),
    optionIds: v.array(v.string()),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const poll = await ctx.db.get(args.pollId);
    if (!poll) throw new Error("Poll not found");
    if (poll.closed) throw new Error("Poll is closed");
    if (poll.closesAt && poll.closesAt < Date.now()) {
      throw new Error("Poll has expired");
    }
    // Validate the submitted option ids exist on the poll.
    const validIds = new Set(poll.options.map((o) => o.id));
    for (const id of args.optionIds) {
      if (!validIds.has(id)) {
        throw new Error(`Unknown option id "${id}"`);
      }
    }
    // Drop duplicates (order preserved) BEFORE the count checks so
    // duplicated ids can't inflate tallies or slip past allowMultiple.
    const optionIds = [...new Set(args.optionIds)];
    if (optionIds.length === 0) {
      throw new Error("Select at least one option");
    }
    if (!poll.allowMultiple && optionIds.length > 1) {
      throw new Error("This poll only allows one answer");
    }
    // Upsert: one row per (pollId, userId).
    const existing = await ctx.db
      .query("pollVotes")
      .withIndex("by_poll_and_user", (q) =>
        q.eq("pollId", args.pollId).eq("userId", args.userId),
      )
      .unique();
    if (existing) {
      await ctx.db.patch(existing._id, {
        optionIds,
        votedAt: Date.now(),
      });
    } else {
      await ctx.db.insert("pollVotes", {
        pollId: args.pollId,
        userId: args.userId,
        optionIds,
        votedAt: Date.now(),
      });
    }
    return null;
  },
});
/** Withdraw the caller's vote, if any. No-op when they haven't voted. */
export const clearVote = mutation({
  args: {
    pollId: v.id("polls"),
    userId: v.id("userProfiles"),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const row = await ctx.db
      .query("pollVotes")
      .withIndex("by_poll_and_user", (q) =>
        q.eq("pollId", args.pollId).eq("userId", args.userId),
      )
      .unique();
    if (row) {
      await ctx.db.delete(row._id);
    }
    return null;
  },
});
/** Close voting on a poll. Creator-only; closing twice is a no-op. */
export const close = mutation({
  args: {
    pollId: v.id("polls"),
    userId: v.id("userProfiles"),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const poll = await ctx.db.get(args.pollId);
    if (!poll) throw new Error("Poll not found");
    if (poll.createdBy !== args.userId) {
      throw new Error("Only the poll creator can close it");
    }
    if (!poll.closed) {
      await ctx.db.patch(args.pollId, { closed: true });
    }
    return null;
  },
});
/**
 * Delete a poll along with all of its vote and reaction rows.
 * Creator-only; deleting an already-gone poll is a silent no-op.
 */
export const remove = mutation({
  args: {
    pollId: v.id("polls"),
    userId: v.id("userProfiles"),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const poll = await ctx.db.get(args.pollId);
    if (!poll) return null;
    if (poll.createdBy !== args.userId) {
      throw new Error("Only the poll creator can delete it");
    }
    // Cascade: purge dependent rows first, then the poll doc itself.
    const voteRows = await ctx.db
      .query("pollVotes")
      .withIndex("by_poll", (q) => q.eq("pollId", args.pollId))
      .collect();
    await Promise.all(voteRows.map((row) => ctx.db.delete(row._id)));
    const reactionRows = await ctx.db
      .query("pollReactions")
      .withIndex("by_poll", (q) => q.eq("pollId", args.pollId))
      .collect();
    await Promise.all(reactionRows.map((row) => ctx.db.delete(row._id)));
    await ctx.db.delete(args.pollId);
    return null;
  },
});
/** All polls in a channel, newest first. */
export const listByChannel = query({
  args: {
    channelId: v.id("channels"),
  },
  returns: v.array(pollDocValidator),
  handler: async (ctx, args) =>
    await ctx.db
      .query("polls")
      .withIndex("by_channel", (q) => q.eq("channelId", args.channelId))
      .order("desc")
      .collect(),
});
/**
 * Poll results for display: the poll doc, per-option vote totals,
 * the total number of voters, the caller's own selection (when
 * `userId` is passed), and aggregated reactions. Returns null when
 * the poll doesn't exist.
 */
export const get = query({
  args: {
    pollId: v.id("polls"),
    userId: v.optional(v.id("userProfiles")),
  },
  returns: v.union(pollResultsValidator, v.null()),
  handler: async (ctx, args) => {
    const poll = await ctx.db.get(args.pollId);
    if (!poll) return null;
    const votes = await ctx.db
      .query("pollVotes")
      .withIndex("by_poll", (q) => q.eq("pollId", args.pollId))
      .collect();
    // Tally totals per option. Voters that picked multiple options
    // each contribute a +1 to every option they picked.
    const totals: Record<string, number> = {};
    for (const opt of poll.options) {
      totals[opt.id] = 0;
    }
    for (const vote of votes) {
      for (const id of vote.optionIds) {
        // Skip ids that no longer exist on the poll's option list.
        if (totals[id] !== undefined) {
          totals[id] += 1;
        }
      }
    }
    // The caller's own selection, or null when anonymous / not voted.
    let myVote: string[] | null = null;
    if (args.userId) {
      const mine = votes.find((v) => v.userId === args.userId);
      myVote = mine ? mine.optionIds : null;
    }
    // Aggregate reactions into an array of {emoji, count, me} rows.
    // Using a Map avoids putting unicode surrogates into object field
    // names, which Convex's return-value validator would reject.
    const reactionDocs = await ctx.db
      .query("pollReactions")
      .withIndex("by_poll", (q) => q.eq("pollId", args.pollId))
      .collect();
    const reactionMap = new Map<string, { count: number; me: boolean }>();
    for (const r of reactionDocs) {
      let entry = reactionMap.get(r.emoji);
      if (!entry) {
        entry = { count: 0, me: false };
        reactionMap.set(r.emoji, entry);
      }
      entry.count++;
      if (args.userId && r.userId === args.userId) {
        entry.me = true;
      }
    }
    const reactions: Array<{ emoji: string; count: number; me: boolean }> = [];
    reactionMap.forEach((info, emoji) => {
      reactions.push({ emoji, count: info.count, me: info.me });
    });
    // totalVotes counts voters (rows), not individual option picks.
    return {
      poll,
      totals,
      totalVotes: votes.length,
      myVote,
      reactions,
    };
  },
});
/**
 * Toggle-add a reaction on a poll. Idempotent per (pollId, userId,
 * emoji) — re-adding the same reaction is a no-op.
 */
export const addReaction = mutation({
  args: {
    pollId: v.id("polls"),
    userId: v.id("userProfiles"),
    emoji: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const { pollId, userId, emoji } = args;
    // The by_poll_user_emoji index makes the triple unique in
    // practice; only insert when no row exists yet.
    const alreadyReacted = await ctx.db
      .query("pollReactions")
      .withIndex("by_poll_user_emoji", (q) =>
        q.eq("pollId", pollId).eq("userId", userId).eq("emoji", emoji),
      )
      .unique();
    if (alreadyReacted === null) {
      await ctx.db.insert("pollReactions", { pollId, userId, emoji });
    }
    return null;
  },
});
/**
 * Remove a reaction on a poll. No-op if the user hasn't reacted
 * with that emoji.
 */
export const removeReaction = mutation({
  args: {
    pollId: v.id("polls"),
    userId: v.id("userProfiles"),
    emoji: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const { pollId, userId, emoji } = args;
    const reaction = await ctx.db
      .query("pollReactions")
      .withIndex("by_poll_user_emoji", (q) =>
        q.eq("pollId", pollId).eq("userId", userId).eq("emoji", emoji),
      )
      .unique();
    if (reaction !== null) {
      await ctx.db.delete(reaction._id);
    }
    return null;
  },
});

View File

@@ -77,6 +77,12 @@ export const update = mutation({
handler: async (ctx, args) => {
const role = await ctx.db.get(args.id);
if (!role) throw new Error("Role not found");
// Owner is frozen — we can't let a client rename/recolour it
// or strip its permissions, since that would defeat the
// assign/unassign guards in one shot.
if (role.name === "Owner") {
throw new Error("The Owner role can't be edited.");
}
const { id, ...fields } = args;
const updates: Record<string, unknown> = {};
@@ -92,6 +98,33 @@ export const update = mutation({
},
});
/**
 * Batch-reorder roles from a list of `{id, position}` pairs, sent by
 * the Roles & Permissions settings surface after a drag-and-drop.
 * Owner and @everyone are refused so their positions stay pinned at
 * the natural extremes of the list.
 */
export const reorder = mutation({
  args: {
    updates: v.array(
      v.object({
        id: v.id("roles"),
        position: v.number(),
      }),
    ),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    for (const update of args.updates) {
      const role = await ctx.db.get(update.id);
      // Silently skip rows that vanished and the two pinned
      // system roles.
      if (!role) continue;
      if (role.name === "Owner" || role.name === "@everyone") continue;
      await ctx.db.patch(update.id, { position: update.position });
    }
    return null;
  },
});
// Delete role
export const remove = mutation({
args: { id: v.id("roles") },
@@ -99,6 +132,12 @@ export const remove = mutation({
handler: async (ctx, args) => {
const role = await ctx.db.get(args.id);
if (!role) throw new Error("Role not found");
if (role.name === "Owner") {
throw new Error("The Owner role can't be deleted.");
}
if (role.name === "@everyone") {
throw new Error("The @everyone role can't be deleted.");
}
const assignments = await ctx.db
.query("userRoles")
@@ -139,6 +178,16 @@ export const assign = mutation({
},
returns: v.object({ success: v.boolean() }),
handler: async (ctx, args) => {
// Owner is immutable — it's granted once during first-user
// bootstrap (convex/auth.ts) and the app never reassigns it. The
// UI already hides it from the ManageRoles checkbox list, but we
// also reject it server-side so a crafted client can't sneak it
// onto another user.
const role = await ctx.db.get(args.roleId);
if (role?.name === "Owner") {
throw new Error("The Owner role can't be assigned.");
}
const existing = await ctx.db
.query("userRoles")
.withIndex("by_user_and_role", (q) =>
@@ -165,6 +214,13 @@ export const unassign = mutation({
},
returns: v.object({ success: v.boolean() }),
handler: async (ctx, args) => {
// Owner is immutable — see `assign` above. Removing it would
// leave the server without a permission-bearing admin.
const role = await ctx.db.get(args.roleId);
if (role?.name === "Owner") {
throw new Error("The Owner role can't be removed.");
}
const existing = await ctx.db
.query("userRoles")
.withIndex("by_user_and_role", (q) =>

113
convex/savedMedia.ts Normal file
View File

@@ -0,0 +1,113 @@
import { mutation, query } from "./_generated/server";
import { v } from "convex/values";
// Shape of a `savedMedia` row as returned to clients (used as the
// `returns` validator of `list` below). Mirrors the `savedMedia`
// table in convex/schema.ts plus the Convex system fields `_id`
// and `_creationTime`.
const savedMediaValidator = v.object({
  _id: v.id("savedMedia"),
  _creationTime: v.number(),
  userId: v.id("userProfiles"),
  // Convex storage URL — also the per-user dedupe key.
  url: v.string(),
  kind: v.string(), // 'image' | 'video' | 'audio' (see schema comment)
  filename: v.string(),
  mimeType: v.optional(v.string()),
  width: v.optional(v.number()),
  height: v.optional(v.number()),
  size: v.optional(v.number()),
  // Per-file AES key + IV kept so the attachment metadata can be
  // re-posted later without re-uploading the encrypted file.
  encryptionKey: v.string(),
  encryptionIv: v.string(),
  savedAt: v.number(),
});
/**
 * Save (favorite) an attachment to the user's media library. Idempotent
 * per (userId, url) — re-saving the same media just updates the
 * existing row's filename / metadata in case it changed.
 */
export const save = mutation({
  args: {
    userId: v.id("userProfiles"),
    url: v.string(),
    kind: v.string(),
    filename: v.string(),
    mimeType: v.optional(v.string()),
    width: v.optional(v.number()),
    height: v.optional(v.number()),
    size: v.optional(v.number()),
    encryptionKey: v.string(),
    encryptionIv: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    // Metadata shared by both the refresh and insert paths.
    const metadata = {
      kind: args.kind,
      filename: args.filename,
      mimeType: args.mimeType,
      width: args.width,
      height: args.height,
      size: args.size,
      encryptionKey: args.encryptionKey,
      encryptionIv: args.encryptionIv,
    };
    const existing = await ctx.db
      .query("savedMedia")
      .withIndex("by_user_and_url", (q) =>
        q.eq("userId", args.userId).eq("url", args.url),
      )
      .unique();
    if (existing) {
      // Refresh metadata only; the original savedAt is preserved.
      await ctx.db.patch(existing._id, metadata);
    } else {
      await ctx.db.insert("savedMedia", {
        userId: args.userId,
        url: args.url,
        ...metadata,
        savedAt: Date.now(),
      });
    }
    return null;
  },
});
/**
 * Remove a saved-media entry by (userId, url). No-op if not present.
 */
export const remove = mutation({
  args: {
    userId: v.id("userProfiles"),
    url: v.string(),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const row = await ctx.db
      .query("savedMedia")
      .withIndex("by_user_and_url", (q) =>
        q.eq("userId", args.userId).eq("url", args.url),
      )
      .unique();
    if (row !== null) {
      await ctx.db.delete(row._id);
    }
    return null;
  },
});
/**
 * List the user's saved media in reverse-chron order (newest first).
 */
export const list = query({
  args: {
    userId: v.id("userProfiles"),
  },
  returns: v.array(savedMediaValidator),
  handler: async (ctx, args) => {
    return await ctx.db
      .query("savedMedia")
      .withIndex("by_user", (q) => q.eq("userId", args.userId))
      .order("desc")
      .collect();
  },
});

View File

@@ -17,6 +17,7 @@ export default defineSchema({
aboutMe: v.optional(v.string()),
customStatus: v.optional(v.string()),
joinSoundStorageId: v.optional(v.id("_storage")),
accentColor: v.optional(v.string()),
}).index("by_username", ["username"]),
categories: defineTable({
@@ -142,8 +143,71 @@ export default defineSchema({
name: v.string(),
storageId: v.id("_storage"),
uploadedBy: v.id("userProfiles"),
// `true` for animated (GIF / APNG) uploads so the settings UI
// can split Static vs Animated in separate sections. Optional
// so existing rows without the flag still validate — they
// surface as static in the UI by default.
animated: v.optional(v.boolean()),
createdAt: v.number(),
}).index("by_name", ["name"])
.index("by_uploader", ["uploadedBy"]),
// Channel polls. Options live inline on the poll row; votes and
// emoji reactions are stored in separate tables keyed by pollId.
polls: defineTable({
  channelId: v.id("channels"),
  createdBy: v.id("userProfiles"),
  question: v.string(),
  // Answer choices; `id` is a string key referenced by
  // pollVotes.optionIds when tallying.
  options: v.array(
    v.object({
      id: v.string(),
      text: v.string(),
    }),
  ),
  // When true, a voter may select more than one option.
  allowMultiple: v.boolean(),
  disclosed: v.boolean(),
  closed: v.boolean(),
  // Optional scheduled close time — presumably ms since epoch,
  // matching createdAt; TODO confirm against the writer.
  closesAt: v.optional(v.number()),
  createdAt: v.number(),
})
  .index("by_channel", ["channelId"])
  .index("by_creator", ["createdBy"]),
// One row per (poll, voter); optionIds holds every option the
// voter picked.
pollVotes: defineTable({
  pollId: v.id("polls"),
  userId: v.id("userProfiles"),
  optionIds: v.array(v.string()),
  votedAt: v.number(),
})
  .index("by_poll", ["pollId"])
  .index("by_poll_and_user", ["pollId", "userId"]),
// One row per (poll, user, emoji); polls.addReaction keeps the
// triple unique via the by_poll_user_emoji index.
pollReactions: defineTable({
  pollId: v.id("polls"),
  userId: v.id("userProfiles"),
  emoji: v.string(),
})
  .index("by_poll", ["pollId"])
  .index("by_poll_user_emoji", ["pollId", "userId", "emoji"])
  .index("by_user", ["userId"]),
// Per-user bookmarked attachments (see convex/savedMedia.ts).
savedMedia: defineTable({
  userId: v.id("userProfiles"),
  // Convex storage URL — also the dedupe key for a single user.
  url: v.string(),
  kind: v.string(), // 'image' | 'video' | 'audio'
  filename: v.string(),
  mimeType: v.optional(v.string()),
  width: v.optional(v.number()),
  height: v.optional(v.number()),
  size: v.optional(v.number()),
  // Re-post path: keep the per-file AES key + iv so the same
  // attachment metadata can be embedded in a future message
  // without re-uploading. The file stays encrypted in storage —
  // saving just bookmarks the metadata.
  encryptionKey: v.string(),
  encryptionIv: v.string(),
  savedAt: v.number(),
})
  .index("by_user", ["userId"])
  .index("by_user_and_url", ["userId", "url"]),
});

View File

@@ -2,9 +2,24 @@
import { action } from "./_generated/server";
import { v } from "convex/values";
import { AccessToken } from "livekit-server-sdk";
import { AccessToken, RoomServiceClient } from "livekit-server-sdk";
// Generate LiveKit token for voice channel
/**
* Generate a LiveKit join token for a voice channel.
*
* LiveKit servers running with `room.auto_create: false` reject joins for
* rooms that don't already exist — the client gets a 404 "requested
* room does not exist" back from the /rtc/v1/validate endpoint. To
* make this deployment-agnostic, we pre-create the room via the
* LiveKit Server SDK before minting the token. When auto-create is
* enabled the `createRoom` call is idempotent (409 Conflict is
* swallowed silently), so the same code path works on both
* configurations.
*
* Requires `LIVEKIT_URL` (or the frontend's `VITE_LIVEKIT_URL` as a
* fallback) in the Convex environment so the RoomServiceClient
* can talk to the LiveKit API.
*/
export const getToken = action({
args: {
channelId: v.string(),
@@ -15,6 +30,46 @@ export const getToken = action({
handler: async (_ctx, args) => {
const apiKey = process.env.LIVEKIT_API_KEY || "devkey";
const apiSecret = process.env.LIVEKIT_API_SECRET || "secret";
const livekitUrl =
process.env.LIVEKIT_URL || process.env.VITE_LIVEKIT_URL || "";
// Ensure the room exists. The LiveKit API accepts `http(s)` URLs
// for the management endpoint, but the frontend connect URL is a
// `wss://` — swap the scheme when needed.
if (livekitUrl) {
const httpUrl = livekitUrl
.replace(/^wss:\/\//i, "https://")
.replace(/^ws:\/\//i, "http://");
try {
const roomService = new RoomServiceClient(httpUrl, apiKey, apiSecret);
await roomService.createRoom({
name: args.channelId,
// Empty rooms auto-destroy after 5 minutes with no participants,
// matching LiveKit's own default so stale rooms from a crashed
// client don't pile up forever.
emptyTimeout: 5 * 60,
// 50 participants is plenty for a voice channel in this
// single-server deployment and keeps any runaway join loop
// from hitting the global limit.
maxParticipants: 50,
});
} catch (err: any) {
// 409 / "already exists" is expected when a room has already
// been created by an earlier join — swallow it and continue.
const message = String(err?.message ?? err ?? "");
const status = err?.status ?? err?.statusCode;
const alreadyExists =
status === 409 ||
/already exists/i.test(message) ||
/AlreadyExists/i.test(message);
if (!alreadyExists) {
// Non-fatal: log and fall through to token generation. If the
// real issue was misconfiguration the client will surface the
// 404 it already does.
console.warn("LiveKit createRoom failed:", message);
}
}
}
const at = new AccessToken(apiKey, apiSecret, {
identity: args.userId,