Mega push vol 5, working on messaging now

This commit is contained in:
AlacrisDevs
2026-02-07 01:31:55 +02:00
parent d8bbfd9dc3
commit e55881b38b
77 changed files with 8478 additions and 1554 deletions

38
src/lib/api/activity.ts Normal file
View File

@@ -0,0 +1,38 @@
import type { SupabaseClient } from '@supabase/supabase-js';
import type { Database, Json } from '$lib/supabase/types';
import { createLogger } from '$lib/utils/logger';
const log = createLogger('api.activity');
/** Verbs recorded in the activity log. */
export type ActivityAction = 'create' | 'update' | 'delete' | 'move' | 'rename';
/** Entity kinds an activity entry can reference. */
export type EntityType = 'document' | 'folder' | 'kanban_board' | 'kanban_card' | 'kanban_column' | 'member' | 'role' | 'invite';
/** Input for logActivity. */
interface LogActivityParams {
  orgId: string; // organization the activity belongs to
  userId: string; // actor performing the action
  action: ActivityAction;
  entityType: EntityType;
  entityId?: string; // id of the affected entity, if any
  entityName?: string; // display-name snapshot taken at the time of the action
  metadata?: Record<string, unknown>; // extra structured context, stored as JSON
}
/**
 * Record one entry in the org's activity log.
 *
 * Best-effort by design: activity logging must never block or fail the main
 * action, so both Supabase error results AND thrown exceptions (e.g. network
 * or transport failures from the insert itself) are reduced to warnings.
 * The original only handled the error result — a rejected insert would have
 * propagated to the caller.
 */
export async function logActivity(
  supabase: SupabaseClient<Database>,
  params: LogActivityParams
): Promise<void> {
  try {
    const { error } = await supabase.from('activity_log').insert({
      org_id: params.orgId,
      user_id: params.userId,
      action: params.action,
      entity_type: params.entityType,
      entity_id: params.entityId ?? null,
      entity_name: params.entityName ?? null,
      metadata: (params.metadata ?? {}) as Json,
    });
    if (error) {
      // Activity logging should never block the main action — just warn
      log.warn('Failed to log activity', { error: { message: error.message } });
    }
  } catch (e: unknown) {
    // insert() can reject outright (network errors) — swallow those too
    const message = e instanceof Error ? e.message : String(e);
    log.warn('Failed to log activity', { error: { message } });
  }
}

View File

@@ -0,0 +1,90 @@
import { describe, it, expect } from 'vitest';
import { getMonthDays, isSameDay, formatTime } from './calendar';
describe('getMonthDays', () => {
  // January 2024 (year 2024, month index 0) serves as the reference month.
  it('returns exactly 42 days (6 weeks grid)', () => {
    const grid = getMonthDays(2024, 0);
    expect(grid).toHaveLength(42);
  });
  it('first day of grid is a Monday', () => {
    const grid = getMonthDays(2024, 0);
    // Date#getDay(): 0 = Sunday, 1 = Monday
    expect(grid[0].getDay()).toBe(1);
  });
  it('contains all days of the target month', () => {
    // February 2024 is a leap-year month with 29 days.
    const grid = getMonthDays(2024, 1);
    const inFebruary = grid.filter((day) => day.getMonth() === 1 && day.getFullYear() === 2024);
    expect(inFebruary).toHaveLength(29);
  });
  it('contains all days of a 31-day month', () => {
    const grid = getMonthDays(2024, 2); // March 2024
    const inMarch = grid.filter((day) => day.getMonth() === 2 && day.getFullYear() === 2024);
    expect(inMarch).toHaveLength(31);
  });
  it('pads with previous month days at the start', () => {
    // January 2024 begins on a Monday, so the grid opens on Jan 1st itself.
    const grid = getMonthDays(2024, 0);
    expect(grid[0].getMonth()).toBe(0);
    expect(grid[0].getDate()).toBe(1);
  });
  it('pads with next month days at the end', () => {
    const grid = getMonthDays(2024, 0);
    // The 42-cell grid overruns January, so the final cell falls in February.
    const lastCell = grid[grid.length - 1];
    expect(lastCell.getMonth()).toBe(1);
  });
  it('handles December correctly (month 11)', () => {
    const grid = getMonthDays(2024, 11);
    expect(grid).toHaveLength(42);
    const inDecember = grid.filter((day) => day.getMonth() === 11 && day.getFullYear() === 2024);
    expect(inDecember).toHaveLength(31);
  });
});
describe('isSameDay', () => {
  it('returns true for same date', () => {
    // Different times of day on the same calendar date still match.
    expect(isSameDay(new Date(2024, 5, 15, 10, 30), new Date(2024, 5, 15, 22, 0))).toBe(true);
  });
  it('returns false for different days', () => {
    expect(isSameDay(new Date(2024, 5, 15), new Date(2024, 5, 16))).toBe(false);
  });
  it('returns false for different months', () => {
    expect(isSameDay(new Date(2024, 5, 15), new Date(2024, 6, 15))).toBe(false);
  });
  it('returns false for different years', () => {
    expect(isSameDay(new Date(2024, 5, 15), new Date(2025, 5, 15))).toBe(false);
  });
});
describe('formatTime', () => {
it('returns a string with hours and minutes', () => {
const date = new Date(2024, 0, 1, 14, 30);
const result = formatTime(date);
// Format varies by locale, but should contain "30" for minutes
expect(result).toContain('30');
expect(result.length).toBeGreaterThan(0);
});
});

View File

@@ -27,14 +27,7 @@ export async function getLockInfo(
const { data: lock } = await supabase
.from('document_locks')
.select(`
id,
document_id,
user_id,
locked_at,
last_heartbeat,
profiles:user_id (full_name, email)
`)
.select('id, document_id, user_id, locked_at, last_heartbeat')
.eq('document_id', documentId)
.gt('last_heartbeat', cutoff)
.single();
@@ -43,11 +36,23 @@ export async function getLockInfo(
return { isLocked: false, lockedBy: null, lockedByName: null, isOwnLock: false };
}
const profile = (lock as any).profiles; // join type not inferred by Supabase
// Fetch profile separately — document_locks.user_id FK points to auth.users, not profiles
let lockedByName = 'Someone';
if (lock.user_id) {
const { data: profile } = await supabase
.from('profiles')
.select('full_name, email')
.eq('id', lock.user_id)
.single();
if (profile) {
lockedByName = profile.full_name || profile.email || 'Someone';
}
}
return {
isLocked: true,
lockedBy: lock.user_id,
lockedByName: profile?.full_name || profile?.email || 'Someone',
lockedByName,
isOwnLock: lock.user_id === currentUserId,
};
}

View File

@@ -0,0 +1,132 @@
import { describe, it, expect, vi } from 'vitest';
import { createDocument, updateDocument, deleteDocument, moveDocument, copyDocument, fetchDocuments } from './documents';
// Lightweight Supabase mock builder.
// Every chainable method returns the same object so arbitrary query chains
// work; terminal calls (.single(), the trailing .order(), and the
// delete/update terminators) resolve with the canned response.
// Fix: removed the unused `originalDelete` local (dead code).
function mockSupabase(response: { data?: unknown; error?: unknown }) {
  const chain: Record<string, unknown> = {};
  const methods = ['from', 'select', 'insert', 'update', 'delete', 'eq', 'in', 'order', 'single'];
  for (const m of methods) {
    chain[m] = vi.fn().mockReturnValue(chain);
  }
  // Terminal calls resolve the response
  chain['single'] = vi.fn().mockResolvedValue(response);
  chain['order'] = vi.fn().mockReturnValue({ ...chain, order: vi.fn().mockResolvedValue(response) });
  // For delete → eq chain (no .select().single())
  const eqAfterDelete = vi.fn().mockResolvedValue(response);
  chain['delete'] = vi.fn().mockReturnValue({ eq: eqAfterDelete, in: vi.fn().mockResolvedValue(response) });
  // For update → eq (moveDocument has no .select().single())
  chain['update'] = vi.fn().mockReturnValue({ ...chain, eq: vi.fn().mockReturnValue({ select: vi.fn().mockReturnValue({ single: vi.fn().mockResolvedValue(response) }), ...response }) });
  return chain as any;
}
/** Mock client whose terminal calls resolve successfully with `data`. */
function mockSupabaseSuccess(data: unknown) {
  return mockSupabase({ error: null, data });
}
/** Mock client whose terminal calls resolve with a Supabase-style error. */
function mockSupabaseError(message: string) {
  return mockSupabase({ data: null, error: { code: 'ERROR', message } });
}
// Canonical document row used as a fixture across the suites below.
const fakeDoc = {
  id: 'doc-1',
  org_id: 'org-1',
  name: 'Test Doc',
  type: 'document' as const,
  parent_id: null, // top-level, i.e. not inside a folder
  path: null,
  position: 0,
  content: { type: 'doc', content: [] }, // empty rich-text body
  created_by: 'user-1',
  created_at: '2024-01-01',
  updated_at: '2024-01-01',
};
// NOTE: these assertions spy on mockSupabase's internal chain (sb.from etc.),
// so the code is left exactly as-is and only comments are added.
describe('createDocument', () => {
  it('creates a document with default content for type "document"', async () => {
    const sb = mockSupabaseSuccess(fakeDoc);
    const result = await createDocument(sb, 'org-1', 'Test Doc', 'document', null, 'user-1');
    expect(result).toEqual(fakeDoc);
    // Insert must target the documents table.
    expect(sb.from).toHaveBeenCalledWith('documents');
  });
  it('creates a folder with null content', async () => {
    // Folders carry no rich-text body, so content stays null.
    const folderDoc = { ...fakeDoc, type: 'folder', content: null };
    const sb = mockSupabaseSuccess(folderDoc);
    const result = await createDocument(sb, 'org-1', 'Folder', 'folder', null, 'user-1');
    expect(result.type).toBe('folder');
  });
  it('creates a kanban document with custom id and content', async () => {
    // Kanban docs pass an explicit id/content through the options argument.
    const kanbanDoc = { ...fakeDoc, id: 'board-1', type: 'kanban', content: { type: 'kanban', board_id: 'board-1' } };
    const sb = mockSupabaseSuccess(kanbanDoc);
    const result = await createDocument(
      sb, 'org-1', 'Board', 'kanban', null, 'user-1',
      { id: 'board-1', content: { type: 'kanban', board_id: 'board-1' } },
    );
    expect(result.id).toBe('board-1');
  });
  it('throws on Supabase error', async () => {
    const sb = mockSupabaseError('insert failed');
    // The raw Supabase error object is rethrown as-is.
    await expect(createDocument(sb, 'org-1', 'Fail', 'document', null, 'user-1'))
      .rejects.toEqual({ message: 'insert failed', code: 'ERROR' });
  });
});
describe('copyDocument', () => {
  it('appends " (copy)" to the document name', async () => {
    const duplicated = { ...fakeDoc, id: 'doc-2', name: 'Test Doc (copy)' };
    const client = mockSupabaseSuccess(duplicated);
    const copied = await copyDocument(client, fakeDoc, 'org-1', 'user-1');
    expect(copied.name).toBe('Test Doc (copy)');
  });
  it('throws on Supabase error', async () => {
    const client = mockSupabaseError('copy failed');
    await expect(copyDocument(client, fakeDoc, 'org-1', 'user-1')).rejects.toEqual({
      message: 'copy failed',
      code: 'ERROR',
    });
  });
});
describe('deleteDocument', () => {
  it('calls delete with correct id', async () => {
    const client = mockSupabase({ data: null, error: null });
    await deleteDocument(client, 'doc-1');
    // Verify the delete went through the documents table.
    expect(client.from).toHaveBeenCalledWith('documents');
    expect(client.delete).toHaveBeenCalled();
  });
  it('throws on Supabase error', async () => {
    const client = mockSupabase({ data: null, error: { message: 'delete failed', code: 'ERROR' } });
    await expect(deleteDocument(client, 'doc-1')).rejects.toEqual({
      message: 'delete failed',
      code: 'ERROR',
    });
  });
});
describe('fetchDocuments', () => {
  // fetchDocuments chains .from().select().eq().order().order(), so the mock
  // needs a two-level order chain; the second .order() resolves the response.
  function mockFetchChain(response: { data: unknown; error: unknown }) {
    const finalOrder = vi.fn().mockResolvedValue(response);
    const firstOrder = vi.fn().mockReturnValue({ order: finalOrder });
    const eq = vi.fn().mockReturnValue({ order: firstOrder });
    const select = vi.fn().mockReturnValue({ eq });
    return { from: vi.fn().mockReturnValue({ select }) } as any;
  }
  it('returns documents array on success', async () => {
    const docs = [fakeDoc];
    const sb = mockFetchChain({ data: docs, error: null });
    const result = await fetchDocuments(sb, 'org-1');
    expect(result).toEqual(docs);
    expect(sb.from).toHaveBeenCalledWith('documents');
  });
  it('throws on Supabase error', async () => {
    const sb = mockFetchChain({ data: null, error: { message: 'fetch failed' } });
    await expect(fetchDocuments(sb, 'org-1')).rejects.toEqual({ message: 'fetch failed' });
  });
});

View File

@@ -27,19 +27,26 @@ export async function createDocument(
supabase: SupabaseClient<Database>,
orgId: string,
name: string,
type: 'folder' | 'document',
type: 'folder' | 'document' | 'kanban',
parentId: string | null = null,
userId: string
userId: string,
options?: { id?: string; content?: import('$lib/supabase/types').Json }
): Promise<Document> {
let content: import('$lib/supabase/types').Json | null = options?.content ?? null;
if (!content && type === 'document') {
content = { type: 'doc', content: [] };
}
const { data, error } = await supabase
.from('documents')
.insert({
...(options?.id ? { id: options.id } : {}),
org_id: orgId,
name,
type,
parent_id: parentId,
created_by: userId,
content: type === 'document' ? { type: 'doc', content: [] } : null
content,
})
.select()
.single();
@@ -99,6 +106,33 @@ export async function moveDocument(
}
/**
 * Duplicate an existing document row. The copy keeps the original's type,
 * parent and content, gets " (copy)" appended to its name, and is attributed
 * to `userId`. Returns the newly inserted row; rethrows Supabase errors.
 */
export async function copyDocument(
  supabase: SupabaseClient<Database>,
  doc: Pick<Document, 'name' | 'type' | 'parent_id' | 'content'>,
  orgId: string,
  userId: string
): Promise<Document> {
  const insertRow = {
    org_id: orgId,
    name: `${doc.name} (copy)`,
    type: doc.type,
    parent_id: doc.parent_id,
    created_by: userId,
    content: doc.content,
  };
  const { data, error } = await supabase.from('documents').insert(insertRow).select().single();
  if (error) {
    log.error('copyDocument failed', { error, data: { orgId, name: doc.name } });
    throw error;
  }
  log.info('copyDocument ok', { data: { id: data.id, name: data.name } });
  return data;
}
export function subscribeToDocuments(
supabase: SupabaseClient<Database>,
orgId: string,

View File

@@ -0,0 +1,218 @@
import { GoogleAuth } from 'google-auth-library';
import { createLogger } from '$lib/utils/logger';
const log = createLogger('api.google-calendar-push');
const CALENDAR_API_BASE = 'https://www.googleapis.com/calendar/v3';
const SCOPES = ['https://www.googleapis.com/auth/calendar.events'];
/**
* Google Calendar push integration via Service Account.
*
* Setup:
* 1. Create a service account in Google Cloud Console
* 2. Download the JSON key file
* 3. Set GOOGLE_SERVICE_ACCOUNT_KEY env var to the JSON string (or base64-encoded)
* 4. Share the Google Calendar with the service account email (give "Make changes to events" permission)
*/
/** Relevant fields from a Google service-account JSON key file. */
interface ServiceAccountCredentials {
  client_email: string; // the address the target calendar must be shared with (see setup notes above)
  private_key: string; // PEM private key used by google-auth-library to sign token requests
  project_id?: string; // present in key files but not needed for auth here
}
let cachedAuth: GoogleAuth | null = null;
/**
 * Parse the GOOGLE_SERVICE_ACCOUNT_KEY value into service-account credentials.
 *
 * Accepts either the raw JSON key-file contents or the same JSON
 * base64-encoded (convenient for env vars).
 *
 * @throws when the value is neither valid JSON nor base64-encoded JSON, or
 *   when the parsed object lacks `client_email`/`private_key` — failing fast
 *   here beats a confusing authentication error later.
 */
function getServiceAccountCredentials(
  keyJson: string
): { client_email: string; private_key: string; project_id?: string } {
  let parsed: Record<string, unknown>;
  try {
    // Most common case: the env var holds the raw JSON key file.
    parsed = JSON.parse(keyJson);
  } catch {
    // Fallback: the env var holds the JSON base64-encoded.
    try {
      parsed = JSON.parse(Buffer.from(keyJson, 'base64').toString('utf-8'));
    } catch {
      throw new Error('GOOGLE_SERVICE_ACCOUNT_KEY must be valid JSON or base64-encoded JSON');
    }
  }
  // Validate the two fields every downstream consumer depends on.
  if (typeof parsed?.client_email !== 'string' || typeof parsed?.private_key !== 'string') {
    throw new Error('GOOGLE_SERVICE_ACCOUNT_KEY is missing client_email or private_key');
  }
  return parsed as { client_email: string; private_key: string; project_id?: string };
}
function getAuth(keyJson: string): GoogleAuth {
if (cachedAuth) return cachedAuth;
const credentials = getServiceAccountCredentials(keyJson);
cachedAuth = new GoogleAuth({
credentials: {
client_email: credentials.client_email,
private_key: credentials.private_key,
},
scopes: SCOPES,
});
return cachedAuth;
}
/** Obtain a bearer token for the Calendar API from the service account. */
async function getAccessToken(keyJson: string): Promise<string> {
  const client = await getAuth(keyJson).getClient();
  const tokenResponse = await client.getAccessToken();
  // getAccessToken() may yield the token bare or wrapped in an object.
  const token = typeof tokenResponse === 'string' ? tokenResponse : tokenResponse?.token;
  if (!token) {
    throw new Error('Failed to get access token from service account');
  }
  return token;
}
/**
 * Extract the service-account email from the key, or null when the key
 * cannot be parsed — useful for showing users which address to share
 * their calendar with (see setup notes above).
 */
export function getServiceAccountEmail(keyJson: string): string | null {
  try {
    const { client_email } = getServiceAccountCredentials(keyJson);
    return client_email;
  } catch {
    return null;
  }
}
/**
 * Fetch events from a Google Calendar using the service account.
 * The calendar does not need to be public — sharing it with the service
 * account is enough. Returns the raw event items from the API.
 */
export async function fetchCalendarEventsViaServiceAccount(
  keyJson: string,
  calendarId: string,
  timeMin: Date,
  timeMax: Date
): Promise<unknown[]> {
  const token = await getAccessToken(keyJson);
  const query = new URLSearchParams({
    timeMin: timeMin.toISOString(),
    timeMax: timeMax.toISOString(),
    singleEvents: 'true',
    orderBy: 'startTime',
    maxResults: '250',
  });
  const url = `${CALENDAR_API_BASE}/calendars/${encodeURIComponent(calendarId)}/events?${query}`;
  const response = await fetch(url, { headers: { Authorization: `Bearer ${token}` } });
  if (!response.ok) {
    const errorText = await response.text();
    log.error('Failed to fetch calendar events via service account', {
      error: errorText,
      data: { calendarId },
    });
    throw new Error(`Google Calendar API error (${response.status}): ${errorText}`);
  }
  const body = await response.json();
  return body.items ?? [];
}
/**
 * Event body sent to Google Calendar for create/update pushes.
 * start/end carry either `dateTime` (timed event) or `date` (all-day) —
 * presumably matching the Calendar API events resource; confirm against
 * the API reference if extending.
 */
export interface GoogleEventPayload {
  summary: string;
  description?: string | null;
  start: { dateTime?: string; date?: string; timeZone?: string };
  end: { dateTime?: string; date?: string; timeZone?: string };
  colorId?: string; // Google's predefined event color index
}
/**
 * Create an event in the given Google Calendar.
 * @returns the ID Google assigned to the new event.
 * @throws on any non-OK API response (after logging the error body).
 */
export async function pushEventToGoogle(
  keyJson: string,
  calendarId: string,
  event: GoogleEventPayload
): Promise<string> {
  const token = await getAccessToken(keyJson);
  const url = `${CALENDAR_API_BASE}/calendars/${encodeURIComponent(calendarId)}/events`;
  const response = await fetch(url, {
    method: 'POST',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(event),
  });
  if (!response.ok) {
    const errorText = await response.text();
    log.error('Failed to create Google Calendar event', { error: errorText, data: { calendarId } });
    throw new Error(`Google Calendar API error (${response.status}): ${errorText}`);
  }
  const created = await response.json();
  log.info('Created Google Calendar event', { data: { googleEventId: created.id, calendarId } });
  return created.id;
}
/**
 * Update an existing event in Google Calendar (full replacement via PUT).
 * @throws on any non-OK API response (after logging the error body).
 */
export async function updateGoogleEvent(
  keyJson: string,
  calendarId: string,
  googleEventId: string,
  event: GoogleEventPayload
): Promise<void> {
  const token = await getAccessToken(keyJson);
  const url = `${CALENDAR_API_BASE}/calendars/${encodeURIComponent(calendarId)}/events/${encodeURIComponent(googleEventId)}`;
  const response = await fetch(url, {
    method: 'PUT',
    headers: {
      Authorization: `Bearer ${token}`,
      'Content-Type': 'application/json',
    },
    body: JSON.stringify(event),
  });
  if (!response.ok) {
    const errorText = await response.text();
    log.error('Failed to update Google Calendar event', { error: errorText, data: { calendarId, googleEventId } });
    throw new Error(`Google Calendar API error (${response.status}): ${errorText}`);
  }
  log.info('Updated Google Calendar event', { data: { googleEventId, calendarId } });
}
/**
 * Delete an event from Google Calendar.
 * A 410 Gone response means the event was already removed and is treated
 * as success; any other non-OK response is logged and thrown.
 */
export async function deleteGoogleEvent(
  keyJson: string,
  calendarId: string,
  googleEventId: string
): Promise<void> {
  const token = await getAccessToken(keyJson);
  const url = `${CALENDAR_API_BASE}/calendars/${encodeURIComponent(calendarId)}/events/${encodeURIComponent(googleEventId)}`;
  const response = await fetch(url, {
    method: 'DELETE',
    headers: { Authorization: `Bearer ${token}` },
  });
  const alreadyGone = response.status === 410;
  if (!response.ok && !alreadyGone) {
    const errorText = await response.text();
    log.error('Failed to delete Google Calendar event', { error: errorText, data: { calendarId, googleEventId } });
    throw new Error(`Google Calendar API error (${response.status}): ${errorText}`);
  }
  log.info('Deleted Google Calendar event', { data: { googleEventId, calendarId } });
}

View File

@@ -0,0 +1,61 @@
import { describe, it, expect } from 'vitest';
import { extractCalendarId, getCalendarSubscribeUrl } from './google-calendar';
describe('extractCalendarId', () => {
  it('returns null for empty input', () => {
    expect(extractCalendarId('')).toBeNull();
  });
  it('returns email-style calendar ID as-is', () => {
    expect(extractCalendarId('user@gmail.com')).toBe('user@gmail.com');
  });
  it('trims whitespace from email-style IDs', () => {
    expect(extractCalendarId(' user@gmail.com ')).toBe('user@gmail.com');
  });
  it('returns group calendar ID as-is', () => {
    const groupId = 'abc123@group.calendar.google.com';
    expect(extractCalendarId(groupId)).toBe(groupId);
  });
  it('extracts calendar ID from cid parameter (base64)', () => {
    // The cid query param carries the calendar ID base64-encoded.
    const input = `https://calendar.google.com/calendar/u/0?cid=${btoa('user@gmail.com')}`;
    expect(extractCalendarId(input)).toBe('user@gmail.com');
  });
  it('extracts calendar ID from src parameter', () => {
    const input = 'https://calendar.google.com/calendar/embed?src=user@gmail.com';
    expect(extractCalendarId(input)).toBe('user@gmail.com');
  });
  it('extracts calendar ID from ical path', () => {
    // %40 is the percent-encoded '@' inside the ical feed path.
    const input = 'https://calendar.google.com/calendar/ical/user%40gmail.com/public/basic.ics';
    expect(extractCalendarId(input)).toBe('user@gmail.com');
  });
  it('returns null for non-URL non-email input', () => {
    expect(extractCalendarId('random-string')).toBeNull();
  });
  it('handles URL without recognized parameters', () => {
    expect(extractCalendarId('https://example.com/page')).toBeNull();
  });
});
describe('getCalendarSubscribeUrl', () => {
  it('generates a subscribe URL with base64-encoded calendar ID', () => {
    const subscribeUrl = getCalendarSubscribeUrl('user@gmail.com');
    expect(subscribeUrl).toContain('https://calendar.google.com/calendar/u/0?cid=');
    expect(subscribeUrl).toContain(btoa('user@gmail.com'));
  });
  it('roundtrips with extractCalendarId', () => {
    const groupId = 'test@group.calendar.google.com';
    expect(extractCalendarId(getCalendarSubscribeUrl(groupId))).toBe(groupId);
  });
});

View File

@@ -90,9 +90,8 @@ export async function fetchPublicCalendarEvents(
);
if (!response.ok) {
const error = await response.text();
console.error('Google Calendar API error:', error);
throw new Error('Failed to fetch calendar events. Make sure the calendar is set to public.');
const errorText = await response.text();
throw new Error(`Failed to fetch calendar events (${response.status}): ${errorText}`);
}
const data = await response.json();

View File

@@ -34,30 +34,30 @@ export async function fetchBoardWithColumns(
supabase: SupabaseClient<Database>,
boardId: string
): Promise<BoardWithColumns | null> {
const { data: board, error: boardError } = await supabase
.from('kanban_boards')
.select('*')
.eq('id', boardId)
.single();
// Fetch board and columns in parallel
const [boardResult, columnsResult] = await Promise.all([
supabase.from('kanban_boards').select('*').eq('id', boardId).single(),
supabase.from('kanban_columns').select('*').eq('board_id', boardId).order('position'),
]);
if (boardError) {
log.error('fetchBoardWithColumns failed (board)', { error: boardError, data: { boardId } });
throw boardError;
if (boardResult.error) {
log.error('fetchBoardWithColumns failed (board)', { error: boardResult.error, data: { boardId } });
throw boardResult.error;
}
if (!board) return null;
if (!boardResult.data) return null;
const { data: columns, error: colError } = await supabase
.from('kanban_columns')
.select('*')
.eq('board_id', boardId)
.order('position');
if (colError) {
log.error('fetchBoardWithColumns failed (columns)', { error: colError, data: { boardId } });
throw colError;
if (columnsResult.error) {
log.error('fetchBoardWithColumns failed (columns)', { error: columnsResult.error, data: { boardId } });
throw columnsResult.error;
}
const columnIds = (columns ?? []).map((c) => c.id);
const board = boardResult.data;
const columns = columnsResult.data ?? [];
const columnIds = columns.map((c) => c.id);
if (columnIds.length === 0) {
return { ...board, columns: columns.map((col) => ({ ...col, cards: [] })) };
}
const { data: cards, error: cardError } = await supabase
.from('kanban_cards')
@@ -70,42 +70,71 @@ export async function fetchBoardWithColumns(
throw cardError;
}
// Fetch tags for all cards in one query
const cardIds = (cards ?? []).map((c) => c.id);
let cardTagsMap = new Map<string, { id: string; name: string; color: string | null }[]>();
const cardTagsMap = new Map<string, { id: string; name: string; color: string | null }[]>();
const checklistMap = new Map<string, { total: number; done: number }>();
const assigneeMap = new Map<string, { name: string | null; avatar: string | null }>();
if (cardIds.length > 0) {
const { data: cardTags } = await supabase
.from('card_tags')
.select('card_id, tags:tag_id (id, name, color)')
.in('card_id', cardIds);
const assigneeIds = [...new Set((cards ?? []).map((c) => c.assignee_id).filter(Boolean))] as string[];
(cardTags ?? []).forEach((ct: any) => {
const tag = Array.isArray(ct.tags) ? ct.tags[0] : ct.tags;
// Fetch tags, checklists, and assignee profiles in parallel
const [cardTagsResult, checklistResult, profilesResult] = await Promise.all([
supabase.from('card_tags').select('card_id, tags:tag_id (id, name, color)').in('card_id', cardIds),
supabase.from('kanban_checklist_items').select('card_id, completed').in('card_id', cardIds),
assigneeIds.length > 0
? supabase.from('profiles').select('id, full_name, avatar_url').in('id', assigneeIds)
: Promise.resolve({ data: null }),
]);
(cardTagsResult.data ?? []).forEach((ct: Record<string, unknown>) => {
const rawTags = ct.tags;
const tag = Array.isArray(rawTags) ? rawTags[0] : rawTags;
if (!tag) return;
if (!cardTagsMap.has(ct.card_id)) {
cardTagsMap.set(ct.card_id, []);
const cardId = ct.card_id as string;
if (!cardTagsMap.has(cardId)) {
cardTagsMap.set(cardId, []);
}
cardTagsMap.get(ct.card_id)!.push(tag);
cardTagsMap.get(cardId)!.push(tag as { id: string; name: string; color: string | null });
});
(checklistResult.data ?? []).forEach((item: Record<string, unknown>) => {
const cardId = item.card_id as string;
if (!checklistMap.has(cardId)) {
checklistMap.set(cardId, { total: 0, done: 0 });
}
const entry = checklistMap.get(cardId)!;
entry.total++;
if (item.completed) entry.done++;
});
(profilesResult.data ?? []).forEach((p: Record<string, unknown>) => {
assigneeMap.set(p.id as string, { name: p.full_name as string | null, avatar: p.avatar_url as string | null });
});
}
const cardsByColumn = new Map<string, (KanbanCard & { tags?: { id: string; name: string; color: string | null }[] })[]>();
const cardsByColumn = new Map<string, (KanbanCard & { tags?: { id: string; name: string; color: string | null }[]; checklist_total?: number; checklist_done?: number; assignee_name?: string | null; assignee_avatar?: string | null })[]>();
(cards ?? []).forEach((card) => {
const colId = card.column_id;
if (!colId) return;
if (!cardsByColumn.has(colId)) {
cardsByColumn.set(colId, []);
}
const cl = checklistMap.get(card.id);
const assignee = card.assignee_id ? assigneeMap.get(card.assignee_id) : null;
cardsByColumn.get(colId)!.push({
...card,
tags: cardTagsMap.get(card.id) ?? []
tags: cardTagsMap.get(card.id) ?? [],
checklist_total: cl?.total ?? 0,
checklist_done: cl?.done ?? 0,
assignee_name: assignee?.name ?? null,
assignee_avatar: assignee?.avatar ?? null,
});
});
return {
...board,
columns: (columns ?? []).map((col) => ({
columns: columns.map((col) => ({
...col,
cards: cardsByColumn.get(col.id) ?? []
}))
@@ -283,39 +312,76 @@ export async function moveCard(
...otherCards.slice(newPosition),
];
// Batch update: move card to column + set position, then update siblings
const updates = reordered.map((c, i) => {
if (c.id === cardId) {
// Build a map of old positions to detect what actually changed
const oldPositionMap = new Map((targetCards ?? []).map((c) => [c.id, c.position]));
// Only update cards whose position or column actually changed
const updates = reordered
.map((c, i) => {
if (c.id === cardId) {
// The moved card always needs updating (column + position)
return supabase
.from('kanban_cards')
.update({ column_id: newColumnId, position: i })
.eq('id', c.id);
}
// Skip siblings whose position hasn't changed
if (oldPositionMap.get(c.id) === i) return null;
return supabase
.from('kanban_cards')
.update({ column_id: newColumnId, position: i })
.update({ position: i })
.eq('id', c.id);
}
return supabase
.from('kanban_cards')
.update({ position: i })
.eq('id', c.id);
});
})
.filter(Boolean);
if (updates.length === 0) return;
const results = await Promise.all(updates);
const failed = results.find((r) => r.error);
const failed = results.find((r) => r && r.error);
if (failed?.error) {
log.error('moveCard failed', { error: failed.error, data: { cardId, newColumnId, newPosition } });
throw failed.error;
}
}
export interface RealtimeChangePayload<T = Record<string, unknown>> {
event: 'INSERT' | 'UPDATE' | 'DELETE';
new: T;
old: Partial<T>;
}
export function subscribeToBoard(
supabase: SupabaseClient<Database>,
boardId: string,
onColumnChange: () => void,
onCardChange: () => void
columnIds: string[],
onColumnChange: (payload: RealtimeChangePayload<KanbanColumn>) => void,
onCardChange: (payload: RealtimeChangePayload<KanbanCard>) => void
) {
const channel = supabase.channel(`kanban:${boardId}`);
const columnIdSet = new Set(columnIds);
channel
.on('postgres_changes', { event: '*', schema: 'public', table: 'kanban_columns', filter: `board_id=eq.${boardId}` }, onColumnChange)
.on('postgres_changes', { event: '*', schema: 'public', table: 'kanban_cards' }, onCardChange)
.on('postgres_changes', { event: '*', schema: 'public', table: 'kanban_columns', filter: `board_id=eq.${boardId}` },
(payload) => onColumnChange({
event: payload.eventType as 'INSERT' | 'UPDATE' | 'DELETE',
new: payload.new as KanbanColumn,
old: payload.old as Partial<KanbanColumn>,
})
)
.on('postgres_changes', { event: '*', schema: 'public', table: 'kanban_cards' },
(payload) => {
// Client-side filter: only process cards belonging to this board's columns
const card = (payload.new ?? payload.old) as Partial<KanbanCard>;
const colId = card.column_id ?? (payload.old as Partial<KanbanCard>)?.column_id;
if (colId && !columnIdSet.has(colId)) return;
onCardChange({
event: payload.eventType as 'INSERT' | 'UPDATE' | 'DELETE',
new: payload.new as KanbanCard,
old: payload.old as Partial<KanbanCard>,
});
}
)
.subscribe();
return channel;