('/dashboard/connectivity').then(({ data }) => setConnectivity(data)).catch(() => {}),
);
}
await Promise.allSettled(promises);
@@ -280,21 +285,20 @@ export default function DashboardPage() {
{/* === Welcome Banner === */}
-
-
-
-
- Welcome{user?.name ? `, ${user.name}` : ''}
-
-
- {lastRefresh && `Updated ${lastRefresh.toLocaleTimeString()}`}
-
-
+
+
+
+ Welcome{user?.name ? `, ${user.name}` : ''}
+
+
+ {lastRefresh && `Updated ${lastRefresh.toLocaleTimeString()}`}
+
{isSuperAdmin && homepageUrl && (
setActiveView(val as 'dashboard' | 'homepage')}
options={[
@@ -306,35 +310,29 @@ export default function DashboardPage() {
)}
{activeView === 'dashboard' && (
-
- {showInfluence && (
- } onClick={() => navigate('/app/campaigns')}>Campaign
- )}
- {showMap && (
- } onClick={() => navigate('/app/map')}>Location
- )}
- {showMedia && (
- } onClick={() => navigate('/app/media/library')}>Video
- )}
- } onClick={() => navigate('/app/pages')}>Page
+
+ {showInfluence && } onClick={() => navigate('/app/campaigns')} />}
+ {showMap && } onClick={() => navigate('/app/map')} />}
+ {showMedia && } onClick={() => navigate('/app/media/library')} />}
+ } onClick={() => navigate('/app/pages')} />
{isSuperAdmin && (
<>
- } onClick={() => navigate('/app/observability')}>Monitoring
- } onClick={() => navigate('/app/tunnel')}>Tunnel
- } onClick={() => navigate('/app/services/nocodb')}>NocoDB
- } onClick={() => navigate('/app/services/n8n')}>Workflows
- } onClick={() => navigate('/app/services/gitea')}>Git
- } onClick={() => navigate('/app/code')}>Code
- } onClick={() => navigate('/app/docs')}>Docs
- } onClick={() => navigate('/app/services/miniqr')}>QR
- } onClick={() => navigate('/app/map/data-quality')}>Data Quality
+ } onClick={() => navigate('/app/observability')} />
+ } onClick={() => navigate('/app/tunnel')} />
+ } onClick={() => navigate('/app/services/nocodb')} />
+ } onClick={() => navigate('/app/services/n8n')} />
+ } onClick={() => navigate('/app/services/gitea')} />
+ } onClick={() => navigate('/app/code')} />
+ } onClick={() => navigate('/app/docs')} />
+ } onClick={() => navigate('/app/services/miniqr')} />
+ } onClick={() => navigate('/app/map/data-quality')} />
>
)}
- } onClick={handleRefresh}>Refresh
+ } onClick={handleRefresh} />
)}
{activeView === 'homepage' && (
- } onClick={handleRefresh}>Refresh
+ } onClick={handleRefresh} />
)}
@@ -400,107 +398,130 @@ export default function DashboardPage() {
);
})()}
- {/* === Weather + Key Metrics Row === */}
-
- {weather && (
-
-
- {getWeatherIcon(weather.weatherCode, weather.isDay)}
- {Math.round(weather.temperature)}{'°C'}
-
-
- {weather.weatherDescription}
-
-
-
- {'Feels ' + Math.round(weather.apparentTemperature) + '° · ' + weather.humidity + '% · ' + Math.round(weather.windSpeed) + ' km/h'}
-
-
-
- )}
+ {/* === Status Bar (weather + stats + pending actions + connectivity) === */}
+ {summary && (
+
+
+
+ {weather && (
+
+ {getWeatherIcon(weather.weatherCode, weather.isDay)}
+ {Math.round(weather.temperature)}°C
+ {weather.weatherDescription}
+
+ )}
+ {/* Quick stat chips */}
+ } color="#1890ff" value={summary.users.total} label="Users" onClick={() => navigate('/app/users')} />
+ {showInfluence && } color="#52c41a" value={summary.campaigns.active} label={`of ${summary.campaigns.total}`} onClick={() => navigate('/app/campaigns')} />}
+ {showMap && } color="#722ed1" value={summary.locations.total.toLocaleString()} label={`${geocodePct}% geo`} onClick={() => navigate('/app/map')} />}
+ {showInfluence && } color="#faad14" value={summary.emails.sent} label="sent" onClick={() => navigate('/app/email-queue')} />}
+ {showMedia && } color="#13c2c2" value={summary.videos.published} label={`of ${summary.videos.total}`} onClick={() => navigate('/app/media/library')} />}
+ {showMap && } color="#eb2f96" value={summary.shifts.upcoming} label={`${summary.shifts.open} open`} onClick={() => navigate('/app/map/shifts')} />}
+ {/* Pending action tags */}
+ {summary.responses.pending > 0 && (
+ navigate('/app/responses')}>
+ {summary.responses.pending} pending
+
+ )}
+ {summary.locations.total > 0 && summary.locations.total - summary.locations.geocoded > 0 && (
+ navigate('/app/map')}>
+ {summary.locations.total - summary.locations.geocoded} ungeocoded
+
+ )}
+ {summary.emails.queued > 0 && (
+ navigate('/app/email-queue')}>
+ {summary.emails.queued} queued
+
+ )}
+ {summary.campaignModeration.pendingReview > 0 && (
+ navigate('/app/campaign-moderation')}>
+ {summary.campaignModeration.pendingReview} review
+
+ )}
+
+ {isSuperAdmin && connectivity && (
+
+
+
+
+
+
+ )}
+
+
+ )}
-
- } color="#1890ff" onClick={() => navigate('/app/users')} />
-
+ {/* === Email Queue Widget (shown if queue has items) === */}
+ {queue && (queue.waiting > 0 || queue.active > 0 || queue.failed > 0) && (
+ 0 ? '#ff4d4f' : queue.paused ? '#faad14' : '#1890ff'}` }}
+ styles={{ body: { padding: '6px 16px' } }}
+ >
+
+
+
+
+ Email Queue
+ {queue.paused && Paused}
+
+
+ Waiting} value={queue.waiting} valueStyle={{ fontSize: 18 }} />
+ Active} value={queue.active} valueStyle={{ fontSize: 18, color: '#1890ff' }} />
+ {queue.failed > 0 && (
+ Failed} value={queue.failed} valueStyle={{ fontSize: 18, color: '#ff4d4f' }} />
+ )}
+
+
+
+
+
+ )}
+ {/* === Module Overview Row (3 columns) === */}
+
{showInfluence && (
-
- } color="#52c41a" onClick={() => navigate('/app/campaigns')} />
-
- )}
-
- {showMap && (
-
- } color="#722ed1" onClick={() => navigate('/app/map')} />
-
- )}
-
- {showInfluence && (
-
- } color="#faad14" onClick={() => navigate('/app/email-queue')} />
-
- )}
-
- {showMedia && (
-
- } color="#13c2c2" onClick={() => navigate('/app/media/library')} />
-
- )}
-
- {showMap && (
-
- } color="#eb2f96" onClick={() => navigate('/app/map/shifts')} />
-
- )}
-
-
- {/* === Module Overview Row === */}
-
- {showInfluence && (
-
+
Influence>}
+ title={
+
+
+ Influence
+ {summary && {summary.campaigns.active} active / {summary.campaigns.total}}
+
+ }
size="small"
extra={}
style={{ height: '100%' }}
>
{summary && (
-
-
-
-
- Campaigns:
- {summary.campaigns.active} Active
- {summary.campaigns.draft} Draft
- {summary.campaigns.paused > 0 && {summary.campaigns.paused} Paused}
+
+
+
+ {summary.campaigns.active} Active
+ {summary.campaigns.draft} Draft
+ {summary.campaigns.paused > 0 && {summary.campaigns.paused} Paused}
+
+
+ Responses: {summary.responses.total}
+ {summary.responses.pending > 0 && {summary.responses.pending} pending}
+
+
+ Emails: {summary.emails.sent} sent
+ {summary.emails.failed > 0 && {summary.emails.failed} failed}
+
+ {queue && (
+
+ Queue: {queue.waiting} waiting
+ {queue.paused && Paused}
-
- Responses:
- {summary.responses.total} total
- {summary.responses.pending > 0 && {summary.responses.pending} pending}
-
-
- Queue:
- {queue ? (
- <>
- {queue.waiting} waiting, {queue.active} active
- {queue.paused && Paused}
- >
- ) : unavailable}
-
- {summary.campaignModeration.pendingReview > 0 && (
-
-
-
- )}
-
-
- {/* Campaign status donut */}
- {summary.campaigns.total > 0 && screens.sm && (
-
-
+ )}
+
+ {summary.campaigns.total > 0 && screens.md && (
+
+
)}
@@ -510,9 +531,15 @@ export default function DashboardPage() {
)}
{showMap && (
-
+
Map & Canvassing>}
+ title={
+
+
+ Map
+ {summary && {summary.locations.total.toLocaleString()} locations}
+
+ }
size="small"
extra={}
style={{ height: '100%' }}
@@ -520,84 +547,90 @@ export default function DashboardPage() {
{summary && (
- Geocoding:
+ Geocoded:
{summary.locations.geocoded.toLocaleString()}/{summary.locations.total.toLocaleString()}
-
- Addresses:
- {summary.locations.addresses.toLocaleString()}
- {summary.cuts.total} cuts
-
-
- Canvassing:
- {summary.canvass.totalVisits} visits
- {summary.canvass.activeSessions > 0 && {summary.canvass.activeSessions} active}
-
+
+ Addresses: {summary.locations.addresses.toLocaleString()}
+ {summary.cuts.total} cuts
+
+
+ Canvassing: {summary.canvass.totalVisits} visits
+ {summary.canvass.activeSessions > 0 && {summary.canvass.activeSessions} active}
+
+
+ Shifts: {summary.shifts.upcoming} upcoming
+ {summary.shifts.open} open
+
)}
)}
-
+
Content>}
- size="small"
- extra={}
- style={{ height: '100%' }}
- >
- {summary && (
-
-
- Pages:
- {summary.pages.published} published
- / {summary.pages.total}
-
-
- Templates:
- {summary.emailTemplates.total}
-
- {showInfluence && (
-
- Rep Cache:
- {summary.representatives.totalCached}
-
- )}
- {showMedia && (
-
- Videos:
- {summary.videos.published} published / {summary.videos.total}
-
- )}
+ title={
+
+
+ Users & Content
+ {summary && {summary.users.total} users}
- )}
-
-
-
-
-
Users>}
+ }
size="small"
extra={}
style={{ height: '100%' }}
>
{summary && (
-
- {Object.entries(summary.users.byRole)
- .filter(([, count]) => count > 0)
- .map(([role, count]) => (
-
- {ROLE_LABELS[role] || role}: {count}
-
- ))}
- {summary.users.suspended > 0 && Suspended: {summary.users.suspended}}
-
+
+
+ {Object.entries(summary.users.byRole)
+ .filter(([, count]) => count > 0)
+ .map(([role, count]) => (
+
+ {ROLE_LABELS[role] || role}: {count}
+
+ ))}
+ {summary.users.suspended > 0 && Suspended: {summary.users.suspended}}
+
+
+ Pages: {summary.pages.published} published
+ / {summary.pages.total}
+
+
+ Templates: {summary.emailTemplates.total}
+ {showInfluence && Reps: {summary.representatives.totalCached}}
+
+ {showMedia && (
+
+ Videos: {summary.videos.published} published
+ / {summary.videos.total}
+
+ )}
+
)}
+ {/* === Activity Feed + Events + Chat === */}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
{/* === System + Docker Section (SUPER_ADMIN only) === */}
{isSuperAdmin && (
<>
@@ -876,34 +909,6 @@ function MiniSystemChart({ timeSeries }: { timeSeries: TimeSeriesResult }) {
);
}
-// --- Stat Card Component ---
-
-function StatCard({ title, value, subtitle, icon, color, onClick }: {
- title: string;
- value?: number | null;
- subtitle: string;
- icon: React.ReactNode;
- color: string;
- onClick: () => void;
-}) {
- return (
-
- {title}}
- value={value ?? '--'}
- prefix={icon}
- valueStyle={{ color, fontSize: 22 }}
- />
- {subtitle}
-
- );
-}
// --- Service Badge Component (with pulse animation) ---
@@ -927,6 +932,47 @@ const SERVICE_ICONS: Record
= {
homepage: ,
};
+// --- Quick Stat chip (for status bar) ---
+
+function QuickStat({ icon, color, value, label, onClick }: {
+ icon: React.ReactNode;
+ color: string;
+ value: string | number;
+ label: string;
+ onClick: () => void;
+}) {
+ return (
+
+ {icon}
+ {value}
+ {label}
+
+ );
+}
+
+function ConnectivityDot({ label, online }: { label: string; online: boolean }) {
+ return (
+
+
+
+ {label}
+
+
+ );
+}
+
function ServiceBadge({ name, online, icon }: {
name: string;
online?: boolean;
diff --git a/admin/src/pages/GancioPage.tsx b/admin/src/pages/GancioPage.tsx
new file mode 100644
index 00000000..8ca381b0
--- /dev/null
+++ b/admin/src/pages/GancioPage.tsx
@@ -0,0 +1,127 @@
+import { useState, useEffect, useCallback, useMemo } from 'react';
+import { useOutletContext } from 'react-router-dom';
+import { Button, Space, Badge, Spin, Grid, Result } from 'antd';
+import { ReloadOutlined, LinkOutlined, CalendarOutlined } from '@ant-design/icons';
+import { api } from '@/lib/api';
+import type { AppOutletContext } from '@/components/AppLayout';
+import type { ServicesStatus, ServicesConfig } from '@/types/api';
+import { buildServiceUrl } from '@/lib/service-url';
+
+export default function GancioPage() {
+ const { setPageHeader } = useOutletContext();
+ const screens = Grid.useBreakpoint();
+ const isMobile = !screens.md;
+
+ const [online, setOnline] = useState(null);
+ const [config, setConfig] = useState(null);
+ const [loading, setLoading] = useState(true);
+
+ const fetchStatus = useCallback(async () => {
+ try {
+ const [statusRes, configRes] = await Promise.all([
+ api.get('/services/status'),
+ api.get('/services/config'),
+ ]);
+ setOnline(statusRes.data.gancio.online);
+ setConfig(configRes.data);
+ } catch {
+ setOnline(false);
+ } finally {
+ setLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ fetchStatus();
+ }, [fetchStatus]);
+
+ const serviceUrl = config
+ ? buildServiceUrl(config.gancioSubdomain, config.domain, config.gancioPort)
+ : null;
+
+ const handleRefresh = useCallback(() => {
+ fetchStatus();
+ }, [fetchStatus]);
+
+ const headerActions = useMemo(() => (
+
+
+ }
+ onClick={handleRefresh}
+ size="small"
+ >
+ Refresh
+
+ {serviceUrl && (
+ }
+ href={serviceUrl}
+ target="_blank"
+ size="small"
+ >
+ Open in New Tab
+
+ )}
+
+ ), [online, handleRefresh, serviceUrl]);
+
+ useEffect(() => {
+ setPageHeader({
+ title: 'Events',
+ actions: headerActions,
+ fullBleed: true
+ });
+ return () => setPageHeader(null);
+ }, [setPageHeader, headerActions]);
+
+ if (isMobile) {
+ return (
+ }
+ />
+ );
+ }
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (!online || !serviceUrl) {
+ return (
+
+ Retry
+
+ }
+ />
+ );
+ }
+
+ return (
+
+ );
+}
diff --git a/admin/src/pages/ListmonkPage.tsx b/admin/src/pages/ListmonkPage.tsx
index 549922ba..9b733707 100644
--- a/admin/src/pages/ListmonkPage.tsx
+++ b/admin/src/pages/ListmonkPage.tsx
@@ -44,6 +44,7 @@ export default function ListmonkPage() {
const [status, setStatus] = useState(null);
const [stats, setStats] = useState(null);
const [config, setConfig] = useState(null);
+ const [eventSyncStats, setEventSyncStats] = useState<{ enabled: boolean; lastSyncAt: string | null; todaySyncCount: number } | null>(null);
const [loading, setLoading] = useState(true);
const [syncing, setSyncing] = useState>({});
const [iframeSrc, setIframeSrc] = useState(null);
@@ -79,11 +80,20 @@ export default function ListmonkPage() {
}
}, []);
+ // Fetch event-sync statistics (enabled flag, last sync time, today's count)
+ // from the Listmonk API for display; failures are non-fatal and leave the
+ // state at null so the UI can omit the stats.
+ const fetchEventSyncStats = useCallback(async () => {
+ try {
+ const res = await api.get<{ enabled: boolean; lastSyncAt: string | null; todaySyncCount: number }>('/listmonk/event-sync-stats');
+ setEventSyncStats(res.data);
+ } catch {
+ // Event sync stats fetch failed — leave null
+ }
+ }, []);
+
const fetchAll = useCallback(async () => {
setLoading(true);
- await Promise.all([fetchStatus(), fetchStats(), fetchConfig()]);
+ await Promise.all([fetchStatus(), fetchStats(), fetchConfig(), fetchEventSyncStats()]);
setLoading(false);
- }, [fetchStatus, fetchStats, fetchConfig]);
+ }, [fetchStatus, fetchStats, fetchConfig, fetchEventSyncStats]);
useEffect(() => {
fetchAll();
@@ -350,6 +360,29 @@ export default function ListmonkPage() {
/>
+
+
+
+
+
+
+ {eventSyncStats?.lastSyncAt ? dayjs(eventSyncStats.lastSyncAt).fromNow() : 'No events yet'}
+
+
+ {eventSyncStats?.todaySyncCount ?? 0}
+
+
+
+
+
();
const screens = Grid.useBreakpoint();
@@ -15,6 +17,9 @@ export default function NocoDBPage() {
const [online, setOnline] = useState(null);
const [config, setConfig] = useState(null);
const [loading, setLoading] = useState(true);
+ const [bannerDismissed, setBannerDismissed] = useState(
+ () => localStorage.getItem(BANNER_DISMISSED_KEY) === 'true'
+ );
const fetchStatus = useCallback(async () => {
try {
@@ -109,15 +114,38 @@ export default function NocoDBPage() {
}
return (
-
+
+ {!bannerDismissed && (
+
+ If the database browser appears blank, you may need to{' '}
+
+ sign in to NocoDB in a new tab
+ {' '}
+ first, then refresh this page.
+ >
+ }
+ type="info"
+ showIcon
+ closable
+ onClose={() => {
+ setBannerDismissed(true);
+ localStorage.setItem(BANNER_DISMISSED_KEY, 'true');
+ }}
+ style={{ borderRadius: 0, flexShrink: 0 }}
+ />
+ )}
+
+
);
}
diff --git a/admin/src/pages/RocketChatPage.tsx b/admin/src/pages/RocketChatPage.tsx
new file mode 100644
index 00000000..1b940b8d
--- /dev/null
+++ b/admin/src/pages/RocketChatPage.tsx
@@ -0,0 +1,208 @@
+import { useState, useEffect, useCallback, useMemo, useRef } from 'react';
+import { useOutletContext } from 'react-router-dom';
+import { Button, Space, Badge, Spin, Grid, Result } from 'antd';
+import { ReloadOutlined, LinkOutlined, MessageOutlined } from '@ant-design/icons';
+import { api } from '@/lib/api';
+import type { AppOutletContext } from '@/components/AppLayout';
+import type { ServicesConfig } from '@/types/api';
+import { buildServiceUrl } from '@/lib/service-url';
+
+interface RCConfig {
+ enabled: boolean;
+ embedPort: number;
+ subdomain: string;
+ domain: string;
+}
+
+interface RCAuthResponse {
+ authToken: string;
+ rcUserId: string;
+}
+
+export default function RocketChatPage() {
+ const { setPageHeader } = useOutletContext();
+ const screens = Grid.useBreakpoint();
+ const isMobile = !screens.md;
+
+ const [online, setOnline] = useState(null);
+ const [config, setConfig] = useState(null);
+ const [rcConfig, setRcConfig] = useState(null);
+ const [authToken, setAuthToken] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [authError, setAuthError] = useState(null);
+ const iframeRef = useRef(null);
+
+ const fetchStatus = useCallback(async () => {
+ setLoading(true);
+ setAuthError(null);
+ try {
+ const [statusRes, configRes, rcConfigRes] = await Promise.all([
+ api.get<{ online: boolean; enabled: boolean }>('/rocketchat/status'),
+ api.get('/services/config'),
+ api.get('/rocketchat/config'),
+ ]);
+ setOnline(statusRes.data.online);
+ setConfig(configRes.data);
+ setRcConfig(rcConfigRes.data);
+
+ // If online, get auth token for SSO
+ if (statusRes.data.online && statusRes.data.enabled) {
+ try {
+ const authRes = await api.post('/rocketchat/auth');
+ setAuthToken(authRes.data.authToken);
+ } catch (err) {
+ setAuthError('Failed to authenticate with Rocket.Chat');
+ }
+ }
+ } catch {
+ setOnline(false);
+ } finally {
+ setLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ fetchStatus();
+ }, [fetchStatus]);
+
+ // Inject auth token into iframe via postMessage when iframe loads.
+ // RC's iframe integration listener may not be ready immediately after the
+ // login page renders, so we retry a few times with a delay.
+ const retryTimers = useRef[]>([]);
+
+ // On each iframe load: cancel stale retries, then push the SSO token into
+ // the embedded Rocket.Chat via postMessage (immediately + delayed retries,
+ // since RC's listener may register after the login page first renders).
+ // NOTE(review): targetOrigin '*' delivers the auth token to whatever
+ // document the iframe currently holds — consider passing the RC origin.
+ // NOTE(review): this posts { event: 'login-with-token', loginToken } while
+ // VolunteerChatPage posts { externalCommand: 'login-with-token', token };
+ // confirm which shape the deployed RC version expects and unify the pages.
+ const handleIframeLoad = useCallback(() => {
+ // Clear any pending retries from a previous load
+ retryTimers.current.forEach(clearTimeout);
+ retryTimers.current = [];
+
+ if (!authToken || !iframeRef.current?.contentWindow) return;
+
+ const sendToken = () => {
+ if (!iframeRef.current?.contentWindow) return;
+ iframeRef.current.contentWindow.postMessage(
+ { event: 'login-with-token', loginToken: authToken },
+ '*',
+ );
+ };
+
+ // Send immediately, then retry after short delays in case the
+ // RC login page hasn't registered its listener yet
+ sendToken();
+ retryTimers.current.push(setTimeout(sendToken, 1000));
+ retryTimers.current.push(setTimeout(sendToken, 3000));
+ }, [authToken]);
+
+ // Cleanup timers on unmount
+ useEffect(() => {
+ return () => retryTimers.current.forEach(clearTimeout);
+ }, []);
+
+ const serviceUrl = useMemo(() => {
+ if (!config || !rcConfig) return null;
+ return buildServiceUrl(rcConfig.subdomain, rcConfig.domain, rcConfig.embedPort);
+ }, [config, rcConfig]);
+
+ const headerActions = useMemo(() => (
+
+
+ } onClick={fetchStatus} size="small">
+ Refresh
+
+ {serviceUrl && (
+ } href={serviceUrl} target="_blank" size="small">
+ Open in New Tab
+
+ )}
+
+ ), [online, fetchStatus, serviceUrl]);
+
+ useEffect(() => {
+ setPageHeader({
+ title: 'Team Chat',
+ actions: headerActions,
+ fullBleed: true,
+ });
+ return () => setPageHeader(null);
+ }, [setPageHeader, headerActions]);
+
+ if (isMobile) {
+ return (
+ }
+ />
+ );
+ }
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (!rcConfig?.enabled) {
+ return (
+
+ );
+ }
+
+ if (!online || !serviceUrl) {
+ return (
+
+ Retry
+
+ }
+ />
+ );
+ }
+
+ if (authError) {
+ return (
+
+ Retry
+
+ }
+ />
+ );
+ }
+
+ // Load full RC UI (with sidebar) — layout=embedded hides the channel sidebar
+ const iframeSrc = serviceUrl;
+
+ return (
+
+ );
+}
diff --git a/admin/src/pages/SettingsPage.tsx b/admin/src/pages/SettingsPage.tsx
index fefe0a27..c2833dc4 100644
--- a/admin/src/pages/SettingsPage.tsx
+++ b/admin/src/pages/SettingsPage.tsx
@@ -501,6 +501,89 @@ export default function SettingsPage() {
+
+
+
+
+
+
+
+ ),
+ },
+ {
+ key: 'notifications',
+ label: 'Notifications',
+ children: (
+
+
+
Admin Alerts
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
Volunteer Emails
+
+
+
+
+
+
+
+
+
+
),
},
diff --git a/admin/src/pages/VaultwardenPage.tsx b/admin/src/pages/VaultwardenPage.tsx
new file mode 100644
index 00000000..0e6218b3
--- /dev/null
+++ b/admin/src/pages/VaultwardenPage.tsx
@@ -0,0 +1,160 @@
+import { useState, useEffect, useCallback, useMemo } from 'react';
+import { useOutletContext } from 'react-router-dom';
+import { Button, Space, Badge, Spin, Grid, Result, Alert, Typography } from 'antd';
+import { ReloadOutlined, LinkOutlined, LockOutlined, WarningOutlined } from '@ant-design/icons';
+import { api } from '@/lib/api';
+import type { AppOutletContext } from '@/components/AppLayout';
+import type { ServicesStatus, ServicesConfig } from '@/types/api';
+import { buildServiceUrl } from '@/lib/service-url';
+
+export default function VaultwardenPage() {
+ const { setPageHeader } = useOutletContext();
+ const screens = Grid.useBreakpoint();
+ const isMobile = !screens.md;
+
+ const [online, setOnline] = useState(null);
+ const [config, setConfig] = useState(null);
+ const [loading, setLoading] = useState(true);
+
+ const fetchStatus = useCallback(async () => {
+ try {
+ const [statusRes, configRes] = await Promise.all([
+ api.get('/services/status'),
+ api.get('/services/config'),
+ ]);
+ setOnline(statusRes.data.vaultwarden.online);
+ setConfig(configRes.data);
+ } catch {
+ setOnline(false);
+ } finally {
+ setLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ fetchStatus();
+ }, [fetchStatus]);
+
+ const serviceUrl = config
+ ? buildServiceUrl(config.vaultwardenSubdomain, config.domain, config.vaultwardenPort)
+ : null;
+
+ // Detect if we're serving via HTTP (localhost/embed port) instead of HTTPS (tunnel)
+ const isLocalAccess = !window.location.hostname.includes('.');
+ const isHttpAccess = window.location.protocol === 'http:';
+ const httpsUrl = config ? `https://${config.vaultwardenSubdomain}.${config.domain}` : null;
+
+ const handleRefresh = useCallback(() => {
+ fetchStatus();
+ }, [fetchStatus]);
+
+ const headerActions = useMemo(() => (
+
+
+ }
+ onClick={handleRefresh}
+ size="small"
+ >
+ Refresh
+
+ {serviceUrl && (
+ }
+ href={serviceUrl}
+ target="_blank"
+ size="small"
+ >
+ Open in New Tab
+
+ )}
+
+ ), [online, handleRefresh, serviceUrl]);
+
+ useEffect(() => {
+ setPageHeader({
+ title: 'Password Manager',
+ actions: headerActions,
+ fullBleed: true
+ });
+ return () => setPageHeader(null);
+ }, [setPageHeader, headerActions]);
+
+ if (isMobile) {
+ return (
+ }
+ />
+ );
+ }
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (!online || !serviceUrl) {
+ return (
+
+ Retry
+
+ }
+ />
+ );
+ }
+
+ const showHttpWarning = isLocalAccess || isHttpAccess;
+
+ return (
+
+ {showHttpWarning && (
+
}
+ showIcon
+ banner
+ message={
+
+ HTTPS required for account creation.
+ {' '}You are accessing Vaultwarden over HTTP (localhost). Browsing an existing vault works,
+ but creating accounts or accepting invites requires HTTPS.
+ {httpsUrl ? (
+ <>
+ {' '}Use your tunnel URL instead:{' '}
+ {httpsUrl}
+ >
+ ) : (
+ <> Configure a Pangolin tunnel resource for the vault subdomain to enable HTTPS access.>
+ )}
+
+ }
+ closable
+ />
+ )}
+
+
+ );
+}
diff --git a/admin/src/pages/volunteer/VolunteerChatPage.tsx b/admin/src/pages/volunteer/VolunteerChatPage.tsx
new file mode 100644
index 00000000..2157b8e5
--- /dev/null
+++ b/admin/src/pages/volunteer/VolunteerChatPage.tsx
@@ -0,0 +1,125 @@
+import { useState, useEffect, useCallback, useRef } from 'react';
+import { Button, Spin, Result, Grid } from 'antd';
+import { MessageOutlined } from '@ant-design/icons';
+import { api } from '@/lib/api';
+import { buildServiceUrl } from '@/lib/service-url';
+
+interface RCConfig {
+ enabled: boolean;
+ embedPort: number;
+ subdomain: string;
+ domain: string;
+}
+
+interface RCAuthResponse {
+ authToken: string;
+ rcUserId: string;
+}
+
+export default function VolunteerChatPage() {
+ const screens = Grid.useBreakpoint();
+ const isMobile = !screens.md;
+
+ const [online, setOnline] = useState(null);
+ const [rcConfig, setRcConfig] = useState(null);
+ const [authToken, setAuthToken] = useState(null);
+ const [loading, setLoading] = useState(true);
+ const [error, setError] = useState(null);
+ const iframeRef = useRef(null);
+
+ const fetchAndAuth = useCallback(async () => {
+ setLoading(true);
+ setError(null);
+ try {
+ const [statusRes, configRes] = await Promise.all([
+ api.get<{ online: boolean; enabled: boolean }>('/rocketchat/status'),
+ api.get('/rocketchat/config'),
+ ]);
+ setOnline(statusRes.data.online);
+ setRcConfig(configRes.data);
+
+ if (statusRes.data.online && statusRes.data.enabled) {
+ const authRes = await api.post('/rocketchat/auth');
+ setAuthToken(authRes.data.authToken);
+ }
+ } catch {
+ setOnline(false);
+ setError('Could not connect to chat service');
+ } finally {
+ setLoading(false);
+ }
+ }, []);
+
+ useEffect(() => {
+ fetchAndAuth();
+ }, [fetchAndAuth]);
+
+ // Push the Rocket.Chat auth token into the embedded iframe once it loads.
+ // NOTE(review): this uses { externalCommand: 'login-with-token', token }
+ // while RocketChatPage uses { event: 'login-with-token', loginToken } —
+ // confirm which shape the deployed RC version expects and unify the pages.
+ // NOTE(review): targetOrigin '*' delivers the auth token to whatever
+ // document the iframe currently holds — consider passing the RC origin.
+ // Unlike RocketChatPage, no delayed retries are scheduled here; if RC's
+ // message listener is not yet registered on load, the login may be dropped.
+ const handleIframeLoad = useCallback(() => {
+ if (authToken && iframeRef.current?.contentWindow) {
+ iframeRef.current.contentWindow.postMessage(
+ { externalCommand: 'login-with-token', token: authToken },
+ '*',
+ );
+ }
+ }, [authToken]);
+
+ if (isMobile) {
+ return (
+
+ }
+ title="Desktop Recommended"
+ subTitle="Chat works best on a larger screen."
+ />
+
+ );
+ }
+
+ if (loading) {
+ return (
+
+
+
+ );
+ }
+
+ if (!rcConfig?.enabled) {
+ return (
+
+
+
+ );
+ }
+
+ if (!online || error) {
+ return (
+
+ Retry}
+ />
+
+ );
+ }
+
+ const serviceUrl = buildServiceUrl(rcConfig.subdomain, rcConfig.domain, rcConfig.embedPort);
+ const iframeSrc = `${serviceUrl}/channel/general?layout=embedded`;
+
+ return (
+
+ );
+}
diff --git a/admin/src/types/api.ts b/admin/src/types/api.ts
index f43648ba..b24c3d2f 100644
--- a/admin/src/types/api.ts
+++ b/admin/src/types/api.ts
@@ -1005,6 +1005,9 @@ export interface ServicesStatus {
miniqr: { online: boolean; url: string };
excalidraw: { online: boolean; url: string };
homepage: { online: boolean; url: string };
+ vaultwarden: { online: boolean; url: string };
+ rocketchat: { online: boolean; url: string };
+ gancio: { online: boolean; url: string };
}
export interface ServicesConfig {
@@ -1039,6 +1042,15 @@ export interface ServicesConfig {
// Homepage (service dashboard)
homepagePort: number;
homepageSubdomain: string;
+ // Vaultwarden (password manager)
+ vaultwardenPort: number;
+ vaultwardenSubdomain: string;
+ // Rocket.Chat (team chat)
+ rocketchatPort: number;
+ rocketchatSubdomain: string;
+ // Gancio (event management)
+ gancioPort: number;
+ gancioSubdomain: string;
}
// --- Site Settings ---
@@ -1088,6 +1100,16 @@ export interface SiteSettings {
enableMediaFeatures: boolean;
enablePayments: boolean;
enableGalleryAds: boolean;
+ enableChat: boolean;
+ enableEvents: boolean;
+ // Notification settings
+ notifyAdminShiftSignup: boolean;
+ notifyAdminResponseSubmitted: boolean;
+ notifyAdminSignRequested: boolean;
+ notifyAdminShiftCancellation: boolean;
+ notifyVolunteerSessionSummary: boolean;
+ notifyVolunteerCancellation: boolean;
+ notifyVolunteerShiftReminder: boolean;
createdAt: string;
updatedAt: string;
}
@@ -1693,6 +1715,96 @@ export interface ApiMetrics {
statusBreakdown: { status: string; count: number }[];
}
+// --- Canvass Export ---
+
+// Preview of a canvass-contacts export before it is committed:
+// totals plus per-cut / per-outcome / per-support-level breakdowns.
+export interface ExportContactsPreviewResult {
+ totalContacts: number;
+ contactsWithEmail: number;
+ byCut: { cutId: string; cutName: string; contacts: number; withEmail: number }[];
+ // NOTE(review): the Record type arguments were lost in transit (bare
+ // `Record;` is invalid TS). Record<string, number> (key -> count) matches
+ // the sibling count fields — confirm against the API response.
+ byOutcome: Record<string, number>;
+ bySupportLevel: Record<string, number>;
+}
+
+// Outcome counts returned after committing a canvass-contacts export
+// into a campaign.
+export interface ExportContactsResult {
+ // Contacts created by the export.
+ created: number;
+ // Contacts skipped as duplicates.
+ skippedDuplicate: number;
+ // Contacts skipped for having no email address.
+ skippedNoEmail: number;
+ // Target campaign the contacts were exported into.
+ campaignId: string;
+ campaignTitle: string;
+}
+
+// Per-cut canvassing analytics: address coverage and support breakdown.
+export interface CutCampaignAnalytics {
+ cutId: string;
+ cutName: string;
+ totalAddresses: number;
+ visitedAddresses: number;
+ // Completion percentage (visited / total).
+ completionPct: number;
+ addressesWithEmail: number;
+ // NOTE(review): the Record type arguments were lost in transit (bare
+ // `Record;` is invalid TS). Record<string, number> (support level -> count)
+ // matches the surrounding count fields — confirm against the API response.
+ supportBreakdown: Record<string, number>;
+}
+
+// --- Dashboard Activity Feed ---
+
+// A single entry in the dashboard activity feed.
+export interface ActivityItem {
+ id: string;
+ // Kind of event the entry represents.
+ type: 'shift_signup' | 'response_submitted' | 'canvass_completed' | 'email_sent' | 'user_created';
+ // Which app module produced the event.
+ module: 'map' | 'influence' | 'users';
+ title: string;
+ description: string;
+ // Event time as a string — presumably ISO 8601; confirm with the API.
+ timestamp: string;
+}
+
+// Paginated page of activity-feed items.
+export interface ActivityFeedResult {
+ items: ActivityItem[];
+ // Total items across all pages.
+ total: number;
+ page: number;
+ limit: number;
+}
+
+// --- Dashboard Connectivity ---
+
+// Reachability flags for each external service integration, shown in the
+// dashboard status bar.
+export interface ConnectivityStatus {
+ smtp: boolean;
+ listmonk: boolean;
+ rocketchat: boolean;
+ gancio: boolean;
+}
+
+// --- Dashboard Today Events (Gancio) ---
+
+// A Gancio event occurring today, as surfaced on the dashboard.
+export interface TodayEvent {
+ // Numeric Gancio event id.
+ id: number;
+ title: string;
+ description: string;
+ placeName: string;
+ // Start/end times as strings — presumably ISO 8601; confirm with the API.
+ startTime: string;
+ // Null when the event has no explicit end time.
+ endTime: string | null;
+ tags: string[];
+}
+
+// Today's events payload; `enabled` is false when the Gancio integration
+// is turned off, in which case `events` is expected to be empty.
+export interface TodayEventsResult {
+ enabled: boolean;
+ events: TodayEvent[];
+ total: number;
+}
+
+// --- Dashboard Chat Summary (Rocket.Chat) ---
+
+// A recent Rocket.Chat message shown in the dashboard chat summary.
+export interface ChatMessage {
+ id: string;
+ // Channel the message was posted in.
+ channel: string;
+ username: string;
+ text: string;
+ // Message time as a string — presumably ISO 8601; confirm with the API.
+ timestamp: string;
+ // True when the message was posted by a bot account.
+ isBot: boolean;
+}
+
+// Chat summary payload; `enabled` is false when the Rocket.Chat
+// integration is turned off.
+export interface ChatSummaryResult {
+ enabled: boolean;
+ messages: ChatMessage[];
+ unreadChannels: number;
+}
+
// --- Dashboard Time-Series ---
export interface TimeSeriesPoint {
diff --git a/api/prisma/init-gancio-db.sh b/api/prisma/init-gancio-db.sh
new file mode 100755
index 00000000..cae44ae3
--- /dev/null
+++ b/api/prisma/init-gancio-db.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+###############################################################################
+# Gancio Database Initialization Script
+###############################################################################
+# Creates a separate PostgreSQL database for Gancio event management.
+#
+# Database: gancio
+# Purpose: Stores Gancio events, users, and configuration
+# Runs: Automatically on first PostgreSQL container startup via docker-entrypoint-initdb.d
+###############################################################################
+# Abort immediately if any command fails.
+set -e
+
+# Feed the SQL below to psql as the container's superuser. The \gexec
+# meta-command executes each row of the preceding query's result as SQL,
+# so CREATE DATABASE only runs when the database does not already exist
+# (CREATE DATABASE itself has no IF NOT EXISTS form).
+psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ -- Create Gancio database if it doesn't exist
+ SELECT 'CREATE DATABASE gancio'
+ WHERE NOT EXISTS (SELECT FROM pg_database WHERE datname = 'gancio')\gexec
+
+ -- Grant all privileges to the main user
+ GRANT ALL PRIVILEGES ON DATABASE gancio TO ${POSTGRES_USER};
+EOSQL
+
+echo "Gancio database 'gancio' created successfully"
diff --git a/api/prisma/migrations/20260218000000_add_shift_cancellation_notification/migration.sql b/api/prisma/migrations/20260218000000_add_shift_cancellation_notification/migration.sql
new file mode 100644
index 00000000..58e852a4
--- /dev/null
+++ b/api/prisma/migrations/20260218000000_add_shift_cancellation_notification/migration.sql
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "site_settings" ADD COLUMN IF NOT EXISTS "notifyAdminShiftCancellation" BOOLEAN NOT NULL DEFAULT true;
diff --git a/api/prisma/migrations/20260218100000_add_gancio_event_id/migration.sql b/api/prisma/migrations/20260218100000_add_gancio_event_id/migration.sql
new file mode 100644
index 00000000..6a8d7f2d
--- /dev/null
+++ b/api/prisma/migrations/20260218100000_add_gancio_event_id/migration.sql
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "shifts" ADD COLUMN "gancioEventId" INTEGER;
diff --git a/api/prisma/migrations/20260218200000_add_enable_events_setting/migration.sql b/api/prisma/migrations/20260218200000_add_enable_events_setting/migration.sql
new file mode 100644
index 00000000..9fcd349a
--- /dev/null
+++ b/api/prisma/migrations/20260218200000_add_enable_events_setting/migration.sql
@@ -0,0 +1,2 @@
+-- AlterTable
+ALTER TABLE "site_settings" ADD COLUMN IF NOT EXISTS "enable_events" BOOLEAN NOT NULL DEFAULT false;
diff --git a/api/prisma/schema.prisma b/api/prisma/schema.prisma
index c54b1619..c0e841ea 100644
--- a/api/prisma/schema.prisma
+++ b/api/prisma/schema.prisma
@@ -647,6 +647,9 @@ model Shift {
series ShiftSeries? @relation(fields: [seriesId], references: [id], onDelete: SetNull)
isException Boolean @default(false)
+ // Gancio event sync
+ gancioEventId Int?
+
createdBy String?
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
@@ -844,6 +847,17 @@ model SiteSettings {
enableMediaFeatures Boolean @default(true) @map("enable_media_features")
enablePayments Boolean @default(false)
enableGalleryAds Boolean @default(false) @map("enable_gallery_ads")
+ enableChat Boolean @default(false) @map("enable_chat")
+ enableEvents Boolean @default(false) @map("enable_events")
+
+ // Notification settings
+ notifyAdminShiftSignup Boolean @default(true)
+ notifyAdminResponseSubmitted Boolean @default(true)
+ notifyAdminSignRequested Boolean @default(true)
+ notifyAdminShiftCancellation Boolean @default(true)
+ notifyVolunteerSessionSummary Boolean @default(true)
+ notifyVolunteerCancellation Boolean @default(true)
+ notifyVolunteerShiftReminder Boolean @default(true)
createdAt DateTime @default(now())
updatedAt DateTime @updatedAt
diff --git a/api/prisma/seed.ts b/api/prisma/seed.ts
index 33d5afdd..527f8dd0 100644
--- a/api/prisma/seed.ts
+++ b/api/prisma/seed.ts
@@ -311,6 +311,25 @@ async function main() {
buttonText: 'Buy Now',
},
},
+ {
+ id: 'default-gancio-events',
+ type: 'gancio-events',
+ label: 'Event Calendar',
+ category: 'Content',
+ sortOrder: 12,
+ schema: {
+ maxlength: { type: 'number', label: 'Max Events', default: 10 },
+ theme: { type: 'select', label: 'Theme', options: ['dark', 'light'], default: 'dark' },
+ tags: { type: 'string', label: 'Filter by Tags (comma-separated)' },
+ title: { type: 'string', label: 'Section Title', default: 'Upcoming Events' },
+ },
+ defaults: {
+ maxlength: 10,
+ theme: 'dark',
+ tags: '',
+ title: 'Upcoming Events',
+ },
+ },
];
for (const block of defaultBlocks) {
@@ -531,6 +550,89 @@ async function seedEmailTemplates(admin: { id: string; email: string }) {
{ key: 'ORGANIZATION_NAME', label: 'Organization Name', description: 'Name of the organization', isRequired: true, isConditional: false, sampleValue: 'Changemaker Lite', sortOrder: 7 },
],
},
+ {
+ key: 'admin-shift-signup-alert',
+ name: 'Admin: New Shift Signup Alert',
+ description: 'Notification sent to admins when a volunteer signs up for a shift',
+ category: EmailTemplateCategory.MAP,
+ subjectLine: 'New shift signup — {{SHIFT_TITLE}}',
+ isSystem: true,
+ variables: [
+ { key: 'ORGANIZATION_NAME', label: 'Organization Name', description: 'Name of the organization', isRequired: true, isConditional: false, sampleValue: 'Changemaker Lite', sortOrder: 0 },
+ { key: 'SHIFT_TITLE', label: 'Shift Title', description: 'Title of the shift', isRequired: true, isConditional: false, sampleValue: 'Weekend Canvassing - Downtown', sortOrder: 1 },
+ { key: 'SHIFT_DATE', label: 'Shift Date', description: 'Date of the shift', isRequired: true, isConditional: false, sampleValue: 'Saturday, February 22, 2026', sortOrder: 2 },
+ { key: 'VOLUNTEER_NAME', label: 'Volunteer Name', description: 'Name of the volunteer who signed up', isRequired: true, isConditional: false, sampleValue: 'Jane Doe', sortOrder: 3 },
+ { key: 'VOLUNTEER_EMAIL', label: 'Volunteer Email', description: 'Email of the volunteer', isRequired: true, isConditional: false, sampleValue: 'jane@example.com', sortOrder: 4 },
+ { key: 'SIGNUP_SOURCE', label: 'Signup Source', description: 'How the volunteer signed up (Public Form, Authenticated Volunteer)', isRequired: true, isConditional: false, sampleValue: 'Public Form', sortOrder: 5 },
+ { key: 'ADMIN_URL', label: 'Admin URL', description: 'URL to the admin shifts page', isRequired: true, isConditional: false, sampleValue: 'https://app.cmlite.org/app/map/shifts', sortOrder: 6 },
+ ],
+ },
+ {
+ key: 'admin-response-submitted-alert',
+ name: 'Admin: Response Submitted Alert',
+ description: 'Notification sent to admins when a new response is submitted to the response wall',
+ category: EmailTemplateCategory.INFLUENCE,
+ subjectLine: 'New response submitted — {{CAMPAIGN_TITLE}}',
+ isSystem: true,
+ variables: [
+ { key: 'ORGANIZATION_NAME', label: 'Organization Name', description: 'Name of the organization', isRequired: true, isConditional: false, sampleValue: 'Changemaker Lite', sortOrder: 0 },
+ { key: 'CAMPAIGN_TITLE', label: 'Campaign Title', description: 'Title of the campaign', isRequired: true, isConditional: false, sampleValue: 'Support Climate Action Bill C-12', sortOrder: 1 },
+ { key: 'REPRESENTATIVE_NAME', label: 'Representative Name', description: 'Name of the representative the response is about', isRequired: true, isConditional: false, sampleValue: 'Hon. John Smith', sortOrder: 2 },
+ { key: 'RESPONSE_TYPE', label: 'Response Type', description: 'Type of response (Support, Oppose, etc.)', isRequired: true, isConditional: false, sampleValue: 'SUPPORT', sortOrder: 3 },
+ { key: 'SUBMITTER_NAME', label: 'Submitter Name', description: 'Name of the person who submitted', isRequired: true, isConditional: false, sampleValue: 'Jane Doe', sortOrder: 4 },
+ { key: 'ADMIN_URL', label: 'Admin URL', description: 'URL to the admin responses page', isRequired: true, isConditional: false, sampleValue: 'https://app.cmlite.org/app/influence/responses', sortOrder: 5 },
+ ],
+ },
+ {
+ key: 'admin-sign-requested-alert',
+ name: 'Admin: Sign Requested Alert',
+ description: 'Notification sent to admins when a resident requests a yard sign during canvassing',
+ category: EmailTemplateCategory.MAP,
+ subjectLine: 'Sign requested — {{ADDRESS}}',
+ isSystem: true,
+ variables: [
+ { key: 'ORGANIZATION_NAME', label: 'Organization Name', description: 'Name of the organization', isRequired: true, isConditional: false, sampleValue: 'Changemaker Lite', sortOrder: 0 },
+ { key: 'VOLUNTEER_NAME', label: 'Canvasser Name', description: 'Name of the volunteer who recorded the sign request', isRequired: true, isConditional: false, sampleValue: 'John Smith', sortOrder: 1 },
+ { key: 'ADDRESS', label: 'Address', description: 'Street address where sign was requested', isRequired: true, isConditional: false, sampleValue: '123 Main Street', sortOrder: 2 },
+ { key: 'SHIFT_TITLE', label: 'Shift Title', description: 'Title of the canvassing shift', isRequired: true, isConditional: false, sampleValue: 'Weekend Canvassing - Downtown', sortOrder: 3 },
+ { key: 'SIGN_SIZE', label: 'Sign Size', description: 'Requested sign size', isRequired: false, isConditional: true, sampleValue: 'Large', sortOrder: 4 },
+ { key: 'ADMIN_URL', label: 'Admin URL', description: 'URL to the admin canvass dashboard', isRequired: true, isConditional: false, sampleValue: 'https://app.cmlite.org/app/canvass/dashboard', sortOrder: 5 },
+ ],
+ },
+ {
+ key: 'volunteer-session-summary',
+ name: 'Volunteer: Canvass Session Summary',
+ description: 'Summary email sent to a volunteer after completing a canvassing session',
+ category: EmailTemplateCategory.MAP,
+ subjectLine: 'Canvass session summary — {{CUT_NAME}}',
+ isSystem: true,
+ variables: [
+ { key: 'ORGANIZATION_NAME', label: 'Organization Name', description: 'Name of the organization', isRequired: true, isConditional: false, sampleValue: 'Changemaker Lite', sortOrder: 0 },
+ { key: 'VOLUNTEER_NAME', label: 'Volunteer Name', description: 'Name of the volunteer', isRequired: true, isConditional: false, sampleValue: 'Jane Doe', sortOrder: 1 },
+ { key: 'CUT_NAME', label: 'Cut/Area Name', description: 'Name of the canvassing area', isRequired: true, isConditional: false, sampleValue: 'Downtown Core', sortOrder: 2 },
+ { key: 'SESSION_DATE', label: 'Session Date', description: 'Date of the session', isRequired: true, isConditional: false, sampleValue: 'Saturday, February 22, 2026', sortOrder: 3 },
+ { key: 'VISIT_COUNT', label: 'Visit Count', description: 'Number of doors visited', isRequired: true, isConditional: false, sampleValue: '42', sortOrder: 4 },
+ { key: 'DURATION_MINUTES', label: 'Duration (minutes)', description: 'Session duration in minutes', isRequired: true, isConditional: false, sampleValue: '95', sortOrder: 5 },
+ { key: 'DISTANCE_KM', label: 'Distance (km)', description: 'Distance walked in kilometers', isRequired: true, isConditional: false, sampleValue: '2.3', sortOrder: 6 },
+ { key: 'OUTCOME_BREAKDOWN', label: 'Outcome Breakdown', description: 'HTML table (email) or text list (plain text) of visit outcomes', isRequired: false, isConditional: true, sampleValue: 'Spoke With: 20, Not Home: 15, Refused: 7', sortOrder: 7 },
+ ],
+ },
+ {
+ key: 'volunteer-cancellation-ack',
+ name: 'Volunteer: Signup Cancellation Acknowledgement',
+ description: 'Confirmation email sent to a volunteer when their shift signup is cancelled',
+ category: EmailTemplateCategory.MAP,
+ subjectLine: 'Signup cancelled — {{SHIFT_TITLE}}',
+ isSystem: true,
+ variables: [
+ { key: 'ORGANIZATION_NAME', label: 'Organization Name', description: 'Name of the organization', isRequired: true, isConditional: false, sampleValue: 'Changemaker Lite', sortOrder: 0 },
+ { key: 'VOLUNTEER_NAME', label: 'Volunteer Name', description: 'Name of the volunteer', isRequired: true, isConditional: false, sampleValue: 'Jane Doe', sortOrder: 1 },
+ { key: 'SHIFT_TITLE', label: 'Shift Title', description: 'Title of the cancelled shift signup', isRequired: true, isConditional: false, sampleValue: 'Weekend Canvassing - Downtown', sortOrder: 2 },
+ { key: 'SHIFT_DATE', label: 'Shift Date', description: 'Date of the shift', isRequired: true, isConditional: false, sampleValue: 'Saturday, February 22, 2026', sortOrder: 3 },
+ { key: 'SHIFT_TIME', label: 'Shift Time', description: 'Time range of the shift', isRequired: true, isConditional: false, sampleValue: '10:00 AM — 2:00 PM', sortOrder: 4 },
+ { key: 'SIGNUP_URL', label: 'Signup URL', description: 'URL to browse available shifts', isRequired: true, isConditional: false, sampleValue: 'https://app.cmlite.org/shifts', sortOrder: 5 },
+ ],
+ },
];
let seededCount = 0;
diff --git a/api/src/config/env.ts b/api/src/config/env.ts
index 2f331f19..0a0a3937 100644
--- a/api/src/config/env.ts
+++ b/api/src/config/env.ts
@@ -11,6 +11,11 @@ const envSchema = z.object({
ADMIN_URL: z.string().default('http://localhost:3000'),
DOMAIN: z.string().default('cmlite.org'),
+ // Bunker Ops (Fleet Management)
+ INSTANCE_LABEL: z.string().default(''),
+ BUNKER_OPS_ENABLED: z.string().default('false'),
+ BUNKER_OPS_REMOTE_WRITE_URL: z.string().default(''),
+
// Database
DATABASE_URL: z.string(),
@@ -45,6 +50,7 @@ const envSchema = z.object({
LISTMONK_ADMIN_USER: z.string().default('admin'),
LISTMONK_ADMIN_PASSWORD: z.string().default(''),
LISTMONK_SYNC_ENABLED: z.string().default('false'),
+ LISTMONK_WEBHOOK_SECRET: z.string().default(''),
LISTMONK_PROXY_PORT: z.coerce.number().default(9002),
// Represent API (Canadian electoral data)
@@ -101,6 +107,25 @@ const envSchema = z.object({
HOMEPAGE_URL: z.string().default('http://homepage-changemaker:3000'),
HOMEPAGE_EMBED_PORT: z.coerce.number().default(8887),
+ // Vaultwarden (password manager)
+ VAULTWARDEN_URL: z.string().default('http://vaultwarden-changemaker:80'),
+ VAULTWARDEN_EMBED_PORT: z.coerce.number().default(8890),
+
+ // Rocket.Chat (team chat)
+ ROCKETCHAT_URL: z.string().default('http://rocketchat-changemaker:3000'),
+ ROCKETCHAT_ADMIN_USER: z.string().default(''),
+ ROCKETCHAT_ADMIN_PASSWORD: z.string().default(''),
+ ROCKETCHAT_EMBED_PORT: z.coerce.number().default(8891),
+ ENABLE_CHAT: z.string().default('false'),
+
+ // Gancio (event management)
+ GANCIO_URL: z.string().default('http://gancio-changemaker:13120'),
+ GANCIO_PORT: z.coerce.number().default(8092),
+ GANCIO_EMBED_PORT: z.coerce.number().default(8892),
+ GANCIO_ADMIN_USER: z.string().default('admin'),
+ GANCIO_ADMIN_PASSWORD: z.string().default(''),
+ GANCIO_SYNC_ENABLED: z.string().default('false'),
+
// Pangolin (tunnel / reverse proxy)
PANGOLIN_API_URL: z.string()
.default('')
diff --git a/api/src/modules/dashboard/dashboard.routes.ts b/api/src/modules/dashboard/dashboard.routes.ts
index c4bc1132..07ab0d54 100644
--- a/api/src/modules/dashboard/dashboard.routes.ts
+++ b/api/src/modules/dashboard/dashboard.routes.ts
@@ -9,6 +9,10 @@ import {
getApiMetrics,
getTimeSeries,
getContainerResources,
+ getActivityFeed,
+ getConnectivity,
+ getTodayEvents,
+ getChatSummary,
} from './dashboard.service';
const router = Router();
@@ -121,4 +125,47 @@ router.get('/container-resources', requireRole('SUPER_ADMIN'), async (_req: Requ
}
});
+// GET /api/dashboard/activity — recent activity feed (paginated)
+router.get('/activity', async (req: Request, res: Response, next: NextFunction) => {
+ try {
+ const page = Math.max(1, parseInt(req.query.page as string) || 1);
+ const limit = Math.min(50, Math.max(1, parseInt(req.query.limit as string) || 20));
+ const module = (req.query.module as string) || 'all';
+ const result = await getActivityFeed({ page, limit, module });
+ res.json(result);
+ } catch (err) {
+ next(err);
+ }
+});
+
+// GET /api/dashboard/connectivity — service connectivity checks
+router.get('/connectivity', async (_req: Request, res: Response, next: NextFunction) => {
+ try {
+ const connectivity = await getConnectivity();
+ res.json(connectivity);
+ } catch (err) {
+ next(err);
+ }
+});
+
+// GET /api/dashboard/today-events — today's events from Gancio
+router.get('/today-events', async (_req: Request, res: Response, next: NextFunction) => {
+ try {
+ const events = await getTodayEvents();
+ res.json(events);
+ } catch (err) {
+ next(err);
+ }
+});
+
+// GET /api/dashboard/chat-summary — recent messages from Rocket.Chat
+router.get('/chat-summary', async (_req: Request, res: Response, next: NextFunction) => {
+ try {
+ const summary = await getChatSummary();
+ res.json(summary);
+ } catch (err) {
+ next(err);
+ }
+});
+
export const dashboardRouter = router;
diff --git a/api/src/modules/dashboard/dashboard.service.ts b/api/src/modules/dashboard/dashboard.service.ts
index df1fd899..f4a44bd9 100644
--- a/api/src/modules/dashboard/dashboard.service.ts
+++ b/api/src/modules/dashboard/dashboard.service.ts
@@ -7,6 +7,9 @@ import { env } from '../../config/env';
import { fetchWithTimeout } from '../../utils/fetch-with-timeout';
import { validatePromQLQueries } from '../../utils/promql-validator';
import { isServiceOnline } from '../../utils/health-check';
+import { listmonkClient } from '../../services/listmonk.client';
+import { gancioClient } from '../../services/gancio.client';
+import { rocketchatClient } from '../../services/rocketchat.client';
import { logger } from '../../utils/logger';
// --- Types ---
@@ -264,6 +267,171 @@ export async function getDashboardSummary(): Promise {
};
}
+// --- Activity Feed ---
+
+export interface ActivityItem {
+ id: string;
+ type: 'shift_signup' | 'response_submitted' | 'canvass_completed' | 'email_sent' | 'user_created';
+ module: 'map' | 'influence' | 'users';
+ title: string;
+ description: string;
+ timestamp: string;
+}
+
+export interface ActivityFeedResult {
+ items: ActivityItem[];
+ total: number;
+ page: number;
+ limit: number;
+}
+
+export async function getActivityFeed(opts: {
+ page: number;
+ limit: number;
+ module?: string;
+}): Promise {
+ const { page, limit, module } = opts;
+ const skip = (page - 1) * limit;
+ const since = new Date(Date.now() - 7 * 24 * 60 * 60 * 1000); // last 7 days
+
+ const items: ActivityItem[] = [];
+
+ const queries: Promise[] = [];
+
+ // Shift signups (map module)
+ if (!module || module === 'all' || module === 'map') {
+ queries.push(
+ prisma.shiftSignup.findMany({
+ where: { signupDate: { gte: since }, status: 'CONFIRMED' },
+ orderBy: { signupDate: 'desc' },
+ take: limit * 2,
+ select: { id: true, userName: true, userEmail: true, shiftTitle: true, signupDate: true },
+ }).then(signups => {
+ for (const s of signups) {
+ items.push({
+ id: `signup-${s.id}`,
+ type: 'shift_signup',
+ module: 'map',
+ title: 'Shift Signup',
+ description: `${s.userName || s.userEmail} signed up for ${s.shiftTitle}`,
+ timestamp: s.signupDate.toISOString(),
+ });
+ }
+ }).catch(() => {}),
+ );
+ }
+
+ // Canvass sessions (map module)
+ if (!module || module === 'all' || module === 'map') {
+ queries.push(
+ prisma.canvassSession.findMany({
+ where: { status: 'COMPLETED', endedAt: { gte: since } },
+ orderBy: { endedAt: 'desc' },
+ take: limit * 2,
+ select: { id: true, endedAt: true, userId: true, cut: { select: { name: true } } },
+ }).then(async sessions => {
+ const userIds = sessions.map(s => s.userId);
+ const users = await prisma.user.findMany({
+ where: { id: { in: userIds } },
+ select: { id: true, name: true, email: true },
+ });
+ const userMap = new Map(users.map(u => [u.id, u]));
+ for (const s of sessions) {
+ const user = userMap.get(s.userId);
+ items.push({
+ id: `canvass-${s.id}`,
+ type: 'canvass_completed',
+ module: 'map',
+ title: 'Canvass Completed',
+ description: `${user?.name || user?.email || 'Unknown'} completed a session${s.cut?.name ? ` in ${s.cut.name}` : ''}`,
+ timestamp: (s.endedAt || new Date()).toISOString(),
+ });
+ }
+ }).catch(() => {}),
+ );
+ }
+
+ // Responses (influence module)
+ if (!module || module === 'all' || module === 'influence') {
+ queries.push(
+ prisma.representativeResponse.findMany({
+ where: { createdAt: { gte: since } },
+ orderBy: { createdAt: 'desc' },
+ take: limit * 2,
+ select: { id: true, createdAt: true, submittedByName: true, campaign: { select: { title: true } } },
+ }).then(responses => {
+ for (const r of responses) {
+ items.push({
+ id: `response-${r.id}`,
+ type: 'response_submitted',
+ module: 'influence',
+ title: 'Response Submitted',
+ description: `${r.submittedByName || 'Anonymous'} submitted a response for ${r.campaign.title}`,
+ timestamp: r.createdAt.toISOString(),
+ });
+ }
+ }).catch(() => {}),
+ );
+ }
+
+ // Users (users module)
+ if (!module || module === 'all' || module === 'users') {
+ queries.push(
+ prisma.user.findMany({
+ where: { createdAt: { gte: since } },
+ orderBy: { createdAt: 'desc' },
+ take: limit * 2,
+ select: { id: true, name: true, email: true, role: true, createdAt: true },
+ }).then(users => {
+ for (const u of users) {
+ items.push({
+ id: `user-${u.id}`,
+ type: 'user_created',
+ module: 'users',
+ title: 'New User',
+ description: `${u.name || u.email} (${u.role.replace(/_/g, ' ')})`,
+ timestamp: u.createdAt.toISOString(),
+ });
+ }
+ }).catch(() => {}),
+ );
+ }
+
+ await Promise.all(queries);
+
+ // Sort by timestamp descending, paginate
+ items.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
+ const total = items.length;
+ const paged = items.slice(skip, skip + limit);
+
+ return { items: paged, total, page, limit };
+}
+
+// --- Connectivity Checks ---
+
+export interface ConnectivityStatus {
+ smtp: boolean;
+ listmonk: boolean;
+ rocketchat: boolean;
+ gancio: boolean;
+}
+
+export async function getConnectivity(): Promise {
+ const [smtp, listmonk, rocketchat, gancio] = await Promise.all([
+ isServiceOnline(`${env.SMTP_HOST}`, 3000).catch(() => false),
+ listmonkClient.checkHealth().catch(() => false),
+ isServiceOnline(env.ROCKETCHAT_URL || '', 3000).catch(() => false),
+ gancioClient.isAvailable().catch(() => false),
+ ]);
+
+ return {
+ smtp: !!smtp,
+ listmonk: !!listmonk,
+ rocketchat: !!rocketchat,
+ gancio: !!gancio,
+ };
+}
+
export function getSystemInfo(): SystemInfo {
const totalMem = os.totalmem();
const freeMem = os.freemem();
@@ -619,3 +787,140 @@ export async function getApiMetrics(): Promise {
return null;
}
}
+
+// --- Today's Events from Gancio ---
+
+export interface TodayEvent {
+ id: number;
+ title: string;
+ description: string;
+ placeName: string;
+ startTime: string; // ISO string
+ endTime: string | null;
+ tags: string[];
+}
+
+export interface TodayEventsResult {
+ enabled: boolean;
+ events: TodayEvent[];
+ total: number;
+}
+
+export async function getTodayEvents(): Promise {
+ if (!gancioClient.enabled) {
+ return { enabled: false, events: [], total: 0 };
+ }
+
+ try {
+ // Gancio public API: GET /api/events returns all upcoming events
+ const url = `${env.GANCIO_URL}/api/events`;
+ const res = await fetch(url, { signal: AbortSignal.timeout(5000) });
+
+ if (!res.ok) {
+ logger.debug(`Gancio events API returned ${res.status}`);
+ return { enabled: true, events: [], total: 0 };
+ }
+
+ const rawEvents = await res.json() as Array<{
+ id: number;
+ title: string;
+ description: string;
+ place_name: string;
+ place_address: string;
+ start_datetime: number;
+ end_datetime?: number;
+ tags: string[];
+ }>;
+
+  // Filter to today's events (day boundaries computed in the server's local timezone)
+ const now = new Date();
+ const todayStart = new Date(now.getFullYear(), now.getMonth(), now.getDate());
+ const todayEnd = new Date(todayStart.getTime() + 86400000);
+ const todayStartUnix = Math.floor(todayStart.getTime() / 1000);
+ const todayEndUnix = Math.floor(todayEnd.getTime() / 1000);
+
+ const todayEvents = rawEvents
+ .filter(e => e.start_datetime >= todayStartUnix && e.start_datetime < todayEndUnix)
+ .sort((a, b) => a.start_datetime - b.start_datetime)
+ .slice(0, 20)
+ .map(e => ({
+ id: e.id,
+ title: e.title,
+ description: (e.description || '').slice(0, 200),
+ placeName: e.place_name || '',
+ startTime: new Date(e.start_datetime * 1000).toISOString(),
+ endTime: e.end_datetime ? new Date(e.end_datetime * 1000).toISOString() : null,
+ tags: Array.isArray(e.tags) ? e.tags : [],
+ }));
+
+ return { enabled: true, events: todayEvents, total: todayEvents.length };
+ } catch (err) {
+ logger.debug('Failed to fetch today events from Gancio:', err);
+ return { enabled: true, events: [], total: 0 };
+ }
+}
+
+// --- Chat Summary from Rocket.Chat ---
+
+export interface ChatMessage {
+ id: string;
+ channel: string;
+ username: string;
+ text: string;
+ timestamp: string;
+ isBot: boolean;
+}
+
+export interface ChatSummaryResult {
+ enabled: boolean;
+ messages: ChatMessage[];
+ unreadChannels: number;
+}
+
+export async function getChatSummary(): Promise {
+ const chatEnabled = env.ENABLE_CHAT === 'true';
+ if (!chatEnabled) {
+ return { enabled: false, messages: [], unreadChannels: 0 };
+ }
+
+ try {
+ const online = await rocketchatClient.healthCheck();
+ if (!online) {
+ return { enabled: true, messages: [], unreadChannels: 0 };
+ }
+
+ // Fetch recent messages from our notification channels
+ const channels = ['general', 'shifts', 'canvassing', 'campaigns'];
+ const allMessages: ChatMessage[] = [];
+
+ await Promise.all(
+ channels.map(async (channelName) => {
+ const messages = await rocketchatClient.getChannelHistory(channelName, 5);
+ for (const msg of messages) {
+ if (!msg.msg?.trim()) continue;
+ allMessages.push({
+ id: msg._id,
+ channel: channelName,
+ username: msg.alias || msg.u?.username || 'unknown',
+ text: msg.msg.slice(0, 300),
+ timestamp: msg.ts,
+ isBot: !!(msg.bot || msg.alias),
+ });
+ }
+ }),
+ );
+
+ // Sort by timestamp descending, take most recent 15
+ allMessages.sort((a, b) => new Date(b.timestamp).getTime() - new Date(a.timestamp).getTime());
+ const messages = allMessages.slice(0, 15);
+
+ return {
+ enabled: true,
+ messages,
+ unreadChannels: 0,
+ };
+ } catch (err) {
+ logger.debug('Failed to fetch chat summary:', err);
+ return { enabled: true, messages: [], unreadChannels: 0 };
+ }
+}
diff --git a/api/src/modules/influence/responses/responses.service.ts b/api/src/modules/influence/responses/responses.service.ts
index 99b309eb..9fcfbff0 100644
--- a/api/src/modules/influence/responses/responses.service.ts
+++ b/api/src/modules/influence/responses/responses.service.ts
@@ -1,10 +1,14 @@
import { randomBytes } from 'crypto';
-import { CampaignStatus, Prisma, ResponseStatus } from '@prisma/client';
+import { CampaignStatus, Prisma, ResponseStatus, UserRole } from '@prisma/client';
import { prisma } from '../../../config/database';
import { AppError } from '../../../middleware/error-handler';
import { emailService } from '../../../services/email.service';
+import { notificationQueueService } from '../../../services/notification-queue.service';
+import { getAdminEmailsByRole, isNotificationEnabled } from '../../../services/notification.helper';
import { env } from '../../../config/env';
+import { logger } from '../../../utils/logger';
import { recordResponseSubmission } from '../../../utils/metrics';
+import { rocketchatWebhookService } from '../../../services/rocketchat-webhook.service';
import type {
SubmitResponseInput,
ListPublicResponsesInput,
@@ -77,6 +81,33 @@ export const responsesService = {
recordResponseSubmission();
+ // Notification: admin response submitted alert
+ try {
+ if (await isNotificationEnabled('notifyAdminResponseSubmitted')) {
+ const adminEmails = await getAdminEmailsByRole([UserRole.SUPER_ADMIN, UserRole.INFLUENCE_ADMIN]);
+ if (adminEmails.length > 0) {
+ const adminUrl = `${env.ADMIN_URL || 'http://localhost:3000'}/app/influence/responses`;
+ await notificationQueueService.enqueue({
+ type: 'admin-response-submitted',
+ adminEmails,
+ campaignTitle: campaign.title,
+ representativeName: data.representativeName,
+ responseType: data.responseType,
+ submitterName: data.isAnonymous ? 'Anonymous' : (data.submittedByName || 'Anonymous'),
+ adminUrl,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue response submitted notification:', err);
+ }
+
+ // Notify Rocket.Chat
+ rocketchatWebhookService.onCampaignResponseSubmitted({
+ campaignTitle: campaign.title,
+ representativeName: data.representativeName,
+ }).catch(() => {});
+
return {
id: response.id,
status: response.status,
diff --git a/api/src/modules/listmonk/listmonk-webhook.routes.ts b/api/src/modules/listmonk/listmonk-webhook.routes.ts
new file mode 100644
index 00000000..f5939064
--- /dev/null
+++ b/api/src/modules/listmonk/listmonk-webhook.routes.ts
@@ -0,0 +1,69 @@
+import { Router, Request, Response, NextFunction } from 'express';
+import { prisma } from '../../config/database';
+import { env } from '../../config/env';
+import { logger } from '../../utils/logger';
+
+const router = Router();
+
+/**
+ * POST /api/listmonk/webhook?secret=...
+ *
+ * Handles Listmonk webhook events for reverse sync (e.g., unsubscribes).
+ * Validates a shared secret query parameter. No JWT auth — Listmonk calls this.
+ */
+router.post(
+ '/webhook',
+ async (req: Request, res: Response, next: NextFunction) => {
+ try {
+ const secret = req.query.secret as string;
+ if (!env.LISTMONK_WEBHOOK_SECRET || secret !== env.LISTMONK_WEBHOOK_SECRET) {
+ res.status(403).json({ error: 'Invalid webhook secret' });
+ return;
+ }
+
+ const event = req.body;
+ const eventType = event?.event;
+
+ if (eventType === 'subscriber.unsubscribed' || eventType === 'subscriber.disabled') {
+ const email = event?.data?.subscriber?.email;
+ if (!email) {
+ res.json({ ok: true, action: 'skipped', reason: 'no email' });
+ return;
+ }
+
+ // Store opt-out flag in user's permissions JSON field
+ const user = await prisma.user.findUnique({
+ where: { email },
+ select: { id: true, permissions: true },
+ });
+
+ if (user) {
+ const permissions = (user.permissions as Record) || {};
+ permissions.listmonkOptOut = true;
+ permissions.listmonkOptOutAt = new Date().toISOString();
+
+ await prisma.user.update({
+ where: { id: user.id },
+ data: { permissions: permissions as any },
+ });
+
+ logger.info(`Listmonk webhook: marked user ${email} as opted-out`);
+ res.json({ ok: true, action: 'opted_out', email });
+ return;
+ }
+
+ logger.debug(`Listmonk webhook: no user found for ${email}`);
+ res.json({ ok: true, action: 'skipped', reason: 'user not found' });
+ return;
+ }
+
+ // Unknown event type — acknowledge but don't process
+ logger.debug(`Listmonk webhook: unhandled event type "${eventType}"`);
+ res.json({ ok: true, action: 'ignored', eventType });
+ } catch (err) {
+ next(err);
+ }
+ },
+);
+
+export { router as listmonkWebhookRouter };
diff --git a/api/src/modules/listmonk/listmonk.routes.ts b/api/src/modules/listmonk/listmonk.routes.ts
index 19841788..7c7f9cce 100644
--- a/api/src/modules/listmonk/listmonk.routes.ts
+++ b/api/src/modules/listmonk/listmonk.routes.ts
@@ -4,6 +4,7 @@ import { authenticate } from '../../middleware/auth.middleware';
import { requireRole } from '../../middleware/rbac.middleware';
import { listmonkClient } from '../../services/listmonk.client';
import { listmonkSyncService } from '../../services/listmonk-sync.service';
+import { listmonkEventSyncService } from '../../services/listmonk-event-sync.service';
import { env } from '../../config/env';
const router = Router();
@@ -141,6 +142,14 @@ router.post(
},
);
+// GET /api/listmonk/event-sync-stats — event-driven sync stats
+router.get(
+ '/event-sync-stats',
+ (_req: Request, res: Response) => {
+ res.json(listmonkEventSyncService.getStats());
+ },
+);
+
// GET /api/listmonk/proxy-url — get proxy port + token for iframe embedding
router.get(
'/proxy-url',
diff --git a/api/src/modules/map/canvass/canvass-export.routes.ts b/api/src/modules/map/canvass/canvass-export.routes.ts
new file mode 100644
index 00000000..b1bf80c8
--- /dev/null
+++ b/api/src/modules/map/canvass/canvass-export.routes.ts
@@ -0,0 +1,57 @@
+import { Router, Request, Response, NextFunction } from 'express';
+import { authenticate } from '../../../middleware/auth.middleware';
+import { requireRole } from '../../../middleware/rbac.middleware';
+import { validate } from '../../../middleware/validate';
+import { exportContactsPreviewSchema, exportContactsSchema } from './canvass-export.schemas';
+import {
+ previewExportContacts,
+ exportContactsToCampaign,
+ getCutCampaignAnalytics,
+} from './canvass-export.service';
+
+const router = Router();
+router.use(authenticate);
+router.use(requireRole('SUPER_ADMIN', 'MAP_ADMIN', 'INFLUENCE_ADMIN'));
+
+// POST /api/map/canvass/export-contacts/preview — preview matching contacts
+router.post(
+ '/export-contacts/preview',
+ validate(exportContactsPreviewSchema),
+ async (req: Request, res: Response, next: NextFunction) => {
+ try {
+ const result = await previewExportContacts(req.body);
+ res.json(result);
+ } catch (err) {
+ next(err);
+ }
+ },
+);
+
+// POST /api/map/canvass/export-contacts — export contacts to campaign
+router.post(
+ '/export-contacts',
+ validate(exportContactsSchema),
+ async (req: Request, res: Response, next: NextFunction) => {
+ try {
+ const result = await exportContactsToCampaign(req.body);
+ res.json(result);
+ } catch (err) {
+ next(err);
+ }
+ },
+);
+
+// GET /api/map/canvass/analytics/cuts — per-cut campaign analytics
+router.get(
+ '/analytics/cuts',
+ async (_req: Request, res: Response, next: NextFunction) => {
+ try {
+ const analytics = await getCutCampaignAnalytics();
+ res.json({ cuts: analytics });
+ } catch (err) {
+ next(err);
+ }
+ },
+);
+
+export { router as canvassExportRouter };
diff --git a/api/src/modules/map/canvass/canvass-export.schemas.ts b/api/src/modules/map/canvass/canvass-export.schemas.ts
new file mode 100644
index 00000000..33b07e69
--- /dev/null
+++ b/api/src/modules/map/canvass/canvass-export.schemas.ts
@@ -0,0 +1,21 @@
+import { z } from 'zod';
+
+export const exportContactsPreviewSchema = z.object({
+ cutIds: z.array(z.string()).min(1, 'At least one cut required'),
+ outcomes: z.array(z.enum([
+ 'NOT_HOME', 'REFUSED', 'MOVED', 'ALREADY_VOTED',
+ 'SPOKE_WITH', 'LEFT_LITERATURE', 'COME_BACK_LATER',
+ ])).optional(),
+ supportLevelMin: z.number().int().min(1).max(4).optional(),
+ supportLevelMax: z.number().int().min(1).max(4).optional(),
+ hasEmail: z.boolean().optional(),
+ hasSign: z.boolean().optional(),
+ visitedSince: z.string().datetime().optional(),
+});
+
+export const exportContactsSchema = exportContactsPreviewSchema.extend({
+ campaignId: z.string().min(1, 'Campaign ID required'),
+});
+
+export type ExportContactsPreviewInput = z.infer;
+export type ExportContactsInput = z.infer;
diff --git a/api/src/modules/map/canvass/canvass-export.service.ts b/api/src/modules/map/canvass/canvass-export.service.ts
new file mode 100644
index 00000000..a90df97a
--- /dev/null
+++ b/api/src/modules/map/canvass/canvass-export.service.ts
@@ -0,0 +1,392 @@
+import { Prisma, SupportLevel, VisitOutcome } from '@prisma/client';
+import { prisma } from '../../../config/database';
+import { isPointInPolygon, parseGeoJsonPolygon, calculateBounds } from '../../../utils/spatial';
+import { logger } from '../../../utils/logger';
+import type { ExportContactsPreviewInput, ExportContactsInput } from './canvass-export.schemas';
+
+const SUPPORT_LEVEL_ORDER: Record<SupportLevel, number> = {
+ LEVEL_1: 1,
+ LEVEL_2: 2,
+ LEVEL_3: 3,
+ LEVEL_4: 4,
+};
+
+const MAX_EXPORT = 10_000;
+
+// --- Types ---
+
+interface ContactCandidate {
+ addressId: string;
+ firstName: string | null;
+ lastName: string | null;
+ email: string | null;
+ supportLevel: SupportLevel | null;
+ sign: boolean;
+ cutName: string;
+ locationAddress: string;
+ unitNumber: string | null;
+ latestOutcome: VisitOutcome | null;
+ latestVisitDate: Date | null;
+}
+
+export interface ExportPreviewResult {
+ totalContacts: number;
+ contactsWithEmail: number;
+ byCut: { cutId: string; cutName: string; contacts: number; withEmail: number }[];
+  byOutcome: Record<string, number>;
+  bySupportLevel: Record<string, number>;
+}
+
+export interface ExportResult {
+ created: number;
+ skippedDuplicate: number;
+ skippedNoEmail: number;
+ campaignId: string;
+ campaignTitle: string;
+}
+
+export interface CutCampaignAnalytics {
+ cutId: string;
+ cutName: string;
+ totalAddresses: number;
+ visitedAddresses: number;
+ completionPct: number;
+ addressesWithEmail: number;
+  supportBreakdown: Record<string, number>;
+}
+
+// --- Helpers ---
+
+async function resolveContactsFromCuts(
+ filters: ExportContactsPreviewInput,
+): Promise<ContactCandidate[]> {
+ const cuts = await prisma.cut.findMany({
+ where: { id: { in: filters.cutIds } },
+ select: { id: true, name: true, geojson: true },
+ });
+
+ const allContacts: ContactCandidate[] = [];
+
+ for (const cut of cuts) {
+ const polygons = parseGeoJsonPolygon(cut.geojson);
+
+ // Calculate bounds for a fast DB pre-filter
+ const allCoords = polygons.flat();
+ const bounds = calculateBounds(allCoords);
+
+ const locations = await prisma.location.findMany({
+ where: {
+ latitude: { gte: new Prisma.Decimal(bounds.minLat.toString()), lte: new Prisma.Decimal(bounds.maxLat.toString()) },
+ longitude: { gte: new Prisma.Decimal(bounds.minLng.toString()), lte: new Prisma.Decimal(bounds.maxLng.toString()) },
+ },
+ select: {
+ id: true,
+ latitude: true,
+ longitude: true,
+ address: true,
+ addresses: {
+ select: {
+ id: true,
+ unitNumber: true,
+ firstName: true,
+ lastName: true,
+ email: true,
+ supportLevel: true,
+ sign: true,
+ },
+ },
+ },
+ });
+
+ // In-memory polygon filter
+ for (const loc of locations) {
+ const lat = Number(loc.latitude);
+ const lng = Number(loc.longitude);
+ if (!polygons.some(p => isPointInPolygon(lat, lng, p))) continue;
+
+ for (const addr of loc.addresses) {
+ allContacts.push({
+ addressId: addr.id,
+ firstName: addr.firstName,
+ lastName: addr.lastName,
+ email: addr.email,
+ supportLevel: addr.supportLevel,
+ sign: addr.sign,
+ cutName: cut.name,
+ locationAddress: loc.address,
+ unitNumber: addr.unitNumber,
+ latestOutcome: null,
+ latestVisitDate: null,
+ });
+ }
+ }
+ }
+
+ if (allContacts.length === 0) return [];
+
+ // Fetch latest visit for each address to apply outcome/date filters
+ const addressIds = [...new Set(allContacts.map(c => c.addressId))];
+ const latestVisits = await prisma.canvassVisit.findMany({
+ where: { addressId: { in: addressIds } },
+ distinct: ['addressId'],
+ orderBy: { visitedAt: 'desc' },
+ select: { addressId: true, outcome: true, visitedAt: true },
+ });
+ const visitMap = new Map(latestVisits.map(v => [v.addressId, v]));
+
+ // Annotate contacts with latest visit data
+ for (const contact of allContacts) {
+ const visit = visitMap.get(contact.addressId);
+ if (visit) {
+ contact.latestOutcome = visit.outcome;
+ contact.latestVisitDate = visit.visitedAt;
+ }
+ }
+
+ // Apply filters
+ let filtered = allContacts;
+
+ if (filters.outcomes && filters.outcomes.length > 0) {
+ const outcomeSet = new Set(filters.outcomes);
+ filtered = filtered.filter(c => c.latestOutcome && outcomeSet.has(c.latestOutcome));
+ }
+
+ if (filters.supportLevelMin !== undefined || filters.supportLevelMax !== undefined) {
+ const min = filters.supportLevelMin ?? 1;
+ const max = filters.supportLevelMax ?? 4;
+ filtered = filtered.filter(c => {
+ if (!c.supportLevel) return false;
+ const level = SUPPORT_LEVEL_ORDER[c.supportLevel] ?? 0;
+ return level >= min && level <= max;
+ });
+ }
+
+ if (filters.hasEmail === true) {
+ filtered = filtered.filter(c => !!c.email);
+ }
+
+ if (filters.hasSign === true) {
+ filtered = filtered.filter(c => c.sign);
+ }
+
+ if (filters.visitedSince) {
+ const since = new Date(filters.visitedSince);
+ filtered = filtered.filter(c => c.latestVisitDate && c.latestVisitDate >= since);
+ }
+
+ // Deduplicate by addressId (in case of overlapping cuts)
+  const seen = new Set<string>();
+ const deduped: ContactCandidate[] = [];
+ for (const c of filtered) {
+ if (!seen.has(c.addressId)) {
+ seen.add(c.addressId);
+ deduped.push(c);
+ }
+ }
+
+ return deduped.slice(0, MAX_EXPORT);
+}
+
+// --- Service Functions ---
+
+export async function previewExportContacts(
+ filters: ExportContactsPreviewInput,
+): Promise<ExportPreviewResult> {
+ const contacts = await resolveContactsFromCuts(filters);
+
+  const byCut: Record<string, { cutName: string; contacts: number; withEmail: number }> = {};
+  const byOutcome: Record<string, number> = {};
+  const bySupportLevel: Record<string, number> = {};
+ let withEmail = 0;
+
+ for (const c of contacts) {
+ // By cut
+ const cutKey = c.cutName;
+ if (!byCut[cutKey]) byCut[cutKey] = { cutName: cutKey, contacts: 0, withEmail: 0 };
+ byCut[cutKey].contacts++;
+ if (c.email) {
+ byCut[cutKey].withEmail++;
+ withEmail++;
+ }
+
+ // By outcome
+ if (c.latestOutcome) {
+ byOutcome[c.latestOutcome] = (byOutcome[c.latestOutcome] || 0) + 1;
+ }
+
+ // By support level
+ if (c.supportLevel) {
+ bySupportLevel[c.supportLevel] = (bySupportLevel[c.supportLevel] || 0) + 1;
+ }
+ }
+
+ // Map byCut to array with cutIds
+ const cuts = await prisma.cut.findMany({
+ where: { id: { in: filters.cutIds } },
+ select: { id: true, name: true },
+ });
+ const cutIdMap = new Map(cuts.map(c => [c.name, c.id]));
+
+ return {
+ totalContacts: contacts.length,
+ contactsWithEmail: withEmail,
+ byCut: Object.entries(byCut).map(([name, data]) => ({
+ cutId: cutIdMap.get(name) || '',
+ cutName: name,
+ contacts: data.contacts,
+ withEmail: data.withEmail,
+ })),
+ byOutcome,
+ bySupportLevel,
+ };
+}
+
+export async function exportContactsToCampaign(
+ filters: ExportContactsInput,
+): Promise<ExportResult> {
+ const campaign = await prisma.campaign.findUnique({
+ where: { id: filters.campaignId },
+ select: { id: true, title: true, slug: true, allowCustomRecipients: true },
+ });
+
+ if (!campaign) {
+ throw new Error('Campaign not found');
+ }
+
+ // Auto-enable custom recipients if not already
+ if (!campaign.allowCustomRecipients) {
+ await prisma.campaign.update({
+ where: { id: campaign.id },
+ data: { allowCustomRecipients: true },
+ });
+ }
+
+ const contacts = await resolveContactsFromCuts(filters);
+
+ // Get existing custom recipients to deduplicate
+ const existing = await prisma.customRecipient.findMany({
+ where: { campaignId: campaign.id },
+ select: { recipientEmail: true },
+ });
+ const existingEmails = new Set(existing.map(r => r.recipientEmail.toLowerCase()));
+
+ let created = 0;
+ let skippedDuplicate = 0;
+ let skippedNoEmail = 0;
+
+ // Deduplicate by email across all contacts
+  const emailsSeen = new Set<string>();
+
+ const toCreate: Prisma.CustomRecipientCreateManyInput[] = [];
+
+ for (const c of contacts) {
+ if (!c.email) {
+ skippedNoEmail++;
+ continue;
+ }
+
+ const emailLower = c.email.toLowerCase();
+
+ if (existingEmails.has(emailLower) || emailsSeen.has(emailLower)) {
+ skippedDuplicate++;
+ continue;
+ }
+
+ emailsSeen.add(emailLower);
+
+ const name = [c.firstName, c.lastName].filter(Boolean).join(' ') || c.email;
+ const addrLabel = c.unitNumber
+ ? `${c.locationAddress} Unit ${c.unitNumber}`
+ : c.locationAddress;
+
+ toCreate.push({
+ campaignId: campaign.id,
+ campaignSlug: campaign.slug,
+ recipientName: name,
+ recipientEmail: c.email,
+ notes: `From canvass: ${c.cutName} — ${addrLabel}${c.supportLevel ? ` (${c.supportLevel})` : ''}${c.latestOutcome ? ` [${c.latestOutcome}]` : ''}`,
+ });
+ }
+
+ if (toCreate.length > 0) {
+ await prisma.customRecipient.createMany({ data: toCreate });
+ created = toCreate.length;
+ }
+
+ logger.info(`Exported ${created} contacts to campaign "${campaign.title}" (${skippedDuplicate} dupes, ${skippedNoEmail} no email)`);
+
+ return {
+ created,
+ skippedDuplicate,
+ skippedNoEmail,
+ campaignId: campaign.id,
+ campaignTitle: campaign.title,
+ };
+}
+
+export async function getCutCampaignAnalytics(): Promise<CutCampaignAnalytics[]> {
+ const cuts = await prisma.cut.findMany({
+ select: { id: true, name: true, geojson: true },
+ orderBy: { name: 'asc' },
+ });
+
+ const results: CutCampaignAnalytics[] = [];
+
+ for (const cut of cuts) {
+ const polygons = parseGeoJsonPolygon(cut.geojson);
+ const allCoords = polygons.flat();
+ const bounds = calculateBounds(allCoords);
+
+ const locations = await prisma.location.findMany({
+ where: {
+ latitude: { gte: new Prisma.Decimal(bounds.minLat.toString()), lte: new Prisma.Decimal(bounds.maxLat.toString()) },
+ longitude: { gte: new Prisma.Decimal(bounds.minLng.toString()), lte: new Prisma.Decimal(bounds.maxLng.toString()) },
+ },
+ select: {
+ latitude: true,
+ longitude: true,
+ addresses: {
+ select: { id: true, email: true, supportLevel: true },
+ },
+ },
+ });
+
+ let totalAddresses = 0;
+ let addressesWithEmail = 0;
+ const addressIds: string[] = [];
+    const supportBreakdown: Record<string, number> = {};
+
+ for (const loc of locations) {
+ if (!polygons.some(p => isPointInPolygon(Number(loc.latitude), Number(loc.longitude), p))) continue;
+
+ for (const addr of loc.addresses) {
+ totalAddresses++;
+ addressIds.push(addr.id);
+ if (addr.email) addressesWithEmail++;
+ if (addr.supportLevel) {
+ supportBreakdown[addr.supportLevel] = (supportBreakdown[addr.supportLevel] || 0) + 1;
+ }
+ }
+ }
+
+ const visitedCount = addressIds.length > 0
+ ? await prisma.canvassVisit.findMany({
+ where: { addressId: { in: addressIds } },
+ distinct: ['addressId'],
+ select: { addressId: true },
+ }).then(r => r.length)
+ : 0;
+
+ results.push({
+ cutId: cut.id,
+ cutName: cut.name,
+ totalAddresses,
+ visitedAddresses: visitedCount,
+ completionPct: totalAddresses > 0 ? Math.round((visitedCount / totalAddresses) * 100) : 0,
+ addressesWithEmail,
+ supportBreakdown,
+ });
+ }
+
+ return results;
+}
diff --git a/api/src/modules/map/canvass/canvass.service.ts b/api/src/modules/map/canvass/canvass.service.ts
index 6c9ee0ce..24ff346e 100644
--- a/api/src/modules/map/canvass/canvass.service.ts
+++ b/api/src/modules/map/canvass/canvass.service.ts
@@ -7,6 +7,11 @@ import { recordLocationQuery } from '../../../utils/metrics';
import { isPointInPolygon, parseGeoJsonPolygon } from '../../../utils/spatial';
import { calculateWalkingRoute } from './canvass-route.service';
import { recordCanvassVisit, setActiveCanvassSessions } from '../../../utils/metrics';
+import { notificationQueueService } from '../../../services/notification-queue.service';
+import { getAdminEmailsByRole, isNotificationEnabled } from '../../../services/notification.helper';
+import { env } from '../../../config/env';
+import { rocketchatWebhookService } from '../../../services/rocketchat-webhook.service';
+import { listmonkEventSyncService } from '../../../services/listmonk-event-sync.service';
import type {
RecordVisitInput,
BulkRecordVisitInput,
@@ -237,6 +242,90 @@ export const canvassService = {
// Recalculate cut completion percentage
await this.recalculateCutCompletion(session.cutId);
+ // Notify Rocket.Chat
+ try {
+ const [rcUser, rcCut, rcVisitCount] = await Promise.all([
+ prisma.user.findUnique({ where: { id: userId }, select: { name: true, email: true } }),
+ prisma.cut.findUnique({ where: { id: session.cutId }, select: { name: true } }),
+ prisma.canvassVisit.count({ where: { sessionId } }),
+ ]);
+ rocketchatWebhookService.onCanvassSessionCompleted({
+ userName: rcUser?.name || rcUser?.email || 'Unknown',
+ visitCount: rcVisitCount,
+ cutName: rcCut?.name || undefined,
+ }).catch(() => {});
+ } catch { /* non-critical */ }
+
+ // Notification: volunteer session summary
+ try {
+ if (await isNotificationEnabled('notifyVolunteerSessionSummary')) {
+ const [user, cut, visitCount, outcomeGroups, trackingSession] = await Promise.all([
+ prisma.user.findUnique({ where: { id: userId }, select: { email: true, name: true } }),
+ prisma.cut.findUnique({ where: { id: session.cutId }, select: { name: true } }),
+ prisma.canvassVisit.count({ where: { sessionId } }),
+ prisma.canvassVisit.groupBy({ by: ['outcome'], where: { sessionId }, _count: true }),
+ prisma.trackingSession.findFirst({
+ where: { userId, canvassSessionId: sessionId },
+ select: { totalDistanceM: true },
+ }),
+ ]);
+
+ if (user && visitCount > 0) {
+ const durationMs = updated.endedAt
+ ? updated.endedAt.getTime() - session.startedAt.getTime()
+ : 0;
+ const durationMinutes = Math.round(durationMs / 60000);
+ const distanceKm = trackingSession?.totalDistanceM
+ ? Number(trackingSession.totalDistanceM) / 1000
+ : 0;
+
+          const outcomeBreakdown: Record<string, number> = {};
+ for (const row of outcomeGroups) {
+ outcomeBreakdown[row.outcome] = row._count;
+ }
+
+ await notificationQueueService.enqueue({
+ type: 'volunteer-session-summary',
+ volunteerEmail: user.email,
+ volunteerName: user.name || user.email,
+ cutName: cut?.name || 'Unknown area',
+ sessionDate: session.startedAt.toLocaleDateString('en-CA', {
+ weekday: 'long', year: 'numeric', month: 'long', day: 'numeric',
+ }),
+ visitCount,
+ durationMinutes,
+ distanceKm,
+ outcomeBreakdown,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue session summary notification:', err);
+ }
+
+ // Listmonk event sync — add canvasser to subscribers
+ try {
+ const [syncUser, syncCut, syncVisitCount, syncOutcomes] = await Promise.all([
+ prisma.user.findUnique({ where: { id: userId }, select: { email: true, name: true } }),
+ prisma.cut.findUnique({ where: { id: session.cutId }, select: { name: true } }),
+ prisma.canvassVisit.count({ where: { sessionId } }),
+ prisma.canvassVisit.groupBy({ by: ['outcome'], where: { sessionId }, _count: true }),
+ ]);
+ if (syncUser) {
+      const outcomes: Record<string, number> = {};
+ for (const row of syncOutcomes) {
+ outcomes[row.outcome] = row._count;
+ }
+ listmonkEventSyncService.onCanvassSessionCompleted({
+ email: syncUser.email,
+ name: syncUser.name || syncUser.email,
+ cutName: syncCut?.name || 'Unknown',
+ visitCount: syncVisitCount,
+ outcomes,
+ }).catch(() => {});
+ }
+ } catch { /* non-critical */ }
+
return updated;
},
@@ -549,6 +638,36 @@ export const canvassService = {
recordCanvassVisit(data.outcome);
+ // Notification: sign request alert for admins
+ if (data.signRequested) {
+ try {
+ if (await isNotificationEnabled('notifyAdminSignRequested')) {
+ const adminEmails = await getAdminEmailsByRole([UserRole.SUPER_ADMIN, UserRole.MAP_ADMIN]);
+ if (adminEmails.length > 0) {
+ const [volunteer, shift] = await Promise.all([
+ prisma.user.findUnique({ where: { id: userId }, select: { name: true } }),
+ data.shiftId ? prisma.shift.findUnique({ where: { id: data.shiftId }, select: { title: true } }) : null,
+ ]);
+
+ const addressStr = visit.address?.location?.address || 'Unknown address';
+ const adminUrl = `${env.ADMIN_URL || 'http://localhost:3000'}/app/canvass/dashboard`;
+
+ await notificationQueueService.enqueue({
+ type: 'admin-sign-requested',
+ adminEmails,
+ volunteerName: volunteer?.name || 'Unknown',
+ address: addressStr,
+ shiftTitle: shift?.title || 'No shift',
+ signSize: data.signSize || '',
+ adminUrl,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue sign request notification:', err);
+ }
+ }
+
return visit;
},
diff --git a/api/src/modules/map/shifts/shifts.service.ts b/api/src/modules/map/shifts/shifts.service.ts
index af912a72..4bcf1012 100644
--- a/api/src/modules/map/shifts/shifts.service.ts
+++ b/api/src/modules/map/shifts/shifts.service.ts
@@ -1,11 +1,16 @@
import bcrypt from 'bcryptjs';
-import { Prisma, ShiftStatus, SignupStatus, SignupSource } from '@prisma/client';
+import { Prisma, ShiftStatus, SignupStatus, SignupSource, UserRole } from '@prisma/client';
import { prisma } from '../../../config/database';
import { AppError } from '../../../middleware/error-handler';
import { emailService } from '../../../services/email.service';
+import { notificationQueueService } from '../../../services/notification-queue.service';
+import { getAdminEmailsByRole, isNotificationEnabled } from '../../../services/notification.helper';
import { env } from '../../../config/env';
import { logger } from '../../../utils/logger';
import { recordShiftSignup } from '../../../utils/metrics';
+import { rocketchatWebhookService } from '../../../services/rocketchat-webhook.service';
+import { listmonkEventSyncService } from '../../../services/listmonk-event-sync.service';
+import { gancioClient } from '../../../services/gancio.client';
import type {
CreateShiftInput,
UpdateShiftInput,
@@ -121,6 +126,27 @@ export const shiftsService = {
},
});
+ // Gancio event sync (fire-and-forget)
+ if (gancioClient.enabled) {
+ gancioClient.createEvent({
+ title: shift.title,
+ description: shift.description,
+ location: shift.location,
+ date: shift.date,
+ startTime: shift.startTime,
+ endTime: shift.endTime,
+ }).then(async (eventId) => {
+ if (eventId) {
+ await prisma.shift.update({
+ where: { id: shift.id },
+ data: { gancioEventId: eventId },
+ });
+ }
+ }).catch((err) => {
+ logger.warn('Gancio sync on shift create failed:', err);
+ });
+ }
+
return shift;
},
@@ -150,6 +176,20 @@ export const shiftsService = {
data: updateData,
});
+ // Gancio event sync (fire-and-forget)
+ if (gancioClient.enabled && shift.gancioEventId) {
+ gancioClient.updateEvent(shift.gancioEventId, {
+ title: shift.title,
+ description: shift.description,
+ location: shift.location,
+ date: shift.date,
+ startTime: shift.startTime,
+ endTime: shift.endTime,
+ }).catch((err) => {
+ logger.warn('Gancio sync on shift update failed:', err);
+ });
+ }
+
return shift;
},
@@ -159,6 +199,13 @@ export const shiftsService = {
throw new AppError(404, 'Shift not found', 'SHIFT_NOT_FOUND');
}
+ // Delete Gancio event before deleting shift (fire-and-forget)
+ if (gancioClient.enabled && existing.gancioEventId) {
+ gancioClient.deleteEvent(existing.gancioEventId).catch((err) => {
+ logger.warn('Gancio sync on shift delete failed:', err);
+ });
+ }
+
await prisma.shift.delete({ where: { id } });
},
@@ -246,6 +293,14 @@ export const shiftsService = {
}),
]);
+ // Listmonk event sync
+ listmonkEventSyncService.onShiftSignup({
+ email: data.userEmail,
+ name: data.userName || data.userEmail,
+ shiftTitle: shift.title,
+ shiftDate: new Date(shift.date).toISOString().split('T')[0],
+ }).catch(() => {});
+
return signup;
},
@@ -403,8 +458,74 @@ export const shiftsService = {
logger.error('Failed to send shift signup confirmation email:', err);
}
+ // Notify Rocket.Chat
+ const shiftDateStr = new Date(shift.date).toLocaleDateString('en-CA', { month: 'short', day: 'numeric' });
+ rocketchatWebhookService.onShiftSignup({
+ userName: data.name || data.email,
+ shiftTitle: shift.title,
+ shiftDate: shiftDateStr,
+ }).catch(() => {});
+
+ // Notification: admin shift signup alert
+ try {
+ if (await isNotificationEnabled('notifyAdminShiftSignup')) {
+ const adminEmails = await getAdminEmailsByRole([UserRole.SUPER_ADMIN, UserRole.MAP_ADMIN]);
+ if (adminEmails.length > 0) {
+ const adminUrl = `${env.ADMIN_URL || 'http://localhost:3000'}/app/map/shifts`;
+ const shiftDate = new Date(shift.date);
+ const dateStr = shiftDate.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });
+ await notificationQueueService.enqueue({
+ type: 'admin-shift-signup',
+ adminEmails,
+ shiftTitle: shift.title,
+ shiftDate: dateStr,
+ volunteerName: data.name,
+ volunteerEmail: data.email,
+ signupSource: 'Public Form',
+ adminUrl,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue admin shift signup notification:', err);
+ }
+
+ // Notification: schedule 24h pre-shift reminder
+ try {
+ if (await isNotificationEnabled('notifyVolunteerShiftReminder')) {
+ const shiftDatetime = new Date(shift.date);
+ const [startH, startM] = shift.startTime.split(':').map(Number);
+ shiftDatetime.setHours(startH || 0, startM || 0, 0, 0);
+
+ await notificationQueueService.scheduleShiftReminder({
+ type: 'volunteer-shift-reminder',
+ recipientEmail: data.email,
+ recipientName: data.name,
+ shiftTitle: shift.title,
+ shiftDate: shiftDatetime.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' }),
+ shiftStartTime: shift.startTime,
+ shiftEndTime: shift.endTime,
+ shiftLocation: shift.location || 'TBD',
+ shiftDescription: shift.description || '',
+ currentVolunteers: shift.currentVolunteers + 1,
+ maxVolunteers: shift.maxVolunteers,
+ shiftStatus: shift.status,
+ }, shiftDatetime);
+ }
+ } catch (err) {
+ logger.error('Failed to schedule shift reminder:', err);
+ }
+
recordShiftSignup();
+ // Listmonk event sync
+ listmonkEventSyncService.onShiftSignup({
+ email: data.email,
+ name: data.name,
+ shiftTitle: shift.title,
+ shiftDate: new Date(shift.date).toISOString().split('T')[0],
+ }).catch(() => {});
+
return { signup, isNewUser };
},
@@ -421,6 +542,8 @@ export const shiftsService = {
throw new AppError(400, 'Signup already cancelled', 'ALREADY_CANCELLED');
}
+ const shift = await prisma.shift.findUnique({ where: { id: shiftId } });
+
await prisma.$transaction([
prisma.shiftSignup.update({
where: { id: signup.id },
@@ -434,6 +557,68 @@ export const shiftsService = {
},
}),
]);
+
+ // Notification: cancellation acknowledgement + cancel reminder
+ try {
+ if (shift && await isNotificationEnabled('notifyVolunteerCancellation')) {
+ const shiftDate = new Date(shift.date);
+ const dateStr = shiftDate.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });
+ const signupUrl = `${env.CORS_ORIGINS.split(',')[0].trim()}/shifts`;
+ await notificationQueueService.enqueue({
+ type: 'volunteer-cancellation',
+ volunteerEmail: userEmail,
+ volunteerName: signup.userName || userEmail,
+ shiftTitle: shift.title,
+ shiftDate: dateStr,
+ shiftTime: `${shift.startTime} — ${shift.endTime}`,
+ signupUrl,
+ });
+ }
+
+ // Cancel the pending shift reminder
+ if (shift) {
+ const shiftDatetime = new Date(shift.date);
+ const [startH, startM] = shift.startTime.split(':').map(Number);
+ shiftDatetime.setHours(startH || 0, startM || 0, 0, 0);
+ await notificationQueueService.cancelShiftReminder(userEmail, shiftDatetime);
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue cancellation notification:', err);
+ }
+
+ // Notify Rocket.Chat of cancellation
+ if (shift) {
+ const shiftDateStr = new Date(shift.date).toLocaleDateString('en-CA', { month: 'short', day: 'numeric' });
+ rocketchatWebhookService.onShiftCancellation({
+ userName: signup.userName || userEmail,
+ shiftTitle: shift.title,
+ shiftDate: shiftDateStr,
+ }).catch(() => {});
+ }
+
+ // Notification: admin shift cancellation alert
+ try {
+ if (shift && await isNotificationEnabled('notifyAdminShiftCancellation')) {
+ const adminEmails = await getAdminEmailsByRole([UserRole.SUPER_ADMIN, UserRole.MAP_ADMIN]);
+ if (adminEmails.length > 0) {
+ const adminUrl = `${env.ADMIN_URL || 'http://localhost:3000'}/app/map/shifts`;
+ const shiftDate = new Date(shift.date);
+ const dateStr = shiftDate.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });
+ await notificationQueueService.enqueue({
+ type: 'admin-shift-cancellation',
+ adminEmails,
+ shiftTitle: shift.title,
+ shiftDate: dateStr,
+ volunteerName: signup.userName || userEmail,
+ volunteerEmail: userEmail,
+ cancellationSource: 'Public Form',
+ adminUrl,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue admin shift cancellation notification:', err);
+ }
},
async getUpcomingForVolunteer(userId: string) {
@@ -560,11 +745,77 @@ export const shiftsService = {
logger.error('Failed to send volunteer shift signup confirmation email:', err);
}
+ // Notify Rocket.Chat
+ const shiftDateStr = new Date(shift.date).toLocaleDateString('en-CA', { month: 'short', day: 'numeric' });
+ rocketchatWebhookService.onShiftSignup({
+ userName: user.name || user.email,
+ shiftTitle: shift.title,
+ shiftDate: shiftDateStr,
+ }).catch(() => {});
+
+ // Notification: admin shift signup alert
+ try {
+ if (await isNotificationEnabled('notifyAdminShiftSignup')) {
+ const adminEmails = await getAdminEmailsByRole([UserRole.SUPER_ADMIN, UserRole.MAP_ADMIN]);
+ if (adminEmails.length > 0) {
+ const adminUrl = `${env.ADMIN_URL || 'http://localhost:3000'}/app/map/shifts`;
+ const shiftDate = new Date(shift.date);
+ const dateStr = shiftDate.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });
+ await notificationQueueService.enqueue({
+ type: 'admin-shift-signup',
+ adminEmails,
+ shiftTitle: shift.title,
+ shiftDate: dateStr,
+ volunteerName: user.name || user.email,
+ volunteerEmail: user.email,
+ signupSource: 'Authenticated Volunteer',
+ adminUrl,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue admin shift signup notification:', err);
+ }
+
+ // Notification: schedule 24h pre-shift reminder
+ try {
+ if (await isNotificationEnabled('notifyVolunteerShiftReminder')) {
+ const shiftDatetime = new Date(shift.date);
+ const [startH, startM] = shift.startTime.split(':').map(Number);
+ shiftDatetime.setHours(startH || 0, startM || 0, 0, 0);
+
+ await notificationQueueService.scheduleShiftReminder({
+ type: 'volunteer-shift-reminder',
+ recipientEmail: user.email,
+ recipientName: user.name || user.email,
+ shiftTitle: shift.title,
+ shiftDate: shiftDatetime.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' }),
+ shiftStartTime: shift.startTime,
+ shiftEndTime: shift.endTime,
+ shiftLocation: shift.location || 'TBD',
+ shiftDescription: shift.description || '',
+ currentVolunteers: shift.currentVolunteers + 1,
+ maxVolunteers: shift.maxVolunteers,
+ shiftStatus: shift.status,
+ }, shiftDatetime);
+ }
+ } catch (err) {
+ logger.error('Failed to schedule shift reminder:', err);
+ }
+
+ // Listmonk event sync
+ listmonkEventSyncService.onShiftSignup({
+ email: user.email,
+ name: user.name || user.email,
+ shiftTitle: shift.title,
+ shiftDate: new Date(shift.date).toISOString().split('T')[0],
+ }).catch(() => {});
+
return signup;
},
async cancelVolunteerSignup(shiftId: string, userId: string) {
- const user = await prisma.user.findUnique({ where: { id: userId }, select: { email: true } });
+ const user = await prisma.user.findUnique({ where: { id: userId }, select: { email: true, name: true } });
if (!user) throw new AppError(404, 'User not found', 'USER_NOT_FOUND');
const signup = await prisma.shiftSignup.findUnique({
@@ -574,6 +825,8 @@ export const shiftsService = {
if (!signup) throw new AppError(404, 'Signup not found', 'SIGNUP_NOT_FOUND');
if (signup.status === SignupStatus.CANCELLED) throw new AppError(400, 'Already cancelled', 'ALREADY_CANCELLED');
+ const shift = await prisma.shift.findUnique({ where: { id: shiftId } });
+
await prisma.$transaction([
prisma.shiftSignup.update({
where: { id: signup.id },
@@ -587,6 +840,68 @@ export const shiftsService = {
},
}),
]);
+
+ // Notification: cancellation acknowledgement + cancel reminder
+ try {
+ if (shift && await isNotificationEnabled('notifyVolunteerCancellation')) {
+ const shiftDate = new Date(shift.date);
+ const dateStr = shiftDate.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });
+ const signupUrl = `${env.CORS_ORIGINS.split(',')[0].trim()}/shifts`;
+ await notificationQueueService.enqueue({
+ type: 'volunteer-cancellation',
+ volunteerEmail: user.email,
+ volunteerName: user.name || user.email,
+ shiftTitle: shift.title,
+ shiftDate: dateStr,
+ shiftTime: `${shift.startTime} — ${shift.endTime}`,
+ signupUrl,
+ });
+ }
+
+ // Cancel the pending shift reminder
+ if (shift) {
+ const shiftDatetime = new Date(shift.date);
+ const [startH, startM] = shift.startTime.split(':').map(Number);
+ shiftDatetime.setHours(startH || 0, startM || 0, 0, 0);
+ await notificationQueueService.cancelShiftReminder(user.email, shiftDatetime);
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue cancellation notification:', err);
+ }
+
+ // Notify Rocket.Chat of cancellation
+ if (shift) {
+ const shiftDateStr = new Date(shift.date).toLocaleDateString('en-CA', { month: 'short', day: 'numeric' });
+ rocketchatWebhookService.onShiftCancellation({
+ userName: user.name || user.email,
+ shiftTitle: shift.title,
+ shiftDate: shiftDateStr,
+ }).catch(() => {});
+ }
+
+ // Notification: admin shift cancellation alert
+ try {
+ if (shift && await isNotificationEnabled('notifyAdminShiftCancellation')) {
+ const adminEmails = await getAdminEmailsByRole([UserRole.SUPER_ADMIN, UserRole.MAP_ADMIN]);
+ if (adminEmails.length > 0) {
+ const adminUrl = `${env.ADMIN_URL || 'http://localhost:3000'}/app/map/shifts`;
+ const shiftDate = new Date(shift.date);
+ const dateStr = shiftDate.toLocaleDateString('en-CA', { weekday: 'long', year: 'numeric', month: 'long', day: 'numeric' });
+ await notificationQueueService.enqueue({
+ type: 'admin-shift-cancellation',
+ adminEmails,
+ shiftTitle: shift.title,
+ shiftDate: dateStr,
+ volunteerName: user.name || user.email,
+ volunteerEmail: user.email,
+ cancellationSource: 'Volunteer Portal',
+ adminUrl,
+ });
+ }
+ }
+ } catch (err) {
+ logger.error('Failed to enqueue admin shift cancellation notification:', err);
+ }
},
async getMySignups(userId: string) {
diff --git a/api/src/modules/rocketchat/rocketchat.routes.ts b/api/src/modules/rocketchat/rocketchat.routes.ts
new file mode 100644
index 00000000..df42f7e5
--- /dev/null
+++ b/api/src/modules/rocketchat/rocketchat.routes.ts
@@ -0,0 +1,81 @@
+import { Router, Request, Response, NextFunction } from 'express';
+import { authenticate } from '../../middleware/auth.middleware';
+import { requireNonTemp } from '../../middleware/rbac.middleware';
+import { env } from '../../config/env';
+import { logger } from '../../utils/logger';
+import { rocketchatClient } from '../../services/rocketchat.client';
+import { rocketchatService } from './rocketchat.service';
+import { siteSettingsService } from '../settings/settings.service';
+
+const router = Router();
+
+/** Check if chat is enabled (DB setting wins, env var is fallback for first boot) */
+async function isChatEnabled(): Promise<boolean> {
+ try {
+ const settings = await siteSettingsService.get();
+ // DB setting is authoritative; env var is the initial default
+ return settings.enableChat;
+ } catch {
+ return env.ENABLE_CHAT === 'true';
+ }
+}
+
+// GET /api/rocketchat/status — health check (any authenticated user)
+router.get(
+ '/status',
+ authenticate,
+ async (_req: Request, res: Response, next: NextFunction) => {
+ try {
+ const enabled = await isChatEnabled();
+ const online = enabled ? await rocketchatClient.healthCheck() : false;
+ res.json({ online, enabled });
+ } catch (err) {
+ logger.error('RC status check failed:', err);
+ next(err);
+ }
+ },
+);
+
+// GET /api/rocketchat/config — return RC URLs + enabled status
+router.get(
+ '/config',
+ authenticate,
+ async (_req: Request, res: Response, _next: NextFunction) => {
+ const enabled = await isChatEnabled();
+ res.json({
+ enabled,
+ embedPort: env.ROCKETCHAT_EMBED_PORT,
+ subdomain: 'chat',
+ domain: env.DOMAIN,
+ });
+ },
+);
+
+// POST /api/rocketchat/auth — get RC session token for current user (SSO)
+router.post(
+ '/auth',
+ authenticate,
+ requireNonTemp,
+ async (req: Request, res: Response, next: NextFunction) => {
+ try {
+ const enabled = await isChatEnabled();
+ if (!enabled) {
+ res.status(400).json({ error: 'Chat is not enabled' });
+ return;
+ }
+
+ const userId = req.user!.id;
+ const tokenData = await rocketchatService.getAuthToken(userId);
+
+ res.json({
+ authToken: tokenData.authToken,
+ rcUserId: tokenData.rcUserId,
+ });
+ } catch (err) {
+ logger.error('RC auth failed:', err);
+ next(err);
+ }
+ },
+);
+
+export const rocketchatRouter = router;
diff --git a/api/src/modules/rocketchat/rocketchat.service.ts b/api/src/modules/rocketchat/rocketchat.service.ts
new file mode 100644
index 00000000..f92e2176
--- /dev/null
+++ b/api/src/modules/rocketchat/rocketchat.service.ts
@@ -0,0 +1,129 @@
+import { createHmac } from 'crypto';
+import { prisma } from '../../config/database';
+import { env } from '../../config/env';
+import { logger } from '../../utils/logger';
+import { rocketchatClient } from '../../services/rocketchat.client';
+
+// Changemaker role → Rocket.Chat role mapping
+const ROLE_MAP: Record = {
+ SUPER_ADMIN: ['admin'],
+ INFLUENCE_ADMIN: ['moderator'],
+ MAP_ADMIN: ['moderator'],
+ USER: ['user'],
+ TEMP: ['user'],
+};
+
+/**
+ * Generate a deterministic password for a Rocket.Chat user.
+ * Never exposed to users — only used for RC internal auth.
+ */
+function generateRCPassword(userId: string): string {
+ return createHmac('sha256', env.JWT_ACCESS_SECRET)
+ .update(`rc:${userId}`)
+ .digest('hex');
+}
+
+/**
+ * Generate a safe username from email, with collision avoidance suffix.
+ */
+function generateUsername(email: string, suffix = 0): string {
+ const base = email.split('@')[0].toLowerCase().replace(/[^a-z0-9._-]/g, '');
+ return suffix > 0 ? `${base}${suffix}` : base;
+}
+
+class RocketChatService {
+ /**
+ * Get a Rocket.Chat auth token for the given Changemaker user.
+ * Provisions / syncs the RC user as needed.
+ */
+ async getAuthToken(changemakerUserId: string): Promise<{
+ authToken: string;
+ rcUserId: string;
+ }> {
+ // 1. Look up Changemaker user
+ const user = await prisma.user.findUnique({
+ where: { id: changemakerUserId },
+ });
+ if (!user) throw new Error('User not found');
+
+ // 2. Check for cached RC user ID in permissions JSON
+ const permissions = (user.permissions as Record) || {};
+ let rcUserId = permissions._rcUserId as string | undefined;
+
+ if (rcUserId) {
+ // Sync roles on every access
+ const rcRoles = ROLE_MAP[user.role] || ['user'];
+ try {
+ await rocketchatClient.updateUser(rcUserId, {
+ name: user.name || user.email.split('@')[0],
+ roles: rcRoles,
+ });
+ } catch (err) {
+ logger.warn('RC role sync failed, continuing:', err);
+ }
+ } else {
+ // 3. Find or create RC user
+ let rcUser = await rocketchatClient.findUserByEmail(user.email);
+
+ if (!rcUser) {
+ // Generate unique username with collision handling
+ let username = generateUsername(user.email);
+ let suffix = 0;
+ const maxAttempts = 5;
+ while (suffix < maxAttempts) {
+ try {
+ rcUser = await rocketchatClient.createUser({
+ email: user.email,
+ name: user.name || user.email.split('@')[0],
+ username,
+ password: generateRCPassword(user.id),
+ roles: ROLE_MAP[user.role] || ['user'],
+ });
+ break;
+ } catch (err) {
+ if (err instanceof Error && err.message.includes('already in use')) {
+ suffix++;
+ username = generateUsername(user.email, suffix);
+ } else {
+ throw err;
+ }
+ }
+ }
+ if (!rcUser) throw new Error('Failed to create RC user after retries');
+ }
+
+ rcUserId = rcUser._id;
+
+ // 4. Cache RC user ID in permissions JSON (no migration needed)
+ await prisma.user.update({
+ where: { id: user.id },
+ data: {
+ permissions: { ...permissions, _rcUserId: rcUserId },
+ },
+ });
+ }
+
+ // 5. Generate login token
+ const tokenData = await rocketchatClient.createUserToken(rcUserId);
+ return {
+ authToken: tokenData.authToken,
+ rcUserId: tokenData.userId,
+ };
+ }
+
+ /**
+ * Setup default channels on first use
+ */
+ async ensureDefaultChannels(): Promise {
+ try {
+ await rocketchatClient.ensureChannel('shifts', 'Shift coordination and updates');
+ await rocketchatClient.ensureChannel('canvassing', 'Canvass activity and updates');
+ await rocketchatClient.ensureChannel('campaigns', 'Campaign activity and responses');
+ logger.info('RC default channels verified');
+ } catch (err) {
+ logger.warn('RC ensureDefaultChannels failed:', err);
+ }
+ }
+}
+
+export const rocketchatService = new RocketChatService();
diff --git a/api/src/modules/services/services.routes.ts b/api/src/modules/services/services.routes.ts
index d66a2161..5e1bd336 100644
--- a/api/src/modules/services/services.routes.ts
+++ b/api/src/modules/services/services.routes.ts
@@ -17,7 +17,7 @@ router.get(
'/status',
async (_req: Request, res: Response, next: NextFunction) => {
try {
- const [nocodbOnline, n8nOnline, giteaOnline, mailhogOnline, miniqrOnline, excalidrawOnline, homepageOnline] = await Promise.all([
+ const [nocodbOnline, n8nOnline, giteaOnline, mailhogOnline, miniqrOnline, excalidrawOnline, homepageOnline, vaultwardenOnline, rocketchatOnline, gancioOnline] = await Promise.all([
isServiceOnline(env.NOCODB_URL),
isServiceOnline(env.N8N_URL),
isServiceOnline(env.GITEA_URL),
@@ -25,6 +25,9 @@ router.get(
isServiceOnline(env.MINI_QR_URL),
isServiceOnline(env.EXCALIDRAW_URL),
isServiceOnline(env.HOMEPAGE_URL),
+ isServiceOnline(env.VAULTWARDEN_URL),
+ isServiceOnline(`${env.ROCKETCHAT_URL}/api/info`),
+ isServiceOnline(env.GANCIO_URL),
]);
// Update Prometheus gauges
@@ -35,6 +38,9 @@ router.get(
setServiceUp('miniqr', miniqrOnline);
setServiceUp('excalidraw', excalidrawOnline);
setServiceUp('homepage', homepageOnline);
+ setServiceUp('vaultwarden', vaultwardenOnline);
+ setServiceUp('rocketchat', rocketchatOnline);
+ setServiceUp('gancio', gancioOnline);
res.json({
nocodb: { online: nocodbOnline, url: env.NOCODB_URL },
@@ -44,6 +50,9 @@ router.get(
miniqr: { online: miniqrOnline, url: env.MINI_QR_URL },
excalidraw: { online: excalidrawOnline, url: env.EXCALIDRAW_URL },
homepage: { online: homepageOnline, url: env.HOMEPAGE_URL },
+ vaultwarden: { online: vaultwardenOnline, url: env.VAULTWARDEN_URL },
+ rocketchat: { online: rocketchatOnline, url: env.ROCKETCHAT_URL },
+ gancio: { online: gancioOnline, url: env.GANCIO_URL },
});
} catch (err) {
logger.error('Failed to check services status', err);
@@ -88,6 +97,15 @@ router.get(
// Homepage (service dashboard)
homepagePort: env.HOMEPAGE_EMBED_PORT,
homepageSubdomain: 'home',
+ // Vaultwarden (password manager)
+ vaultwardenPort: env.VAULTWARDEN_EMBED_PORT,
+ vaultwardenSubdomain: 'vault',
+ // Rocket.Chat (team chat)
+ rocketchatPort: env.ROCKETCHAT_EMBED_PORT,
+ rocketchatSubdomain: 'chat',
+ // Gancio (event management)
+ gancioPort: env.GANCIO_EMBED_PORT,
+ gancioSubdomain: 'events',
});
},
);
diff --git a/api/src/modules/settings/settings.schemas.ts b/api/src/modules/settings/settings.schemas.ts
index 7ac0ec9e..2cb797e1 100644
--- a/api/src/modules/settings/settings.schemas.ts
+++ b/api/src/modules/settings/settings.schemas.ts
@@ -49,6 +49,17 @@ export const updateSiteSettingsSchema = z.object({
enableMediaFeatures: z.boolean().optional(),
enablePayments: z.boolean().optional(),
enableGalleryAds: z.boolean().optional(),
+ enableChat: z.boolean().optional(),
+ enableEvents: z.boolean().optional(),
+
+ // Notification settings
+ notifyAdminShiftSignup: z.boolean().optional(),
+ notifyAdminResponseSubmitted: z.boolean().optional(),
+ notifyAdminSignRequested: z.boolean().optional(),
+ notifyAdminShiftCancellation: z.boolean().optional(),
+ notifyVolunteerSessionSummary: z.boolean().optional(),
+ notifyVolunteerCancellation: z.boolean().optional(),
+ notifyVolunteerShiftReminder: z.boolean().optional(),
});
export type UpdateSiteSettingsInput = z.infer;
diff --git a/api/src/server.ts b/api/src/server.ts
index d528a3e2..c98fec6a 100644
--- a/api/src/server.ts
+++ b/api/src/server.ts
@@ -27,6 +27,7 @@ import shiftSeriesRouter from './modules/map/shifts/shift-series.routes';
import { mapSettingsRouter } from './modules/map/settings/settings.routes';
import { qrRouter } from './modules/qr/qr.routes';
import { listmonkRouter } from './modules/listmonk/listmonk.routes';
+import { listmonkWebhookRouter } from './modules/listmonk/listmonk-webhook.routes';
import { pagesPublicRouter } from './modules/pages/pages-public.routes';
import { pagesAdminRouter } from './modules/pages/pages-admin.routes';
import { blocksRouter } from './modules/pages/blocks.routes';
@@ -34,9 +35,12 @@ import { docsRouter } from './modules/docs/docs.routes';
import { servicesRouter } from './modules/services/services.routes';
import { siteSettingsRouter } from './modules/settings/settings.routes';
import { canvassVolunteerRouter, canvassAdminRouter } from './modules/map/canvass/canvass.routes';
+import { canvassExportRouter } from './modules/map/canvass/canvass-export.routes';
import { trackingVolunteerRouter, trackingAdminRouter } from './modules/map/tracking/tracking.routes';
import { geocodingRouter } from './modules/map/geocoding/geocoding.routes';
import { pangolinRouter } from './modules/pangolin/pangolin.routes';
+import { rocketchatRouter } from './modules/rocketchat/rocketchat.routes';
+import { rocketchatWebhookService } from './services/rocketchat-webhook.service';
import { narImportRouter } from './modules/map/locations/nar-import.routes';
import { areaImportRouter } from './modules/map/locations/area-import.routes';
import emailTemplatesRouter from './modules/email-templates/email-templates-admin.routes';
@@ -45,6 +49,7 @@ import { dashboardRouter } from './modules/dashboard/dashboard.routes';
import { initEncryption } from './utils/crypto';
import { emailService } from './services/email.service';
import { emailQueueService } from './services/email-queue.service';
+import { notificationQueueService } from './services/notification-queue.service';
import { geocodeQueueService } from './services/geocode-queue.service';
import { startProxy, stopProxy } from './services/listmonk-proxy.service';
import { pagesService } from './modules/pages/pages.service';
@@ -167,6 +172,7 @@ app.use('/api/map/shifts', shiftsAdminRouter); // Admin shift CRUD (au
app.use('/api/map/geocoding', geocodingRouter); // Geocoding search (MAP_ADMIN+)
app.use('/api/map/settings', mapSettingsRouter); // Map settings (public GET, auth PUT)
app.use('/api/qr', qrRouter); // QR code generation (public)
+app.use('/api/listmonk', listmonkWebhookRouter); // Listmonk webhook (shared secret, no JWT)
app.use('/api/listmonk', listmonkRouter); // Listmonk newsletter sync (SUPER_ADMIN)
app.use('/api/email-templates', emailTemplatesRouter); // Email template management (ADMIN roles)
app.use('/api/pages', pagesPublicRouter); // Public landing pages (no auth)
@@ -176,10 +182,12 @@ app.use('/api/docs', docsRouter); // Docs status + config
app.use('/api/services', servicesRouter); // Platform services status (SUPER_ADMIN)
app.use('/api/map/canvass', canvassVolunteerRouter); // Volunteer canvass routes (auth required)
app.use('/api/map/canvass', canvassAdminRouter); // Admin canvass routes (MAP_ADMIN+)
+app.use('/api/map/canvass', canvassExportRouter); // Canvass-to-campaign export (admin roles)
app.use('/api/map/tracking', trackingVolunteerRouter); // Volunteer GPS tracking (auth required)
app.use('/api/map/tracking', trackingAdminRouter); // Admin GPS tracking (MAP_ADMIN+)
app.use('/api/settings', siteSettingsRouter); // Site settings (public GET, SUPER_ADMIN PUT)
app.use('/api/pangolin', pangolinRouter); // Pangolin tunnel management (SUPER_ADMIN)
+app.use('/api/rocketchat', rocketchatRouter); // Rocket.Chat SSO + status (auth required)
app.use('/api/observability', observabilityRouter); // Observability / monitoring (SUPER_ADMIN)
app.use('/api/dashboard', dashboardRouter); // Dashboard summary (ADMIN roles)
app.use('/api/payments', paymentsPublicRouter); // Public payment routes (plans, checkout, my subscription)
@@ -210,6 +218,7 @@ async function start() {
await emailService.rebuildTransporter();
emailQueueService.startWorker();
+ notificationQueueService.startWorker();
geocodeQueueService.startWorker();
startProxy();
@@ -239,6 +248,9 @@ async function start() {
docsAnalyticsService.cleanupOldData(90).catch(() => {});
setInterval(() => docsAnalyticsService.cleanupOldData(90).catch(() => {}), 24 * 60 * 60 * 1000);
+ // Setup Rocket.Chat notification channels (non-blocking)
+ rocketchatWebhookService.setupChannels().catch(() => {});
+
// Sync MkDocs overrides on startup
pagesService.syncOverrides()
.then(({ imported, updated }) => {
@@ -283,6 +295,7 @@ for (const signal of ['SIGTERM', 'SIGINT']) {
logger.info(`${signal} received, shutting down...`);
await stopProxy();
await emailQueueService.close();
+ await notificationQueueService.close();
await geocodeQueueService.close();
await prisma.$disconnect();
redis.disconnect();
diff --git a/api/src/services/email-queue.service.ts b/api/src/services/email-queue.service.ts
index 87debabc..3ebc30d9 100644
--- a/api/src/services/email-queue.service.ts
+++ b/api/src/services/email-queue.service.ts
@@ -5,6 +5,7 @@ import { prisma } from '../config/database';
import { logger } from '../utils/logger';
import { emailService } from './email.service';
import { recordEmailSent, recordEmailFailed, setEmailQueueSize, emailSendDuration } from '../utils/metrics';
+import { listmonkEventSyncService } from './listmonk-event-sync.service';
interface CampaignEmailJobData {
campaignEmailId: string;
@@ -65,6 +66,13 @@ class EmailQueueService {
if (result.success) {
recordEmailSent(campaignId);
+ // Listmonk event sync
+ listmonkEventSyncService.onCampaignEmailSent({
+ email: emailData.userEmail,
+ name: emailData.userName,
+ campaignSlug: emailData.campaignTitle,
+ postalCode: emailData.postalCode,
+ }).catch(() => {});
} else {
recordEmailFailed(campaignId, 'send_failure');
throw new Error(`Failed to send email to ${emailData.recipientEmail}`);
diff --git a/api/src/services/email.service.ts b/api/src/services/email.service.ts
index 79fdf29a..af7ddfbe 100644
--- a/api/src/services/email.service.ts
+++ b/api/src/services/email.service.ts
@@ -748,6 +748,260 @@ class EmailService {
});
}
+ // ─── Notification Emails ────────────────────────────────────────────
+
+ async sendAdminShiftSignupAlert(options: {
+ adminEmails: string[];
+ shiftTitle: string;
+ shiftDate: string;
+ volunteerName: string;
+ volunteerEmail: string;
+ signupSource: string;
+ adminUrl: string;
+ }): Promise {
+ const orgName = await this.getOrganizationName();
+ const vars: Record = {
+ ORGANIZATION_NAME: orgName,
+ SHIFT_TITLE: options.shiftTitle,
+ SHIFT_DATE: options.shiftDate,
+ VOLUNTEER_NAME: options.volunteerName,
+ VOLUNTEER_EMAIL: options.volunteerEmail,
+ SIGNUP_SOURCE: options.signupSource,
+ ADMIN_URL: options.adminUrl,
+ };
+
+ const dbTemplate = await this.loadTemplateFromDatabase('admin-shift-signup-alert');
+
+ let html: string, text: string, subject: string;
+ if (dbTemplate) {
+ html = await this.processTemplate(dbTemplate.html, vars);
+ text = await this.processTextTemplate(dbTemplate.text, vars);
+ subject = this.processSubject(dbTemplate.subject, vars);
+ } else {
+ const htmlTemplate = this.loadTemplate('admin-shift-signup-alert', 'html');
+ const txtTemplate = this.loadTemplate('admin-shift-signup-alert', 'txt');
+ html = await this.processTemplate(htmlTemplate, vars);
+ text = await this.processTextTemplate(txtTemplate, vars);
+ subject = `New shift signup — ${options.shiftTitle}`;
+ }
+
+ for (const email of options.adminEmails) {
+ await this.sendEmail({ to: email, subject, html, text });
+ }
+ }
+
+ async sendAdminShiftCancellationAlert(options: {
+ adminEmails: string[];
+ shiftTitle: string;
+ shiftDate: string;
+ volunteerName: string;
+ volunteerEmail: string;
+ cancellationSource: string;
+ adminUrl: string;
+ }): Promise {
+ const orgName = await this.getOrganizationName();
+ const vars: Record = {
+ ORGANIZATION_NAME: orgName,
+ SHIFT_TITLE: options.shiftTitle,
+ SHIFT_DATE: options.shiftDate,
+ VOLUNTEER_NAME: options.volunteerName,
+ VOLUNTEER_EMAIL: options.volunteerEmail,
+ CANCELLATION_SOURCE: options.cancellationSource,
+ ADMIN_URL: options.adminUrl,
+ };
+
+ const dbTemplate = await this.loadTemplateFromDatabase('admin-shift-cancellation-alert');
+
+ let html: string, text: string, subject: string;
+ if (dbTemplate) {
+ html = await this.processTemplate(dbTemplate.html, vars);
+ text = await this.processTextTemplate(dbTemplate.text, vars);
+ subject = this.processSubject(dbTemplate.subject, vars);
+ } else {
+ const htmlTemplate = this.loadTemplate('admin-shift-cancellation-alert', 'html');
+ const txtTemplate = this.loadTemplate('admin-shift-cancellation-alert', 'txt');
+ html = await this.processTemplate(htmlTemplate, vars);
+ text = await this.processTextTemplate(txtTemplate, vars);
+ subject = `Shift cancellation — ${options.shiftTitle}`;
+ }
+
+ for (const email of options.adminEmails) {
+ await this.sendEmail({ to: email, subject, html, text });
+ }
+ }
+
+ async sendAdminResponseSubmittedAlert(options: {
+ adminEmails: string[];
+ campaignTitle: string;
+ representativeName: string;
+ responseType: string;
+ submitterName: string;
+ adminUrl: string;
+ }): Promise {
+ const orgName = await this.getOrganizationName();
+ const vars: Record = {
+ ORGANIZATION_NAME: orgName,
+ CAMPAIGN_TITLE: options.campaignTitle,
+ REPRESENTATIVE_NAME: options.representativeName,
+ RESPONSE_TYPE: options.responseType.replace(/_/g, ' '),
+ SUBMITTER_NAME: options.submitterName,
+ ADMIN_URL: options.adminUrl,
+ };
+
+ const dbTemplate = await this.loadTemplateFromDatabase('admin-response-submitted-alert');
+
+ let html: string, text: string, subject: string;
+ if (dbTemplate) {
+ html = await this.processTemplate(dbTemplate.html, vars);
+ text = await this.processTextTemplate(dbTemplate.text, vars);
+ subject = this.processSubject(dbTemplate.subject, vars);
+ } else {
+ const htmlTemplate = this.loadTemplate('admin-response-submitted-alert', 'html');
+ const txtTemplate = this.loadTemplate('admin-response-submitted-alert', 'txt');
+ html = await this.processTemplate(htmlTemplate, vars);
+ text = await this.processTextTemplate(txtTemplate, vars);
+ subject = `New response submitted — ${options.campaignTitle}`;
+ }
+
+ for (const email of options.adminEmails) {
+ await this.sendEmail({ to: email, subject, html, text });
+ }
+ }
+
+ async sendAdminSignRequestedAlert(options: {
+ adminEmails: string[];
+ volunteerName: string;
+ address: string;
+ shiftTitle: string;
+ signSize: string;
+ adminUrl: string;
+ }): Promise {
+ const orgName = await this.getOrganizationName();
+ const vars: Record = {
+ ORGANIZATION_NAME: orgName,
+ VOLUNTEER_NAME: options.volunteerName,
+ ADDRESS: options.address,
+ SHIFT_TITLE: options.shiftTitle,
+ SIGN_SIZE: options.signSize || 'Not specified',
+ ADMIN_URL: options.adminUrl,
+ };
+
+ const dbTemplate = await this.loadTemplateFromDatabase('admin-sign-requested-alert');
+
+ let html: string, text: string, subject: string;
+ if (dbTemplate) {
+ html = await this.processTemplate(dbTemplate.html, vars);
+ text = await this.processTextTemplate(dbTemplate.text, vars);
+ subject = this.processSubject(dbTemplate.subject, vars);
+ } else {
+ const htmlTemplate = this.loadTemplate('admin-sign-requested-alert', 'html');
+ const txtTemplate = this.loadTemplate('admin-sign-requested-alert', 'txt');
+ html = await this.processTemplate(htmlTemplate, vars);
+ text = await this.processTextTemplate(txtTemplate, vars);
+ subject = `Sign requested — ${options.address}`;
+ }
+
+ for (const email of options.adminEmails) {
+ await this.sendEmail({ to: email, subject, html, text });
+ }
+ }
+
+ async sendVolunteerSessionSummary(options: {
+ volunteerEmail: string;
+ volunteerName: string;
+ cutName: string;
+ sessionDate: string;
+ visitCount: number;
+ durationMinutes: number;
+ distanceKm: number;
+ outcomeBreakdown: Record;
+ }): Promise {
+ const orgName = await this.getOrganizationName();
+
+ // Build outcome breakdown as HTML table and plain text list
+ const outcomeEntries = Object.entries(options.outcomeBreakdown);
+ let outcomeHtml = '';
+ let outcomeText = '';
+
+ if (outcomeEntries.length > 0) {
+ outcomeHtml = '| Outcome | Count |
';
+ outcomeText = 'Outcome Breakdown:\n';
+ for (const [outcome, count] of outcomeEntries) {
+ const label = outcome.replace(/_/g, ' ');
+ outcomeHtml += `| ${this.escapeHtml(label)} | ${count} |
`;
+ outcomeText += ` ${label}: ${count}\n`;
+ }
+ outcomeHtml += '
';
+ }
+
+ const vars: Record = {
+ ORGANIZATION_NAME: orgName,
+ VOLUNTEER_NAME: options.volunteerName,
+ CUT_NAME: options.cutName,
+ SESSION_DATE: options.sessionDate,
+ VISIT_COUNT: options.visitCount.toString(),
+ DURATION_MINUTES: options.durationMinutes.toString(),
+ DISTANCE_KM: options.distanceKm.toFixed(1),
+ OUTCOME_BREAKDOWN: outcomeHtml,
+ };
+
+ const dbTemplate = await this.loadTemplateFromDatabase('volunteer-session-summary');
+
+ let html: string, text: string, subject: string;
+ if (dbTemplate) {
+ html = await this.processTemplate(dbTemplate.html, vars);
+ // Use plain text breakdown for text version
+ vars.OUTCOME_BREAKDOWN = outcomeText;
+ text = await this.processTextTemplate(dbTemplate.text, vars);
+ subject = this.processSubject(dbTemplate.subject, vars);
+ } else {
+ const htmlTemplate = this.loadTemplate('volunteer-session-summary', 'html');
+ const txtTemplate = this.loadTemplate('volunteer-session-summary', 'txt');
+ html = await this.processTemplate(htmlTemplate, vars);
+ vars.OUTCOME_BREAKDOWN = outcomeText;
+ text = await this.processTextTemplate(txtTemplate, vars);
+ subject = `Canvass session summary — ${options.cutName}`;
+ }
+
+ await this.sendEmail({ to: options.volunteerEmail, subject, html, text });
+ }
+
+ async sendVolunteerCancellationAck(options: {
+ volunteerEmail: string;
+ volunteerName: string;
+ shiftTitle: string;
+ shiftDate: string;
+ shiftTime: string;
+ signupUrl: string;
+ }): Promise {
+ const orgName = await this.getOrganizationName();
+ const vars: Record = {
+ ORGANIZATION_NAME: orgName,
+ VOLUNTEER_NAME: options.volunteerName,
+ SHIFT_TITLE: options.shiftTitle,
+ SHIFT_DATE: options.shiftDate,
+ SHIFT_TIME: options.shiftTime,
+ SIGNUP_URL: options.signupUrl,
+ };
+
+ const dbTemplate = await this.loadTemplateFromDatabase('volunteer-cancellation-ack');
+
+ let html: string, text: string, subject: string;
+ if (dbTemplate) {
+ html = await this.processTemplate(dbTemplate.html, vars);
+ text = await this.processTextTemplate(dbTemplate.text, vars);
+ subject = this.processSubject(dbTemplate.subject, vars);
+ } else {
+ const htmlTemplate = this.loadTemplate('volunteer-cancellation-ack', 'html');
+ const txtTemplate = this.loadTemplate('volunteer-cancellation-ack', 'txt');
+ html = await this.processTemplate(htmlTemplate, vars);
+ text = await this.processTextTemplate(txtTemplate, vars);
+ subject = `Signup cancelled — ${options.shiftTitle}`;
+ }
+
+ await this.sendEmail({ to: options.volunteerEmail, subject, html, text });
+ }
+
async sendResponseVerification(options: {
recipientEmail: string;
campaignTitle: string;
diff --git a/api/src/services/gancio.client.ts b/api/src/services/gancio.client.ts
new file mode 100644
index 00000000..1f7cec95
--- /dev/null
+++ b/api/src/services/gancio.client.ts
@@ -0,0 +1,246 @@
+import { env } from '../config/env';
+import { logger } from '../utils/logger';
+
+// --- Types ---
+
/** Event shape returned by the Gancio API. */
export interface GancioEvent {
  id: number;
  title: string;
  description: string;
  place_name: string;
  place_address: string;
  start_datetime: number; // Unix timestamp (seconds)
  end_datetime?: number;
  tags: string[];
}
+
/** Response body from Gancio's OAuth password-grant login endpoint. */
interface GancioLoginResponse {
  access_token: string;
  token_type: string;
}
+
+// --- Client ---
+
+class GancioClient {
+ private accessToken: string | null = null;
+ private tokenExpiresAt = 0;
+
+ private get baseUrl(): string {
+ return env.GANCIO_URL;
+ }
+
+ get enabled(): boolean {
+ return env.GANCIO_SYNC_ENABLED === 'true' && !!env.GANCIO_ADMIN_PASSWORD;
+ }
+
+ /**
+ * Authenticate with Gancio OAuth password grant, cache token for 1 hour.
+ * Gancio uses POST /oauth/login with application/x-www-form-urlencoded body
+ * (oauth2orize standard). Fields: username, password, client_id="self", grant_type="password".
+ */
+ private async login(): Promise {
+ if (this.accessToken && Date.now() < this.tokenExpiresAt) return;
+
+ const url = `${this.baseUrl}/oauth/login`;
+ const controller = new AbortController();
+ const timeout = setTimeout(() => controller.abort(), 10000);
+
+ // Gancio's oauth2orize endpoint requires URL-encoded form data (not JSON)
+ const formBody = new URLSearchParams({
+ username: env.GANCIO_ADMIN_USER,
+ password: env.GANCIO_ADMIN_PASSWORD,
+ client_id: 'self',
+ grant_type: 'password',
+ });
+
+ try {
+ const res = await fetch(url, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/x-www-form-urlencoded' },
+ body: formBody.toString(),
+ signal: controller.signal,
+ });
+
+ if (!res.ok) {
+ const text = await res.text().catch(() => '');
+ throw new Error(`Gancio login failed (${res.status}): ${text}`);
+ }
+
+ const data = await res.json() as GancioLoginResponse;
+ this.accessToken = data.access_token;
+ // Cache for 1 hour
+ this.tokenExpiresAt = Date.now() + 60 * 60 * 1000;
+ logger.debug('Gancio auth token refreshed');
+ } finally {
+ clearTimeout(timeout);
+ }
+ }
+
+ /**
+ * Make an authenticated JSON request to the Gancio API
+ */
+ private async request(
+ method: string,
+ path: string,
+ body?: Record,
+ ): Promise {
+ await this.login();
+
+ const url = `${this.baseUrl}${path}`;
+ const controller = new AbortController();
+ const timeout = setTimeout(() => controller.abort(), 10000);
+
+ const headers: Record = {};
+ if (this.accessToken) {
+ headers['Authorization'] = `Bearer ${this.accessToken}`;
+ }
+
+ let fetchBody: string | undefined;
+ if (body) {
+ headers['Content-Type'] = 'application/json';
+ fetchBody = JSON.stringify(body);
+ }
+
+ try {
+ const res = await fetch(url, {
+ method,
+ headers,
+ body: fetchBody,
+ signal: controller.signal,
+ });
+
+ if (!res.ok) {
+ const text = await res.text().catch(() => '');
+ throw new Error(`Gancio API ${method} ${path} returned ${res.status}: ${text}`);
+ }
+
+ const contentType = res.headers.get('content-type') || '';
+ if (contentType.includes('application/json')) {
+ return await res.json() as T;
+ }
+ return {} as T;
+ } finally {
+ clearTimeout(timeout);
+ }
+ }
+
+ // --- Health ---
+
+ async isAvailable(): Promise {
+ try {
+ const controller = new AbortController();
+ const timeout = setTimeout(() => controller.abort(), 5000);
+ try {
+ const res = await fetch(`${this.baseUrl}/api/events`, {
+ signal: controller.signal,
+ });
+ return res.ok;
+ } finally {
+ clearTimeout(timeout);
+ }
+ } catch {
+ return false;
+ }
+ }
+
+ // --- Event CRUD ---
+
+ /**
+ * Create a Gancio event from a shift
+ */
+ async createEvent(shift: {
+ title: string;
+ description?: string | null;
+ location?: string | null;
+ date: Date;
+ startTime: string;
+ endTime: string;
+ }): Promise {
+ if (!this.enabled) return null;
+
+ try {
+ const startDatetime = this.buildTimestamp(shift.date, shift.startTime);
+ const endDatetime = this.buildTimestamp(shift.date, shift.endTime);
+ const placeName = shift.location || 'TBD';
+
+ const event = await this.request('POST', '/api/event', {
+ title: shift.title,
+ description: shift.description || '',
+ place_name: placeName,
+ place_address: shift.location || placeName,
+ start_datetime: startDatetime,
+ end_datetime: endDatetime,
+ tags: ['volunteer', 'shift'],
+ });
+
+ logger.info(`Gancio: created event ${event.id} for shift "${shift.title}"`);
+ return event.id;
+ } catch (err) {
+ logger.warn('Gancio createEvent failed:', err instanceof Error ? err.message : err);
+ return null;
+ }
+ }
+
+ /**
+ * Update an existing Gancio event
+ */
+ async updateEvent(eventId: number, shift: {
+ title: string;
+ description?: string | null;
+ location?: string | null;
+ date: Date;
+ startTime: string;
+ endTime: string;
+ }): Promise {
+ if (!this.enabled) return;
+
+ try {
+ const startDatetime = this.buildTimestamp(shift.date, shift.startTime);
+ const endDatetime = this.buildTimestamp(shift.date, shift.endTime);
+ const placeName = shift.location || 'TBD';
+
+ await this.request('PUT', '/api/event', {
+ id: eventId,
+ title: shift.title,
+ description: shift.description || '',
+ place_name: placeName,
+ place_address: shift.location || placeName,
+ start_datetime: startDatetime,
+ end_datetime: endDatetime,
+ tags: ['volunteer', 'shift'],
+ });
+
+ logger.info(`Gancio: updated event ${eventId}`);
+ } catch (err) {
+ logger.warn(`Gancio updateEvent(${eventId}) failed:`, err instanceof Error ? err.message : err);
+ }
+ }
+
+ /**
+ * Delete a Gancio event
+ */
+ async deleteEvent(eventId: number): Promise {
+ if (!this.enabled) return;
+
+ try {
+ await this.request('DELETE', `/api/event/${eventId}`);
+ logger.info(`Gancio: deleted event ${eventId}`);
+ } catch (err) {
+ logger.warn(`Gancio deleteEvent(${eventId}) failed:`, err instanceof Error ? err.message : err);
+ }
+ }
+
+ // --- Helpers ---
+
+ /**
+ * Combine a Date and "HH:MM" time string into a Unix timestamp (seconds)
+ */
+ private buildTimestamp(date: Date, time: string): number {
+ const d = new Date(date);
+ const [h, m] = time.split(':').map(Number);
+ d.setHours(h || 0, m || 0, 0, 0);
+ return Math.floor(d.getTime() / 1000);
+ }
+}
+
+export const gancioClient = new GancioClient();
diff --git a/api/src/services/listmonk-event-sync.service.ts b/api/src/services/listmonk-event-sync.service.ts
new file mode 100644
index 00000000..5fececb4
--- /dev/null
+++ b/api/src/services/listmonk-event-sync.service.ts
@@ -0,0 +1,153 @@
+import { env } from '../config/env';
+import { logger } from '../utils/logger';
+import { listmonkClient } from './listmonk.client';
+import { listmonkSyncService } from './listmonk-sync.service';
+
+/**
+ * Event-driven Listmonk sync — fire-and-forget subscriber upserts
+ * triggered by application events (shift signups, canvass completions, campaign emails).
+ *
+ * All methods silently fail if LISTMONK_SYNC_ENABLED is false or Listmonk is unreachable.
+ */
+class ListmonkEventSyncService {
+ private _lastSyncAt: Date | null = null;
+ private _todaySyncCount = 0;
+ private _todayDate = '';
+
+ private get enabled(): boolean {
+ return env.LISTMONK_SYNC_ENABLED === 'true';
+ }
+
+ private incrementCounter(): void {
+ const today = new Date().toISOString().split('T')[0];
+ if (today !== this._todayDate) {
+ this._todayDate = today;
+ this._todaySyncCount = 0;
+ }
+ this._todaySyncCount++;
+ this._lastSyncAt = new Date();
+ }
+
+ /**
+ * Sync a shift signup to Listmonk "All Contacts" + "Volunteers" lists.
+ */
+ async onShiftSignup(data: {
+ email: string;
+ name: string;
+ shiftTitle: string;
+ shiftDate: string;
+ cutName?: string;
+ }): Promise {
+ if (!this.enabled) return;
+
+ try {
+ await listmonkSyncService.ensureInitialized();
+ const allContactsId = listmonkSyncService.getListId('All Contacts');
+ const volunteersId = listmonkSyncService.getListId('Volunteers');
+ if (!allContactsId || !volunteersId) return;
+
+ await listmonkClient.upsertSubscriber(
+ data.email,
+ data.name,
+ [allContactsId, volunteersId],
+ {
+ source: 'shift_signup',
+ last_shift_title: data.shiftTitle,
+ last_shift_date: data.shiftDate,
+ cut_name: data.cutName || null,
+ last_synced: new Date().toISOString(),
+ },
+ );
+ this.incrementCounter();
+ logger.debug(`Listmonk event sync: shift signup for ${data.email}`);
+ } catch (err) {
+ logger.debug('Listmonk event sync failed (onShiftSignup):', err);
+ }
+ }
+
+ /**
+ * Sync a completed canvass session to Listmonk "All Contacts" + "Canvassers" lists.
+ */
+ async onCanvassSessionCompleted(data: {
+ email: string;
+ name: string;
+ cutName: string;
+ visitCount: number;
+ outcomes: Record;
+ }): Promise {
+ if (!this.enabled) return;
+
+ try {
+ await listmonkSyncService.ensureInitialized();
+ const allContactsId = listmonkSyncService.getListId('All Contacts');
+ const canvassersId = listmonkSyncService.getListId('Canvassers');
+ if (!allContactsId || !canvassersId) return;
+
+ await listmonkClient.upsertSubscriber(
+ data.email,
+ data.name,
+ [allContactsId, canvassersId],
+ {
+ source: 'canvasser',
+ last_cut: data.cutName,
+ last_visit_count: data.visitCount,
+ last_outcomes: data.outcomes,
+ last_synced: new Date().toISOString(),
+ },
+ );
+ this.incrementCounter();
+ logger.debug(`Listmonk event sync: canvass session for ${data.email}`);
+ } catch (err) {
+ logger.debug('Listmonk event sync failed (onCanvassSessionCompleted):', err);
+ }
+ }
+
+ /**
+ * Sync a sent campaign email to Listmonk "All Contacts" + "Campaign Participants" lists.
+ */
+ async onCampaignEmailSent(data: {
+ email: string;
+ name: string;
+ campaignSlug: string;
+ postalCode?: string;
+ }): Promise {
+ if (!this.enabled) return;
+
+ try {
+ await listmonkSyncService.ensureInitialized();
+ const allContactsId = listmonkSyncService.getListId('All Contacts');
+ const participantsId = listmonkSyncService.getListId('Campaign Participants');
+ if (!allContactsId || !participantsId) return;
+
+ await listmonkClient.upsertSubscriber(
+ data.email,
+ data.name,
+ [allContactsId, participantsId],
+ {
+ source: 'campaign_participant',
+ campaign_slug: data.campaignSlug,
+ postal_code: data.postalCode || null,
+ last_synced: new Date().toISOString(),
+ },
+ );
+ this.incrementCounter();
+ logger.debug(`Listmonk event sync: campaign email for ${data.email}`);
+ } catch (err) {
+ logger.debug('Listmonk event sync failed (onCampaignEmailSent):', err);
+ }
+ }
+
+ getStats(): {
+ enabled: boolean;
+ lastSyncAt: string | null;
+ todaySyncCount: number;
+ } {
+ return {
+ enabled: this.enabled,
+ lastSyncAt: this._lastSyncAt?.toISOString() || null,
+ todaySyncCount: this._todaySyncCount,
+ };
+ }
+}
+
+export const listmonkEventSyncService = new ListmonkEventSyncService();
diff --git a/api/src/services/listmonk-sync.service.ts b/api/src/services/listmonk-sync.service.ts
index e0e62612..1b1601c8 100644
--- a/api/src/services/listmonk-sync.service.ts
+++ b/api/src/services/listmonk-sync.service.ts
@@ -15,6 +15,8 @@ const LIST_DEFINITIONS: Array<{ name: string; tags: string[] }> = [
{ name: 'Support Level 4 (Opposition)', tags: ['v2', 'map', 'support'] },
{ name: 'Has Campaign Sign', tags: ['v2', 'map', 'signs'] },
{ name: 'Users', tags: ['v2', 'users'] },
+ { name: 'Volunteers', tags: ['v2', 'map', 'shifts'] },
+ { name: 'Canvassers', tags: ['v2', 'map', 'canvass'] },
];
const SUPPORT_LEVEL_LIST_MAP: Record = {
@@ -49,12 +51,16 @@ class ListmonkSyncService {
logger.info('Listmonk lists initialized', { listIds: this.listIds });
}
- private async ensureInitialized(): Promise {
+ async ensureInitialized(): Promise {
if (!this.initialized) {
await this.initializeLists();
}
}
  /**
   * Look up the Listmonk list ID for a named list.
   * Returns undefined when the list is unknown or lists have not been
   * initialized yet (callers should treat that as "skip sync").
   */
  getListId(name: string): number | undefined {
    return this.listIds[name];
  }
+
async syncCampaignParticipants(): Promise {
await this.ensureInitialized();
const result: BulkSyncResult = { total: 0, success: 0, failed: 0, errors: [] };
diff --git a/api/src/services/notification-queue.service.ts b/api/src/services/notification-queue.service.ts
new file mode 100644
index 00000000..0a338c3a
--- /dev/null
+++ b/api/src/services/notification-queue.service.ts
@@ -0,0 +1,228 @@
+import { Queue, Worker, type Job } from 'bullmq';
+import { env } from '../config/env';
+import { logger } from '../utils/logger';
+import { emailService } from './email.service';
+
+// ─── Job Data Types ────────────────────────────────────────────────
+
+interface AdminShiftSignupJob {
+ type: 'admin-shift-signup';
+ adminEmails: string[];
+ shiftTitle: string;
+ shiftDate: string;
+ volunteerName: string;
+ volunteerEmail: string;
+ signupSource: string;
+ adminUrl: string;
+}
+
+interface AdminResponseSubmittedJob {
+ type: 'admin-response-submitted';
+ adminEmails: string[];
+ campaignTitle: string;
+ representativeName: string;
+ responseType: string;
+ submitterName: string;
+ adminUrl: string;
+}
+
+interface AdminSignRequestedJob {
+ type: 'admin-sign-requested';
+ adminEmails: string[];
+ volunteerName: string;
+ address: string;
+ shiftTitle: string;
+ signSize: string;
+ adminUrl: string;
+}
+
+interface AdminShiftCancellationJob {
+ type: 'admin-shift-cancellation';
+ adminEmails: string[];
+ shiftTitle: string;
+ shiftDate: string;
+ volunteerName: string;
+ volunteerEmail: string;
+ cancellationSource: string;
+ adminUrl: string;
+}
+
+interface VolunteerSessionSummaryJob {
+ type: 'volunteer-session-summary';
+ volunteerEmail: string;
+ volunteerName: string;
+ cutName: string;
+ sessionDate: string;
+ visitCount: number;
+ durationMinutes: number;
+ distanceKm: number;
+ outcomeBreakdown: Record;
+}
+
+interface VolunteerCancellationJob {
+ type: 'volunteer-cancellation';
+ volunteerEmail: string;
+ volunteerName: string;
+ shiftTitle: string;
+ shiftDate: string;
+ shiftTime: string;
+ signupUrl: string;
+}
+
+interface VolunteerShiftReminderJob {
+ type: 'volunteer-shift-reminder';
+ recipientEmail: string;
+ recipientName: string;
+ shiftTitle: string;
+ shiftDate: string;
+ shiftStartTime: string;
+ shiftEndTime: string;
+ shiftLocation: string;
+ shiftDescription: string;
+ currentVolunteers: number;
+ maxVolunteers: number;
+ shiftStatus: string;
+}
+
+type NotificationJobData =
+ | AdminShiftSignupJob
+ | AdminResponseSubmittedJob
+ | AdminSignRequestedJob
+ | AdminShiftCancellationJob
+ | VolunteerSessionSummaryJob
+ | VolunteerCancellationJob
+ | VolunteerShiftReminderJob;
+
+// ─── Queue Service ─────────────────────────────────────────────────
+
+class NotificationQueueService {
+ private queue: Queue;
+ private worker: Worker | null = null;
+
+ constructor() {
+ this.queue = new Queue('notification-emails', {
+ connection: { url: env.REDIS_URL },
+ defaultJobOptions: {
+ attempts: 3,
+ backoff: { type: 'exponential', delay: 5000 },
+ removeOnComplete: { age: 24 * 60 * 60, count: 500 },
+ removeOnFail: { age: 7 * 24 * 60 * 60 },
+ },
+ });
+ }
+
+ startWorker() {
+ this.worker = new Worker(
+ 'notification-emails',
+ async (job: Job) => {
+ const { data } = job;
+ logger.info(`Processing notification job ${job.id} type=${data.type}`);
+
+ switch (data.type) {
+ case 'admin-shift-signup':
+ await emailService.sendAdminShiftSignupAlert(data);
+ break;
+ case 'admin-response-submitted':
+ await emailService.sendAdminResponseSubmittedAlert(data);
+ break;
+ case 'admin-sign-requested':
+ await emailService.sendAdminSignRequestedAlert(data);
+ break;
+ case 'admin-shift-cancellation':
+ await emailService.sendAdminShiftCancellationAlert(data);
+ break;
+ case 'volunteer-session-summary':
+ await emailService.sendVolunteerSessionSummary(data);
+ break;
+ case 'volunteer-cancellation':
+ await emailService.sendVolunteerCancellationAck(data);
+ break;
+ case 'volunteer-shift-reminder':
+ await emailService.sendShiftDetailsEmail({
+ recipientEmail: data.recipientEmail,
+ recipientName: data.recipientName,
+ shiftTitle: data.shiftTitle,
+ shiftDate: data.shiftDate,
+ shiftStartTime: data.shiftStartTime,
+ shiftEndTime: data.shiftEndTime,
+ shiftLocation: data.shiftLocation,
+ shiftDescription: data.shiftDescription,
+ currentVolunteers: data.currentVolunteers,
+ maxVolunteers: data.maxVolunteers,
+ shiftStatus: data.shiftStatus,
+ });
+ break;
+ }
+ },
+ {
+ connection: { url: env.REDIS_URL },
+ concurrency: 2,
+ },
+ );
+
+ this.worker.on('completed', (job) => {
+ logger.info(`Notification job ${job.id} completed`);
+ });
+
+ this.worker.on('failed', (job, err) => {
+ logger.error(`Notification job ${job?.id} failed: ${err.message}`);
+ });
+
+ logger.info('Notification queue worker started');
+ }
+
+ /** Enqueue an immediate notification job. */
+ async enqueue(data: NotificationJobData): Promise {
+ const job = await this.queue.add(data.type, data);
+ return job.id!;
+ }
+
+ /**
+ * Schedule a shift reminder as a delayed job.
+ * Uses a deterministic jobId so we can cancel it later.
+ */
+ async scheduleShiftReminder(
+ data: VolunteerShiftReminderJob,
+ shiftDatetime: Date,
+ ): Promise {
+ const reminderTime = new Date(shiftDatetime.getTime() - 24 * 60 * 60 * 1000);
+ const delay = reminderTime.getTime() - Date.now();
+
+ if (delay <= 0) {
+ logger.debug('Shift is less than 24h away, skipping reminder scheduling');
+ return null;
+ }
+
+ const jobId = `shift-reminder-${data.recipientEmail}-${shiftDatetime.getTime()}`;
+ const job = await this.queue.add(data.type, data, {
+ delay,
+ jobId,
+ });
+ logger.info(`Scheduled shift reminder jobId=${jobId} delay=${Math.round(delay / 60000)}min`);
+ return job.id!;
+ }
+
+ /** Cancel a pending shift reminder. */
+ async cancelShiftReminder(email: string, shiftDatetime: Date): Promise {
+ const jobId = `shift-reminder-${email}-${shiftDatetime.getTime()}`;
+ try {
+ const job = await this.queue.getJob(jobId);
+ if (job) {
+ await job.remove();
+ logger.info(`Cancelled shift reminder jobId=${jobId}`);
+ }
+ } catch (err) {
+ logger.warn(`Failed to cancel shift reminder jobId=${jobId}:`, err);
+ }
+ }
+
+ async close() {
+ if (this.worker) {
+ await this.worker.close();
+ }
+ await this.queue.close();
+ logger.info('Notification queue closed');
+ }
+}
+
+export const notificationQueueService = new NotificationQueueService();
diff --git a/api/src/services/notification.helper.ts b/api/src/services/notification.helper.ts
new file mode 100644
index 00000000..0130a3a9
--- /dev/null
+++ b/api/src/services/notification.helper.ts
@@ -0,0 +1,33 @@
+import { UserRole, UserStatus } from '@prisma/client';
+import { prisma } from '../config/database';
+import { siteSettingsService } from '../modules/settings/settings.service';
+import { logger } from '../utils/logger';
+
+/**
+ * Fetch email addresses for active admin users with the specified role(s).
+ */
+export async function getAdminEmailsByRole(roles: UserRole[]): Promise {
+ const users = await prisma.user.findMany({
+ where: {
+ role: { in: roles },
+ status: UserStatus.ACTIVE,
+ },
+ select: { email: true },
+ });
+ return users.map((u) => u.email);
+}
+
+/**
+ * Check whether a specific notification toggle is enabled in SiteSettings.
+ * Fail-open: returns true if the field is missing or settings query fails.
+ */
+export async function isNotificationEnabled(key: string): Promise {
+ try {
+ const settings = await siteSettingsService.get();
+ const value = (settings as Record)[key];
+ return value !== false;
+ } catch (err) {
+ logger.warn(`Failed to read notification setting "${key}", defaulting to enabled:`, err);
+ return true;
+ }
+}
diff --git a/api/src/services/rocketchat-webhook.service.ts b/api/src/services/rocketchat-webhook.service.ts
new file mode 100644
index 00000000..726f9e77
--- /dev/null
+++ b/api/src/services/rocketchat-webhook.service.ts
@@ -0,0 +1,78 @@
+import { env } from '../config/env';
+import { logger } from '../utils/logger';
+import { rocketchatClient } from './rocketchat.client';
+
+class RocketChatWebhookService {
+ private get enabled(): boolean {
+ return env.ENABLE_CHAT === 'true';
+ }
+
+ /**
+ * Post a notification to a Rocket.Chat channel.
+ * Silently fails if chat is disabled or RC is unreachable.
+ */
+ private async notify(channel: string, text: string, color?: string): Promise {
+ if (!this.enabled) return;
+ try {
+ await rocketchatClient.postMessage(channel, text, 'Changemaker Bot');
+ } catch (err) {
+ logger.debug(`RC notification to ${channel} failed (non-critical):`, err);
+ }
+ }
+
+ // --- Event Formatters ---
+
+ async onShiftSignup(data: {
+ userName: string;
+ shiftTitle: string;
+ shiftDate: string;
+ }): Promise {
+ const text = `:calendar: **${data.userName}** signed up for shift: *${data.shiftTitle}* (${data.shiftDate})`;
+ await this.notify('#shifts', text, '#27ae60');
+ }
+
+ async onShiftCancellation(data: {
+ userName: string;
+ shiftTitle: string;
+ shiftDate: string;
+ }): Promise {
+ const text = `:x: **${data.userName}** cancelled signup for shift: *${data.shiftTitle}* (${data.shiftDate})`;
+ await this.notify('#shifts', text, '#e74c3c');
+ }
+
+ async onCanvassSessionCompleted(data: {
+ userName: string;
+ visitCount: number;
+ cutName?: string;
+ }): Promise {
+ const location = data.cutName ? ` in ${data.cutName}` : '';
+ const text = `:door: **${data.userName}** completed ${data.visitCount} visits${location}`;
+ await this.notify('#canvassing', text, '#3498db');
+ }
+
+ async onCampaignResponseSubmitted(data: {
+ campaignTitle: string;
+ representativeName: string;
+ }): Promise {
+ const text = `:mega: New response submitted for campaign *${data.campaignTitle}* from **${data.representativeName}**`;
+ await this.notify('#campaigns', text, '#9b59b6');
+ }
+
+ /**
+ * Ensure default notification channels exist in Rocket.Chat.
+ * Called during service startup.
+ */
+ async setupChannels(): Promise {
+ if (!this.enabled) return;
+ try {
+ await rocketchatClient.ensureChannel('shifts', 'Shift coordination and updates');
+ await rocketchatClient.ensureChannel('canvassing', 'Canvass activity and updates');
+ await rocketchatClient.ensureChannel('campaigns', 'Campaign activity and responses');
+ logger.info('RC notification channels verified');
+ } catch (err) {
+ logger.warn('RC channel setup failed (will retry on next notification):', err);
+ }
+ }
+}
+
+export const rocketchatWebhookService = new RocketChatWebhookService();
diff --git a/api/src/services/rocketchat.client.ts b/api/src/services/rocketchat.client.ts
new file mode 100644
index 00000000..dda06100
--- /dev/null
+++ b/api/src/services/rocketchat.client.ts
@@ -0,0 +1,331 @@
+import { env } from '../config/env';
+import { logger } from '../utils/logger';
+
+// --- Types ---
+
/** A Rocket.Chat user record as returned by the users.* REST endpoints. */
export interface RCUser {
  _id: string;
  username: string;
  name: string;
  emails: { address: string; verified: boolean }[];
  roles: string[];
  active: boolean;
}

/** Response shape of POST /api/v1/login. */
export interface RCLoginResponse {
  status: string;
  data: {
    userId: string;
    authToken: string;
    me: RCUser;
  };
}

/** Response shape of POST /api/v1/users.create. */
export interface RCUserCreateResponse {
  user: RCUser;
  success: boolean;
}

/** Response shape of POST /api/v1/users.createToken (SSO token minting). */
export interface RCUserTokenResponse {
  data: {
    userId: string;
    authToken: string;
  };
  success: boolean;
}

/** Response shape of GET /api/info (server version probe). */
export interface RCInfoResponse {
  info: {
    version: string;
  };
  success: boolean;
}

/** Response shape of POST /api/v1/channels.create. */
export interface RCChannelCreateResponse {
  channel: {
    _id: string;
    name: string;
  };
  success: boolean;
}
+
+// --- Client ---
+
+class RocketChatClient {
+ private adminToken: string | null = null;
+ private adminUserId: string | null = null;
+ private tokenExpiresAt = 0;
+
+ private get baseUrl(): string {
+ return env.ROCKETCHAT_URL;
+ }
+
+ private get hasCredentials(): boolean {
+ return !!env.ROCKETCHAT_ADMIN_USER &&
+ !!env.ROCKETCHAT_ADMIN_PASSWORD;
+ }
+
+ /**
+ * Make an authenticated request to Rocket.Chat REST API
+ */
+ private async request(
+ method: string,
+ path: string,
+ body?: unknown,
+ skipAuth = false,
+ ): Promise {
+ const url = `${this.baseUrl}/api/v1${path}`;
+ const headers: Record = {
+ 'Content-Type': 'application/json',
+ };
+
+ if (!skipAuth) {
+ await this.ensureAdminAuth();
+ if (this.adminToken && this.adminUserId) {
+ headers['X-Auth-Token'] = this.adminToken;
+ headers['X-User-Id'] = this.adminUserId;
+ }
+ }
+
+ const res = await fetch(url, {
+ method,
+ headers,
+ body: body ? JSON.stringify(body) : undefined,
+ });
+
+ const data = await res.json() as T & { success?: boolean; error?: string };
+
+ if (!res.ok || data.success === false) {
+ const msg = data.error || `RC API error ${res.status}`;
+ throw new Error(msg);
+ }
+
+ return data;
+ }
+
+ // --- Auth ---
+
+ /**
+ * Authenticate as admin, cache token for 1 hour
+ */
+ async ensureAdminAuth(): Promise {
+ if (this.adminToken && Date.now() < this.tokenExpiresAt) return;
+
+ const url = `${this.baseUrl}/api/v1/login`;
+ const res = await fetch(url, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify({
+ user: env.ROCKETCHAT_ADMIN_USER,
+ password: env.ROCKETCHAT_ADMIN_PASSWORD,
+ }),
+ });
+
+ const data = await res.json() as RCLoginResponse;
+ if (!res.ok || data.status !== 'success') {
+ throw new Error('Failed to authenticate with Rocket.Chat admin');
+ }
+
+ this.adminToken = data.data.authToken;
+ this.adminUserId = data.data.userId;
+ // Cache for 1 hour
+ this.tokenExpiresAt = Date.now() + 60 * 60 * 1000;
+ logger.debug('Rocket.Chat admin auth refreshed');
+ }
+
+ // --- Health ---
+
+ async healthCheck(): Promise {
+ if (!this.hasCredentials) return false;
+ try {
+ const url = `${this.baseUrl}/api/info`;
+ const res = await fetch(url, { method: 'GET', signal: AbortSignal.timeout(5000) });
+ return res.ok;
+ } catch {
+ return false;
+ }
+ }
+
+ // --- Channel History ---
+
+ /**
+ * Get recent messages from a channel by name
+ */
+ async getChannelHistory(channelName: string, count = 5): Promise> {
+ try {
+ const data = await this.request<{
+ messages: Array<{
+ _id: string;
+ msg: string;
+ u: { username: string };
+ ts: string;
+ bot?: { i: string };
+ alias?: string;
+ }>;
+ success: boolean;
+ }>('GET', `/channels.history?roomName=${encodeURIComponent(channelName)}&count=${count}`);
+ return data.messages || [];
+ } catch {
+ return [];
+ }
+ }
+
+ // --- User Management ---
+
+ /**
+ * Find a Rocket.Chat user by email address
+ */
+ async findUserByEmail(email: string): Promise {
+ try {
+ const data = await this.request<{ users: RCUser[]; success: boolean }>(
+ 'GET',
+ `/users.list?query=${encodeURIComponent(email)}&count=1`,
+ );
+ const match = data.users?.find(u =>
+ u.emails?.some(e => e.address.toLowerCase() === email.toLowerCase()),
+ );
+ return match || null;
+ } catch (err) {
+ logger.warn('RC findUserByEmail failed:', err);
+ return null;
+ }
+ }
+
+ /**
+ * Create a new Rocket.Chat user
+ */
+ async createUser(data: {
+ email: string;
+ name: string;
+ username: string;
+ password: string;
+ roles?: string[];
+ }): Promise {
+ const res = await this.request('POST', '/users.create', {
+ email: data.email,
+ name: data.name,
+ username: data.username,
+ password: data.password,
+ roles: data.roles || ['user'],
+ verified: true,
+ joinDefaultChannels: true,
+ requirePasswordChange: false,
+ sendWelcomeEmail: false,
+ });
+ return res.user;
+ }
+
+ /**
+ * Update an existing Rocket.Chat user's roles and name
+ */
+ async updateUser(userId: string, data: { name?: string; roles?: string[] }): Promise {
+ await this.request('POST', '/users.update', {
+ userId,
+ data,
+ });
+ }
+
+ /**
+ * Set user active/inactive status
+ */
+ async setUserActive(userId: string, active: boolean): Promise {
+ await this.request('POST', '/users.setActiveStatus', {
+ userId,
+ activeStatus: active,
+ });
+ }
+
+ /**
+ * Generate a one-time login token for a user (SSO)
+ * Requires CREATE_TOKENS_FOR_USERS=true on the RC instance
+ */
+ async createUserToken(userId: string): Promise<{ authToken: string; userId: string }> {
+ const res = await this.request('POST', '/users.createToken', {
+ userId,
+ });
+ return res.data;
+ }
+
+ // --- Channels ---
+
+ /**
+ * Create a channel (idempotent — returns existing if name taken)
+ */
+ async ensureChannel(name: string, topic?: string): Promise {
+ try {
+ const res = await this.request('POST', '/channels.create', {
+ name,
+ readOnly: false,
+ });
+ if (topic) {
+ await this.request('POST', '/channels.setTopic', {
+ roomId: res.channel._id,
+ topic,
+ });
+ }
+ return res.channel._id;
+ } catch (err) {
+ // Channel already exists — look it up
+ if (err instanceof Error && (err.message.includes('already in use') || err.message.includes('duplicate-channel-name') || err.message.includes('exists'))) {
+ const info = await this.request<{ channel: { _id: string }; success: boolean }>(
+ 'GET',
+ `/channels.info?roomName=${encodeURIComponent(name)}`,
+ );
+ return info.channel._id;
+ }
+ throw err;
+ }
+ }
+
+ // --- Webhooks ---
+
+ /**
+ * Post a message to an incoming webhook URL
+ */
+ async postWebhook(webhookUrl: string, payload: {
+ text?: string;
+ channel?: string;
+ alias?: string;
+ emoji?: string;
+ attachments?: Array<{
+ title?: string;
+ text?: string;
+ color?: string;
+ fields?: Array<{ title: string; value: string; short?: boolean }>;
+ }>;
+ }): Promise {
+ try {
+ await fetch(webhookUrl, {
+ method: 'POST',
+ headers: { 'Content-Type': 'application/json' },
+ body: JSON.stringify(payload),
+ });
+ } catch (err) {
+ logger.warn('RC webhook post failed:', err);
+ }
+ }
+
+ /**
+ * Post a message to a channel using the REST API (no webhook needed)
+ */
+ async postMessage(channel: string, text: string, alias?: string): Promise {
+ try {
+ await this.request('POST', '/chat.postMessage', {
+ channel,
+ text,
+ alias: alias || 'Changemaker Bot',
+ });
+ } catch (err) {
+ logger.warn(`RC postMessage to ${channel} failed:`, err);
+ }
+ }
+}
+
+export const rocketchatClient = new RocketChatClient();
diff --git a/api/src/templates/email/admin-response-submitted-alert.html b/api/src/templates/email/admin-response-submitted-alert.html
new file mode 100644
index 00000000..d6462e8c
--- /dev/null
+++ b/api/src/templates/email/admin-response-submitted-alert.html
@@ -0,0 +1,112 @@
+
+
+
+
+ New Response Submitted — {{ORGANIZATION_NAME}}
+
+
+
+
+
+
+
+
A new response has been submitted to the response wall and is awaiting moderation:
+
+
+
+ Campaign: {{CAMPAIGN_TITLE}}
+
+
+ Representative: {{REPRESENTATIVE_NAME}}
+
+
+ Response Type: {{RESPONSE_TYPE}}
+
+
+ Submitted By: {{SUBMITTER_NAME}}
+
+
+
+
+
+
+
+
+
+
diff --git a/api/src/templates/email/admin-response-submitted-alert.txt b/api/src/templates/email/admin-response-submitted-alert.txt
new file mode 100644
index 00000000..344c32d8
--- /dev/null
+++ b/api/src/templates/email/admin-response-submitted-alert.txt
@@ -0,0 +1,12 @@
+{{ORGANIZATION_NAME}} — New Response Submitted
+
+A new response has been submitted to the response wall and is awaiting moderation:
+
+Campaign: {{CAMPAIGN_TITLE}}
+Representative: {{REPRESENTATIVE_NAME}}
+Response Type: {{RESPONSE_TYPE}}
+Submitted By: {{SUBMITTER_NAME}}
+
+Review responses: {{ADMIN_URL}}
+
+— {{ORGANIZATION_NAME}}
diff --git a/api/src/templates/email/admin-shift-cancellation-alert.html b/api/src/templates/email/admin-shift-cancellation-alert.html
new file mode 100644
index 00000000..d58ecf04
--- /dev/null
+++ b/api/src/templates/email/admin-shift-cancellation-alert.html
@@ -0,0 +1,115 @@
+
+
+
+
+ Shift Cancellation — {{ORGANIZATION_NAME}}
+
+
+
+
+
+
+
+
A volunteer has cancelled their shift signup:
+
+
+
+ Shift: {{SHIFT_TITLE}}
+
+
+ Date: {{SHIFT_DATE}}
+
+
+ Volunteer: {{VOLUNTEER_NAME}}
+
+
+ Email: {{VOLUNTEER_EMAIL}}
+
+
+ Source: {{CANCELLATION_SOURCE}}
+
+
+
+
+
+
+
+
+
+
diff --git a/api/src/templates/email/admin-shift-cancellation-alert.txt b/api/src/templates/email/admin-shift-cancellation-alert.txt
new file mode 100644
index 00000000..94645eba
--- /dev/null
+++ b/api/src/templates/email/admin-shift-cancellation-alert.txt
@@ -0,0 +1,13 @@
+{{ORGANIZATION_NAME}} — Shift Cancellation
+
+A volunteer has cancelled their shift signup:
+
+Shift: {{SHIFT_TITLE}}
+Date: {{SHIFT_DATE}}
+Volunteer: {{VOLUNTEER_NAME}}
+Email: {{VOLUNTEER_EMAIL}}
+Source: {{CANCELLATION_SOURCE}}
+
+View in admin: {{ADMIN_URL}}
+
+— {{ORGANIZATION_NAME}}
diff --git a/api/src/templates/email/admin-shift-signup-alert.html b/api/src/templates/email/admin-shift-signup-alert.html
new file mode 100644
index 00000000..0953523a
--- /dev/null
+++ b/api/src/templates/email/admin-shift-signup-alert.html
@@ -0,0 +1,115 @@
+
+
+
+
+ New Shift Signup — {{ORGANIZATION_NAME}}
+
+
+
+
+
+
+
+
A volunteer has signed up for a shift:
+
+
+
+ Shift: {{SHIFT_TITLE}}
+
+
+ Date: {{SHIFT_DATE}}
+
+
+ Volunteer: {{VOLUNTEER_NAME}}
+
+
+ Email: {{VOLUNTEER_EMAIL}}
+
+
+ Source: {{SIGNUP_SOURCE}}
+
+
+
+
+
+
+
+
+
+
diff --git a/api/src/templates/email/admin-shift-signup-alert.txt b/api/src/templates/email/admin-shift-signup-alert.txt
new file mode 100644
index 00000000..38316857
--- /dev/null
+++ b/api/src/templates/email/admin-shift-signup-alert.txt
@@ -0,0 +1,13 @@
+{{ORGANIZATION_NAME}} — New Shift Signup
+
+A volunteer has signed up for a shift:
+
+Shift: {{SHIFT_TITLE}}
+Date: {{SHIFT_DATE}}
+Volunteer: {{VOLUNTEER_NAME}}
+Email: {{VOLUNTEER_EMAIL}}
+Source: {{SIGNUP_SOURCE}}
+
+View in admin: {{ADMIN_URL}}
+
+— {{ORGANIZATION_NAME}}
diff --git a/api/src/templates/email/admin-sign-requested-alert.html b/api/src/templates/email/admin-sign-requested-alert.html
new file mode 100644
index 00000000..ef6b5982
--- /dev/null
+++ b/api/src/templates/email/admin-sign-requested-alert.html
@@ -0,0 +1,112 @@
+
+
+
+
+ Sign Requested — {{ORGANIZATION_NAME}}
+
+
+
+
+
+
+
+
A resident has requested a yard sign during canvassing:
+
+
+
+ Address: {{ADDRESS}}
+
+
+ Sign Size: {{SIGN_SIZE}}
+
+
+ Canvasser: {{VOLUNTEER_NAME}}
+
+
+ Shift: {{SHIFT_TITLE}}
+
+
+
+
+
+
+
+
+
+
diff --git a/api/src/templates/email/admin-sign-requested-alert.txt b/api/src/templates/email/admin-sign-requested-alert.txt
new file mode 100644
index 00000000..35e42575
--- /dev/null
+++ b/api/src/templates/email/admin-sign-requested-alert.txt
@@ -0,0 +1,12 @@
+{{ORGANIZATION_NAME}} — Sign Requested
+
+A resident has requested a yard sign during canvassing:
+
+Address: {{ADDRESS}}
+Sign Size: {{SIGN_SIZE}}
+Canvasser: {{VOLUNTEER_NAME}}
+Shift: {{SHIFT_TITLE}}
+
+View in admin: {{ADMIN_URL}}
+
+— {{ORGANIZATION_NAME}}
diff --git a/api/src/templates/email/volunteer-cancellation-ack.html b/api/src/templates/email/volunteer-cancellation-ack.html
new file mode 100644
index 00000000..0731635d
--- /dev/null
+++ b/api/src/templates/email/volunteer-cancellation-ack.html
@@ -0,0 +1,112 @@
+
+
+
+
+ Signup Cancelled — {{ORGANIZATION_NAME}}
+
+
+
+
+
+
+
+
Hi {{VOLUNTEER_NAME}},
+
Your shift signup has been cancelled. Here are the details:
+
+
+
+ Shift: {{SHIFT_TITLE}}
+
+
+ Date: {{SHIFT_DATE}}
+
+
+ Time: {{SHIFT_TIME}}
+
+
+
+
If this was a mistake, you can sign up again:
+
+
+
+
+
+
+
+
diff --git a/api/src/templates/email/volunteer-cancellation-ack.txt b/api/src/templates/email/volunteer-cancellation-ack.txt
new file mode 100644
index 00000000..d0c5d411
--- /dev/null
+++ b/api/src/templates/email/volunteer-cancellation-ack.txt
@@ -0,0 +1,13 @@
+{{ORGANIZATION_NAME}} — Signup Cancelled
+
+Hi {{VOLUNTEER_NAME}},
+
+Your shift signup has been cancelled:
+
+Shift: {{SHIFT_TITLE}}
+Date: {{SHIFT_DATE}}
+Time: {{SHIFT_TIME}}
+
+If this was a mistake, you can sign up again: {{SIGNUP_URL}}
+
+— {{ORGANIZATION_NAME}}
diff --git a/api/src/templates/email/volunteer-session-summary.html b/api/src/templates/email/volunteer-session-summary.html
new file mode 100644
index 00000000..4c46a3cc
--- /dev/null
+++ b/api/src/templates/email/volunteer-session-summary.html
@@ -0,0 +1,145 @@
+
+
+
+
+ Canvass Session Summary — {{ORGANIZATION_NAME}}
+
+
+
+
+
+
+
+
Great work, {{VOLUNTEER_NAME}}! Here's a summary of your canvassing session:
+
+
+
+ Area: {{CUT_NAME}}
+
+
+ Date: {{SESSION_DATE}}
+
+
+
+
+
+
{{VISIT_COUNT}}
+
Doors Visited
+
+
+
{{DURATION_MINUTES}}
+
Minutes
+
+
+
{{DISTANCE_KM}}
+
km Walked
+
+
+
+ {{OUTCOME_BREAKDOWN}}
+
+
+
+
+
+
diff --git a/api/src/templates/email/volunteer-session-summary.txt b/api/src/templates/email/volunteer-session-summary.txt
new file mode 100644
index 00000000..3619b901
--- /dev/null
+++ b/api/src/templates/email/volunteer-session-summary.txt
@@ -0,0 +1,13 @@
+{{ORGANIZATION_NAME}} — Canvass Session Summary
+
+Great work, {{VOLUNTEER_NAME}}! Here's a summary of your canvassing session:
+
+Area: {{CUT_NAME}}
+Date: {{SESSION_DATE}}
+Doors Visited: {{VISIT_COUNT}}
+Duration: {{DURATION_MINUTES}} minutes
+Distance: {{DISTANCE_KM}} km
+
+{{OUTCOME_BREAKDOWN}}
+
+Thank you for volunteering with {{ORGANIZATION_NAME}}!
diff --git a/api/src/utils/metrics.ts b/api/src/utils/metrics.ts
index 42b48be5..f0319122 100644
--- a/api/src/utils/metrics.ts
+++ b/api/src/utils/metrics.ts
@@ -1,8 +1,12 @@
import client from 'prom-client';
+import { env } from '../config/env';
const register = new client.Registry();
-register.setDefaultLabels({ app: 'changemaker-v2-api' });
+register.setDefaultLabels({
+ app: 'changemaker-v2-api',
+ instance: env.INSTANCE_LABEL || env.DOMAIN || 'unknown',
+});
client.collectDefaultMetrics({ register });
diff --git a/bunker-ops/.gitignore b/bunker-ops/.gitignore
new file mode 100644
index 00000000..c36b90fc
--- /dev/null
+++ b/bunker-ops/.gitignore
@@ -0,0 +1,13 @@
+# Vault password file (NEVER commit)
+.vault_pass
+
+# Encrypted vault files should be committed, but plaintext should not
+# (bootstrap-vault.sh encrypts automatically if .vault_pass exists)
+
+# Ansible retry files
+*.retry
+
+# SSH keys
+*.pem
+*.key
+id_rsa*
diff --git a/bunker-ops/HOWTO.md b/bunker-ops/HOWTO.md
new file mode 100644
index 00000000..2ec3b47b
--- /dev/null
+++ b/bunker-ops/HOWTO.md
@@ -0,0 +1,519 @@
+# Bunker Ops — How-To Guide
+
+Operational handbook for managing Changemaker Lite instances with Ansible.
+
+---
+
+## Table of Contents
+
+1. [Prerequisites](#1-prerequisites)
+2. [Initial Setup (Control Machine)](#2-initial-setup-control-machine)
+3. [Adding a New Instance](#3-adding-a-new-instance)
+4. [Deploying an Instance](#4-deploying-an-instance)
+5. [Day-to-Day Operations](#5-day-to-day-operations)
+6. [Secret Management](#6-secret-management)
+7. [Monitoring & Fleet Observability](#7-monitoring--fleet-observability)
+8. [Troubleshooting](#8-troubleshooting)
+9. [Variable Reference](#9-variable-reference)
+
+---
+
+## 1. Prerequisites
+
+### Control Machine (your laptop / jump server)
+
+- **Ansible 2.14+** — `pip install ansible` or `apt install ansible`
+- **SSH access** — key-based auth to all target servers
+- **OpenSSL** — for secret generation (`openssl rand`)
+
+### Target Servers (each Changemaker instance)
+
+- **Ubuntu 22.04 or 24.04** (Debian-based)
+- **2+ GB RAM** (4 GB recommended; swap is auto-created on low-memory hosts)
+- **20+ GB disk** (50 GB recommended for media features)
+- **SSH access** for a `deploy` user with passwordless sudo
+- **Outbound internet** (pulls Docker images, Git repo)
+- Ports 80, 443, and SSH accessible
+
+---
+
+## 2. Initial Setup (Control Machine)
+
+### 2.1 Clone the repository
+
+```bash
+git clone <repository-url> changemaker.lite
+cd changemaker.lite/bunker-ops
+```
+
+### 2.2 Create a vault password
+
+This single password encrypts all per-instance secrets. Store it securely (password manager, not Git).
+
+```bash
+# Generate a strong vault password
+openssl rand -base64 32 > .vault_pass
+chmod 600 .vault_pass
+```
+
+The `.vault_pass` file is in `.gitignore` and must never be committed.
+
+### 2.3 Verify Ansible can run
+
+```bash
+ansible --version
+ansible-playbook playbooks/deploy.yml --syntax-check
+```
+
+### 2.4 Prepare SSH access
+
+Ensure your SSH key can reach target servers:
+
+```bash
+# Test connectivity
+ssh deploy@10.0.1.10 "hostname && docker --version"
+```
+
+If you use a non-default SSH key:
+
+```bash
+# In ansible.cfg or per-host
+ansible_ssh_private_key_file: ~/.ssh/bunker_ops_ed25519
+```
+
+---
+
+## 3. Adding a New Instance
+
+### 3.1 Quick method (recommended)
+
+The `add-instance.sh` script scaffolds everything:
+
+```bash
+./scripts/add-instance.sh edmonton-prod betteredmonton.org 10.0.1.10
+
+# With fleet observability (Tier 2):
+./scripts/add-instance.sh edmonton-prod betteredmonton.org 10.0.1.10 --tier 2
+```
+
+This creates:
+- `inventory/host_vars/edmonton-prod/main.yml` — instance configuration
+- `inventory/host_vars/edmonton-prod/vault.yml` — 19+ generated secrets (encrypted)
+
+### 3.2 Add to inventory
+
+Edit `inventory/hosts.yml` and add the host:
+
+```yaml
+all:
+ children:
+ changemaker_instances:
+ hosts:
+ edmonton-prod:
+ ansible_host: 10.0.1.10
+ ansible_user: deploy
+ cml_domain: betteredmonton.org
+```
+
+### 3.3 Customize configuration
+
+Edit `inventory/host_vars/edmonton-prod/main.yml`:
+
+```yaml
+cml_domain: betteredmonton.org
+cml_node_env: production
+
+# Enable features
+cml_enable_media: "true"
+cml_listmonk_sync_enabled: "true"
+cml_email_test_mode: "false"
+cml_monitoring_enabled: true
+
+# Production SMTP
+cml_smtp_host: smtp.protonmail.ch
+cml_smtp_port: 587
+cml_smtp_user: "noreply@betteredmonton.org"
+
+# Pangolin tunnel
+cml_pangolin_api_url: "https://api.bnkserve.org/v1"
+cml_pangolin_org_id: "org_abc123"
+```
+
+### 3.4 Edit secrets (if needed)
+
+```bash
+# Decrypt, edit, re-encrypt
+ansible-vault edit inventory/host_vars/edmonton-prod/vault.yml
+
+# Or set a specific value
+ansible-vault decrypt inventory/host_vars/edmonton-prod/vault.yml
+# ... edit ...
+ansible-vault encrypt inventory/host_vars/edmonton-prod/vault.yml
+```
+
+### 3.5 Verify connectivity
+
+```bash
+ansible edmonton-prod -m ping
+```
+
+---
+
+## 4. Deploying an Instance
+
+### 4.1 Full initial deploy
+
+Installs Docker, configures the OS, clones the repo, generates `.env`, starts all containers, runs migrations, and sets up backup cron:
+
+```bash
+ansible-playbook playbooks/deploy.yml --limit edmonton-prod
+```
+
+What happens (in order):
+1. **common** role — apt update, Docker install, UFW firewall, fail2ban, swap
+2. **changemaker** role — git clone, create dirs, generate `.env`, `docker compose up`, Prisma migrations, seed, health checks, backup cron
+3. **monitoring** role (if enabled) — Prometheus config, `--profile monitoring up`
+
+### 4.2 Deploy all instances
+
+```bash
+# One at a time (safe):
+ansible-playbook playbooks/deploy.yml
+
+# Show what would change (dry run):
+ansible-playbook playbooks/deploy.yml --check --diff
+```
+
+### 4.3 Deploy with specific tags
+
+```bash
+# Only regenerate .env (no Docker restart):
+ansible-playbook playbooks/deploy.yml --limit edmonton-prod --tags env
+
+# Only clone + update code:
+ansible-playbook playbooks/deploy.yml --limit edmonton-prod --tags clone
+
+# Only run health checks:
+ansible-playbook playbooks/deploy.yml --limit edmonton-prod --tags health
+```
+
+---
+
+## 5. Day-to-Day Operations
+
+### 5.1 Rolling upgrade (code + images)
+
+Pulls latest Git commits, rebuilds images, runs migrations, restarts — in 25% batches:
+
+```bash
+# All instances:
+ansible-playbook playbooks/upgrade.yml
+
+# Single instance:
+ansible-playbook playbooks/upgrade.yml --limit edmonton-prod
+```
+
+### 5.2 Configuration change (no rebuild)
+
+Regenerates `.env` and restarts the API. Use when changing feature flags, SMTP settings, CORS origins, etc.:
+
+```bash
+# Change a variable first:
+# Edit inventory/host_vars/edmonton-prod/main.yml
+# e.g., cml_enable_media: "true"
+
+# Then apply:
+ansible-playbook playbooks/configure.yml --limit edmonton-prod
+```
+
+### 5.3 Trigger backups
+
+```bash
+# All instances:
+ansible-playbook playbooks/backup.yml
+
+# Single instance:
+ansible-playbook playbooks/backup.yml --limit edmonton-prod
+```
+
+### 5.4 Enable/reconfigure monitoring
+
+```bash
+ansible-playbook playbooks/monitoring.yml --limit edmonton-prod
+```
+
+### 5.5 Run ad-hoc commands
+
+```bash
+# Check Docker status on all instances:
+ansible changemaker_instances -m command -a "docker compose ps" --become
+
+# View API logs on one instance:
+ansible edmonton-prod -m command -a "docker compose logs api --tail 50" \
+ --become -e "chdir=/opt/changemaker-lite"
+
+# Restart a specific service:
+ansible edmonton-prod -m command -a "docker compose restart api" \
+ --become -e "chdir=/opt/changemaker-lite"
+
+# Check disk space across fleet:
+ansible changemaker_instances -m command -a "df -h /"
+```
+
+### 5.6 Rotate a secret
+
+1. Generate a new value:
+ ```bash
+ openssl rand -hex 32
+ ```
+2. Update the vault:
+ ```bash
+ ansible-vault edit inventory/host_vars/edmonton-prod/vault.yml
+ # Change vault_cml_jwt_access_secret (or whichever secret)
+ ```
+3. Apply and restart:
+ ```bash
+ ansible-playbook playbooks/configure.yml --limit edmonton-prod
+ ```
+
+---
+
+## 6. Secret Management
+
+### Naming convention
+
+| Prefix | Purpose | Example |
+|--------|---------|---------|
+| `cml_*` | Non-secret configuration | `cml_domain`, `cml_smtp_host` |
+| `vault_cml_*` | Encrypted secrets | `vault_cml_v2_postgres_password` |
+| `vault_bunker_*` | Bunker Ops shared secrets | `vault_bunker_ops_remote_write_token` |
+
+### What gets encrypted
+
+All 19+ secrets per instance:
+- Database passwords (PostgreSQL, Redis, Listmonk DB, Gitea DB)
+- JWT secrets (access + refresh) and encryption key
+- Admin passwords (initial admin, NocoDB, n8n, Grafana, Gotify, Vaultwarden, Rocket.Chat, Gancio)
+- API tokens (Listmonk API, Pangolin, Bunker Ops remote write)
+- SMTP password
+
+### Vault operations
+
+```bash
+# View encrypted file:
+ansible-vault view inventory/host_vars/edmonton-prod/vault.yml
+
+# Edit in-place (decrypts → opens $EDITOR → re-encrypts):
+ansible-vault edit inventory/host_vars/edmonton-prod/vault.yml
+
+# Re-key all vaults (change master password):
+find inventory/host_vars -name vault.yml -exec ansible-vault rekey {} +
+
+# Encrypt a new plaintext file:
+ansible-vault encrypt inventory/host_vars/new-instance/vault.yml
+```
+
+### Vault password management
+
+- The `.vault_pass` file is referenced in `ansible.cfg`
+- For CI/CD, pass via environment: `ANSIBLE_VAULT_PASSWORD=... ansible-playbook ...`
+- For teams, use `--vault-password-file` pointing to a shared secrets manager script
+
+---
+
+## 7. Monitoring & Fleet Observability
+
+### Tier model
+
+| Tier | What it means | How to set |
+|------|--------------|-----------|
+| **0: Standalone** | No Ansible management (manual `config.sh` install) | N/A |
+| **1: Managed** | Ansible deploys/updates, local monitoring only | `bunker_ops_enabled: false` |
+| **2: Fleet** | Ansible + metrics pushed to central VictoriaMetrics | `bunker_ops_enabled: true` |
+
+### Enabling Tier 2 on an instance
+
+1. Set in `host_vars/<instance>/main.yml`:
+ ```yaml
+ bunker_ops_enabled: true
+ bunker_ops_remote_write_url: "https://ops.bnkserve.org/api/v1/write"
+ cml_monitoring_enabled: true
+ ```
+2. Set the write token in `host_vars/<instance>/vault.yml`:
+ ```yaml
+ vault_bunker_ops_remote_write_token: "your-token-here"
+ ```
+3. Apply:
+ ```bash
+ ansible-playbook playbooks/monitoring.yml --limit edmonton-prod
+ ```
+
+### What metrics are sent (Tier 2)
+
+Only filtered, non-PII metrics leave the instance:
+
+- `cm_*` — Application metrics (emails sent, canvass visits, queue sizes, login attempts)
+- `node_*` — System metrics (CPU, memory, disk, network)
+- `http_request*` — API latency and request counts
+- `up` — Service availability
+
+**Never sent:** Database content, user data, campaign text, participant records, cAdvisor container details.
+
+### Backup metrics
+
+When `BUNKER_OPS_ENABLED=true`, the backup script automatically pushes:
+- `cm_backup_last_success_timestamp` — Unix timestamp of last successful backup
+- `cm_backup_size_bytes` — Size of the backup archive
+
+These enable "backup staleness" alerts on the central dashboard.
+
+---
+
+## 8. Troubleshooting
+
+### Ansible can't connect
+
+```
+UNREACHABLE! => {"msg": "Failed to connect to the host via ssh"}
+```
+
+- Verify SSH: `ssh deploy@<server-ip> hostname`
+- Check `ansible_user` in hosts.yml matches the SSH user
+- Ensure the user has passwordless sudo: `echo 'deploy ALL=(ALL) NOPASSWD: ALL' > /etc/sudoers.d/deploy`
+
+### Vault password error
+
+```
+ERROR! Decryption failed on ...vault.yml
+```
+
+- Verify `.vault_pass` file exists and is correct
+- Or pass explicitly: `ansible-playbook ... --vault-password-file /path/to/.vault_pass`
+
+### Deploy fails at "Wait for PostgreSQL"
+
+PostgreSQL hasn't started yet. Check:
+
+```bash
+ansible <instance> -m command -a "docker compose logs v2-postgres --tail 30" \
+ --become -e "chdir=/opt/changemaker-lite"
+```
+
+Common causes:
+- Disk full (`df -h`)
+- Wrong `V2_POSTGRES_PASSWORD` (check vault.yml matches what's in the running DB)
+- First deploy: PostgreSQL needs time to initialize
+
+### Health check fails after deploy
+
+API not responding on `/api/health`:
+
+```bash
+# Check if container is running:
+ansible <instance> -m command -a "docker compose ps api" --become -e "chdir=/opt/changemaker-lite"
+
+# Check API logs:
+ansible <instance> -m command -a "docker compose logs api --tail 50" --become -e "chdir=/opt/changemaker-lite"
+```
+
+Common causes:
+- Missing environment variable (check `.env` generation)
+- Database migration failure (check Prisma output)
+- Port conflict (another process on 4000)
+
+### .env has wrong values
+
+Compare generated `.env` with expected:
+
+```bash
+# Show diff of what Ansible would change:
+ansible-playbook playbooks/configure.yml --limit <instance> --check --diff
+```
+
+### Remote write not working (Tier 2)
+
+```bash
+# Check Prometheus config on instance:
+ansible <instance> -m command -a "cat /opt/changemaker-lite/configs/prometheus/prometheus.yml" --become
+
+# Check Prometheus logs for remote write errors:
+ansible <instance> -m command -a "docker compose logs prometheus-changemaker --tail 30" \
+ --become -e "chdir=/opt/changemaker-lite"
+```
+
+Common issues:
+- `bunker_ops_enabled` not set to `true`
+- Wrong `bunker_ops_remote_write_url`
+- Invalid auth token
+- Central VictoriaMetrics not reachable (firewall, DNS)
+
+---
+
+## 9. Variable Reference
+
+### Configuration variables (`cml_*`)
+
+Set these in `host_vars/<instance>/main.yml` or `group_vars/changemaker_instances/main.yml`.
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `cml_domain` | `cmlite.org` | Instance domain (drives CORS, SMTP, URLs) |
+| `cml_node_env` | `production` | Node.js environment |
+| `cml_api_port` | `4000` | Express API port |
+| `cml_admin_port` | `3000` | React admin port |
+| `cml_media_api_port` | `4100` | Fastify media API port |
+| `cml_postgres_port` | `5433` | PostgreSQL host port |
+| `cml_enable_media` | `"false"` | Enable video library |
+| `cml_enable_payments` | `"false"` | Enable Stripe payments |
+| `cml_enable_chat` | `"false"` | Enable Rocket.Chat |
+| `cml_listmonk_sync_enabled` | `"false"` | Enable newsletter sync |
+| `cml_gancio_sync_enabled` | `"false"` | Enable event sync |
+| `cml_email_test_mode` | `"true"` | Use MailHog (`true`) or SMTP (`false`) |
+| `cml_monitoring_enabled` | `false` | Enable Prometheus/Grafana stack |
+| `cml_smtp_host` | `mailhog-changemaker` | SMTP server hostname |
+| `cml_smtp_port` | `1025` | SMTP server port |
+| `cml_smtp_user` | `""` | SMTP username |
+| `cml_mapbox_api_key` | `""` | Mapbox geocoding key |
+| `cml_google_maps_api_key` | `""` | Google Maps geocoding key |
+| `cml_pangolin_api_url` | `""` | Pangolin tunnel API |
+| `cml_pangolin_org_id` | `""` | Pangolin organization |
+| `cml_backup_retention_days` | `30` | Days to keep local backups |
+| `cml_backup_cron_hour` | `3` | Backup cron hour (UTC) |
+| `cml_backup_s3_enabled` | `false` | Upload backups to S3 |
+| `bunker_ops_enabled` | `false` | Enable fleet observability |
+| `bunker_ops_instance_label` | `{{ cml_domain }}` | Label in central metrics |
+| `bunker_ops_remote_write_url` | `""` | VictoriaMetrics write endpoint |
+
+### Secret variables (`vault_cml_*`)
+
+Set these in `host_vars/<instance>/vault.yml` (encrypted).
+
+| Variable | Purpose |
+|----------|---------|
+| `vault_cml_v2_postgres_password` | PostgreSQL password |
+| `vault_cml_redis_password` | Redis authentication |
+| `vault_cml_jwt_access_secret` | JWT access token signing (64-char hex) |
+| `vault_cml_jwt_refresh_secret` | JWT refresh token signing (64-char hex) |
+| `vault_cml_encryption_key` | Database field encryption (64-char hex) |
+| `vault_cml_initial_admin_email` | Initial admin email |
+| `vault_cml_initial_admin_password` | Initial admin password (12+ chars, complexity) |
+| `vault_cml_listmonk_db_password` | Listmonk PostgreSQL password |
+| `vault_cml_listmonk_web_admin_password` | Listmonk web UI password |
+| `vault_cml_listmonk_api_token` | Listmonk API token |
+| `vault_cml_nocodb_admin_password` | NocoDB admin password |
+| `vault_cml_gitea_db_passwd` | Gitea database password |
+| `vault_cml_gitea_db_root_password` | Gitea DB root password |
+| `vault_cml_n8n_encryption_key` | n8n encryption key |
+| `vault_cml_n8n_user_password` | n8n admin password |
+| `vault_cml_grafana_admin_password` | Grafana admin password |
+| `vault_cml_gotify_admin_password` | Gotify admin password |
+| `vault_cml_vaultwarden_admin_token` | Vaultwarden admin token (64-char hex) |
+| `vault_cml_rocketchat_admin_password` | Rocket.Chat admin password |
+| `vault_cml_gancio_admin_password` | Gancio admin password |
+| `vault_cml_smtp_pass` | SMTP password |
+| `vault_cml_pangolin_api_key` | Pangolin API key |
+| `vault_cml_pangolin_newt_id` | Pangolin Newt container ID |
+| `vault_cml_pangolin_newt_secret` | Pangolin Newt secret |
+| `vault_cml_pangolin_site_id` | Pangolin site ID |
+| `vault_cml_pangolin_endpoint` | Pangolin endpoint URL |
+| `vault_bunker_ops_remote_write_token` | Central VM write auth token |
diff --git a/bunker-ops/ROLLOUT_PLAN.md b/bunker-ops/ROLLOUT_PLAN.md
new file mode 100644
index 00000000..50b0cc60
--- /dev/null
+++ b/bunker-ops/ROLLOUT_PLAN.md
@@ -0,0 +1,543 @@
+# Bunker Ops — Staged Rollout Plan
+
+Full plan for rolling out the fleet management and observability system across Changemaker Lite instances.
+
+---
+
+## Current State (Completed)
+
+### Phase 0: Foundation ✅
+
+**Repo changes (v2 branch):**
+- `INSTANCE_LABEL`, `BUNKER_OPS_ENABLED`, `BUNKER_OPS_REMOTE_WRITE_URL` env vars added
+- Prometheus metrics tagged with `instance` label
+- Redis-exporter auth fixed (correct container name + password)
+- Backup script pushes metrics when Bunker Ops is enabled
+- `docker-compose.override.yml` in `.gitignore`
+
+**Ansible skeleton (`bunker-ops/`):**
+- `ansible.cfg` — SSH pipelining, yaml callback, vault password path
+- Inventory structure with example host_vars and group defaults
+- 3 roles: `common` (OS/Docker/UFW), `changemaker` (full deploy), `monitoring` (Prometheus/remote_write)
+- 5 playbooks: `deploy`, `upgrade`, `backup`, `configure`, `monitoring`
+- 2 scripts: `bootstrap-vault.sh` (secret generation), `add-instance.sh` (instance scaffolding)
+- `env.j2` template mapping all 100+ `.env` variables to Ansible vars
+
+---
+
+## Phase 1: First Managed Instance (Week 1-2)
+
+**Goal:** Validate the full Ansible pipeline end-to-end on a single real instance.
+
+### 1.1 Prepare a test server
+
+- Provision a fresh Ubuntu 24.04 VM (e.g., a low-cost VPS or local Proxmox VM)
+- Set up SSH key access for a `deploy` user with passwordless sudo
+- Ensure ports 80, 443, SSH are reachable
+
+### 1.2 Scaffold the instance
+
+```bash
+cd bunker-ops
+echo "$(openssl rand -base64 32)" > .vault_pass
+chmod 600 .vault_pass
+
+./scripts/add-instance.sh test-01 test.cmlite.org --tier 1
+```
+
+### 1.3 Run the full deploy
+
+```bash
+ansible-playbook playbooks/deploy.yml --limit test-01
+```
+
+### 1.4 Validate
+
+- [ ] All containers running (`docker compose ps`)
+- [ ] API responds at `/api/health`
+- [ ] Admin GUI loads and login works
+- [ ] Prisma migrations applied cleanly
+- [ ] Backup cron is installed (`crontab -l`)
+- [ ] UFW is active with correct rules
+- [ ] fail2ban is running
+
+### 1.5 Test day-2 operations
+
+- [ ] `configure.yml` — change a feature flag, verify API restarts
+- [ ] `upgrade.yml` — make a Git commit, run upgrade, verify new code is live
+- [ ] `backup.yml` — trigger backup, verify archive created
+- [ ] Secret rotation — change Redis password in vault, reconfigure, verify connectivity
+
+### 1.6 Fix and iterate
+
+Document anything that fails. Update roles, templates, and defaults. The Ansible skeleton is a starting framework — real deployments will surface edge cases in:
+- Docker image pull timing
+- Prisma migration ordering
+- Directory permission edge cases
+- OS-specific package availability
+
+**Deliverable:** One fully Ansible-managed instance running in production.
+
+---
+
+## Phase 2: Pangolin Tunnel Integration (Week 2-3)
+
+**Goal:** Automate the full Pangolin tunnel setup within Ansible.
+
+### 2.1 Add Pangolin setup task
+
+Create `roles/changemaker/tasks/pangolin.yml`:
+- Call Pangolin API to create a site (if `cml_pangolin_api_url` is set)
+- Store returned `PANGOLIN_SITE_ID`, `PANGOLIN_NEWT_ID`, `PANGOLIN_NEWT_SECRET` in vault
+- Sync resource definitions from `configs/pangolin/resources.yml`
+- Set all resources to "Not Protected"
+- Restart the Newt container
+
+This replaces the manual Pangolin setup flow that currently lives in the admin GUI.
+
+### 2.2 Validate tunnel works
+
+- [ ] Instance accessible via `https://app.<domain>` through Pangolin
+- [ ] API accessible via `https://api.<domain>`
+- [ ] All 12 subdomains route correctly
+- [ ] CORS headers present
+
+### 2.3 Idempotency
+
+Ensure re-running the playbook doesn't duplicate Pangolin resources. The task should check for existing site/resources before creating new ones.
+
+**Deliverable:** Single-command deployment from bare server to publicly accessible instance.
+
+---
+
+## Phase 3: Onboard Existing Instances (Week 3-4)
+
+**Goal:** Migrate manually-installed instances to Ansible management.
+
+### 3.1 Import strategy
+
+For each existing instance that was set up with `config.sh`:
+
+1. **Scaffold host_vars:**
+ ```bash
+ ./scripts/add-instance.sh <instance> <domain> --tier 1
+ ```
+
+2. **Import existing secrets** from the server's `.env` into the vault:
+ ```bash
+ # SSH in and extract current secrets:
+ ssh deploy@<server-ip> "grep -E '(PASSWORD|SECRET|KEY|TOKEN)' /opt/changemaker-lite/.env"
+ # Copy into vault.yml (replace generated values with existing ones)
+ ansible-vault edit inventory/host_vars/<instance>/vault.yml
+ ```
+
+3. **Test with `--check --diff`** first:
+ ```bash
+ ansible-playbook playbooks/configure.yml --limit <instance> --check --diff
+ ```
+ This shows what `.env` lines would change without actually changing anything.
+
+4. **Apply configuration management:**
+ ```bash
+ ansible-playbook playbooks/configure.yml --limit <instance>
+ ```
+
+### 3.2 Avoid disruption
+
+- **Do NOT re-run the `common` role** on production servers that are already set up. Use `--tags env,deploy` to skip OS provisioning.
+- **Do NOT re-run the seed** on instances with existing data. The seed task has `failed_when: false` for safety, but verify.
+- **Backup first** — always run `playbooks/backup.yml` before importing an existing instance.
+
+### 3.3 Instance inventory target
+
+| Instance | Domain | Status | Tier |
+|----------|--------|--------|------|
+| test-01 | test.cmlite.org | Phase 1 deploy | 1 |
+| edmonton-prod | betteredmonton.org | Import from config.sh | 1 |
+| ... | ... | ... | ... |
+
+Populate this table as instances are onboarded. Aim for 3-5 instances managed by end of Phase 3.
+
+**Deliverable:** All existing production instances under Ansible management (Tier 1).
+
+---
+
+## Phase 4: Central Observability Server (Week 4-6)
+
+**Goal:** Deploy the Bunker Ops central server with VictoriaMetrics, Grafana, and Uptime Kuma.
+
+### 4.1 Create `roles/bunker-ops/`
+
+New role for the central server:
+
+```
+roles/bunker-ops/
+├── tasks/main.yml
+├── templates/
+│ ├── docker-compose.yml.j2
+│ └── nginx.conf.j2
+├── defaults/main.yml
+└── handlers/main.yml
+```
+
+**Docker Compose stack:**
+
+| Service | Image | Purpose |
+|---------|-------|---------|
+| VictoriaMetrics | `victoriametrics/victoria-metrics` | Receives `remote_write` from instances, 12-month retention |
+| Grafana | `grafana/grafana` | Fleet dashboards, VM as datasource |
+| Uptime Kuma | `louislam/uptime-kuma` | HTTP health monitors per instance |
+| Nginx | `nginx:alpine` | TLS termination, auth on write endpoint |
+
+**Key configuration:**
+- VictoriaMetrics listens on `:8428` for writes, `:8428/select` for queries
+- Nginx authenticates `remote_write` requests with Bearer token
+- Grafana auto-provisioned with VictoriaMetrics as default datasource
+- Uptime Kuma monitors `https://api.<domain>/api/health` for each instance
+
+### 4.2 Create `playbooks/central.yml`
+
+```yaml
+- name: Deploy Bunker Ops Central
+ hosts: bunker_ops_central
+ become: true
+ roles:
+ - common
+ - bunker-ops
+```
+
+### 4.3 Authentication for remote_write
+
+- Generate a shared write token: `openssl rand -hex 32`
+- Store in central server's Nginx config (validates incoming `Authorization: Bearer <token>`)
+- Distribute same token to all Tier 2 instances via `vault_bunker_ops_remote_write_token`
+- This ensures only authorized instances can push metrics
+
+### 4.4 Deploy and verify
+
+```bash
+ansible-playbook playbooks/central.yml
+```
+
+Verify:
+- [ ] VictoriaMetrics accepts test write: `curl -X POST 'https://ops.bnkserve.org/api/v1/write' -H 'Authorization: Bearer <token>' --data-binary 'test_metric{instance="test"} 1'`
+- [ ] Grafana accessible at `https://grafana.ops.bnkserve.org`
+- [ ] Uptime Kuma accessible and monitoring test instance
+
+**Deliverable:** Central server running VictoriaMetrics + Grafana + Uptime Kuma.
+
+---
+
+## Phase 5: Fleet Dashboards (Week 6-7)
+
+**Goal:** Build three Grafana dashboards for fleet-wide visibility.
+
+### 5.1 Fleet Overview Dashboard
+
+File: `files/grafana/fleet-overview.json`
+
+**Panels:**
+- **Stat row:** Total instances up/down — `count(up{job="changemaker-v2-api"} == 1)`
+- **Instance table:** All instances with columns for status, p95 latency, email queue depth, active canvass sessions, last backup age
+- **Time series — Canvass visits:** `sum(rate(cm_canvass_visits_total[5m])) by (instance)`
+- **Time series — Emails sent:** `sum(rate(cm_emails_sent_total[5m])) by (instance)`
+- **Time series — HTTP request rate:** `sum(rate(http_requests_total[5m])) by (instance)`
+- **Gauge — Fleet email queue:** `sum(cm_email_queue_size) by (instance)`
+
+**Variables:**
+- `$instance` — Multi-select, populated from `label_values(up{job="changemaker-v2-api"}, instance)`
+
+### 5.2 Instance Drill-Down Dashboard
+
+File: `files/grafana/instance-drilldown.json`
+
+**Variables:**
+- `$instance` — Single-select
+
+**Panel groups:**
+- **Health:** API uptime, HTTP error rate, p50/p95/p99 latency
+- **Influence:** Emails sent/failed, queue depth, response submissions
+- **Canvass:** Active sessions, visits by outcome, shift signups
+- **Geocoding:** Cache hit rate, request rate by provider, duration
+- **System:** CPU usage, memory, disk I/O, network (from `node_*` metrics)
+
+This mirrors the existing per-instance Grafana dashboards but sources data from VictoriaMetrics.
+
+### 5.3 Backup Status Dashboard
+
+File: `files/grafana/backup-status.json`
+
+**Panels:**
+- **Gauge — Time since last backup:** `time() - cm_backup_last_success_timestamp` per instance. Green < 24h, yellow < 48h, red > 48h.
+- **Table — Backup sizes:** `cm_backup_size_bytes` per instance with sparkline trend
+- **Alert rule — BackupStale:** Fires when any instance hasn't backed up in 25 hours (1h grace past daily cron)
+
+### 5.4 Auto-provisioning
+
+Grafana dashboards auto-provisioned from JSON files via a `dashboards.yml` provisioner config, same pattern as the existing per-instance Grafana setup.
+
+**Deliverable:** Three operational Grafana dashboards showing fleet health, per-instance detail, and backup status.
+
+---
+
+## Phase 6: Promote Instances to Tier 2 (Week 7-8)
+
+**Goal:** Enable fleet observability on all managed instances.
+
+### 6.1 For each instance
+
+1. Update `host_vars/<instance>/main.yml`:
+ ```yaml
+ bunker_ops_enabled: true
+ bunker_ops_remote_write_url: "https://ops.bnkserve.org/api/v1/write"
+ ```
+
+2. Add write token to `host_vars/<instance>/vault.yml`:
+ ```yaml
+ vault_bunker_ops_remote_write_token: "<shared-write-token>"
+ ```
+
+3. Apply:
+ ```bash
+ ansible-playbook playbooks/monitoring.yml --limit <instance>
+ ```
+
+### 6.2 Verify data flow
+
+- Check VictoriaMetrics for incoming data: `curl 'https://ops.bnkserve.org/api/v1/query?query=up{instance="<instance-label>"}'`
+- Check Grafana fleet overview shows the new instance
+- Verify backup metrics appear after next backup run
+
+### 6.3 Bandwidth audit
+
+Each instance sends ~50 time series at 15s intervals ≈ 200 samples/minute ≈ 12KB/min ≈ 17MB/day. With 10 instances: ~170MB/day. VictoriaMetrics compresses efficiently — expect ~2GB/month total storage for a 10-instance fleet.
+
+**Deliverable:** All instances reporting to central dashboards.
+
+---
+
+## Phase 7: Alerting & Notifications (Week 8-9)
+
+**Goal:** Central alerting for fleet-wide issues.
+
+### 7.1 Alert rules on central VictoriaMetrics
+
+Create `roles/bunker-ops/templates/alerts.yml.j2`:
+
+| Alert | Condition | Severity |
+|-------|-----------|----------|
+| `InstanceDown` | `up{job="changemaker-v2-api"} == 0` for 5m | critical |
+| `HighErrorRate` | `rate(http_requests_total{status_code=~"5.."}[5m]) > 0.1` | warning |
+| `EmailQueueBacklog` | `cm_email_queue_size > 100` for 15m | warning |
+| `BackupStale` | `time() - cm_backup_last_success_timestamp > 90000` (25h) | critical |
+| `DiskSpaceLow` | `node_filesystem_avail_bytes{mountpoint="/"} / node_filesystem_size_bytes{mountpoint="/"} < 0.1` | critical |
+| `HighMemoryUsage` | `node_memory_MemAvailable_bytes / node_memory_MemTotal_bytes < 0.1` for 10m | warning |
+| `CanvassSessionAbandoned` | `cm_active_canvass_sessions > 20` for 1h | info |
+
+### 7.2 Notification channels
+
+Central Alertmanager routes alerts to:
+- **Gotify** — Push notifications to admin phone
+- **Email** — Summary digests to fleet admin email
+- **Webhook** — Optional Rocket.Chat / Slack integration
+
+### 7.3 Silence rules
+
+- Suppress `InstanceDown` during planned maintenance windows
+- Group alerts by instance to avoid notification storms
+
+**Deliverable:** Automated alerts for instance health, backups, and resource exhaustion.
+
+---
+
+## Phase 8: Upgrade Automation & CI (Week 9-11)
+
+**Goal:** Streamline the upgrade pipeline.
+
+### 8.1 Gitea webhook → n8n → Ansible
+
+When a new commit is pushed to the `v2` branch on the central Gitea:
+
+1. **Gitea** fires a webhook to **n8n**
+2. **n8n** workflow triggers `ansible-playbook playbooks/upgrade.yml`
+3. Rolling upgrade proceeds (25% batches)
+4. Health checks gate each batch
+5. n8n sends a summary notification
+
+### 8.2 Canary deployment
+
+Add a `canary` group to inventory:
+
+```yaml
+all:
+ children:
+ canary:
+ hosts:
+ test-01:
+ changemaker_instances:
+ hosts:
+ edmonton-prod:
+ calgary-prod:
+ ...
+```
+
+New `playbooks/canary-upgrade.yml`:
+1. Upgrade canary instance first
+2. Wait 30 minutes
+3. Run health checks
+4. If healthy, proceed with `upgrade.yml` on remaining instances
+5. If unhealthy, alert and stop
+
+### 8.3 Rollback playbook
+
+Create `playbooks/rollback.yml`:
+- `git checkout <previous-good-commit>` on the instance
+- `docker compose up -d --build`
+- Run health checks
+- Requires knowing the previous good commit (store in a fact file per host)
+
+**Deliverable:** Semi-automated upgrade pipeline with canary gates and rollback capability.
+
+---
+
+## Phase 9: Self-Service Instance Provisioning (Week 11-13)
+
+**Goal:** Enable clients to request and receive a new instance with minimal operator intervention.
+
+### 9.1 Provisioning API
+
+Build a lightweight FastAPI or Express service on the central server:
+
+**Endpoints:**
+- `POST /api/instances` — Create a new instance (accepts domain, features, tier)
+- `GET /api/instances` — List all instances with status
+- `GET /api/instances/:id/status` — Health + metrics summary
+- `DELETE /api/instances/:id` — Decommission
+
+**Workflow:**
+1. API receives request with domain, SSH host, feature flags
+2. Runs `add-instance.sh` to scaffold host_vars
+3. Triggers `ansible-playbook playbooks/deploy.yml --limit <instance>`
+4. Monitors deployment progress
+5. Returns status when deployment completes
+
+### 9.2 Fleet admin dashboard
+
+A simple web UI (could be a dedicated page in the central Grafana or a standalone React app):
+- Instance list with health status
+- One-click upgrade, backup, configure
+- New instance wizard
+- Grafana iframe embeds for metrics
+
+### 9.3 DNS automation
+
+If using Pangolin for all instances:
+- Pangolin handles DNS + TLS automatically
+- The provisioning API creates Pangolin resources as part of deploy
+
+If using Cloudflare or other DNS:
+- Add a `roles/dns/` role with Cloudflare API integration
+- Automatically create A/CNAME records for all subdomains
+
+**Deliverable:** Operator can provision a new instance with a single API call or form submission.
+
+---
+
+## Phase 10: Multi-Tenant Hardening (Week 13-16)
+
+**Goal:** Security and isolation for a fleet of independent client instances.
+
+### 10.1 Network isolation
+
+Each instance runs on its own server — already isolated at the OS level. Additional hardening:
+- UFW rules restrict outbound to essential services only (Docker Hub, Git, SMTP, Pangolin, VictoriaMetrics)
+- No inter-instance SSH access
+- Central server can SSH to instances, not vice versa
+
+### 10.2 Secret rotation schedule
+
+Automate periodic secret rotation:
+
+| Secret | Rotation frequency | Method |
+|--------|-------------------|--------|
+| JWT access secret | Quarterly | vault edit + configure playbook |
+| Database passwords | Annually | vault edit + full redeploy |
+| Redis password | Annually | vault edit + configure playbook |
+| Pangolin tokens | On-demand | Re-run Pangolin setup |
+| Remote write token | Annually | Update central + all instances |
+
+Create a `playbooks/rotate-secrets.yml` that generates new secrets and applies them.
+
+### 10.3 Audit logging
+
+- Ansible logs all operations to a central log file
+- Each playbook run produces a summary (host, timestamp, changes made)
+- Integrate with Git: all inventory changes are committed to a private repo
+
+### 10.4 Compliance documentation
+
+For each instance, Ansible can generate:
+- Inventory of services and versions
+- Security posture report (UFW rules, fail2ban status, TLS cert expiry)
+- Backup compliance (last backup date, retention policy)
+- Data residency confirmation (server location, no PII in metrics)
+
+**Deliverable:** Hardened fleet with automated rotation, audit trail, and compliance artifacts.
+
+---
+
+## Timeline Summary
+
+| Phase | Duration | Milestone |
+|-------|----------|-----------|
+| 0: Foundation | ✅ Done | Ansible skeleton + repo changes |
+| 1: First instance | Week 1-2 | End-to-end deploy validated |
+| 2: Pangolin integration | Week 2-3 | Single-command public deployment |
+| 3: Import existing | Week 3-4 | All instances under management |
+| 4: Central server | Week 4-6 | VictoriaMetrics + Grafana running |
+| 5: Fleet dashboards | Week 6-7 | 3 operational dashboards |
+| 6: Tier 2 promotion | Week 7-8 | All instances reporting centrally |
+| 7: Alerting | Week 8-9 | Automated health + backup alerts |
+| 8: CI/Upgrade automation | Week 9-11 | Canary + rolling upgrades |
+| 9: Self-service | Week 11-13 | Provisioning API + admin UI |
+| 10: Multi-tenant hardening | Week 13-16 | Rotation, audit, compliance |
+
+**Total: ~16 weeks from foundation to fully hardened fleet.**
+
+Phases 1-3 are the critical path — they validate the core pipeline and bring existing instances under management. Phases 4-7 add observability. Phases 8-10 are operational maturity.
+
+---
+
+## FOSS Stack Summary
+
+Every component is Free and Open Source Software:
+
+| Component | License | Role in Stack |
+|-----------|---------|---------------|
+| Ansible | GPL-3.0 | Deployment automation & configuration management |
+| VictoriaMetrics | Apache-2.0 | Centralized time-series database (Prometheus-compatible) |
+| Grafana | AGPL-3.0 | Fleet dashboards & visualization |
+| Uptime Kuma | MIT | HTTP health monitoring |
+| Prometheus | Apache-2.0 | Per-instance metrics collection (existing) |
+| Alertmanager | Apache-2.0 | Alert routing & deduplication |
+| Docker + Compose | Apache-2.0 | Container orchestration |
+| Ubuntu | Various FOSS | Host operating system |
+| UFW / iptables | GPL | Firewall |
+| fail2ban | GPL-2.0 | Brute-force protection |
+| OpenSSL | Apache-2.0 | Secret generation |
+
+No proprietary SaaS dependencies. The entire fleet can run air-gapped after initial image pulls.
+
+---
+
+## Risk Register
+
+| Risk | Impact | Mitigation |
+|------|--------|------------|
+| Vault password lost | Cannot decrypt any secrets | Store in password manager + offline backup |
+| Central server down | No fleet dashboards (instances unaffected) | `remote_write` WAL retries for ~2h; instances self-sufficient |
+| SSH key compromise | Attacker gains access to managed servers | Rotate keys, use separate deploy user, enable 2FA on SSH |
+| Ansible playbook bug | Bad config deployed to fleet | `serial: 1` for deploys, `--check --diff` before apply, canary phase |
+| Docker Hub rate limits | Image pulls fail during upgrade | Use a registry mirror or pre-pull images |
+| Prisma migration conflict | Database schema mismatch | Always run `migrate deploy` (applies pending only), never `migrate dev` in production |
+| Instance disk full | Backup fails, containers crash | `BackupStale` + `DiskSpaceLow` alerts, retention cleanup |
diff --git a/bunker-ops/ansible.cfg b/bunker-ops/ansible.cfg
new file mode 100644
index 00000000..ada69497
--- /dev/null
+++ b/bunker-ops/ansible.cfg
@@ -0,0 +1,18 @@
+[defaults]
+inventory = inventory/hosts.yml
+roles_path = roles
+vault_password_file = .vault_pass
+host_key_checking = False
+retry_files_enabled = False
+stdout_callback = yaml
+forks = 10
+timeout = 30
+
+[privilege_escalation]
+become = True
+become_method = sudo
+become_ask_pass = False
+
+[ssh_connection]
+pipelining = True
+ssh_args = -o ControlMaster=auto -o ControlPersist=60s -o StrictHostKeyChecking=no
diff --git a/bunker-ops/inventory/group_vars/all/main.yml b/bunker-ops/inventory/group_vars/all/main.yml
new file mode 100644
index 00000000..06ee7604
--- /dev/null
+++ b/bunker-ops/inventory/group_vars/all/main.yml
@@ -0,0 +1,29 @@
+---
+# Shared defaults for all hosts
+
+# Git repository
+cml_repo_url: "https://github.com/bunker-admin/changemaker.lite.git"
+cml_repo_branch: v2
+
+# Deployment paths
+cml_deploy_path: /opt/changemaker-lite
+cml_backup_path: /opt/changemaker-lite/backups
+cml_data_path: /opt/changemaker-lite/data
+
+# System packages
+common_packages:
+ - curl
+ - wget
+ - git
+ - htop
+ - jq
+ - unzip
+ - ufw
+ - fail2ban
+ - logrotate
+
+# Docker
+docker_compose_version: "v2.27.0"
+
+# SSH
+ssh_port: 22
diff --git a/bunker-ops/inventory/group_vars/changemaker_instances/main.yml b/bunker-ops/inventory/group_vars/changemaker_instances/main.yml
new file mode 100644
index 00000000..74b55b47
--- /dev/null
+++ b/bunker-ops/inventory/group_vars/changemaker_instances/main.yml
@@ -0,0 +1,52 @@
+---
+# Default values for all Changemaker instances
+# Override per-host in host_vars/<hostname>/main.yml
+
+# --- Core ---
+cml_node_env: production
+cml_domain: cmlite.org
+
+# --- Ports (internal defaults, rarely changed) ---
+cml_api_port: 4000
+cml_admin_port: 3000
+cml_media_api_port: 4100
+cml_postgres_port: 5433
+
+# --- Feature Flags ---
+cml_enable_media: "false"
+cml_enable_payments: "false"
+cml_enable_chat: "false"
+cml_listmonk_sync_enabled: "false"
+cml_gancio_sync_enabled: "false"
+cml_email_test_mode: "true"
+
+# --- Monitoring ---
+cml_monitoring_enabled: false
+
+# --- Bunker Ops ---
+bunker_ops_enabled: false
+bunker_ops_instance_label: "{{ cml_domain }}"
+bunker_ops_remote_write_url: ""
+bunker_ops_remote_write_token: ""
+
+# --- Backup ---
+cml_backup_retention_days: 30
+cml_backup_cron_hour: 3
+cml_backup_cron_minute: 0
+cml_backup_s3_enabled: false
+
+# --- SMTP (defaults to MailHog) ---
+cml_smtp_host: mailhog-changemaker
+cml_smtp_port: 1025
+cml_smtp_user: ""
+cml_smtp_pass: ""
+
+# --- Geocoding ---
+cml_mapbox_api_key: ""
+cml_google_maps_api_key: ""
+cml_google_maps_enabled: "false"
+
+# --- Pangolin ---
+cml_pangolin_api_url: ""
+cml_pangolin_api_key: ""
+cml_pangolin_org_id: ""
diff --git a/bunker-ops/inventory/host_vars/example-instance/main.yml b/bunker-ops/inventory/host_vars/example-instance/main.yml
new file mode 100644
index 00000000..b9f566cd
--- /dev/null
+++ b/bunker-ops/inventory/host_vars/example-instance/main.yml
@@ -0,0 +1,28 @@
+---
+# Example host_vars — copy this directory for each new instance
+# Rename to match the hostname in hosts.yml
+
+cml_domain: example.org
+cml_node_env: production
+
+# Feature toggles (override group defaults)
+cml_enable_media: "true"
+cml_listmonk_sync_enabled: "true"
+cml_email_test_mode: "false"
+cml_monitoring_enabled: true
+
+# SMTP (production)
+cml_smtp_host: smtp.example.org
+cml_smtp_port: 587
+cml_smtp_user: "noreply@example.org"
+# cml_smtp_pass is in vault.yml
+
+# Pangolin tunnel
+cml_pangolin_api_url: "https://api.bnkserve.org/v1"
+# cml_pangolin_api_key is in vault.yml
+cml_pangolin_org_id: "org_example"
+
+# Bunker Ops (Tier 2 — fleet observability)
+bunker_ops_enabled: true
+bunker_ops_remote_write_url: "https://ops.bnkserve.org/api/v1/write"
+# bunker_ops_remote_write_token is in vault.yml
diff --git a/bunker-ops/inventory/host_vars/example-instance/vault.yml b/bunker-ops/inventory/host_vars/example-instance/vault.yml
new file mode 100644
index 00000000..1cae5805
--- /dev/null
+++ b/bunker-ops/inventory/host_vars/example-instance/vault.yml
@@ -0,0 +1,63 @@
+---
+# EXAMPLE — Replace with real values, then encrypt with:
+# ansible-vault encrypt inventory/host_vars/example-instance/vault.yml
+#
+# Or generate all secrets automatically:
+# ./scripts/bootstrap-vault.sh example-instance
+
+# --- Database ---
+vault_cml_v2_postgres_password: "GENERATE_ME"
+
+# --- Redis ---
+vault_cml_redis_password: "GENERATE_ME"
+
+# --- JWT & Encryption ---
+vault_cml_jwt_access_secret: "GENERATE_ME"
+vault_cml_jwt_refresh_secret: "GENERATE_ME"
+vault_cml_encryption_key: "GENERATE_ME"
+
+# --- Admin ---
+vault_cml_initial_admin_email: "admin@example.org"
+vault_cml_initial_admin_password: "GENERATE_ME"
+
+# --- Listmonk ---
+vault_cml_listmonk_db_password: "GENERATE_ME"
+vault_cml_listmonk_web_admin_password: "GENERATE_ME"
+vault_cml_listmonk_api_token: "GENERATE_ME"
+
+# --- NocoDB ---
+vault_cml_nocodb_admin_password: "GENERATE_ME"
+
+# --- Gitea ---
+vault_cml_gitea_db_passwd: "GENERATE_ME"
+vault_cml_gitea_db_root_password: "GENERATE_ME"
+
+# --- n8n ---
+vault_cml_n8n_encryption_key: "GENERATE_ME"
+vault_cml_n8n_user_password: "GENERATE_ME"
+
+# --- Monitoring ---
+vault_cml_grafana_admin_password: "GENERATE_ME"
+vault_cml_gotify_admin_password: "GENERATE_ME"
+
+# --- Vaultwarden ---
+vault_cml_vaultwarden_admin_token: "GENERATE_ME"
+
+# --- Rocket.Chat ---
+vault_cml_rocketchat_admin_password: "GENERATE_ME"
+
+# --- Gancio ---
+vault_cml_gancio_admin_password: "GENERATE_ME"
+
+# --- SMTP ---
+vault_cml_smtp_pass: ""
+
+# --- Pangolin ---
+vault_cml_pangolin_api_key: ""
+vault_cml_pangolin_newt_id: ""
+vault_cml_pangolin_newt_secret: ""
+vault_cml_pangolin_site_id: ""
+vault_cml_pangolin_endpoint: ""
+
+# --- Bunker Ops ---
+vault_bunker_ops_remote_write_token: ""
diff --git a/bunker-ops/inventory/hosts.yml b/bunker-ops/inventory/hosts.yml
new file mode 100644
index 00000000..1a04c34a
--- /dev/null
+++ b/bunker-ops/inventory/hosts.yml
@@ -0,0 +1,26 @@
+---
+# Bunker Ops — Fleet Inventory
+# Add instances under changemaker_instances group
+# Each host needs a matching host_vars/<hostname>/ directory
+
+all:
+ children:
+ changemaker_instances:
+ hosts:
+ # Example: Uncomment and customize per-instance
+ # edmonton-prod:
+ # ansible_host: 10.0.1.10
+ # ansible_user: deploy
+ # cml_domain: betteredmonton.org
+ #
+ # calgary-staging:
+ # ansible_host: 10.0.2.20
+ # ansible_user: deploy
+ # cml_domain: staging.bettercalgary.org
+
+ # Central Bunker Ops server (future — VictoriaMetrics, Grafana, Uptime Kuma)
+ # bunker_ops_central:
+ # hosts:
+ # ops-server:
+ # ansible_host: 10.0.0.1
+ # ansible_user: deploy
diff --git a/bunker-ops/playbooks/backup.yml b/bunker-ops/playbooks/backup.yml
new file mode 100644
index 00000000..70c4f015
--- /dev/null
+++ b/bunker-ops/playbooks/backup.yml
@@ -0,0 +1,19 @@
+---
+# Trigger backups across all instances
+# Usage: ansible-playbook playbooks/backup.yml [--limit hostname]
+
+- name: Run Changemaker Lite backups
+ hosts: changemaker_instances
+ become: true
+
+ tasks:
+ - name: Run backup script
+ ansible.builtin.command:
+ cmd: ./scripts/backup.sh --retention {{ cml_backup_retention_days }}
+ chdir: "{{ cml_deploy_path }}"
+ register: backup_result
+ changed_when: "'Backup complete' in backup_result.stdout"
+
+ - name: Show backup result
+ ansible.builtin.debug:
+ msg: "{{ backup_result.stdout_lines | last }}"
diff --git a/bunker-ops/playbooks/configure.yml b/bunker-ops/playbooks/configure.yml
new file mode 100644
index 00000000..4d9e868f
--- /dev/null
+++ b/bunker-ops/playbooks/configure.yml
@@ -0,0 +1,36 @@
+---
+# Update .env configuration without full redeploy
+# Usage: ansible-playbook playbooks/configure.yml [--limit hostname]
+# Regenerates .env + services.yaml and restarts API
+
+- name: Reconfigure Changemaker Lite
+ hosts: changemaker_instances
+ become: true
+
+ tasks:
+ - name: Regenerate .env
+ ansible.builtin.template:
+ src: "{{ playbook_dir }}/../roles/changemaker/templates/env.j2"
+ dest: "{{ cml_deploy_path }}/.env"
+ mode: "0600"
+ backup: true
+ register: env_result
+
+ - name: Regenerate Homepage services.yaml
+ ansible.builtin.template:
+ src: "{{ playbook_dir }}/../roles/changemaker/templates/services.yaml.j2"
+ dest: "{{ cml_deploy_path }}/configs/homepage/services.yaml"
+ mode: "0644"
+
+ - name: Restart API to pick up new config
+ ansible.builtin.command:
+ cmd: docker compose restart api
+ chdir: "{{ cml_deploy_path }}"
+ when: env_result.changed
+ changed_when: true
+
+ - name: Configuration summary
+ ansible.builtin.debug:
+ msg: |
+ Configuration {{ 'updated' if env_result.changed else 'unchanged' }} for {{ cml_domain }}
+ {{ 'API restarted to apply changes' if env_result.changed else 'No restart needed' }}
diff --git a/bunker-ops/playbooks/deploy.yml b/bunker-ops/playbooks/deploy.yml
new file mode 100644
index 00000000..72da3f6f
--- /dev/null
+++ b/bunker-ops/playbooks/deploy.yml
@@ -0,0 +1,43 @@
+---
+# Full initial deployment of Changemaker Lite instances
+# Usage: ansible-playbook playbooks/deploy.yml [--limit hostname]
+
+- name: Deploy Changemaker Lite
+ hosts: changemaker_instances
+ serial: 1 # One at a time for initial deploys
+ become: true
+
+ pre_tasks:
+ - name: Validate required vault variables
+ ansible.builtin.assert:
+ that:
+ - vault_cml_v2_postgres_password is defined
+ - vault_cml_v2_postgres_password != 'GENERATE_ME'
+ - vault_cml_redis_password is defined
+ - vault_cml_redis_password != 'GENERATE_ME'
+ - vault_cml_jwt_access_secret is defined
+ - vault_cml_jwt_access_secret != 'GENERATE_ME'
+ - vault_cml_encryption_key is defined
+ - vault_cml_encryption_key != 'GENERATE_ME'
+ - vault_cml_initial_admin_password is defined
+ - vault_cml_initial_admin_password != 'GENERATE_ME'
+ fail_msg: >
+ Required secrets not configured. Run:
+ ./scripts/bootstrap-vault.sh {{ inventory_hostname }}
+ quiet: true
+
+ roles:
+ - common
+ - changemaker
+ - role: monitoring
+ when: cml_monitoring_enabled | bool
+
+ post_tasks:
+ - name: Deployment summary
+ ansible.builtin.debug:
+ msg: |
+ Deployment complete for {{ cml_domain }}
+ Admin: https://app.{{ cml_domain }}
+ API: https://api.{{ cml_domain }}
+ Monitoring: {{ 'enabled' if cml_monitoring_enabled | bool else 'disabled' }}
+ Bunker Ops: {{ 'Tier 2 (fleet)' if bunker_ops_enabled | bool else 'Standalone' }}
diff --git a/bunker-ops/playbooks/monitoring.yml b/bunker-ops/playbooks/monitoring.yml
new file mode 100644
index 00000000..0bceb1a8
--- /dev/null
+++ b/bunker-ops/playbooks/monitoring.yml
@@ -0,0 +1,18 @@
+---
+# Enable or reconfigure monitoring on instances
+# Usage: ansible-playbook playbooks/monitoring.yml [--limit hostname]
+
+- name: Configure Monitoring
+ hosts: changemaker_instances
+ become: true
+
+ roles:
+ - monitoring
+
+ post_tasks:
+ - name: Monitoring summary
+ ansible.builtin.debug:
+ msg: |
+ Monitoring configured for {{ cml_domain }}
+ Profile: {{ 'enabled' if cml_monitoring_enabled | bool else 'disabled' }}
+ Remote write: {{ 'enabled → ' + bunker_ops_remote_write_url if bunker_ops_enabled | bool else 'disabled' }}
diff --git a/bunker-ops/playbooks/upgrade.yml b/bunker-ops/playbooks/upgrade.yml
new file mode 100644
index 00000000..a74fbb4a
--- /dev/null
+++ b/bunker-ops/playbooks/upgrade.yml
@@ -0,0 +1,78 @@
+---
+# Rolling upgrade of Changemaker Lite instances
+# Usage: ansible-playbook playbooks/upgrade.yml [--limit hostname]
+# Pulls latest code, rebuilds images, runs migrations, restarts
+
+- name: Upgrade Changemaker Lite
+ hosts: changemaker_instances
+ serial: "25%" # Rolling upgrade in batches
+ become: true
+
+ tasks:
+ - name: Pull latest code
+ ansible.builtin.git:
+ repo: "{{ cml_repo_url }}"
+ dest: "{{ cml_deploy_path }}"
+ version: "{{ cml_repo_branch }}"
+ force: false
+ update: true
+ register: git_result
+
+ - name: Regenerate .env (pick up new vars)
+ ansible.builtin.template:
+ src: "{{ playbook_dir }}/../roles/changemaker/templates/env.j2"
+ dest: "{{ cml_deploy_path }}/.env"
+ mode: "0600"
+ backup: true
+
+ - name: Pull updated Docker images
+ ansible.builtin.command:
+ cmd: docker compose pull
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+
+ - name: Rebuild custom images
+ ansible.builtin.command:
+ cmd: docker compose build --no-cache
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+ when: git_result.changed
+
+ - name: Apply database migrations
+ ansible.builtin.command:
+ cmd: docker compose exec -T api npx prisma migrate deploy
+ chdir: "{{ cml_deploy_path }}"
+ register: migrate_result
+ changed_when: "'applied' in migrate_result.stdout"
+
+ - name: Restart stack with new images
+ ansible.builtin.command:
+ cmd: docker compose up -d --remove-orphans
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+
+ - name: Restart monitoring (if enabled)
+ ansible.builtin.command:
+ cmd: docker compose --profile monitoring up -d
+ chdir: "{{ cml_deploy_path }}"
+ when: cml_monitoring_enabled | bool
+ changed_when: true
+
+ - name: Wait for API health
+ ansible.builtin.uri:
+ url: "http://localhost:{{ cml_api_port }}/api/health"
+ method: GET
+ status_code: 200
+ timeout: 5
+ register: health
+ retries: 15
+ delay: 3
+ until: health.status == 200
+
+ - name: Upgrade summary
+ ansible.builtin.debug:
+ msg: |
+ Upgraded {{ cml_domain }}
+ Git: {{ git_result.before[:8] | default('?') }} → {{ git_result.after[:8] | default('?') }}
+ Migrations: {{ migrate_result.stdout_lines | default([]) | length }} applied
+ API health: OK
diff --git a/bunker-ops/roles/changemaker/defaults/main.yml b/bunker-ops/roles/changemaker/defaults/main.yml
new file mode 100644
index 00000000..28181d46
--- /dev/null
+++ b/bunker-ops/roles/changemaker/defaults/main.yml
@@ -0,0 +1,53 @@
+---
+# Changemaker role defaults
+
+# PostgreSQL
+cml_v2_postgres_user: changemaker
+cml_v2_postgres_db: changemaker_v2
+
+# JWT
+cml_jwt_access_expiry: "15m"
+cml_jwt_refresh_expiry: "7d"
+
+# User/group IDs (match host deploy user)
+cml_user_id: 1000
+cml_group_id: 1000
+cml_docker_group_id: 984
+
+# Listmonk
+cml_listmonk_admin_user: admin
+cml_listmonk_smtp_tls_type: STARTTLS
+
+# NocoDB
+cml_nocodb_port: 8091
+
+# Gitea
+cml_gitea_port: 3030
+
+# n8n
+cml_n8n_port: 5678
+
+# Gancio
+cml_gancio_port: 8092
+cml_gancio_admin_user: admin
+
+# Vaultwarden
+cml_vaultwarden_port: 8445
+
+# Directories that must exist before docker compose up
+cml_required_dirs:
+ - configs/code-server/.config
+ - configs/code-server/.local
+ - configs/homepage/logs
+ - mkdocs/.cache
+ - mkdocs/site
+ - assets/uploads
+ - assets/images
+ - assets/icons
+ - media/local/inbox
+ - media/local/thumbnails
+ - media/public
+ - local-files
+ - data
+ - backups
+ - logs
diff --git a/bunker-ops/roles/changemaker/handlers/main.yml b/bunker-ops/roles/changemaker/handlers/main.yml
new file mode 100644
index 00000000..74fc049e
--- /dev/null
+++ b/bunker-ops/roles/changemaker/handlers/main.yml
@@ -0,0 +1,18 @@
+---
+- name: Restart Changemaker stack
+ ansible.builtin.command:
+ cmd: docker compose up -d --remove-orphans
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+
+- name: Restart API only
+ ansible.builtin.command:
+ cmd: docker compose restart api
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+
+- name: Restart Nginx only
+ ansible.builtin.command:
+ cmd: docker compose restart nginx
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
diff --git a/bunker-ops/roles/changemaker/tasks/backup.yml b/bunker-ops/roles/changemaker/tasks/backup.yml
new file mode 100644
index 00000000..c2dda07a
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/backup.yml
@@ -0,0 +1,15 @@
+---
+# Configure automated backup cron job
+
+- name: Ensure backup script is executable
+ ansible.builtin.file:
+ path: "{{ cml_deploy_path }}/scripts/backup.sh"
+ mode: "0755"
+
+- name: Configure daily backup cron
+ ansible.builtin.cron:
+ name: "changemaker-lite-backup"
+ minute: "{{ cml_backup_cron_minute }}"
+ hour: "{{ cml_backup_cron_hour }}"
+ job: "cd {{ cml_deploy_path }} && ./scripts/backup.sh{% if cml_backup_s3_enabled %} --s3{% endif %} --retention {{ cml_backup_retention_days }} >> {{ cml_deploy_path }}/logs/backup.log 2>&1"
+ user: "{{ ansible_user }}"
diff --git a/bunker-ops/roles/changemaker/tasks/clone.yml b/bunker-ops/roles/changemaker/tasks/clone.yml
new file mode 100644
index 00000000..9f704ed4
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/clone.yml
@@ -0,0 +1,21 @@
+---
+# Clone or update the Changemaker Lite repository
+
+- name: Ensure deploy directory exists
+ ansible.builtin.file:
+ path: "{{ cml_deploy_path }}"
+ state: directory
+ mode: "0755"
+
+- name: Clone repository
+ ansible.builtin.git:
+ repo: "{{ cml_repo_url }}"
+ dest: "{{ cml_deploy_path }}"
+ version: "{{ cml_repo_branch }}"
+ force: false
+ update: true
+ register: git_result
+
+- name: Show git status
+ ansible.builtin.debug:
+ msg: "Repository {{ 'updated' if git_result.changed else 'unchanged' }} at {{ git_result.after[:8] | default('unknown') }}"
diff --git a/bunker-ops/roles/changemaker/tasks/deploy.yml b/bunker-ops/roles/changemaker/tasks/deploy.yml
new file mode 100644
index 00000000..16357f68
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/deploy.yml
@@ -0,0 +1,54 @@
+---
+# Deploy Docker Compose stack
+
+- name: Pull latest images
+ ansible.builtin.command:
+ cmd: docker compose pull
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+ tags: [pull]
+
+- name: Build custom images
+ ansible.builtin.command:
+ cmd: docker compose build --no-cache
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+ tags: [build]
+
+- name: Start core services
+ ansible.builtin.command:
+ cmd: docker compose up -d --remove-orphans
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+
+- name: Wait for PostgreSQL to be ready
+ ansible.builtin.command:
+ cmd: docker compose exec -T v2-postgres pg_isready -U {{ cml_v2_postgres_user }}
+ chdir: "{{ cml_deploy_path }}"
+ register: pg_ready
+ retries: 15
+ delay: 2
+ until: pg_ready.rc == 0
+ changed_when: false
+
+- name: Run Prisma migrations
+ ansible.builtin.command:
+ cmd: docker compose exec -T api npx prisma migrate deploy
+ chdir: "{{ cml_deploy_path }}"
+ register: migrate_result
+ changed_when: "'applied' in migrate_result.stdout"
+
+- name: Run database seed (first deploy only)
+ ansible.builtin.command:
+ cmd: docker compose exec -T api npx prisma db seed
+ chdir: "{{ cml_deploy_path }}"
+ register: seed_result
+ changed_when: "'created' in seed_result.stdout"
+ failed_when: false
+
+- name: Start monitoring services (if enabled)
+ ansible.builtin.command:
+ cmd: docker compose --profile monitoring up -d
+ chdir: "{{ cml_deploy_path }}"
+ when: cml_monitoring_enabled | bool
+ changed_when: true
diff --git a/bunker-ops/roles/changemaker/tasks/dirs.yml b/bunker-ops/roles/changemaker/tasks/dirs.yml
new file mode 100644
index 00000000..5ea930de
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/dirs.yml
@@ -0,0 +1,20 @@
+---
+# Create all directories required by Docker containers
+
+- name: Create required directories
+ ansible.builtin.file:
+ path: "{{ cml_deploy_path }}/{{ item }}"
+ state: directory
+ mode: "0775"
+ loop: "{{ cml_required_dirs }}"
+
+- name: Ensure .gitkeep files exist
+ ansible.builtin.copy:
+ content: ""
+ dest: "{{ cml_deploy_path }}/{{ item }}/.gitkeep"
+ force: false
+ mode: "0644"
+ loop:
+ - configs/code-server/.config
+ - configs/code-server/.local
+ - configs/homepage/logs
diff --git a/bunker-ops/roles/changemaker/tasks/env.yml b/bunker-ops/roles/changemaker/tasks/env.yml
new file mode 100644
index 00000000..7b37674e
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/env.yml
@@ -0,0 +1,14 @@
+---
+# Generate .env from Jinja2 template (replaces config.sh)
+
+- name: Generate .env file
+ ansible.builtin.template:
+ src: env.j2
+ dest: "{{ cml_deploy_path }}/.env"
+ mode: "0600"
+ backup: true
+ register: env_result
+
+- name: Report .env status
+ ansible.builtin.debug:
+ msg: ".env {{ 'updated' if env_result.changed else 'unchanged' }}"
diff --git a/bunker-ops/roles/changemaker/tasks/health.yml b/bunker-ops/roles/changemaker/tasks/health.yml
new file mode 100644
index 00000000..239ceb7e
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/health.yml
@@ -0,0 +1,38 @@
+---
+# Post-deploy health checks
+
+- name: Wait for API to respond
+ ansible.builtin.uri:
+ url: "http://localhost:{{ cml_api_port }}/api/health"
+ method: GET
+ status_code: 200
+ timeout: 5
+ register: api_health
+ retries: 15
+ delay: 3
+ until: api_health.status == 200
+
+- name: Wait for Admin GUI to respond
+ ansible.builtin.uri:
+ url: "http://localhost:{{ cml_admin_port }}"
+ method: GET
+ status_code: 200
+ timeout: 5
+ register: admin_health
+ retries: 10
+ delay: 3
+ until: admin_health.status == 200
+
+- name: Check container health
+ ansible.builtin.command:
+ cmd: docker compose ps --format json
+ chdir: "{{ cml_deploy_path }}"
+ register: compose_ps
+ changed_when: false
+
+- name: Report deployment status
+ ansible.builtin.debug:
+ msg: |
+ Deployment health check:
+ API: {{ 'OK' if api_health.status == 200 else 'FAILED' }}
+ Admin: {{ 'OK' if admin_health.status == 200 else 'FAILED' }}
diff --git a/bunker-ops/roles/changemaker/tasks/main.yml b/bunker-ops/roles/changemaker/tasks/main.yml
new file mode 100644
index 00000000..b550b103
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/main.yml
@@ -0,0 +1,30 @@
+---
+# Changemaker role — Full deployment orchestration
+
+- name: Clone/update repository
+ ansible.builtin.include_tasks: clone.yml
+ tags: [clone, deploy]
+
+- name: Create required directories
+ ansible.builtin.include_tasks: dirs.yml
+ tags: [dirs, deploy]
+
+- name: Generate .env configuration
+ ansible.builtin.include_tasks: env.yml
+ tags: [env, configure, deploy]
+
+- name: Generate Homepage services.yaml
+ ansible.builtin.include_tasks: services.yml
+ tags: [services, configure, deploy]
+
+- name: Deploy Docker stack
+ ansible.builtin.include_tasks: deploy.yml
+ tags: [deploy]
+
+- name: Run health checks
+ ansible.builtin.include_tasks: health.yml
+ tags: [health, deploy]
+
+- name: Configure backup cron
+ ansible.builtin.include_tasks: backup.yml
+ tags: [backup, deploy]
diff --git a/bunker-ops/roles/changemaker/tasks/services.yml b/bunker-ops/roles/changemaker/tasks/services.yml
new file mode 100644
index 00000000..bf73df45
--- /dev/null
+++ b/bunker-ops/roles/changemaker/tasks/services.yml
@@ -0,0 +1,14 @@
+---
+# Generate Homepage services.yaml from template
+
+- name: Ensure Homepage config directory exists
+ ansible.builtin.file:
+ path: "{{ cml_deploy_path }}/configs/homepage"
+ state: directory
+ mode: "0755"
+
+- name: Generate Homepage services.yaml
+ ansible.builtin.template:
+ src: services.yaml.j2
+ dest: "{{ cml_deploy_path }}/configs/homepage/services.yaml"
+ mode: "0644"
diff --git a/bunker-ops/roles/changemaker/templates/env.j2 b/bunker-ops/roles/changemaker/templates/env.j2
new file mode 100644
index 00000000..82b8df4a
--- /dev/null
+++ b/bunker-ops/roles/changemaker/templates/env.j2
@@ -0,0 +1,195 @@
+# ==============================================================================
+# Changemaker Lite v2 — Environment Variables
+# Generated by Ansible (Bunker Ops) — DO NOT EDIT MANUALLY
+# Instance: {{ cml_domain }}
+# Generated: {{ ansible_date_time.iso8601 }}
+# ==============================================================================
+
+# --- General ---
+NODE_ENV={{ cml_node_env }}
+DOMAIN={{ cml_domain }}
+USER_ID={{ cml_user_id }}
+GROUP_ID={{ cml_group_id }}
+DOCKER_GROUP_ID={{ cml_docker_group_id }}
+
+# --- V2 PostgreSQL ---
+V2_POSTGRES_USER={{ cml_v2_postgres_user }}
+V2_POSTGRES_PASSWORD={{ vault_cml_v2_postgres_password }}
+V2_POSTGRES_DB={{ cml_v2_postgres_db }}
+V2_POSTGRES_PORT={{ cml_postgres_port }}
+DATABASE_URL=postgresql://{{ cml_v2_postgres_user }}:{{ vault_cml_v2_postgres_password }}@localhost:{{ cml_postgres_port }}/{{ cml_v2_postgres_db }}
+
+# --- Redis ---
+REDIS_PASSWORD={{ vault_cml_redis_password }}
+REDIS_URL=redis://:{{ vault_cml_redis_password }}@redis-changemaker:6379
+
+# --- JWT Auth ---
+JWT_ACCESS_SECRET={{ vault_cml_jwt_access_secret }}
+JWT_REFRESH_SECRET={{ vault_cml_jwt_refresh_secret }}
+JWT_ACCESS_EXPIRY={{ cml_jwt_access_expiry }}
+JWT_REFRESH_EXPIRY={{ cml_jwt_refresh_expiry }}
+
+# --- Encryption ---
+ENCRYPTION_KEY={{ vault_cml_encryption_key }}
+
+# --- Initial Super Admin ---
+INITIAL_ADMIN_EMAIL={{ vault_cml_initial_admin_email | default('admin@' + cml_domain) }}
+INITIAL_ADMIN_PASSWORD={{ vault_cml_initial_admin_password }}
+
+# --- API ---
+API_PORT={{ cml_api_port }}
+API_URL=http://localhost:{{ cml_api_port }}
+CORS_ORIGINS=http://app.{{ cml_domain }},https://app.{{ cml_domain }},http://{{ cml_domain }},https://{{ cml_domain }},http://localhost:3000,http://localhost,http://localhost:4003
+
+# --- Admin ---
+ADMIN_URL=http://localhost:{{ cml_admin_port }}
+
+# --- SMTP ---
+SMTP_HOST={{ cml_smtp_host }}
+SMTP_PORT={{ cml_smtp_port }}
+SMTP_USER={{ cml_smtp_user }}
+SMTP_PASS={{ vault_cml_smtp_pass | default('') }}
+SMTP_FROM=noreply@{{ cml_domain }}
+SMTP_FROM_NAME=Changemaker Lite
+EMAIL_TEST_MODE={{ cml_email_test_mode }}
+TEST_EMAIL_RECIPIENT={{ vault_cml_initial_admin_email | default('admin@' + cml_domain) }}
+
+# --- Listmonk ---
+LISTMONK_URL=http://listmonk-app:9000
+LISTMONK_ADMIN_USER={{ cml_listmonk_admin_user }}
+LISTMONK_ADMIN_PASSWORD={{ vault_cml_listmonk_api_token }}
+LISTMONK_SYNC_ENABLED={{ cml_listmonk_sync_enabled }}
+LISTMONK_WEBHOOK_SECRET={{ vault_cml_listmonk_api_token }}
+LISTMONK_DB_HOST=listmonk-db
+LISTMONK_DB_PORT=5432
+LISTMONK_DB_USER=listmonk
+LISTMONK_DB_PASSWORD={{ vault_cml_listmonk_db_password }}
+LISTMONK_DB_NAME=listmonk
+LISTMONK_WEB_ADMIN_USER=admin
+LISTMONK_WEB_ADMIN_PASSWORD={{ vault_cml_listmonk_web_admin_password }}
+LISTMONK_API_USER=api
+LISTMONK_API_TOKEN={{ vault_cml_listmonk_api_token }}
+LISTMONK_SMTP_HOST={{ cml_smtp_host }}
+LISTMONK_SMTP_PORT={{ cml_smtp_port }}
+LISTMONK_SMTP_USER={{ cml_smtp_user }}
+LISTMONK_SMTP_PASSWORD={{ vault_cml_smtp_pass | default('') }}
+LISTMONK_SMTP_TLS_TYPE={{ cml_listmonk_smtp_tls_type }}
+LISTMONK_SMTP_FROM=Changemaker Lite
+
+# --- Represent API ---
+REPRESENT_API_URL=https://represent.opennorth.ca
+
+# --- Geocoding ---
+{% if cml_mapbox_api_key %}
+MAPBOX_API_KEY={{ cml_mapbox_api_key }}
+{% endif %}
+GEOCODING_RATE_LIMIT_MS=1100
+GEOCODING_CACHE_ENABLED=true
+GEOCODING_CACHE_TTL_HOURS=24
+{% if cml_google_maps_api_key %}
+GOOGLE_MAPS_API_KEY={{ cml_google_maps_api_key }}
+{% endif %}
+GOOGLE_MAPS_ENABLED={{ cml_google_maps_enabled }}
+GEOCODING_PARALLEL_ENABLED=true
+GEOCODING_BATCH_SIZE=10
+BULK_GEOCODE_ENABLED=true
+BULK_GEOCODE_MAX_BATCH=5000
+
+# --- Platform Services ---
+NOCODB_URL=http://changemaker-v2-nocodb:8080
+NOCODB_PORT={{ cml_nocodb_port }}
+NC_ADMIN_EMAIL={{ vault_cml_initial_admin_email | default('admin@' + cml_domain) }}
+NC_ADMIN_PASSWORD={{ vault_cml_nocodb_admin_password }}
+
+N8N_URL=http://n8n-changemaker:5678
+N8N_PORT={{ cml_n8n_port }}
+N8N_HOST=n8n.{{ cml_domain }}
+N8N_ENCRYPTION_KEY={{ vault_cml_n8n_encryption_key }}
+N8N_USER_EMAIL={{ vault_cml_initial_admin_email | default('admin@' + cml_domain) }}
+N8N_USER_PASSWORD={{ vault_cml_n8n_user_password }}
+
+GITEA_URL=http://gitea-changemaker:3000
+GITEA_PORT={{ cml_gitea_port }}
+GITEA_ROOT_URL=https://git.{{ cml_domain }}
+GITEA_DOMAIN=git.{{ cml_domain }}
+GITEA_DB_PASSWD={{ vault_cml_gitea_db_passwd }}
+GITEA_DB_ROOT_PASSWORD={{ vault_cml_gitea_db_root_password }}
+
+# --- MailHog ---
+MAILHOG_URL=http://mailhog-changemaker:8025
+
+# --- Mini QR ---
+MINI_QR_URL=http://mini-qr:8080
+MINI_QR_PORT=8089
+
+# --- Excalidraw ---
+EXCALIDRAW_URL=http://excalidraw-changemaker:80
+EXCALIDRAW_PORT=8090
+EXCALIDRAW_WS_URL=wss://draw.{{ cml_domain }}
+
+# --- Homepage ---
+HOMEPAGE_URL=http://homepage-changemaker:3000
+HOMEPAGE_VAR_BASE_URL=https://{{ cml_domain }}
+
+# --- Vaultwarden ---
+VAULTWARDEN_URL=http://vaultwarden-changemaker:80
+VAULTWARDEN_DOMAIN=https://vault.{{ cml_domain }}
+VAULTWARDEN_ADMIN_TOKEN={{ vault_cml_vaultwarden_admin_token }}
+
+# --- Rocket.Chat ---
+ROCKETCHAT_URL=http://rocketchat-changemaker:3000
+ROCKETCHAT_ADMIN_USER=admin
+ROCKETCHAT_ADMIN_PASSWORD={{ vault_cml_rocketchat_admin_password }}
+ENABLE_CHAT={{ cml_enable_chat }}
+
+# --- Gancio ---
+GANCIO_PORT={{ cml_gancio_port }}
+GANCIO_URL=http://gancio-changemaker:13120
+GANCIO_BASE_URL=https://events.{{ cml_domain }}
+GANCIO_ADMIN_USER={{ cml_gancio_admin_user }}
+GANCIO_ADMIN_PASSWORD={{ vault_cml_gancio_admin_password }}
+GANCIO_SYNC_ENABLED={{ cml_gancio_sync_enabled }}
+
+# --- Pangolin ---
+PANGOLIN_API_URL={{ cml_pangolin_api_url }}
+PANGOLIN_API_KEY={{ vault_cml_pangolin_api_key | default('') }}
+PANGOLIN_ORG_ID={{ cml_pangolin_org_id }}
+PANGOLIN_SITE_ID={{ vault_cml_pangolin_site_id | default('') }}
+PANGOLIN_ENDPOINT={{ vault_cml_pangolin_endpoint | default('') }}
+PANGOLIN_NEWT_ID={{ vault_cml_pangolin_newt_id | default('') }}
+PANGOLIN_NEWT_SECRET={{ vault_cml_pangolin_newt_secret | default('') }}
+
+# --- NAR ---
+NAR_DATA_DIR=/data
+
+# --- Payments ---
+ENABLE_PAYMENTS={{ cml_enable_payments }}
+
+# --- Media ---
+ENABLE_MEDIA_FEATURES={{ cml_enable_media }}
+MEDIA_API_PORT={{ cml_media_api_port }}
+MEDIA_API_PUBLIC_URL=http://media-api:{{ cml_media_api_port }}
+
+# --- Docs / Code Server ---
+CODE_SERVER_URL=http://code-server-changemaker:8080
+CODE_SERVER_PORT=8888
+MKDOCS_PREVIEW_URL=http://mkdocs-changemaker:8000
+MKDOCS_PORT=4003
+
+# --- Monitoring ---
+PROMETHEUS_PORT=9090
+GRAFANA_PORT=3005
+GRAFANA_ADMIN_PASSWORD={{ vault_cml_grafana_admin_password }}
+GRAFANA_ROOT_URL=http://localhost:3005
+CADVISOR_PORT=8086
+NODE_EXPORTER_PORT=9100
+REDIS_EXPORTER_PORT=9121
+ALERTMANAGER_PORT=9093
+GOTIFY_PORT=8889
+GOTIFY_ADMIN_USER=admin
+GOTIFY_ADMIN_PASSWORD={{ vault_cml_gotify_admin_password }}
+
+# --- Bunker Ops (Fleet Management) ---
+INSTANCE_LABEL={{ bunker_ops_instance_label | default(cml_domain) }}
+BUNKER_OPS_ENABLED={{ bunker_ops_enabled | string | lower }}
+BUNKER_OPS_REMOTE_WRITE_URL={{ bunker_ops_remote_write_url }}
diff --git a/bunker-ops/roles/changemaker/templates/services.yaml.j2 b/bunker-ops/roles/changemaker/templates/services.yaml.j2
new file mode 100644
index 00000000..dd310026
--- /dev/null
+++ b/bunker-ops/roles/changemaker/templates/services.yaml.j2
@@ -0,0 +1,127 @@
+---
+# Homepage Services Configuration — Generated by Ansible (Bunker Ops)
+# Instance: {{ cml_domain }}
+
+- Production - Core:
+
+ - Admin GUI:
+ icon: mdi-view-dashboard
+ href: "https://app.{{ cml_domain }}"
+ description: Application dashboard and public pages
+ container: changemaker-v2-admin
+
+ - API:
+ icon: mdi-api
+ href: "https://api.{{ cml_domain }}"
+ description: V2 REST API
+ container: changemaker-v2-api
+
+ - Media API:
+ icon: mdi-video
+ href: "https://media.{{ cml_domain }}"
+ description: Video library and streaming
+ container: changemaker-media-api
+
+ - Main Site:
+ icon: mdi-web
+ href: "https://{{ cml_domain }}"
+ description: Documentation and marketing site
+ container: mkdocs-site-server-changemaker
+
+- Production - Tools:
+
+ - Code Server:
+ icon: mdi-code-braces
+ href: "https://code.{{ cml_domain }}"
+ description: VS Code in the browser
+ container: code-server-changemaker
+
+ - NocoDB:
+ icon: mdi-database
+ href: "https://db.{{ cml_domain }}"
+ description: Database browser (read-only)
+ container: changemaker-v2-nocodb
+
+ - MkDocs (Live):
+ icon: mdi-book-open-page-variant
+ href: "https://docs.{{ cml_domain }}"
+ description: Live documentation with hot reload
+ container: mkdocs-changemaker
+
+ - Mini QR:
+ icon: mdi-qrcode
+ href: "https://qr.{{ cml_domain }}"
+ description: QR code generator
+ container: mini-qr
+
+ - Excalidraw:
+ icon: mdi-draw
+ href: "https://draw.{{ cml_domain }}"
+ description: Collaborative whiteboard
+ container: excalidraw-changemaker
+
+ - Vaultwarden:
+ icon: mdi-lock
+ href: "https://vault.{{ cml_domain }}"
+ description: Password manager (Bitwarden-compatible)
+ container: vaultwarden-changemaker
+
+ - Gancio:
+ icon: mdi-calendar-multiple
+ href: "https://events.{{ cml_domain }}"
+ description: Event management platform
+ container: gancio-changemaker
+
+- Production - Integrations:
+
+ - Listmonk:
+ icon: mdi-email-newsletter
+ href: "https://listmonk.{{ cml_domain }}"
+ description: Newsletter and mailing list manager
+ container: listmonk-app
+
+ - MailHog:
+ icon: mdi-email-check
+ href: "https://mail.{{ cml_domain }}"
+ description: Email capture for testing
+ container: mailhog-changemaker
+
+ - n8n:
+ icon: mdi-robot-industrial
+ href: "https://n8n.{{ cml_domain }}"
+ description: Workflow automation platform
+ container: n8n-changemaker
+
+ - Gitea:
+ icon: mdi-git
+ href: "https://git.{{ cml_domain }}"
+ description: Git repository hosting
+ container: gitea-changemaker
+
+- Production - Monitoring:
+
+ - Grafana:
+ icon: mdi-chart-box
+ href: "https://grafana.{{ cml_domain }}"
+ description: Monitoring dashboards
+ container: grafana-changemaker
+
+ - Prometheus:
+ icon: mdi-chart-line
+ href: "https://prometheus.{{ cml_domain }}"
+ description: Metrics collection
+ container: prometheus-changemaker
+
+- Local - Core:
+
+ - Admin GUI:
+ icon: mdi-view-dashboard
+ href: "http://localhost:{{ cml_admin_port }}"
+ description: Application dashboard (port {{ cml_admin_port }})
+ container: changemaker-v2-admin
+
+ - API:
+ icon: mdi-api
+ href: "http://localhost:{{ cml_api_port }}"
+ description: V2 REST API (port {{ cml_api_port }})
+ container: changemaker-v2-api
diff --git a/bunker-ops/roles/common/defaults/main.yml b/bunker-ops/roles/common/defaults/main.yml
new file mode 100644
index 00000000..c0a01796
--- /dev/null
+++ b/bunker-ops/roles/common/defaults/main.yml
@@ -0,0 +1,16 @@
+---
+# Common role defaults
+
+# Firewall
+ufw_allowed_ports:
+ - { port: "{{ ssh_port | default(22) }}", proto: tcp, comment: "SSH" }
+ - { port: 80, proto: tcp, comment: "HTTP" }
+ - { port: 443, proto: tcp, comment: "HTTPS" }
+
+# fail2ban
+fail2ban_maxretry: 5
+fail2ban_bantime: 3600
+fail2ban_findtime: 600
+
+# Swap (create if < 2GB RAM)
+swap_size_mb: 2048
diff --git a/bunker-ops/roles/common/handlers/main.yml b/bunker-ops/roles/common/handlers/main.yml
new file mode 100644
index 00000000..93d55645
--- /dev/null
+++ b/bunker-ops/roles/common/handlers/main.yml
@@ -0,0 +1,14 @@
+---
+- name: Restart fail2ban
+ ansible.builtin.service:
+ name: fail2ban
+ state: restarted
+
+- name: Reload UFW
+ community.general.ufw:
+ state: reloaded
+
+- name: Restart Docker
+ ansible.builtin.service:
+ name: docker
+ state: restarted
diff --git a/bunker-ops/roles/common/tasks/docker.yml b/bunker-ops/roles/common/tasks/docker.yml
new file mode 100644
index 00000000..99e2733e
--- /dev/null
+++ b/bunker-ops/roles/common/tasks/docker.yml
@@ -0,0 +1,42 @@
+---
+# Install Docker CE + Compose plugin
+
+- name: Install Docker prerequisites
+ ansible.builtin.apt:
+ name:
+ - ca-certificates
+ - gnupg
+ - lsb-release
+ state: present
+
+- name: Add Docker GPG key
+ ansible.builtin.apt_key:
+ url: https://download.docker.com/linux/ubuntu/gpg
+ state: present
+
+- name: Add Docker repository
+ ansible.builtin.apt_repository:
+ repo: "deb [arch=amd64] https://download.docker.com/linux/ubuntu {{ ansible_distribution_release }} stable"
+ state: present
+
+- name: Install Docker CE
+ ansible.builtin.apt:
+ name:
+ - docker-ce
+ - docker-ce-cli
+ - containerd.io
+ - docker-compose-plugin
+ state: present
+ update_cache: true
+
+- name: Ensure Docker service is running
+ ansible.builtin.service:
+ name: docker
+ state: started
+ enabled: true
+
+- name: Add deploy user to docker group
+ ansible.builtin.user:
+ name: "{{ ansible_user }}"
+ groups: docker
+ append: true
diff --git a/bunker-ops/roles/common/tasks/fail2ban.yml b/bunker-ops/roles/common/tasks/fail2ban.yml
new file mode 100644
index 00000000..5ccc9443
--- /dev/null
+++ b/bunker-ops/roles/common/tasks/fail2ban.yml
@@ -0,0 +1,30 @@
+---
+# Configure fail2ban for SSH brute-force protection
+
+- name: Ensure fail2ban is installed
+ ansible.builtin.apt:
+ name: fail2ban
+ state: present
+
+- name: Configure fail2ban jail
+ ansible.builtin.copy:
+ dest: /etc/fail2ban/jail.local
+ content: |
+ [DEFAULT]
+ bantime = {{ fail2ban_bantime }}
+ findtime = {{ fail2ban_findtime }}
+ maxretry = {{ fail2ban_maxretry }}
+
+ [sshd]
+ enabled = true
+ port = {{ ssh_port | default(22) }}
+ filter = sshd
+ logpath = /var/log/auth.log
+ mode: "0644"
+ notify: Restart fail2ban
+
+- name: Ensure fail2ban is running
+ ansible.builtin.service:
+ name: fail2ban
+ state: started
+ enabled: true
diff --git a/bunker-ops/roles/common/tasks/main.yml b/bunker-ops/roles/common/tasks/main.yml
new file mode 100644
index 00000000..93920f28
--- /dev/null
+++ b/bunker-ops/roles/common/tasks/main.yml
@@ -0,0 +1,29 @@
+---
+# Common role — OS setup, Docker, firewall, fail2ban
+
+- name: Update apt cache
+ ansible.builtin.apt:
+ update_cache: true
+ cache_valid_time: 3600
+
+- name: Install base packages
+ ansible.builtin.apt:
+ name: "{{ common_packages }}"
+ state: present
+
+- name: Set timezone to UTC
+ community.general.timezone:
+ name: UTC
+
+- name: Configure swap (if needed)
+ ansible.builtin.include_tasks: swap.yml
+ when: ansible_memtotal_mb < 3072
+
+- name: Install Docker
+ ansible.builtin.include_tasks: docker.yml
+
+- name: Configure UFW firewall
+ ansible.builtin.include_tasks: ufw.yml
+
+- name: Configure fail2ban
+ ansible.builtin.include_tasks: fail2ban.yml
diff --git a/bunker-ops/roles/common/tasks/swap.yml b/bunker-ops/roles/common/tasks/swap.yml
new file mode 100644
index 00000000..93bc7bd0
--- /dev/null
+++ b/bunker-ops/roles/common/tasks/swap.yml
@@ -0,0 +1,36 @@
+---
+# Create swap file on low-memory servers
+
+- name: Check if swap file exists
+ ansible.builtin.stat:
+ path: /swapfile
+ register: swap_check
+
+- name: Create swap file
+ when: not swap_check.stat.exists
+ block:
+ - name: Allocate swap file
+ ansible.builtin.command:
+ cmd: "dd if=/dev/zero of=/swapfile bs=1M count={{ swap_size_mb }}"
+ changed_when: true
+
+ - name: Set swap file permissions
+ ansible.builtin.file:
+ path: /swapfile
+ mode: "0600"
+
+ - name: Format swap file
+ ansible.builtin.command:
+ cmd: mkswap /swapfile
+ changed_when: true
+
+ - name: Enable swap file
+ ansible.builtin.command:
+ cmd: swapon /swapfile
+ changed_when: true
+
+ - name: Add swap to fstab
+ ansible.builtin.lineinfile:
+ path: /etc/fstab
+ line: "/swapfile none swap sw 0 0"
+ state: present
diff --git a/bunker-ops/roles/common/tasks/ufw.yml b/bunker-ops/roles/common/tasks/ufw.yml
new file mode 100644
index 00000000..c48d2fec
--- /dev/null
+++ b/bunker-ops/roles/common/tasks/ufw.yml
@@ -0,0 +1,24 @@
+---
+# Configure UFW firewall
+
+- name: Set UFW default deny incoming
+ community.general.ufw:
+ direction: incoming
+ policy: deny
+
+- name: Set UFW default allow outgoing
+ community.general.ufw:
+ direction: outgoing
+ policy: allow
+
+- name: Allow required ports
+ community.general.ufw:
+ rule: allow
+ port: "{{ item.port | string }}"
+ proto: "{{ item.proto }}"
+ comment: "{{ item.comment }}"
+ loop: "{{ ufw_allowed_ports }}"
+
+- name: Enable UFW
+ community.general.ufw:
+ state: enabled
diff --git a/bunker-ops/roles/monitoring/defaults/main.yml b/bunker-ops/roles/monitoring/defaults/main.yml
new file mode 100644
index 00000000..8c721520
--- /dev/null
+++ b/bunker-ops/roles/monitoring/defaults/main.yml
@@ -0,0 +1,12 @@
+---
+# Monitoring role defaults
+
+# Prometheus scrape intervals
+prometheus_scrape_interval: 15s
+prometheus_evaluation_interval: 15s
+prometheus_api_scrape_interval: 10s
+
+# Remote write (Bunker Ops Tier 2)
+prometheus_remote_write_enabled: "{{ bunker_ops_enabled | default(false) }}"
+prometheus_remote_write_url: "{{ bunker_ops_remote_write_url | default('') }}"
+prometheus_remote_write_token: "{{ vault_bunker_ops_remote_write_token | default('') }}"
diff --git a/bunker-ops/roles/monitoring/handlers/main.yml b/bunker-ops/roles/monitoring/handlers/main.yml
new file mode 100644
index 00000000..9ae3895e
--- /dev/null
+++ b/bunker-ops/roles/monitoring/handlers/main.yml
@@ -0,0 +1,6 @@
+---
+- name: Restart monitoring stack
+ ansible.builtin.command:
+ cmd: docker compose --profile monitoring up -d --force-recreate prometheus-changemaker
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
diff --git a/bunker-ops/roles/monitoring/tasks/main.yml b/bunker-ops/roles/monitoring/tasks/main.yml
new file mode 100644
index 00000000..1b054a6d
--- /dev/null
+++ b/bunker-ops/roles/monitoring/tasks/main.yml
@@ -0,0 +1,16 @@
+---
+# Monitoring role — Prometheus config + optional remote_write
+
+- name: Generate Prometheus config
+ ansible.builtin.template:
+ src: prometheus.yml.j2
+ dest: "{{ cml_deploy_path }}/configs/prometheus/prometheus.yml"
+ mode: "0644"
+ notify: Restart monitoring stack
+
+- name: Start monitoring profile
+ ansible.builtin.command:
+ cmd: docker compose --profile monitoring up -d
+ chdir: "{{ cml_deploy_path }}"
+ changed_when: true
+ when: cml_monitoring_enabled | bool
diff --git a/bunker-ops/roles/monitoring/templates/prometheus.yml.j2 b/bunker-ops/roles/monitoring/templates/prometheus.yml.j2
new file mode 100644
index 00000000..893bb9c2
--- /dev/null
+++ b/bunker-ops/roles/monitoring/templates/prometheus.yml.j2
@@ -0,0 +1,86 @@
+# Prometheus configuration — Generated by Ansible (Bunker Ops)
+# Instance: {{ cml_domain }}
+
+global:
+ scrape_interval: {{ prometheus_scrape_interval }}
+ evaluation_interval: {{ prometheus_evaluation_interval }}
+ external_labels:
+ monitor: 'changemaker-lite'
+ instance: '{{ bunker_ops_instance_label | default(cml_domain) }}'
+ domain: '{{ cml_domain }}'
+ environment: '{{ cml_node_env }}'
+
+# Alertmanager configuration
+alerting:
+ alertmanagers:
+ - static_configs:
+ - targets: ['alertmanager:9093']
+
+# Load rules once and periodically evaluate them
+rule_files:
+ - "alerts.yml"
+
+# Scrape configurations
+scrape_configs:
+ # V2 Unified API Metrics
+ - job_name: 'changemaker-v2-api'
+ static_configs:
+ - targets: ['changemaker-v2-api:{{ cml_api_port }}']
+ metrics_path: '/api/metrics'
+ scrape_interval: {{ prometheus_api_scrape_interval }}
+ scrape_timeout: 5s
+
+ # N8N Metrics (if available)
+ - job_name: 'n8n'
+ static_configs:
+ - targets: ['n8n-changemaker:5678']
+ metrics_path: '/metrics'
+ scrape_interval: 30s
+
+ # Redis Metrics
+ - job_name: 'redis'
+ static_configs:
+ - targets: ['redis-exporter-changemaker:9121']
+ scrape_interval: 15s
+
+ # cAdvisor - Docker container metrics
+ - job_name: 'cadvisor'
+ static_configs:
+ - targets: ['cadvisor:8080']
+ scrape_interval: 15s
+
+ # Node Exporter - System metrics
+ - job_name: 'node'
+ static_configs:
+ - targets: ['node-exporter:9100']
+ scrape_interval: 15s
+
+ # Prometheus self-monitoring
+ - job_name: 'prometheus'
+ static_configs:
+ - targets: ['localhost:9090']
+
+ # Alertmanager monitoring
+ - job_name: 'alertmanager'
+ static_configs:
+ - targets: ['alertmanager:9093']
+ scrape_interval: 30s
+
+{% if prometheus_remote_write_enabled | bool %}
+# Remote write to Bunker Ops central (VictoriaMetrics)
+remote_write:
+ - url: "{{ prometheus_remote_write_url }}"
+{% if prometheus_remote_write_token %}
+ authorization:
+ credentials: "{{ prometheus_remote_write_token }}"
+{% endif %}
+ queue_config:
+ max_samples_per_send: 1000
+ batch_send_deadline: 5s
+ max_shards: 3
+ write_relabel_configs:
+ # Only send cm_* and node_* metrics to central (not raw container metrics)
+ - source_labels: [__name__]
+ regex: '(cm_.*|node_.*|http_request.*|up)'
+ action: keep
+{% endif %}
diff --git a/bunker-ops/scripts/add-instance.sh b/bunker-ops/scripts/add-instance.sh
new file mode 100755
index 00000000..6d1e27d3
--- /dev/null
+++ b/bunker-ops/scripts/add-instance.sh
@@ -0,0 +1,113 @@
+#!/usr/bin/env bash
+# =============================================================================
+# Bunker Ops — Scaffold a New Instance
+# Creates host_vars directory with main.yml + vault.yml for a new instance
+#
+# Usage: ./scripts/add-instance.sh <hostname> <domain> <ssh-host> [--tier 0|1|2]
+# =============================================================================
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
+INVENTORY_DIR="${PROJECT_DIR}/inventory/host_vars"
+
+# --- Parse args ---
+HOSTNAME="${1:-}"
+DOMAIN="${2:-}"
+SSH_HOST="${3:-}"
+TIER=1
+
+shift 3 2>/dev/null || true
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --tier) TIER="$2"; shift 2 ;;
+ *) echo "Unknown option: $1"; exit 1 ;;
+ esac
+done
+
+if [[ -z "$HOSTNAME" ]] || [[ -z "$DOMAIN" ]] || [[ -z "$SSH_HOST" ]]; then
+ echo "Usage: $0 <hostname> <domain> <ssh-host> [--tier 0|1|2]"
+ echo ""
+ echo "Arguments:"
+ echo " hostname Ansible inventory name (e.g., edmonton-prod)"
+ echo " domain Instance domain (e.g., betteredmonton.org)"
+ echo " ssh-host SSH address (e.g., 10.0.1.10 or user@host)"
+ echo ""
+ echo "Options:"
+ echo " --tier N Bunker Ops tier (0=standalone, 1=managed, 2=fleet)"
+ echo ""
+ echo "Examples:"
+ echo " $0 edmonton-prod betteredmonton.org 10.0.1.10"
+ echo " $0 calgary-staging staging.bettercalgary.org deploy@10.0.2.20 --tier 2"
+ exit 1
+fi
+
+HOST_DIR="${INVENTORY_DIR}/${HOSTNAME}"
+
+if [[ -d "$HOST_DIR" ]]; then
+ echo "ERROR: Host directory already exists: ${HOST_DIR}"
+ echo "Remove it first if you want to recreate."
+ exit 1
+fi
+
+# --- Determine Bunker Ops settings ---
+BUNKER_OPS_ENABLED="false"
+if [[ "$TIER" == "2" ]]; then
+ BUNKER_OPS_ENABLED="true"
+fi
+
+MONITORING_ENABLED="false"
+if [[ "$TIER" -ge 1 ]]; then
+ MONITORING_ENABLED="true"
+fi
+
+# --- Create main.yml ---
+mkdir -p "$HOST_DIR"
+
+cat > "${HOST_DIR}/main.yml" << EOF
+---
+# Instance: ${HOSTNAME}
+# Domain: ${DOMAIN}
+# Tier: ${TIER}
+
+cml_domain: ${DOMAIN}
+cml_node_env: production
+
+# Feature toggles
+cml_enable_media: "false"
+cml_listmonk_sync_enabled: "false"
+cml_gancio_sync_enabled: "false"
+cml_email_test_mode: "true"
+cml_monitoring_enabled: ${MONITORING_ENABLED}
+
+# SMTP (update for production email)
+cml_smtp_host: mailhog-changemaker
+cml_smtp_port: 1025
+cml_smtp_user: ""
+
+# Pangolin tunnel (configure after setup)
+cml_pangolin_api_url: "https://api.bnkserve.org/v1"
+cml_pangolin_org_id: ""
+
+# Bunker Ops
+bunker_ops_enabled: ${BUNKER_OPS_ENABLED}
+bunker_ops_instance_label: "${DOMAIN}"
+bunker_ops_remote_write_url: ""
+EOF
+
+echo "Created: ${HOST_DIR}/main.yml"
+
+# --- Generate vault ---
+"${SCRIPT_DIR}/bootstrap-vault.sh" "$HOSTNAME" --domain "$DOMAIN"
+
+# --- Append to hosts.yml reminder ---
+echo ""
+echo "Add to inventory/hosts.yml under changemaker_instances:"
+echo ""
+echo " ${HOSTNAME}:"
+echo " ansible_host: ${SSH_HOST}"
+echo " ansible_user: deploy"
+echo " cml_domain: ${DOMAIN}"
+echo ""
+echo "Then deploy:"
+echo " ansible-playbook playbooks/deploy.yml --limit ${HOSTNAME}"
diff --git a/bunker-ops/scripts/bootstrap-vault.sh b/bunker-ops/scripts/bootstrap-vault.sh
new file mode 100755
index 00000000..296de1c4
--- /dev/null
+++ b/bunker-ops/scripts/bootstrap-vault.sh
@@ -0,0 +1,153 @@
+#!/usr/bin/env bash
+# =============================================================================
+# Bunker Ops — Bootstrap Vault for a New Instance
+# Generates all 19+ secrets and creates an encrypted vault.yml
+#
+# Usage: ./scripts/bootstrap-vault.sh <hostname> [--domain example.org]
+# =============================================================================
+set -euo pipefail
+
+SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
+PROJECT_DIR="$(dirname "$SCRIPT_DIR")"
+INVENTORY_DIR="${PROJECT_DIR}/inventory/host_vars"
+
+# --- Parse args ---
+HOSTNAME="${1:-}"
+DOMAIN=""
+
+shift || true
+while [[ $# -gt 0 ]]; do
+ case "$1" in
+ --domain) DOMAIN="$2"; shift 2 ;;
+ *) echo "Unknown option: $1"; exit 1 ;;
+ esac
+done
+
+if [[ -z "$HOSTNAME" ]]; then
+ echo "Usage: $0 <hostname> [--domain example.org]"
+ echo ""
+ echo "Generates a vault.yml with random secrets for the given host."
+ echo "The file will be encrypted with ansible-vault."
+ exit 1
+fi
+
+HOST_DIR="${INVENTORY_DIR}/${HOSTNAME}"
+VAULT_FILE="${HOST_DIR}/vault.yml"
+
+# --- Helpers ---
+generate_secret() { openssl rand -hex 32; }
+generate_password() { openssl rand -base64 48 | tr -dc 'a-zA-Z0-9' | head -c "${1:-24}"; }
+generate_token() { openssl rand -hex 16; }
+
+# --- Create host_vars directory ---
+mkdir -p "$HOST_DIR"
+
+# --- Generate secrets ---
+echo "Generating secrets for: ${HOSTNAME}"
+echo ""
+
+V2_PG_PASS=$(generate_password 24)
+REDIS_PASS=$(generate_password 24)
+JWT_ACCESS=$(generate_secret)
+JWT_REFRESH=$(generate_secret)
+ENC_KEY=$(generate_secret)
+ADMIN_PASS=$(generate_password 20)
+LM_DB_PASS=$(generate_password 24)
+LM_WEB_PASS=$(generate_password 20)
+LM_API_TOKEN=$(generate_token)
+NC_PASS=$(generate_password 20)
+GITEA_DB=$(generate_password 20)
+GITEA_ROOT=$(generate_password 20)
+N8N_ENC=$(generate_password 32)
+N8N_PASS=$(generate_password 20)
+GRAFANA_PASS=$(generate_password 20)
+GOTIFY_PASS=$(generate_password 20)
+VW_TOKEN=$(generate_secret)
+RC_PASS=$(generate_password 20)
+GANCIO_PASS=$(generate_password 20)
+
+ADMIN_EMAIL="admin@${DOMAIN:-cmlite.org}"
+
+# --- Write vault file ---
+cat > "$VAULT_FILE" << EOF
+---
+# Bunker Ops — Secrets for ${HOSTNAME}
+# Generated: $(date -Iseconds)
+# Encrypt with: ansible-vault encrypt ${VAULT_FILE}
+
+# --- Database ---
+vault_cml_v2_postgres_password: "${V2_PG_PASS}"
+
+# --- Redis ---
+vault_cml_redis_password: "${REDIS_PASS}"
+
+# --- JWT & Encryption ---
+vault_cml_jwt_access_secret: "${JWT_ACCESS}"
+vault_cml_jwt_refresh_secret: "${JWT_REFRESH}"
+vault_cml_encryption_key: "${ENC_KEY}"
+
+# --- Admin ---
+vault_cml_initial_admin_email: "${ADMIN_EMAIL}"
+vault_cml_initial_admin_password: "${ADMIN_PASS}"
+
+# --- Listmonk ---
+vault_cml_listmonk_db_password: "${LM_DB_PASS}"
+vault_cml_listmonk_web_admin_password: "${LM_WEB_PASS}"
+vault_cml_listmonk_api_token: "${LM_API_TOKEN}"
+
+# --- NocoDB ---
+vault_cml_nocodb_admin_password: "${NC_PASS}"
+
+# --- Gitea ---
+vault_cml_gitea_db_passwd: "${GITEA_DB}"
+vault_cml_gitea_db_root_password: "${GITEA_ROOT}"
+
+# --- n8n ---
+vault_cml_n8n_encryption_key: "${N8N_ENC}"
+vault_cml_n8n_user_password: "${N8N_PASS}"
+
+# --- Monitoring ---
+vault_cml_grafana_admin_password: "${GRAFANA_PASS}"
+vault_cml_gotify_admin_password: "${GOTIFY_PASS}"
+
+# --- Vaultwarden ---
+vault_cml_vaultwarden_admin_token: "${VW_TOKEN}"
+
+# --- Rocket.Chat ---
+vault_cml_rocketchat_admin_password: "${RC_PASS}"
+
+# --- Gancio ---
+vault_cml_gancio_admin_password: "${GANCIO_PASS}"
+
+# --- SMTP (set manually) ---
+vault_cml_smtp_pass: ""
+
+# --- Pangolin (set after tunnel setup) ---
+vault_cml_pangolin_api_key: ""
+vault_cml_pangolin_newt_id: ""
+vault_cml_pangolin_newt_secret: ""
+vault_cml_pangolin_site_id: ""
+vault_cml_pangolin_endpoint: ""
+
+# --- Bunker Ops (set if Tier 2) ---
+vault_bunker_ops_remote_write_token: ""
+EOF
+
+echo "Created: ${VAULT_FILE}"
+echo ""
+
+# --- Encrypt if vault password file exists ---
+VAULT_PASS_FILE="${PROJECT_DIR}/.vault_pass"
+if [[ -f "$VAULT_PASS_FILE" ]]; then
+ ansible-vault encrypt "$VAULT_FILE" --vault-password-file "$VAULT_PASS_FILE"
+ echo "Encrypted with ansible-vault."
+else
+ echo "WARNING: No .vault_pass file found at ${VAULT_PASS_FILE}"
+ echo "Encrypt manually with: ansible-vault encrypt ${VAULT_FILE}"
+fi
+
+echo ""
+echo "Next steps:"
+echo " 1. Create ${HOST_DIR}/main.yml with instance config (cml_domain, features, etc.)"
+echo " 2. Add ${HOSTNAME} to inventory/hosts.yml"
+echo " 3. Deploy: ansible-playbook playbooks/deploy.yml --limit ${HOSTNAME}"
diff --git a/config.sh b/config.sh
index 1b06a516..97d0d01a 100755
--- a/config.sh
+++ b/config.sh
@@ -211,6 +211,8 @@ configure_domain() {
update_env_var "EXCALIDRAW_WS_URL" "wss://draw.$domain"
update_env_var "LISTMONK_SMTP_FROM" "Changemaker Lite <noreply@$domain>"
update_env_var "HOMEPAGE_VAR_BASE_URL" "https://$domain"
+ update_env_var "VAULTWARDEN_DOMAIN" "https://vault.$domain"
+ update_env_var "GANCIO_BASE_URL" "https://events.$domain"
# Update mkdocs.yml
if [[ -f "$MKDOCS_YML" ]]; then
@@ -223,6 +225,16 @@ configure_domain() {
success "Domain set to: $domain"
+ echo ""
+ if prompt_yes_no "Is this a production deployment?"; then
+ update_env_var "NODE_ENV" "production"
+ success "NODE_ENV set to production"
+ IS_PRODUCTION="yes"
+ else
+ info "NODE_ENV stays as development"
+ IS_PRODUCTION="no"
+ fi
+
# Store for later use
CONFIGURED_DOMAIN="$domain"
}
@@ -263,7 +275,7 @@ configure_admin() {
generate_all_secrets() {
header "Generating Secrets"
- info "Auto-generating 16 unique secrets and passwords..."
+ info "Auto-generating 19 unique secrets and passwords..."
echo ""
# JWT & Encryption (64-char hex)
@@ -283,6 +295,7 @@ generate_all_secrets() {
redis_pass=$(generate_password 24)
update_env_var "V2_POSTGRES_PASSWORD" "$pg_pass"
+ update_env_var "DATABASE_URL" "postgresql://changemaker:${pg_pass}@localhost:5433/changemaker_v2"
update_env_var "REDIS_PASSWORD" "$redis_pass"
update_env_var "REDIS_URL" "redis://:${redis_pass}@redis-changemaker:6379"
success "PostgreSQL + Redis passwords"
@@ -329,8 +342,26 @@ generate_all_secrets() {
update_env_var "GOTIFY_ADMIN_PASSWORD" "$gotify_pass"
success "Grafana + Gotify admin passwords"
+ # Vaultwarden
+ local vw_admin_token
+ vw_admin_token=$(generate_secret)
+ update_env_var "VAULTWARDEN_ADMIN_TOKEN" "$vw_admin_token"
+ success "Vaultwarden admin token"
+
+ # Rocket.Chat
+ local rc_pass
+ rc_pass=$(generate_password 20)
+ update_env_var "ROCKETCHAT_ADMIN_PASSWORD" "$rc_pass"
+ success "Rocket.Chat admin password"
+
+ # Gancio
+ local gancio_pass
+ gancio_pass=$(generate_password 20)
+ update_env_var "GANCIO_ADMIN_PASSWORD" "$gancio_pass"
+ success "Gancio admin password"
+
echo ""
- success "All 16 secrets generated. No placeholder passwords remain."
+ success "All 19 secrets generated. No placeholder passwords remain."
}
configure_smtp() {
@@ -388,6 +419,30 @@ configure_features() {
else
LISTMONK_SYNC="no"
fi
+
+ if prompt_yes_no "Enable Payments (Stripe)?"; then
+ update_env_var "ENABLE_PAYMENTS" "true"
+ success "Payments enabled"
+ PAYMENTS_ENABLED="yes"
+ else
+ PAYMENTS_ENABLED="no"
+ fi
+
+ if prompt_yes_no "Enable Rocket.Chat (team chat)?"; then
+ update_env_var "ENABLE_CHAT" "true"
+ success "Rocket.Chat enabled"
+ CHAT_ENABLED="yes"
+ else
+ CHAT_ENABLED="no"
+ fi
+
+ if prompt_yes_no "Enable Gancio event sync (shift → event)?"; then
+ update_env_var "GANCIO_SYNC_ENABLED" "true"
+ success "Gancio sync enabled"
+ GANCIO_SYNC="yes"
+ else
+ GANCIO_SYNC="no"
+ fi
}
configure_pangolin() {
@@ -417,11 +472,10 @@ configure_pangolin() {
configure_cors() {
local domain="${CONFIGURED_DOMAIN:-cmlite.org}"
- if [[ "$domain" != "cmlite.org" ]]; then
- local origins="http://app.$domain,https://app.$domain,http://localhost:3000,http://localhost"
- update_env_var "CORS_ORIGINS" "$origins"
- success "CORS origins set for $domain"
- fi
+ # Include app subdomain + root domain (for MkDocs payment widgets) + localhost fallbacks
+ local origins="http://app.$domain,https://app.$domain,http://$domain,https://$domain,http://localhost:3000,http://localhost,http://localhost:4003"
+ update_env_var "CORS_ORIGINS" "$origins"
+ success "CORS origins set for $domain"
}
# =============================================================================
@@ -497,6 +551,18 @@ generate_services_yaml() {
description: Collaborative whiteboard
container: excalidraw-changemaker
+ - Vaultwarden:
+ icon: mdi-lock
+ href: "https://vault.$domain"
+ description: Password manager (Bitwarden-compatible)
+ container: vaultwarden-changemaker
+
+ - Gancio:
+ icon: mdi-calendar-multiple
+ href: "https://events.$domain"
+ description: Event management platform
+ container: gancio-changemaker
+
- Production - Integrations:
- Listmonk:
@@ -613,8 +679,8 @@ generate_services_yaml() {
- Main Site:
icon: mdi-web
- href: "http://localhost:4001"
- description: Documentation site (port 4001)
+ href: "http://localhost:4004"
+ description: Documentation site (port 4004)
container: mkdocs-site-server-changemaker
- Homepage:
@@ -655,6 +721,18 @@ generate_services_yaml() {
description: Collaborative whiteboard (port 8090)
container: excalidraw-changemaker
+ - Vaultwarden:
+ icon: mdi-lock
+ href: "http://localhost:8445"
+ description: Password manager (port 8445)
+ container: vaultwarden-changemaker
+
+ - Gancio:
+ icon: mdi-calendar-multiple
+ href: "http://localhost:8092"
+ description: Event management (port 8092)
+ container: gancio-changemaker
+
- Local - Integrations:
- Listmonk:
@@ -705,8 +783,8 @@ generate_services_yaml() {
- Grafana:
icon: mdi-chart-box
- href: "http://localhost:3001"
- description: Monitoring dashboards (port 3001)
+ href: "http://localhost:3005"
+ description: Monitoring dashboards (port 3005)
container: grafana-changemaker
- Prometheus:
@@ -729,8 +807,8 @@ generate_services_yaml() {
- cAdvisor:
icon: mdi-docker
- href: "http://localhost:8080"
- description: Container metrics (port 8080)
+ href: "http://localhost:8086"
+ description: Container metrics (port 8086)
container: cadvisor-changemaker
- Node Exporter:
@@ -806,9 +884,12 @@ print_summary() {
echo -e " ${BOLD}Admin password:${NC} [set]"
echo -e " ${BOLD}SMTP:${NC} ${SMTP_MODE:-mailhog}"
echo -e " ${BOLD}Media Manager:${NC} ${MEDIA_ENABLED:-no}"
+ echo -e " ${BOLD}Payments:${NC} ${PAYMENTS_ENABLED:-no}"
echo -e " ${BOLD}Listmonk sync:${NC} ${LISTMONK_SYNC:-no}"
+ echo -e " ${BOLD}Rocket.Chat:${NC} ${CHAT_ENABLED:-no}"
+ echo -e " ${BOLD}Gancio sync:${NC} ${GANCIO_SYNC:-no}"
echo -e " ${BOLD}Pangolin:${NC} ${PANGOLIN_CONFIGURED:-no}"
- echo -e " ${BOLD}Secrets:${NC} 16 auto-generated"
+ echo -e " ${BOLD}Secrets:${NC} 19 auto-generated"
echo ""
echo -e " ${DIM}Config file: $ENV_FILE${NC}"
}
diff --git a/configs/pangolin/resources.yml b/configs/pangolin/resources.yml
index bbb16b35..b21123f7 100644
--- a/configs/pangolin/resources.yml
+++ b/configs/pangolin/resources.yml
@@ -77,12 +77,30 @@ resources:
port: 80
required: false
+ - subdomain: vault
+ name: Vaultwarden
+ container: vaultwarden-changemaker
+ port: 80
+ required: false
+
- subdomain: mail
name: MailHog
container: mailhog-changemaker
port: 8025
required: false
+ - subdomain: chat
+ name: Rocket.Chat
+ container: rocketchat-changemaker
+ port: 3000
+ required: false
+
+ - subdomain: events
+ name: Gancio Events
+ container: gancio-changemaker
+ port: 13120
+ required: false
+
# Monitoring services (auto-detect profile)
- subdomain: grafana
name: Grafana
diff --git a/docker-compose.yml b/docker-compose.yml
index 4614d226..f72e93c9 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -65,6 +65,18 @@ services:
- EXCALIDRAW_EMBED_PORT=${EXCALIDRAW_EMBED_PORT:-8886}
- HOMEPAGE_URL=${HOMEPAGE_URL:-http://homepage-changemaker:3000}
- HOMEPAGE_EMBED_PORT=${HOMEPAGE_EMBED_PORT:-8887}
+ - VAULTWARDEN_URL=${VAULTWARDEN_URL:-http://vaultwarden-changemaker:80}
+ - VAULTWARDEN_EMBED_PORT=${VAULTWARDEN_EMBED_PORT:-8890}
+ - ROCKETCHAT_URL=${ROCKETCHAT_URL:-http://rocketchat-changemaker:3000}
+ - ROCKETCHAT_ADMIN_USER=${ROCKETCHAT_ADMIN_USER:-rcadmin}
+ - ROCKETCHAT_ADMIN_PASSWORD=${ROCKETCHAT_ADMIN_PASSWORD:-changeme}
+ - ROCKETCHAT_EMBED_PORT=${ROCKETCHAT_EMBED_PORT:-8891}
+ - ENABLE_CHAT=${ENABLE_CHAT:-false}
+ - GANCIO_URL=${GANCIO_URL:-http://gancio-changemaker:13120}
+ - GANCIO_EMBED_PORT=${GANCIO_EMBED_PORT:-8892}
+ - GANCIO_ADMIN_USER=${GANCIO_ADMIN_USER:-admin}
+ - GANCIO_ADMIN_PASSWORD=${GANCIO_ADMIN_PASSWORD:-}
+ - GANCIO_SYNC_ENABLED=${GANCIO_SYNC_ENABLED:-false}
volumes:
- ./api:/app
- /app/node_modules
@@ -165,6 +177,7 @@ services:
volumes:
- v2-postgres-data:/var/lib/postgresql/data
- ./api/prisma/init-nocodb-db.sh:/docker-entrypoint-initdb.d/init-nocodb-db.sh:ro
+ - ./api/prisma/init-gancio-db.sh:/docker-entrypoint-initdb.d/init-gancio-db.sh:ro
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${V2_POSTGRES_USER:-changemaker}"]
interval: 10s
@@ -189,6 +202,9 @@ services:
- "8885:8885" # Mini QR embed proxy
- "8886:8886" # Excalidraw embed proxy
- "8887:8887" # Homepage embed proxy
+ - "8890:8890" # Vaultwarden embed proxy
+ - "8891:8891" # Rocket.Chat embed proxy
+ - "8892:8892" # Gancio embed proxy
healthcheck:
test: ["CMD", "wget", "-q", "--spider", "http://127.0.0.1:80/"]
interval: 30s
@@ -305,7 +321,7 @@ services:
container_name: listmonk-db
restart: unless-stopped
ports:
- - "127.0.0.1:${LISTMONK_DB_PORT:-5432}:5432"
+ - "127.0.0.1:${LISTMONK_DB_PORT:-5434}:5432"
environment:
POSTGRES_USER: ${LISTMONK_DB_USER:-listmonk}
POSTGRES_PASSWORD: ${LISTMONK_DB_PASSWORD:-listmonk}
@@ -431,6 +447,8 @@ services:
- BASE_DOMAIN=${BASE_DOMAIN:-}
- API_URL=${API_URL:-}
- API_PORT=${API_PORT:-4000}
+ - GANCIO_URL=${GANCIO_URL:-http://gancio-changemaker:13120}
+ - GANCIO_PORT=${GANCIO_PORT:-8092}
command: serve --dev-addr=0.0.0.0:8000 --watch-theme --livereload
restart: unless-stopped
networks:
@@ -592,6 +610,196 @@ services:
networks:
- changemaker-lite
+ # Vaultwarden — Password manager (Bitwarden-compatible)
+ vaultwarden:
+ image: vaultwarden/server:latest
+ container_name: vaultwarden-changemaker
+ restart: unless-stopped
+ ports:
+ - "${VAULTWARDEN_PORT:-8445}:80"
+ healthcheck:
+ test: ["CMD", "curl", "-sf", "http://localhost:80/alive"]
+ interval: 30s
+ timeout: 5s
+ retries: 3
+ start_period: 15s
+ environment:
+ - ADMIN_TOKEN=${VAULTWARDEN_ADMIN_TOKEN:-}
+ - DOMAIN=${VAULTWARDEN_DOMAIN:-https://vault.cmlite.org}
+ - SIGNUPS_ALLOWED=${VAULTWARDEN_SIGNUPS_ALLOWED:-false}
+ - WEBSOCKET_ENABLED=${VAULTWARDEN_WEBSOCKET_ENABLED:-true}
+ - ROCKET_PORT=80
+ - LOG_LEVEL=info
+ - SMTP_HOST=${SMTP_HOST:-mailhog-changemaker}
+ - SMTP_PORT=${SMTP_PORT:-1025}
+ - SMTP_FROM=${SMTP_USER:-noreply@cmlite.org}
+ - SMTP_FROM_NAME=${SMTP_FROM_NAME:-Vaultwarden}
+ - SMTP_SECURITY=${VAULTWARDEN_SMTP_SECURITY:-off}
+ - SMTP_USERNAME=${SMTP_USER:-}
+ - SMTP_PASSWORD=${SMTP_PASS:-}
+ volumes:
+ - vaultwarden-data:/data
+ networks:
+ - changemaker-lite
+
+ # One-shot: invites the initial admin user into Vaultwarden after it starts.
+ # Uses the admin panel API to send an invitation email (lands in MailHog or real SMTP).
+ # Safe to re-run (Vaultwarden ignores duplicate invites for existing users). Exits 0 on success.
+ vaultwarden-init:
+ image: alpine/curl:latest
+ container_name: vaultwarden-init
+ depends_on:
+ vaultwarden:
+ condition: service_healthy
+ restart: "no"
+ environment:
+ VAULTWARDEN_URL: http://vaultwarden-changemaker:80
+ VAULTWARDEN_ADMIN_TOKEN: ${VAULTWARDEN_ADMIN_TOKEN:-}
+ INVITE_EMAIL: ${INITIAL_ADMIN_EMAIL:-admin@cmlite.org}
+ entrypoint: ["/bin/sh", "-c"]
+ command:
+ - |
+ echo "[vaultwarden-init] Waiting for Vaultwarden..."
+ for i in $(seq 1 20); do
+ if curl -sf http://vaultwarden-changemaker:80/alive >/dev/null 2>&1; then
+ break
+ fi
+ sleep 2
+ done
+
+ if [ -z "$$VAULTWARDEN_ADMIN_TOKEN" ]; then
+ echo "[vaultwarden-init] VAULTWARDEN_ADMIN_TOKEN not set, skipping invite"
+ exit 0
+ fi
+
+ echo "[vaultwarden-init] Authenticating with admin panel..."
+ SESSION_COOKIE=$(mktemp)
+ HTTP_CODE=$(curl -s -o /dev/null -w "%{http_code}" \
+ -c "$$SESSION_COOKIE" \
+ -X POST "$$VAULTWARDEN_URL/admin" \
+ -H "Content-Type: application/x-www-form-urlencoded" \
+ -d "token=$$VAULTWARDEN_ADMIN_TOKEN")
+
+ if [ "$$HTTP_CODE" != "200" ] && [ "$$HTTP_CODE" != "302" ]; then
+ echo "[vaultwarden-init] Admin auth failed (HTTP $$HTTP_CODE)"
+ rm -f "$$SESSION_COOKIE"
+ exit 1
+ fi
+ echo "[vaultwarden-init] Authenticated"
+
+ echo "[vaultwarden-init] Inviting $$INVITE_EMAIL..."
+ INVITE_CODE=$(curl -s -w "\n%{http_code}" \
+ -b "$$SESSION_COOKIE" \
+ -X POST "$$VAULTWARDEN_URL/admin/invite" \
+ -H "Content-Type: application/json" \
+ -d "{\"email\":\"$$INVITE_EMAIL\"}")
+
+ INVITE_HTTP=$(echo "$$INVITE_CODE" | tail -1)
+ INVITE_BODY=$(echo "$$INVITE_CODE" | head -n -1)
+
+ if [ "$$INVITE_HTTP" = "200" ] || [ "$$INVITE_HTTP" = "422" ]; then
+ echo "[vaultwarden-init] Invite sent (or user already exists)"
+ else
+ echo "[vaultwarden-init] Invite failed (HTTP $$INVITE_HTTP): $$INVITE_BODY"
+ fi
+
+ rm -f "$$SESSION_COOKIE"
+ echo "[vaultwarden-init] Done"
+ networks:
+ - changemaker-lite
+
+ # Rocket.Chat — Team coordination chat
+ rocketchat:
+ image: rocketchat/rocket.chat:7.9.7
+ container_name: rocketchat-changemaker
+ restart: unless-stopped
+ depends_on:
+ mongodb-rocketchat:
+ condition: service_healthy
+ nats-rocketchat:
+ condition: service_started
+ environment:
+ - ROOT_URL=http://chat.${DOMAIN:-cmlite.org}
+ - MONGO_URL=mongodb://mongodb-rocketchat:27017/rocketchat?replicaSet=rs0
+ - MONGO_OPLOG_URL=mongodb://mongodb-rocketchat:27017/local?replicaSet=rs0
+ - TRANSPORTER=monolith+nats://nats-rocketchat:4222
+ - PORT=3000
+ - ADMIN_USERNAME=${ROCKETCHAT_ADMIN_USER:-rcadmin}
+ - ADMIN_NAME=Admin
+ - ADMIN_EMAIL=${INITIAL_ADMIN_EMAIL:-admin@cmlite.org}
+ - ADMIN_PASS=${ROCKETCHAT_ADMIN_PASSWORD:-changeme}
+ - CREATE_TOKENS_FOR_USERS=true
+ - OVERWRITE_SETTING_Iframe_Integration_send_enable=true
+ - OVERWRITE_SETTING_Iframe_Integration_receive_enable=true
+ - OVERWRITE_SETTING_Iframe_Integration_receive_origin=http://app.${DOMAIN:-cmlite.org},https://app.${DOMAIN:-cmlite.org},http://localhost:${ADMIN_PORT:-3000}
+ volumes:
+ - rocketchat-uploads:/app/uploads
+ networks:
+ - changemaker-lite
+ healthcheck:
+ test: ["CMD", "wget", "-q", "--spider", "http://127.0.0.1:3000/api/info"]
+ interval: 30s
+ timeout: 10s
+ retries: 10
+ start_period: 90s
+
+ # NATS (required by Rocket.Chat 7.x+ for microservice messaging)
+ nats-rocketchat:
+ image: nats:2.11-alpine
+ container_name: nats-rocketchat
+ restart: unless-stopped
+ command: --http_port 8222
+ networks:
+ - changemaker-lite
+
+ # MongoDB (required by Rocket.Chat — replica set for oplog tailing)
+ mongodb-rocketchat:
+ image: mongo:6.0
+ container_name: mongodb-rocketchat
+ restart: unless-stopped
+ command: ["mongod", "--replSet", "rs0", "--bind_ip_all"]
+ volumes:
+ - mongodb-rocketchat-data:/data/db
+ networks:
+ - changemaker-lite
+ healthcheck:
+ test: ["CMD", "mongosh", "--quiet", "--eval", "try { rs.status().ok } catch(e) { rs.initiate({_id:'rs0',members:[{_id:0,host:'mongodb-rocketchat:27017'}]}).ok }"]
+ interval: 10s
+ timeout: 10s
+ retries: 10
+ start_period: 30s
+
+ # Gancio — Event management platform (uses shared PostgreSQL)
+ gancio:
+ image: cisti/gancio:latest
+ container_name: gancio-changemaker
+ restart: unless-stopped
+ depends_on:
+ v2-postgres:
+ condition: service_healthy
+ ports:
+ - "${GANCIO_PORT:-8092}:13120"
+ healthcheck:
+ test: ["CMD", "node", "-e", "require('http').get('http://localhost:13120/', r => process.exit(r.statusCode < 400 ? 0 : 1)).on('error', () => process.exit(1))"]
+ interval: 30s
+ timeout: 10s
+ retries: 5
+ start_period: 60s
+ environment:
+ - GANCIO_DATA=/home/node/data
+ - NODE_ENV=production
+ - GANCIO_DB_DIALECT=postgres
+ - GANCIO_DB_HOST=changemaker-v2-postgres
+ - GANCIO_DB_PORT=5432
+ - GANCIO_DB_DATABASE=gancio
+ - GANCIO_DB_USERNAME=${V2_POSTGRES_USER:-changemaker}
+ - GANCIO_DB_PASSWORD=${V2_POSTGRES_PASSWORD:-changemaker}
+ - server__baseurl=${GANCIO_BASE_URL:-https://events.cmlite.org}
+ volumes:
+ - gancio-data:/home/node/data
+ networks:
+ - changemaker-lite
+
# MailHog — Email testing (dev)
mailhog:
image: mailhog/mailhog:latest
@@ -715,10 +923,11 @@ services:
ports:
- "${REDIS_EXPORTER_PORT:-9121}:9121"
environment:
- - REDIS_ADDR=redis:6379
+ - REDIS_ADDR=redis://redis-changemaker:6379
+ - REDIS_PASSWORD=${REDIS_PASSWORD}
restart: always
depends_on:
- - redis
+ - redis-changemaker
networks:
- changemaker-lite
profiles:
@@ -777,6 +986,12 @@ volumes:
n8n-data:
gitea-data:
mysql-data:
+ vaultwarden-data:
+ # Rocket.Chat
+ rocketchat-uploads:
+ mongodb-rocketchat-data:
+ # Gancio
+ gancio-data:
# Monitoring
prometheus-data:
grafana-data:
diff --git a/mkdocs/.cache/plugin/social/assets/images/social/docs/phil.png b/mkdocs/.cache/plugin/social/assets/images/social/docs/phil.png
new file mode 100644
index 00000000..75ab827a
Binary files /dev/null and b/mkdocs/.cache/plugin/social/assets/images/social/docs/phil.png differ
diff --git a/mkdocs/.cache/plugin/social/manifest.json b/mkdocs/.cache/plugin/social/manifest.json
index b3666e97..6cbb1549 100644
--- a/mkdocs/.cache/plugin/social/manifest.json
+++ b/mkdocs/.cache/plugin/social/manifest.json
@@ -27,6 +27,7 @@
"assets/images/social/docs/getting-started/environment-variables.png": "a2ac6ca4cb56f9697fc7fd25f9cca6f1aa83a58b",
"assets/images/social/docs/getting-started/index.png": "c38af587f205180bab6232e517438de6db89fc88",
"assets/images/social/docs/index.png": "473afed8e6ed44768b1a64ad90c4a8595667a8f3",
+ "assets/images/social/docs/phil.png": "b39fd027ebf087814d7756b839d109eff9e819f7",
"assets/images/social/docs/services/index.png": "9fcf00324266a9f7b58c7b277da2127e8882aa47",
"assets/images/social/docs/troubleshooting/index.png": "b0ffadb8b01b261dfe7dea1b55fe02a3d639b7e7",
"assets/images/social/docs/volunteer/index.png": "a38ac1baf53fc19a77832926d14d56b92bbc5662",
diff --git a/mkdocs/docs/assets/js/env-config.js b/mkdocs/docs/assets/js/env-config.js
index 4a9228a9..c577e135 100644
--- a/mkdocs/docs/assets/js/env-config.js
+++ b/mkdocs/docs/assets/js/env-config.js
@@ -3,8 +3,9 @@
(function() {
window.MEDIA_API_URL = 'http://localhost:4100';
window.PUBLIC_URL = 'http://localhost:3002';
- window.PAYMENT_API_URL = 'http://localhost:4000';
+ window.PAYMENT_API_URL = 'http://localhost:4002';
window.APP_URL = 'http://localhost:3002';
+ window.GANCIO_URL = 'http://localhost:8092';
window.VIDEO_PLAYER_DEBUG = false;
console.log('[EnvConfig] Loaded from environment:', {
@@ -12,6 +13,7 @@
publicUrl: window.PUBLIC_URL,
paymentApiUrl: window.PAYMENT_API_URL,
appUrl: window.APP_URL,
+ gancioUrl: window.GANCIO_URL,
debug: window.VIDEO_PLAYER_DEBUG
});
})();
\ No newline at end of file
diff --git a/mkdocs/docs/assets/js/gancio-events.js b/mkdocs/docs/assets/js/gancio-events.js
new file mode 100644
index 00000000..349b7c6e
--- /dev/null
+++ b/mkdocs/docs/assets/js/gancio-events.js
@@ -0,0 +1,131 @@
+/**
+ * Gancio Events Block Hydration for MkDocs
+ *
+ * Scans for .gancio-events-block elements and replaces them with
+ * Gancio's official web component.
+ *
+ * Follows the video-player.js hydration pattern.
+ */
+(function () {
+ 'use strict';
+
+ var scriptLoaded = false;
+ var scriptLoading = false;
+
+ function getGancioUrl() {
+ // Check env-config.js injected global
+ if (window.GANCIO_URL) return window.GANCIO_URL;
+
+ // Fallback: derive from current hostname
+ var host = window.location.hostname;
+ if (host !== 'localhost' && host.indexOf('.') !== -1) {
+ var parts = host.split('.');
+ var base = parts.slice(-2).join('.');
+ return window.location.protocol + '//events.' + base;
+ }
+ return 'http://localhost:8092';
+ }
+
+ function loadGancioScript(gancioUrl, callback) {
+ if (scriptLoaded) {
+ callback();
+ return;
+ }
+ if (scriptLoading) {
+ // Wait for existing load
+ var interval = setInterval(function () {
+ if (scriptLoaded) {
+ clearInterval(interval);
+ callback();
+ }
+ }, 100);
+ return;
+ }
+
+ scriptLoading = true;
+ var script = document.createElement('script');
+ script.src = gancioUrl + '/gancio-events.es.js';
+ script.type = 'module';
+ script.onload = function () {
+ scriptLoaded = true;
+ scriptLoading = false;
+ console.log('[GancioEvents] Web component script loaded');
+ callback();
+ };
+ script.onerror = function () {
+ scriptLoading = false;
+ console.warn('[GancioEvents] Failed to load web component script from', script.src);
+ // Show fallback message
+ var blocks = document.querySelectorAll('.gancio-events-block');
+ blocks.forEach(function (block) {
+ if (!block.querySelector('gancio-events')) {
+ block.innerHTML =
+            '<p style="text-align:center;opacity:0.7;">Events calendar is temporarily unavailable.</p>';
+ }
+ });
+ };
+ document.head.appendChild(script);
+ }
+
+ function hydrateBlocks() {
+ var blocks = document.querySelectorAll('.gancio-events-block');
+ if (blocks.length === 0) return;
+
+ var gancioUrl = getGancioUrl();
+ console.log('[GancioEvents] Found', blocks.length, 'block(s), Gancio URL:', gancioUrl);
+
+ loadGancioScript(gancioUrl, function () {
+ blocks.forEach(function (block) {
+ // Skip if already hydrated
+ if (block.querySelector('gancio-events')) return;
+
+ var maxlength = block.getAttribute('data-maxlength') || '10';
+ var theme = block.getAttribute('data-theme') || 'dark';
+ var tags = block.getAttribute('data-tags') || '';
+ var title = block.getAttribute('data-title') || 'Upcoming Events';
+
+ // Build the web component
+ var container = document.createElement('div');
+
+ // Add title if provided
+ if (title) {
+ var heading = document.createElement('h2');
+ heading.textContent = title;
+ heading.style.cssText = 'text-align:center; margin-bottom:24px; font-size:1.75rem;';
+ container.appendChild(heading);
+ }
+
+ var widget = document.createElement('gancio-events');
+ widget.setAttribute('baseurl', gancioUrl);
+ widget.setAttribute('maxlength', maxlength);
+ widget.setAttribute('theme', theme);
+ if (tags) {
+ widget.setAttribute('tags', tags);
+ }
+
+ container.appendChild(widget);
+
+ // Replace placeholder content
+ block.innerHTML = '';
+ block.appendChild(container);
+ });
+ });
+ }
+
+ // Initial hydration
+ if (document.readyState === 'loading') {
+ document.addEventListener('DOMContentLoaded', hydrateBlocks);
+ } else {
+ hydrateBlocks();
+ }
+
+ // Re-hydrate on MkDocs SPA navigation
+ if (typeof document$ !== 'undefined') {
+ document$.subscribe(function () {
+ setTimeout(hydrateBlocks, 100);
+ });
+ }
+})();
diff --git a/mkdocs/docs/assets/repo-data/admin-changemaker.lite.json b/mkdocs/docs/assets/repo-data/admin-changemaker.lite.json
index dcc79a74..9063760b 100644
--- a/mkdocs/docs/assets/repo-data/admin-changemaker.lite.json
+++ b/mkdocs/docs/assets/repo-data/admin-changemaker.lite.json
@@ -7,10 +7,10 @@
"stars_count": 0,
"forks_count": 0,
"open_issues_count": 23,
- "updated_at": "2026-02-17T15:49:30-07:00",
+ "updated_at": "2026-02-18T10:02:02-07:00",
"created_at": "2025-05-28T14:54:59-06:00",
"clone_url": "https://gitea.bnkops.com/admin/changemaker.lite.git",
"ssh_url": "git@gitea.bnkops.com:admin/changemaker.lite.git",
"default_branch": "main",
- "last_build_update": "2026-02-17T15:49:30-07:00"
+ "last_build_update": "2026-02-18T10:02:02-07:00"
}
\ No newline at end of file
diff --git a/mkdocs/docs/assets/repo-data/anthropics-claude-code.json b/mkdocs/docs/assets/repo-data/anthropics-claude-code.json
index d09a7d71..10c9bb37 100644
--- a/mkdocs/docs/assets/repo-data/anthropics-claude-code.json
+++ b/mkdocs/docs/assets/repo-data/anthropics-claude-code.json
@@ -4,10 +4,10 @@
"description": "Claude Code is an agentic coding tool that lives in your terminal, understands your codebase, and helps you code faster by executing routine tasks, explaining complex code, and handling git workflows - all through natural language commands.",
"html_url": "https://github.com/anthropics/claude-code",
"language": "Shell",
- "stars_count": 67411,
- "forks_count": 5260,
- "open_issues_count": 6231,
- "updated_at": "2026-02-18T04:18:30Z",
+ "stars_count": 67570,
+ "forks_count": 5278,
+ "open_issues_count": 6297,
+ "updated_at": "2026-02-18T18:41:22Z",
"created_at": "2025-02-22T17:41:21Z",
"clone_url": "https://github.com/anthropics/claude-code.git",
"ssh_url": "git@github.com:anthropics/claude-code.git",
diff --git a/mkdocs/docs/assets/repo-data/coder-code-server.json b/mkdocs/docs/assets/repo-data/coder-code-server.json
index 5ca93262..281dc233 100644
--- a/mkdocs/docs/assets/repo-data/coder-code-server.json
+++ b/mkdocs/docs/assets/repo-data/coder-code-server.json
@@ -4,10 +4,10 @@
"description": "VS Code in the browser",
"html_url": "https://github.com/coder/code-server",
"language": "TypeScript",
- "stars_count": 76281,
- "forks_count": 6512,
- "open_issues_count": 176,
- "updated_at": "2026-02-18T03:57:21Z",
+ "stars_count": 76286,
+ "forks_count": 6513,
+ "open_issues_count": 177,
+ "updated_at": "2026-02-18T17:58:26Z",
"created_at": "2019-02-27T16:50:41Z",
"clone_url": "https://github.com/coder/code-server.git",
"ssh_url": "git@github.com:coder/code-server.git",
diff --git a/mkdocs/docs/assets/repo-data/gethomepage-homepage.json b/mkdocs/docs/assets/repo-data/gethomepage-homepage.json
index ea6ce6f6..116b66fb 100644
--- a/mkdocs/docs/assets/repo-data/gethomepage-homepage.json
+++ b/mkdocs/docs/assets/repo-data/gethomepage-homepage.json
@@ -4,13 +4,13 @@
"description": "A highly customizable homepage (or startpage / application dashboard) with Docker and service API integrations.",
"html_url": "https://github.com/gethomepage/homepage",
"language": "JavaScript",
- "stars_count": 28454,
- "forks_count": 1790,
+ "stars_count": 28459,
+ "forks_count": 1789,
"open_issues_count": 1,
- "updated_at": "2026-02-18T03:37:12Z",
+ "updated_at": "2026-02-18T17:08:54Z",
"created_at": "2022-08-24T07:29:42Z",
"clone_url": "https://github.com/gethomepage/homepage.git",
"ssh_url": "git@github.com:gethomepage/homepage.git",
"default_branch": "dev",
- "last_build_update": "2026-02-18T00:48:06Z"
+ "last_build_update": "2026-02-18T12:23:05Z"
}
\ No newline at end of file
diff --git a/mkdocs/docs/assets/repo-data/go-gitea-gitea.json b/mkdocs/docs/assets/repo-data/go-gitea-gitea.json
index 4f7f8916..469475a5 100644
--- a/mkdocs/docs/assets/repo-data/go-gitea-gitea.json
+++ b/mkdocs/docs/assets/repo-data/go-gitea-gitea.json
@@ -4,13 +4,13 @@
"description": "Git with a cup of tea! Painless self-hosted all-in-one software development service, including Git hosting, code review, team collaboration, package registry and CI/CD",
"html_url": "https://github.com/go-gitea/gitea",
"language": "Go",
- "stars_count": 53756,
- "forks_count": 6393,
- "open_issues_count": 2831,
- "updated_at": "2026-02-18T04:13:31Z",
+ "stars_count": 53764,
+ "forks_count": 6394,
+ "open_issues_count": 2832,
+ "updated_at": "2026-02-18T18:54:21Z",
"created_at": "2016-11-01T02:13:26Z",
"clone_url": "https://github.com/go-gitea/gitea.git",
"ssh_url": "git@github.com:go-gitea/gitea.git",
"default_branch": "main",
- "last_build_update": "2026-02-18T04:14:21Z"
+ "last_build_update": "2026-02-18T07:31:28Z"
}
\ No newline at end of file
diff --git a/mkdocs/docs/assets/repo-data/knadh-listmonk.json b/mkdocs/docs/assets/repo-data/knadh-listmonk.json
index 18fb37c1..942c02c9 100644
--- a/mkdocs/docs/assets/repo-data/knadh-listmonk.json
+++ b/mkdocs/docs/assets/repo-data/knadh-listmonk.json
@@ -4,10 +4,10 @@
"description": "High performance, self-hosted, newsletter and mailing list manager with a modern dashboard. Single binary app.",
"html_url": "https://github.com/knadh/listmonk",
"language": "Go",
- "stars_count": 19075,
- "forks_count": 1926,
+ "stars_count": 19078,
+ "forks_count": 1928,
"open_issues_count": 113,
- "updated_at": "2026-02-17T21:39:10Z",
+ "updated_at": "2026-02-18T10:42:44Z",
"created_at": "2019-06-26T05:08:39Z",
"clone_url": "https://github.com/knadh/listmonk.git",
"ssh_url": "git@github.com:knadh/listmonk.git",
diff --git a/mkdocs/docs/assets/repo-data/n8n-io-n8n.json b/mkdocs/docs/assets/repo-data/n8n-io-n8n.json
index 9eb123cd..257cd9c4 100644
--- a/mkdocs/docs/assets/repo-data/n8n-io-n8n.json
+++ b/mkdocs/docs/assets/repo-data/n8n-io-n8n.json
@@ -4,13 +4,13 @@
"description": "Fair-code workflow automation platform with native AI capabilities. Combine visual building with custom code, self-host or cloud, 400+ integrations.",
"html_url": "https://github.com/n8n-io/n8n",
"language": "TypeScript",
- "stars_count": 174994,
- "forks_count": 54934,
- "open_issues_count": 1392,
- "updated_at": "2026-02-18T04:21:26Z",
+ "stars_count": 175133,
+ "forks_count": 54959,
+ "open_issues_count": 1379,
+ "updated_at": "2026-02-18T18:44:06Z",
"created_at": "2019-06-22T09:24:21Z",
"clone_url": "https://github.com/n8n-io/n8n.git",
"ssh_url": "git@github.com:n8n-io/n8n.git",
"default_branch": "master",
- "last_build_update": "2026-02-18T01:57:27Z"
+ "last_build_update": "2026-02-18T18:45:01Z"
}
\ No newline at end of file
diff --git a/mkdocs/docs/assets/repo-data/nocodb-nocodb.json b/mkdocs/docs/assets/repo-data/nocodb-nocodb.json
index bb22a8b5..a61d9422 100644
--- a/mkdocs/docs/assets/repo-data/nocodb-nocodb.json
+++ b/mkdocs/docs/assets/repo-data/nocodb-nocodb.json
@@ -4,13 +4,13 @@
"description": "\ud83d\udd25 \ud83d\udd25 \ud83d\udd25 A Free & Self-hostable Airtable Alternative",
"html_url": "https://github.com/nocodb/nocodb",
"language": "TypeScript",
- "stars_count": 62030,
- "forks_count": 4633,
- "open_issues_count": 592,
- "updated_at": "2026-02-18T02:31:38Z",
+ "stars_count": 62055,
+ "forks_count": 4637,
+ "open_issues_count": 595,
+ "updated_at": "2026-02-18T18:47:12Z",
"created_at": "2017-10-29T18:51:48Z",
"clone_url": "https://github.com/nocodb/nocodb.git",
"ssh_url": "git@github.com:nocodb/nocodb.git",
"default_branch": "develop",
- "last_build_update": "2026-02-18T02:34:42Z"
+ "last_build_update": "2026-02-18T17:46:29Z"
}
\ No newline at end of file
diff --git a/mkdocs/docs/assets/repo-data/ollama-ollama.json b/mkdocs/docs/assets/repo-data/ollama-ollama.json
index 3f1f0e8a..abc006b7 100644
--- a/mkdocs/docs/assets/repo-data/ollama-ollama.json
+++ b/mkdocs/docs/assets/repo-data/ollama-ollama.json
@@ -4,10 +4,10 @@
"description": "Get up and running with Kimi-K2.5, GLM-5, MiniMax, DeepSeek, gpt-oss, Qwen, Gemma and other models.",
"html_url": "https://github.com/ollama/ollama",
"language": "Go",
- "stars_count": 162798,
- "forks_count": 14601,
- "open_issues_count": 2426,
- "updated_at": "2026-02-18T03:55:46Z",
+ "stars_count": 162853,
+ "forks_count": 14611,
+ "open_issues_count": 2427,
+ "updated_at": "2026-02-18T18:55:27Z",
"created_at": "2023-06-26T19:39:32Z",
"clone_url": "https://github.com/ollama/ollama.git",
"ssh_url": "git@github.com:ollama/ollama.git",
diff --git a/mkdocs/docs/assets/repo-data/squidfunk-mkdocs-material.json b/mkdocs/docs/assets/repo-data/squidfunk-mkdocs-material.json
index 768ff060..e19aeb2a 100644
--- a/mkdocs/docs/assets/repo-data/squidfunk-mkdocs-material.json
+++ b/mkdocs/docs/assets/repo-data/squidfunk-mkdocs-material.json
@@ -4,13 +4,13 @@
"description": "Documentation that simply works",
"html_url": "https://github.com/squidfunk/mkdocs-material",
"language": "Python",
- "stars_count": 26068,
+ "stars_count": 26074,
"forks_count": 4044,
"open_issues_count": 2,
- "updated_at": "2026-02-18T01:56:58Z",
+ "updated_at": "2026-02-18T16:48:45Z",
"created_at": "2016-01-28T22:09:23Z",
"clone_url": "https://github.com/squidfunk/mkdocs-material.git",
"ssh_url": "git@github.com:squidfunk/mkdocs-material.git",
"default_branch": "master",
- "last_build_update": "2026-01-21T14:19:54Z"
+ "last_build_update": "2026-02-18T15:52:24Z"
}
\ No newline at end of file
diff --git a/mkdocs/docs/docs/deployment/index.md b/mkdocs/docs/docs/deployment/index.md
index 974b19be..91387b83 100644
--- a/mkdocs/docs/docs/deployment/index.md
+++ b/mkdocs/docs/docs/deployment/index.md
@@ -23,7 +23,7 @@ Nginx handles all subdomain routing internally. Every service is accessed throug
| `app.DOMAIN` | Admin GUI + public pages | 3000 |
| `api.DOMAIN` | Express API | 4000 |
| `media.DOMAIN` | Fastify Media API | 4100 |
-| `DOMAIN` (root) | MkDocs documentation site | 4001 |
+| `DOMAIN` (root) | MkDocs documentation site | 4004 |
| `db.DOMAIN` | NocoDB | 8091 |
| `docs.DOMAIN` | MkDocs live preview | 4003 |
| `code.DOMAIN` | Code Server | 8888 |
@@ -34,7 +34,10 @@ Nginx handles all subdomain routing internally. Every service is accessed throug
| `mail.DOMAIN` | MailHog (dev email) | 8025 |
| `qr.DOMAIN` | Mini QR generator | 8089 |
| `draw.DOMAIN` | Excalidraw whiteboard | 8090 |
-| `grafana.DOMAIN` | Monitoring dashboards | 3001 |
+| `vault.DOMAIN` | Vaultwarden password manager | 8445 |
+| `chat.DOMAIN` | Rocket.Chat team chat | — |
+| `events.DOMAIN` | Gancio event management | 8092 |
+| `grafana.DOMAIN` | Monitoring dashboards | 3005 |
---
@@ -478,9 +481,9 @@ This starts:
| Service | Port | Purpose |
|---------|------|---------|
| Prometheus | 9090 | Metrics collection and queries |
-| Grafana | 3001 | Dashboards and visualization |
+| Grafana | 3005 | Dashboards and visualization |
| Alertmanager | 9093 | Alert routing and notifications |
-| cAdvisor | 8080 | Container resource metrics |
+| cAdvisor | 8086 | Container resource metrics |
| Node Exporter | 9100 | Host system metrics |
| Redis Exporter | 9121 | Redis metrics |
| Gotify | 8889 | Push notifications |
diff --git a/mkdocs/docs/docs/features/index.md b/mkdocs/docs/docs/features/index.md
index e1d897a0..3b125ce6 100644
--- a/mkdocs/docs/docs/features/index.md
+++ b/mkdocs/docs/docs/features/index.md
@@ -159,3 +159,93 @@ Create reusable email templates with variable substitution for campaign communic
### Admin Routes
- `/app/email-templates` — create and manage email templates with a visual editor
+
+---
+
+## Payments (Stripe)
+
+Accept memberships, product sales, and donations through Stripe. Enable with `ENABLE_PAYMENTS=true`.
+
+### How It Works
+
+1. Enable payments in the `.env` file or admin Settings page
+2. Configure your Stripe API keys in **Admin → Settings → Payments** (keys are stored encrypted in the database)
+3. Payment widgets become available on landing pages and MkDocs pages
+
+### Key Features
+
+- **Memberships** — recurring payment support for campaign subscribers
+- **Products** — one-time purchases for campaign merchandise or events
+- **Donations** — accept and track contributions
+- **Encrypted storage** — Stripe API keys stored with AES encryption (`ENCRYPTION_KEY`)
+
+### Admin Routes
+
+- `/app/settings` — configure Stripe API keys in the Payments tab
+
+---
+
+## Events (Gancio)
+
+Integrated with [Gancio](https://gancio.org/) for self-hosted event management. When enabled, volunteer shifts are automatically published as public events.
+
+### Shift-to-Event Sync
+
+When `GANCIO_SYNC_ENABLED=true`, the platform:
+
+1. **Creates** a Gancio event whenever a new shift is published
+2. **Updates** the event if the shift time, location, or details change
+3. **Deletes** the event if the shift is cancelled
+
+Sync uses OAuth authentication with the Gancio admin account.
+
+### Key Features
+
+- **Automatic sync** — shifts appear as public events without manual entry
+- **Embeddable calendar** — GrapesJS block and MkDocs widget for embedding the event calendar on pages
+- **Public events page** — linked from the public navigation when `enableEvents` is enabled in settings
+
+### Admin Routes
+
+- `/app/gancio` — Gancio service status and iframe embed
+
+### Public Routes
+
+- `/events` — public events navigation link (when enabled)
+- `events.DOMAIN` — Gancio web interface for browsing and RSVPs
+
+---
+
+## Team Chat (Rocket.Chat)
+
+Self-hosted team chat for volunteer coordination and campaign communication. Enable with `ENABLE_CHAT=true`.
+
+### Key Features
+
+- **Channels & DMs** — organize conversations by topic, team, or campaign
+- **Iframe integration** — embedded directly in the admin dashboard and volunteer portal
+- **SSO-ready** — supports iframe authentication for seamless login from the admin panel
+- **File sharing** — share documents, images, and campaign materials
+- **Mobile apps** — native Rocket.Chat mobile apps work with your self-hosted instance
+
+### Admin Routes
+
+- `/app/chat` — embedded Rocket.Chat interface
+
+---
+
+## Password Manager (Vaultwarden)
+
+Self-hosted Bitwarden-compatible password vault for secure credential sharing across the campaign team.
+
+### Key Features
+
+- **Bitwarden client compatible** — use the official Bitwarden browser extensions, desktop apps, and mobile apps
+- **Secure sharing** — share login credentials, notes, and API keys between team members
+- **Auto-invite** — the initial admin user is automatically invited on first startup
+- **HTTPS required** — account creation requires HTTPS (provided by Pangolin tunnel)
+
+### Access
+
+- `vault.DOMAIN` — Vaultwarden web vault
+- Bitwarden clients — point them to `https://vault.DOMAIN`
diff --git a/mkdocs/docs/docs/getting-started/environment-variables.md b/mkdocs/docs/docs/getting-started/environment-variables.md
index 36656d7e..d3025027 100644
--- a/mkdocs/docs/docs/getting-started/environment-variables.md
+++ b/mkdocs/docs/docs/getting-started/environment-variables.md
@@ -139,6 +139,14 @@ Shared by rate limiting, BullMQ job queues, geocoding cache, and session data.
---
+## Payments (Stripe) :material-flask:
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `ENABLE_PAYMENTS` | `false` | :material-flask: Set to `true` to enable the payments feature (memberships, products, donations). Stripe API keys are stored encrypted in the database via the admin settings page. |
+
+---
+
## Email / SMTP :material-tune-variant:
| Variable | Default | Description |
@@ -164,7 +172,7 @@ Listmonk handles newsletter/marketing campaigns. Sync with the main platform is
| Variable | Default | Description |
|----------|---------|-------------|
| `LISTMONK_PORT` | `9001` | Listmonk web UI port. |
-| `LISTMONK_DB_PORT` | `5432` | Listmonk's own PostgreSQL port (separate from the main DB). |
+| `LISTMONK_DB_PORT` | `5434` | Listmonk's own PostgreSQL port (separate from the main DB). Uses 5434 to avoid conflict with the main PostgreSQL (5432 internal / 5433 host). |
| `LISTMONK_DB_USER` | `listmonk` | Listmonk database user. |
| `LISTMONK_DB_PASSWORD` | — | :material-alert-circle:{ .text-red } Listmonk database password. |
| `LISTMONK_DB_NAME` | `listmonk` | Listmonk database name. |
@@ -224,6 +232,7 @@ Video library with upload, analytics, scheduling, and a public gallery.
| `MEDIA_ROOT` | `/media/library` | Path to the video library inside the container. |
| `MEDIA_UPLOADS` | `/media/uploads` | Path for upload processing. |
| `MAX_UPLOAD_SIZE_GB` | `10` | Maximum single-file upload size in gigabytes. |
+| `PUBLIC_MEDIA_PORT` | `3100` | Public media gallery server port. |
| `VIDEO_PLAYER_DEBUG` | `false` | Enable verbose video player logging. |
??? example "Analytics & scheduling settings"
@@ -275,7 +284,7 @@ Self-hosted Git repository. Optional service.
| Variable | Default | Description |
|----------|---------|-------------|
| `MKDOCS_PORT` | `4003` | MkDocs dev server port (live preview). |
-| `MKDOCS_SITE_SERVER_PORT` | `4001` | MkDocs static site server port. |
+| `MKDOCS_SITE_SERVER_PORT` | `4004` | MkDocs static site server port. |
| `BASE_DOMAIN` | `https://cmlite.org` | Base URL for generated documentation links. |
| `MKDOCS_PREVIEW_URL` | `http://mkdocs:8000` | Internal container URL. |
| `MKDOCS_DOCS_PATH` | `/mkdocs/docs` | Documentation source directory inside the container. |
@@ -323,6 +332,56 @@ Self-hosted Git repository. Optional service.
---
+## Vaultwarden (Password Manager) :material-tune-variant:
+
+Self-hosted Bitwarden-compatible password manager. Optional service.
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `VAULTWARDEN_PORT` | `8445` | Vaultwarden web UI port. |
+| `VAULTWARDEN_URL` | `http://vaultwarden-changemaker:80` | Internal container URL. |
+| `VAULTWARDEN_EMBED_PORT` | `8890` | Port for iframe embedding in admin. |
+| `VAULTWARDEN_ADMIN_TOKEN` | *(empty)* | Admin panel token (access at `/admin`). Generate with `openssl rand -hex 32`. |
+| `VAULTWARDEN_DOMAIN` | `https://vault.cmlite.org` | Public-facing URL. **Must use HTTPS** — Bitwarden web vault enforces HTTPS for account creation. Set to your Pangolin tunnel URL. |
+| `VAULTWARDEN_SIGNUPS_ALLOWED` | `false` | Allow new user self-registration. Keep `false` and use admin panel invites. |
+| `VAULTWARDEN_WEBSOCKET_ENABLED` | `true` | Enable WebSocket notifications for real-time sync. |
+| `VAULTWARDEN_SMTP_SECURITY` | `off` | SMTP security mode: `off` for MailHog, `starttls` or `force_tls` for production. Uses the main `SMTP_*` variables for host/credentials. |
+
+!!! info "Initial setup"
+ The `vaultwarden-init` container automatically invites the `INITIAL_ADMIN_EMAIL` user when starting. Check MailHog (or your SMTP) for the invitation email.
+
+---
+
+## Rocket.Chat (Team Chat) :material-flask:
+
+Self-hosted team chat for volunteer coordination. Requires MongoDB (auto-configured).
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `ENABLE_CHAT` | `false` | :material-flask: Set to `true` to enable the Rocket.Chat integration. This env value is only the initial default; once saved in admin Settings, the database value is authoritative. |
+| `ROCKETCHAT_ADMIN_USER` | `rcadmin` | Rocket.Chat admin username. |
+| `ROCKETCHAT_ADMIN_PASSWORD` | — | :material-alert-circle:{ .text-red } Rocket.Chat admin password. |
+| `ROCKETCHAT_URL` | `http://rocketchat-changemaker:3000` | Internal container URL. |
+| `ROCKETCHAT_EMBED_PORT` | `8891` | Port for iframe embedding in admin. |
+
+---
+
+## Gancio (Event Management) :material-flask:
+
+Self-hosted event management platform. Uses the shared PostgreSQL database (auto-created by `init-gancio-db.sh`).
+
+| Variable | Default | Description |
+|----------|---------|-------------|
+| `GANCIO_PORT` | `8092` | Gancio web UI port. |
+| `GANCIO_URL` | `http://gancio-changemaker:13120` | Internal container URL. |
+| `GANCIO_EMBED_PORT` | `8892` | Port for iframe embedding in admin. |
+| `GANCIO_BASE_URL` | `https://events.cmlite.org` | Public-facing URL for Gancio. Used in event links. |
+| `GANCIO_ADMIN_USER` | `admin` | Gancio admin username for shift-to-event sync (OAuth login). |
+| `GANCIO_ADMIN_PASSWORD` | — | :material-alert-circle:{ .text-red } Gancio admin password. |
+| `GANCIO_SYNC_ENABLED` | `false` | :material-flask: Set to `true` to enable automatic shift → Gancio event synchronization. |
+
+---
+
## MailHog (Development Email)
| Variable | Default | Description |
@@ -405,10 +464,10 @@ docker compose --profile monitoring up -d
| Variable | Default | Description |
|----------|---------|-------------|
| `PROMETHEUS_PORT` | `9090` | Prometheus web UI / query port. |
-| `GRAFANA_PORT` | `3001` | Grafana dashboard port. |
+| `GRAFANA_PORT` | `3005` | Grafana dashboard port. |
| `GRAFANA_ADMIN_PASSWORD` | `admin` | :material-tune-variant: Change in production. |
-| `GRAFANA_ROOT_URL` | `http://localhost:3001` | Public URL for Grafana (used in links). |
-| `CADVISOR_PORT` | `8080` | cAdvisor container metrics port. |
+| `GRAFANA_ROOT_URL` | `http://localhost:3005` | Public URL for Grafana (used in links). |
+| `CADVISOR_PORT` | `8086` | cAdvisor container metrics port. |
| `NODE_EXPORTER_PORT` | `9100` | Prometheus node exporter port. |
| `REDIS_EXPORTER_PORT` | `9121` | Redis metrics exporter port. |
| `ALERTMANAGER_PORT` | `9093` | Alertmanager web UI port. |
@@ -448,6 +507,15 @@ echo "N8N_ENCRYPTION_KEY=$(openssl rand -hex 32)"
echo "N8N_USER_PASSWORD=$(openssl rand -hex 16)"
echo "NC_ADMIN_PASSWORD=$(openssl rand -hex 16)"
echo "INITIAL_ADMIN_PASSWORD=$(openssl rand -base64 18)"
+
+# Vaultwarden
+echo "VAULTWARDEN_ADMIN_TOKEN=$(openssl rand -hex 32)"
+
+# Rocket.Chat
+echo "ROCKETCHAT_ADMIN_PASSWORD=$(openssl rand -hex 16)"
+
+# Gancio
+echo "GANCIO_ADMIN_PASSWORD=$(openssl rand -hex 16)"
```
!!! tip
@@ -481,7 +549,10 @@ echo "INITIAL_ADMIN_PASSWORD=$(openssl rand -base64 18)"
```bash title="Additional variables needed"
# Everything above, plus:
ENABLE_MEDIA_FEATURES=true
+ ENABLE_PAYMENTS=true
+ ENABLE_CHAT=true
LISTMONK_SYNC_ENABLED=true
+ GANCIO_SYNC_ENABLED=true
LISTMONK_DB_PASSWORD=...
LISTMONK_WEB_ADMIN_PASSWORD=...
LISTMONK_API_TOKEN=...
@@ -490,6 +561,9 @@ echo "INITIAL_ADMIN_PASSWORD=$(openssl rand -base64 18)"
GITEA_DB_ROOT_PASSWORD=...
N8N_ENCRYPTION_KEY=...
N8N_USER_PASSWORD=...
+ VAULTWARDEN_ADMIN_TOKEN=...
+ ROCKETCHAT_ADMIN_PASSWORD=...
+ GANCIO_ADMIN_PASSWORD=...
EMAIL_TEST_MODE=false
SMTP_HOST=smtp.your-provider.com
SMTP_PORT=587
diff --git a/mkdocs/docs/docs/phil.md b/mkdocs/docs/docs/phil.md
new file mode 100644
index 00000000..0081e76f
--- /dev/null
+++ b/mkdocs/docs/docs/phil.md
@@ -0,0 +1,2 @@
+# phil
+
diff --git a/mkdocs/docs/docs/services/index.md b/mkdocs/docs/docs/services/index.md
index e3bcbff7..3a805918 100644
--- a/mkdocs/docs/docs/services/index.md
+++ b/mkdocs/docs/docs/services/index.md
@@ -118,7 +118,7 @@ The essential services that power the application.
Material-themed documentation site with full-text search, blog, social cards, and Jinja2 template overrides. Two containers: live preview (dev) and static site (production).
- **Port:** `4003` (dev) / `4001` (static) · **Container:** `mkdocs-changemaker` · **Subdomain:** `docs.DOMAIN`
+ **Port:** `4003` (dev) / `4004` (static) · **Container:** `mkdocs-changemaker` · **Subdomain:** `docs.DOMAIN`
[:octicons-arrow-right-24: MkDocs Material](https://squidfunk.github.io/mkdocs-material/){ target="_blank" }
@@ -206,6 +206,44 @@ The essential services that power the application.
[:octicons-arrow-right-24: Excalidraw](https://excalidraw.com/){ target="_blank" }
+- :material-shield-lock:{ .lg .middle } **Vaultwarden**
+
+ ---
+
+ Self-hosted Bitwarden-compatible password manager. Secure credential sharing for campaign teams. Requires HTTPS for account creation; local browsing works on HTTP.
+
+ **Port:** `8445` · **Container:** `vaultwarden-changemaker` · **Subdomain:** `vault.DOMAIN`
+
+ [:octicons-arrow-right-24: Vaultwarden Wiki](https://github.com/dani-garcia/vaultwarden/wiki){ target="_blank" }
+
+
+
+---
+
+## Team Communication
+
+
+
+- :material-chat:{ .lg .middle } **Rocket.Chat**
+
+ ---
+
+ Self-hosted team chat for volunteer coordination. Supports channels, direct messaging, threads, and file sharing. Embeddable in the admin dashboard via iframe. Enable with `ENABLE_CHAT=true`.
+
+ **Port:** `3000` (internal) · **Container:** `rocketchat-changemaker` · **Subdomain:** `chat.DOMAIN`
+
+ [:octicons-arrow-right-24: Rocket.Chat Docs](https://docs.rocket.chat/){ target="_blank" }
+
+- :material-calendar-multiple:{ .lg .middle } **Gancio**
+
+ ---
+
+ Self-hosted event management platform. Automatic shift-to-event sync (when `GANCIO_SYNC_ENABLED=true`) publishes shifts as public events. Uses the shared PostgreSQL database. Embeddable calendar widget available for MkDocs pages.
+
+ **Port:** `8092` · **Container:** `gancio-changemaker` · **Subdomain:** `events.DOMAIN`
+
+ [:octicons-arrow-right-24: Gancio Docs](https://gancio.org/){ target="_blank" }
+
---
@@ -254,7 +292,7 @@ docker compose --profile monitoring up -d
Metrics visualization with 3 auto-provisioned dashboards: API Overview, Infrastructure, and Campaign Activity. Supports custom dashboards and alerting.
- **Port:** `3001` · **Container:** `grafana-changemaker` · **Subdomain:** `grafana.DOMAIN`
+ **Port:** `3005` · **Container:** `grafana-changemaker` · **Subdomain:** `grafana.DOMAIN`
[:octicons-arrow-right-24: Grafana Docs](https://grafana.com/docs/grafana/latest/){ target="_blank" }
@@ -274,7 +312,7 @@ docker compose --profile monitoring up -d
Container resource metrics. Exposes CPU, memory, network, and filesystem usage per container for Prometheus to scrape.
- **Port:** `8080` · **Container:** `cadvisor-changemaker`
+ **Port:** `8086` · **Container:** `cadvisor-changemaker`
[:octicons-arrow-right-24: cAdvisor GitHub](https://github.com/google/cadvisor){ target="_blank" }
@@ -327,7 +365,7 @@ All services at a glance with their default ports and subdomains.
| Listmonk | 9001 | `listmonk.` | default |
| MailHog | 8025 | `mail.` | default |
| MkDocs (dev) | 4003 | `docs.` | default |
-| MkDocs (static) | 4001 | *(root)* | default |
+| MkDocs (static) | 4004 | *(root)* | default |
| Code Server | 8888 | `code.` | default |
| NocoDB | 8091 | `db.` | default |
| n8n | 5678 | `n8n.` | default |
@@ -335,11 +373,14 @@ All services at a glance with their default ports and subdomains.
| Mini QR | 8089 | `qr.` | default |
| Homepage | 3010 | `home.` | default |
| Excalidraw | 8090 | `draw.` | default |
+| Vaultwarden | 8445 | `vault.` | default |
+| Rocket.Chat | 3000 (internal) | `chat.` | default |
+| Gancio | 8092 | `events.` | default |
| Newt (tunnel) | — | — | default |
| Prometheus | 9090 | — | `monitoring` |
-| Grafana | 3001 | `grafana.` | `monitoring` |
+| Grafana | 3005 | `grafana.` | `monitoring` |
| Alertmanager | 9093 | — | `monitoring` |
-| cAdvisor | 8080 | — | `monitoring` |
+| cAdvisor | 8086 | — | `monitoring` |
| Node Exporter | 9100 | — | `monitoring` |
| Redis Exporter | 9121 | — | `monitoring` |
| Gotify | 8889 | — | `monitoring` |
diff --git a/mkdocs/docs/hooks/__pycache__/env_config_hook.cpython-311.pyc b/mkdocs/docs/hooks/__pycache__/env_config_hook.cpython-311.pyc
index c9fee608..13e9fdb5 100644
Binary files a/mkdocs/docs/hooks/__pycache__/env_config_hook.cpython-311.pyc and b/mkdocs/docs/hooks/__pycache__/env_config_hook.cpython-311.pyc differ
diff --git a/mkdocs/docs/hooks/env_config_hook.py b/mkdocs/docs/hooks/env_config_hook.py
index c38bdfe9..66296be6 100644
--- a/mkdocs/docs/hooks/env_config_hook.py
+++ b/mkdocs/docs/hooks/env_config_hook.py
@@ -28,6 +28,8 @@ def on_config(config: Dict[str, Any]) -> Dict[str, Any]:
admin_port = os.environ.get('ADMIN_PORT', '3000')
admin_url = os.environ.get('ADMIN_URL', '')
base_domain = os.environ.get('BASE_DOMAIN', '')
+ gancio_url = os.environ.get('GANCIO_URL', 'http://localhost:8092')
+ gancio_port = os.environ.get('GANCIO_PORT', '8092')
if base_domain and not base_domain.startswith('http'):
base_domain = f'https://{base_domain}'
@@ -43,6 +45,10 @@ def on_config(config: Dict[str, Any]) -> Dict[str, Any]:
if is_docker_hostname(media_api_url):
media_api_url = f'http://localhost:{media_api_port}'
+ # Resolve Gancio URL — must be browser-accessible
+ if is_docker_hostname(gancio_url):
+ gancio_url = f'http://localhost:{gancio_port}'
+
# Resolve payment API URL — must be browser-accessible
if api_url and not is_docker_hostname(api_url):
payment_api_url = api_url
@@ -69,6 +75,7 @@ def on_config(config: Dict[str, Any]) -> Dict[str, Any]:
window.PUBLIC_URL = '{public_url}';
window.PAYMENT_API_URL = '{payment_api_url}';
window.APP_URL = '{app_url}';
+ window.GANCIO_URL = '{gancio_url}';
window.VIDEO_PLAYER_DEBUG = {str(os.environ.get('VIDEO_PLAYER_DEBUG', 'false')).lower()};
console.log('[EnvConfig] Loaded from environment:', {{
@@ -76,6 +83,7 @@ def on_config(config: Dict[str, Any]) -> Dict[str, Any]:
publicUrl: window.PUBLIC_URL,
paymentApiUrl: window.PAYMENT_API_URL,
appUrl: window.APP_URL,
+ gancioUrl: window.GANCIO_URL,
debug: window.VIDEO_PLAYER_DEBUG
}});
}})();
@@ -109,6 +117,7 @@ def on_config(config: Dict[str, Any]) -> Dict[str, Any]:
logger.info(f" PUBLIC_URL: {public_url}")
logger.info(f" PAYMENT_API_URL: {payment_api_url}")
logger.info(f" APP_URL: {app_url}")
+ logger.info(f" GANCIO_URL: {gancio_url}")
else:
logger.info(f"✓ Env config unchanged, skipping write")
diff --git a/mkdocs/docs/overrides/lander.html b/mkdocs/docs/overrides/lander.html
index 1005672d..b72295ca 100644
--- a/mkdocs/docs/overrides/lander.html
+++ b/mkdocs/docs/overrides/lander.html
@@ -39,6 +39,7 @@
--branch-data: #22D3EE;
--branch-devops: #FBBF24;
--branch-sovereignty: #F87171;
+ --branch-fundraising: #EC4899;
/* Surfaces — dark */
--bg-deep: #0F172A;
@@ -887,6 +888,7 @@
.branch-icon.data { background: rgba(34, 211, 238, 0.15); border: 1px solid rgba(34, 211, 238, 0.3); }
.branch-icon.devops { background: rgba(251, 191, 36, 0.15); border: 1px solid rgba(251, 191, 36, 0.3); }
.branch-icon.sovereignty { background: rgba(248, 113, 113, 0.15); border: 1px solid rgba(248, 113, 113, 0.3); }
+ .branch-icon.fundraising { background: rgba(236, 72, 153, 0.15); border: 1px solid rgba(236, 72, 153, 0.3); }
.branch-title h3 {
font-size: 1.35rem;
@@ -935,6 +937,7 @@
.branch-data .feature-node { border-top: 2px solid var(--branch-data); }
.branch-devops .feature-node { border-top: 2px solid var(--branch-devops); }
.branch-sovereignty .feature-node { border-top: 2px solid var(--branch-sovereignty); }
+ .branch-fundraising .feature-node { border-top: 2px solid var(--branch-fundraising); }
.branch-comm .feature-node:hover { border-color: var(--branch-comm); box-shadow: 0 0 20px rgba(192,132,252,0.2); }
.branch-map .feature-node:hover { border-color: var(--branch-map); box-shadow: 0 0 20px rgba(52,211,153,0.2); }
@@ -942,6 +945,7 @@
.branch-data .feature-node:hover { border-color: var(--branch-data); box-shadow: 0 0 20px rgba(34,211,238,0.2); }
.branch-devops .feature-node:hover { border-color: var(--branch-devops); box-shadow: 0 0 20px rgba(251,191,36,0.2); }
.branch-sovereignty .feature-node:hover { border-color: var(--branch-sovereignty); box-shadow: 0 0 20px rgba(248,113,113,0.2); }
+ .branch-fundraising .feature-node:hover { border-color: var(--branch-fundraising); box-shadow: 0 0 20px rgba(236,72,153,0.2); }
/* Branch-specific node icon tinting */
.branch-comm .node-icon { background: rgba(192,132,252,0.12); }
@@ -950,6 +954,7 @@
.branch-data .node-icon { background: rgba(34,211,238,0.12); }
.branch-devops .node-icon { background: rgba(251,191,36,0.12); }
.branch-sovereignty .node-icon { background: rgba(248,113,113,0.12); }
+ .branch-fundraising .node-icon { background: rgba(236,72,153,0.12); }
.node-header {
display: flex;
@@ -1555,7 +1560,7 @@
-
+
@@ -1563,7 +1568,7 @@
-
+
@@ -1661,13 +1666,13 @@
Self-Hosted Campaign Infrastructure
Grow Power.
Don't Rent It.
- Complete political independence on your own infrastructure. Own every byte of data, control every system.
+ A deeply integrated and opinionated collection of free and open source services and production ready applications for growing political movements. Campaigns, canvassing, fundraising, team chat, and media — all on your own infrastructure.
No corporate surveillance. No foreign interference. No monthly ransoms. Free and open source.
@@ -1698,7 +1703,7 @@
Self-Hosted
-
26+
+
30+
Integrated Tools
@@ -1760,7 +1765,7 @@
@@ -1805,6 +1810,22 @@
Public response collection with moderation, upvoting, and verification. Showcase supporter voices on your campaigns.
ModerationUpvotingVerification
+
+
+
Self-hosted team chat with SSO integration. Automatic channel notifications for shift signups, canvass sessions, and campaign responses.
+
SSOChannelsSlack alternative
+
+
+
+
Async notification queue for admin alerts and volunteer feedback. Shift reminders, session summaries, and signup confirmations.
+
BullMQRemindersSummaries
+
@@ -1911,6 +1932,14 @@
Full VS Code in the browser. Edit configuration, templates, and code from anywhere without SSH.
VS CodeBrowser IDEExtensions
+
+
+
Collaborative diagramming and whiteboard tool. Plan canvassing routes, sketch campaign strategies, and brainstorm as a team.
+
CollaborativeDiagramsReal-time
+
@@ -2001,10 +2030,71 @@
PostgreSQL dumps, Listmonk data, uploads archive, and optional S3 upload. One-command backup script.
PostgreSQLS3 optionalScripted
+
+
+
Self-hosted Bitwarden-compatible password manager. Secure credential sharing for your team with real-time sync and browser extensions.
+
BitwardenTeam sharingEncrypted
+
-
+
+
+
+
+
+
+
Accept one-time donations with configurable suggested amounts, anonymous giving, and automatic tax receipts via email.
+
StripeAnonymousReceipts
+
+
+
+
Recurring revenue with tiered plans, monthly and yearly billing, and automatic renewal management. Replace Patreon.
+
RecurringTiersMRR tracking
+
+
+
+
Sell digital products, event tickets, and merchandise. Inventory management, download delivery, and capacity limits.
+
Digital goodsEventsInventory
+
+
+
+
Revenue analytics with subscriber counts, MRR tracking, donation history, and CSV exports for accounting.
+
AnalyticsCSV exportRefunds
+
+
+
+
Promote donations, products, and subscriptions within the media gallery. Visibility targeting, scheduling, and click analytics.
+
TargetingSchedulingCTR tracking
+
+
+
+
+