hzgjuigik
This commit is contained in:
2026-01-27 21:06:48 +01:00
parent 18c11d27bc
commit 6da8ce1cbd
51 changed files with 6208 additions and 974 deletions

View File

@@ -200,6 +200,58 @@ async function setupCollections() {
db.createStringAttribute(DB_ID, 'user_preferences', 'userId', 64, true));
await ensureAttribute('user_preferences', 'preferencesJson', () =>
db.createStringAttribute(DB_ID, 'user_preferences', 'preferencesJson', 16384, false));
// ==================== Onboarding State ====================
await ensureCollection('onboarding_state', 'Onboarding State', PERM_AUTHENTICATED);
await ensureAttribute('onboarding_state', 'userId', () =>
db.createStringAttribute(DB_ID, 'onboarding_state', 'userId', 64, true));
await ensureAttribute('onboarding_state', 'onboarding_step', () =>
db.createStringAttribute(DB_ID, 'onboarding_state', 'onboarding_step', 32, true));
await ensureAttribute('onboarding_state', 'completed_steps_json', () =>
db.createStringAttribute(DB_ID, 'onboarding_state', 'completed_steps_json', 1024, false));
await ensureAttribute('onboarding_state', 'first_value_seen_at', () =>
db.createDatetimeAttribute(DB_ID, 'onboarding_state', 'first_value_seen_at', false));
await ensureAttribute('onboarding_state', 'skipped_at', () =>
db.createDatetimeAttribute(DB_ID, 'onboarding_state', 'skipped_at', false));
await ensureAttribute('onboarding_state', 'last_updated', () =>
db.createDatetimeAttribute(DB_ID, 'onboarding_state', 'last_updated', false));
// ==================== Email Usage ====================
await ensureCollection('email_usage', 'Email Usage', PERM_AUTHENTICATED);
await ensureAttribute('email_usage', 'userId', () =>
db.createStringAttribute(DB_ID, 'email_usage', 'userId', 64, true));
await ensureAttribute('email_usage', 'month', () =>
db.createStringAttribute(DB_ID, 'email_usage', 'month', 16, true)); // "2026-01"
await ensureAttribute('email_usage', 'emailsProcessed', () =>
db.createIntegerAttribute(DB_ID, 'email_usage', 'emailsProcessed', true, 0));
await ensureAttribute('email_usage', 'lastReset', () =>
db.createDatetimeAttribute(DB_ID, 'email_usage', 'lastReset', false));
// ==================== Referrals ====================
await ensureCollection('referrals', 'Referrals', PERM_AUTHENTICATED);
await ensureAttribute('referrals', 'userId', () =>
db.createStringAttribute(DB_ID, 'referrals', 'userId', 64, true));
await ensureAttribute('referrals', 'referralCode', () =>
db.createStringAttribute(DB_ID, 'referrals', 'referralCode', 32, true));
await ensureAttribute('referrals', 'referredBy', () =>
db.createStringAttribute(DB_ID, 'referrals', 'referredBy', 64, false));
await ensureAttribute('referrals', 'referralCount', () =>
db.createIntegerAttribute(DB_ID, 'referrals', 'referralCount', true, 0));
await ensureAttribute('referrals', 'createdAt', () =>
db.createDatetimeAttribute(DB_ID, 'referrals', 'createdAt', false));
// ==================== Analytics Events ====================
await ensureCollection('analytics_events', 'Analytics Events', PERM_PUBLIC_READ);
await ensureAttribute('analytics_events', 'userId', () =>
db.createStringAttribute(DB_ID, 'analytics_events', 'userId', 64, false));
await ensureAttribute('analytics_events', 'eventType', () =>
db.createStringAttribute(DB_ID, 'analytics_events', 'eventType', 64, true));
await ensureAttribute('analytics_events', 'metadataJson', () =>
db.createStringAttribute(DB_ID, 'analytics_events', 'metadataJson', 4096, false));
await ensureAttribute('analytics_events', 'timestamp', () =>
db.createDatetimeAttribute(DB_ID, 'analytics_events', 'timestamp', false));
await ensureAttribute('analytics_events', 'sessionId', () =>
db.createStringAttribute(DB_ID, 'analytics_events', 'sessionId', 64, false));
}
async function main() {

View File

@@ -68,6 +68,13 @@ export const config = {
origin: process.env.CORS_ORIGIN || process.env.FRONTEND_URL || 'http://localhost:5173',
credentials: true,
},
// Free Tier Limits
freeTier: {
emailsPerMonth: parseInt(process.env.FREE_TIER_EMAILS_PER_MONTH || '500', 10),
emailAccounts: 1,
autoSchedule: false, // manual only
},
}
/**

View File

@@ -121,6 +121,7 @@ app.get('/api/preferences/ai-control', asyncHandler(async (req, res) => {
enabledCategories: preferences.enabledCategories || [],
categoryActions: preferences.categoryActions || {},
autoDetectCompanies: preferences.autoDetectCompanies !== undefined ? preferences.autoDetectCompanies : true,
cleanup: preferences.cleanup || userPreferences.getDefaults().cleanup,
})
}))
@@ -129,13 +130,14 @@ app.get('/api/preferences/ai-control', asyncHandler(async (req, res) => {
* Save AI Control settings
*/
app.post('/api/preferences/ai-control', asyncHandler(async (req, res) => {
const { userId, enabledCategories, categoryActions, autoDetectCompanies } = req.body
const { userId, enabledCategories, categoryActions, autoDetectCompanies, cleanup } = req.body
if (!userId) throw new ValidationError('userId is required')
const updates = {}
if (enabledCategories !== undefined) updates.enabledCategories = enabledCategories
if (categoryActions !== undefined) updates.categoryActions = categoryActions
if (autoDetectCompanies !== undefined) updates.autoDetectCompanies = autoDetectCompanies
if (cleanup !== undefined) updates.cleanup = cleanup
await userPreferences.upsert(userId, updates)
respond.success(res, null, 'AI Control settings saved')

View File

@@ -4,11 +4,57 @@
*/
import express from 'express'
import { asyncHandler } from '../middleware/errorHandler.mjs'
import { asyncHandler, ValidationError } from '../middleware/errorHandler.mjs'
import { respond } from '../utils/response.mjs'
import { db, Collections } from '../services/database.mjs'
import { log } from '../middleware/logger.mjs'
const router = express.Router()
// Whitelist of allowed event types.
// The /track handler below rejects any request whose `type` is not in this
// list (ValidationError), and the list order is what appears in that error
// message. Add new event names here before emitting them from clients.
const ALLOWED_EVENT_TYPES = [
'page_view',
'signup',
'trial_start',
'purchase',
'email_connected',
'onboarding_step',
'provider_connected',
'demo_used',
'suggested_rules_generated',
'rule_created',
'rules_applied',
'limit_reached',
'upgrade_clicked',
'referral_shared',
'sort_completed',
'account_deleted',
]
// Fields that should never be stored (PII).
// NOTE: keys are compared after lower-casing, so every entry here must be
// lower-case. (The previous list held 'emailContent'/'emailBody' in camelCase,
// which could never match a lower-cased key — those fields leaked through.)
const PII_FIELDS = new Set([
  'email', 'password', 'emailcontent', 'emailbody',
  'subject', 'from', 'to', 'snippet', 'content',
])

/**
 * Recursively strip PII from an analytics metadata value.
 *
 * - Keys in PII_FIELDS (case-insensitive) are dropped.
 * - String values containing '@' are dropped (crude email detector).
 * - Arrays are preserved as arrays (previously they were collapsed into
 *   objects with numeric keys by Object.entries).
 * - Non-object input yields an empty object.
 *
 * @param {*} metadata - arbitrary client-supplied metadata
 * @returns {object|Array} cleaned copy safe to persist
 */
function stripPII(metadata) {
  if (!metadata || typeof metadata !== 'object') return {}
  if (Array.isArray(metadata)) {
    return metadata
      .filter((item) => !(typeof item === 'string' && item.includes('@')))
      .map((item) => (typeof item === 'object' && item !== null ? stripPII(item) : item))
  }
  const cleaned = {}
  for (const [key, value] of Object.entries(metadata)) {
    if (PII_FIELDS.has(key.toLowerCase())) {
      continue // Skip PII fields
    }
    if (typeof value === 'string' && value.includes('@')) {
      continue // Skip if looks like email
    }
    cleaned[key] = typeof value === 'object' && value !== null ? stripPII(value) : value
  }
  return cleaned
}
/**
* POST /api/analytics/track
* Track analytics events (page views, conversions, etc.)
@@ -39,29 +85,45 @@ router.post('/track', asyncHandler(async (req, res) => {
timestamp,
page,
referrer,
sessionId,
} = req.body
// Log analytics event (in production, send to analytics service)
// Validate event type
if (!type || !ALLOWED_EVENT_TYPES.includes(type)) {
throw new ValidationError(`Invalid event type. Allowed: ${ALLOWED_EVENT_TYPES.join(', ')}`)
}
// Strip PII from metadata
const cleanedMetadata = stripPII(metadata || {})
// Prepare event data
const eventData = {
userId: userId || null,
eventType: type,
metadataJson: JSON.stringify(cleanedMetadata),
timestamp: timestamp || new Date().toISOString(),
sessionId: sessionId || null,
}
// Store in database
try {
await db.create(Collections.ANALYTICS_EVENTS, eventData)
log.info(`Analytics event tracked: ${type}`, { userId, sessionId })
} catch (err) {
log.warn('Failed to store analytics event', { error: err.message, type })
// Don't fail the request if analytics storage fails
}
// Log in development
if (process.env.NODE_ENV === 'development') {
console.log('📊 Analytics Event:', {
type,
userId,
tracking,
metadata,
timestamp,
page,
referrer,
sessionId,
metadata: cleanedMetadata,
})
}
// TODO: Store in database for analytics dashboard
// For now, just log to console
// In production, you might want to:
// - Store in database
// - Send to Google Analytics / Plausible / etc.
// - Send to Mixpanel / Amplitude
// - Log to external analytics service
// Return success (client doesn't need to wait)
respond.success(res, { received: true })
}))

View File

@@ -7,9 +7,10 @@ import express from 'express'
import { asyncHandler, NotFoundError, ValidationError } from '../middleware/errorHandler.mjs'
import { validate, schemas, rules } from '../middleware/validate.mjs'
import { respond } from '../utils/response.mjs'
import { products, questions, submissions, orders } from '../services/database.mjs'
import { products, questions, submissions, orders, onboardingState, emailAccounts, emailStats, emailDigests, userPreferences, subscriptions, emailUsage, referrals, db, Collections, Query } from '../services/database.mjs'
import Stripe from 'stripe'
import { config } from '../config/index.mjs'
import { log } from '../middleware/logger.mjs'
const router = express.Router()
const stripe = new Stripe(config.stripe.secretKey)
@@ -171,4 +172,232 @@ router.get('/config', (req, res) => {
})
})
/**
 * GET /api/onboarding/status
 * Returns the caller's current onboarding state.
 */
router.get(
  '/onboarding/status',
  validate({
    query: {
      userId: [rules.required('userId')],
    },
  }),
  asyncHandler(async (req, res) => {
    const state = await onboardingState.getByUser(req.query.userId)
    respond.success(res, state)
  })
)
/**
 * POST /api/onboarding/step
 * Persists the user's current step plus the list of steps completed so far.
 */
router.post(
  '/onboarding/step',
  validate({
    body: {
      userId: [rules.required('userId')],
      step: [rules.required('step')],
      completedSteps: [rules.isArray('completedSteps')],
    },
  }),
  asyncHandler(async (req, res) => {
    const { userId, step, completedSteps = [] } = req.body
    await onboardingState.updateStep(userId, step, completedSteps)
    // Echo back what was stored so the client can sync local state.
    respond.success(res, { step, completedSteps })
  })
)
/**
 * POST /api/onboarding/skip
 * Marks onboarding as skipped for the given user.
 */
router.post(
  '/onboarding/skip',
  validate({
    body: {
      userId: [rules.required('userId')],
    },
  }),
  asyncHandler(async (req, res) => {
    await onboardingState.skip(req.body.userId)
    respond.success(res, { skipped: true })
  })
)
/**
 * POST /api/onboarding/resume
 * Clears the skipped flag and responds with the refreshed onboarding state.
 */
router.post(
  '/onboarding/resume',
  validate({
    body: {
      userId: [rules.required('userId')],
    },
  }),
  asyncHandler(async (req, res) => {
    const { userId } = req.body
    await onboardingState.resume(userId)
    const refreshed = await onboardingState.getByUser(userId)
    respond.success(res, refreshed)
  })
)
/**
 * DELETE /api/account/delete
 * Deletes every stored record belonging to a user: email accounts, stats,
 * digests, preferences, subscription, usage records and onboarding state.
 * Individual delete failures are logged and skipped (best-effort); only a
 * failure of the overall flow produces an error response.
 */
router.delete('/account/delete',
  validate({
    body: {
      userId: [rules.required('userId')],
    },
  }),
  asyncHandler(async (req, res) => {
    const { userId } = req.body

    log.info(`Account deletion requested for user ${userId}`)

    // Best-effort delete of a single document; a failure is logged with the
    // supplied message but never aborts the overall deletion.
    const tryDelete = async (collection, docId, failureMessage) => {
      try {
        await db.delete(collection, docId)
      } catch (err) {
        log.warn(failureMessage, { error: err.message })
      }
    }

    try {
      // Email accounts
      const accounts = await emailAccounts.getByUser(userId)
      for (const account of accounts) {
        await tryDelete(Collections.EMAIL_ACCOUNTS, account.$id, `Failed to delete account ${account.$id}`)
      }

      // Stats (single record per user)
      const stats = await emailStats.getByUser(userId)
      if (stats) {
        await tryDelete(Collections.EMAIL_STATS, stats.$id, `Failed to delete stats`)
      }

      // Digests
      const digests = await emailDigests.getByUser(userId)
      for (const digest of digests) {
        await tryDelete(Collections.EMAIL_DIGESTS, digest.$id, `Failed to delete digest ${digest.$id}`)
      }

      // Preferences
      const prefs = await userPreferences.getByUser(userId)
      if (prefs) {
        await tryDelete(Collections.USER_PREFERENCES, prefs.$id, `Failed to delete preferences`)
      }

      // Subscription — may be a synthesized free-tier object without $id.
      const subscription = await subscriptions.getByUser(userId)
      if (subscription && subscription.$id) {
        await tryDelete(Collections.SUBSCRIPTIONS, subscription.$id, `Failed to delete subscription`)
      }

      // Monthly email-usage records
      const usageRecords = await db.list(Collections.EMAIL_USAGE, [Query.equal('userId', userId)])
      for (const usage of usageRecords) {
        await tryDelete(Collections.EMAIL_USAGE, usage.$id, `Failed to delete usage record`)
      }

      // Onboarding state — may be a synthesized default without $id.
      const onboarding = await onboardingState.getByUser(userId)
      if (onboarding && onboarding.$id) {
        await tryDelete(Collections.ONBOARDING_STATE, onboarding.$id, `Failed to delete onboarding state`)
      }

      log.success(`Account deletion completed for user ${userId}`)
      respond.success(res, { success: true, message: 'All data deleted successfully' })
    } catch (err) {
      log.error('Account deletion failed', { error: err.message, userId })
      throw new ValidationError('Failed to delete account data')
    }
  })
)
/**
 * GET /api/referrals/code
 * Gets (creating on first use) the caller's referral code and count.
 */
router.get(
  '/referrals/code',
  validate({
    query: {
      userId: [rules.required('userId')],
    },
  }),
  asyncHandler(async (req, res) => {
    const referral = await referrals.getOrCreateCode(req.query.userId)
    const payload = {
      referralCode: referral.referralCode,
      referralCount: referral.referralCount || 0,
    }
    respond.success(res, payload)
  })
)
/**
 * POST /api/referrals/track
 * Track a referral (when a new user signs up with a referral code).
 * Validates the code, rejects self-referral, increments the referrer's
 * count, and records who referred the new user.
 */
router.post('/referrals/track',
  validate({
    body: {
      userId: [rules.required('userId')],
      referralCode: [rules.required('referralCode')],
    },
  }),
  asyncHandler(async (req, res) => {
    const { userId, referralCode } = req.body

    // Find referrer by code
    const referrer = await referrals.getByCode(referralCode)
    if (!referrer) {
      throw new NotFoundError('Referral code')
    }

    // Don't allow self-referral
    if (referrer.userId === userId) {
      throw new ValidationError('Cannot refer yourself')
    }

    // Update referrer's count
    await referrals.incrementCount(referrer.userId)

    // Ensure the new user has a referral record, then link it to the
    // referrer. A single getOrCreateCode call suffices here.
    const userReferral = await referrals.getOrCreateCode(userId)
    await db.update(Collections.REFERRALS, userReferral.$id, {
      referredBy: referrer.userId,
    })

    log.info(`Referral tracked: ${userId} referred by ${referrer.userId} (code: ${referralCode})`)
    respond.success(res, { success: true })
  })
)
export default router

View File

@@ -8,7 +8,7 @@ import { asyncHandler, NotFoundError, AuthorizationError, ValidationError } from
import { validate, rules } from '../middleware/validate.mjs'
import { limiters } from '../middleware/rateLimit.mjs'
import { respond } from '../utils/response.mjs'
import { emailAccounts, emailStats, emailDigests, userPreferences } from '../services/database.mjs'
import { emailAccounts, emailStats, emailDigests, userPreferences, emailUsage, subscriptions } from '../services/database.mjs'
import { config, features } from '../config/index.mjs'
import { log } from '../middleware/logger.mjs'
@@ -270,7 +270,34 @@ router.post('/sort',
}),
asyncHandler(async (req, res) => {
const { userId, accountId, maxEmails = 500, processAll = true } = req.body
const effectiveMax = Math.min(maxEmails, 2000) // Cap at 2000 emails
// Check subscription status and free tier limits
const subscription = await subscriptions.getByUser(userId)
const isFreeTier = subscription?.isFreeTier || false
// Check free tier limit
if (isFreeTier) {
const usage = await emailUsage.getUsage(userId)
const limit = subscription?.emailsLimit || config.freeTier.emailsPerMonth
if (usage.emailsProcessed >= limit) {
return respond.error(res, {
code: 'LIMIT_REACHED',
message: `You've processed ${limit} emails this month. Upgrade for unlimited sorting.`,
limit,
used: usage.emailsProcessed,
}, 403)
}
}
// Check if this is first run (no stats exist)
const existingStats = await emailStats.getByUser(userId)
const isFirstRun = !existingStats || existingStats.totalSorted === 0
// For first run, limit to 50 emails for speed
const effectiveMax = isFirstRun
? Math.min(maxEmails, 50)
: Math.min(maxEmails, 2000) // Cap at 2000 emails
// Get account
const account = await emailAccounts.get(accountId)
@@ -287,6 +314,7 @@ router.post('/sort',
const sorter = await getAISorter()
let sortedCount = 0
const results = { byCategory: {} }
let emailSamples = [] // For suggested rules generation
// ═══════════════════════════════════════════════════════════════════════
// DEMO MODE - Sorting with simulated emails
@@ -304,10 +332,20 @@ router.post('/sort',
// Real AI sorting with demo data
const classified = await sorter.batchCategorize(emailsToSort, preferences)
for (const { classification } of classified) {
for (const { email, classification } of classified) {
const category = classification.category
sortedCount++
results.byCategory[category] = (results.byCategory[category] || 0) + 1
// Collect samples for suggested rules (first run only, max 50)
if (isFirstRun && emailSamples.length < 50) {
emailSamples.push({
from: email.from,
subject: email.subject,
snippet: email.snippet,
category,
})
}
}
log.success(`AI sorting completed: ${sortedCount} demo emails`)
@@ -351,6 +389,16 @@ router.post('/sort',
sortedCount++
results.byCategory[category] = (results.byCategory[category] || 0) + 1
// Collect samples for suggested rules (first run only, max 50)
if (isFirstRun && emailSamples.length < 50) {
emailSamples.push({
from: email.from,
subject: email.subject,
snippet: email.snippet || '',
category,
})
}
}
log.success(`Rule-based sorting completed: ${sortedCount} demo emails`)
@@ -512,6 +560,16 @@ router.post('/sort',
category,
companyLabel,
})
// Collect samples for suggested rules (first run only, max 50)
if (isFirstRun && emailSamples.length < 50) {
emailSamples.push({
from: emailData.from,
subject: emailData.subject,
snippet: emailData.snippet,
category,
})
}
}
// Apply labels/categories and actions
@@ -740,8 +798,13 @@ router.post('/sort',
// Update last sync
await emailAccounts.updateLastSync(accountId)
// Update email usage (for free tier tracking)
if (isFreeTier) {
await emailUsage.increment(userId, sortedCount)
}
// Update stats
const timeSaved = Math.round(sortedCount * 0.25) // 15 seconds per email
const timeSaved = Math.round(sortedCount * 0.1) // 6 seconds per email = 0.1 minutes
await emailStats.increment(userId, {
total: sortedCount,
today: sortedCount,
@@ -810,6 +873,17 @@ router.post('/sort',
log.success(`Sorting completed: ${sortedCount} emails for ${account.email}`)
// Generate suggested rules for first run
let suggestedRules = []
if (isFirstRun && emailSamples.length > 0 && features.ai()) {
try {
suggestedRules = await sorter.generateSuggestedRules(userId, emailSamples)
log.info(`Generated ${suggestedRules.length} suggested rules for first run`)
} catch (err) {
log.warn('Failed to generate suggested rules', { error: err.message })
}
}
respond.success(res, {
sorted: sortedCount,
inboxCleared,
@@ -818,6 +892,8 @@ router.post('/sort',
minutes: timeSaved,
formatted: timeSaved > 0 ? `${timeSaved} minutes` : '< 1 minute',
},
isFirstRun,
suggestedRules: suggestedRules.length > 0 ? suggestedRules : undefined,
highlights,
suggestions,
provider: account.provider,
@@ -1083,4 +1159,237 @@ router.post('/webhook/outlook', asyncHandler(async (req, res) => {
}
}))
/**
 * POST /api/email/cleanup
 * Run auto-cleanup for read items and promotions.
 * Can be called manually or by a cron job. Only single-user mode
 * (body.userId) is implemented; all-users mode logs a warning and returns
 * empty results until an efficient "cleanup enabled" query exists.
 */
router.post('/cleanup', asyncHandler(async (req, res) => {
  const { userId } = req.body // Optional: only process this user, otherwise all users

  log.info('Cleanup job started', { userId: userId || 'all' })

  const results = {
    usersProcessed: 0,
    emailsProcessed: {
      readItems: 0,
      promotions: 0,
    },
    errors: [],
  }

  try {
    if (!userId) {
      // All-users mode is intentionally a no-op: there is no efficient query
      // for "users with cleanup.enabled = true" yet.
      // TODO: Add efficient query for users with cleanup enabled
      log.warn('Processing all users not yet implemented efficiently. Use userId parameter for single user cleanup.')
      log.success('Cleanup job completed', results)
      return respond.success(res, results, 'Cleanup completed')
    }

    // Single-user mode: fetch preferences once and bail out early when
    // cleanup is disabled or the user has no accounts.
    const prefs = await userPreferences.getByUser(userId)
    if (!prefs?.preferences?.cleanup?.enabled) {
      return respond.success(res, { ...results, message: 'Cleanup not enabled for this user' })
    }

    const accounts = await emailAccounts.getByUser(userId)
    if (!accounts || accounts.length === 0) {
      return respond.success(res, { ...results, message: 'No email accounts found' })
    }

    const cleanup = prefs.preferences.cleanup
    for (const account of accounts) {
      if (!account.isActive || !account.accessToken) continue

      try {
        // Read Items Cleanup
        if (cleanup.readItems?.enabled) {
          const readItemsCount = await processReadItemsCleanup(
            account,
            cleanup.readItems.action,
            cleanup.readItems.gracePeriodDays
          )
          results.emailsProcessed.readItems += readItemsCount
        }

        // Promotion Cleanup
        if (cleanup.promotions?.enabled) {
          const promotionsCount = await processPromotionsCleanup(
            account,
            cleanup.promotions.action,
            cleanup.promotions.deleteAfterDays,
            cleanup.promotions.matchCategoriesOrLabels || []
          )
          results.emailsProcessed.promotions += promotionsCount
        }

        results.usersProcessed = 1
      } catch (error) {
        log.error(`Cleanup failed for account ${account.email}`, { error: error.message })
        results.errors.push({ userId, accountId: account.id, error: error.message })
      }
    }

    log.success('Cleanup job completed', results)
    respond.success(res, results, 'Cleanup completed')
  } catch (error) {
    log.error('Cleanup job failed', { error: error.message })
    throw error
  }
}))
/**
 * Clean up already-read emails older than the grace period for one account.
 *
 * @param {object} account - email account record (provider + OAuth tokens)
 * @param {string} action - 'archive_read' to archive, 'trash' to delete
 * @param {number} gracePeriodDays - only touch mail older than this many days
 * @returns {Promise<number>} number of messages processed
 */
async function processReadItemsCleanup(account, action, gracePeriodDays) {
  const cutoff = new Date()
  cutoff.setDate(cutoff.getDate() - gracePeriodDays)

  let processed = 0

  try {
    if (account.provider === 'gmail') {
      const gmail = await getGmailService(account.accessToken, account.refreshToken)

      // Gmail search: read mail received before the cutoff (YYYY/MM/DD).
      const yyyy = cutoff.getFullYear()
      const mm = String(cutoff.getMonth() + 1).padStart(2, '0')
      const dd = String(cutoff.getDate()).padStart(2, '0')
      const query = `-is:unread before:${yyyy}/${mm}/${dd}`

      const response = await gmail.gmail.users.messages.list({
        userId: 'me',
        q: query,
        maxResults: 500, // Limit to avoid rate limits
      })

      for (const message of response.data.messages || []) {
        try {
          if (action === 'archive_read') {
            await gmail.archiveEmail(message.id)
            await gmail.markAsRead(message.id)
          } else if (action === 'trash') {
            await gmail.trashEmail(message.id)
          }
          processed++
        } catch (err) {
          log.error(`Failed to process message ${message.id}`, { error: err.message })
        }
      }
    } else if (account.provider === 'outlook') {
      const outlook = await getOutlookService(account.accessToken)

      // Graph filter: read mail received before the cutoff.
      const filter = `isRead eq true and receivedDateTime lt ${cutoff.toISOString()}`
      const messages = await outlook._request(`/me/messages?$filter=${encodeURIComponent(filter)}&$top=500`)

      for (const message of messages.value || []) {
        try {
          if (action === 'archive_read') {
            await outlook.archiveEmail(message.id)
            await outlook.markAsRead(message.id)
          } else if (action === 'trash') {
            await outlook.deleteEmail(message.id) // Outlook uses deleteEmail for trash
          }
          processed++
        } catch (err) {
          log.error(`Failed to process message ${message.id}`, { error: err.message })
        }
      }
    }
  } catch (error) {
    log.error(`Read items cleanup failed for ${account.email}`, { error: error.message })
    throw error
  }

  return processed
}
/**
 * Process promotions cleanup for an account.
 * Archives or trashes mail older than `deleteAfterDays` that carries one of
 * the EmailSorter category labels in `matchCategories` (Gmail). The Outlook
 * branch currently filters by age only — category matching is still a TODO.
 *
 * @param {object} account - email account record (provider + OAuth tokens)
 * @param {string} action - 'archive_read' or 'trash'
 * @param {number} deleteAfterDays - only touch mail older than this
 * @param {string[]} matchCategories - category/label names to match
 * @returns {Promise<number>} number of messages processed
 */
async function processPromotionsCleanup(account, action, deleteAfterDays, matchCategories) {
  // Safety guard: with no categories, the Gmail query would degenerate to
  // `() before:...` and could sweep up unrelated mail. Do nothing instead.
  if (!Array.isArray(matchCategories) || matchCategories.length === 0) {
    return 0
  }

  const cutoffDate = new Date()
  cutoffDate.setDate(cutoffDate.getDate() - deleteAfterDays)

  let processedCount = 0

  try {
    if (account.provider === 'gmail') {
      const gmail = await getGmailService(account.accessToken, account.refreshToken)

      // Find emails with matching EmailSorter labels older than deleteAfterDays.
      const labelQueries = matchCategories.map(cat => `label:EmailSorter/${cat}`).join(' OR ')
      const query = `(${labelQueries}) before:${cutoffDate.getFullYear()}/${String(cutoffDate.getMonth() + 1).padStart(2, '0')}/${String(cutoffDate.getDate()).padStart(2, '0')}`

      const response = await gmail.gmail.users.messages.list({
        userId: 'me',
        q: query,
        maxResults: 500,
      })

      const messages = response.data.messages || []
      for (const message of messages) {
        try {
          if (action === 'archive_read') {
            await gmail.archiveEmail(message.id)
            await gmail.markAsRead(message.id)
          } else if (action === 'trash') {
            await gmail.trashEmail(message.id)
          }
          processedCount++
        } catch (err) {
          log.error(`Failed to process promotion message ${message.id}`, { error: err.message })
        }
      }
    } else if (account.provider === 'outlook') {
      const outlook = await getOutlookService(account.accessToken)

      // NOTE(review): age filter only — this touches ALL old mail, not just
      // promotions. Category info would need to be stored during sorting to
      // filter correctly here; confirm this is acceptable before enabling.
      const filter = `receivedDateTime lt ${cutoffDate.toISOString()}`
      const messages = await outlook._request(`/me/messages?$filter=${encodeURIComponent(filter)}&$top=500`)

      for (const message of messages.value || []) {
        // TODO: Check if message category matches matchCategories
        try {
          if (action === 'archive_read') {
            await outlook.archiveEmail(message.id)
            await outlook.markAsRead(message.id)
          } else if (action === 'trash') {
            await outlook.deleteEmail(message.id) // Outlook uses deleteEmail for trash
          }
          processedCount++
        } catch (err) {
          log.error(`Failed to process promotion message ${message.id}`, { error: err.message })
        }
      }
    }
  } catch (error) {
    log.error(`Promotions cleanup failed for ${account.email}`, { error: error.message })
    throw error
  }

  return processedCount
}
export default router

View File

@@ -234,6 +234,121 @@ export class AISorterService {
return null
}
/**
* Generate suggested rules based on email patterns
* Analyzes email samples to detect patterns and suggest rules
*/
async generateSuggestedRules(userId, emailSamples) {
if (!emailSamples || emailSamples.length === 0) {
return []
}
const suggestions = []
const senderCounts = {}
const domainCounts = {}
const subjectPatterns = {}
const categoryPatterns = {}
// Analyze patterns
for (const email of emailSamples) {
const from = email.from?.toLowerCase() || ''
const subject = email.subject?.toLowerCase() || ''
// Extract domain
const emailMatch = from.match(/@([^\s>]+)/)
if (emailMatch) {
const domain = emailMatch[1].toLowerCase()
domainCounts[domain] = (domainCounts[domain] || 0) + 1
}
// Count senders
const senderEmail = from.split('<')[1]?.split('>')[0] || from
senderCounts[senderEmail] = (senderCounts[senderEmail] || 0) + 1
// Detect category patterns
const category = email.category || 'review'
categoryPatterns[category] = (categoryPatterns[category] || 0) + 1
}
const totalEmails = emailSamples.length
const threshold = Math.max(3, Math.ceil(totalEmails * 0.1)) // At least 3 emails or 10% of total
// Suggest VIP senders (frequent senders)
const frequentSenders = Object.entries(senderCounts)
.filter(([_, count]) => count >= threshold)
.sort(([_, a], [__, b]) => b - a)
.slice(0, 3)
for (const [sender, count] of frequentSenders) {
suggestions.push({
type: 'vip_sender',
name: `Mark ${sender.split('@')[0]} as VIP`,
description: `${count} emails from this sender`,
confidence: Math.min(0.9, count / totalEmails),
action: {
type: 'add_vip',
email: sender,
},
})
}
// Suggest company labels (frequent domains)
const frequentDomains = Object.entries(domainCounts)
.filter(([domain, count]) => count >= threshold && !KNOWN_COMPANIES[domain])
.sort(([_, a], [__, b]) => b - a)
.slice(0, 3)
for (const [domain, count] of frequentDomains) {
const companyName = domain.split('.')[0].charAt(0).toUpperCase() + domain.split('.')[0].slice(1)
suggestions.push({
type: 'company_label',
name: `Label ${companyName} emails`,
description: `${count} emails from ${domain}`,
confidence: Math.min(0.85, count / totalEmails),
action: {
type: 'add_company_label',
name: companyName,
condition: `from:${domain}`,
category: 'promotions', // Default, user can change
},
})
}
// Suggest category-specific rules based on patterns
if (categoryPatterns.newsletters >= threshold) {
suggestions.push({
type: 'category_rule',
name: 'Archive newsletters automatically',
description: `${categoryPatterns.newsletters} newsletter emails detected`,
confidence: 0.8,
action: {
type: 'enable_category',
category: 'newsletters',
action: 'archive_read',
},
})
}
if (categoryPatterns.promotions >= threshold) {
suggestions.push({
type: 'category_rule',
name: 'Archive promotions automatically',
description: `${categoryPatterns.promotions} promotion emails detected`,
confidence: 0.75,
action: {
type: 'enable_category',
category: 'promotions',
action: 'archive_read',
},
})
}
// Sort by confidence and return top 5
return suggestions
.sort((a, b) => b.confidence - a.confidence)
.slice(0, 5)
}
/**
* Check if email matches a company label condition
*/

View File

@@ -30,6 +30,10 @@ export const Collections = {
EMAIL_DIGESTS: 'email_digests',
SUBSCRIPTIONS: 'subscriptions',
USER_PREFERENCES: 'user_preferences',
ONBOARDING_STATE: 'onboarding_state',
EMAIL_USAGE: 'email_usage',
REFERRALS: 'referrals',
ANALYTICS_EVENTS: 'analytics_events',
}
/**
@@ -251,12 +255,86 @@ export const emailStats = {
},
}
/**
 * Email usage operations — per-user, per-calendar-month processing counters
 * used to enforce free-tier limits.
 */
export const emailUsage = {
  /** Fetch the usage record for the current calendar month, if any. */
  async getCurrentMonth(userId) {
    const month = new Date().toISOString().slice(0, 7) // "2026-01"
    return db.findOne(Collections.EMAIL_USAGE, [
      Query.equal('userId', userId),
      Query.equal('month', month),
    ])
  },

  /** Add `count` processed emails to this month's record (upsert). */
  async increment(userId, count) {
    const existing = await this.getCurrentMonth(userId)
    const stamp = new Date().toISOString()
    if (existing) {
      return db.update(Collections.EMAIL_USAGE, existing.$id, {
        emailsProcessed: (existing.emailsProcessed || 0) + count,
        lastReset: stamp,
      })
    }
    return db.create(Collections.EMAIL_USAGE, {
      userId,
      month: stamp.slice(0, 7),
      emailsProcessed: count,
      lastReset: stamp,
    })
  },

  /** Current month's usage summary; zero when no record exists. */
  async getUsage(userId) {
    const record = await this.getCurrentMonth(userId)
    return {
      emailsProcessed: record?.emailsProcessed || 0,
      month: new Date().toISOString().slice(0, 7),
    }
  },
}
/**
* Subscriptions operations
*/
export const subscriptions = {
async getByUser(userId) {
return db.findOne(Collections.SUBSCRIPTIONS, [Query.equal('userId', userId)])
const subscription = await db.findOne(Collections.SUBSCRIPTIONS, [Query.equal('userId', userId)])
// If no subscription, user is on free tier
if (!subscription) {
const usage = await emailUsage.getUsage(userId)
return {
plan: 'free',
status: 'active',
isFreeTier: true,
emailsUsedThisMonth: usage.emailsProcessed,
emailsLimit: 500, // From config
}
}
// Check if subscription is active
const isActive = subscription.status === 'active'
const isFreeTier = !isActive || subscription.plan === 'free'
// Get usage for free tier users
let emailsUsedThisMonth = 0
let emailsLimit = -1 // Unlimited for paid
if (isFreeTier) {
const usage = await emailUsage.getUsage(userId)
emailsUsedThisMonth = usage.emailsProcessed
emailsLimit = 500 // From config
}
return {
...subscription,
plan: subscription.plan || 'free',
isFreeTier,
emailsUsedThisMonth,
emailsLimit,
}
},
async getByStripeId(stripeSubscriptionId) {
@@ -296,6 +374,27 @@ export const userPreferences = {
categoryActions: {},
companyLabels: [],
autoDetectCompanies: true,
version: 1,
categoryAdvanced: {},
cleanup: {
enabled: false,
readItems: {
enabled: false,
action: 'archive_read',
gracePeriodDays: 7,
},
promotions: {
enabled: false,
matchCategoriesOrLabels: ['promotions', 'newsletters'],
action: 'archive_read',
deleteAfterDays: 30,
},
safety: {
requireConfirmForDelete: true,
dryRun: false,
maxDeletesPerRun: 100,
},
},
}
},
@@ -347,6 +446,170 @@ export const userPreferences = {
},
}
/**
 * Onboarding state operations
 *
 * One document per user in `onboarding_state`, holding the current
 * step, a JSON-encoded list of completed steps, and timestamps.
 */
export const onboardingState = {
  /** Raw onboarding document for a user, or null when none exists. */
  async findDoc(userId) {
    return db.findOne(Collections.ONBOARDING_STATE, [
      Query.equal('userId', userId),
    ])
  },
  /**
   * Shared find-then-update-or-create pattern used by the mutators below.
   * @param {string} userId
   * @param {object} data - fields applied on both update and create
   * @param {object} [createDefaults] - extra fields only used on create
   */
  async upsert(userId, data, createDefaults = {}) {
    const existing = await this.findDoc(userId)
    if (existing) {
      return db.update(Collections.ONBOARDING_STATE, existing.$id, data)
    }
    return db.create(Collections.ONBOARDING_STATE, { userId, ...createDefaults, ...data })
  },
  /**
   * Current onboarding state with `completedSteps` decoded from JSON.
   * Returns a default 'not_started' shape when no document exists.
   */
  async getByUser(userId) {
    const state = await this.findDoc(userId)
    let completedSteps = []
    if (state?.completed_steps_json) {
      try {
        completedSteps = JSON.parse(state.completed_steps_json)
      } catch {
        // Corrupt stored JSON must not break callers; treat as no progress.
        completedSteps = []
      }
    }
    return {
      ...state,
      completedSteps,
      onboarding_step: state?.onboarding_step || 'not_started',
    }
  },
  /** Persist the current step and the list of completed steps. */
  async updateStep(userId, step, completedSteps = []) {
    return this.upsert(userId, {
      onboarding_step: step,
      completed_steps_json: JSON.stringify(completedSteps),
      last_updated: new Date().toISOString(),
    })
  },
  /**
   * Record the "first value seen" moment. Creating from scratch marks
   * the whole connect/first_rule/see_results path as completed.
   */
  async markValueSeen(userId) {
    return this.upsert(
      userId,
      {
        first_value_seen_at: new Date().toISOString(),
        last_updated: new Date().toISOString(),
      },
      {
        onboarding_step: 'see_results',
        completed_steps_json: JSON.stringify(['connect', 'first_rule', 'see_results']),
      },
    )
  },
  /** Mark onboarding as skipped (timestamp only; step is preserved). */
  async skip(userId) {
    return this.upsert(
      userId,
      {
        skipped_at: new Date().toISOString(),
        last_updated: new Date().toISOString(),
      },
      {
        onboarding_step: 'not_started',
        completed_steps_json: JSON.stringify([]),
      },
    )
  },
  /** Clear a previous skip; creates an initial 'connect' state if absent. */
  async resume(userId) {
    const existing = await this.findDoc(userId)
    if (existing) {
      return db.update(Collections.ONBOARDING_STATE, existing.$id, {
        skipped_at: null,
        last_updated: new Date().toISOString(),
      })
    }
    // If no state exists, create initial state
    return db.create(Collections.ONBOARDING_STATE, {
      userId,
      onboarding_step: 'connect',
      completed_steps_json: JSON.stringify([]),
      last_updated: new Date().toISOString(),
    })
  },
}
/**
 * Referrals operations
 *
 * One document per referrer in `referrals`, holding a unique
 * human-readable code (USER-XXXXXX) and a referral counter.
 */
export const referrals = {
  /**
   * Return the user's referral record, creating one with a freshly
   * generated unique code on first call.
   * @throws {Error} when a unique code cannot be found in 10 attempts
   *   (bug fix: the previous version fell through after 10 collisions
   *   and stored a code it had just seen in use)
   */
  async getOrCreateCode(userId) {
    const existing = await db.findOne(Collections.REFERRALS, [
      Query.equal('userId', userId),
    ])
    if (existing) {
      return existing
    }
    // Generate unique code: USER-ABC123
    const newCode = () =>
      `USER-${Math.random().toString(36).substring(2, 8).toUpperCase()}`
    let uniqueCode = null
    for (let attempts = 0; attempts < 10 && uniqueCode === null; attempts++) {
      const candidate = newCode()
      const clash = await db.findOne(Collections.REFERRALS, [
        Query.equal('referralCode', candidate),
      ])
      if (!clash) {
        uniqueCode = candidate
      }
    }
    if (uniqueCode === null) {
      throw new Error('Could not generate a unique referral code')
    }
    return db.create(Collections.REFERRALS, {
      userId,
      referralCode: uniqueCode,
      referralCount: 0,
      createdAt: new Date().toISOString(),
    })
  },
  /** Look up a referral record by its public code, or null. */
  async getByCode(code) {
    return db.findOne(Collections.REFERRALS, [
      Query.equal('referralCode', code),
    ])
  },
  /**
   * Increment the user's referral counter.
   * @returns the updated document, or null when the user has no record
   */
  async incrementCount(userId) {
    const referral = await db.findOne(Collections.REFERRALS, [
      Query.equal('userId', userId),
    ])
    if (referral) {
      return db.update(Collections.REFERRALS, referral.$id, {
        referralCount: (referral.referralCount || 0) + 1,
      })
    }
    return null
  },
  /** List all referral records attributed to this user as referrer. */
  async getReferrals(userId) {
    return db.list(Collections.REFERRALS, [
      Query.equal('referredBy', userId),
    ])
  },
}
/**
* Orders operations
*/
@@ -450,6 +713,8 @@ export const emailDigests = {
},
}
export { Query }
export default {
db,
products,

View File

@@ -64,6 +64,16 @@ export function redirect(res, url, statusCode = 302) {
return res.redirect(statusCode, url)
}
/**
 * Error response
 *
 * Sends a JSON error envelope: `{ success: false, error: <errorData> }`.
 * @param {object} res - Express-style response object
 * @param {*} errorData - error payload (message string or details object)
 * @param {number} [statusCode=400] - HTTP status code to send
 * @returns the value returned by `res.json()`
 */
export function error(res, errorData, statusCode = 400) {
  const body = {
    success: false,
    error: errorData,
  }
  return res.status(statusCode).json(body)
}
/**
* Response helpers object
*/
@@ -73,6 +83,7 @@ export const respond = {
noContent,
paginated,
redirect,
error,
}
export default respond