add batch email api (#149)

* add bulk email

* add bulk email api

* add batch email sdk changes
KM Koushik
2025-04-19 21:45:17 +10:00
committed by GitHub
parent 44e4f43e66
commit 3fe96b477f
10 changed files with 724 additions and 49 deletions


@@ -7,6 +7,7 @@ import { db } from "../db";
import { sendEmailThroughSes, sendEmailWithAttachments } from "../aws/ses";
import { getRedis } from "../redis";
import { DEFAULT_QUEUE_OPTIONS } from "../queue/queue-constants";
import { Prisma } from "@prisma/client";
function createQueueAndWorker(region: string, quota: number, suffix: string) {
const connection = getRedis();
@@ -112,6 +113,113 @@ export class EmailQueueService {
);
}
/**
* Efficiently queues multiple pre-defined email jobs using BullMQ's addBulk.
* Jobs are grouped by region and type (transactional/marketing) before queuing.
*
* @param jobs - Array of job details to queue.
* @returns A promise that resolves when all bulk additions are attempted.
*/
public static async queueBulk(
jobs: {
emailId: string;
region: string;
transactional: boolean;
unsubUrl?: string;
delay?: number;
timestamp?: number; // Optional: timestamp stored in the job data (defaults to Date.now())
}[]
): Promise<void> {
if (jobs.length === 0) {
console.log("[EmailQueueService]: No jobs provided for bulk queue.");
return;
}
if (!this.initialized) {
await this.init();
}
console.log(
`[EmailQueueService]: Starting bulk queue for ${jobs.length} jobs.`
);
// Group jobs by region and type
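// e.g. "us-east-1-transactional" or "eu-west-1-marketing"; one addBulk call is made per group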
const groupedJobs = jobs.reduce(
(acc, job) => {
const key = `${job.region}-${job.transactional ? "transactional" : "marketing"}`;
if (!acc[key]) {
acc[key] = {
queue: job.transactional
? this.transactionalQueue.get(job.region)
: this.marketingQueue.get(job.region),
region: job.region,
transactional: job.transactional,
jobDetails: [],
};
}
acc[key]?.jobDetails.push(job);
return acc;
},
{} as Record<
string,
{
queue: Queue | undefined;
region: string;
transactional: boolean;
jobDetails: typeof jobs;
}
>
);
const bulkAddPromises: Promise<any>[] = [];
for (const groupKey in groupedJobs) {
const group = groupedJobs[groupKey];
if (!group || !group.queue) {
console.error(
`[EmailQueueService]: Queue not found for group ${groupKey} during bulk add. Skipping ${group?.jobDetails?.length ?? 0} jobs.`
);
// Optionally: handle these skipped jobs (e.g., mark corresponding emails as failed)
continue;
}
const queue = group.queue;
const isBulk = !group.transactional;
const bulkData = group.jobDetails.map((job) => ({
name: job.emailId, // Use emailId as job name (matches single queue logic)
data: {
emailId: job.emailId,
timestamp: job.timestamp ?? Date.now(),
unsubUrl: job.unsubUrl,
isBulk,
},
opts: {
jobId: job.emailId, // Use emailId as jobId
delay: job.delay,
...DEFAULT_QUEUE_OPTIONS, // Apply default options (attempts, backoff)
},
}));
console.log(
`[EmailQueueService]: Adding ${bulkData.length} jobs to queue ${queue.name}`
);
bulkAddPromises.push(
queue.addBulk(bulkData).catch((error) => {
console.error(
`[EmailQueueService]: Failed to add bulk jobs to queue ${queue.name}:`,
error
);
// Optionally: handle bulk add failure (e.g., mark corresponding emails as failed)
})
);
}
await Promise.allSettled(bulkAddPromises);
console.log(
"[EmailQueueService]: Finished processing bulk queue requests."
);
}
public static async changeDelay(
emailId: string,
region: string,
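For reference, a minimal usage sketch of the new bulk queue method, assuming it is called from async application code; the email ids, regions, and unsubscribe URL below are hypothetical, not taken from this diff:

// Illustrative only: ids, regions, and the URL are placeholder values.
await EmailQueueService.queueBulk([
  { emailId: "em_1", region: "us-east-1", transactional: true },
  { emailId: "em_2", region: "us-east-1", transactional: true, delay: 60_000 },
  {
    emailId: "em_3",
    region: "eu-west-1",
    transactional: false,
    unsubUrl: "https://example.com/unsubscribe/em_3",
  },
]);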


@@ -220,3 +220,213 @@ export async function cancelEmail(emailId: string) {
},
});
}
/**
* Send multiple emails in bulk (up to 100 at a time)
* Handles template rendering, variable replacement, and efficient bulk queuing
*/
export async function sendBulkEmails(
emailContents: Array<
EmailContent & {
teamId: number;
apiKeyId?: number;
}
>
) {
if (emailContents.length === 0) {
throw new UnsendApiError({
code: "BAD_REQUEST",
message: "No emails provided for bulk send",
});
}
if (emailContents.length > 100) {
throw new UnsendApiError({
code: "BAD_REQUEST",
message: "Cannot send more than 100 emails in a single bulk request",
});
}
// Group emails by "from" address so each sender's domain is validated only once
const emailsByDomain = new Map<
string,
{
domain: Awaited<ReturnType<typeof validateDomainFromEmail>>;
emails: typeof emailContents;
}
>();
// First pass: validate domains and group emails
for (const content of emailContents) {
const { from } = content;
if (!emailsByDomain.has(from)) {
const domain = await validateDomainFromEmail(from, content.teamId);
emailsByDomain.set(from, { domain, emails: [] });
}
emailsByDomain.get(from)?.emails.push(content);
}
// Cache templates to avoid repeated database queries
const templateCache = new Map<
number,
{ subject: string; content: any; renderer: EmailRenderer }
>();
const createdEmails = [];
const queueJobs = [];
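// Filled across all domain groups and handed to EmailQueueService.queueBulk in one call below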
// Process each domain group
for (const { domain, emails } of emailsByDomain.values()) {
// Process emails in each domain group
for (const content of emails) {
const {
to,
from,
subject: subjectFromApiCall,
templateId,
variables,
text,
html: htmlFromApiCall,
teamId,
attachments,
replyTo,
cc,
bcc,
scheduledAt,
apiKeyId,
} = content;
let subject = subjectFromApiCall;
let html = htmlFromApiCall;
// Process template if specified
if (templateId) {
let templateData = templateCache.get(Number(templateId));
if (!templateData) {
const template = await db.template.findUnique({
where: { id: templateId },
});
if (template) {
const jsonContent = JSON.parse(template.content || "{}");
templateData = {
subject: template.subject || "",
content: jsonContent,
renderer: new EmailRenderer(jsonContent),
};
templateCache.set(Number(templateId), templateData);
}
}
if (templateData) {
subject = replaceVariables(templateData.subject, variables || {});
// Also expose each variable under a {{key}} alias so placeholders inside links are replaced too
const modifiedVariables = {
...variables,
...Object.keys(variables || {}).reduce(
(acc, key) => {
acc[`{{${key}}}`] = variables?.[key] || "";
return acc;
},
{} as Record<string, string>
),
};
html = await templateData.renderer.render({
shouldReplaceVariableValues: true,
variableValues: modifiedVariables,
});
}
}
if (!text && !html) {
throw new UnsendApiError({
code: "BAD_REQUEST",
message: `Either text or html is required for email to ${to}`,
});
}
const scheduledAtDate = scheduledAt ? new Date(scheduledAt) : undefined;
const delay = scheduledAtDate
? Math.max(0, scheduledAtDate.getTime() - Date.now())
: undefined;
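// A future scheduledAt becomes a positive queue delay; past or missing dates queue immediately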
try {
// Create email record
const email = await db.email.create({
data: {
to: Array.isArray(to) ? to : [to],
from,
subject: subject as string,
replyTo: replyTo
? Array.isArray(replyTo)
? replyTo
: [replyTo]
: undefined,
cc: cc ? (Array.isArray(cc) ? cc : [cc]) : undefined,
bcc: bcc ? (Array.isArray(bcc) ? bcc : [bcc]) : undefined,
text,
html,
teamId,
domainId: domain.id,
attachments: attachments ? JSON.stringify(attachments) : undefined,
scheduledAt: scheduledAtDate,
latestStatus: scheduledAtDate ? "SCHEDULED" : "QUEUED",
apiId: apiKeyId,
},
});
createdEmails.push(email);
// Prepare queue job
queueJobs.push({
emailId: email.id,
region: domain.region,
transactional: true, // Bulk emails are still transactional
delay,
timestamp: Date.now(),
});
} catch (error: any) {
console.error(
`Failed to create email record for recipient ${to}:`,
error
);
// Continue processing other emails
}
}
}
if (queueJobs.length === 0) {
throw new UnsendApiError({
code: "INTERNAL_SERVER_ERROR",
message: "Failed to create any email records",
});
}
// Bulk queue all jobs
try {
await EmailQueueService.queueBulk(queueJobs);
} catch (error: any) {
// Mark all created emails as failed
await Promise.all(
createdEmails.map(async (email) => {
await db.emailEvent.create({
data: {
emailId: email.id,
status: "FAILED",
data: {
error: error.toString(),
},
},
});
await db.email.update({
where: { id: email.id },
data: { latestStatus: "FAILED" },
});
})
);
throw error;
}
return createdEmails;
}
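
A minimal usage sketch of sendBulkEmails under assumed values (addresses, team id, and template id are illustrative, and the template id's type depends on the actual schema; the second entry exercises the template-rendering path shown above):

// Illustrative only: recipients, sender, teamId, and templateId are placeholder values.
const createdEmails = await sendBulkEmails([
  {
    to: "alice@example.com",
    from: "hello@updates.example.com",
    subject: "Welcome, Alice",
    html: "<p>Welcome aboard!</p>",
    teamId: 1,
  },
  {
    to: ["bob@example.com"],
    from: "hello@updates.example.com",
    templateId: "tmpl_123", // rendered via EmailRenderer; `variables` fills the placeholders
    variables: { name: "Bob" },
    teamId: 1,
    scheduledAt: "2025-04-20T09:00:00.000Z", // queued with a delay until this time
  },
]);
// Returns the created email records; queuing happens through EmailQueueService.queueBulk.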