perf(api): increase batch size

Isaac 2025-02-12 16:20:13 +00:00
parent 33c1b64019
commit a861f76df9
No known key found for this signature in database
GPG Key ID: 0DE40AE37BBA5C33
2 changed files with 9 additions and 7 deletions

View File

@@ -9,8 +9,8 @@ const archiver = require('archiver');
 const { iconURL } = require('../../../../../lib/misc');
 const pkg = require('../../../../../../package.json');
-// ! ceiL: at least 1
-const poolSize = Math.ceil(cpus().length / 4);
+// a single persistent pool shared across all exports
+const poolSize = Math.ceil(cpus().length / 4); // ! ceiL: at least 1
 const pool = Pool(() => spawn(new Worker('../../../../../lib/workers/export.js')), { size: poolSize });
 module.exports.get = fastify => ({
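
The hunk above keeps one worker pool alive for the whole process rather than spawning threads per request. A minimal standalone sketch of that pattern, assuming the threads.js library (its Pool/spawn/Worker API matches the calls in the diff); the worker path and the exportAll helper are illustrative, not from the source:

const { cpus } = require('node:os');
const { Pool, spawn, Worker } = require('threads');

// Math.ceil keeps the pool size at a minimum of 1, even on single-core hosts
const poolSize = Math.ceil(cpus().length / 4);

// one persistent pool; each queued task runs on whichever worker is free
const pool = Pool(() => spawn(new Worker('./workers/export.js')), { size: poolSize });

// ./workers/export.js would expose the task, e.g.:
//   const { expose } = require('threads/worker');
//   expose({ exportTicket: ticket => JSON.stringify(ticket) });

async function exportAll(tickets) {
	// pool.queue() resolves with the worker function's return value
	const lines = await Promise.all(tickets.map(t => pool.queue(w => w.exportTicket(t))));
	await pool.terminate(); // wind the workers down once no more work is queued
	return lines;
}
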
@@ -72,6 +72,7 @@ module.exports.get = fastify => ({
 	async function* ticketsGenerator() {
 		try {
 			let done = false;
+			const take = 50;
 			const findOptions = {
 				include: {
 					archivedChannels: true,
@@ -82,16 +83,16 @@ module.exports.get = fastify => ({
 					questionAnswers: true,
 				},
 				orderBy: { id: 'asc' },
-				take: 24,
+				take,
 				where: { guildId: id },
 			};
 			do {
 				const batch = await client.prisma.ticket.findMany(findOptions);
-				if (batch.length < findOptions.take) {
+				if (batch.length < take) {
 					done = true;
 				} else {
 					findOptions.skip = 1;
-					findOptions.cursor = { id: batch[findOptions.take - 1].id };
+					findOptions.cursor = { id: batch[take - 1].id };
 				}
 				// ! map (parallel) not for...of (serial)
 				yield* batch.map(async ticket => (await pool.queue(worker => worker.exportTicket(ticket)) + '\n'));
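
The generator above walks the table with Prisma cursor pagination: each page is ordered by id, the cursor is set to the last row of the previous page, and skip: 1 excludes that cursor row from the next page; a short page (fewer rows than take) signals the end. A self-contained sketch under those assumptions; prisma, guildId, and the JSON.stringify step stand in for the app's client, id, and worker call, and the while (!done) condition is inferred from the done flag in the diff:

async function* ticketsGenerator(prisma, guildId) {
	const take = 50;
	const findOptions = {
		orderBy: { id: 'asc' },
		take,
		where: { guildId },
	};
	let done = false;
	do {
		const batch = await prisma.ticket.findMany(findOptions);
		if (batch.length < take) {
			done = true; // a short page means nothing follows it
		} else {
			findOptions.skip = 1; // skip the cursor row itself next time
			findOptions.cursor = { id: batch[take - 1].id }; // last row of this page
		}
		// map (not for...of) starts every task before the first is awaited,
		// so the whole batch runs in parallel; the consumer awaits each promise
		yield* batch.map(async ticket => JSON.stringify(ticket) + '\n');
	} while (!done);
}

A consumer drains it with for await...of, which awaits each yielded promise in order while the rest of the batch keeps working in the background.
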

View File

@@ -9,8 +9,8 @@ const unzipper = require('unzipper');
 const { createInterface } = require('node:readline');
 const pkg = require('../../../../../../package.json');
-// ! ceiL: at least 1
-const poolSize = Math.ceil(cpus().length / 4);
+// a single persistent pool shared across all imports
+const poolSize = Math.ceil(cpus().length / 4); // ! ceiL: at least 1
 const pool = Pool(() => spawn(new Worker('../../../../../lib/workers/import.js')), { size: poolSize });
 function parseJSON(string) {
function parseJSON(string) { function parseJSON(string) {
@@ -157,6 +157,7 @@ module.exports.post = fastify => ({
 			ticketsPromises.push(pool.queue(worker => worker.importTicket(line, id, categoryMap)));
 		}
+		// TODO: batch 100 tickets per query?
 		const ticketsResolved = await Promise.all(ticketsPromises);
 		const queries = [];
 		const allMessages = [];
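
The new TODO asks whether the per-ticket inserts could be grouped. A hypothetical sketch of that idea, batching 100 rows per query with Prisma's createMany; ticketsResolved and client.prisma follow the diff, chunkSize is assumed, and createMany does not accept nested relation writes, which may be why the comment ends in a question mark:

const chunkSize = 100; // hypothetical, per the TODO
for (let i = 0; i < ticketsResolved.length; i += chunkSize) {
	const chunk = ticketsResolved.slice(i, i + chunkSize);
	// one INSERT for up to 100 tickets instead of one query per ticket
	await client.prisma.ticket.createMany({ data: chunk });
}
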