BREAKING CHANGE: Remove public/spooler_db/ legacy system Changes: - Migrate validation preview from http://glenlis/spooler_db/main_dev.php to CI4 /report/{accessnumber} - Add ReportController::preview() for HTML preview in validation dialog - Add ReportController::generatePdf() to queue PDF generation via node_spooler at http://glenlis:3030 - Add ReportController::checkPdfStatus() to poll spooler job status - Add ReportController::postToSpooler() helper for curl requests to spooler API - Add routes: GET /report/(:num)/preview, GET /report/(:num)/pdf, GET /report/status/(:any) - Delete public/spooler_db/ directory (40+ legacy files) - Compact node_spooler/README.md from 577 to 342 lines Technical Details: - New architecture: CI4 Controller -> node_spooler (port 3030) -> Chrome CDP (port 42020) - API endpoints: POST /api/pdf/generate, GET /api/pdf/status/:jobId, GET /api/queue/stats - Features: Max 5 concurrent jobs, max 100 in queue, auto-cleanup after 60 min - Error handling: Chrome crash detection, manual error review in data/error/ - PDF infrastructure ready, frontend PDF buttons to be updated later in production Migration verified: - No external code references spooler_db - All assets duplicated in public/assets/report/ - Syntax checks passed for ReportController.php and Routes.php Refs: node_spooler/README.md
331 lines
9.8 KiB
JavaScript
const express = require('express');
|
|
const bodyParser = require('body-parser');
|
|
const CRI = require('chrome-remote-interface');
|
|
const fs = require('fs');
|
|
const path = require('path');
|
|
|
|
// Log file locations. The directory is created eagerly because the log
// helpers use synchronous appendFileSync, which throws ENOENT if the
// target directory is missing (e.g. on a fresh checkout).
const LOGS_DIR = path.join(__dirname, 'logs');
const LOG_FILE = path.join(LOGS_DIR, 'spooler.log');
const ERROR_LOG_FILE = path.join(LOGS_DIR, 'errors.log');
const METRICS_LOG_FILE = path.join(LOGS_DIR, 'metrics.log');

fs.mkdirSync(LOGS_DIR, { recursive: true });
|
|
|
|
/**
 * Service configuration. Frozen so nothing can mutate shared settings at
 * runtime. Note the pair of related knobs: jobCleanupMinutes is how often
 * the sweep of finished jobs runs; jobRetentionMs is how old a finished
 * job must be before the sweep drops it from memory.
 */
const CONFIG = Object.freeze({
  port: 3030,                     // HTTP port of this spooler API
  chromePort: 42020,              // Chrome remote-debugging (CDP) port
  maxConcurrent: 5,               // max simultaneous render jobs
  maxQueueSize: 100,              // waiting jobs before addJob() rejects
  jobCleanupMinutes: 60,          // interval between cleanup sweeps
  jobRetentionMs: 60 * 60 * 1000  // finished-job retention before removal
});
|
|
|
|
/**
 * Append an INFO entry to the main log file and echo it to stdout.
 * @param {string} message - Human-readable log text.
 * @param {Object|null} [data] - Optional context, JSON-serialized into the entry.
 */
function logInfo(message, data = null) {
  const stamp = new Date().toISOString();
  const suffix = data ? ' ' + JSON.stringify(data) : '';
  fs.appendFileSync(LOG_FILE, `[${stamp}] [INFO] ${message}${suffix}\n`);
  console.log(`[INFO] ${message}`, data || '');
}
|
|
|
|
/**
 * Append a WARN entry to the main log file and echo it to stderr (warn).
 * @param {string} message - Human-readable log text.
 * @param {Object|null} [data] - Optional context, JSON-serialized into the entry.
 */
function logWarn(message, data = null) {
  const stamp = new Date().toISOString();
  const suffix = data ? ' ' + JSON.stringify(data) : '';
  fs.appendFileSync(LOG_FILE, `[${stamp}] [WARN] ${message}${suffix}\n`);
  console.warn(`[WARN] ${message}`, data || '');
}
|
|
|
|
/**
 * Append an ERROR entry to both the error log and the main log, and echo
 * it to stderr.
 *
 * Call sites pass either a real Error or a plain context object (e.g.
 * logError('Queue full', { size, max })). The old formatter read only
 * .message/.stack, so plain objects were logged as "undefined\nundefined";
 * non-Error values are now JSON-serialized instead.
 *
 * @param {string} message - Human-readable log text.
 * @param {Error|Object|null} [error] - Error or context data to record.
 */
function logError(message, error = null) {
  const timestamp = new Date().toISOString();
  let detail = '';
  if (error instanceof Error) {
    detail = ' ' + error.message + '\n' + error.stack;
  } else if (error) {
    detail = ' ' + JSON.stringify(error);
  }
  const logEntry = `[${timestamp}] [ERROR] ${message}${detail}\n`;
  fs.appendFileSync(ERROR_LOG_FILE, logEntry);
  fs.appendFileSync(LOG_FILE, logEntry);
  console.error(`[ERROR] ${message}`, error || '');
}
|
|
|
|
/**
 * In-memory PDF render queue driven over the Chrome DevTools Protocol (CDP).
 *
 * Job lifecycle: queued -> processing -> completed | error.
 * Finished jobs stay queryable for CONFIG.jobRetentionMs, then a periodic
 * sweep drops them from memory (the PDF file itself stays on disk).
 */
class PDFQueue {

  constructor() {
    this.queue = [];              // FIFO of jobs waiting to be rendered
    this.processing = new Set();  // ids of jobs currently rendering
    this.jobs = new Map();        // jobId -> job record, all states
    this.chrome = null;           // CDP client from chrome-remote-interface
    this.connected = false;       // true while the CDP connection is believed healthy
    this.cleanupInterval = null;  // timer handle armed by startCleanup()
  }

  /**
   * Open the CDP connection and arm the periodic job sweep.
   * @throws when Chrome is not reachable on CONFIG.chromePort.
   */
  async initialize() {
    try {
      // Explicit IPv4 host: some systems resolve localhost to ::1, where
      // Chrome's debugging port is not listening.
      this.chrome = await CRI({ port: CONFIG.chromePort, host: '127.0.0.1' });
      this.connected = true;
      logInfo('Chrome CDP connected', { port: CONFIG.chromePort });
    } catch (error) {
      this.connected = false;
      logError('Chrome CDP connection failed', error);
      throw error;
    }

    this.startCleanup();
  }

  /**
   * Enqueue a render job and kick the scheduler.
   * @param {string} html - Full HTML document to render.
   * @param {string} [filename] - Output file name; defaults to "<jobId>.pdf".
   * @returns {Object} The job record (status "queued").
   * @throws when the queue already holds CONFIG.maxQueueSize jobs.
   */
  addJob(html, filename) {
    if (this.queue.length >= CONFIG.maxQueueSize) {
      logError('Queue full', { size: this.queue.length, max: CONFIG.maxQueueSize });
      throw new Error('Queue is full, please try again later');
    }

    // Timestamp + 9-char random suffix; slice() replaces deprecated substr().
    const jobId = `job_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
    const job = {
      id: jobId,
      html,
      filename: filename || `${jobId}.pdf`,
      status: 'queued',
      createdAt: Date.now(),
      startedAt: null,
      completedAt: null,
      processingTime: null,
      error: null,
      pdfUrl: null
    };

    this.queue.push(job);
    this.jobs.set(jobId, job);

    this.processQueue();
    return job;
  }

  /** Start queued jobs until the concurrency cap is reached. */
  async processQueue() {
    while (this.processing.size < CONFIG.maxConcurrent && this.queue.length > 0) {
      const job = this.queue.shift();
      // Deliberately not awaited: jobs run concurrently up to maxConcurrent.
      this.processJob(job);
    }
  }

  /**
   * Render one job to PDF via CDP and persist the result. On failure the
   * full job record is dumped to data/error/ for manual review, and a
   * Chrome/CDP-looking error triggers a reconnect attempt.
   */
  async processJob(job) {
    this.processing.add(job.id);
    job.status = 'processing';
    job.startedAt = Date.now();

    try {
      if (!this.connected) {
        await this.initialize();
      }

      // NOTE(review): all concurrent jobs share the single CDP page, so
      // with maxConcurrent > 1 the setDocumentContent/printToPDF calls of
      // different jobs can interleave and render the wrong HTML. Consider
      // one CDP target per job, or maxConcurrent = 1 — confirm in prod.
      const { Page } = this.chrome;

      await Page.enable();

      // Page.setContent is a Puppeteer API, not a CDP method; the CDP
      // equivalent is Page.setDocumentContent, which needs the main
      // frame id from Page.getFrameTree.
      const { frameTree } = await Page.getFrameTree();
      await Page.setDocumentContent({ frameId: frameTree.frame.id, html: job.html });

      // CDP printToPDF has no `format`/`margin` parameters (those are
      // Puppeteer-style); paper size and margins are given in inches.
      // A4 = 8.27 x 11.69 in, zero margins as before.
      const pdf = await Page.printToPDF({
        paperWidth: 8.27,
        paperHeight: 11.69,
        printBackground: true,
        marginTop: 0,
        marginBottom: 0,
        marginLeft: 0,
        marginRight: 0
      });

      // Ensure the output directory exists before the synchronous write.
      const pdfDir = path.join(__dirname, 'data/pdfs');
      fs.mkdirSync(pdfDir, { recursive: true });
      const outputPath = path.join(pdfDir, job.filename);
      fs.writeFileSync(outputPath, Buffer.from(pdf.data, 'base64'));

      job.status = 'completed';
      job.completedAt = Date.now();
      job.processingTime = (job.completedAt - job.startedAt) / 1000;
      job.pdfUrl = `/node_spooler/data/pdfs/${job.filename}`;

      logInfo('PDF generated successfully', {
        jobId: job.id,
        filename: job.filename,
        processingTime: job.processingTime
      });

      this.logMetrics(job);

    } catch (error) {
      job.status = 'error';
      job.error = error.message;
      job.completedAt = Date.now();

      // Persist the failed job for manual review; a failure here must not
      // mask the original error.
      try {
        const errorDir = path.join(__dirname, 'data/error');
        fs.mkdirSync(errorDir, { recursive: true });
        const errorPath = path.join(errorDir, `${job.id}.json`);
        fs.writeFileSync(errorPath, JSON.stringify(job, null, 2));
      } catch (writeError) {
        logError('Failed to persist error report', writeError);
      }

      logError('PDF generation failed', {
        jobId: job.id,
        filename: job.filename,
        error: error.message
      });

      // Heuristic crash detection based on the error text.
      if (error.message.includes('Chrome') || error.message.includes('CDP')) {
        await this.handleChromeCrash();
      }
    }

    this.processing.delete(job.id);
    this.processQueue();
  }

  /**
   * Attempt to re-establish the CDP connection after a suspected Chrome
   * crash (up to 3 tries, 2 s apart). Jobs still in the queue simply wait;
   * jobs that were mid-flight surface their own errors through
   * processJob's catch block.
   */
  async handleChromeCrash() {
    logWarn('Chrome crashed, attempting restart...');

    this.queue.forEach(job => job.status = 'queued');
    this.processing.clear();
    this.connected = false;

    for (let i = 0; i < 3; i++) {
      try {
        await this.initialize();
        logInfo('Chrome restarted successfully');
        return;
      } catch (error) {
        logError(`Chrome restart attempt ${i + 1} failed`, error);
        // Back off 2 s between attempts.
        await new Promise(resolve => setTimeout(resolve, 2000));
      }
    }

    logError('Chrome restart failed after 3 attempts');
  }

  /**
   * Arm the periodic sweep of finished jobs. initialize() runs again after
   * every Chrome reconnect, so replace any existing timer instead of
   * stacking a second interval (previously leaked one per reconnect).
   */
  startCleanup() {
    if (this.cleanupInterval) {
      clearInterval(this.cleanupInterval);
    }
    this.cleanupInterval = setInterval(() => {
      this.cleanupOldJobs();
    }, CONFIG.jobCleanupMinutes * 60 * 1000);
  }

  /** Drop completed/errored jobs older than CONFIG.jobRetentionMs from memory. */
  cleanupOldJobs() {
    const now = Date.now();
    const jobsToDelete = [];

    for (const [jobId, job] of this.jobs) {
      if (job.status === 'completed' || job.status === 'error') {
        const age = now - job.completedAt;
        if (age > CONFIG.jobRetentionMs) {
          jobsToDelete.push(jobId);
        }
      }
    }

    jobsToDelete.forEach(jobId => {
      this.jobs.delete(jobId);
    });

    if (jobsToDelete.length > 0) {
      logInfo('Cleaned up old jobs', { count: jobsToDelete.length });
    }
  }

  /** Append a one-line record of a finished job to the metrics log. */
  logMetrics(job) {
    const timestamp = new Date().toISOString();
    const logEntry = `[${timestamp}] ${job.id} status=${job.status} time=${job.processingTime}s filename=${job.filename}\n`;
    fs.appendFileSync(METRICS_LOG_FILE, logEntry);
  }

  /** @returns {Object|undefined} The job record for jobId, if still retained. */
  getJob(jobId) {
    return this.jobs.get(jobId);
  }

  /**
   * Snapshot of queue health for the /api/queue/stats endpoint.
   * @returns {{queueSize: number, processing: number, completed: number,
   *            errors: number, avgProcessingTime: number, maxQueueSize: number}}
   */
  getStats() {
    const allJobs = Array.from(this.jobs.values());
    const completedJobs = allJobs.filter(j => j.status === 'completed');
    const errorJobs = allJobs.filter(j => j.status === 'error');

    const avgTime = completedJobs.length > 0
      ? completedJobs.reduce((sum, j) => sum + j.processingTime, 0) / completedJobs.length
      : 0;

    return {
      queueSize: this.queue.length,
      processing: this.processing.size,
      completed: completedJobs.length,
      errors: errorJobs.length,
      avgProcessingTime: avgTime,
      maxQueueSize: CONFIG.maxQueueSize
    };
  }
}
|
|
|
|
const app = express();

// Report HTML payloads can easily exceed body-parser's 100 kb default JSON
// limit, which would reject large /api/pdf/generate requests with HTTP 413.
app.use(bodyParser.json({ limit: '10mb' }));

// Serve generated PDFs (and error reports) straight from disk.
app.use('/node_spooler/data', express.static(path.join(__dirname, 'data')));

const queue = new PDFQueue();
|
|
|
|
/**
 * Connect to Chrome, register the HTTP API, and start listening.
 * Exits the process if Chrome is not reachable at startup — the service
 * is useless without a CDP connection.
 *
 * HTTP contract (consumed by the CI4 ReportController — keep shapes stable):
 *   POST /api/pdf/generate       {html, filename?} -> {success, jobId, status, message}
 *   GET  /api/pdf/status/:jobId  -> {success, jobId, status, progress, pdfUrl, error}
 *   GET  /api/queue/stats        -> {success, ...queue stats}
 *   GET  /api/cleanup            -> advisory message only (no cleanup performed)
 */
async function startServer() {
  try {
    await queue.initialize();
  } catch (error) {
    logError('Failed to connect to Chrome', error);
    console.error('Please start Chrome with: "C:/Program Files/Google/Chrome/Application/chrome.exe" --headless --disable-gpu --remote-debugging-port=42020');
    process.exit(1);
  }

  // Queue a PDF render job. Responds immediately; callers poll the status
  // endpoint for completion.
  app.post('/api/pdf/generate', async (req, res) => {
    try {
      const { html, filename } = req.body;

      if (!html) {
        return res.status(400).json({
          success: false,
          error: 'HTML content is required'
        });
      }

      const job = queue.addJob(html, filename);

      res.json({
        success: true,
        jobId: job.id,
        status: job.status,
        message: 'Job added to queue'
      });

    } catch (error) {
      // NOTE(review): a full queue also lands here as HTTP 500; 503 may
      // suit the polling client better — confirm with the CI4 side.
      logError('API error', error);
      res.status(500).json({
        success: false,
        error: error.message
      });
    }
  });

  // Poll job state. Progress is coarse: 0 queued, 50 processing, 100 done.
  // 404 after a completed job ages out of retention (60 min) is expected.
  app.get('/api/pdf/status/:jobId', (req, res) => {
    const { jobId } = req.params;
    const job = queue.getJob(jobId);

    if (!job) {
      return res.status(404).json({
        success: false,
        error: 'Job not found'
      });
    }

    res.json({
      success: true,
      jobId: job.id,
      status: job.status,
      progress: job.status === 'completed' ? 100 : (job.status === 'processing' ? 50 : 0),
      pdfUrl: job.pdfUrl,
      error: job.error
    });
  });

  // Aggregate queue metrics: size, in-flight, completed, errors, avg time.
  app.get('/api/queue/stats', (req, res) => {
    const stats = queue.getStats();
    res.json({
      success: true,
      ...stats
    });
  });

  // Intentionally a no-op: disk cleanup of old PDFs is a manual npm script.
  app.get('/api/cleanup', (req, res) => {
    res.json({
      success: true,
      message: 'Please run cleanup manually: npm run cleanup'
    });
  });

  // NOTE(review): bound to localhost only. If the CI4 app calls this
  // service via http://glenlis:3030 from a different host, this must bind
  // 0.0.0.0 instead — confirm the deployment topology.
  app.listen(CONFIG.port, 'localhost', () => {
    logInfo(`PDF Spooler started on port ${CONFIG.port}`);
  });
}
|
|
|
|
// Bootstrap the service; any rejected startup promise is fatal.
startServer().catch(function (startupError) {
  logError('Server startup failed', startupError);
  process.exit(1);
});
|