"use strict";
|
|
var __importDefault = (this && this.__importDefault) || function (mod) {
|
|
return (mod && mod.__esModule) ? mod : { "default": mod };
|
|
};
|
|
Object.defineProperty(exports, "__esModule", { value: true });
|
|
const express_1 = __importDefault(require("express"));
|
|
const multer_1 = __importDefault(require("multer"));
|
|
const promises_1 = __importDefault(require("fs/promises"));
|
|
const zod_1 = require("zod");
|
|
const auth_middleware_1 = require("../middleware/auth.middleware");
|
|
const rate_limit_middleware_1 = require("../middleware/rate-limit.middleware");
|
|
const bulk_processor_service_1 = require("../services/bulk-processor.service");
|
|
const logger_1 = require("../lib/logger");
|
|
const router = express_1.default.Router();
|
|
const bulkProcessor = new bulk_processor_service_1.BulkProcessorService();
|
|
// Accept a single CSV file of at most 5 MiB; reject anything else up front.
const upload = (0, multer_1.default)({
    dest: 'uploads/',
    limits: {
        fileSize: 5 * 1024 * 1024, // 5 MiB
        files: 1,
    },
    fileFilter: (req, file, cb) => {
        if (file.mimetype === 'text/csv' || file.originalname.toLowerCase().endsWith('.csv')) {
            cb(null, true);
        }
        else {
            cb(new Error('Only CSV files are allowed'));
        }
    },
});
// Request-body, route-param, and query-string validation schemas.
const CreateBulkJobSchema = zod_1.z.object({
    projectId: zod_1.z.string().optional(),
    urls: zod_1.z.array(zod_1.z.object({
        url: zod_1.z.string().url(),
        label: zod_1.z.string().optional(),
        metadata: zod_1.z.record(zod_1.z.any()).optional(),
    })).min(1).max(1000),
    options: zod_1.z.object({
        method: zod_1.z.enum(['GET', 'POST', 'HEAD']).default('GET'),
        userAgent: zod_1.z.string().optional(),
        maxHops: zod_1.z.number().min(1).max(20).default(10),
        timeout: zod_1.z.number().min(1000).max(30000).default(15000),
        enableSSLAnalysis: zod_1.z.boolean().default(true),
        enableSEOAnalysis: zod_1.z.boolean().default(true),
        enableSecurityAnalysis: zod_1.z.boolean().default(true),
        headers: zod_1.z.record(zod_1.z.string()).optional(),
    }).default({}),
});
const BulkJobParamsSchema = zod_1.z.object({
    jobId: zod_1.z.string().min(1),
});
const GetJobsQuerySchema = zod_1.z.object({
    limit: zod_1.z.string().transform(val => parseInt(val, 10) || 20).refine(val => val > 0 && val <= 100),
    offset: zod_1.z.string().transform(val => parseInt(val, 10) || 0).refine(val => val >= 0),
}).partial();
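// Illustrative payload accepted by CreateBulkJobSchema (a sketch, not taken
// from this codebase): only `urls` is required; `options` falls back to its
// defaults when omitted.
//
//   {
//     "projectId": "my-project",                        // hypothetical ID
//     "urls": [{ "url": "https://example.com", "label": "homepage" }],
//     "options": { "method": "HEAD", "maxHops": 5 }
//   }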
// POST /upload — create a bulk job from an uploaded CSV of URLs.
router.post('/upload', auth_middleware_1.requireAuth, rate_limit_middleware_1.bulkRateLimit, upload.single('file'), async (req, res) => {
    try {
        if (!req.file) {
            return res.status(400).json({
                success: false,
                error: 'No file uploaded',
            });
        }
        const userId = req.user.id;
        const organizationId = req.user.memberships?.[0]?.organizationId;
        const projectId = req.body.projectId || 'default-project';
        // Note: a malformed `options` field makes JSON.parse throw, which the
        // catch below reports as a 500 rather than a 400.
        const options = req.body.options ? JSON.parse(req.body.options) : {};
        logger_1.logger.info(`Processing CSV upload for user: ${userId}`, {
            filename: req.file.originalname,
            size: req.file.size,
        });
        const job = await bulkProcessor.createBulkJobFromCsv(userId, organizationId, req.file.path, projectId, options);
        res.json({
            success: true,
            data: {
                jobId: job.id,
                status: job.status,
                progress: job.progress,
                estimatedCompletionAt: job.estimatedCompletionAt,
            },
        });
    }
    catch (error) {
        logger_1.logger.error('CSV upload failed:', error);
        // Best-effort removal of the temp file multer wrote under uploads/.
        if (req.file) {
            await promises_1.default.unlink(req.file.path).catch(() => { });
        }
        res.status(500).json({
            success: false,
            error: error instanceof Error ? error.message : 'Failed to process CSV upload',
        });
    }
});
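// Client-side sketch for the upload endpoint, assuming the router is mounted
// at /api/bulk and that requireAuth reads the caller's session; neither is
// confirmed by this file.
//
//   const form = new FormData();
//   form.append('file', csvFile); // a File/Blob holding the CSV
//   form.append('options', JSON.stringify({ method: 'HEAD' }));
//   const res = await fetch('/api/bulk/upload', { method: 'POST', body: form });
//   const { data } = await res.json(); // { jobId, status, progress, estimatedCompletionAt }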
// POST /jobs — create a bulk job from a JSON body of URLs.
router.post('/jobs', auth_middleware_1.requireAuth, rate_limit_middleware_1.bulkRateLimit, async (req, res) => {
    try {
        const userId = req.user.id;
        const organizationId = req.user.memberships?.[0]?.organizationId;
        const validatedData = CreateBulkJobSchema.parse(req.body);
        logger_1.logger.info(`Creating bulk job for user: ${userId}`, {
            urlCount: validatedData.urls.length,
            projectId: validatedData.projectId,
        });
        const job = await bulkProcessor.createBulkJob(userId, organizationId, validatedData);
        res.status(201).json({
            success: true,
            data: {
                jobId: job.id,
                status: job.status,
                progress: job.progress,
                estimatedCompletionAt: job.estimatedCompletionAt,
                urls: job.urls.length,
            },
        });
    }
    catch (error) {
        logger_1.logger.error('Bulk job creation failed:', error);
        if (error instanceof zod_1.z.ZodError) {
            return res.status(400).json({
                success: false,
                error: 'Validation failed',
                details: error.errors,
            });
        }
        res.status(500).json({
            success: false,
            error: error instanceof Error ? error.message : 'Failed to create bulk job',
        });
    }
});
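// JSON variant, same mounting assumption: POST a CreateBulkJobSchema-shaped
// body directly and expect 201 on success.
//
//   const res = await fetch('/api/bulk/jobs', {
//     method: 'POST',
//     headers: { 'Content-Type': 'application/json' },
//     body: JSON.stringify({ urls: [{ url: 'https://example.com' }] }),
//   });
//   const { data } = await res.json(); // data.jobId identifies the new job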
// GET /jobs — paginated list of the caller's bulk jobs.
router.get('/jobs', auth_middleware_1.requireAuth, (0, rate_limit_middleware_1.addRateLimitStatus)('bulk'), async (req, res) => {
    try {
        const userId = req.user.id;
        const query = GetJobsQuerySchema.parse(req.query);
        const jobs = await bulkProcessor.getUserBulkJobs(userId, query.limit || 20, query.offset || 0);
        // Expose only summary fields; per-URL results stay out of the listing.
        const sanitizedJobs = jobs.map(job => ({
            id: job.id,
            status: job.status,
            progress: job.progress,
            createdAt: job.createdAt,
            startedAt: job.startedAt,
            finishedAt: job.finishedAt,
            estimatedCompletionAt: job.estimatedCompletionAt,
            projectId: job.projectId,
            urlCount: job.urls.length,
            options: job.options,
        }));
        res.json({
            success: true,
            data: sanitizedJobs,
            meta: {
                limit: query.limit || 20,
                offset: query.offset || 0,
                total: sanitizedJobs.length,
            },
        });
    }
    catch (error) {
        logger_1.logger.error('Failed to get bulk jobs:', error);
        if (error instanceof zod_1.z.ZodError) {
            return res.status(400).json({
                success: false,
                error: 'Invalid query parameters',
                details: error.errors,
            });
        }
        res.status(500).json({
            success: false,
            error: 'Failed to retrieve bulk jobs',
        });
    }
});
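// Listing sketch (same assumptions): limit and offset travel as query strings
// and are parsed by GetJobsQuerySchema, with limit capped at 100.
//
//   const res = await fetch('/api/bulk/jobs?limit=50&offset=0');
//   const { data, meta } = await res.json(); // data: job summaries, meta: paging info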
// GET /jobs/:jobId — job detail; results are attached only once completed.
router.get('/jobs/:jobId', auth_middleware_1.requireAuth, async (req, res) => {
    try {
        const userId = req.user.id;
        const { jobId } = BulkJobParamsSchema.parse(req.params);
        const job = await bulkProcessor.getBulkJob(jobId, userId);
        if (!job) {
            return res.status(404).json({
                success: false,
                error: 'Bulk job not found',
            });
        }
        const responseData = {
            id: job.id,
            status: job.status,
            progress: job.progress,
            createdAt: job.createdAt,
            startedAt: job.startedAt,
            finishedAt: job.finishedAt,
            estimatedCompletionAt: job.estimatedCompletionAt,
            projectId: job.projectId,
            urlCount: job.urls.length,
            options: job.options,
        };
        if (job.status === 'COMPLETED' && job.results) {
            responseData.results = job.results;
        }
        res.json({
            success: true,
            data: responseData,
        });
    }
    catch (error) {
        logger_1.logger.error('Failed to get bulk job:', error);
        if (error instanceof zod_1.z.ZodError) {
            return res.status(400).json({
                success: false,
                error: 'Invalid job ID',
                details: error.errors,
            });
        }
        res.status(500).json({
            success: false,
            error: 'Failed to retrieve bulk job',
        });
    }
});
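// Polling sketch: fetch the job until it reports COMPLETED, at which point
// `results` appears in the payload (handleResults is hypothetical).
//
//   const res = await fetch(`/api/bulk/jobs/${jobId}`);
//   const { data } = await res.json();
//   if (data.status === 'COMPLETED') handleResults(data.results);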
// DELETE /jobs/:jobId — cancel a pending or running job.
router.delete('/jobs/:jobId', auth_middleware_1.requireAuth, async (req, res) => {
    try {
        const userId = req.user.id;
        const { jobId } = BulkJobParamsSchema.parse(req.params);
        const success = await bulkProcessor.cancelBulkJob(jobId, userId);
        if (!success) {
            return res.status(404).json({
                success: false,
                error: 'Bulk job not found or cannot be cancelled',
            });
        }
        res.json({
            success: true,
            message: 'Bulk job cancelled successfully',
        });
    }
    catch (error) {
        logger_1.logger.error('Failed to cancel bulk job:', error);
        if (error instanceof zod_1.z.ZodError) {
            return res.status(400).json({
                success: false,
                error: 'Invalid job ID',
                details: error.errors,
            });
        }
        res.status(500).json({
            success: false,
            error: 'Failed to cancel bulk job',
        });
    }
});
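// Cancellation sketch: a DELETE on the job URL; 404 means the job is unknown
// or already past the point of cancellation.
//
//   await fetch(`/api/bulk/jobs/${jobId}`, { method: 'DELETE' });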
// GET /jobs/:jobId/export/csv — stream results as a CSV attachment, then
// delete the temporary export file.
router.get('/jobs/:jobId/export/csv', auth_middleware_1.requireAuth, async (req, res) => {
    try {
        const userId = req.user.id;
        const { jobId } = BulkJobParamsSchema.parse(req.params);
        const filePath = await bulkProcessor.exportResultsToCsv(jobId, userId);
        res.setHeader('Content-Type', 'text/csv');
        res.setHeader('Content-Disposition', `attachment; filename="bulk-results-${jobId}.csv"`);
        const fileStream = require('fs').createReadStream(filePath);
        fileStream.pipe(res);
        fileStream.on('end', async () => {
            await promises_1.default.unlink(filePath).catch(() => { });
        });
        fileStream.on('error', (error) => {
            logger_1.logger.error('File streaming error:', error);
            // Headers are sent once piping starts, so a JSON error body can
            // only be written if the stream failed before the first chunk.
            if (!res.headersSent) {
                res.status(500).json({
                    success: false,
                    error: 'Failed to stream results file',
                });
            }
            else {
                res.end();
            }
        });
    }
    catch (error) {
        logger_1.logger.error('Failed to export bulk job results:', error);
        if (error instanceof zod_1.z.ZodError) {
            return res.status(400).json({
                success: false,
                error: 'Invalid job ID',
                details: error.errors,
            });
        }
        res.status(500).json({
            success: false,
            error: error instanceof Error ? error.message : 'Failed to export results',
        });
    }
});
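// Export sketch: the response is a streamed CSV attachment, so read it as a
// blob rather than JSON.
//
//   const res = await fetch(`/api/bulk/jobs/${jobId}/export/csv`);
//   const blob = await res.blob(); // served as bulk-results-<jobId>.csv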
// GET /stats — current processing-queue statistics.
router.get('/stats', auth_middleware_1.requireAuth, async (req, res) => {
    try {
        const stats = await bulkProcessor.getQueueStats();
        res.json({
            success: true,
            data: {
                queue: stats,
                timestamp: new Date().toISOString(),
            },
        });
    }
    catch (error) {
        logger_1.logger.error('Failed to get queue stats:', error);
        res.status(500).json({
            success: false,
            error: 'Failed to retrieve queue statistics',
        });
    }
});
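// Stats sketch:
//
//   const { data } = await (await fetch('/api/bulk/stats')).json();
//   console.log(data.queue, data.timestamp);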
// DELETE /cleanup — admin-only removal of jobs older than maxAge hours.
router.delete('/cleanup', auth_middleware_1.requireAuth, async (req, res) => {
    try {
        const user = req.user;
        const isAdmin = user.memberships?.some(m => m.role === 'ADMIN' || m.role === 'OWNER');
        if (!isAdmin) {
            return res.status(403).json({
                success: false,
                error: 'Admin privileges required',
            });
        }
        const maxAgeHours = parseInt(req.query.maxAge, 10) || 72;
        await bulkProcessor.cleanupOldJobs(maxAgeHours);
        res.json({
            success: true,
            message: `Cleanup completed for jobs older than ${maxAgeHours} hours`,
        });
    }
    catch (error) {
        logger_1.logger.error('Failed to cleanup old jobs:', error);
        res.status(500).json({
            success: false,
            error: 'Failed to cleanup old jobs',
        });
    }
});
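// Cleanup sketch (requires an ADMIN or OWNER membership):
//
//   await fetch('/api/bulk/cleanup?maxAge=24', { method: 'DELETE' });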
exports.default = router;
//# sourceMappingURL=bulk.routes.js.map