url_tracker_tool/apps/api/dist/routes/bulk.routes.js
Commit e867f98da3 by Andrei: Fix bulk CSV processing and improve user registration
- Fix bulk CSV upload functionality that was returning HTML errors
- Implement proper project/organization handling for logged-in vs anonymous users
- Update user registration to create unique Default Organization and Default Project
- Fix frontend API URL configuration for bulk upload endpoints
- Resolve foreign key constraint violations in bulk processing
- Update BulkProcessorService to use in-memory processing instead of Redis
- Fix redirect-tracker service to handle missing project IDs properly
- Update Prisma schema for optional project relationships in bulk jobs
- Improve registration form UI with better password validation and alignment
2025-08-23 21:30:06 +00:00
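
For context, a minimal client sketch that exercises the fixed upload endpoint, runnable on Node 18+ (global fetch, FormData, and Blob). The /api/bulk mount path, host, port, and bearer-token auth are illustrative assumptions; this file only defines the router and its requireAuth middleware.

// upload-example.mjs -- hypothetical client for the bulk CSV upload route below.
// Assumed (not confirmed by this file): router mounted at /api/bulk on localhost:3000,
// and a bearer token that satisfies requireAuth.
const csv = [
'url,label',
'https://example.com,Homepage',
'https://example.org/page,Landing',
].join('\n');

const form = new FormData();
// The field name 'file' matches upload.single('file') in the route handler.
form.append('file', new Blob([csv], { type: 'text/csv' }), 'urls.csv');

const res = await fetch('http://localhost:3000/api/bulk/upload', {
method: 'POST',
headers: { Authorization: `Bearer ${process.env.API_TOKEN}` },
body: form,
});
console.log(await res.json()); // => { success: true, data: { jobId, status: 'COMPLETED', ... } }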

"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const express_1 = __importDefault(require("express"));
const multer_1 = __importDefault(require("multer"));
const promises_1 = __importDefault(require("fs/promises"));
const fs_1 = __importDefault(require("fs"));
const zod_1 = require("zod");
const auth_middleware_1 = require("../middleware/auth.middleware");
const rate_limit_middleware_1 = require("../middleware/rate-limit.middleware");
const bulk_processor_service_1 = require("../services/bulk-processor.service");
const logger_1 = require("../lib/logger");
const prisma_1 = require("../lib/prisma");
const router = express_1.default.Router();
const bulkProcessor = new bulk_processor_service_1.BulkProcessorService();
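// Multer upload config: CSV only, max 5 MB, one file per request, staged under uploads/.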
const upload = (0, multer_1.default)({
dest: 'uploads/',
limits: {
fileSize: 5 * 1024 * 1024,
files: 1,
},
fileFilter: (req, file, cb) => {
if (file.mimetype === 'text/csv' || file.originalname.toLowerCase().endsWith('.csv')) {
cb(null, true);
}
else {
cb(new Error('Only CSV files are allowed'));
}
},
});
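// Zod request schemas. projectId is optional so bulk jobs can run without a project (see commit notes above).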
const CreateBulkJobSchema = zod_1.z.object({
projectId: zod_1.z.string().optional(),
urls: zod_1.z.array(zod_1.z.object({
url: zod_1.z.string().url(),
label: zod_1.z.string().optional(),
metadata: zod_1.z.record(zod_1.z.any()).optional(),
})).min(1).max(1000),
options: zod_1.z.object({
method: zod_1.z.enum(['GET', 'POST', 'HEAD']).default('GET'),
userAgent: zod_1.z.string().optional(),
maxHops: zod_1.z.number().min(1).max(20).default(10),
timeout: zod_1.z.number().min(1000).max(30000).default(15000),
enableSSLAnalysis: zod_1.z.boolean().default(true),
enableSEOAnalysis: zod_1.z.boolean().default(true),
enableSecurityAnalysis: zod_1.z.boolean().default(true),
headers: zod_1.z.record(zod_1.z.string()).optional(),
}).default({}),
});
const BulkJobParamsSchema = zod_1.z.object({
jobId: zod_1.z.string().min(1),
});
const GetJobsQuerySchema = zod_1.z.object({
limit: zod_1.z.string().transform(val => parseInt(val, 10) || 20).refine(val => val > 0 && val <= 100),
offset: zod_1.z.string().transform(val => parseInt(val, 10) || 0).refine(val => val >= 0),
}).partial();
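// POST /upload -- parse an uploaded CSV and track every URL inline (in-memory, no Redis queue).
// Expected CSV shape, derived from the parser below (header row optional):
//   url,label
//   https://example.com,Homepage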
router.post('/upload', auth_middleware_1.requireAuth, rate_limit_middleware_1.bulkRateLimit, upload.single('file'), async (req, res) => {
try {
if (!req.file) {
return res.status(400).json({
success: false,
error: 'No file uploaded',
});
}
const userId = req.user.id;
const organizationId = req.user.memberships?.[0]?.organizationId;
let projectId = req.body.projectId;
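// Resolve a project: use the explicit projectId if given, otherwise find or create a
// default project in the user's first organization (avoids the foreign-key violations
// noted in the commit message).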
if (!projectId && organizationId) {
let defaultProject = await prisma_1.prisma.project.findFirst({
where: { orgId: organizationId }
});
if (!defaultProject) {
defaultProject = await prisma_1.prisma.project.create({
data: {
name: 'Default Project',
orgId: organizationId,
settingsJson: {}
}
});
}
projectId = defaultProject.id;
}
if (!projectId) {
logger_1.logger.warn('No valid project ID found for bulk processing', { userId, organizationId });
}
const options = req.body.options ? JSON.parse(req.body.options) : {};
logger_1.logger.info(`Processing CSV upload for user: ${userId}`, {
filename: req.file.originalname,
size: req.file.size,
});
// Processing completes inline, so this job ID only labels the response payload.
const jobId = `bulk_${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
const csvContent = await promises_1.default.readFile(req.file.path, 'utf-8');
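// Naive CSV parse: one record per line, split on commas; skips a leading "url" header
// row and anything not starting with http. Quoted fields containing commas are not handled.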
const lines = csvContent.split('\n').filter(line => line.trim() && !line.startsWith('url'));
const urls = lines.map(line => {
const [url, label] = line.split(',').map(s => s.trim());
return { url, label: label || '' };
}).filter(item => item.url && item.url.startsWith('http'));
logger_1.logger.info(`Bulk upload processing: ${urls.length} URLs for user ${userId}`, {
projectId,
organizationId,
hasProject: !!projectId
});
const results = [];
let processed = 0;
let successful = 0;
let failed = 0;
// Load the tracker service once before the loop; require() caches the module anyway.
const { RedirectTrackerService } = await Promise.resolve().then(() => __importStar(require('../services/redirect-tracker.service')));
for (const urlData of urls) {
try {
const tracker = new RedirectTrackerService();
const trackingRequest = {
url: urlData.url,
method: 'GET',
maxHops: 10,
timeout: 15000,
enableSSLAnalysis: true,
enableSEOAnalysis: true,
enableSecurityAnalysis: true,
};
if (projectId) {
trackingRequest.projectId = projectId;
}
const result = await tracker.trackUrl(trackingRequest, userId);
results.push({
url: urlData.url,
label: urlData.label,
status: 'success',
checkId: result.id,
finalUrl: result.finalUrl,
redirectCount: result.redirectCount,
});
successful++;
}
catch (error) {
logger_1.logger.error(`Failed to process URL ${urlData.url}:`, error);
results.push({
url: urlData.url,
label: urlData.label,
status: 'failed',
error: error instanceof Error ? error.message : 'Unknown error',
});
failed++;
}
processed++;
}
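// Best-effort removal of the uploaded temp file; errors are ignored.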
await promises_1.default.unlink(req.file.path).catch(() => { });
res.json({
success: true,
data: {
jobId: jobId,
status: 'COMPLETED',
progress: {
total: urls.length,
processed: processed,
successful: successful,
failed: failed,
},
results: results,
message: `Bulk processing completed: ${successful} successful, ${failed} failed out of ${urls.length} URLs.`,
completedAt: new Date(),
},
});
}
catch (error) {
logger_1.logger.error('CSV upload failed:', error);
if (req.file) {
await promises_1.default.unlink(req.file.path).catch(() => { });
}
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to process CSV upload',
});
}
});
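// POST /jobs -- create an asynchronous bulk job via BulkProcessorService.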
router.post('/jobs', auth_middleware_1.requireAuth, rate_limit_middleware_1.bulkRateLimit, async (req, res) => {
try {
const userId = req.user.id;
const organizationId = req.user.memberships?.[0]?.organizationId;
const validatedData = CreateBulkJobSchema.parse(req.body);
logger_1.logger.info(`Creating bulk job for user: ${userId}`, {
urlCount: validatedData.urls.length,
projectId: validatedData.projectId,
});
const job = await bulkProcessor.createBulkJob(userId, organizationId, validatedData);
res.status(201).json({
success: true,
data: {
jobId: job.id,
status: job.status,
progress: job.progress,
estimatedCompletionAt: job.estimatedCompletionAt,
urls: job.urls.length,
},
});
}
catch (error) {
logger_1.logger.error('Bulk job creation failed:', error);
if (error instanceof zod_1.z.ZodError) {
return res.status(400).json({
success: false,
error: 'Validation failed',
details: error.errors,
});
}
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to create bulk job',
});
}
});
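// GET /jobs -- paginated list of the caller's bulk jobs, trimmed of per-URL payloads.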
router.get('/jobs', auth_middleware_1.requireAuth, (0, rate_limit_middleware_1.addRateLimitStatus)('bulk'), async (req, res) => {
try {
const userId = req.user.id;
const query = GetJobsQuerySchema.parse(req.query);
const jobs = await bulkProcessor.getUserBulkJobs(userId, query.limit || 20, query.offset || 0);
const sanitizedJobs = jobs.map(job => ({
id: job.id,
status: job.status,
progress: job.progress,
createdAt: job.createdAt,
startedAt: job.startedAt,
finishedAt: job.finishedAt,
estimatedCompletionAt: job.estimatedCompletionAt,
projectId: job.projectId,
urlCount: job.urls.length,
options: job.options,
}));
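// Note: total below is the size of the returned page, not the overall job count.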
res.json({
success: true,
data: sanitizedJobs,
meta: {
limit: query.limit || 20,
offset: query.offset || 0,
total: sanitizedJobs.length,
},
});
}
catch (error) {
logger_1.logger.error('Failed to get bulk jobs:', error);
if (error instanceof zod_1.z.ZodError) {
return res.status(400).json({
success: false,
error: 'Invalid query parameters',
details: error.errors,
});
}
res.status(500).json({
success: false,
error: 'Failed to retrieve bulk jobs',
});
}
});
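// GET /jobs/:jobId -- job status and progress; results are included only once the job is COMPLETED.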
router.get('/jobs/:jobId', auth_middleware_1.requireAuth, async (req, res) => {
try {
const userId = req.user.id;
const { jobId } = BulkJobParamsSchema.parse(req.params);
const job = await bulkProcessor.getBulkJob(jobId, userId);
if (!job) {
return res.status(404).json({
success: false,
error: 'Bulk job not found',
});
}
const responseData = {
id: job.id,
status: job.status,
progress: job.progress,
createdAt: job.createdAt,
startedAt: job.startedAt,
finishedAt: job.finishedAt,
estimatedCompletionAt: job.estimatedCompletionAt,
projectId: job.projectId,
urlCount: job.urls.length,
options: job.options,
};
if (job.status === 'COMPLETED' && job.results) {
responseData.results = job.results;
}
res.json({
success: true,
data: responseData,
});
}
catch (error) {
logger_1.logger.error('Failed to get bulk job:', error);
if (error instanceof zod_1.z.ZodError) {
return res.status(400).json({
success: false,
error: 'Invalid job ID',
details: error.errors,
});
}
res.status(500).json({
success: false,
error: 'Failed to retrieve bulk job',
});
}
});
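// DELETE /jobs/:jobId -- cancel a job that has not finished.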
router.delete('/jobs/:jobId', auth_middleware_1.requireAuth, async (req, res) => {
try {
const userId = req.user.id;
const { jobId } = BulkJobParamsSchema.parse(req.params);
const success = await bulkProcessor.cancelBulkJob(jobId, userId);
if (!success) {
return res.status(404).json({
success: false,
error: 'Bulk job not found or cannot be cancelled',
});
}
res.json({
success: true,
message: 'Bulk job cancelled successfully',
});
}
catch (error) {
logger_1.logger.error('Failed to cancel bulk job:', error);
if (error instanceof zod_1.z.ZodError) {
return res.status(400).json({
success: false,
error: 'Invalid job ID',
details: error.errors,
});
}
res.status(500).json({
success: false,
error: 'Failed to cancel bulk job',
});
}
});
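// GET /jobs/:jobId/export/csv -- stream the exported results file, then delete it.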
router.get('/jobs/:jobId/export/csv', auth_middleware_1.requireAuth, async (req, res) => {
try {
const userId = req.user.id;
const { jobId } = BulkJobParamsSchema.parse(req.params);
const filePath = await bulkProcessor.exportResultsToCsv(jobId, userId);
res.setHeader('Content-Type', 'text/csv');
res.setHeader('Content-Disposition', `attachment; filename="bulk-results-${jobId}.csv"`);
const fileStream = fs_1.default.createReadStream(filePath);
fileStream.pipe(res);
fileStream.on('end', async () => {
await promises_1.default.unlink(filePath).catch(() => { });
});
fileStream.on('error', (error) => {
logger_1.logger.error('File streaming error:', error);
// Once piping has started the headers are already sent; only emit JSON if they are not.
if (!res.headersSent) {
res.status(500).json({
success: false,
error: 'Failed to stream results file',
});
}
else {
res.end();
}
});
}
catch (error) {
logger_1.logger.error('Failed to export bulk job results:', error);
if (error instanceof zod_1.z.ZodError) {
return res.status(400).json({
success: false,
error: 'Invalid job ID',
details: error.errors,
});
}
res.status(500).json({
success: false,
error: error instanceof Error ? error.message : 'Failed to export results',
});
}
});
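// GET /stats -- current queue statistics.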
router.get('/stats', auth_middleware_1.requireAuth, async (req, res) => {
try {
const stats = await bulkProcessor.getQueueStats();
res.json({
success: true,
data: {
queue: stats,
timestamp: new Date().toISOString(),
},
});
}
catch (error) {
logger_1.logger.error('Failed to get queue stats:', error);
res.status(500).json({
success: false,
error: 'Failed to retrieve queue statistics',
});
}
});
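// DELETE /cleanup -- admin/owner only: purge jobs older than maxAge hours (default 72).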
router.delete('/cleanup', auth_middleware_1.requireAuth, async (req, res) => {
try {
const user = req.user;
const isAdmin = user.memberships?.some(m => m.role === 'ADMIN' || m.role === 'OWNER');
if (!isAdmin) {
return res.status(403).json({
success: false,
error: 'Admin privileges required',
});
}
const maxAgeHours = parseInt(String(req.query.maxAge ?? ''), 10) || 72;
await bulkProcessor.cleanupOldJobs(maxAgeHours);
res.json({
success: true,
message: `Cleanup completed for jobs older than ${maxAgeHours} hours`,
});
}
catch (error) {
logger_1.logger.error('Failed to cleanup old jobs:', error);
res.status(500).json({
success: false,
error: 'Failed to cleanup old jobs',
});
}
});
exports.default = router;
//# sourceMappingURL=bulk.routes.js.map