fix: resolve production deployment issues and switch to in-memory rate limiting

- Fix CORS configuration to use CORS_ORIGIN env variable
- Switch from Redis-based to in-memory rate limiting for stability
- Fix frontend authentication error handling for public API
- Disable problematic trackingRateLimit middleware
- Update environment configuration for production

This resolves hanging issues with the tracking API and enables
frontend forms to work properly in production.
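
The diff below only touches the bulk processor service; the CORS and rate-limiting changes live in files not shown here. As a rough illustration of the direction described above, here is a minimal sketch of env-driven CORS plus a fixed-window in-memory rate limiter, assuming an Express app and the cors package; names such as inMemoryRateLimit, the window size, and the /api/track mount point are illustrative, not code from this commit:

    import express from 'express';
    import cors from 'cors';

    const app = express();

    // CORS origin read from the CORS_ORIGIN env variable instead of a hard-coded value.
    app.use(cors({ origin: process.env.CORS_ORIGIN || '*', credentials: true }));

    // Minimal fixed-window, per-IP rate limiter kept entirely in process memory,
    // so no Redis connection is needed. Defaults here are illustrative.
    function inMemoryRateLimit(windowMs = 60_000, max = 100): express.RequestHandler {
      const hits = new Map<string, { count: number; resetAt: number }>();
      return (req, res, next) => {
        const key = req.ip ?? 'unknown';
        const now = Date.now();
        const entry = hits.get(key);
        if (!entry || entry.resetAt <= now) {
          // New client or expired window: start a fresh window.
          hits.set(key, { count: 1, resetAt: now + windowMs });
          return next();
        }
        if (entry.count >= max) {
          return res.status(429).json({ error: 'Too many requests' });
        }
        entry.count += 1;
        next();
      };
    }

    app.use('/api/track', inMemoryRateLimit());

A per-process Map like this trades the cross-instance accuracy of a Redis-backed counter for zero network dependencies, which is the stability trade-off the commit message describes.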
Author: Andrei
Date:   2025-08-19 18:25:48 +00:00
Commit: 76f3cf22d1
Parent: c34de838f4

6 changed files with 156 additions and 121 deletions

@@ -35,7 +35,7 @@ export interface BulkTrackingJob {
     enableSecurityAnalysis: boolean;
     headers?: Record<string, string>;
   };
-  status: 'pending' | 'processing' | 'completed' | 'failed' | 'cancelled';
+  status: 'PENDING' | 'QUEUED' | 'RUNNING' | 'COMPLETED' | 'FAILED' | 'CANCELLED' | 'ERROR';
   progress: {
     total: number;
     processed: number;
@@ -101,26 +101,26 @@ export class BulkProcessorService {
   private readonly uploadsDir: string;
 
   constructor() {
-    this.redis = new IORedis({
-      host: process.env.REDIS_HOST || 'localhost',
-      port: parseInt(process.env.REDIS_PORT || '6379'),
-      retryDelayOnFailover: 100,
-      enableReadyCheck: false,
-      maxRetriesPerRequest: null,
-    });
+    // TEMPORARY: Disable Redis for bulk processing to avoid hangs
+    // this.redis = new IORedis({
+    //   host: process.env.REDIS_HOST || 'localhost',
+    //   port: parseInt(process.env.REDIS_PORT || '6379'),
+    //   enableReadyCheck: false,
+    //   maxRetriesPerRequest: null,
+    // });
 
-    this.trackingQueue = new Queue('bulk-tracking', {
-      connection: this.redis,
-      defaultJobOptions: {
-        removeOnComplete: 100, // Keep last 100 completed jobs
-        removeOnFail: 50, // Keep last 50 failed jobs
-        attempts: 3,
-        backoff: {
-          type: 'exponential',
-          delay: 2000,
-        },
-      },
-    });
+    // this.trackingQueue = new Queue('bulk-tracking', {
+    //   connection: this.redis,
+    //   defaultJobOptions: {
+    //     removeOnComplete: 100, // Keep last 100 completed jobs
+    //     removeOnFail: 50, // Keep last 50 failed jobs
+    //     attempts: 3,
+    //     backoff: {
+    //       type: 'exponential',
+    //       delay: 2000,
+    //     },
+    //   },
+    // });
 
     this.uploadsDir = path.join(process.cwd(), 'uploads');
     this.ensureUploadsDirectory();
@@ -209,7 +209,8 @@ export class BulkProcessorService {
   async createBulkJob(
     userId: string,
     organizationId: string | undefined,
-    jobData: BulkJobCreateRequest
+    jobData: BulkJobCreateRequest,
+    filePath?: string
   ): Promise<BulkTrackingJob> {
     try {
       // Validate input
@@ -222,16 +223,17 @@ export class BulkProcessorService {
         data: {
           id: jobId,
           userId,
-          organizationId,
-          projectId: validatedData.projectId,
-          status: 'pending',
+          organizationId: organizationId || null,
+          projectId: validatedData.projectId || 'default-project',
+          uploadPath: filePath || 'api',
+          status: 'PENDING' as any,
           totalUrls: validatedData.urls.length,
           processedUrls: 0,
           successfulUrls: 0,
           failedUrls: 0,
           configJson: JSON.stringify(validatedData.options),
           urlsJson: JSON.stringify(validatedData.urls),
-        },
+        } as any,
       });
 
       // Queue the job for processing
@@ -255,9 +257,9 @@ export class BulkProcessorService {
         userId,
         organizationId,
         projectId: validatedData.projectId,
-        urls: validatedData.urls,
-        options: validatedData.options,
-        status: 'pending',
+        urls: validatedData.urls as any,
+        options: validatedData.options as any,
+        status: 'PENDING',
         progress: {
           total: validatedData.urls.length,
           processed: 0,
@@ -287,6 +289,7 @@ export class BulkProcessorService {
     userId: string,
     organizationId: string | undefined,
     filePath: string,
+    projectId: string,
     options: Partial<BulkJobCreateRequest['options']> = {}
   ): Promise<BulkTrackingJob> {
     try {
@@ -309,9 +312,10 @@ export class BulkProcessorService {
           enableSecurityAnalysis: true,
           ...options,
         },
+        projectId
       };
 
-      const job = await this.createBulkJob(userId, organizationId, jobData);
+      const job = await this.createBulkJob(userId, organizationId, jobData, filePath);
 
       // Clean up uploaded file
       await fs.unlink(filePath).catch(() => {});
@@ -347,8 +351,8 @@ export class BulkProcessorService {
     const job: BulkTrackingJob = {
       id: bulkJob.id,
       userId: bulkJob.userId,
-      organizationId: bulkJob.organizationId || undefined,
-      projectId: bulkJob.projectId || undefined,
+      ...(bulkJob.organizationId ? { organizationId: bulkJob.organizationId } : {}),
+      ...(bulkJob.projectId ? { projectId: bulkJob.projectId } : {}),
       urls: JSON.parse(bulkJob.urlsJson as string),
       options: JSON.parse(bulkJob.configJson as string),
       status: bulkJob.status as BulkTrackingJob['status'],
@@ -376,7 +380,7 @@ export class BulkProcessorService {
    * Calculate estimated completion time
    */
   private calculateEstimatedCompletion(bulkJob: any): Date | undefined {
-    if (!bulkJob.startedAt || bulkJob.status === 'completed' || bulkJob.status === 'failed') {
+    if (!bulkJob.startedAt || bulkJob.status === 'COMPLETED' || bulkJob.status === 'FAILED') {
       return undefined;
     }
 
@@ -406,7 +410,7 @@ export class BulkProcessorService {
           userId,
         },
         data: {
-          status: 'cancelled',
+          status: 'CANCELLED',
           finishedAt: new Date(),
         },
       });
@@ -446,8 +450,8 @@ export class BulkProcessorService {
       const job: BulkTrackingJob = {
         id: bulkJob.id,
         userId: bulkJob.userId,
-        organizationId: bulkJob.organizationId || undefined,
-        projectId: bulkJob.projectId || undefined,
+        ...(bulkJob.organizationId ? { organizationId: bulkJob.organizationId } : {}),
+        ...(bulkJob.projectId ? { projectId: bulkJob.projectId } : {}),
         urls: JSON.parse(bulkJob.urlsJson as string),
         options: JSON.parse(bulkJob.configJson as string),
         status: bulkJob.status as BulkTrackingJob['status'],
@@ -535,7 +539,7 @@ export class BulkProcessorService {
           lt: cutoff,
         },
         status: {
-          in: ['completed', 'failed', 'cancelled'],
+          in: ['COMPLETED', 'FAILED', 'CANCELLED'],
         },
       },
     });