// Source: MinglarBackendNestJS/src/modules/host/handlers/addCompanyDetails.ts
// (393 lines · 14 KiB · TypeScript)
// modules/host/handlers/addCompanyDetails.ts
import config from '@/config/config';
import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
import AWS from 'aws-sdk';
import Busboy from 'busboy';
import { PrismaService } from '../../../common/database/prisma.service';
import { verifyHostToken } from '../../../common/middlewares/jwt/authForHost';
import { safeHandler } from '../../../common/utils/handlers/safeHandler';
import ApiError from '../../../common/utils/helper/ApiError';
import {
hostCompanyDetailsSchema,
hostDocumentsSchema,
parentCompanySchema,
REQUIRED_DOC_TYPES
} from '../../../common/utils/validation/host/hostCompanyDetails.validation';
import { HostService } from '../../host/services/host.service';
import { sendEmailToAM, sendEmailToMinglarAdmin } from '../services/sendHostResubmitEmailToAM.service';
// Module-scope singletons: created once per Lambda container and reused
// across warm invocations (avoids reconnecting Prisma on every request).
const prisma = new PrismaService();
const hostService = new HostService(prisma);
// S3 client used for logo/document uploads; region comes from app config.
const s3 = new AWS.S3({
region: config.aws.region,
});
/**
 * Returns `fields[key]` normalized to a plain object.
 *
 * Multipart form fields arrive as strings, but callers may also pass
 * pre-parsed objects; both shapes are accepted.
 *
 * @param fields - parsed form-field map
 * @param key - field name to read
 * @returns the value as an object, or `undefined` when absent/falsy
 * @throws ApiError(400) when the value is neither an object nor valid JSON
 */
function normalizeJsonField(fields: any, key: string) {
  const raw = fields[key];
  if (!raw) return undefined;
  if (typeof raw === "object") return raw;
  if (typeof raw !== "string") {
    throw new ApiError(400, `Invalid input: ${key} must be object or JSON string.`);
  }
  try {
    return JSON.parse(raw);
  } catch (err) {
    throw new ApiError(400, `Invalid JSON in field: ${key}`);
  }
}
/**
 * Lambda handler: create/update a host's company details from a
 * multipart/form-data request, upload attached documents to S3, persist
 * everything via HostService, and — for final (non-draft) submissions —
 * notify the account manager or the Minglar admin by email.
 *
 * Expected multipart parts:
 *   fields: companyDetails (JSON, required), documents (JSON array, required),
 *           userProfile (JSON, optional), isDraft ('true'/'false', optional)
 *   files:  optional 'companyLogo', plus one file per documents[i].fieldName
 *
 * Auth: 'x-auth-token' header, verified by verifyHostToken.
 * All validation/parsing failures throw ApiError(400); safeHandler converts
 * thrown errors into the HTTP error response.
 */
export const handler = safeHandler(async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {
  try {
    // 1) Auth — check both lowercase and original-case header keys, since
    // API Gateway does not normalize header casing.
    const token = event.headers['x-auth-token'] || event.headers['X-Auth-Token'];
    if (!token) throw new ApiError(400, 'This is a protected route. Please provide a valid token.');
    const userInfo = await verifyHostToken(token);

    // 2) Require a multipart body.
    const contentType = event.headers['content-type'] || event.headers['Content-Type'];
    if (!contentType?.includes('multipart/form-data')) {
      throw new ApiError(400, 'Content-Type must be multipart/form-data.');
    }

    // API Gateway may deliver the body base64-encoded; otherwise decode as
    // 'binary' (latin1) to preserve the raw bytes of the multipart stream.
    let bodyBuffer: Buffer;
    if (event.isBase64Encoded) {
      bodyBuffer = Buffer.from(event.body as string, 'base64');
    } else {
      bodyBuffer = Buffer.from(event.body as string, 'binary');
    }

    const fields: Record<string, any> = {};
    const files: Array<{ buffer: Buffer; mimeType: string; fileName: string; fieldName: string }> = [];

    // 3) Parse the multipart payload with Busboy, buffering each file fully
    // in memory (bounded by the 5 MB per-file cap below).
    await new Promise<void>((resolve, reject) => {
      const bb = Busboy({
        headers: {
          'content-type': contentType
        }
      });
      bb.on('file', (fieldname, file, info) => {
        const { filename, mimeType } = info;
        const chunks: Buffer[] = [];
        let totalSize = 0;
        const MAX_SIZE = 5 * 1024 * 1024; // 5 MB per-file cap
        file.on('data', (chunk) => {
          totalSize += chunk.length;
          if (totalSize > MAX_SIZE) {
            // FIX: was `$(unknown)` — literal text, not interpolation.
            file.destroy(new Error(`File ${filename} exceeds 5MB limit.`));
            return;
          }
          chunks.push(chunk);
        });
        file.on('end', () => {
          if (chunks.length > 0) {
            files.push({
              buffer: Buffer.concat(chunks),
              mimeType: mimeType || 'application/octet-stream',
              fileName: filename || 'unknown',
              fieldName: fieldname,
            });
          }
        });
        file.on('error', (error) => {
          reject(new ApiError(400, `File upload error: ${error.message}`));
        });
      });
      bb.on('field', (fieldname, val) => {
        fields[fieldname] = val;
      });
      bb.on('close', () => {
        resolve();
      });
      bb.on('error', (error) => {
        reject(new ApiError(400, `Multipart parsing error: ${error.message}`));
      });
      bb.write(bodyBuffer);
      bb.end();
    });

    // Extract isDraft flag from fields (defaults to false). Busboy field
    // values are strings, so 'true' is the normal truthy form.
    const isDraft = fields.isDraft === 'true' || fields.isDraft === true;

    // Optional: partial update of the user's profile before touching
    // company data. Only the provided keys are written.
    if (fields.userProfile) {
      const userProfileRaw = normalizeJsonField(fields, "userProfile");
      if (userProfileRaw) {
        const { firstName, lastName, mobileNumber } = userProfileRaw;
        if (firstName || lastName || mobileNumber) {
          await prisma.user.update({
            where: { id: userInfo.id },
            data: {
              ...(firstName && { firstName }),
              ...(lastName && { lastName }),
              ...(mobileNumber && { mobileNumber }),
            },
          });
        }
      }
    }

    // 4) Validate required root fields.
    if (!fields.companyDetails) throw new ApiError(400, 'Missing companyDetails field.');
    if (!fields.documents) throw new ApiError(400, 'Missing documents field.');

    // 5) Parse companyDetails.
    const companyDetailsRaw = normalizeJsonField(fields, "companyDetails");
    if (!companyDetailsRaw) throw new ApiError(400, "companyDetails is required.");

    // Determine the host id used in S3 key paths: reuse the existing host
    // header if present, otherwise fall back to the user id (corrected after
    // persistence for brand-new hosts — see step 13).
    const existingHost = await prisma.hostHeader.findFirst({
      where: { userXid: userInfo.id },
    });
    let hostId: number;
    if (existingHost) {
      hostId = existingHost.id;
    } else {
      hostId = userInfo.id;
    }

    /**
     * Uploads a buffer to S3 under a folder determined by `folderType` and
     * returns the object's public-style URL (the object itself is private).
     * `documentTypeXid` and `fieldName` are required for the two document
     * folder types and are baked into the stored file name.
     */
    async function uploadToS3(buffer: Buffer, mimeType: string, originalName: string, folderType: 'logo' | 'documents' | 'parent_company', documentTypeXid?: number, fieldName?: string) {
      let s3Key: string;
      // Collapse anything outside [a-z0-9.] into single underscores.
      const sanitizeFileName = (name: string) => {
        return name
          .toLowerCase()
          .replace(/[^a-z0-9.]/g, '_')
          .replace(/_+/g, '_')
          .replace(/^_+|_+$/g, '');
      };
      const fileExtension = originalName.split('.').pop() || 'pdf';
      if (folderType === 'logo') {
        const sanitizedFileName = sanitizeFileName(originalName);
        s3Key = `Documents/Host/${hostId}/logo/${sanitizedFileName}`;
      } else if (folderType === 'documents' && documentTypeXid && fieldName) {
        const fileName = `${documentTypeXid}_${fieldName}.${fileExtension}`;
        const sanitizedFileName = sanitizeFileName(fileName);
        s3Key = `Documents/Host/${hostId}/documents/${sanitizedFileName}`;
      } else if (folderType === 'parent_company' && documentTypeXid && fieldName) {
        const fileName = `${documentTypeXid}_${fieldName}.${fileExtension}`;
        const sanitizedFileName = sanitizeFileName(fileName);
        s3Key = `Documents/Host/${hostId}/parent_company/${sanitizedFileName}`;
      } else {
        throw new ApiError(400, 'Invalid folder type or missing documentTypeXid/fieldName');
      }
      await s3
        .upload({
          Bucket: config.aws.bucketName,
          Key: s3Key,
          Body: buffer,
          ContentType: mimeType,
          ACL: 'private',
        })
        .promise();
      console.log(`File uploaded successfully: ${s3Key}`);
      return `https://${config.aws.bucketName}.s3.${config.aws.region}.amazonaws.com/${s3Key}`;
    }

    // 5.5) Handle the optional company logo upload; the resulting URL is
    // merged into companyDetails before validation.
    const logoFile = files.find((f) => f.fieldName === 'companyLogo');
    if (logoFile) {
      const logoPath = await uploadToS3(
        logoFile.buffer,
        logoFile.mimeType,
        logoFile.fileName,
        'logo'
      );
      companyDetailsRaw.logoPath = logoPath;
      console.log('Company logo uploaded:', logoPath);
    }

    // 6) Zod validation for companyDetails.
    const companyValidation = hostCompanyDetailsSchema.safeParse(companyDetailsRaw);
    if (!companyValidation.success) {
      const message = companyValidation.error.issues.map((i) => i.message).join(', ');
      throw new ApiError(400, `Validation failed: ${message}`);
    }
    const parsedCompany = companyValidation.data;

    // 7) Parse and validate the documents metadata array.
    const documentsMetadataRaw = normalizeJsonField(fields, "documents");
    if (!Array.isArray(documentsMetadataRaw)) throw new ApiError(400, "documents must be an array.");
    if (!documentsMetadataRaw.length) throw new ApiError(400, 'Documents must be a non-empty array.');
    const docsParse = hostDocumentsSchema.safeParse(documentsMetadataRaw);
    if (!docsParse.success) {
      const message = docsParse.error.issues.map((i) => i.message).join(', ');
      throw new ApiError(400, `Documents validation failed: ${message}`);
    }
    // Default the owner to 'host' when the client omits it.
    const documentsMetadata = documentsMetadataRaw.map((d: any) => ({
      ...d,
      owner: d.owner || 'host',
    }));

    // 8) Map each metadata entry to its uploaded file part.
    const documentMetadata = documentsMetadata.map((doc: any) => {
      const file = files.find((f) => f.fieldName === doc.fieldName);
      if (!file) throw new ApiError(400, `File not found for field: ${doc.fieldName}`);
      return { ...doc, file };
    });

    // 9) Split host vs parent-company documents.
    const hostDocs = documentMetadata.filter((d) => d.owner === 'host');
    const parentDocs = documentMetadata.filter((d) => d.owner === 'parent');

    // 10) Final submissions must include every mandatory host document.
    if (!isDraft) {
      const hostUploadedTypes = hostDocs.map((d) => d.documentTypeXid);
      const requiredHostTypes = Object.values(REQUIRED_DOC_TYPES);
      const missingHostDocs = requiredHostTypes.filter((typeId) => !hostUploadedTypes.includes(typeId));
      if (missingHostDocs.length > 0) {
        throw new ApiError(400, `Missing mandatory documents for host: ${missingHostDocs.join(', ')}`);
      }
    }

    // 11) Subsidiary hosts must supply a valid parent company, and (when not
    // a draft) the parent's mandatory documents as well.
    let parsedParentCompany: any = null;
    if (parsedCompany.isSubsidairy) {
      if (!parsedCompany.parentCompany) {
        throw new ApiError(400, 'isSubsidairy is true but parentCompany object is missing inside companyDetails.');
      }
      const parentValidation = parentCompanySchema.safeParse(parsedCompany.parentCompany);
      if (!parentValidation.success) {
        const message = parentValidation.error.issues.map((i) => i.message).join(', ');
        throw new ApiError(400, `Parent company validation failed: ${message}`);
      }
      parsedParentCompany = parsedCompany.parentCompany;
      if (!isDraft) {
        const parentUploadedTypes = parentDocs.map((d) => d.documentTypeXid);
        const requiredParentTypes = Object.values(REQUIRED_DOC_TYPES);
        const missingParentDocs = requiredParentTypes.filter((typeId) => !parentUploadedTypes.includes(typeId));
        if (missingParentDocs.length > 0) {
          throw new ApiError(400, `Missing mandatory documents for parent company: ${missingParentDocs.join(', ')}`);
        }
      }
    }

    // 12) Upload files to S3 (same for both draft and final submission).
    const uploadedHostDocs: Array<{ documentTypeXid: number; documentName: string; filePath: string }> = [];
    const uploadedParentDocs: Array<{ documentTypeXid: number; documentName: string; filePath: string }> = [];
    for (const doc of hostDocs) {
      const filePath = await uploadToS3(
        doc.file.buffer,
        doc.file.mimeType,
        doc.file.fileName,
        'documents',
        doc.documentTypeXid,
        doc.fieldName
      );
      uploadedHostDocs.push({
        documentTypeXid: doc.documentTypeXid,
        documentName: doc.fieldName,
        filePath,
      });
    }
    if (parentDocs.length > 0) {
      for (const doc of parentDocs) {
        const filePath = await uploadToS3(
          doc.file.buffer,
          doc.file.mimeType,
          doc.file.fileName,
          'parent_company',
          doc.documentTypeXid,
          doc.fieldName
        );
        uploadedParentDocs.push({
          documentTypeXid: doc.documentTypeXid,
          // FIX: host docs record fieldName; fall back to it here so
          // documentName is never undefined when metadata omits it.
          documentName: doc.documentName ?? doc.fieldName,
          filePath,
        });
      }
    }

    // 13) Persist via hostService, passing the isDraft flag through.
    const createdOrUpdated = await hostService.addOrUpdateCompanyDetails(
      userInfo.id,
      parsedCompany,
      uploadedHostDocs,
      parsedParentCompany,
      uploadedParentDocs,
      isDraft
    );
    if (!createdOrUpdated) throw new ApiError(400, 'Failed to add/update company details.');
    // For a brand-new host, adopt the id assigned at creation time.
    if (!existingHost) {
      hostId = createdOrUpdated.id;
      console.log(`Host created with ID: ${hostId}`);
    }

    // 14) Notify only on FINAL submission: the account manager when one is
    // assigned, otherwise the Minglar admin.
    if (!isDraft) {
      const getSuggestionDetails = await hostService.getSuggestionDetails(userInfo.id);
      if (getSuggestionDetails.hostDetails.accountManagerXid !== null) {
        await sendEmailToAM(
          getSuggestionDetails.hostDetails.accountManager.emailAddress,
          getSuggestionDetails.hostDetails.accountManager.firstName,
          getSuggestionDetails.hostDetails.companyName,
          getSuggestionDetails.hostDetails.hostRefNumber
        );
      } else {
        await sendEmailToMinglarAdmin(
          config.MinglarAdminEmail,
          config.MinglarAdminName,
          getSuggestionDetails.hostDetails.companyName,
          getSuggestionDetails.hostDetails.hostRefNumber
        );
      }
    }

    // 15) Success response.
    return {
      statusCode: 200,
      headers: {
        'Content-Type': 'application/json',
        'Access-Control-Allow-Origin': '*',
      },
      body: JSON.stringify({
        success: true,
        message: isDraft
          ? 'Company details saved as draft successfully.'
          : 'Company (and parent if provided) details and documents uploaded successfully.',
        data: {
          id: createdOrUpdated.id,
          hostRefNumber: (createdOrUpdated as any).hostRefNumber,
          isDraft
        }
      }),
    };
  } catch (error: unknown) {
    // Log with context, then rethrow so safeHandler builds the error response.
    console.error('❌ Error in addCompanyDetails:', error);
    throw error;
  }
});