Added the add-company-details API and moved it into the host folder

This commit is contained in:
2025-11-13 15:53:35 +05:30
parent 8e19bb566d
commit 72f9e26ca6
13 changed files with 513 additions and 268 deletions

View File

@@ -29,7 +29,7 @@ declare module 'express-serve-static-core' {
* Core authentication function - verifies JWT and validates Host user
* Can be used by both Express middleware and Lambda handlers
*/
export async function verifyHostToken(token: string): Promise<{ id: number; role?: string }> {
export async function verifyMinglarAdminToken(token: string): Promise<{ id: number; role?: string }> {
if (!token) {
throw new ApiError(httpStatus.UNAUTHORIZED, 'Please authenticate');
}
@@ -86,7 +86,7 @@ const verifyCallback = async (
const token = req.header('x-auth-token') || req.cookies?.accessToken;
try {
const userInfo = await verifyHostToken(token);
const userInfo = await verifyMinglarAdminToken(token);
// Attach user to request
req.user = { id: userInfo.id.toString(), role: userInfo.role };

View File

@@ -29,7 +29,7 @@ declare module 'express-serve-static-core' {
* Core authentication function - verifies JWT and validates Host user
* Can be used by both Express middleware and Lambda handlers
*/
export async function verifyHostToken(token: string): Promise<{ id: number; role?: string }> {
export async function verifyUserToken(token: string): Promise<{ id: number; role?: string }> {
if (!token) {
throw new ApiError(httpStatus.UNAUTHORIZED, 'Please authenticate');
}
@@ -86,7 +86,7 @@ const verifyCallback = async (
const token = req.header('x-auth-token') || req.cookies?.accessToken;
try {
const userInfo = await verifyHostToken(token);
const userInfo = await verifyUserToken(token);
// Attach user to request
req.user = { id: userInfo.id.toString(), role: userInfo.role };

View File

@@ -0,0 +1,126 @@
import ApiError from './ApiError';
// Result of parsing a multipart/form-data body: plain-text fields plus raw file parts.
interface ParsedFormData {
  fields: Record<string, string>;
  files: Array<{
    fieldName: string;   // form field name the file was posted under
    fileName: string;    // original client-supplied file name
    contentType: string; // part Content-Type; application/octet-stream when absent
    data: Buffer;        // raw file bytes
  }>;
}
/**
 * Parse a multipart/form-data body from a Lambda (API Gateway style) event.
 *
 * Supports both base64-encoded and binary bodies. When `isBase64Encoded` is
 * false the body is sniffed: a body made up purely of base64-alphabet
 * characters is decoded as base64, anything else is read as raw binary
 * (latin1).
 *
 * @param eventBody       raw event body (may be null)
 * @param contentType     Content-Type header; must include multipart/form-data
 * @param isBase64Encoded API Gateway's isBase64Encoded flag
 * @returns parsed text fields and file parts
 * @throws ApiError(400) on a missing body, wrong content type, missing
 *         boundary, or an undecodable body
 */
export function parseMultipartFormData(
  eventBody: string | null,
  contentType: string | undefined,
  isBase64Encoded: boolean = false
): ParsedFormData {
  if (!eventBody) {
    throw new ApiError(400, 'Request body is required');
  }
  if (!contentType || !contentType.includes('multipart/form-data')) {
    throw new ApiError(400, 'Content-Type must be multipart/form-data');
  }
  // Extract boundary from Content-Type header
  const boundaryMatch = contentType.match(/boundary=([^;]+)/);
  if (!boundaryMatch) {
    throw new ApiError(400, 'Invalid multipart boundary');
  }
  const boundary = boundaryMatch[1].trim();
  // Decode base64 body if needed (API Gateway sends base64 encoded for binary media types)
  let bodyBuffer: Buffer;
  try {
    if (isBase64Encoded) {
      bodyBuffer = Buffer.from(eventBody, 'base64');
    } else {
      // NOTE(review): heuristic — a plain-text body consisting only of
      // base64-alphabet characters would be mis-decoded here; confirm the
      // gateway always sets isBase64Encoded for multipart requests.
      if (eventBody.match(/^[A-Za-z0-9+/=]+$/)) {
        bodyBuffer = Buffer.from(eventBody, 'base64');
      } else {
        bodyBuffer = Buffer.from(eventBody, 'binary');
      }
    }
  } catch (error) {
    throw new ApiError(400, 'Invalid request body encoding');
  }
  // Split the raw body on the boundary marker; each element is one part.
  const parts = bodyBuffer.toString('binary').split(`--${boundary}`);
  const fields: Record<string, string> = {};
  const files: ParsedFormData['files'] = [];
  for (const part of parts) {
    // Skip the empty preamble and the final "--" terminator.
    if (!part || part.trim() === '' || part.trim() === '--') {
      continue;
    }
    // Headers are separated from the part body by a blank line (CRLF CRLF).
    const [headers, ...bodyParts] = part.split('\r\n\r\n');
    if (!headers || bodyParts.length === 0) {
      continue;
    }
    // Re-join in case the part body itself contained a CRLF CRLF sequence.
    const body = bodyParts.join('\r\n\r\n').trim();
    if (!body) {
      continue;
    }
    // Parse Content-Disposition header for the field name.
    const contentDispositionMatch = headers.match(/Content-Disposition:\s*form-data;\s*name="([^"]+)"/);
    if (!contentDispositionMatch) {
      continue;
    }
    const fieldName = contentDispositionMatch[1];
    // A filename= attribute distinguishes file parts from plain fields.
    const filenameMatch = headers.match(/filename="([^"]+)"/);
    const contentTypeMatch = headers.match(/Content-Type:\s*([^\r\n]+)/);
    if (filenameMatch) {
      // It's a file
      const fileName = filenameMatch[1];
      const fileContentType = contentTypeMatch ? contentTypeMatch[1].trim() : 'application/octet-stream';
      // Convert body to buffer (remove trailing boundary markers)
      const fileData = Buffer.from(body.replace(/\r\n--$/, ''), 'binary');
      files.push({
        fieldName,
        fileName,
        contentType: fileContentType,
        data: fileData,
      });
    } else {
      // It's a regular field
      fields[fieldName] = body.replace(/\r\n--$/, '').trim();
    }
  }
  return { fields, files };
}
/**
* Parse JSON field from form data
*/
/**
 * Look up `fieldName` in the parsed form fields and decode it as JSON.
 *
 * @param fields    map of text form fields (as produced by parseMultipartFormData)
 * @param fieldName key to look up
 * @returns the decoded value, or null when the field is absent or empty
 * @throws ApiError(400) when the field holds malformed JSON
 */
export function parseJsonField(fields: Record<string, string>, fieldName: string): any {
  const rawValue = fields[fieldName];
  if (!rawValue) {
    return null;
  }
  try {
    return JSON.parse(rawValue);
  } catch {
    throw new ApiError(400, `Invalid JSON in field: ${fieldName}`);
  }
}

View File

@@ -2,8 +2,8 @@ import { z } from "zod";
// Allowed document types (must match your DocumentType master table IDs)
export const REQUIRED_DOC_TYPES = {
PAN: 1,
GST: 2,
GST: 1,
PAN: 2,
REGISTRATION: 3,
AADHAAR: 4,
};

View File

@@ -31,6 +31,8 @@ const envVarsSchema = yup
.number()
.default(10)
.required('minutes after which verify email token expires'),
AWS_REGION: yup.string().required('AWS region is required'),
S3_BUCKET_NAME: yup.string().required('S3 bucket name is required'),
//SMTP and BREVO
// BREVO_SMTP_HOST: yup
// .string()
@@ -129,6 +131,10 @@ function getConfig() {
socketPath: '/var/run/mysqld/mysqld.sock',
},
},
aws: {
region: envVars.AWS_REGION,
bucketName: envVars.S3_BUCKET_NAME,
},
byPassOTP: envVars.BYPASS_OTP,
// BaseURL: envVars.BASEURL,
// FRONTEND_URL: envVars.FRONTEND_URL,

View File

@@ -0,0 +1,177 @@
import { APIGatewayProxyEvent, APIGatewayProxyResult } from 'aws-lambda';
import { safeHandler } from '../../../common/utils/handlers/safeHandler';
import { PrismaService } from '../../../common/database/prisma.service';
import { HostService } from '../../host/services/host.service';
import ApiError from '../../../common/utils/helper/ApiError';
import { verifyHostToken } from '../../../common/middlewares/jwt/authForHost';
import {
hostCompanyDetailsSchema,
REQUIRED_DOC_TYPES,
} from '../../../common/utils/validation/host/hostCompanyDetails.validation';
import AWS from 'aws-sdk';
import Busboy from 'busboy';
import crypto from 'crypto';
import config from '@/config/config';
// Module-scoped singletons so warm Lambda invocations reuse the DB connection and S3 client.
const prisma = new PrismaService();
const hostService = new HostService(prisma);
const s3 = new AWS.S3({
  region: config.aws.region,
});
/**
 * Lambda handler: add a host's company details together with its KYC documents.
 *
 * Expects a base64-encoded multipart/form-data body containing:
 *  - `companyDetails`: JSON object validated against hostCompanyDetailsSchema
 *  - `documents`: JSON array of { documentTypeXid, documentName, fieldName }
 *  - one file part per entry in `documents`, keyed by `fieldName`
 *
 * Files are capped at 5 MB each, uploaded privately to S3 under
 * Documents/Host/, and persisted with the company record in one transaction.
 *
 * @throws ApiError(400/401) on missing token, bad content type, validation
 *         failure, missing mandatory documents, or persistence failure
 */
export const handler = safeHandler(async (event: APIGatewayProxyEvent): Promise<APIGatewayProxyResult> => {
  try {
    // 1. Authenticate — the token header casing depends on the gateway.
    const token = event.headers['x-auth-token'] || event.headers['X-Auth-Token'];
    if (!token) {
      throw new ApiError(400, 'This is a protected route. Please provide a valid token.');
    }
    const userInfo = await verifyHostToken(token);
    // 2. Require a base64-encoded multipart/form-data body.
    const contentType = event.headers['content-type'] || event.headers['Content-Type'];
    if (!contentType?.startsWith('multipart/form-data'))
      throw new ApiError(400, 'Content-Type must be multipart/form-data.');
    if (!event.isBase64Encoded)
      throw new ApiError(400, 'Event body must be base64 encoded for multipart uploads.');
    const bodyBuffer = Buffer.from(event.body as string, 'base64');
    const fields: Record<string, any> = {};
    const files: Array<{ buffer: Buffer; mimeType: string; fileName: string; fieldName: string }> = [];
    // 3. Stream-parse the multipart body with Busboy.
    await new Promise<void>((resolve, reject) => {
      const bb = Busboy({ headers: { 'content-type': contentType } });
      bb.on('file', (fieldname, file, info) => {
        const { filename, mimeType } = info;
        const chunks: Buffer[] = [];
        let totalSize = 0;
        const MAX_SIZE = 5 * 1024 * 1024; // 5 MB per file
        file.on('data', (chunk) => {
          totalSize += chunk.length;
          if (totalSize > MAX_SIZE) {
            file.resume(); // drain the rest of the stream so busboy can finish
            // FIX: was `File $(unknown)` — shell-style interpolation is not valid in a
            // template literal, so the message never contained the file name.
            return reject(new ApiError(400, `File ${filename} exceeds 5MB limit.`));
          }
          chunks.push(chunk);
        });
        file.on('end', () => {
          files.push({
            buffer: Buffer.concat(chunks),
            mimeType,
            fileName: filename,
            fieldName: fieldname,
          });
        });
      });
      bb.on('field', (fieldname, val) => {
        // Fields that are valid JSON are eagerly decoded; others stay strings.
        try {
          fields[fieldname] = JSON.parse(val);
        } catch {
          fields[fieldname] = val;
        }
      });
      bb.on('close', resolve);
      bb.on('error', reject);
      bb.end(bodyBuffer);
    });
    // 4. Validate presence of the expected parts.
    if (!fields.companyDetails) throw new ApiError(400, 'Missing companyDetails field.');
    if (!fields.documents) throw new ApiError(400, 'Missing documents field.');
    if (files.length === 0) throw new ApiError(400, 'At least one document file is required.');
    // Parse & validate companyDetails (may already be an object from the field handler).
    let companyDetails;
    try {
      companyDetails = typeof fields.companyDetails === 'string' ? JSON.parse(fields.companyDetails) : fields.companyDetails;
    } catch {
      throw new ApiError(400, 'Invalid JSON in companyDetails.');
    }
    const companyValidation = hostCompanyDetailsSchema.safeParse(companyDetails);
    if (!companyValidation.success) {
      const message = companyValidation.error.issues.map((e) => e.message).join(', ');
      throw new ApiError(400, `Validation failed: ${message}`);
    }
    const parsedCompany = companyValidation.data;
    let documentsMetadata;
    try {
      documentsMetadata = typeof fields.documents === 'string' ? JSON.parse(fields.documents) : fields.documents;
    } catch {
      throw new ApiError(400, 'Invalid JSON in documents.');
    }
    if (!Array.isArray(documentsMetadata) || documentsMetadata.length === 0)
      throw new ApiError(400, 'Documents must be a non-empty array.');
    // 5. Pair each metadata entry with its uploaded file by fieldName.
    const documentMetadata = documentsMetadata.map((doc: any) => {
      const file = files.find((f) => f.fieldName === doc.fieldName);
      if (!file) throw new ApiError(400, `File not found for field: ${doc.fieldName}`);
      return { ...doc, file };
    });
    // 6. Ensure every mandatory document type was supplied.
    const uploadedDocTypes = documentMetadata.map((d) => d.documentTypeXid);
    const missingDocs = Object.entries(REQUIRED_DOC_TYPES)
      .filter(([_, typeId]) => !uploadedDocTypes.includes(typeId))
      .map(([name]) => name);
    if (missingDocs.length > 0)
      throw new ApiError(400, `Missing mandatory documents: ${missingDocs.join(', ')}`);
    // 7. Upload each file privately to S3 under a collision-proof key.
    const uploadedDocs: Array<{ documentTypeXid: number; documentName: string; filePath: string }> = [];
    for (const doc of documentMetadata) {
      const uniqueKey = `${userInfo.id}_${crypto.randomUUID()}_${doc.file.fileName}`;
      const s3Key = `Documents/Host/${uniqueKey}`;
      await s3
        .upload({
          Bucket: config.aws.bucketName,
          Key: s3Key,
          Body: doc.file.buffer,
          ContentType: doc.file.mimeType,
          ACL: 'private',
        })
        .promise();
      uploadedDocs.push({
        documentTypeXid: doc.documentTypeXid,
        documentName: doc.documentName,
        filePath: `https://${config.aws.bucketName}.s3.${config.aws.region}.amazonaws.com/${s3Key}`,
      });
    }
    // 8. Persist company details + document rows transactionally.
    const createdHost = await hostService.addCompanyDetails(parsedCompany, uploadedDocs);
    if (!createdHost) throw new ApiError(400, 'Failed to add company details.');
    // 9. Success response.
    return {
      statusCode: 200,
      headers: {
        'Content-Type': 'application/json',
        'Access-Control-Allow-Origin': '*',
      },
      body: JSON.stringify({
        success: true,
        message: 'Company details and documents uploaded successfully.',
        data: createdHost,
      }),
    };
  } catch (error: any) {
    // Log with context, then let safeHandler shape the error response.
    console.error('❌ Error in addCompanyDetails:', error);
    throw error;
  }
});

View File

@@ -5,6 +5,17 @@ import { AddPaymentDetailsDTO, CreateHostDto, UpdateHostDto } from '../dto/host.
import * as bcrypt from 'bcryptjs';
import ApiError from '../../../common/utils/helper/ApiError';
import { User } from '@prisma/client';
import { z } from 'zod';
import { hostCompanyDetailsSchema } from '@/common/utils/validation/host/hostCompanyDetails.validation';
type HostCompanyDetailsInput = z.infer<typeof hostCompanyDetailsSchema>;
// Document input after S3 upload (with S3 URL as filePath)
interface HostDocumentInput {
documentTypeXid: number;
documentName: string;
filePath: string; // S3 URL
}
@Injectable()
export class HostService {
@@ -163,4 +174,61 @@ export class HostService {
return addedPaymentDetails;
}
  /**
   * Persist a host company profile and its uploaded documents atomically.
   *
   * Runs inside a single Prisma transaction: if the document insert fails,
   * the company row is rolled back too.
   *
   * @param companyData validated company payload (hostCompanyDetailsSchema)
   * @param documents   document metadata whose filePath is already an S3 URL
   * @returns the created hostHeader record
   * @throws ApiError(400) when a company with the same registrationNumber exists
   */
  async addCompanyDetails(
    companyData: HostCompanyDetailsInput,
    documents: HostDocumentInput[] // Documents with S3 URLs
  ) {
    return await this.prisma.$transaction(async (tx) => {
      // Reject duplicates keyed on registrationNumber before inserting.
      const existingHost = await tx.hostHeader.findFirst({
        where: { registrationNumber: companyData.registrationNumber },
      });
      if (existingHost) {
        throw new ApiError(400, 'Company already exists with this registration number');
      }
      // Create the company record (formationDate converted to a Date here).
      const createdHost = await tx.hostHeader.create({
        data: {
          companyName: companyData.companyName,
          hostRefNumber: companyData.hostRefNumber,
          address1: companyData.address1,
          address2: companyData.address2,
          cityXid: companyData.cityXid,
          stateXid: companyData.stateXid,
          countryXid: companyData.countryXid,
          pinCode: companyData.pinCode,
          logoPath: companyData.logoPath,
          isSubsidairy: companyData.isSubsidairy,
          registrationNumber: companyData.registrationNumber,
          panNumber: companyData.panNumber,
          gstNumber: companyData.gstNumber,
          formationDate: new Date(companyData.formationDate),
          companyType: companyData.companyType,
          websiteUrl: companyData.websiteUrl,
          instagramUrl: companyData.instagramUrl,
          facebookUrl: companyData.facebookUrl,
          linkedinUrl: companyData.linkedinUrl,
          twitterUrl: companyData.twitterUrl,
          currencyXid: companyData.currencyXid,
        },
      });
      // Bulk-insert document rows linked to the new company (if any provided).
      if (documents && documents.length > 0) {
        const docsData = documents.map((doc) => ({
          hostXid: createdHost.id,
          documentTypeXid: doc.documentTypeXid,
          documentName: doc.documentName,
          filePath: doc.filePath,
        }));
        await tx.hostDocuments.createMany({ data: docsData });
      }
      return createdHost;
    });
  }
}

View File

@@ -1,185 +0,0 @@
import { APIGatewayProxyEvent, APIGatewayProxyResult, Context } from 'aws-lambda';
import { safeHandler } from '../../../common/utils/handlers/safeHandler';
import { PrismaService } from '../../../common/database/prisma.service';
import { MinglarService } from '../services/minglar.service';
import ApiError from '../../../common/utils/helper/ApiError';
import { verifyHostToken } from '../../../common/middlewares/jwt/authForMinglarAdmin';
import {
hostCompanyDetailsSchema,
REQUIRED_DOC_TYPES,
} from '../../../common/utils/validation/host/hostCompanyDetails.validation';
import { uploadFilesToS3 } from '../../../common/utils/helper/s3Upload';
import { parseMultipartFormData } from '../../../common/utils/helper/parseMultipartFormData';
// Module-level singletons reused across warm Lambda invocations.
const prismaService = new PrismaService();
const minglarService = new MinglarService(prismaService);
/**
 * Lambda handler: add a host's company details plus supporting documents.
 *
 * Accepts either multipart/form-data (files uploaded as parts) or, for
 * backward compatibility, a JSON body whose documents carry base64 file data.
 * Uploaded files are pushed to S3 and their URLs stored with the company.
 */
export const handler = safeHandler(async (
  event: APIGatewayProxyEvent,
  context?: Context
): Promise<APIGatewayProxyResult> => {
  // ✅ 1. Extract & verify token
  const token = event.headers['x-auth-token'] || event.headers['X-Auth-Token'];
  if (!token) {
    throw new ApiError(400, 'This is a protected route. Please provide a valid token.');
  }
  const userInfo = await verifyHostToken(token);
  // ✅ 2. Check Content-Type and parse accordingly
  const contentType = event.headers['content-type'] || event.headers['Content-Type'] || '';
  let parsedCompany: any;
  let documentsWithS3Urls: Array<{ documentTypeXid: number; documentName: string; filePath: string }> = [];
  if (contentType.includes('multipart/form-data')) {
    // ✅ Parse multipart/form-data
    // API Gateway sets isBase64Encoded to true for binary media types
    const isBase64Encoded = (event as any).isBase64Encoded === true;
    const formData = parseMultipartFormData(event.body, contentType, isBase64Encoded);
    // ✅ Parse companyDetails from form field (should be JSON string)
    const companyDetailsJson = formData.fields['companyDetails'];
    if (!companyDetailsJson) {
      throw new ApiError(400, 'Company details are required in form data');
    }
    try {
      parsedCompany = JSON.parse(companyDetailsJson);
    } catch {
      throw new ApiError(400, 'Invalid JSON in companyDetails field');
    }
    // ✅ Validate company details against the zod schema
    const companyValidation = hostCompanyDetailsSchema.safeParse(parsedCompany);
    if (!companyValidation.success) {
      const errorMessages = companyValidation.error.issues.map(e => e.message).join(', ');
      throw new ApiError(400, `Validation failed: ${errorMessages}`);
    }
    parsedCompany = companyValidation.data;
    // ✅ At least one file part must have been uploaded
    if (formData.files.length === 0) {
      throw new ApiError(400, 'At least one document file is required');
    }
    // ✅ Parse documents metadata (JSON array pairing fieldName -> document type)
    const documentsJson = formData.fields['documents'];
    if (!documentsJson) {
      throw new ApiError(400, 'Documents metadata is required in form data');
    }
    let documentsMetadata: Array<{ documentTypeXid: number; documentName: string; fieldName: string }>;
    try {
      documentsMetadata = JSON.parse(documentsJson);
    } catch {
      throw new ApiError(400, 'Invalid JSON in documents field');
    }
    if (!Array.isArray(documentsMetadata) || documentsMetadata.length === 0) {
      throw new ApiError(400, 'Documents must be a non-empty array');
    }
    // ✅ Map each metadata entry to its uploaded file part by fieldName
    const documentMetadata: Array<{ documentTypeXid: number; documentName: string; file: typeof formData.files[0] }> = [];
    for (const docMeta of documentsMetadata) {
      const file = formData.files.find((f) => f.fieldName === docMeta.fieldName);
      if (!file) {
        throw new ApiError(400, `File not found for field: ${docMeta.fieldName}`);
      }
      documentMetadata.push({
        documentTypeXid: docMeta.documentTypeXid,
        documentName: docMeta.documentName,
        file,
      });
    }
    // ✅ Ensure all required document types (REQUIRED_DOC_TYPES) are present
    const uploadedDocTypes = documentMetadata.map((doc) => doc.documentTypeXid);
    const missingDocs = Object.entries(REQUIRED_DOC_TYPES)
      .filter(([_, typeId]) => !uploadedDocTypes.includes(typeId))
      .map(([name]) => name);
    if (missingDocs.length > 0) {
      throw new ApiError(400, `Missing mandatory documents: ${missingDocs.join(', ')}`);
    }
    // ✅ Upload files to S3 (helper takes base64 payloads)
    const filesToUpload = documentMetadata.map((doc) => ({
      fileData: doc.file.data.toString('base64'),
      fileName: doc.file.fileName,
      contentType: doc.file.contentType,
    }));
    const s3Urls = await uploadFilesToS3(filesToUpload, `host-documents/${userInfo.id}`);
    // ✅ Map returned S3 URLs back to documents (order-aligned by index)
    documentsWithS3Urls = documentMetadata.map((doc, index) => ({
      documentTypeXid: doc.documentTypeXid,
      documentName: doc.documentName,
      filePath: s3Urls[index],
    }));
  } else {
    // ✅ Fallback to JSON parsing (for backward compatibility)
    let body: { companyDetails?: unknown; documents?: unknown[] };
    try {
      body = event.body ? JSON.parse(event.body) : {};
    } catch {
      throw new ApiError(400, 'Invalid JSON in request body');
    }
    const { companyDetails, documents } = body;
    if (!companyDetails) {
      throw new ApiError(400, 'Company details are required');
    }
    // ✅ Validate company details
    const companyValidation = hostCompanyDetailsSchema.safeParse(companyDetails);
    if (!companyValidation.success) {
      const errorMessages = companyValidation.error.issues.map(e => e.message).join(', ');
      throw new ApiError(400, `Validation failed: ${errorMessages}`);
    }
    parsedCompany = companyValidation.data;
    // For JSON, we still expect base64 encoded files in documents array.
    // NOTE(review): this branch does not enforce REQUIRED_DOC_TYPES — confirm intended.
    if (documents && Array.isArray(documents) && documents.length > 0) {
      const filesToUpload = documents.map((doc: any) => ({
        fileData: doc.fileData,
        fileName: doc.documentName,
        contentType: doc.contentType || 'application/pdf',
      }));
      const s3Urls = await uploadFilesToS3(filesToUpload, `host-documents/${userInfo.id}`);
      documentsWithS3Urls = documents.map((doc: any, index: number) => ({
        documentTypeXid: doc.documentTypeXid,
        documentName: doc.documentName,
        filePath: s3Urls[index],
      }));
    }
  }
  // ✅ 3. Pass validated data to service (transactional persist)
  const createdHost = await minglarService.addCompanyDetails(parsedCompany, documentsWithS3Urls);
  if (!createdHost) {
    throw new ApiError(400, 'Failed to add company details');
  }
  // ✅ 4. Success response
  return {
    statusCode: 200,
    headers: {
      'Content-Type': 'application/json',
      'Access-Control-Allow-Origin': '*',
    },
    body: JSON.stringify({
      success: true,
      message: 'Company details and documents uploaded successfully',
    }),
  };
});

View File

@@ -2,17 +2,7 @@ import { Injectable } from '@nestjs/common';
import { PrismaService } from '../../../common/database/prisma.service';
import ApiError from '../../../common/utils/helper/ApiError';
import * as bcrypt from 'bcryptjs';
import { z } from 'zod';
import { hostCompanyDetailsSchema } from '../../../common/utils/validation/host/hostCompanyDetails.validation';
type HostCompanyDetailsInput = z.infer<typeof hostCompanyDetailsSchema>;
// Document input after S3 upload (with S3 URL as filePath)
interface HostDocumentInput {
documentTypeXid: number;
documentName: string;
filePath: string; // S3 URL
}
@Injectable()
export class MinglarService {
@@ -46,61 +36,4 @@ export class MinglarService {
return true;
}
  /**
   * Persist a host company profile and its uploaded documents atomically.
   *
   * Runs inside a single Prisma transaction: if the document insert fails,
   * the company row is rolled back too.
   *
   * @param companyData validated company payload (hostCompanyDetailsSchema)
   * @param documents   document metadata whose filePath is already an S3 URL
   * @returns the created hostHeader record
   * @throws ApiError(400) when a company with the same registrationNumber exists
   */
  async addCompanyDetails(
    companyData: HostCompanyDetailsInput,
    documents: HostDocumentInput[] // Documents with S3 URLs
  ) {
    return await this.prisma.$transaction(async (tx) => {
      // Reject duplicates keyed on registrationNumber before inserting.
      const existingHost = await tx.hostHeader.findFirst({
        where: { registrationNumber: companyData.registrationNumber },
      });
      if (existingHost) {
        throw new ApiError(400, 'Company already exists with this registration number');
      }
      // Create the company record (formationDate converted to a Date here).
      const createdHost = await tx.hostHeader.create({
        data: {
          companyName: companyData.companyName,
          hostRefNumber: companyData.hostRefNumber,
          address1: companyData.address1,
          address2: companyData.address2,
          cityXid: companyData.cityXid,
          stateXid: companyData.stateXid,
          countryXid: companyData.countryXid,
          pinCode: companyData.pinCode,
          logoPath: companyData.logoPath,
          isSubsidairy: companyData.isSubsidairy,
          registrationNumber: companyData.registrationNumber,
          panNumber: companyData.panNumber,
          gstNumber: companyData.gstNumber,
          formationDate: new Date(companyData.formationDate),
          companyType: companyData.companyType,
          websiteUrl: companyData.websiteUrl,
          instagramUrl: companyData.instagramUrl,
          facebookUrl: companyData.facebookUrl,
          linkedinUrl: companyData.linkedinUrl,
          twitterUrl: companyData.twitterUrl,
          currencyXid: companyData.currencyXid,
        },
      });
      // Bulk-insert document rows linked to the new company (if any provided).
      if (documents && documents.length > 0) {
        const docsData = documents.map((doc) => ({
          hostXid: createdHost.id,
          documentTypeXid: doc.documentTypeXid,
          documentName: doc.documentName,
          filePath: doc.filePath,
        }));
        await tx.hostDocuments.createMany({ data: docsData });
      }
      return createdHost;
    });
  }
}