// mail-s3-admin/app/lib/utils.ts
import { createHash, timingSafeEqual } from 'crypto';
import https from 'https';
import { Readable } from 'stream';
import { S3Client } from '@aws-sdk/client-s3';
import { NodeHttpHandler } from '@smithy/node-http-handler';
import { NextRequest } from 'next/server';
/**
 * Build an S3 client configured from the environment.
 *
 * Validates the required credential variables up front so a misconfigured
 * deployment fails immediately with a clear error, instead of the previous
 * non-null assertions (`!`) silently handing `undefined` to the SDK and
 * producing opaque auth failures at request time.
 *
 * @returns a new {@link S3Client} (one per call; callers may cache it)
 * @throws Error when AWS_ACCESS_KEY_ID or AWS_SECRET_ACCESS_KEY is unset
 */
export function getS3Client(): S3Client {
  const accessKeyId = process.env.AWS_ACCESS_KEY_ID;
  const secretAccessKey = process.env.AWS_SECRET_ACCESS_KEY;
  if (!accessKeyId || !secretAccessKey) {
    throw new Error(
      'Missing AWS credentials: AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY must be set'
    );
  }
  return new S3Client({
    region: process.env.AWS_REGION,
    credentials: { accessKeyId, secretAccessKey },
    maxAttempts: 3, // retry transient failures up to 3 attempts total
    requestHandler: new NodeHttpHandler({
      connectionTimeout: 10000, // ms to establish the connection
      socketTimeout: 60000, // ms of socket inactivity before aborting
      httpsAgent: new https.Agent({
        keepAlive: true, // reuse TCP/TLS connections across requests
        maxSockets: 50 // cap concurrent connections to S3
      })
    })
  });
}
/**
 * Check HTTP Basic credentials against the fixed user `admin` and the
 * APP_PASSWORD environment variable.
 *
 * Fixes over the previous version:
 * - no longer logs the auth header or the expected password (secret leak);
 * - splits on the FIRST colon only, so passwords containing ':' work
 *   (RFC 7617 allows colons in the password part);
 * - refuses to authenticate when APP_PASSWORD is unset — previously a
 *   header whose decoded form had no colon yielded `pass === undefined`,
 *   which matched an unset env var and bypassed auth;
 * - compares in constant time via timingSafeEqual.
 *
 * @param req incoming request carrying an `Authorization: Basic …` header
 * @returns true only for user `admin` with the configured password
 */
export function authenticate(req: NextRequest): boolean {
  const auth = req.headers.get('Authorization');
  if (!auth || !auth.startsWith('Basic ')) return false;
  const expected = process.env.APP_PASSWORD;
  if (!expected) return false; // never authenticate against a missing secret
  const decoded = Buffer.from(auth.slice(6), 'base64').toString();
  const sep = decoded.indexOf(':');
  if (sep === -1) return false; // malformed Basic payload
  const user = decoded.slice(0, sep);
  const pass = decoded.slice(sep + 1);
  // Hash both sides so timingSafeEqual receives equal-length buffers, then
  // compare in constant time to avoid leaking the password via timing.
  const given = createHash('sha256').update(pass).digest();
  const want = createHash('sha256').update(expected).digest();
  return user === 'admin' && timingSafeEqual(given, want);
}
/**
 * Drain a readable stream into a single Buffer, with a timeout and a size cap.
 *
 * Fixes over the previous version: on timeout or size-limit breach the stream
 * is now destroyed, so the producer stops buffering data into a promise that
 * has already rejected (resource leak). The hard-coded 50 MiB cap is now a
 * parameter with the same default.
 *
 * @param stream    source stream to consume
 * @param timeoutMs reject if the stream has not finished within this window (default 30s)
 * @param maxBytes  reject once accumulated size exceeds this limit (default 50 MiB)
 * @returns the concatenated stream contents
 * @throws Error on timeout, size breach, stream error, or dataless close
 */
export async function getBody(
  stream: Readable,
  timeoutMs = 30000,
  maxBytes = 50 * 1024 * 1024
): Promise<Buffer> {
  return new Promise((resolve, reject) => {
    const chunks: Buffer[] = [];
    let totalSize = 0;

    // Detach everything so no handler fires after the promise settles.
    const cleanup = () => {
      clearTimeout(timeoutHandle);
      stream.removeAllListeners();
    };
    const fail = (err: Error) => {
      cleanup();
      // Stop the producer; without this an aborted transfer keeps reading.
      stream.destroy();
      reject(err);
    };

    const timeoutHandle = setTimeout(() => {
      fail(new Error(`Stream timeout after ${timeoutMs}ms`));
    }, timeoutMs);

    stream.on('data', (chunk: Buffer) => {
      chunks.push(chunk);
      totalSize += chunk.length;
      if (totalSize > maxBytes) {
        fail(new Error('Stream size exceeded maximum limit of 50MB'));
      }
    });
    stream.on('error', (err) => {
      cleanup();
      console.error('Stream error:', err);
      reject(err);
    });
    stream.on('end', () => {
      cleanup();
      resolve(Buffer.concat(chunks));
    });
    // Premature close: preserved behavior — resolve with whatever arrived.
    // NOTE(review): resolving a truncated body as success is questionable;
    // callers relying on completeness should verify length — confirm intent.
    stream.on('close', () => {
      cleanup();
      if (chunks.length > 0) {
        resolve(Buffer.concat(chunks));
      } else {
        reject(new Error('Stream closed without data'));
      }
    });
  });
}