diff --git a/ses-lambda/lambda-function.py b/ses-lambda/lambda-function.py
index 29e8255..8a1a7ea 100644
--- a/ses-lambda/lambda-function.py
+++ b/ses-lambda/lambda-function.py
@@ -19,20 +19,24 @@ MAX_EMAIL_SIZE = int(os.environ.get('MAX_EMAIL_SIZE', '10485760'))
 s3_client = boto3.client('s3')
 
 def mark_email_processed(bucket, key, s3_client, processor='lambda'):
-    """Setzt in S3 das processed-Flag per Metadata."""
-    s3_client.copy_object(
-        Bucket=bucket,
-        Key=key,
-        CopySource={'Bucket': bucket, 'Key': key},
-        Metadata={
-            'processed': 'true',
-            'processed_timestamp': str(int(time.time())),
-            'processor': processor
-        },
-        MetadataDirective='REPLACE'
-    )
-    logger.info(f"Marked S3 object {bucket}/{key} as processed")
-
+    """Mark the S3 object as processed by rewriting its metadata in place."""
+    try:
+        s3_client.copy_object(
+            Bucket=bucket,
+            Key=key,
+            CopySource={'Bucket': bucket, 'Key': key},
+            Metadata={
+                'processed': 'true',
+                'processed_timestamp': str(int(time.time())),
+                'processor': processor
+            },
+            MetadataDirective='REPLACE'
+        )
+        logger.info(f"Marked S3 object {bucket}/{key} as processed")
+    except s3_client.exceptions.ClientError as e:
+        # Best-effort: log and continue so a failed marking does not fail the whole invocation.
+        logger.error(f"Fehler beim Markieren {bucket}/{key}: {e}")
+
 def call_api_once(payload, domain, request_id):
     """Single-shot POST, kein Retry."""
     url = f"{API_BASE_URL}/process/{domain}"
@@ -64,6 +68,12 @@ def lambda_handler(event, context):
 
     # Kopf-Check
     head = s3_client.head_object(Bucket=bucket, Key=key)
+
+    metadata = head.get('Metadata', {})
+    if metadata.get('processed') == 'true':
+        logger.info(f"[{req_id}] Skipping already processed object")
+        return {'statusCode': 200, 'body': 'Already processed'}
+
     size = head['ContentLength']
     if size > MAX_EMAIL_SIZE:
         logger.warning(f"[{req_id}] Email too large: {size} bytes")