from db import db
import os
import shutil  # used to remove the transcoded output directory recursively
import uuid

from transcode_video import transcode_video
from transcode_short_video import transcode_short_video
from wasabi_settings import settings
from video_dimensions import get_video_dimensions
from s3_client import s3_client


def process_transcoding_task(public_id: str, video_type: str, location: str):
    local_input_path = None
    output_dir = None
    try:
        # Mark the queue entry as in progress
        db.transcodequeue.update_one(
            {"public_id": public_id},
            {"$set": {"status": "processing"}},
        )

        # Generate a flat temporary filename that keeps the source extension
        ext = location.split(".")[-1]
        tmp_name = f"{uuid.uuid4()}.{ext}"
        local_input_path = os.path.join(settings.TEMP_DIR, tmp_name)

        # Create the output directory for the transcoded renditions
        output_dir = os.path.join(settings.TEMP_DIR, f"transcoded_{public_id}")
        os.makedirs(output_dir, exist_ok=True)

        # Choose the upload prefix based on the video type
        upload_base_prefix = "videos" if video_type == "video" else "clips"
        s3_upload_prefix = f"{upload_base_prefix}/{public_id}/"

        # Download the raw file from Wasabi
        print(f"Downloading {location} from bucket {settings.WASABI_BUCKET}...")
        s3_client.download_file(settings.WASABI_BUCKET, location, local_input_path)

        # Probe the source dimensions
        width, height = get_video_dimensions(local_input_path)

        # Transcode according to the video type
        if video_type == "video":
            transcode_video(local_input_path, output_dir, width, height)
        elif video_type == "clip":
            transcode_short_video(local_input_path, output_dir, width, height)
        else:
            # Fail loudly rather than marking an untranscoded task as completed
            raise ValueError(f"Unknown video_type: {video_type}")

        # Upload all transcoded files, preserving their layout under the prefix
        print(f"Uploading transcoded files under {s3_upload_prefix}...")
        for root, _, files in os.walk(output_dir):
            for file in files:
                local_file_path = os.path.join(root, file)
                relative = os.path.relpath(local_file_path, output_dir)
                s3_key = f"{s3_upload_prefix}{relative}".replace("\\", "/")
                s3_client.upload_file(local_file_path, settings.WASABI_BUCKET, s3_key)

        # Delete the raw source object; deletion failures are logged but not fatal
        print(f"Deleting original: {location}")
        try:
            response = s3_client.delete_object(
                Bucket=settings.WASABI_BUCKET,
                Key=location,
            )
            status = response.get("ResponseMetadata", {}).get("HTTPStatusCode")
            print(f"S3 delete operation response status: {status}")
        except s3_client.exceptions.NoSuchKey:
            # The object is already gone; treat it as success and log
            print(f"Warning: object to delete was not found: {location}")
        except Exception as e:
            # Log any other S3 error during deletion and continue;
            # re-raise here instead if deletion failure should fail the task
            print(f"Error during S3 deletion of {location}: {e}")

        # Mark the task as completed and record where the renditions live
        db.transcodequeue.update_one(
            {"public_id": public_id},
            {"$set": {"status": "completed", "s3_prefix": s3_upload_prefix}},
        )
    except Exception as e:
        print(f"Transcoding failed for {public_id}: {e}")
        db.transcodequeue.update_one(
            {"public_id": public_id},
            {"$set": {"status": "failed", "error": str(e)}},
        )
    finally:
        # Clean up the downloaded input file
        if local_input_path and os.path.exists(local_input_path):
            os.remove(local_input_path)
        # Clean up the transcoded output directory
        if output_dir and os.path.exists(output_dir):
            shutil.rmtree(output_dir)
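
# --- Usage sketch (not part of the original module; assumptions labeled) ---
# A minimal illustration of how a queue poller might invoke
# process_transcoding_task. The "queued" status value and the "type" /
# "location" field names on the queue document are assumptions; the module
# itself only shows "public_id" and the status transitions above.
if __name__ == "__main__":
    task = db.transcodequeue.find_one({"status": "queued"})  # assumed initial status
    if task:
        process_transcoding_task(
            public_id=task["public_id"],
            video_type=task["type"],      # assumed field name: "video" or "clip"
            location=task["location"],    # assumed field name: raw object key in Wasabi
        )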