Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 4 additions & 1 deletion fastapi-starter/backend/app/config/settings.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,7 +48,10 @@ class Settings(BaseSettings):

# MongoDB settings
MONGODB_URI: str
DATABASE_NAME: str
DATABASE_NAME: str = "sample_mflix"

# ML settings
ENABLE_ML: bool = True

# Logging
LOG_LEVEL: str = "INFO"
Expand Down
11 changes: 7 additions & 4 deletions fastapi-starter/backend/app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,10 +26,13 @@ async def lifespan(app: FastAPI):
logger.error(f"Failed to connect to MongoDB: {e}")

# Pre-load ML model
try:
ml_service.load_model()
except Exception as e:
logger.error(f"Failed to pre-load ML model: {e}")
if settings.ENABLE_ML:
try:
ml_service.load_model()
except Exception as e:
logger.error(f"Failed to pre-load ML model: {e}")
else:
logger.info("ML model pre-loading skipped (ENABLE_ML=False)")

yield

Expand Down
27 changes: 16 additions & 11 deletions fastapi-starter/backend/app/modules/api/v1/ml/service.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
"""ML Service for sentiment analysis using PyTorch and Transformers."""

import torch
from transformers import pipeline
from typing import List, Dict, Any
import time

from app.utils.logger import get_logger

logger = get_logger(__name__)
Expand All @@ -14,21 +11,29 @@ class MLService:
def __init__(self, model_name: str = "distilbert-base-uncased-finetuned-sst-2-english"):
    """
    Initialize the ML service without importing heavy ML dependencies.

    Both the classifier and the torch device are resolved lazily: the
    model is loaded on the first call to ``load_model`` and the device
    string is computed on first access of the ``device`` property, so
    constructing the service is cheap even when torch/transformers are
    absent or ML is disabled.

    Model reference:
    https://huggingface.co/docs/transformers/en/model_doc/distilbert?usage=Pipeline

    Args:
        model_name: HuggingFace model name.
    """
    self.model_name = model_name
    # Loaded lazily by load_model(); stays None until first use.
    self.classifier = None
    # Device string ("cuda"/"cpu") cached by the `device` property on
    # first access; must NOT be assigned via self.device here, because
    # `device` is a read-only property on this class.
    self._device_cache = None
    logger.info(f"MLService initialized (lazy-loading enabled for {model_name})")

@property
def device(self):
"""Lazy-loaded device info."""
if self._device_cache is None:
try:
import torch
self._device_cache = "cuda" if torch.cuda.is_available() else "cpu"
except ImportError:
self._device_cache = "cpu"
return self._device_cache

def load_model(self):
"""Load the model if not already loaded."""
if self.classifier is None:
try:
from transformers import pipeline
logger.info(f"Loading ML model: {self.model_name} on {self.device}")
self.classifier = pipeline(
"sentiment-analysis",
model=self.model_name,
Expand Down
5 changes: 3 additions & 2 deletions fastapi-starter/backend/render.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -15,5 +15,6 @@ services:
sync: false
description: "MongoDB Connection String"
- key: DATABASE_NAME
sync: false
description: "MongoDB Database Name"
value: sample_mflix
- key: ENABLE_ML
value: false