Commit d68ba4e

Update langchain_agents.py
1 parent 46d5540 commit d68ba4e


app/services/langchain_agents.py

Lines changed: 26 additions & 11 deletions
@@ -8,6 +8,7 @@
 from langchain_groq import ChatGroq
 from .Pneumonia import Pneumonia
 from .Skin_cancer import MelanomaPipeline
+from .Eye import Eye
 from app.config import get_settings
 
 settings = get_settings()
@@ -20,7 +21,9 @@ def __init__(self, img_path: str, task: str):
         self.image_path = img_path
         self.task = task
         self.prompt = PromptTemplate(
-            template="You are an expert in {task} diagnosis. Given a {task} image, provide a detailed analysis and recommendations.\nPrediction: {prediction}\nConfidence: {confidence}",
+            template="""As a specialist in {task} diagnosis, analyze the provided {task} image and offer a comprehensive
+            report including the diagnosis, severity, and treatment recommendations.\nDiagnosis: {prediction}\nConfidence Level:
+            {confidence}""",
             input_variables=["task", "prediction", "confidence"]
         )
         self.llm = ChatGroq(
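For reference, a minimal sketch of how this revised prompt renders once a predictor result is available. Only the template string and input_variables come from the diff above; the import location, the sample values, and the print call are assumptions for illustration.

from langchain_core.prompts import PromptTemplate  # assumed import path; not visible in this diff

prompt = PromptTemplate(
    template="""As a specialist in {task} diagnosis, analyze the provided {task} image and offer a comprehensive
    report including the diagnosis, severity, and treatment recommendations.\nDiagnosis: {prediction}\nConfidence Level:
    {confidence}""",
    input_variables=["task", "prediction", "confidence"],
)

# Hypothetical values, purely to show the rendered prompt text.
print(prompt.format(task="pneumonia", prediction="PNEUMONIA", confidence=0.92))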
@@ -31,12 +34,16 @@ def __init__(self, img_path: str, task: str):
         # Initialize models only when needed
         self._pneumonia_model: Optional[Pneumonia] = None
         self._melanoma_model: Optional[MelanomaPipeline] = None
+        self._brain_model: Optional[Any] = None
+        self._heart_model: Optional[Any] = None
+        self._eye_model: Optional[Any] = None
 
         self.task_to_model = {
-            "Pneumonia": self.predict_pneumonia,
-            "Melanoma": self.predict_melanoma,
-            "Brain": self.predict_brain,
-            "Heart": self.predict_heart,
+            "pneumonia": self.predict_pneumonia,
+            "skin_cancer": self.predict_melanoma,
+            "brain_tumor": self.predict_brain,
+            "heart_disease": self.predict_heart,
+            "eye_disease": self.predict_eye,
         }
 
     def process_image(self) -> Optional[str]:
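The dictionary above now keys each predictor by a lowercase task identifier. The body of agent() is not shown in this diff, so the following is only a hedged sketch of the lookup it presumably performs:

from typing import Callable, Dict, Tuple

def dispatch(task: str, task_to_model: Dict[str, Callable[[], Tuple[str, float]]]) -> Tuple[str, float]:
    # Resolve the task key to its bound predictor and call it,
    # e.g. "eye_disease" -> predict_eye(); unknown keys fail loudly.
    predict_fn = task_to_model.get(task)
    if predict_fn is None:
        raise ValueError(f"Unsupported task: {task}")
    return predict_fn()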
@@ -50,11 +57,9 @@ def process_image(self) -> Optional[str]:
             base64_str = base64.b64encode(image_bytes).decode("utf-8")
             return base64_str
         except (IOError, OSError) as e:
-            logging.error(f"Error processing image: {e}")
             raise ValueError(f"Invalid image file: {e}")
         except Exception as e:
-            logging.error(f"Unexpected error processing image: {e}")
-            return None
+            return f"Error occurred while processing image: {e}"
 
     def agent(self) -> Tuple[str, float]:
         """Select the appropriate model for the task and make predictions."""
@@ -75,20 +80,31 @@ def melanoma_model(self) -> MelanomaPipeline:
             self._melanoma_model = MelanomaPipeline()
         return self._melanoma_model
 
+    @property
+    def eye_model(self) -> Eye:
+        if self._eye_model is None:
+            self._eye_model = Eye()
+        return self._eye_model
+
+    def predict_eye(self) -> Tuple[str, float]:
+        """Predict eye condition."""
+        try:
+            return self.eye_model.predict_with_torch(self.image_path)
+        except Exception as e:
+            raise RuntimeError(f"Error predicting Eye: {str(e)}")
+
     def predict_pneumonia(self) -> Tuple[str, float]:
         """Predict pneumonia using the Pneumonia model."""
         try:
             return self.pneumonia_model.predict_with_torch(self.image_path)
         except Exception as e:
-            logging.error(f"Error predicting Pneumonia: {e}")
             raise RuntimeError(f"Error predicting Pneumonia: {str(e)}")
 
     def predict_melanoma(self) -> Tuple[str, float]:
         """Predict melanoma using the MelanomaPipeline model."""
         try:
             return self.melanoma_model.predict_with_torch(self.image_path)
         except Exception as e:
-            logging.error(f"Error predicting Melanoma: {e}")
             raise RuntimeError(f"Error predicting Melanoma: {str(e)}")
 
     def predict_brain(self) -> Tuple[str, float]:
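The new eye_model property reuses the lazy-initialization pattern of pneumonia_model and melanoma_model: build the model on first access, then cache it. The Eye class itself is not part of this diff, so the stub below only sketches the interface the agent assumes, with an illustrative demo class name.

from typing import Optional, Tuple

class Eye:
    # Stand-in for app/services/Eye.py; the real implementation is not shown in this commit.
    def predict_with_torch(self, image_path: str) -> Tuple[str, float]:
        raise NotImplementedError  # assumed contract: return a (label, confidence) pair

class LazyEyeDemo:
    # Illustrative name; demonstrates the same lazy-loading property pattern as the diff above.
    def __init__(self) -> None:
        self._eye_model: Optional[Eye] = None

    @property
    def eye_model(self) -> Eye:
        if self._eye_model is None:
            self._eye_model = Eye()  # constructed only on first access, then cached
        return self._eye_model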
@@ -120,5 +136,4 @@ def response(self) -> dict:
                 "analysis": analysis
             }
         except Exception as e:
-            logging.error(f"Error generating response: {e}")
             return {"error": str(e)}
