Commit e417ba4

666 user story fix digital avatar coverity issues (#717)
1 parent 36883e5 commit e417ba4

16 files changed, +351 -292 lines changed

usecases/ai/digital-avatar/backend/rag-backend/main.py

Lines changed: 23 additions & 3 deletions

@@ -27,6 +27,8 @@
 from utils.chroma_client import ChromaClient
 import openvino as ov
 
+from urllib.parse import urlparse
+
 logger = logging.getLogger('uvicorn.error')
 
 OPENAI_BASE_URL = os.environ.get("OPENAI_BASE_URL", "http://localhost:8012/v1")
@@ -83,7 +85,7 @@ class Configurations(BaseModel):
     use_rag: Optional[bool] = False
     embedding_device: Optional[str] = EMBEDDING_DEVICE
     reranker_device: Optional[str] = RERANKER_DEVICE
-
+
 def get_available_devices():
     devices = []
     core = ov.Core()
@@ -102,7 +104,8 @@ def get_models():
     if "/v1" in base_url:
         base_url = base_url.replace("/v1", "")
     try:
-        response = requests.get(f"{base_url}/api/tags")
+        url = urlparse(f"{base_url}/api/tags")
+        response = requests.get(url)
         response.raise_for_status()
         return response.json()
     except Exception as e:
@@ -115,6 +118,14 @@ async def lifespan(app: FastAPI):
     logger.info("Initializing server services ...")
     DEVICES = get_available_devices()
     if (CONFIG['use_rag']):
+        # Validate embedding_device and reranker_device
+        device_values = [d['value'] for d in DEVICES]
+        if CONFIG["embedding_device"] not in device_values:
+            logger.error(f"Embedding device {CONFIG['embedding_device']} not found in available devices: {device_values}")
+            raise HTTPException(status_code=400, detail=f"Embedding device {CONFIG['embedding_device']} not available.")
+        if CONFIG["reranker_device"] not in device_values:
+            logger.error(f"Reranker device {CONFIG['reranker_device']} not found in available devices: {device_values}")
+            raise HTTPException(status_code=400, detail=f"Reranker device {CONFIG['reranker_device']} not available.")
         CHROMA_CLIENT = ChromaClient(VECTORDB_DIR, CONFIG["embedding_device"], CONFIG["reranker_device"])
 
     # Check if LLM model exist in list of models
@@ -202,6 +213,14 @@ async def update_config(data: Configurations):
     }
 
     if (CONFIG['use_rag']):
+        # Validate embedding_device and reranker_device
+        device_values = [d['value'] for d in DEVICES]
+        if data.embedding_device not in device_values:
+            logger.error(f"Embedding device {data.embedding_device} not found in available devices: {device_values}")
+            raise HTTPException(status_code=400, detail=f"Embedding device {data.embedding_device} not available.")
+        if data.reranker_device not in device_values:
+            logger.error(f"Reranker device {data.reranker_device} not found in available devices: {device_values}")
+            raise HTTPException(status_code=400, detail=f"Reranker device {data.reranker_device} not available.")
         CHROMA_CLIENT = ChromaClient(VECTORDB_DIR, data.embedding_device, data.reranker_device)
 
     result = {"status": True, "data": None}
@@ -222,8 +241,9 @@ async def pull_model(data: IModel):
     # yield chunk
 
     # return StreamingResponse(stream_response(), media_type="application/json")
+    url = urlparse(f"{base_url}/api/pull")
 
-    response = requests.post(f"{base_url}/api/pull", json={"model": model, "stream": False})
+    response = requests.post(url, json={"model": model, "stream": False})
     return JSONResponse(content=jsonable_encoder({"status": True, "data": response.json()}))
 
 @app.delete("/v1/rag/text_embeddings/{uuid}", status_code=200)

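The validation added in main.py follows a simple pattern: enumerate the devices OpenVINO actually exposes, then reject any configured embedding or reranker device that is not in that list before the ChromaClient is built. A minimal standalone sketch of that pattern (the get_available_devices body and the FULL_DEVICE_NAME property are illustrative assumptions; only the device_values check and the HTTPException mirror the diff above):

import openvino as ov
from fastapi import HTTPException

def get_available_devices():
    # Ask OpenVINO which inference devices exist on this host (e.g. CPU, GPU, NPU)
    core = ov.Core()
    return [{"name": core.get_property(device, "FULL_DEVICE_NAME"), "value": device}
            for device in core.available_devices]

def validate_device(kind: str, device: str, devices: list) -> None:
    # Reject a configured device that OpenVINO did not report as available
    device_values = [d["value"] for d in devices]
    if device not in device_values:
        raise HTTPException(status_code=400, detail=f"{kind} device {device} not available.")

# Usage mirroring the lifespan hook: validate before constructing the vector store client
# validate_device("Embedding", CONFIG["embedding_device"], get_available_devices())
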
usecases/ai/digital-avatar/backend/rag-backend/scripts/convert_ollama.py

Lines changed: 4 additions & 2 deletions

@@ -7,20 +7,22 @@
 
 from transformers import AutoTokenizer
 from template import ollama_template
+from pathlib import Path
 
 MODELFILE_TEMPLATE = '''FROM ../llm
 
 TEMPLATE """{chat_template}"""
 '''
 
 def create_modelfile(model_path, save_path):
-    with open(f"{model_path}/config.json", 'r') as f:
+    config_path = Path(model_path) / "config.json"
+    with open(config_path, 'r') as f:
         _data = f.read()
         model_data = json.loads(_data)
 
     chat_template = ollama_template[model_data['model_type']]
     data = MODELFILE_TEMPLATE.format(chat_template=chat_template)
-    with open(save_path, "w") as f:
+    with open(Path(save_path), "w") as f:
         f.write(data)
 
 if __name__ == "__main__":

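The pathlib change in convert_ollama.py swaps f-string path concatenation for an explicit Path join when locating config.json; the same scheme is applied in chroma_client.py below. A small illustrative sketch (the directory name is hypothetical):

from pathlib import Path

model_path = Path("models/llm")            # hypothetical model directory
config_path = model_path / "config.json"   # explicit join instead of f"{model_path}/config.json"
print(config_path)                         # prints models/llm/config.json on POSIX systems
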
usecases/ai/digital-avatar/backend/rag-backend/utils/chroma_client.py

Lines changed: 2 additions & 2 deletions

@@ -21,7 +21,7 @@
 from langchain_community.document_compressors.openvino_rerank import OpenVINOReranker
 from langchain.retrievers import ContextualCompressionRetriever
 from langchain_chroma import Chroma
-
+from pathlib import Path
 
 class ChromaClient:
     def __init__(self, db_dir, embedding_device="CPU", reranker_device="CPU") -> None:
@@ -30,7 +30,7 @@ def __init__(self, db_dir, embedding_device="CPU", reranker_device="CPU") -> None:
         if not os.path.isdir(db_dir):
             self.logger.warning(
                 f"No chromaDB is found in {db_dir}. Creating a directory to store all the embeddings.")
-            os.makedirs(db_dir, exist_ok=True)
+            os.makedirs(Path(db_dir), exist_ok=True)
 
         self.db_dir = db_dir
 

usecases/ai/digital-avatar/frontend/app/(app)/(dashboard)/documents/page.tsx

Lines changed: 1 addition & 1 deletion

@@ -68,7 +68,7 @@ export default function DocumentList() {
       </div>
       {source?.data && taskStatus ? (
         (source.data && source.data.length > 0) || (taskStatus === "IN_PROGRESS") ? (
-          <DocumentSourceTable data={status ? source?.data ?? [] : []} taskData={taskData?.data} refetch={refetchTextEmbeddingSources} />
+          <DocumentSourceTable data={status ? source.data ?? [] : []} taskData={taskData?.data} refetch={refetchTextEmbeddingSources} />
         ) : (
           <div className="p-4 md:p-6">No RAG documents found.</div>
         )

usecases/ai/digital-avatar/frontend/app/(app)/(dashboard)/metric/performance-results/page.tsx

Lines changed: 26 additions & 21 deletions

@@ -23,13 +23,13 @@ import { ConfigSection } from "@/components/config/ConfigSection";
 import { SelectedPipelineConfig } from "@/types/config";
 import { PerformanceResultsMetadata } from "@/types/performanceResults";
 
-const chartModuleConfig: Record<string, string> = {
-  denoise: "Denoise",
-  stt: "STT",
-  llm: "LLM",
-  tts: "TTS",
-  lipsync: "Lipsync",
-}
+const chartModuleConfig: Record<string, string>[] = [
+  { type: "denoise", name: "Denoise" },
+  { type: "stt", name: "STT" },
+  { type: "llm", name: "LLM" },
+  { type: "tts", name: "TTS" },
+  { type: "lipsync", name: "Lipsync" },
+];
 
 const defaultChartConfig: Record<string, { label: string; color: string }> = {
   config1: {
@@ -47,9 +47,15 @@ const defaultChartConfig: Record<string, { label: string; color: string }> = {
 } satisfies ChartConfig
 
 const generateRandomColor = () => {
-  const hue = Math.floor(Math.random() * 360); // Random hue value between 0 and 360
-  const saturation = Math.floor(Math.random() * 50) + 50; // Saturation between 50% and 100%
-  const lightness = Math.floor(Math.random() * 30) + 40; // Lightness between 40% and 70%
+  // Use cryptographically secure random number generator
+  const getRandom = (range: number) => {
+    const randomArray = new Uint32Array(1);
+    window.crypto.getRandomValues(randomArray);
+    return Math.floor((randomArray[0] / (2 ** 32)) * range);
+  };
+  const hue = getRandom(360); // Random hue value between 0 and 360
+  const saturation = getRandom(50) + 50; // Saturation between 50% and 100%
+  const lightness = getRandom(30) + 40; // Lightness between 40% and 70%
   return `hsl(${hue}, ${saturation}%, ${lightness}%)`;
 };
 
@@ -64,20 +70,19 @@ export default function LatencyDashboard() {
     return config satisfies ChartConfig;
   }, {});
 
-  const overviewChartData = Object.keys(chartModuleConfig).map((moduleKey) => {
-    const moduleData: { module: string;[key: string]: number | string } = { module: chartModuleConfig[moduleKey] };
-
+  const overviewChartData = chartModuleConfig.map((module) => {
+    const moduleData: { module: string; [key: string]: number | string } = { module: module.name };
     data?.docs.forEach((result) => {
       let value = 0;
 
-      if (moduleKey === "denoise") {
-        value = result[moduleKey]?.inferenceLatency || 0;
-      } else if (moduleKey === "stt") {
-        value = (result[moduleKey]?.inferenceLatency || 0) + (result[moduleKey]?.httpLatency || 0);
-      } else if (moduleKey === "llm") {
-        value = result[moduleKey]?.totalLatency + result[moduleKey]?.ttft || 0;
-      } else if (moduleKey === "tts" || moduleKey === "lipsync") {
-        const results = result[moduleKey] || [];
+      if (module.type === "denoise") {
+        value = result[module.type]?.inferenceLatency || 0;
+      } else if (module.type === "stt") {
+        value = (result[module.type]?.inferenceLatency || 0) + (result[module.type]?.httpLatency || 0);
+      } else if (module.type === "llm") {
+        value = result[module.type]?.totalLatency + result[module.type]?.ttft || 0;
+      } else if (module.type === "tts" || module.type === "lipsync") {
+        const results = result[module.type] || [];
         value = results.reduce((acc, item) => acc + (item.httpLatency || 0) + (item.inferenceLatency || 0), 0);
       }
 

usecases/ai/digital-avatar/frontend/app/(app)/api/avatar-skins/route.ts

Lines changed: 3 additions & 2 deletions

@@ -30,10 +30,11 @@ export async function DELETE(request: NextRequest) {
     }
     const skinsDir = path.join(process.cwd(), "public/assets/avatar-skins")
     const filePath = path.join(skinsDir, `${skinName}.mp4`)
-    if (!fs.existsSync(filePath)) {
+    const fileURL = new URL(`file://${filePath}`)
+    if (!fs.existsSync(fileURL)) {
       return NextResponse.json({ success: false, error: "Skin not found" }, { status: 404 })
     }
-    fs.unlinkSync(filePath)
+    fs.unlinkSync(fileURL)
     return NextResponse.json({ success: true, message: "Skin deleted" })
   } catch (error) {
     console.error("Error deleting avatar skin:", error)

usecases/ai/digital-avatar/frontend/components/chat/Chatbox.tsx

Lines changed: 2 additions & 1 deletion

@@ -245,7 +245,8 @@ export default function Chatbox() {
       const reversed = false
 
       // Lipsync
-      const { data: lipsyncData } = await getLipsync.mutateAsync({ data: { filename: ttsData.filename }, startIndex: startIndex.toString(), reversed: reversed ? "1" : "0" })
+      // const { data: lipsyncData } = await getLipsync.mutateAsync({ data: { filename: ttsData.filename }, startIndex: startIndex.toString(), reversed: reversed ? "1" : "0" })
+      const { data: lipsyncData } = await getLipsync.mutateAsync({ data: { filename: ttsData.filename }, startIndex: startIndex.toString(), reversed: "0"})
       updateVideo(index, lipsyncData.url, startIndex, reversed, ttsData.duration)
 
       setPerformanceResults(prev => {

0 commit comments
