
Commit a75a005

fix:enhance examples after bugs
1 parent 0f35e7b commit a75a005

2 files changed: +36 −25 lines

examples/framework_integrations/ray_serve_integration_guide.py

Lines changed: 26 additions & 22 deletions
@@ -76,18 +76,16 @@ def __init__(self, model_type: str = "classifier"):
     async def __call__(self, request) -> Dict[str, Any]:
         """Handle incoming requests with DataSON serialization."""
         try:
-            # Parse request using DataSON's smart loading
+            # Parse request - already returns parsed dict
             raw_data = await request.json()

-            # Use load_smart for intelligent data parsing
-            parsed_data = ds.load_smart(raw_data, config=API_CONFIG)
-
-            # Extract the actual data payload
-            if isinstance(parsed_data, dict) and "data" in parsed_data:
-                input_data = parsed_data["data"]
-                metadata = parsed_data.get("metadata", {})
+            # Since raw_data is already parsed, we don't need load_smart for parsing
+            # We can use it for enhanced processing if we need type detection
+            if isinstance(raw_data, dict) and "data" in raw_data:
+                input_data = raw_data["data"]
+                metadata = raw_data.get("metadata", {})
             else:
-                input_data = parsed_data
+                input_data = raw_data
                 metadata = {}

             # Make prediction
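The hunk above drops ds.load_smart from request parsing because `await request.json()` already returns a parsed Python dict, as the new comment notes. A minimal sketch of the corrected handler pattern follows; the handler name is illustrative, the `import datason as ds` line is an assumption matching the example's `ds` alias, and the model call is elided:

import datason as ds  # assumed import, matching the example's `ds` alias

async def handle(request):
    # request.json() already yields a Python dict; no extra parsing step is needed
    raw_data = await request.json()
    if isinstance(raw_data, dict) and "data" in raw_data:
        input_data = raw_data["data"]
        metadata = raw_data.get("metadata", {})
    else:
        input_data, metadata = raw_data, {}
    # ... run the model on input_data here ...
    return ds.dump_api({"echo": input_data, "metadata": metadata})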
@@ -128,11 +126,10 @@ async def batch_predict(self, request) -> Dict[str, Any]:
         """Handle batch predictions."""
         try:
             raw_data = await request.json()
-            parsed_data = ds.load_smart(raw_data, config=API_CONFIG)

-            # Extract batch data
-            batch_data = parsed_data.get("batch_data", [])
-            batch_metadata = parsed_data.get("metadata", {})
+            # Extract batch data directly since raw_data is already parsed
+            batch_data = raw_data.get("batch_data", [])
+            batch_metadata = raw_data.get("metadata", {})

             if not batch_data:
                 return ds.dump_api({"error": "No batch data provided"})
@@ -169,11 +166,12 @@ async def __call__(self, request) -> Dict[str, Any]:
         self.request_count += 1

         try:
-            # Parse incoming request
+            # Parse incoming request - already returns parsed dict
             raw_data = await request.json()

-            # Transform using DataSON (e.g., for data cleaning, validation)
-            transformed_data = ds.load_smart(raw_data, config=API_CONFIG)
+            # For demonstration, convert to JSON string first if we want to use load_smart
+            json_string = ds.dumps_json(raw_data)
+            transformed_data = ds.load_smart(json_string, config=API_CONFIG)

             # Add proxy metadata
             proxy_enhanced = {
@@ -284,10 +282,15 @@ async def run_ray_serve_demo():
    # Serialize with DataSON API mode
    serialized = ds.dump_api(request)
    print("📤 Serialized Request:")
-    print(ds.dumps_json(serialized, indent=2)[:200] + "...")
-
-    # Parse back with smart loading
-    parsed = ds.load_smart(serialized, config=API_CONFIG)
+    serialized_str = ds.dumps_json(serialized, indent=2)
+    if len(serialized_str) > 200:
+        print(serialized_str[:200] + "...")
+    else:
+        print(serialized_str)
+
+    # Parse back with smart loading (convert to JSON string first)
+    json_string = ds.dumps_json(serialized)
+    parsed = ds.load_smart(json_string, config=API_CONFIG)
     print("📥 Parsed Back Successfully:", parsed["metadata"]["request_type"])

     return {"status": "demo_completed_without_ray"}
@@ -352,8 +355,9 @@ def test_ray_serve_integration():
    # Create request data
    request_data = {"data": test_data, "metadata": {"client_id": "test", "timestamp": time.time()}}

-    # Test smart loading
-    smart_loaded = ds.load_smart(request_data)
+    # Test smart loading (convert dict to JSON string first for proper load_smart usage)
+    json_string = ds.dumps_json(request_data)
+    smart_loaded = ds.load_smart(json_string)
     print(f" Smart loaded: {type(smart_loaded)}")

     # Test API serialization
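The later hunks in this file all apply the same round-trip fix: ds.load_smart is fed a JSON string produced by ds.dumps_json rather than a raw dict. A short sketch of that pattern, assuming `import datason as ds` as in the example module and using illustrative payload values (the module's API_CONFIG is omitted here to keep the snippet self-contained):

import time
import datason as ds  # assumed import, matching the example's `ds` alias

# Illustrative request payload; field names mirror the example, values are made up
request_data = {"data": [1.0, 2.0, 3.0], "metadata": {"client_id": "test", "timestamp": time.time()}}

# Serialize the dict to a JSON string, then let load_smart do type detection on load
json_string = ds.dumps_json(request_data)
smart_loaded = ds.load_smart(json_string)
print(type(smart_loaded), smart_loaded.get("metadata", {}))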

examples/framework_integrations/seldon_kserve_integration.py

Lines changed: 10 additions & 3 deletions
@@ -503,12 +503,18 @@ def run_k8s_ml_serving_demo():
    # Generate Seldon deployment
    print("\n📋 Seldon Core Deployment YAML:")
    seldon_yaml = demo.create_seldon_deployment_yaml()
-    print(seldon_yaml[:500] + "..." if len(seldon_yaml) > 500 else seldon_yaml)
+    if len(seldon_yaml) > 500:
+        print(seldon_yaml[:500] + "...")
+    else:
+        print(seldon_yaml)

    # Generate KServe service
    print("\n📋 KServe InferenceService YAML:")
    kserve_yaml = demo.create_kserve_service_yaml()
-    print(kserve_yaml[:500] + "..." if len(kserve_yaml) > 500 else kserve_yaml)
+    if len(kserve_yaml) > 500:
+        print(kserve_yaml[:500] + "...")
+    else:
+        print(kserve_yaml)

    print("\n3️⃣ Sample API Requests:")
    sample_requests = demo.create_sample_requests()
@@ -536,7 +542,8 @@ def run_k8s_ml_serving_demo():
        print(f"\n🔄 Processing {platform}:")

        # Parse request with DataSON
-        parsed_request = ds.load_smart(request_data, config=API_CONFIG)
+        json_string = ds.dumps_json(request_data)
+        parsed_request = ds.load_smart(json_string, config=API_CONFIG)

        # Extract features
        if "data" in parsed_request and "ndarray" in parsed_request["data"]:
