@@ -21,7 +21,7 @@ class TestPlayground:
    def test_method_run(self, client: Lemma) -> None:
        playground = client.playground.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
        )
        assert_matches_type(object, playground, path=["response"])

@@ -30,7 +30,7 @@ def test_method_run(self, client: Lemma) -> None:
    def test_method_run_with_all_params(self, client: Lemma) -> None:
        playground = client.playground.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
            evaluator_id="evaluator_id",
            input_variables={"foo": "bar"},
        )
@@ -41,7 +41,7 @@ def test_method_run_with_all_params(self, client: Lemma) -> None:
    def test_raw_response_run(self, client: Lemma) -> None:
        response = client.playground.with_raw_response.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
        )

        assert response.is_closed is True
@@ -54,7 +54,7 @@ def test_raw_response_run(self, client: Lemma) -> None:
    def test_streaming_response_run(self, client: Lemma) -> None:
        with client.playground.with_streaming_response.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
@@ -75,7 +75,7 @@ class TestAsyncPlayground:
    async def test_method_run(self, async_client: AsyncLemma) -> None:
        playground = await async_client.playground.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
        )
        assert_matches_type(object, playground, path=["response"])

@@ -84,7 +84,7 @@ async def test_method_run(self, async_client: AsyncLemma) -> None:
    async def test_method_run_with_all_params(self, async_client: AsyncLemma) -> None:
        playground = await async_client.playground.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
            evaluator_id="evaluator_id",
            input_variables={"foo": "bar"},
        )
@@ -95,7 +95,7 @@ async def test_method_run_with_all_params(self, async_client: AsyncLemma) -> Non
    async def test_raw_response_run(self, async_client: AsyncLemma) -> None:
        response = await async_client.playground.with_raw_response.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
        )

        assert response.is_closed is True
@@ -108,7 +108,7 @@ async def test_raw_response_run(self, async_client: AsyncLemma) -> None:
    async def test_streaming_response_run(self, async_client: AsyncLemma) -> None:
        async with async_client.playground.with_streaming_response.run(
            model="model",
-            prompt_id="prompt_id",
+            prompt_content="prompt_content",
        ) as response:
            assert not response.is_closed
            assert response.http_request.headers.get("X-Stainless-Lang") == "python"
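
In short, `playground.run` now takes the prompt text inline via `prompt_content` instead of referencing a stored prompt via `prompt_id`. A minimal usage sketch of the updated call follows; the `lemma` import path and the `api_key` constructor argument are assumptions for illustration, not confirmed by this diff:

```python
from lemma import Lemma  # assumed import path for the generated SDK

client = Lemma(api_key="my-api-key")  # hypothetical credentials

# `prompt_content` replaces `prompt_id`: the prompt text is sent inline
# rather than looked up by a stored prompt identifier.
result = client.playground.run(
    model="model",
    prompt_content="Summarize: {foo}",
    evaluator_id="evaluator_id",      # optional, as exercised in the all-params tests
    input_variables={"foo": "bar"},   # optional template variables
)
```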