@@ -134,7 +134,6 @@ messages:
 		runCmd.SetArgs([]string{
 			"--file", tmp.Name(),
 			azuremodels.FormatIdentifier("openai", "test-model"),
-			"foo?",
 		})
 
 		_, err = runCmd.ExecuteC()
@@ -143,11 +142,85 @@ messages:
 		require.Equal(t, 3, len(capturedReq.Messages))
 		require.Equal(t, "You are a text summarizer.", *capturedReq.Messages[0].Content)
 		require.Equal(t, "Hello there!", *capturedReq.Messages[1].Content)
-		require.Equal(t, "foo?", *capturedReq.Messages[2].Content)
 
 		require.NotNil(t, capturedReq.Temperature)
 		require.Equal(t, 0.5, *capturedReq.Temperature)
 
 		require.Contains(t, out.String(), reply) // response streamed to output
 	})
+
+	t.Run("--file with {{input}} placeholder is substituted with stdin", func(t *testing.T) {
+		const yamlBody = `
+name: Summarizer
+description: Summarizes input text
+model: openai/test-model
+messages:
+  - role: system
+    content: You are a text summarizer.
+  - role: user
+    content: "{{input}}"
+`
+
+		tmp, err := os.CreateTemp(t.TempDir(), "*.prompt.yml")
+		require.NoError(t, err)
+		_, err = tmp.WriteString(yamlBody)
+		require.NoError(t, err)
+		require.NoError(t, tmp.Close())
+
+		client := azuremodels.NewMockClient()
+		modelSummary := &azuremodels.ModelSummary{
+			Name:      "test-model",
+			Publisher: "openai",
+			Task:      "chat-completion",
+		}
+		client.MockListModels = func(ctx context.Context) ([]*azuremodels.ModelSummary, error) {
+			return []*azuremodels.ModelSummary{modelSummary}, nil
+		}
+
+		var capturedReq azuremodels.ChatCompletionOptions
+		reply := "Summary - bar"
+		chatCompletion := azuremodels.ChatCompletion{
+			Choices: []azuremodels.ChatChoice{{
+				Message: &azuremodels.ChatChoiceMessage{
+					Content: util.Ptr(reply),
+					Role:    util.Ptr(string(azuremodels.ChatMessageRoleAssistant)),
+				},
+			}},
+		}
+		client.MockGetChatCompletionStream = func(ctx context.Context, opt azuremodels.ChatCompletionOptions) (*azuremodels.ChatCompletionResponse, error) {
+			capturedReq = opt
+			return &azuremodels.ChatCompletionResponse{
+				Reader: sse.NewMockEventReader([]azuremodels.ChatCompletion{chatCompletion}),
+			}, nil
+		}
+
+		// create a pipe to fake stdin so that isPipe(os.Stdin)==true
+		r, w, err := os.Pipe()
+		require.NoError(t, err)
+		oldStdin := os.Stdin
+		os.Stdin = r
+		defer func() { os.Stdin = oldStdin }()
+		piped := "Hello there!"
+		go func() {
+			_, _ = w.Write([]byte(piped))
+			_ = w.Close()
+		}()
+
+		out := new(bytes.Buffer)
+		cfg := command.NewConfig(out, out, client, true, 100)
+		runCmd := NewRunCommand(cfg)
+		runCmd.SetArgs([]string{
+			"--file", tmp.Name(),
+			azuremodels.FormatIdentifier("openai", "test-model"),
+		})
+
+		_, err = runCmd.ExecuteC()
+		require.NoError(t, err)
+
+		require.Len(t, capturedReq.Messages, 3)
+		require.Equal(t, "You are a text summarizer.", *capturedReq.Messages[0].Content)
+		require.Equal(t, piped, *capturedReq.Messages[1].Content) // {{input}} -> "Hello there!"
+
+		require.Contains(t, out.String(), reply)
+	})
 }