Skip to content

Commit 718d330

Browse files
committed
1. The core logic for making the API call and parsing the response is moved into a dedicated function, callOllama. 2. Better error handling: callOllama now returns an Either String OllamaResponse. 3. The code now supports an optional second command-line argument for the model name, defaulting to llama3.2:latest, which adds flexibility.
1 parent 0b183b0 commit 718d330

File tree

1 file changed

+76
-43
lines changed

1 file changed

+76
-43
lines changed

ollama_commandline/Main.hs

Lines changed: 76 additions & 43 deletions
Original file line numberDiff line numberDiff line change
@@ -1,63 +1,96 @@
11
{-# LANGUAGE DuplicateRecordFields #-}
22
{-# LANGUAGE OverloadedRecordDot #-}
3+
{-# LANGUAGE DeriveGeneric #-} -- Added DeriveGeneric for clarity, though Generic is imported
34

4-
import Control.Monad.IO.Class (liftIO)
5+
import Control.Monad (when) -- Import when
56
import System.Environment (getArgs)
67
import qualified Data.Aeson as Aeson
78
import Data.Aeson (FromJSON, ToJSON)
8-
import GHC.Generics
9-
import Network.HTTP.Client (newManager, httpLbs, parseRequest, Request(..), RequestBody(..), responseBody, responseStatus, defaultManagerSettings)
10-
import Network.HTTP.Types.Status (statusCode)
9+
import GHC.Generics (Generic) -- Explicitly import Generic
10+
import Network.HTTP.Client
11+
( newManager
12+
, httpLbs
13+
, parseRequest
14+
, Request(..)
15+
, RequestBody(..)
16+
, responseBody
17+
, responseStatus
18+
, defaultManagerSettings
19+
, Manager -- Import Manager type
20+
)
21+
import Network.HTTP.Types.Status (statusIsSuccessful) -- Import statusIsSuccessful
1122

23+
-- Data types for Ollama interaction

-- | Request payload for the Ollama @\/api\/generate@ endpoint.
-- Field names deliberately mirror the JSON keys so the Generic-derived
-- 'ToJSON' instance produces the wire format without custom options.
data OllamaRequest = OllamaRequest
  { model  :: String  -- ^ name of the model to run
  , prompt :: String  -- ^ user prompt to send to the model
  , stream :: Bool    -- ^ 'False' requests a single, non-streaming response
  } deriving (Show, Generic, ToJSON)
1729

1830
-- | Response payload returned by Ollama's @\/api\/generate@ endpoint.
-- Field names match the JSON keys so the Generic-derived 'FromJSON'
-- instance decodes the body directly.
data OllamaResponse = OllamaResponse
  { model       :: String        -- ^ model that produced the answer
  , created_at  :: String        -- ^ server-side creation timestamp
  , response    :: String        -- ^ generated text (matches the actual JSON field name)
  , done        :: Bool          -- ^ whether generation has finished
  , done_reason :: Maybe String  -- ^ may be null/missing in some responses, so Maybe is safer
  } deriving (Show, Generic, FromJSON)
37+
38+
-- | Call the Ollama @\/api\/generate@ endpoint with the given model and
-- prompt, returning either a human-readable error or the decoded response.
--
-- Note: 'parseRequest' and 'httpLbs' may still throw IO exceptions
-- (invalid URL, network failure); only HTTP-status and JSON-decode
-- failures are reported via 'Left'.
callOllama :: Manager -> String -> String -> IO (Either String OllamaResponse)
callOllama manager modelName userPrompt = do
  initialRequest <- parseRequest "http://localhost:11434/api/generate"

  let ollamaRequestBody = OllamaRequest
        { model  = modelName
        , prompt = userPrompt
        , stream = False  -- keep stream off: we want a single response
        }

  let request = initialRequest
        { requestHeaders = [("Content-Type", "application/json")]
        , method = "POST"
        , requestBody = RequestBodyLBS $ Aeson.encode ollamaRequestBody
        }

  httpResponse <- httpLbs request manager

  let status = responseStatus httpResponse
      body   = responseBody httpResponse

  if statusIsSuccessful status
    then
      -- eitherDecode (instead of decode) preserves aeson's parse-error
      -- message, so the caller learns *why* decoding failed, not just
      -- that it did.
      case Aeson.eitherDecode body :: Either String OllamaResponse of
        Right ollamaResponse -> return $ Right ollamaResponse
        Left decodeErr ->
          return $ Left $
            "Error: Failed to parse JSON response: " ++ decodeErr
              ++ ". Body: " ++ show body
    else
      return $ Left $
        "Error: HTTP request failed with status " ++ show status
          ++ ". Body: " ++ show body
2570

2671
-- | Entry point: read the prompt (and optional model name) from the
-- command line, call Ollama, and print the result or an error.
main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> putStrLn "Usage: <program_name> <prompt> [model_name]"
    (promptArg:modelArgs) -> do
      -- Optional second argument selects the model.
      let modelName = case modelArgs of
            (m:_) -> m
            []    -> "llama3.2:latest" -- Default model

      manager <- newManager defaultManagerSettings

      putStrLn $ "Sending prompt '" ++ promptArg ++ "' to model '" ++ modelName ++ "'..."

      result <- callOllama manager modelName promptArg

      case result of
        Right ollamaResponse -> do
          putStrLn "\n--- Response ---"
          putStrLn ollamaResponse.response
          -- Pattern match instead of (/= Nothing) + show: this prints the
          -- reason text itself rather than the 'Just "..."' wrapper that
          -- 'show' would produce on a Maybe value.
          case ollamaResponse.done_reason of
            Just reason -> putStrLn $ "\nDone reason: " ++ reason
            Nothing     -> return ()
        Left err ->
          putStrLn $ "API Error: " ++ err

0 commit comments

Comments
 (0)