
Commit 9a161e9

added Ollama LLM client command line tool
1 parent f72612b commit 9a161e9

File tree: 3 files changed (+114, −0 lines)

ollama_commandline/Main.hs

Lines changed: 65 additions & 0 deletions
@@ -0,0 +1,65 @@
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE OverloadedRecordDot #-}

import Control.Monad.IO.Class (liftIO)
import System.Environment (getArgs)
import qualified Data.Aeson as Aeson
import Data.Aeson (FromJSON, ToJSON)
import GHC.Generics
import Network.HTTP.Client (newManager, httpLbs, parseRequest, Request(..), RequestBody(..), responseBody, responseStatus, defaultManagerSettings)
import Network.HTTP.Types.Status (statusCode)
--import qualified Data.Text as T
--import Data.Text.Encoding (encodeUtf8)

data OllamaRequest = OllamaRequest
  { model :: String
  , prompt :: String
  , stream :: Bool
  } deriving (Show, Generic, ToJSON)

data OllamaResponse = OllamaResponse
  { model :: String
  , created_at :: String
  , response :: String -- This matches the actual field name in the JSON
  , done :: Bool
  , done_reason :: String
  } deriving (Show, Generic, FromJSON)

main :: IO ()
main = do
  args <- getArgs
  case args of
    [] -> putStrLn "Error: Please provide a prompt as a command-line argument."
    (arg:_) -> do
      manager <- newManager defaultManagerSettings

      initialRequest <- parseRequest "http://localhost:11434/api/generate"

      let ollamaRequestBody = OllamaRequest
            { model = "llama3.2:latest" -- You can change this to your preferred model
            , prompt = arg
            , stream = False
            }

      let request = initialRequest
            { requestHeaders = [("Content-Type", "application/json")]
            , method = "POST"
            , requestBody = RequestBodyLBS $ Aeson.encode ollamaRequestBody
            }

      httpResponse <- httpLbs request manager
      -- liftIO $ putStrLn $ "httpResponse:" ++ show httpResponse -- debug

      let responseStatus' = responseStatus httpResponse

      if statusCode responseStatus' == 200
        then do
          let maybeOllamaResponse =
                Aeson.decode (responseBody httpResponse) :: Maybe OllamaResponse
          case maybeOllamaResponse of
            Just ollamaResponse -> do
              liftIO $ putStrLn $ "Response:\n\n" ++ ollamaResponse.response
            Nothing -> do
              liftIO $ putStrLn "Error: Failed to parse response"
        else do
          putStrLn $ "Error: " ++ show responseStatus'

ollama_commandline/README.md

Lines changed: 13 additions & 0 deletions
@@ -0,0 +1,13 @@
# Command line tool to access a local Ollama LLM server

Run example:

    cabal run ollama-client "how much is 4 + 11 + 13?"

or:

    cabal run ollama-client "write Python script to print out 11th and 12th prime numbers"

or:

    cabal run ollama-client "Write a Haskell hello world program"

ollama_commandline/ollama-client.cabal

Lines changed: 36 additions & 0 deletions
@@ -0,0 +1,36 @@
name: ollama-client
version: 0.1.0.0
synopsis: A simple client for Ollama API
description: A Haskell client to interact with local Ollama API
license: MIT
license-file: LICENSE
author: Your Name
maintainer: your.email@example.com
category: AI
build-type: Simple
cabal-version: >=1.10

executable ollama-client
  main-is: Main.hs
  build-depends: base >= 4.7 && < 5
               , aeson
               , http-client
               , text
               , http-types
               , vector

  if os(darwin)
    ghc-options:
    ld-options:

  -- Language extensions used in the code
  default-extensions:
    OverloadedStrings
    DeriveGeneric
    LambdaCase
    DeriveAnyClass

  program-default-options
    hsc2hs: --with-hsc2hs=/opt/homebrew/bin/hsc2hs

  -- Compiler flags
  ghc-options: -Wall -O2
