Safe Haskell | Safe-Inferred |
---|---|
Language | Haskell2010 |
Synopsis
- generate :: GenerateOps -> IO (Either String GenerateResponse)
- generateJson :: (ToJSON jsonResult, FromJSON jsonResult) => GenerateOps -> jsonResult -> Maybe Int -> IO (Either String jsonResult)
- defaultGenerateOps :: GenerateOps
- data GenerateOps = GenerateOps {}
- data GenerateResponse = GenerateResponse {}
- chat :: ChatOps -> IO (Either String ChatResponse)
- chatJson :: (FromJSON jsonResult, ToJSON jsonResult) => ChatOps -> jsonResult -> Maybe Int -> IO (Either String jsonResult)
- data Role
- defaultChatOps :: ChatOps
- data ChatResponse = ChatResponse {}
- data ChatOps = ChatOps {}
- embedding :: Text -> Text -> IO (Maybe EmbeddingResp)
- embeddingOps :: Text -> Text -> Maybe Bool -> Maybe Text -> IO (Maybe EmbeddingResp)
- copyModel :: Text -> Text -> IO ()
- createModel :: Text -> Maybe Text -> Maybe FilePath -> IO ()
- createModelOps :: Text -> Maybe Text -> Maybe Bool -> Maybe FilePath -> IO ()
- deleteModel :: Text -> IO ()
- list :: IO (Maybe Models)
- ps :: IO (Maybe RunningModels)
- push :: Text -> IO ()
- pushOps :: Text -> Maybe Bool -> Maybe Bool -> IO ()
- pull :: Text -> IO ()
- pullOps :: Text -> Maybe Bool -> Maybe Bool -> IO ()
- showModel :: Text -> IO (Maybe ShowModelResponse)
- showModelOps :: Text -> Maybe Bool -> IO (Maybe ShowModelResponse)
- data ShowModelResponse = ShowModelResponse {}
- newtype Models = Models [ModelInfo]
- data ModelInfo = ModelInfo {name :: Text, modifiedAt :: UTCTime, size :: Int64, digest :: Text, details :: ModelDetails}
- newtype RunningModels = RunningModels [RunningModel]
- data RunningModel = RunningModel {name_ :: Text, modelName :: Text, size_ :: Int64, modelDigest :: Text, modelDetails :: ModelDetails, expiresAt :: UTCTime, sizeVRam :: Int64}
- data Message = Message {}
Main APIs
Generate Texts
generate :: GenerateOps -> IO (Either String GenerateResponse) Source #
Generate function that returns either a GenerateResponse or an error message. It takes a GenerateOps configuration and performs a request to the Ollama generate API.
Examples:
Basic usage without streaming:
```haskell
let ops = GenerateOps
      { modelName = "llama3.2"
      , prompt = "Tell me a joke."
      , suffix = Nothing
      , images = Nothing
      , format = Nothing
      , system = Nothing
      , template = Nothing
      , stream = Nothing
      , raw = Nothing
      , keepAlive = Nothing
      }
result <- generate ops
case result of
  Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
  Right response -> print response
```
Usage with streaming to print responses to the console:
```haskell
void $
  generate
    defaultGenerateOps
      { modelName = "llama3.2"
      , prompt = "what is functional programming?"
      , stream = Just (T.putStr . response_, pure ())
      }
```
In this example, the first function in the stream tuple processes each chunk of the response by printing it, and the second function is a simple no-op flush.

generateJson Source #
:: (ToJSON jsonResult, FromJSON jsonResult) | |
=> GenerateOps | |
-> jsonResult | Haskell type that you want your result in |
-> Maybe Int | Max retries |
-> IO (Either String jsonResult) |
generateJson is a higher-level function that takes a GenerateOps value (like generate) together with a Haskell type (that has ToJSON and FromJSON instances) and returns the response parsed into that type.

This function simply calls generate with an extra prompt appended to it, telling the LLM to return the response in a certain JSON format, and then deserializes the response. It is helpful when you want to use the LLM to do something programmatic.
For example:

```haskell
let expectedJsonStructure = Example
      { sortedList = ["sorted List here"]
      , wasListAlreadySorted = False
      }
eRes2 <-
  generateJson
    defaultGenerateOps
      { modelName = "llama3.2"
      , prompt = "Sort given list: [4, 2, 3, 67]. Also tell whether list was already sorted or not."
      }
    expectedJsonStructure
    Nothing
case eRes2 of
  Left e -> putStrLn e
  Right r -> print ("JSON response: " :: String, r)
```
Output:

```
("JSON response: ",Example {sortedList = ["1","2","3","4"], wasListAlreadySorted = False})
```
Note: When passing the type, construct the value in a way that helps the LLM understand each field. For example, in the example above, sortedList's value is written as "sorted List here". This helps the LLM understand the context better.
You can also provide the number of retries in case the LLM fails to return the response in correct JSON on the first attempt.
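The Example type used above is not part of this library; a minimal sketch of how you might define it (any type with ToJSON and FromJSON instances works):

```haskell
{-# LANGUAGE DeriveGeneric #-}

import Data.Aeson (FromJSON, ToJSON)
import GHC.Generics (Generic)

-- A record the LLM should fill in; the generic aeson instances
-- give it the JSON shape that generateJson asks the model for.
data Example = Example
  { sortedList :: [String]
  , wasListAlreadySorted :: Bool
  } deriving (Show, Eq, Generic)

instance FromJSON Example
instance ToJSON Example
```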
defaultGenerateOps :: GenerateOps Source #
A function to create a default GenerateOps type with preset values.
Example:
```haskell
let ops = defaultGenerateOps
generate ops
```
This will generate a response using the default configuration.
data GenerateOps Source #
Input type for generate functions. This data type represents all possible configurations that you can pass to the Ollama generate API.
Example:
```haskell
let ops = GenerateOps
      { modelName = "llama3.2"
      , prompt = "What is the meaning of life?"
      , suffix = Nothing
      , images = Nothing
      , format = Just "text"
      , system = Nothing
      , template = Nothing
      , stream = Nothing
      , raw = Just False
      , keepAlive = Just "yes"
      }
```
Constructors

GenerateOps
Instances
ToJSON GenerateOps Source #
Defined in Data.Ollama.Generate

Show GenerateOps Source #
Defined in Data.Ollama.Generate

Eq GenerateOps Source #
Defined in Data.Ollama.Generate
data GenerateResponse Source #
Result type for generate function containing the model's response and meta-information.
Constructors

GenerateResponse
Instances
FromJSON GenerateResponse Source #
Defined in Data.Ollama.Generate

Show GenerateResponse Source #
Defined in Data.Ollama.Generate

Eq GenerateResponse Source #
Defined in Data.Ollama.Generate
Chat with LLMs
chat :: ChatOps -> IO (Either String ChatResponse) Source #
Initiates a chat session with the specified ChatOps configuration and returns either a ChatResponse or an error message. This function sends a request to the Ollama chat API with the given options.
Example:
```haskell
let ops = defaultChatOps
result <- chat ops
case result of
  Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
  Right response -> print response
```
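To send your own message instead of the default one, you can override the messages field. A minimal sketch, assuming the Message constructor and field names shown in the chatJson example below:

```haskell
import Data.List.NonEmpty (NonEmpty (..))

main :: IO ()
main = do
  -- Build a single user message; the third argument is for optional images.
  let msg = Message User "What is functional programming?" Nothing
      ops = defaultChatOps { chatModelName = "llama3.2", messages = msg :| [] }
  result <- chat ops
  case result of
    Left errorMsg -> putStrLn ("Error: " ++ errorMsg)
    Right response -> print response
```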
chatJson Source #

:: (FromJSON jsonResult, ToJSON jsonResult) | |
=> ChatOps | |
-> jsonResult | Haskell type that you want your result in |
-> Maybe Int | Max retries |
-> IO (Either String jsonResult) |
chatJson is a higher-level function that takes a ChatOps value (like chat) together with a Haskell type (that has ToJSON and FromJSON instances) and returns the response parsed into that type.

This function simply calls chat with an extra prompt appended to it, telling the LLM to return the response in a certain JSON format, and then deserializes the response. It is helpful when you want to use the LLM to do something programmatic.
For example:

```haskell
let expectedJsonStructure = Example
      { sortedList = ["sorted List here"]
      , wasListAlreadySorted = False
      }
let msg0 = Ollama.Message User "Sort given list: [4, 2, 3, 67]. Also tell whether list was already sorted or not." Nothing
eRes3 <-
  chatJson
    defaultChatOps
      { Chat.chatModelName = "llama3.2"
      , Chat.messages = msg0 :| []
      }
    expectedJsonStructure
    (Just 2)
print eRes3
```

Output:

```
Example {sortedList = ["1","2","3","4"], wasListAlreadySorted = False}
```
Note: When passing the type, construct the value in a way that helps the LLM understand each field. For example, in the example above, sortedList's value is written as "sorted List here". This helps the LLM understand the context better.
You can also provide the number of retries in case the LLM fails to return the response in correct JSON on the first attempt.
data Role Source #

Enumerated roles that can participate in a chat.
defaultChatOps :: ChatOps Source #
A default configuration for initiating a chat with a model. This can be used as a starting point and modified as needed.
Example:
```haskell
let ops = defaultChatOps { chatModelName = "customModel" }
chat ops
```
data ChatResponse Source #
Constructors

ChatResponse
Instances
FromJSON ChatResponse Source #
Defined in Data.Ollama.Chat

Show ChatResponse Source #
Defined in Data.Ollama.Chat

Eq ChatResponse Source #
Defined in Data.Ollama.Chat
data ChatOps Source #

Constructors

ChatOps
Embeddings
embedding :: Text -> Text -> IO (Maybe EmbeddingResp) Source #

Embedding API.

embeddingOps :: Text -> Text -> Maybe Bool -> Maybe Text -> IO (Maybe EmbeddingResp) Source #

Embedding API with additional options.
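A minimal usage sketch; the signature alone does not say which Text is which, so it assumes the first argument is the model name and the second is the input to embed:

```haskell
main :: IO ()
main = do
  -- Request an embedding; Nothing is returned if the request fails.
  mResp <- embedding "llama3.2" "What is functional programming?"
  case mResp of
    Nothing   -> putStrLn "embedding request failed"
    Just resp -> print resp
```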
Ollama operations
Copy Models
copyModel :: Text -> Text -> IO () Source #

Copy a model from source to destination.
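For example, to keep a backup copy of a local model (a sketch; both arguments are model names, source first, per the description above):

```haskell
copyModel "llama3.2" "llama3.2-backup"
```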
Create Models
createModel :: Text -> Maybe Text -> Maybe FilePath -> IO () Source #

Create a new model. Please note: if you specify both ModelFile and Path, ModelFile will be used.
createModelOps :: Text -> Maybe Text -> Maybe Bool -> Maybe FilePath -> IO () Source #

Create a new model either from a ModelFile or a Path, with options. Please note: if you specify both ModelFile and Path, ModelFile will be used.
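A sketch of both variants, assuming from the signature that the Maybe Text argument carries the modelfile contents and the Maybe FilePath a path to a modelfile on disk:

```haskell
-- From modelfile contents passed inline (assumed argument order):
createModel "mario" (Just "FROM llama3.2\nSYSTEM You are Mario from Super Mario Bros.") Nothing

-- From a modelfile on disk:
createModel "mario" Nothing (Just "./Modelfile")
```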
Delete Models

deleteModel :: Text -> IO () Source #
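For example, to delete the model created above:

```haskell
deleteModel "mario"
```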
List Models
list :: IO (Maybe Models) Source #

ps :: IO (Maybe RunningModels) Source #

List currently running models.
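A sketch that prints the names of local and running models, using the Models, ModelInfo, RunningModels, and RunningModel types from the synopsis:

```haskell
import qualified Data.Text.IO as T

main :: IO ()
main = do
  -- All locally available models.
  mModels <- list
  case mModels of
    Nothing             -> putStrLn "could not list models"
    Just (Models infos) -> mapM_ (T.putStrLn . name) infos
  -- Models currently loaded and running.
  mRunning <- ps
  case mRunning of
    Nothing                       -> putStrLn "could not list running models"
    Just (RunningModels running)  -> mapM_ (T.putStrLn . name_) running
```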
Push and Pull
push :: Text -> IO () Source #

pushOps :: Text -> Maybe Bool -> Maybe Bool -> IO () Source #

Push a model with options.
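A sketch, assuming by analogy with pullOps below that the two Maybe Bool flags control insecure connections and streaming; treat that as an assumption:

```haskell
-- Push with default options, then with assumed insecure/stream flags.
push "myModel"
pushOps "myModel" (Just False) (Just True)
```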
pull :: Text -> IO () Source #

Pull a model using default options. This simplifies the pull operation by not requiring additional options.
Example:
pull "myModel"
This will pull "myModel" using default settings (no insecure connections and no streaming).
pullOps :: Text -> Maybe Bool -> Maybe Bool -> IO () Source #

Pull a model with additional options for insecure connections and streaming. This function interacts directly with the Ollama API to download the specified model.
Example:
pullOps "myModel" (Just True) (Just True)
This will attempt to pull "myModel" with insecure connections allowed and enable streaming.
Show Model Info
showModel Source #

:: Text | model name |
-> IO (Maybe ShowModelResponse) |
Show a given model's information. Higher-level API for show.

Since: 1.0.0.0
showModelOps :: Text -> Maybe Bool -> IO (Maybe ShowModelResponse) Source #

Show a given model's information, with options.

Since: 1.0.0.0
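A usage sketch; what the Maybe Bool flag toggles is not documented on this page, so treating it as a verbose switch is an assumption:

```haskell
main :: IO ()
main = do
  mInfo <- showModelOps "llama3.2" (Just True)  -- Just True: assumed verbose flag
  case mInfo of
    Nothing   -> putStrLn "show request failed"
    Just info -> print info
```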
Types
data ShowModelResponse Source #
Instances
FromJSON ShowModelResponse Source #
The instance for show model response
Defined in Data.Ollama.Show

Show ShowModelResponse Source #
Defined in Data.Ollama.Show

Eq ShowModelResponse Source #
Defined in Data.Ollama.Show
newtype RunningModels Source #
Instances
FromJSON RunningModels Source #
Defined in Data.Ollama.Ps

Show RunningModels Source #
Defined in Data.Ollama.Ps

Eq RunningModels Source #
Defined in Data.Ollama.Ps
data RunningModel Source #
Constructors

RunningModel
  name_ :: Text
  modelName :: Text
  size_ :: Int64
  modelDigest :: Text
  modelDetails :: ModelDetails
  expiresAt :: UTCTime
  sizeVRam :: Int64
Instances
FromJSON RunningModel Source #
Defined in Data.Ollama.Ps

Show RunningModel Source #
Defined in Data.Ollama.Ps

Eq RunningModel Source #
Defined in Data.Ollama.Ps
data Message Source #

Represents a message within a chat, including its role and content.
Instances
FromJSON Message Source #
Defined in Data.Ollama.Chat

ToJSON Message Source #

Generic Message Source #

Show Message Source #

Eq Message Source #

type Rep Message Source #
Defined in Data.Ollama.Chat

type Rep Message = D1 ('MetaData "Message" "Data.Ollama.Chat" "ollama-haskell-0.1.2.0-FhW2R5HBnzqI9p8FACrOm4" 'False) (C1 ('MetaCons "Message" 'PrefixI 'True) (S1 ('MetaSel ('Just "role") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Role) :*: (S1 ('MetaSel ('Just "content") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text) :*: S1 ('MetaSel ('Just "images") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe [Text])))))
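Given the ToJSON instance, a Message can be serialised directly with aeson's encode. A small sketch; the field names role, content, and images come from the Rep above, but the exact rendering of Role is not shown on this page:

```haskell
import Data.Aeson (encode)
import qualified Data.ByteString.Lazy.Char8 as BSL

main :: IO ()
main =
  -- Prints the JSON encoding of a user message; the precise output
  -- depends on how the library's instances render Role and optional fields.
  BSL.putStrLn (encode (Message User "Hello!" Nothing))
```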