# openai

This provides a binding to OpenAI's API using `servant`.

## Example usage
```haskell
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE OverloadedLists #-}

module Main where

import Data.Foldable (traverse_)
import OpenAI.V1
import OpenAI.V1.Chat.Completions

import qualified Data.Text as Text
import qualified Data.Text.IO as Text.IO
import qualified System.Environment as Environment

main :: IO ()
main = do
    key <- Environment.getEnv "OPENAI_KEY"

    clientEnv <- getClientEnv "https://api.openai.com"

    let Methods{ createChatCompletion } = makeMethods clientEnv (Text.pack key) Nothing Nothing

    text <- Text.IO.getLine

    ChatCompletionObject{ choices } <- createChatCompletion _CreateChatCompletion
        { messages = [ User{ content = [ Text{ text } ], name = Nothing } ]
        , model = "gpt-4o-mini"
        }

    let display Choice{ message } = Text.IO.putStrLn (messageToContent message)

    traverse_ display choices
```
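`makeMethods` gives you a record of ordinary `IO` functions, so the same `createChatCompletion` can be reused across calls. As a variation on the example above, here is a minimal sketch of a prompt loop. It assumes the same pragmas and imports as that example, that the field has type `CreateChatCompletion -> IO ChatCompletionObject` (matching how it is used above), and it omits error handling:

```haskell
-- A sketch, not part of the package: keep prompting on stdin and printing
-- the model's reply, reusing the function obtained from makeMethods above.
chatLoop :: (CreateChatCompletion -> IO ChatCompletionObject) -> IO ()
chatLoop createChatCompletion = do
    text <- Text.IO.getLine

    ChatCompletionObject{ choices } <- createChatCompletion _CreateChatCompletion
        { messages = [ User{ content = [ Text{ text } ], name = Nothing } ]
        , model = "gpt-4o-mini"
        }

    let display Choice{ message } = Text.IO.putStrLn (messageToContent message)

    traverse_ display choices

    chatLoop createChatCompletion
```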
### Responses API (JSON)

```haskell
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Text as Text
import qualified OpenAI.V1 as V1
import qualified OpenAI.V1.Responses as Responses
import qualified System.Environment

main :: IO ()
main = do
    key <- System.Environment.getEnv "OPENAI_KEY"

    env <- V1.getClientEnv "https://api.openai.com"

    let V1.Methods{ createResponse } = V1.makeMethods env (Text.pack key) Nothing Nothing

    let req = Responses._CreateResponse
            { Responses.model = "gpt-5"
            , Responses.input = Just (Responses.Input
                [ Responses.Item_Input_Message
                    { Responses.role = Responses.User
                    , Responses.content = [ Responses.Input_Text{ Responses.text = "Say hello in one sentence." } ]
                    , Responses.status = Nothing
                    }
                ])
            }

    res <- createResponse req

    print res
```
### Responses API (Structured Outputs)

```haskell
{-# LANGUAGE DuplicateRecordFields #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedLists #-}
{-# LANGUAGE OverloadedStrings #-}

import qualified Data.Aeson as Aeson
import qualified Data.Text as Text
import qualified OpenAI.V1 as V1
import qualified OpenAI.V1.Responses as Responses
import qualified System.Environment

main :: IO ()
main = do
    key <- System.Environment.getEnv "OPENAI_KEY"

    env <- V1.getClientEnv "https://api.openai.com"

    let V1.Methods{ createResponse } = V1.makeMethods env (Text.pack key) Nothing Nothing

    let schemaObject = Aeson.object
            [ "type" Aeson..= ("object" :: Text.Text)
            , "properties" Aeson..= Aeson.object
                [ "greeting" Aeson..= Aeson.object [ "type" Aeson..= ("string" :: Text.Text) ] ]
            , "required" Aeson..= (["greeting"] :: [Text.Text])
            , "additionalProperties" Aeson..= False
            ]

    let req = Responses._CreateResponse
            { Responses.model = "gpt-5"
            , Responses.input = Just (Responses.Input
                [ Responses.Item_Input_Message
                    { Responses.role = Responses.User
                    , Responses.content = [ Responses.Input_Text{ Responses.text = "Say hello." } ]
                    , Responses.status = Nothing
                    }
                ])
            , Responses.text = Just Responses._TextConfig
                { Responses.format = Responses.TextFormat_JSON_Schema
                    { Responses.name = "greeting"
                    , Responses.description = Just "Greeting payload"
                    , Responses.schema = Just schemaObject
                    , Responses.strict = Just True
                    }
                }
            }

    res <- createResponse req

    print res
```
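With `strict = Just True`, the model's reply is constrained to match the schema above, so you can decode it into an ordinary Haskell type. Here is a minimal sketch using `aeson`, assuming you have already pulled the raw JSON text out of the response (the exact accessor depends on the response type and is not shown here); the `Greeting` type and `decodeGreeting` helper are illustrative, not part of this package:

```haskell
{-# LANGUAGE DeriveGeneric #-}

import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy as BSL
import qualified Data.Text as Text
import GHC.Generics (Generic)

-- Mirrors the JSON schema declared above: an object with one required
-- "greeting" string field.
newtype Greeting = Greeting { greeting :: Text.Text }
    deriving (Generic, Show)

instance Aeson.FromJSON Greeting

-- Decode the raw structured-output payload. How you extract that payload
-- from the value returned by createResponse is not shown here.
decodeGreeting :: BSL.ByteString -> Maybe Greeting
decodeGreeting = Aeson.decode
```

For example, the payload `{"greeting": "hi"}` decodes to `Just (Greeting {greeting = "hi"})`.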
## Setup

### Using Nix with Flakes (Recommended)

This project uses Nix with flakes for development environment setup.
- Ensure you have Nix with flakes enabled
- Copy the sample environment file and configure your OpenAI API key:

  ```bash
  # Copy the sample environment file
  cp .envrc.sample .envrc
  ```
- Edit the `.envrc` file and replace the placeholder API key with your actual key
- Use direnv to automatically load the development environment:

  ```bash
  # Install direnv if you haven't already
  # macOS: brew install direnv
  # Linux: your-package-manager install direnv

  # Enable direnv hook in your shell
  eval "$(direnv hook bash)" # or zsh, fish, etc.

  # Clone the repository and enter the directory
  git clone https://github.com/MercuryTechnologies/openai.git
  cd openai

  # Allow direnv (this will automatically load the environment)
  direnv allow
  ```
### Manual Setup

Without Nix:

```bash
# Clone the repository
git clone https://github.com/MercuryTechnologies/openai.git
cd openai

# Build with cabal
cabal build
```
### Environment Variables

Set your OpenAI API key as an environment variable:

```bash
# Option 1: Set directly in your shell
export OPENAI_KEY="your-openai-api-key"

# Option 2: Using .envrc with direnv (recommended)
(umask 077; cp .envrc.sample .envrc)
# Edit .envrc to add your API key
direnv allow
```
The API key is needed for running the test suite and example program.
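If you read the key in your own code, you may prefer a clearer failure when the variable is missing than the bare exception `getEnv` throws. Here is a minimal sketch using `lookupEnv` from `base`; the `getApiKey` helper is illustrative, not part of this package:

```haskell
import qualified System.Environment as Environment

-- Read OPENAI_KEY, failing with a readable message when it is unset.
getApiKey :: IO String
getApiKey = do
    maybeKey <- Environment.lookupEnv "OPENAI_KEY"
    case maybeKey of
        Just key -> pure key
        Nothing  -> fail "OPENAI_KEY is not set; see the Environment Variables section above"
```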
## Testing

Run the test suite:

```bash
cabal test
```

The test suite is in the `tasty/` directory, with test data located in `tasty/data/`.
## Running the Example

```bash
# Make sure your API key is set (either via .envrc or export)
# If using direnv with proper .envrc setup, this happens automatically

# Build and run the example
cabal run openai-example
```