Copyright | (c) 2025 Tushar Adhatrao |
---|---|
License | MIT |
Maintainer | Tushar Adhatrao <tusharadhatrao@gmail.com> |
Stability | experimental |
Safe Haskell | Safe-Inferred |
Language | Haskell2010 |
Langchain.LLM.OpenAI
Contents
Description
OpenAI implementation of LangChain's LLM interface. Note: this implementation has not yet been tested.
Synopsis
- data OpenAI = OpenAI {}
- data ChatCompletionRequest = ChatCompletionRequest {
- messages :: [Message]
- model :: Text
- frequencyPenalty :: Maybe Double
- logitBias :: Maybe (Map Text Double)
- logprobs :: Maybe Bool
- maxCompletionTokens :: Maybe Int
- maxTokens :: Maybe Int
- metadata :: Maybe (Map Text Text)
- modalities :: Maybe [Modality]
- n :: Maybe Int
- parallelToolCalls :: Maybe Bool
- prediction :: Maybe PredictionOutput
- presencePenalty :: Maybe Double
- reasoningEffort :: Maybe ReasoningEffort
- responseFormat :: Maybe ResponseFormat
- seed :: Maybe Int
- serviceTier :: Maybe Text
- stop :: Maybe (Either Text [Text])
- store :: Maybe Bool
- stream :: Maybe Bool
- streamOptions :: Maybe StreamOptions
- temperature :: Maybe Double
- toolChoice :: Maybe ToolChoice
- tools :: Maybe [Tool_]
- topLogprobs :: Maybe Int
- topP :: Maybe Double
- user :: Maybe Text
- webSearchOptions :: Maybe WebSearchOptions
- audio :: Maybe AudioConfig
- data ChatCompletionResponse = ChatCompletionResponse {}
- data Message = Message {
- role :: Role
- content :: Maybe MessageContent
- name :: Maybe Text
- functionCall :: Maybe FunctionCall_
- toolCalls :: Maybe [ToolCall]
- toolCallId :: Maybe Text
- audio :: Maybe AudioResponse
- refusal :: Maybe Text
- data Role
- data MessageContent
- data TextContent = TextContent {
- text_ :: Text
- contentType :: Text
- data Tool_ = Tool_ {}
- data Function_ = Function_ {}
- data ToolCall = ToolCall {}
- data FunctionCall_ = FunctionCall_ {}
- data Usage = Usage {}
- data Choice = Choice {
- finishReason :: FinishReason
- index :: Int
- logprobs :: Maybe LogProbs
- message :: Message
- data FinishReason
- data LogProbs = LogProbs {
- content :: Maybe [LogProbContent]
- refusal :: Maybe [LogProbContent]
- data LogProbContent = LogProbContent {
- bytes :: Maybe [Int]
- logprob :: Double
- token :: Text
- topLogprobs :: [TopLogProb]
- data TopLogProb = TopLogProb {}
- data AudioConfig = AudioConfig {}
- data AudioResponse = AudioResponse {}
- data Modality
- data ToolChoice
- data SpecificToolChoice = SpecificToolChoice {}
- data ReasoningEffort
- data PredictionOutput = PredictionOutput {}
- data PredictionContent = PredictionContent {}
- data ResponseFormat
- data StreamOptions = StreamOptions {
- includeUsage :: Bool
- data WebSearchOptions = WebSearchOptions {}
- data UserLocation = UserLocation {}
- data ApproximateLocation = ApproximateLocation {
- locationType :: Text
- data CompletionTokensDetails = CompletionTokensDetails {}
- data PromptTokensDetails = PromptTokensDetails {
- audioTokens :: Int
- cachedTokens :: Int
- createChatCompletion :: Text -> ChatCompletionRequest -> IO (Either String ChatCompletionResponse)
- defaultChatCompletionRequest :: ChatCompletionRequest
- defaultMessage :: Message
Documentation
Data Types
data ChatCompletionRequest Source #
Main request type for chat completions. Contains all parameters for configuring the OpenAI chat completion API call.
Constructors
ChatCompletionRequest | |
Fields
|
Instances
data ChatCompletionResponse Source #
Constructors
ChatCompletionResponse | |
Instances
data Message Source #
Represents a single message in a conversation. Contains the role, content, and optional metadata such as function calls or audio responses.
Constructors
Message | |
Fields
|
Instances
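Since most Message fields are optional, the simplest way to build one is by record update on defaultMessage (documented under Functions). A minimal sketch; it assumes defaultMessage leaves the optional fields as Nothing, and the userMessage helper is purely illustrative:
import Data.Text (Text)
import Langchain.LLM.OpenAI

-- Hypothetical helper: build a plain user message.
-- Assumes defaultMessage sets the optional fields to Nothing.
-- Several record types in this module share field names, so the
-- DuplicateRecordFields extension may be needed for the update.
userMessage :: Text -> Message
userMessage t = defaultMessage
  { role    = User
  , content = Just (StringContent t)
  }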
data Role Source #
Represents the different roles in a conversation:
- User: Human user input
- Assistant: AI-generated response
- System: System-level instructions
- Developer: Special role for developer messages
- Tool: Tool interaction messages
- Function: Function call messages
Instances
FromJSON Role Source # | |
Defined in Langchain.LLM.OpenAI | |
ToJSON Role Source # | |
Generic Role Source # | |
Show Role Source # | |
Eq Role Source # | |
type Rep Role Source # | |
Defined in Langchain.LLM.OpenAI type Rep Role = D1 ('MetaData "Role" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) ((C1 ('MetaCons "User" 'PrefixI 'False) (U1 :: Type -> Type) :+: (C1 ('MetaCons "Assistant" 'PrefixI 'False) (U1 :: Type -> Type) :+: C1 ('MetaCons "System" 'PrefixI 'False) (U1 :: Type -> Type))) :+: (C1 ('MetaCons "Developer" 'PrefixI 'False) (U1 :: Type -> Type) :+: (C1 ('MetaCons "Tool" 'PrefixI 'False) (U1 :: Type -> Type) :+: C1 ('MetaCons "Function" 'PrefixI 'False) (U1 :: Type -> Type)))) |
data MessageContent Source #
Constructors
StringContent Text | |
ContentParts [TextContent] |
Instances
data TextContent Source #
Constructors
TextContent | |
Fields
|
Instances
data Tool_ Source #
Instances
FromJSON Tool_ Source # | |
Defined in Langchain.LLM.OpenAI | |
ToJSON Tool_ Source # | |
Generic Tool_ Source # | |
Show Tool_ Source # | |
Eq Tool_ Source # | |
type Rep Tool_ Source # | |
Defined in Langchain.LLM.OpenAI type Rep Tool_ = D1 ('MetaData "Tool_" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "Tool_" 'PrefixI 'True) (S1 ('MetaSel ('Just "toolType") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text) :*: S1 ('MetaSel ('Just "function") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Function_))) |
data Function_ Source #
Constructors
Function_ | |
Instances
FromJSON Function_ Source # | |
Defined in Langchain.LLM.OpenAI | |
ToJSON Function_ Source # | |
Generic Function_ Source # | |
Show Function_ Source # | |
Eq Function_ Source # | |
type Rep Function_ Source # | |
Defined in Langchain.LLM.OpenAI type Rep Function_ = D1 ('MetaData "Function_" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "Function_" 'PrefixI 'True) ((S1 ('MetaSel ('Just "name") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text) :*: S1 ('MetaSel ('Just "description") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe Text))) :*: (S1 ('MetaSel ('Just "parameters") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe Value)) :*: S1 ('MetaSel ('Just "strict") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe Bool))))) |
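A tool definition pairs a Tool_ wrapper with a Function_ whose parameters field is a raw aeson Value, typically a JSON Schema. The sketch below is a hedged illustration: the "function" tool type string and the schema shape follow the general OpenAI API convention and are not confirmed by this module.
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (object, (.=))
import Langchain.LLM.OpenAI

-- Illustrative weather-lookup tool; the JSON-Schema parameters value is an assumption.
weatherTool :: Tool_
weatherTool = Tool_
  { toolType = "function"
  , function = Function_
      { name        = "get_weather"
      , description = Just "Look up the current weather for a city"
      , parameters  = Just $ object
          [ "type"       .= ("object" :: String)
          , "properties" .= object
              [ "city" .= object [ "type" .= ("string" :: String) ] ]
          , "required"   .= (["city"] :: [String])
          ]
      , strict      = Nothing
      }
  }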
data ToolCall Source #
Instances
FromJSON ToolCall Source # | |
Defined in Langchain.LLM.OpenAI | |
ToJSON ToolCall Source # | |
Generic ToolCall Source # | |
Show ToolCall Source # | |
Eq ToolCall Source # | |
type Rep ToolCall Source # | |
Defined in Langchain.LLM.OpenAI type Rep ToolCall = D1 ('MetaData "ToolCall" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "ToolCall" 'PrefixI 'True) (S1 ('MetaSel ('Just "id_") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text) :*: (S1 ('MetaSel ('Just "toolType") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text) :*: S1 ('MetaSel ('Just "function") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 FunctionCall_)))) |
data FunctionCall_ Source #
Constructors
FunctionCall_ | |
Instances
data Usage Source #
Constructors
Usage | |
Instances
FromJSON Usage Source # | |
Defined in Langchain.LLM.OpenAI | |
Generic Usage Source # | |
Show Usage Source # | |
Eq Usage Source # | |
type Rep Usage Source # | |
Defined in Langchain.LLM.OpenAI type Rep Usage = D1 ('MetaData "Usage" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "Usage" 'PrefixI 'True) ((S1 ('MetaSel ('Just "completionTokens") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Int) :*: S1 ('MetaSel ('Just "promptTokens") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Int)) :*: (S1 ('MetaSel ('Just "totalTokens") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Int) :*: (S1 ('MetaSel ('Just "completionTokensDetails") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe CompletionTokensDetails)) :*: S1 ('MetaSel ('Just "promptTokensDetails") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe PromptTokensDetails)))))) |
data Choice Source #
Constructors
Choice | |
Fields
|
Instances
FromJSON Choice Source # | |
Defined in Langchain.LLM.OpenAI | |
Generic Choice Source # | |
Show Choice Source # | |
Eq Choice Source # | |
type Rep Choice Source # | |
Defined in Langchain.LLM.OpenAI type Rep Choice = D1 ('MetaData "Choice" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "Choice" 'PrefixI 'True) ((S1 ('MetaSel ('Just "finishReason") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 FinishReason) :*: S1 ('MetaSel ('Just "index") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Int)) :*: (S1 ('MetaSel ('Just "logprobs") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe LogProbs)) :*: S1 ('MetaSel ('Just "message") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Message)))) |
data FinishReason Source #
Constructors
Stop | |
Length | |
ContentFilter | |
ToolCalls | |
FunctionCall |
Instances
FromJSON FinishReason Source # | |
Defined in Langchain.LLM.OpenAI | |
Generic FinishReason Source # | |
Defined in Langchain.LLM.OpenAI Associated Types type Rep FinishReason :: Type -> Type # | |
Show FinishReason Source # | |
Defined in Langchain.LLM.OpenAI Methods showsPrec :: Int -> FinishReason -> ShowS # show :: FinishReason -> String # showList :: [FinishReason] -> ShowS # | |
Eq FinishReason Source # | |
Defined in Langchain.LLM.OpenAI | |
type Rep FinishReason Source # | |
Defined in Langchain.LLM.OpenAI type Rep FinishReason = D1 ('MetaData "FinishReason" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) ((C1 ('MetaCons "Stop" 'PrefixI 'False) (U1 :: Type -> Type) :+: C1 ('MetaCons "Length" 'PrefixI 'False) (U1 :: Type -> Type)) :+: (C1 ('MetaCons "ContentFilter" 'PrefixI 'False) (U1 :: Type -> Type) :+: (C1 ('MetaCons "ToolCalls" 'PrefixI 'False) (U1 :: Type -> Type) :+: C1 ('MetaCons "FunctionCall" 'PrefixI 'False) (U1 :: Type -> Type)))) |
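Each Choice carries a FinishReason, which is worth checking before using the message, for example to detect truncation or tool calls. A small sketch using only accessors defined in this module:
import Langchain.LLM.OpenAI

-- Describe why a choice stopped generating.
describeFinish :: Choice -> String
describeFinish c = case finishReason c of
  Stop          -> "completed normally"
  Length        -> "hit the token limit"
  ContentFilter -> "stopped by the content filter"
  ToolCalls     -> "requested tool calls"
  FunctionCall  -> "requested a legacy function call"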
data LogProbs Source #
Constructors
LogProbs | |
Fields
|
Instances
FromJSON LogProbs Source # | |
Defined in Langchain.LLM.OpenAI | |
Generic LogProbs Source # | |
Show LogProbs Source # | |
Eq LogProbs Source # | |
type Rep LogProbs Source # | |
Defined in Langchain.LLM.OpenAI type Rep LogProbs = D1 ('MetaData "LogProbs" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "LogProbs" 'PrefixI 'True) (S1 ('MetaSel ('Just "content") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe [LogProbContent])) :*: S1 ('MetaSel ('Just "refusal") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe [LogProbContent])))) |
data LogProbContent Source #
Constructors
LogProbContent | |
Fields
|
Instances
data TopLogProb Source #
Instances
FromJSON TopLogProb Source # | |
Defined in Langchain.LLM.OpenAI | |
Generic TopLogProb Source # | |
Defined in Langchain.LLM.OpenAI Associated Types type Rep TopLogProb :: Type -> Type # | |
Show TopLogProb Source # | |
Defined in Langchain.LLM.OpenAI Methods showsPrec :: Int -> TopLogProb -> ShowS # show :: TopLogProb -> String # showList :: [TopLogProb] -> ShowS # | |
Eq TopLogProb Source # | |
Defined in Langchain.LLM.OpenAI | |
type Rep TopLogProb Source # | |
Defined in Langchain.LLM.OpenAI type Rep TopLogProb = D1 ('MetaData "TopLogProb" "Langchain.LLM.OpenAI" "langchain-hs-0.0.1.0-inplace" 'False) (C1 ('MetaCons "TopLogProb" 'PrefixI 'True) (S1 ('MetaSel ('Just "bytes") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 (Maybe [Int])) :*: (S1 ('MetaSel ('Just "logprob") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Double) :*: S1 ('MetaSel ('Just "token") 'NoSourceUnpackedness 'NoSourceStrictness 'DecidedLazy) (Rec0 Text)))) |
data AudioConfig Source #
Configuration for audio processing. Specifies the format and voice preferences for text-to-speech output.
Constructors
AudioConfig | |
Instances
data AudioResponse Source #
Constructors
AudioResponse | |
Instances
data Modality Source #
Constructors
TextModality | |
AudioModality |
data ToolChoice Source #
Constructors
None | |
Auto | |
Required | |
SpecificTool SpecificToolChoice |
Instances
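The tools and toolChoice request fields work together: tools supplies the definitions, while toolChoice controls whether the model may, must, or must not call them. A hedged sketch reusing the hypothetical userMessage and weatherTool helpers from the earlier sketches:
{-# LANGUAGE OverloadedStrings #-}

-- Require the model to call one of the supplied tools.
toolRequest :: ChatCompletionRequest
toolRequest = defaultChatCompletionRequest
  { messages   = [userMessage "What is the weather in Pune?"]  -- helper from the Message sketch
  , tools      = Just [weatherTool]                            -- tool from the Function_ sketch
  , toolChoice = Just Required
  }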
data SpecificToolChoice Source #
Constructors
SpecificToolChoice | |
Instances
data ReasoningEffort Source #
Instances
data PredictionOutput Source #
Constructors
PredictionOutput | |
Fields |
Instances
data PredictionContent Source #
Constructors
PredictionContent | |
Fields
|
Instances
data ResponseFormat Source #
Constructors
JsonObjectFormat | |
JsonSchemaFormat Value |
Instances
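ResponseFormat selects between the API's structured-output modes: JsonObjectFormat requests any valid JSON object, while JsonSchemaFormat wraps a raw aeson Value. A hedged sketch; the exact schema envelope the endpoint expects is not specified by this module, so the shape below is illustrative only:
{-# LANGUAGE OverloadedStrings #-}
import Data.Aeson (object, (.=))
import Langchain.LLM.OpenAI

-- Request output conforming to a named JSON schema (shape is an assumption).
reportFormat :: ResponseFormat
reportFormat = JsonSchemaFormat $ object
  [ "name"   .= ("weather_report" :: String)
  , "schema" .= object
      [ "type"       .= ("object" :: String)
      , "properties" .= object
          [ "summary" .= object [ "type" .= ("string" :: String) ] ]
      ]
  ]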
data StreamOptions Source #
Constructors
StreamOptions | |
Fields
|
Instances
data WebSearchOptions Source #
Constructors
WebSearchOptions | |
Fields |
Instances
data UserLocation Source #
Constructors
UserLocation | |
Fields |
Instances
data ApproximateLocation Source #
Constructors
ApproximateLocation | |
Fields
|
Instances
data CompletionTokensDetails Source #
Constructors
CompletionTokensDetails | |
Fields |
Instances
data PromptTokensDetails Source #
Constructors
PromptTokensDetails | |
Fields
|
Instances
Functions
createChatCompletion :: Text -> ChatCompletionRequest -> IO (Either String ChatCompletionResponse) Source #
Creates a chat completion request. Sends the request to the OpenAI API and returns the parsed response.
Example usage:
response <- createChatCompletion "your-api-key" request
case response of
Right res -> print (choices res)
Left err -> putStrLn err
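A slightly fuller, hedged sketch of the same call: it reads the key from an assumed OPENAI_API_KEY environment variable, builds the request from the defaults exported below, and prints each returned choice's message. Only accessors exported by this module (choices, message) are used:
{-# LANGUAGE OverloadedStrings #-}
import qualified Data.Text as T
import System.Environment (getEnv)
import Langchain.LLM.OpenAI

main :: IO ()
main = do
  apiKey <- T.pack <$> getEnv "OPENAI_API_KEY"  -- assumed environment variable
  let req = defaultChatCompletionRequest
        { messages = [ defaultMessage { role    = User
                                      , content = Just (StringContent "Say hello!") } ]
        }
  result <- createChatCompletion apiKey req
  case result of
    Left err  -> putStrLn ("Request failed: " ++ err)
    Right res -> mapM_ (print . message) (choices res)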
defaultChatCompletionRequest :: ChatCompletionRequest Source #
Default chat completion request. Uses "gpt-4o-mini-2024-07-18" as the default model; all other parameters are set to Nothing.
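Because every optional parameter starts out as Nothing, record update is the natural way to adjust individual settings. A minimal sketch with illustrative values:
import Langchain.LLM.OpenAI

-- Reuse the defaults, tweaking only a few generation parameters.
tunedRequest :: ChatCompletionRequest
tunedRequest = defaultChatCompletionRequest
  { temperature         = Just 0.2   -- lower randomness
  , maxCompletionTokens = Just 256   -- cap the reply length
  , seed                = Just 42    -- best-effort reproducibility
  }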