Class: TogetherLLM
Together AI LLM implementation, built on the OpenAI LLM class.
Hierarchy
- OpenAI
  ↳ TogetherLLM
Constructors
constructor
• new TogetherLLM(init?): TogetherLLM
Parameters
| Name | Type |
| --- | --- |
| init? | Partial&lt;OpenAI&gt; |
Returns
TogetherLLM
Overrides
OpenAI.constructor
Defined in
packages/core/src/llm/together.ts:4
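A minimal construction sketch follows. It assumes TogetherLLM is exported from the llamaindex package and that the model id shown is available on Together AI; both are illustrative assumptions rather than part of this reference.

```ts
import { TogetherLLM } from "llamaindex";

// Every field of `init` is optional (Partial<OpenAI>); anything left unset
// falls back to the defaults inherited from the OpenAI base class.
const llm = new TogetherLLM({
  model: "mistralai/Mixtral-8x7B-Instruct-v0.1", // assumed Together AI model id
  temperature: 0.1,
  maxTokens: 256,
  apiKey: process.env.TOGETHER_API_KEY, // Together AI API key
});
```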
Properties
additionalChatOptions
• Optional additionalChatOptions: Omit&lt;Partial&lt;ChatCompletionCreateParams&gt;, "stream" | "model" | "temperature" | "messages" | "max_tokens" | "top_p"&gt;
Inherited from
OpenAI.additionalChatOptions
Defined in
packages/core/src/llm/LLM.ts:78
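As a sketch, any ChatCompletionCreateParams field other than the omitted keys above (stream, model, temperature, messages, max_tokens, top_p) can be forwarded through this property; the specific fields below are illustrative choices, not defaults.

```ts
const llm = new TogetherLLM({
  additionalChatOptions: {
    frequency_penalty: 0.5, // passed through on every chat completion request
    stop: ["\n\n"],
  },
});
```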
additionalSessionOptions
• Optional additionalSessionOptions: Omit&lt;Partial&lt;ClientOptions&gt;, "apiKey" | "timeout" | "maxRetries"&gt;
Inherited from
OpenAI.additionalSessionOptions
Defined in
packages/core/src/llm/LLM.ts:88
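A similar sketch for session-level options: any openai ClientOptions field except apiKey, timeout, and maxRetries (which have dedicated properties below) can be supplied here. The defaultHeaders value is a hypothetical example.

```ts
const llm = new TogetherLLM({
  additionalSessionOptions: {
    defaultHeaders: { "X-Example-Header": "docs" }, // hypothetical custom header
  },
});
```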
apiKey
• Optional apiKey: string = undefined
Inherited from
OpenAI.apiKey
Defined in
packages/core/src/llm/LLM.ts:84
callbackManager
• Optional callbackManager: CallbackManager
Inherited from
OpenAI.callbackManager
Defined in
packages/core/src/llm/LLM.ts:93
maxRetries
• maxRetries: number
Inherited from
OpenAI.maxRetries
Defined in
packages/core/src/llm/LLM.ts:85
maxTokens
• Optional maxTokens: number
Inherited from
OpenAI.maxTokens
Defined in
packages/core/src/llm/LLM.ts:77
model
• model: string
Inherited from
OpenAI.model
Defined in
packages/core/src/llm/LLM.ts:74
session
• session: OpenAISession
Inherited from
OpenAI.session
Defined in
packages/core/src/llm/LLM.ts:87
temperature
• temperature: number
Inherited from
OpenAI.temperature
Defined in
packages/core/src/llm/LLM.ts:75
timeout
• Optional timeout: number
Inherited from
OpenAI.timeout
Defined in
packages/core/src/llm/LLM.ts:86