/**
 * Input options for configuring a ChatGroq model instance.
 */
interface ChatGroqInput {
    /**
     * The Groq API key to use for requests.
     * @default process.env.GROQ_API_KEY
     */
    apiKey?: string;
    /**
     * The maximum number of tokens that the model can process in a single
     * response. This limit ensures computational efficiency and resource
     * management.
     */
    maxTokens?: number;
    /**
     * The name of the model to use.
     * @default "llama2-70b-4096"
     */
    model?: string;
    /**
     * The name of the model to use. Alias for `model`.
     * @default "llama2-70b-4096"
     */
    modelName?: string;
    /**
     * Up to 4 sequences where the API will stop generating further tokens.
     * The returned text will not contain the stop sequence.
     * Alias for `stopSequences`.
     */
    stop?: null | string | string[];
    /**
     * Up to 4 sequences where the API will stop generating further tokens.
     * The returned text will not contain the stop sequence.
     */
    stopSequences?: string[];
    /** Whether or not to stream responses. */
    streaming?: boolean;
    /**
     * The temperature to use for sampling.
     * @default 0.7
     */
    temperature?: number;
}

Hierarchy (view full)

Properties

apiKey?: string

The Groq API key to use for requests.

Default

process.env.GROQ_API_KEY
maxTokens?: number

The maximum number of tokens that the model can process in a single response. This limit ensures computational efficiency and resource management.

model?: string

The name of the model to use.

Default

"llama2-70b-4096"
modelName?: string

The name of the model to use. Alias for `model`.

Default

"llama2-70b-4096"
stop?: null | string | string[]

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence. Alias for `stopSequences`.

stopSequences?: string[]

Up to 4 sequences where the API will stop generating further tokens. The returned text will not contain the stop sequence.

streaming?: boolean

Whether or not to stream responses.

temperature?: number

The temperature to use for sampling.

Default

0.7

Generated using TypeDoc