This is a standalone configuration type; its prefix from the configuration root is `langchain4j.ollama.streaming-chat-model`.

## Configuration options
*(NOTE(review): the key and default-value columns of this table were lost during extraction. The keys below are reconstructed as kebab-case forms of the builder method names shown in each row's "Generated from" reference — verify against the generated reference documentation before relying on them.)*

| key | type | default value | description |
|---|---|---|---|
| `base-url` | string | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.baseUrl(java.lang.String)` |
| `custom-headers` | Map<string, string> | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.customHeaders(java.util.Map)` |
| `default-request-parameters` | ChatRequestParameters | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.defaultRequestParameters(dev.langchain4j.model.chat.request.ChatRequestParameters)` |
| *(key lost in extraction)* | boolean | | If set to … *(description and "Generated from" reference truncated in extraction — restore from the generated docs)* |
| `http-client-builder` | HttpClientBuilder | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.httpClientBuilder(dev.langchain4j.http.client.HttpClientBuilder)` |
| `listeners` | ChatModelListener[] | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.listeners(java.util.List)` |
| `log-requests` | boolean | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.logRequests(java.lang.Boolean)` |
| `log-responses` | boolean | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.logResponses(java.lang.Boolean)` |
| `logger` | Logger | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.logger(org.slf4j.Logger)` |
| `min-p` | double | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.minP(java.lang.Double)` |
| `mirostat` | int | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.mirostat(java.lang.Integer)` |
| `mirostat-eta` | double | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.mirostatEta(java.lang.Double)` |
| `mirostat-tau` | double | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.mirostatTau(java.lang.Double)` |
| `model-name` | string | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.modelName(java.lang.String)` |
| `num-ctx` | int | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.numCtx(java.lang.Integer)` |
| `num-predict` | int | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.numPredict(java.lang.Integer)` |
| `repeat-last-n` | int | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.repeatLastN(java.lang.Integer)` |
| `repeat-penalty` | double | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.repeatPenalty(java.lang.Double)` |
| `response-format` | ResponseFormat | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.responseFormat(dev.langchain4j.model.chat.request.ResponseFormat)` |
| `return-thinking` | boolean | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.returnThinking(java.lang.Boolean)` |
| `seed` | int | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.seed(java.lang.Integer)` |
| `stop` | string[] | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.stop(java.util.List)` |
| `supported-capabilities` | Capability[] (RESPONSE_FORMAT_JSON_SCHEMA) | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.supportedCapabilities(java.util.Set)` |
| `temperature` | double | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.temperature(java.lang.Double)` |
| `think` | boolean | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.think(java.lang.Boolean)` |
| `timeout` | Duration | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.timeout(java.time.Duration)` |
| `top-k` | int | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.topK(java.lang.Integer)` |
| `top-p` | double | | Generated from `dev.langchain4j.model.ollama.OllamaBaseChatModel.Builder.topP(java.lang.Double)` |