package io.github.ollama4j.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.chat.OllamaChatResponseModel;
import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
26 super(host, basicAuth, requestTimeoutSeconds, verbose);
38 responseBuffer.append(ollamaResponseModel.getMessage().getContent());
39 if (streamObserver !=
null) {
40 streamObserver.
notify(ollamaResponseModel);
42 return ollamaResponseModel.isDone();
43 }
catch (JsonProcessingException e) {
44 LOG.error(
"Error parsing the Ollama chat response!", e);
52 return super.callSync(body);
void notify(OllamaChatResponseModel currentResponsePart)
OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose)
boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer)
String getEndpointSuffix()
static ObjectMapper getObjectMapper()