package io.github.ollama4j.models.request;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.chat.OllamaChatMessage;
import io.github.ollama4j.models.response.OllamaResult;
import io.github.ollama4j.models.chat.OllamaChatResponseModel;
import io.github.ollama4j.models.chat.OllamaChatStreamObserver;
import io.github.ollama4j.models.generate.OllamaStreamHandler;
import io.github.ollama4j.utils.OllamaRequestBody;
import io.github.ollama4j.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;
/** Specialization of the endpoint caller for Ollama's chat API. */
public class OllamaChatEndpointCaller extends OllamaEndpointCaller {

    private static final Logger LOG = LoggerFactory.getLogger(OllamaChatEndpointCaller.class);

    // Forwards each streamed part to the user's handler via notify(OllamaChatResponseModel currentResponsePart).
    private OllamaChatStreamObserver streamObserver;

    public OllamaChatEndpointCaller(String host, BasicAuth basicAuth, long requestTimeoutSeconds, boolean verbose) {
        super(host, basicAuth, requestTimeoutSeconds, verbose);
    }

    @Override
    protected String getEndpointSuffix() {
        return "/api/chat";
    }

    /**
     * Parses one streamed response line into an {@link OllamaChatResponseModel}, appends the
     * message content to the buffer, and notifies the stream observer if one is registered.
     *
     * @return true if the Ollama response reports the 'done' state
     */
    @Override
    protected boolean parseResponseAndAddToBuffer(String line, StringBuilder responseBuffer) {
        try {
            // Utils.getObjectMapper() exposes the shared static Jackson ObjectMapper.
            OllamaChatResponseModel ollamaResponseModel =
                    Utils.getObjectMapper().readValue(line, OllamaChatResponseModel.class);
            OllamaChatMessage message = ollamaResponseModel.getMessage();
            responseBuffer.append(message.getContent());
            if (streamObserver != null) {
                streamObserver.notify(ollamaResponseModel);
            }
            return ollamaResponseModel.isDone();
        } catch (JsonProcessingException e) {
            LOG.error("Error parsing the Ollama chat response!", e);
            // Treat an unparsable line as terminal so the caller stops waiting on the stream.
            return true;
        }
    }

    public OllamaResult call(OllamaRequestBody body, OllamaStreamHandler streamHandler)
            throws OllamaBaseException, IOException, InterruptedException {
        streamObserver = new OllamaChatStreamObserver(streamHandler);
        return super.callSync(body);
    }
}
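
// --- Usage sketch (not part of the original class) ---
// A minimal illustration of how this endpoint caller might be driven directly with a streaming
// handler. The host URL, the null BasicAuth, the timeout value, and the assumption that an
// OllamaStreamHandler can be supplied as a lambda receiving each streamed token as a String are
// illustrative guesses; in practice the higher-level OllamaAPI facade is the usual entry point.
class OllamaChatEndpointCallerUsageSketch {
    static OllamaResult streamChat(OllamaRequestBody body) throws Exception {
        OllamaChatEndpointCaller caller =
                new OllamaChatEndpointCaller("http://localhost:11434", null, 60, true);
        // Print each streamed token as it arrives; call(...) blocks until the full response is assembled.
        return caller.call(body, token -> System.out.print(token));
    }
}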