| code (stringlengths 419 102k) | apis (listlengths 1 10) | extract_api (stringlengths 67 54.7k) |
|---|---|---|
| 
	package io.thomasvitale.langchain4j.spring.openai;
import java.util.List;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.Content;
import dev.langchain4j.data.message.ContentType;
import dev.langchain4j.data.message.ImageContent;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.TextContent;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.TokenUsage;
import org.springframework.lang.Nullable;
import org.springframework.util.CollectionUtils;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionFinishReason;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionRequest;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionResponse;
import io.thomasvitale.langchain4j.spring.openai.api.chat.Tool;
import io.thomasvitale.langchain4j.spring.openai.api.embedding.EmbeddingResponse;
import io.thomasvitale.langchain4j.spring.openai.api.image.ImageGenerationResponse;
import io.thomasvitale.langchain4j.spring.openai.api.shared.Usage;
import static java.lang.String.format;
import static java.util.stream.Collectors.toList;
/**
 * Utility class to convert between OpenAI and Langchain4j types.
 * <p>
 * Based on the original LangChain4j implementation.
 */
public final class OpenAiAdapters {
    /**
     * Converts from a list of LangChain4J ChatMessage to a list of OpenAI ChatCompletionMessage.
     */
    public static List<ChatCompletionMessage> toOpenAiMessages(List<ChatMessage> messages) {
        return messages.stream()
                .map(OpenAiAdapters::toOpenAiMessage)
                .toList();
    }
    /**
     * Converts from LangChain4J ChatMessage to ChatCompletionMessage.
     */
    private static ChatCompletionMessage toOpenAiMessage(ChatMessage message) {
        if (message instanceof SystemMessage systemMessage) {
            return ChatCompletionMessage.builder()
                    .role(ChatCompletionMessage.Role.SYSTEM)
                    .content(systemMessage.text())
                    .build();
        }
        if (message instanceof UserMessage userMessage) {
            if (userMessage.hasSingleText()) {
                return ChatCompletionMessage.builder()
                        .role(ChatCompletionMessage.Role.USER)
                        .content(userMessage.text())
                        .name(userMessage.name())
                        .build();
            } else {
                return ChatCompletionMessage.builder()
                        .content(userMessage.contents().stream()
                                .map(OpenAiAdapters::toOpenAiContent)
                                .collect(toList()))
                        .name(userMessage.name())
                        .build();
            }
        }
        if (message instanceof AiMessage aiMessage) {
            if (!aiMessage.hasToolExecutionRequests()) {
                return ChatCompletionMessage.builder()
                        .role(ChatCompletionMessage.Role.ASSISTANT)
                        .content(aiMessage.text())
                        .build();
            }
            return ChatCompletionMessage.builder()
                    .role(ChatCompletionMessage.Role.ASSISTANT)
                    .toolCalls(toOpenAiToolCalls(aiMessage.toolExecutionRequests()))
                    .build();
        }
        if (message instanceof ToolExecutionResultMessage toolExecutionResultMessage) {
            return ChatCompletionMessage.builder()
                    .role(ChatCompletionMessage.Role.TOOL)
                    .content(toolExecutionResultMessage.text())
                    .toolCallId(toolExecutionResultMessage.id())
                    .build();
        }
        throw new IllegalArgumentException("Unknown message type: " + message.type());
    }
    /**
     * Converts from LangChain4J Content to OpenAI Content.
     */
    public static ChatCompletionMessage.Content toOpenAiContent(Content content) {
        if (ContentType.TEXT.equals(content.type())) {
            var textContent = (TextContent) content;
            return ChatCompletionMessage.Content.builder()
                    .type(ChatCompletionMessage.ContentType.TEXT)
                    .text(textContent.text())
                    .build();
        } else if (ContentType.IMAGE.equals(content.type())) {
            var imageContent = (ImageContent) content;
            return ChatCompletionMessage.Content.builder()
                    .type(ChatCompletionMessage.ContentType.IMAGE_URL)
                    .imageUrl(new ChatCompletionMessage.ImageUrl(
                            toOpenAiUrl(imageContent.image()),
                            toOpenAiDetail(imageContent.detailLevel())))
                    .build();
        } else {
            throw new IllegalArgumentException("Unknown content type: " + content.type());
        }
    }
    /**
     * Converts from LangChain4J Image to OpenAI Image URL.
     */
    private static String toOpenAiUrl(Image image) {
        if (image.url() != null) {
            return image.url().toString();
        }
        return format("data:%s;base64,%s", image.mimeType(), image.base64Data());
    }
    /**
     * Converts from LangChain4J DetailLevel to OpenAI DetailLevel.
     */
    private static String toOpenAiDetail(ImageContent.DetailLevel detailLevel) {
        if (detailLevel == null) {
            return null;
        }
        return detailLevel.name();
    }
    /**
     * Converts from a list of LangChain4J ToolExecutionRequest to a list of OpenAI ToolCall.
     */
    private static List<ChatCompletionMessage.ToolCall> toOpenAiToolCalls(List<ToolExecutionRequest> toolExecutionRequests) {
        return toolExecutionRequests.stream()
                .map(OpenAiAdapters::toOpenAiToolCall)
                .toList();
    }
    /**
     * Converts from LangChain4J ToolExecutionRequest to OpenAI ToolCall.
     */
    private static ChatCompletionMessage.ToolCall toOpenAiToolCall(ToolExecutionRequest toolExecutionRequest) {
        var functionCall = new ChatCompletionMessage.ChatCompletionFunction(
                toolExecutionRequest.name(),
                toolExecutionRequest.arguments());
        return new ChatCompletionMessage.ToolCall(toolExecutionRequest.id(), functionCall);
    }
    /**
     * Converts from LangChain4J ToolSpecification to OpenAI ToolChoice.
     */
    public static ChatCompletionRequest.ToolChoice toOpenAiToolChoice(ToolSpecification toolSpecification) {
        return new ChatCompletionRequest.ToolChoice(toolSpecification.name());
    }
    /**
     * Converts from a list of LangChain4J ToolSpecification to a list of OpenAI Tool.
     */
    public static List<Tool> toOpenAiTools(List<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
                .map(OpenAiAdapters::toOpenAiTool)
                .toList();
    }
    /**
     * Converts from LangChain4J ToolSpecification to OpenAI Tool.
     */
    private static Tool toOpenAiTool(ToolSpecification toolSpecification) {
        var function = Tool.Function.builder()
                .description(toolSpecification.description())
                .name(toolSpecification.name())
                .parameters(OpenAiAdapters.toOpenAiParameters(toolSpecification.parameters()))
                .build();
        return new Tool(function);
    }
    /**
     * Converts from LangChain4J ToolParameters to OpenAI Tool.Parameters.
     */
    private static Tool.Parameters toOpenAiParameters(@Nullable ToolParameters toolParameters) {
        if (toolParameters == null) {
            return Tool.Parameters.builder().build();
        }
        return Tool.Parameters.builder()
                .properties(toolParameters.properties())
                .required(toolParameters.required())
                .build();
    }
    /**
     * Converts from OpenAI Usage to LangChain4J Usage.
     */
    public static TokenUsage toTokenUsage(Usage usage) {
        return new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens());
    }
    /**
     * Converts from OpenAI ChatCompletionResponse to LangChain4J AiMessage.
     */
    public static AiMessage toAiMessage(ChatCompletionResponse response) {
        var assistantMessage = response.choices().get(0).message();
        var toolCalls = assistantMessage.toolCalls();
        if (!(CollectionUtils.isEmpty(toolCalls))) {
            List<ToolExecutionRequest> toolExecutionRequests = toolCalls.stream()
                    .filter(toolCall -> "function".equals(toolCall.type()))
                    .map(OpenAiAdapters::toToolExecutionRequest)
                    .toList();
            return AiMessage.from(toolExecutionRequests);
        }
        return AiMessage.from((String) assistantMessage.content());
    }
    /**
     * Converts from OpenAI ToolCall to LangChain4J ToolExecutionRequest.
     */
    private static ToolExecutionRequest toToolExecutionRequest(ChatCompletionMessage.ToolCall toolCall) {
        return ToolExecutionRequest.builder()
                .id(toolCall.id())
                .name(toolCall.function().name())
                .arguments(toolCall.function().arguments())
                .build();
    }
    /**
     * Converts from OpenAI ChatCompletionFinishReason to LangChain4J FinishReason.
     */
    public static FinishReason toFinishReason(ChatCompletionFinishReason finishReason) {
        return switch (finishReason) {
            case STOP -> FinishReason.STOP;
            case LENGTH -> FinishReason.LENGTH;
            case TOOL_CALLS -> FinishReason.TOOL_EXECUTION;
            case CONTENT_FILTER -> FinishReason.CONTENT_FILTER;
        };
    }
    /**
     * Converts from OpenAI EmbeddingData to LangChain4J Embedding.
     */
    public static Embedding toEmbedding(EmbeddingResponse.EmbeddingData embeddingData) {
        var floatVectors = embeddingData.embedding().stream()
                .map(Double::floatValue)
                .toList();
        return Embedding.from(floatVectors);
    }
    /**
     * Converts from OpenAI ImageData to LangChain4J Image.
     */
    public static Image toImage(ImageGenerationResponse.ImageData imageData) {
        return Image.builder()
                .url(imageData.url())
                .base64Data(imageData.b64Json())
                .revisedPrompt(imageData.revisedPrompt())
                .build();
    }
}
 | 
	[
  "dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
  "dev.langchain4j.data.message.ContentType.TEXT.equals",
  "dev.langchain4j.data.image.Image.builder",
  "dev.langchain4j.data.message.ContentType.IMAGE.equals"
] | 
	[((2434, 2606), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2434, 2577), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2434, 2526), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2977), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2944), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2894), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((2747, 2841), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3023, 3324), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3023, 3291), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3023, 3241), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3485, 3668), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3485, 3635), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3485, 3584), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3704, 3913), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3704, 3884), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((3704, 3799), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4281), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4252), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4187), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4033, 4123), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.builder'), ((4559, 4598), 'dev.langchain4j.data.message.ContentType.TEXT.equals'), ((4674, 4854), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4674, 4825), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4674, 4779), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4674, 4713), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4875, 4915), 'dev.langchain4j.data.message.ContentType.IMAGE.equals'), ((4993, 5334), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4993, 5305), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4993, 5103), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((4993, 5032), 'io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage.Content.builder'), ((7675, 7928), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7903), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7808), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7760), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((7675, 7698), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Function.builder'), ((8217, 8250), 
'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8217, 8242), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8437), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8412), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8359), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((8277, 8302), 'io.thomasvitale.langchain4j.spring.openai.api.chat.Tool.Parameters.builder'), ((9635, 9835), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9635, 9810), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9635, 9750), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9635, 9700), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((10831, 11016), 'dev.langchain4j.data.image.Image.builder'), ((10831, 10991), 'dev.langchain4j.data.image.Image.builder'), ((10831, 10933), 'dev.langchain4j.data.image.Image.builder'), ((10831, 10884), 'dev.langchain4j.data.image.Image.builder')] | 
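A minimal usage sketch for the adapter sample above. It assumes the `OpenAiAdapters` and `ChatCompletionMessage` types from this sample are on the classpath; `SystemMessage.from` and `UserMessage.from` are the standard LangChain4j message factories. This is illustrative only, not part of the original sample.

```java
import java.util.List;

import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;

import io.thomasvitale.langchain4j.spring.openai.OpenAiAdapters;
import io.thomasvitale.langchain4j.spring.openai.api.chat.ChatCompletionMessage;

// Hypothetical usage sketch: convert LangChain4j chat messages into the OpenAI
// request format using the OpenAiAdapters utility shown above.
class OpenAiAdaptersUsageSketch {

    public static void main(String[] args) {
        List<ChatMessage> messages = List.of(
                SystemMessage.from("You are a helpful assistant."),
                UserMessage.from("Summarize the latest Java news."));

        // toOpenAiMessages maps each LangChain4j ChatMessage to a ChatCompletionMessage.
        List<ChatCompletionMessage> openAiMessages = OpenAiAdapters.toOpenAiMessages(messages);
        System.out.println(openAiMessages);
    }
}
```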
| 
	package org.jugph;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.UrlDocumentLoader;
import dev.langchain4j.data.document.transformer.HtmlTextExtractor;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.UserMessage;
import static java.time.Duration.ofSeconds;
public class JavaNewsRetrieverToolsExample {
    static class JavaNewsRetriever {
        @Tool("Retrieves the latest java news. Limit to the 3 latest news")
        String retrieveJavaNews() {
            Document javaNews = UrlDocumentLoader.load("https://dev.java/news/");
            Document transformedJavaNews = new HtmlTextExtractor(".container", null, true)
                    .transform(javaNews);
            return transformedJavaNews.text().replaceAll("\n", " ");
        }
    }
    interface Assistant {
        String chat(String userMessage);
    }
    interface NewsPrettierAssistant {
        @UserMessage("Given a jumbled java news {{it}}, summarize each, and list down them in numerical format, latest to oldest. " +
                "Include details such as url and date announced.")
        String prettify(String userMessage);
    }
    public static void main(String[] args) {
        var model = OpenAiChatModel.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .timeout(ofSeconds(120))
                .build();
        var assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .tools(new JavaNewsRetriever())
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();
        var newsPrettierAssistant = AiServices.builder(NewsPrettierAssistant.class)
                .chatLanguageModel(model)
                .build();
        var question = "What are latest java news?";
        var answer = assistant.chat(question);
        var prettiedAnswer = newsPrettierAssistant.prettify(answer);
        System.out.println("\n=================================\n"+prettiedAnswer);
    }
}
 | 
	[
  "dev.langchain4j.service.AiServices.builder",
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((1405, 1553), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1405, 1528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1405, 1487), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1580, 1803), 'dev.langchain4j.service.AiServices.builder'), ((1580, 1778), 'dev.langchain4j.service.AiServices.builder'), ((1580, 1705), 'dev.langchain4j.service.AiServices.builder'), ((1580, 1657), 'dev.langchain4j.service.AiServices.builder'), ((1842, 1956), 'dev.langchain4j.service.AiServices.builder'), ((1842, 1931), 'dev.langchain4j.service.AiServices.builder')] | 
| 
	package io.quarkiverse.langchain4j.sample.chatbot;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import java.io.File;
import java.util.List;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkus.logging.Log;
import io.quarkus.runtime.StartupEvent;
@ApplicationScoped
public class IngestorExample {
    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    EmbeddingStore store;
    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;
    public void ingest(@Observes StartupEvent event) {
        Log.infof("Ingesting documents...");
        List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(),
                new TextDocumentParser());
        var ingestor = EmbeddingStoreIngestor.builder()
                .embeddingStore(store)
                .embeddingModel(embeddingModel)
                .documentSplitter(recursive(500, 0))
                .build();
        ingestor.ingest(documents);
        Log.infof("Ingested %d documents.%n", documents.size());
    }
}
 | 
	[
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | 
	[((1441, 1638), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1441, 1613), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1441, 1560), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1441, 1512), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] | 
| 
	package com.honvay.flychat.langchain.llama.model;
import dev.ai4j.openai4j.chat.*;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import static dev.ai4j.openai4j.chat.Role.*;
import static dev.langchain4j.data.message.AiMessage.aiMessage;
public class OpenAiConverters {
    static List<Message> toOpenAiMessages(List<ChatMessage> messages) {
        return messages.stream()
                .map(OpenAiConverters::toOpenAiMessage)
                .collect(Collectors.toList());
    }
    static Message toOpenAiMessage(ChatMessage message) {
        return Message.builder()
                .role(roleFrom(message))
                .name(nameFrom(message))
                .content(message.text())
                .functionCall(functionCallFrom(message))
                .build();
    }
    private static String nameFrom(ChatMessage message) {
        if (message instanceof ToolExecutionResultMessage) {
            return ((ToolExecutionResultMessage) message).toolName();
        }
        return null;
    }
    private static FunctionCall functionCallFrom(ChatMessage message) {
        if (message instanceof AiMessage) {
            AiMessage aiMessage = (AiMessage) message;
            if (aiMessage.toolExecutionRequest() != null) {
                return FunctionCall.builder()
                        .name(aiMessage.toolExecutionRequest().name())
                        .arguments(aiMessage.toolExecutionRequest().arguments())
                        .build();
            }
        }
        return null;
    }
    static Role roleFrom(ChatMessage message) {
        if (message instanceof AiMessage) {
            return Role.ASSISTANT;
        } else if (message instanceof ToolExecutionResultMessage) {
            return Role.FUNCTION;
        } else if (message instanceof SystemMessage) {
            return Role.SYSTEM;
        } else {
            return Role.USER;
        }
    }
    static List<Function> toFunctions(Collection<ToolSpecification> toolSpecifications) {
        if (toolSpecifications == null) {
            return null;
        }
        return toolSpecifications.stream()
                .map(OpenAiConverters::toFunction)
                .collect(Collectors.toList());
    }
    private static Function toFunction(ToolSpecification toolSpecification) {
        return Function.builder()
                .name(toolSpecification.name())
                .description(toolSpecification.description())
                .parameters(toOpenAiParameters(toolSpecification.parameters()))
                .build();
    }
    private static dev.ai4j.openai4j.chat.Parameters toOpenAiParameters(ToolParameters toolParameters) {
        if (toolParameters == null) {
            return dev.ai4j.openai4j.chat.Parameters.builder().build();
        }
        return dev.ai4j.openai4j.chat.Parameters.builder()
                .properties(toolParameters.properties())
                .required(toolParameters.required())
                .build();
    }
    static AiMessage aiMessageFrom(ChatCompletionResponse response) {
        return AiMessage.aiMessage(response.choices().get(0).delta().content());
    }
}
 | 
	[
  "dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequest"
] | 
	[((1743, 1782), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequest'), ((1819, 1863), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequest')] | 
| 
	package ma.enset.projet.service;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore;
import ma.enset.projet.Dao.DocumentImp;
import ma.enset.projet.Dao.VectorBd;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
public class ConversationBuild {
    private final ChatMemory customChatMemory = MessageWindowChatMemory.withMaxMessages(20);
    private DocumentImp document;
    private VectorBd vectorBd = new VectorBd();
    public void setVectorBd(VectorBd vectorBd) {
        this.vectorBd = vectorBd;
    }
    public ConversationBuild() {
    }
    public ChatMemory getCustomChatMemory() {
        return customChatMemory;
    }
    public DocumentImp getDocument() {
        return document;
    }
    public void setDocument(DocumentImp document) {
        this.document = document;
    }
    public VectorBd getVectorBd() {
        return vectorBd;
    }
    public int getMaxResults() {
        return maxResults;
    }
    public Double getMinScore() {
        return minScore;
    }
    private final int maxResults = 4;
    private final Double minScore = 0.7;
    public EmbeddingStoreRetriever retriever(){
        ChromaEmbeddingStore chromaStore = vectorBd.getConnection();
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        return new EmbeddingStoreRetriever(chromaStore, embeddingModel, maxResults, minScore);
    }
    public void build(String path) throws IOException {
        document = new DocumentImp(path);
        String txtFilePath = document.setTxtFilePath(path);
        String txtContent = Files.readString(Path.of(txtFilePath));
        DocumentSplitter lineSplitter = document.SplitDocument();
        Document doc = document.Document(txtContent);
//        EmbeddingStoreIngestor.builder()
//                .documentSplitter(lineSplitter)
//                .embeddingModel(new AllMiniLmL6V2EmbeddingModel())
//                .embeddingStore(vectorBd.getConnection())
//                .build()
//                .ingest(doc);
    }
    public ConversationalRetrievalChain chain(String API){
        return ConversationalRetrievalChain.builder()
                .chatLanguageModel(OpenAiChatModel.withApiKey(API))
                .chatMemory(customChatMemory)
                .promptTemplate(document.template())
                .retriever(retriever())
                .build();
    }
}
 | 
	[
  "dev.langchain4j.chain.ConversationalRetrievalChain.builder"
] | 
	[((2701, 2971), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2946), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2906), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2853), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2701, 2807), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] | 
| 
	package com.datawise.bertdocqa.config;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import dev.langchain4j.data.segment.TextSegment;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ElasticsearchConfig {
    @Value("${elasticsearch.server.url}")
    private String serverUrl;
    @Value("${elasticsearch.index.name}")
    private String indexName;
    @Value("${elasticsearch.dimension}")
    private int dimension;
    @Bean
    public EmbeddingStore<TextSegment> embeddingStore() {
        return ElasticsearchEmbeddingStore.builder()
                .serverUrl(serverUrl)
                .indexName(indexName)
                .dimension(dimension)
                .build();
    }
}
 | 
	[
  "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder"
] | 
	[((747, 923), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((747, 898), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((747, 860), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((747, 822), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder')] | 
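The configuration above only exposes the Elasticsearch store as a Spring bean. A minimal sketch of exercising such a store, mirroring the add/findRelevant pattern used in the CassandraEmbeddingStore sample further down; the server URL, index name, and the 384 dimension of all-MiniLM-L6-v2 are illustrative values.

```java
import java.util.List;

import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;

// Sketch: index one segment into an Elasticsearch-backed store and query it back.
public class ElasticsearchStoreSketch {

    public static void main(String[] args) {
        EmbeddingStore<TextSegment> store = ElasticsearchEmbeddingStore.builder()
                .serverUrl("http://localhost:9200")
                .indexName("documents")
                .dimension(384) // all-MiniLM-L6-v2 produces 384-dimensional vectors
                .build();

        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        TextSegment segment = TextSegment.from("LangChain4j supports several embedding stores.");
        Embedding embedding = embeddingModel.embed(segment).content();
        store.add(embedding, segment);

        Embedding query = embeddingModel.embed("Which embedding stores are supported?").content();
        List<EmbeddingMatch<TextSegment>> matches = store.findRelevant(query, 1);
        System.out.println(matches.get(0).embedded().text());
    }
}
```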
| 
	import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.rag.query.transformer.CompressingQueryTransformer;
import dev.langchain4j.rag.query.transformer.QueryTransformer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Scanner;
public class _02_Advanced_RAG_with_Query_Compression {
    /**
     * Please refer to previous examples for basic context.
     * <p>
     * Advanced RAG in LangChain4j is described here: https://github.com/langchain4j/langchain4j/pull/538
     * <p>
     * This example illustrates the implementation of a more sophisticated RAG application
     * using a technique known as "query compression".
     * Often, a query from a user is a follow-up question that refers back to earlier parts of the conversation
     * and lacks all the necessary details for effective retrieval.
     * For example, consider this conversation:
     * User: What is the legacy of John Doe?
     * AI: John Doe was a...
     * User: When was he born?
     * <p>
     * In such scenarios, using a basic RAG approach with a query like "When was he born?"
     * would likely fail to find articles about John Doe, as it doesn't contain "John Doe" in the query.
     * Query compression involves taking the user's query and the preceding conversation, then asking the LLM
     * to "compress" this into a single, self-contained query.
     * The LLM should generate a query like "When was John Doe born?".
     * This method adds a bit of latency and cost but significantly enhances the quality of the RAG process.
     * It's worth noting that the LLM used for compression doesn't have to be the same as the one
     * used for conversation. For instance, you might use a smaller local model trained for summarization.
     * <p>
     * In this example, we will continue using {@link AiServices},
     * but the same principles apply to {@link ConversationalRetrievalChain}, or you can develop your custom RAG flow.
     */
    public static void main(String[] args) {
        Biographer biographer = createBiographer();
        // First, ask "What is the legacy of John Doe?"
        // Then, ask "When was he born?"
        // Now, review the logs:
        // The first query was not compressed as there was no preceding context to compress.
        // The second query, however, was compressed into something like "When was John Doe born?"
        try (Scanner scanner = new Scanner(System.in)) {
            while (true) {
                System.out.println("==================================================");
                System.out.print("User: ");
                String userQuery = scanner.nextLine();
                System.out.println("==================================================");
                if ("exit".equalsIgnoreCase(userQuery)) {
                    break;
                }
                String biographerAnswer = biographer.answer(userQuery);
                System.out.println("==================================================");
                System.out.println("Biographer: " + biographerAnswer);
            }
        }
    }
    private static Biographer createBiographer() {
        // Check _01_Naive_RAG if you need more details on what is going on here
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey("demo")
                .build();
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        Path documentPath = toPath("biography-of-john-doe.txt");
        EmbeddingStore<TextSegment> embeddingStore = embed(documentPath, embeddingModel);
        // We will create a CompressingQueryTransformer, which is responsible for compressing
        // the user's query and the preceding conversation into a single, stand-alone query.
        // This should significantly improve the quality of the retrieval process.
        QueryTransformer queryTransformer = new CompressingQueryTransformer(chatModel);
        ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(2)
                .minScore(0.6)
                .build();
        // The RetrievalAugmentor serves as the entry point into the RAG flow in LangChain4j.
        // It can be configured to customize the RAG behavior according to your requirements.
        // In subsequent examples, we will explore more customizations.
        RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                .queryTransformer(queryTransformer)
                .contentRetriever(contentRetriever)
                .build();
        return AiServices.builder(Biographer.class)
                .chatLanguageModel(chatModel)
                .retrievalAugmentor(retrievalAugmentor)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();
    }
    private static EmbeddingStore<TextSegment> embed(Path documentPath, EmbeddingModel embeddingModel) {
        DocumentParser documentParser = new TextDocumentParser();
        Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);
        DocumentSplitter splitter = DocumentSplitters.recursive(300, 0);
        List<TextSegment> segments = splitter.split(document);
        List<Embedding> embeddings = embeddingModel.embedAll(segments).content();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddings, segments);
        return embeddingStore;
    }
    interface Biographer {
        String answer(String query);
    }
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = _02_Advanced_RAG_with_Query_Compression.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
} | 
	[
  "dev.langchain4j.service.AiServices.builder",
  "dev.langchain4j.rag.DefaultRetrievalAugmentor.builder",
  "dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((4539, 4621), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4539, 4596), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5259, 5482), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5457), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5426), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5395), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5259, 5347), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5793, 5957), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5793, 5932), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5793, 5880), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5975, 6211), 'dev.langchain4j.service.AiServices.builder'), ((5975, 6186), 'dev.langchain4j.service.AiServices.builder'), ((5975, 6113), 'dev.langchain4j.service.AiServices.builder'), ((5975, 6057), 'dev.langchain4j.service.AiServices.builder')] | 
| 
	package _Engenharia;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
//import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Scanner;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.data.message.UserMessage.userMessage;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
import static java.time.Duration.ofSeconds;
import assistente.ApiKeys;
public class AssistenteMemoryDocument {
	
	private static final String vetModel = "sentence-transformers/all-MiniLM-L6-v2";
	
	private static final ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(300, new OpenAiTokenizer(GPT_3_5_TURBO));
	
	
	public String fazerPergunta(String pergunta) throws Exception {
		// Chat memory
		//ChatLanguageModel model = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY);
        
		
        
        //Chat Language Model Builder OpenAi
        ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .temperature(0.0)
                .timeout(ofSeconds(900))
                .build();  
        
        
        //EmbeddingModel Builder HuggingFace
        EmbeddingModel embeddingModel = HuggingFaceEmbeddingModel.builder()
                .accessToken(ApiKeys.HF_API_KEY)
                .modelId(vetModel)
                .waitForModel(true)
                .timeout(ofSeconds(60))
                .build();
        
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        //"Cosumo" do texto do arquivo
        
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(500, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        Document document = loadDocument(toPath("template.txt"));
        ingestor.ingest(document);
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(chatLanguageModel)
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                .chatMemory(chatMemory) // you can override default chat memory
                // .promptTemplate() // you can override default prompt template
                .build();
        chatMemory.add(userMessage(pergunta));
        
        return chain.execute(pergunta);
        
		}
	
	
	// Looks up the file
	private static Path toPath(String fileName) {
        try {
            URL fileUrl = AssistenteMemoryDocument.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
	
	
}
 | 
	[
  "dev.langchain4j.chain.ConversationalRetrievalChain.builder",
  "dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder",
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((2335, 2508), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2335, 2483), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2335, 2442), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2335, 2408), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2615, 2835), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2810), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2770), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2734), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2615, 2699), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((3022, 3246), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3022, 3221), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3022, 3173), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3022, 3125), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3396, 3763), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3396, 3617), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3396, 3577), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3396, 3488), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] | 
| 
	package io.quarkiverse.langchain4j.sample.chatbot;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
import java.io.File;
import java.io.FileReader;
import java.io.IOException;
import java.io.Reader;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVRecord;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.Metadata;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
@ApplicationScoped
public class CsvIngestorExample {
    /**
     * The embedding store (the database).
     * The bean is provided by the quarkus-langchain4j-redis extension.
     */
    @Inject
    RedisEmbeddingStore store;
    /**
     * The embedding model (how the vector of a document is computed).
     * The bean is provided by the LLM (like openai) extension.
     */
    @Inject
    EmbeddingModel embeddingModel;
    @ConfigProperty(name = "csv.file")
    File file;
    @ConfigProperty(name = "csv.headers")
    List<String> headers;
    public void ingest(@Observes StartupEvent event) throws IOException {
        CSVFormat csvFormat = CSVFormat.DEFAULT.builder()
                .setHeader(headers.toArray(new String[0]))
                .setSkipHeaderRecord(true)
                .build();
        List<Document> documents = new ArrayList<>();
        try (Reader reader = new FileReader(file)) {
            // Generate one document per row; each document uses the following syntax:
            // key1: value1
            // key2: value2
            Iterable<CSVRecord> records = csvFormat.parse(reader);
            int i = 1;
            for (CSVRecord record : records) {
                Map<String, String> metadata = new HashMap<>();
                metadata.put("source", file.getAbsolutePath());
                metadata.put("row", String.valueOf(i++));
                StringBuilder content = new StringBuilder();
                for (String header : headers) {
                    metadata.put(header, record.get(header)); // Include all headers in the metadata.
                    content.append(header).append(": ").append(record.get(header)).append("\n");
                }
                documents.add(new Document(content.toString(), Metadata.from(metadata)));
            }
            var ingestor = EmbeddingStoreIngestor.builder()
                    .embeddingStore(store)
                    .embeddingModel(embeddingModel)
                    .documentSplitter(recursive(300, 0))
                    .build();
            ingestor.ingest(documents);
            System.out.printf("Ingested %d documents.%n", documents.size());
        }
    }
}
 | 
	[
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | 
	[((1575, 1729), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((1575, 1704), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((1575, 1661), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((1575, 1602), 'org.apache.commons.csv.CSVFormat.DEFAULT.builder'), ((2766, 2979), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2766, 2950), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2766, 2893), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2766, 2841), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] | 
| 
	package org.goafabric.dbagent.ai;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
import static java.time.Duration.ofSeconds;
@Configuration
@Profile("openai")
public class OpenAiConfiguration {
    @Bean
    ChatLanguageModel chatModelOpenAi(DatabaseTool databaseTool) {
        return OpenAiChatModel.builder().apiKey("demo")
                .modelName("gpt-3.5-turbo")
                .timeout(ofSeconds(30)).temperature(0.0)
                .build();
    }
    @Bean
    DatabaseAgent databaseAgent(ChatLanguageModel chatLanguageModel, DatabaseTool databaseTool) {
        return AiServices.builder(DatabaseAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(databaseTool)
                .build();
    }
}
 | 
	[
  "dev.langchain4j.service.AiServices.builder",
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((620, 786), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 761), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 744), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 704), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((620, 660), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((918, 1146), 'dev.langchain4j.service.AiServices.builder'), ((918, 1121), 'dev.langchain4j.service.AiServices.builder'), ((918, 1084), 'dev.langchain4j.service.AiServices.builder'), ((918, 1011), 'dev.langchain4j.service.AiServices.builder')] | 
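The `DatabaseAgent` and `DatabaseTool` beans injected above are not shown in this sample. A hypothetical sketch of their shape, following the `AiServices` interface and `@Tool` patterns used in the other samples here; the actual classes in that project may look different.

```java
import dev.langchain4j.agent.tool.Tool;

// Hypothetical shapes for the DatabaseAgent and DatabaseTool beans referenced above.
interface DatabaseAgent {
    String chat(String userMessage);
}

class DatabaseTool {

    @Tool("Runs a read-only SQL query against the application database")
    String query(String sql) {
        // Placeholder implementation; a real tool would execute the query via JDBC.
        return "no rows";
    }
}
```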
| 
	package com.elec5620.studyhelper.api.llm;
import io.github.cdimascio.dotenv.Dotenv;
import java.net.URL;
import java.util.Scanner;
import com.elec5620.studyhelper.core.HelperSystem;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.io.File;
import java.io.IOException;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.Duration;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
public class ChatBot {
    public static String response(String filePath, String request) {
        Dotenv dotenv = Dotenv.load();
        String token = dotenv.get("OPENAI_API_KEY");
        System.out.println("file path: " + filePath);
        try {
            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            OpenAiChatModel model = OpenAiChatModel.builder().apiKey(token).timeout(Duration.ofMinutes(1)).build();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(500, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            Document document = loadDocument(toPath(filePath));
            ingestor.ingest(document);
            ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(model)
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    // .chatMemory() // you can override default chat memory
                    // .promptTemplate() // you can override default prompt template
                    .build();
            String answer = chain.execute(request);
            System.out.println(answer); // answer based on given information
            answer = answer.replace("\\n", System.lineSeparator());
            return answer;
        } catch (Exception e) {
            return e.getMessage();
        }
    }
    public static void main(String[] args) {
        Dotenv dotenv = Dotenv.load();
        String token = dotenv.get("OPENAI_API_KEY");
        try (Scanner myObj = new Scanner(System.in)) {
            System.out.println("Enter question to ask:");
            String question = myObj.nextLine();
            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(DocumentSplitters.recursive(500, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            Document document = loadDocument(toPath("example.txt"));
            ingestor.ingest(document);
            ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                    .chatLanguageModel(OpenAiChatModel.withApiKey(token))
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    // .chatMemory() // you can override default chat memory
                    // .promptTemplate() // you can override default prompt template
                    .build();
            String answer = chain.execute(question);
            System.out.println(answer); // answer based on given information
        }
    }
    private static Path toPath(String fileName) {
        try {
            Path filePath = Paths.get(fileName);
            URL fileURL = filePath.toUri().toURL();
            return Paths.get(fileURL.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        } catch (MalformedURLException e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        return null;
    }
    
    
}
 | 
	[
  "dev.langchain4j.chain.ConversationalRetrievalChain.builder",
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((1554, 1632), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1554, 1624), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1554, 1593), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1788, 2060), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1788, 2023), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1788, 1963), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1788, 1903), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2248, 2656), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2248, 2441), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2248, 2340), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3503, 3759), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3503, 3726), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3503, 3670), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3503, 3614), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3940, 4356), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3940, 4153), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3940, 4056), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] | 
| 
	package org.agoncal.fascicle.langchain4j.vectordb.cassandra;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.cassandra.CassandraEmbeddingStore;
import java.util.List;
// tag::adocSkip[]
/**
 * @author Antonio Goncalves
 * http://www.antoniogoncalves.org
 * --
 */
// end::adocSkip[]
public class MusicianService {
  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useCassandraToStoreEmbeddings();
  }
  public void useCassandraToStoreEmbeddings() {
    System.out.println("### useCassandraToStoreEmbeddings");
    // tag::adocSnippet[]
    EmbeddingStore<TextSegment> embeddingStore =
      CassandraEmbeddingStore.builder()
        .port(6334)
        .build();
    EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
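    // Embed two sample segments, store them, then look up the single closest match for a query.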
    TextSegment segment1 = TextSegment.from("I've been to France twice.");
    Embedding embedding1 = embeddingModel.embed(segment1).content();
    embeddingStore.add(embedding1, segment1);
    TextSegment segment2 = TextSegment.from("New Delhi is the capital of India.");
    Embedding embedding2 = embeddingModel.embed(segment2).content();
    embeddingStore.add(embedding2, segment2);
    Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content();
    List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1);
    EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0);
    System.out.println(embeddingMatch.score());
    System.out.println(embeddingMatch.embedded().text());
    // end::adocSnippet[]
  }
}
 | 
	[
  "dev.langchain4j.store.embedding.cassandra.CassandraEmbeddingStore.builder"
] | 
	[((986, 1056), 'dev.langchain4j.store.embedding.cassandra.CassandraEmbeddingStore.builder'), ((986, 1039), 'dev.langchain4j.store.embedding.cassandra.CassandraEmbeddingStore.builder')] | 
| 
	package embedding.model;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.vertexai.VertexAiEmbeddingModel;
public class VertexAiEmbeddingModelExample {
    public static void main(String[] args) {
        EmbeddingModel embeddingModel = VertexAiEmbeddingModel.builder()
                .endpoint("us-central1-aiplatform.googleapis.com:443")
                .project("langchain4j")
                .location("us-central1")
                .publisher("google")
                .modelName("textembedding-gecko@001")
                .build();
        Response<Embedding> response = embeddingModel.embed("Hello, how are you?");
        System.out.println(response);
    }
}
 | 
	[
  "dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder"
] | 
	[((371, 671), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 646), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 592), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 555), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 514), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 474), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder')] | 
| 
	package com.example;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore;
public class Chroma {
    public static final EmbeddingStore<TextSegment> embeddingStore =
            ChromaEmbeddingStore.builder()
                    .baseUrl("http://localhost:8000/")
                    .collectionName("my-collection")
                    .build();
    public static final EmbeddingModel embeddingModel =
            OpenAiEmbeddingModel.builder()
                    .apiKey("API_KEY")
                    .modelName("text-embedding-ada-002")
                    .build();
}
 | 
	[
  "dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder",
  "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | 
	[((412, 579), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((412, 550), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((412, 497), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((650, 805), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((650, 776), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((650, 719), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] | 
| 
	package io.quarkiverse.langchain4j.deployment;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.ARRAY;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.BOOLEAN;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.INTEGER;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.NUMBER;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.OBJECT;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.STRING;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.description;
import static dev.langchain4j.agent.tool.JsonSchemaProperty.enums;
import static java.util.Arrays.stream;
import static java.util.stream.Collectors.toList;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.function.BiFunction;
import java.util.stream.Collectors;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationTarget;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.MethodInfo;
import org.jboss.jandex.MethodParameterInfo;
import org.jboss.jandex.Type;
import org.jboss.logging.Logger;
import org.objectweb.asm.ClassVisitor;
import org.objectweb.asm.Opcodes;
import dev.langchain4j.agent.tool.JsonSchemaProperty;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolMemoryId;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import io.quarkiverse.langchain4j.runtime.ToolsRecorder;
import io.quarkiverse.langchain4j.runtime.prompt.Mappable;
import io.quarkiverse.langchain4j.runtime.tool.ToolInvoker;
import io.quarkiverse.langchain4j.runtime.tool.ToolMethodCreateInfo;
import io.quarkiverse.langchain4j.runtime.tool.ToolParametersObjectSubstitution;
import io.quarkiverse.langchain4j.runtime.tool.ToolSpanWrapper;
import io.quarkiverse.langchain4j.runtime.tool.ToolSpecificationObjectSubstitution;
import io.quarkus.arc.deployment.AdditionalBeanBuildItem;
import io.quarkus.arc.deployment.ValidationPhaseBuildItem;
import io.quarkus.deployment.Capabilities;
import io.quarkus.deployment.Capability;
import io.quarkus.deployment.GeneratedClassGizmoAdaptor;
import io.quarkus.deployment.annotations.BuildProducer;
import io.quarkus.deployment.annotations.BuildStep;
import io.quarkus.deployment.annotations.ExecutionTime;
import io.quarkus.deployment.annotations.Record;
import io.quarkus.deployment.builditem.BytecodeTransformerBuildItem;
import io.quarkus.deployment.builditem.CombinedIndexBuildItem;
import io.quarkus.deployment.builditem.GeneratedClassBuildItem;
import io.quarkus.deployment.builditem.nativeimage.ReflectiveClassBuildItem;
import io.quarkus.deployment.recording.RecorderContext;
import io.quarkus.gizmo.ClassCreator;
import io.quarkus.gizmo.ClassOutput;
import io.quarkus.gizmo.ClassTransformer;
import io.quarkus.gizmo.FieldDescriptor;
import io.quarkus.gizmo.MethodCreator;
import io.quarkus.gizmo.MethodDescriptor;
import io.quarkus.gizmo.ResultHandle;
public class ToolProcessor {
    private static final Logger log = Logger.getLogger(ToolProcessor.class);
    private static final DotName TOOL = DotName.createSimple(Tool.class);
    private static final DotName TOOL_MEMORY_ID = DotName.createSimple(ToolMemoryId.class);
    private static final DotName P = DotName.createSimple(dev.langchain4j.agent.tool.P.class);
    private static final MethodDescriptor METHOD_METADATA_CTOR = MethodDescriptor
            .ofConstructor(ToolInvoker.MethodMetadata.class, boolean.class, Map.class, Integer.class);
    private static final MethodDescriptor HASHMAP_CTOR = MethodDescriptor.ofConstructor(HashMap.class);
    public static final MethodDescriptor MAP_PUT = MethodDescriptor.ofMethod(Map.class, "put", Object.class, Object.class,
            Object.class);
    @BuildStep
    public void telemetry(Capabilities capabilities, BuildProducer<AdditionalBeanBuildItem> additionalBeanProducer) {
        var addOpenTelemetrySpan = capabilities.isPresent(Capability.OPENTELEMETRY_TRACER);
        if (addOpenTelemetrySpan) {
            additionalBeanProducer.produce(AdditionalBeanBuildItem.builder().addBeanClass(ToolSpanWrapper.class).build());
        }
    }
    @BuildStep
    @Record(ExecutionTime.STATIC_INIT)
    public void handleTools(CombinedIndexBuildItem indexBuildItem,
            ToolsRecorder recorder,
            RecorderContext recorderContext,
            BuildProducer<BytecodeTransformerBuildItem> transformerProducer,
            BuildProducer<GeneratedClassBuildItem> generatedClassProducer,
            BuildProducer<ReflectiveClassBuildItem> reflectiveClassProducer,
            BuildProducer<ValidationPhaseBuildItem.ValidationErrorBuildItem> validation,
            BuildProducer<ToolsMetadataBuildItem> toolsMetadataProducer) {
        recorderContext.registerSubstitution(ToolSpecification.class, ToolSpecificationObjectSubstitution.Serialized.class,
                ToolSpecificationObjectSubstitution.class);
        recorderContext.registerSubstitution(ToolParameters.class, ToolParametersObjectSubstitution.Serialized.class,
                ToolParametersObjectSubstitution.class);
        IndexView index = indexBuildItem.getIndex();
        Collection<AnnotationInstance> instances = index.getAnnotations(TOOL);
        Map<String, List<ToolMethodCreateInfo>> metadata = new HashMap<>();
        List<String> generatedInvokerClasses = new ArrayList<>();
        List<String> generatedArgumentMapperClasses = new ArrayList<>();
        if (!instances.isEmpty()) {
            ClassOutput classOutput = new GeneratedClassGizmoAdaptor(generatedClassProducer, true);
            Map<DotName, List<MethodInfo>> methodsPerClass = new HashMap<>();
            for (AnnotationInstance instance : instances) {
                if (instance.target().kind() != AnnotationTarget.Kind.METHOD) {
                    continue;
                }
                MethodInfo methodInfo = instance.target().asMethod();
                ClassInfo classInfo = methodInfo.declaringClass();
                if (classInfo.isInterface() || Modifier.isAbstract(classInfo.flags())) {
                    validation.produce(
                            new ValidationPhaseBuildItem.ValidationErrorBuildItem(new IllegalStateException(
                                    "@Tool is only supported on non-abstract classes, all other usages are ignored. Offending method is '"
                                            + methodInfo.declaringClass().name().toString() + "#" + methodInfo.name() + "'")));
                    continue;
                }
                DotName declaringClassName = classInfo.name();
                methodsPerClass.computeIfAbsent(declaringClassName, (n -> new ArrayList<>())).add(methodInfo);
            }
            boolean validationErrorFound = false;
            Map<String, ClassInfo> discoveredTools = new HashMap<>();
            for (var entry : methodsPerClass.entrySet()) {
                DotName className = entry.getKey();
                List<MethodInfo> toolMethods = entry.getValue();
                List<MethodInfo> privateMethods = new ArrayList<>();
                for (MethodInfo toolMethod : toolMethods) {
                    // Validation
                    // - Must not have another tool with the same method name
                    // - Must have at least one parameter
                    if (discoveredTools.containsKey(toolMethod.name())) {
                        validation.produce(
                                new ValidationPhaseBuildItem.ValidationErrorBuildItem(new IllegalStateException(
                                        "A tool with the name '" + toolMethod.name() + "' from class '"
                                                + className + "' is already declared in class '"
                                                + discoveredTools.get(toolMethod.name())
                                                + "'. Tools method name must be unique.")));
                        validationErrorFound = true;
                        continue;
                    }
                    discoveredTools.put(toolMethod.name(), toolMethod.declaringClass());
                    if (Modifier.isPrivate(toolMethod.flags())) {
                        privateMethods.add(toolMethod);
                    }
                }
                if (!privateMethods.isEmpty()) {
                    transformerProducer.produce(new BytecodeTransformerBuildItem(className.toString(),
                            new RemovePrivateFromMethodsVisitor(privateMethods)));
                }
                if (validationErrorFound) {
                    return;
                }
                for (MethodInfo toolMethod : toolMethods) {
                    AnnotationInstance instance = toolMethod.annotation(TOOL);
                    AnnotationValue nameValue = instance.value("name");
                    AnnotationValue descriptionValue = instance.value();
                    String toolName = getToolName(nameValue, toolMethod);
                    String toolDescription = getToolDescription(descriptionValue);
                    ToolSpecification.Builder builder = ToolSpecification.builder()
                            .name(toolName)
                            .description(toolDescription);
                    MethodParameterInfo memoryIdParameter = null;
                    for (MethodParameterInfo parameter : toolMethod.parameters()) {
                        if (parameter.hasAnnotation(TOOL_MEMORY_ID)) {
                            memoryIdParameter = parameter;
                            continue;
                        }
                        builder.addParameter(parameter.name(), toJsonSchemaProperties(parameter, index));
                    }
                    Map<String, Integer> nameToParamPosition = toolMethod.parameters().stream().collect(
                            Collectors.toMap(MethodParameterInfo::name, i -> Integer.valueOf(i.position())));
                    String methodSignature = createUniqueSignature(toolMethod);
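                    // Generate a bytecode invoker and an argument-mapper class for this tool method;
                    // their names are collected so they can be registered for reflection further down.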
                    String invokerClassName = generateInvoker(toolMethod, classOutput, nameToParamPosition,
                            memoryIdParameter != null ? memoryIdParameter.position() : null, methodSignature);
                    generatedInvokerClasses.add(invokerClassName);
                    String argumentMapperClassName = generateArgumentMapper(toolMethod, classOutput,
                            methodSignature);
                    generatedArgumentMapperClasses.add(argumentMapperClassName);
                    ToolSpecification toolSpecification = builder.build();
                    ToolMethodCreateInfo methodCreateInfo = new ToolMethodCreateInfo(
                            toolMethod.name(), invokerClassName,
                            toolSpecification, argumentMapperClassName);
                    metadata.computeIfAbsent(className.toString(), (c) -> new ArrayList<>()).add(methodCreateInfo);
                }
            }
        }
        if (!generatedInvokerClasses.isEmpty()) {
            reflectiveClassProducer.produce(ReflectiveClassBuildItem
                    .builder(generatedInvokerClasses.toArray(String[]::new))
                    .constructors(true)
                    .build());
        }
        if (!generatedArgumentMapperClasses.isEmpty()) {
            reflectiveClassProducer.produce(ReflectiveClassBuildItem
                    .builder(generatedArgumentMapperClasses.toArray(String[]::new))
                    .fields(true)
                    .constructors(true)
                    .build());
        }
        toolsMetadataProducer.produce(new ToolsMetadataBuildItem(metadata));
        recorder.setMetadata(metadata);
    }
    private static String createUniqueSignature(MethodInfo toolMethod) {
        StringBuilder sigBuilder = new StringBuilder();
        sigBuilder.append(toolMethod.name())
                .append(toolMethod.returnType().name().toString());
        for (MethodParameterInfo t : toolMethod.parameters()) {
            sigBuilder.append(t.type().name().toString());
        }
        return sigBuilder.toString();
    }
    private static String getToolName(AnnotationValue nameValue, MethodInfo methodInfo) {
        if (nameValue == null) {
            return methodInfo.name();
        }
        String annotationValue = nameValue.asString();
        if (annotationValue.isEmpty()) {
            return methodInfo.name();
        }
        return annotationValue;
    }
    private String getToolDescription(AnnotationValue descriptionValue) {
        if (descriptionValue == null) {
            return "";
        }
        return String.join("\n", descriptionValue.asStringArray());
    }
    private static String generateInvoker(MethodInfo methodInfo, ClassOutput classOutput,
            Map<String, Integer> nameToParamPosition, Short memoryIdParamPosition, String methodSignature) {
        String implClassName = methodInfo.declaringClass().name() + "$$QuarkusInvoker$" + methodInfo.name() + "_"
                + HashUtil.sha1(methodSignature);
        try (ClassCreator classCreator = ClassCreator.builder()
                .classOutput(classOutput)
                .className(implClassName)
                .interfaces(ToolInvoker.class)
                .build()) {
            MethodCreator invokeMc = classCreator.getMethodCreator(
                    MethodDescriptor.ofMethod(implClassName, "invoke", Object.class, Object.class, Object[].class));
            ResultHandle result;
            if (methodInfo.parametersCount() > 0) {
                List<ResultHandle> argumentHandles = new ArrayList<>(methodInfo.parametersCount());
                for (int i = 0; i < methodInfo.parametersCount(); i++) {
                    argumentHandles.add(invokeMc.readArrayValue(invokeMc.getMethodParam(1), i));
                }
                ResultHandle[] targetMethodHandles = argumentHandles.toArray(new ResultHandle[0]);
                result = invokeMc.invokeVirtualMethod(MethodDescriptor.of(methodInfo), invokeMc.getMethodParam(0),
                        targetMethodHandles);
            } else {
                result = invokeMc.invokeVirtualMethod(MethodDescriptor.of(methodInfo), invokeMc.getMethodParam(0));
            }
            boolean toolReturnsVoid = methodInfo.returnType().kind() == Type.Kind.VOID;
            if (toolReturnsVoid) {
                invokeMc.returnValue(invokeMc.load("Success"));
            } else {
                invokeMc.returnValue(result);
            }
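            // Emit methodMetadata(): exposes whether the tool returns void, the parameter-name -> position
            // map, and the position of the @ToolMemoryId parameter (or null if absent).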
            MethodCreator methodMetadataMc = classCreator
                    .getMethodCreator(MethodDescriptor.ofMethod(implClassName, "methodMetadata",
                            ToolInvoker.MethodMetadata.class));
            ResultHandle nameToParamPositionHandle = methodMetadataMc.newInstance(HASHMAP_CTOR);
            for (var entry : nameToParamPosition.entrySet()) {
                methodMetadataMc.invokeInterfaceMethod(MAP_PUT, nameToParamPositionHandle,
                        methodMetadataMc.load(entry.getKey()),
                        methodMetadataMc.load(entry.getValue()));
            }
            ResultHandle resultHandle = methodMetadataMc.newInstance(METHOD_METADATA_CTOR,
                    methodMetadataMc.load(toolReturnsVoid),
                    nameToParamPositionHandle,
                    memoryIdParamPosition != null ? methodMetadataMc.load(Integer.valueOf(memoryIdParamPosition))
                            : methodMetadataMc.loadNull());
            methodMetadataMc.returnValue(resultHandle);
        }
        return implClassName;
    }
    private String generateArgumentMapper(MethodInfo methodInfo, ClassOutput classOutput,
            String methodSignature) {
        String implClassName = methodInfo.declaringClass().name() + "$$QuarkusToolArgumentMapper$" + methodInfo.name() + "_"
                + HashUtil.sha1(methodSignature);
        try (ClassCreator classCreator = ClassCreator.builder()
                .classOutput(classOutput)
                .className(implClassName)
                .interfaces(Mappable.class)
                .build()) {
            List<FieldDescriptor> fieldDescriptors = new ArrayList<>();
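            // Mirror every tool-method parameter as a public field so the JSON arguments
            // can be bound to an instance of the generated mapper class.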
            for (MethodParameterInfo parameter : methodInfo.parameters()) {
                FieldDescriptor fieldDescriptor = FieldDescriptor.of(implClassName, parameter.name(),
                        parameter.type().name().toString());
                fieldDescriptors.add(fieldDescriptor);
                classCreator.getFieldCreator(fieldDescriptor).setModifiers(Modifier.PUBLIC);
            }
            MethodCreator mc = classCreator
                    .getMethodCreator(MethodDescriptor.ofMethod(implClassName, "obtainFieldValuesMap", Map.class));
            ResultHandle mapHandle = mc.newInstance(MethodDescriptor.ofConstructor(HashMap.class));
            for (FieldDescriptor field : fieldDescriptors) {
                ResultHandle fieldValue = mc.readInstanceField(field, mc.getThis());
                mc.invokeInterfaceMethod(MAP_PUT, mapHandle, mc.load(field.getName()), fieldValue);
            }
            mc.returnValue(mapHandle);
        }
        return implClassName;
    }
    private Iterable<JsonSchemaProperty> toJsonSchemaProperties(MethodParameterInfo parameter, IndexView index) {
        Type type = parameter.type();
        DotName typeName = parameter.type().name();
        AnnotationInstance pInstance = parameter.annotation(P);
        JsonSchemaProperty description = pInstance == null ? null : description(pInstance.value().asString());
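        // Map the Java parameter type onto the closest JSON schema type; enums are
        // rendered as STRING together with the list of allowed constants.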
        if (DotNames.STRING.equals(typeName) || DotNames.CHARACTER.equals(typeName)
                || DotNames.PRIMITIVE_CHAR.equals(typeName)) {
            return removeNulls(STRING, description);
        }
        if (DotNames.BOOLEAN.equals(typeName) || DotNames.PRIMITIVE_BOOLEAN.equals(typeName)) {
            return removeNulls(BOOLEAN, description);
        }
        if (DotNames.BYTE.equals(typeName) || DotNames.PRIMITIVE_BYTE.equals(typeName)
                || DotNames.SHORT.equals(typeName) || DotNames.PRIMITIVE_SHORT.equals(typeName)
                || DotNames.INTEGER.equals(typeName) || DotNames.PRIMITIVE_INT.equals(typeName)
                || DotNames.LONG.equals(typeName) || DotNames.PRIMITIVE_LONG.equals(typeName)
                || DotNames.BIG_INTEGER.equals(typeName)) {
            return removeNulls(INTEGER, description);
        }
        // TODO put constraints on min and max?
        if (DotNames.FLOAT.equals(typeName) || DotNames.PRIMITIVE_FLOAT.equals(typeName)
                || DotNames.DOUBLE.equals(typeName) || DotNames.PRIMITIVE_DOUBLE.equals(typeName)
                || DotNames.BIG_DECIMAL.equals(typeName)) {
            return removeNulls(NUMBER, description);
        }
        if ((type.kind() == Type.Kind.ARRAY)
                || DotNames.LIST.equals(typeName)
                || DotNames.SET.equals(typeName)) { // TODO something else?
            return removeNulls(ARRAY, description); // TODO provide type of array?
        }
        if (isEnum(type, index)) {
            return removeNulls(STRING, enums(enumConstants(type)), description);
        }
        return removeNulls(OBJECT, description); // TODO provide internals
    }
    private Iterable<JsonSchemaProperty> removeNulls(JsonSchemaProperty... properties) {
        return stream(properties)
                .filter(Objects::nonNull)
                .collect(toList());
    }
    private boolean isEnum(Type returnType, IndexView index) {
        if (returnType.kind() != Type.Kind.CLASS) {
            return false;
        }
        ClassInfo maybeEnum = index.getClassByName(returnType.name());
        return maybeEnum != null && maybeEnum.isEnum();
    }
    private static Object[] enumConstants(Type type) {
        return JandexUtil.load(type, Thread.currentThread().getContextClassLoader()).getEnumConstants();
    }
    /**
     * Simply removes the {@code private} modifier from tool methods
     */
    private static class RemovePrivateFromMethodsVisitor implements
            BiFunction<String, ClassVisitor, ClassVisitor> {
        private final List<MethodInfo> privateMethods;
        private RemovePrivateFromMethodsVisitor(List<MethodInfo> privateMethods) {
            this.privateMethods = privateMethods;
        }
        @Override
        public ClassVisitor apply(String className, ClassVisitor classVisitor) {
            ClassTransformer transformer = new ClassTransformer(className);
            for (MethodInfo method : privateMethods) {
                transformer.modifyMethod(MethodDescriptor.of(method)).removeModifiers(Opcodes.ACC_PRIVATE);
            }
            return transformer.applyTo(classVisitor);
        }
    }
}
 | 
	[
  "dev.langchain4j.agent.tool.ToolSpecification.builder"
] | 
	[((4315, 4392), 'io.quarkus.arc.deployment.AdditionalBeanBuildItem.builder'), ((4315, 4384), 'io.quarkus.arc.deployment.AdditionalBeanBuildItem.builder'), ((9418, 9547), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((9418, 9489), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13423, 13601), 'io.quarkus.gizmo.ClassCreator.builder'), ((13423, 13576), 'io.quarkus.gizmo.ClassCreator.builder'), ((13423, 13529), 'io.quarkus.gizmo.ClassCreator.builder'), ((13423, 13487), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16454), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16429), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16385), 'io.quarkus.gizmo.ClassCreator.builder'), ((16279, 16343), 'io.quarkus.gizmo.ClassCreator.builder')] | 
| 
	import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
@Testcontainers
class OllamaChatModelTest {
    /**
     * The first time you run this test, it will download a Docker image with Ollama and a model.
     * It might take a few minutes.
     * <p>
     * This test uses modified Ollama Docker images, which already contain models inside them.
     * All images with pre-packaged models are available here: https://hub.docker.com/repositories/langchain4j
     * <p>
     * However, you are not restricted to these images.
     * You can run any model from https://ollama.ai/library by following these steps:
     * 1. Run "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama"
     * 2. Run "docker exec -it ollama ollama run mistral" <- specify the desired model here
     */
    static String MODEL_NAME = "orca-mini"; // try "mistral", "llama2", "codellama", "phi" or "tinyllama"
    @Container
    static GenericContainer<?> ollama = new GenericContainer<>("langchain4j/ollama-" + MODEL_NAME + ":latest")
            .withExposedPorts(11434);
    @Test
    void simple_example() {
        ChatLanguageModel model = OllamaChatModel.builder()
                .baseUrl(baseUrl())
                .modelName(MODEL_NAME)
                .build();
        String answer = model.generate("Provide 3 short bullet points explaining why Java is awesome");
        System.out.println(answer);
    }
    @Test
    void json_output_example() {
        ChatLanguageModel model = OllamaChatModel.builder()
                .baseUrl(baseUrl())
                .modelName(MODEL_NAME)
                .format("json")
                .build();
        String json = model.generate("Give me a JSON with 2 fields: name and age of a John Doe, 42");
        System.out.println(json);
    }
    static String baseUrl() {
        return String.format("http://%s:%d", ollama.getHost(), ollama.getFirstMappedPort());
    }
}
 | 
	[
  "dev.langchain4j.model.ollama.OllamaChatModel.builder"
] | 
	[((1404, 1529), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1404, 1504), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1404, 1465), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1915), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1890), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1858), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((1758, 1819), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')] | 
| 
	package com.honvay.flychat.langchain.llama.embedding;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModelName;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.List;
import java.util.stream.Collectors;
@Service
public class OpenAiEmbeddingService implements EmbeddingService {
    private final String apiKey;
    public OpenAiEmbeddingService(@Value("${openai.apiKey:}") String apiKey) {
        this.apiKey = apiKey;
    }
    @Override
    public List<float[]> embed(List<String> texts) {
        List<TextSegment> segments = texts.stream()
                .map(TextSegment::from)
                .collect(Collectors.toList());
        EmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder()
                .apiKey(apiKey) // https://platform.openai.com/account/api-keys
                .modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002)
                .timeout(Duration.ofSeconds(15))
                .build();
        List<Embedding> embeddings = embeddingModel.embedAll(segments);
        return embeddings
                .stream()
                .map(Embedding::vector)
                .collect(Collectors.toList());
    }
    @Override
    public float[] embed(String text) {
        EmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder()
                .apiKey(apiKey) // https://platform.openai.com/account/api-keys
                .modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002)
                .timeout(Duration.ofSeconds(15))
                .build();
        Embedding embedding = embeddingModel.embed(text);
        return embedding.vector();
    }
}
 | 
	[
  "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | 
	[((981, 1228), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((981, 1204), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((981, 1156), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((981, 1042), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1790), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1765), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1716), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1539, 1601), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] | 
| 
	package com.wxm158.promptgeneration.service;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.PropertyAccessor;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import com.google.common.reflect.TypeToken;
import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel;
import com.wxm158.promptgeneration.mapper.QuestionMapper;
import com.wxm158.promptgeneration.model.dto.ChatRequest;
import com.wxm158.promptgeneration.model.dto.QuestionGeneration;
import com.wxm158.promptgeneration.model.dto.TopicResponse;
import com.wxm158.promptgeneration.model.entity.Question;
import com.wxm158.promptgeneration.repository.QuestionRepository;
import com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.SystemMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringEscapeUtils;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import java.io.FileWriter;
import java.io.IOException;
import java.lang.reflect.Type;
import java.net.Proxy;
import java.nio.file.Paths;
import java.time.Duration;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.internal.Utils.getOrDefault;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@Slf4j
@Service
@RequiredArgsConstructor
public class EmbeddingService {
    @Value("${OPENAI_API_KEY}")
    private String API_KEY;
    @Value("${WEAVIATE_API_KEY}")
    private String WEAVIATE_API_KEY;
    private static final String DEFAULT_NAMESPACE = "default"; // do not change, will break backward compatibility!
    private static final String DEFAULT_METADATA_TEXT_KEY = "text_segment"; // do not change, will break backward compatibility!
    String baseUrl = "https://api.openai.com/v1";
    String modelName = "text-embedding-ada-002";  // You can change this if needed
    Duration timeout = Duration.ofSeconds(120);  // You can change this if needed
    Integer maxRetries = 3;  // You can change this if needed
    Proxy proxy = null;  // You can provide a proxy if needed
    Boolean logRequests = true;  // Set to true if you want to log requests
    Boolean logResponses = true;  // Set to true if you want to log responses
    private final QuestionRepository questionRepository;
    private final QuestionMapper questionMapper;
//    Create embedding model
    private EmbeddingModel createEmbeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .baseUrl(baseUrl)
                .apiKey(API_KEY)
                .modelName(modelName)
                .timeout(timeout)
                .maxRetries(maxRetries)
                .proxy(proxy)
                .logRequests(logRequests)
                .logResponses(logResponses)
                .build();
    }
//    Create embedding store
    private EmbeddingStore<TextSegment> createEmbeddingStore() {
        return WeaviateEmbeddingStore.builder()
                .apiKey(WEAVIATE_API_KEY)
                .scheme("https")
                .host("question-gen-wwxbinax.weaviate.network")
                .avoidDups(true)
                .consistencyLevel("ALL")
                .build();
    }
//    Create Chat Model
    private OpenAiChatModel createChatModel() {
        return OpenAiChatModel.builder()
                .apiKey(API_KEY)
                // old key 8T6eTtmk
                .modelName("ft:gpt-3.5-turbo-1106:personal::8VzKieWR")
                .timeout(timeout)
                .temperature(0.3)
                .build();
    }
    private String format(List<TextSegment> relevantSegments) {
        return relevantSegments.stream()
                .map(TextSegment::text)
                .map(segment -> "..." + segment + "...")
                .collect(joining("\n\n"));
    }
//    Create question from embedding and fine tuning
    public List<QuestionGeneration> createQuestions(ChatRequest chatRequest) {
        String message = chatRequest.getTopic();
        String questionType = chatRequest.getQuestionType();
        String questionAmount = chatRequest.getQuestionAmount();
//        Initialise fine tuned chat model, embedding model, embedding store.
        OpenAiChatModel chatModel = createChatModel();
        EmbeddingModel embeddingModel = createEmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = createEmbeddingStore();
//        Retrieve relevant text from the embedding store (at most one segment, per the max-results argument below).
        Retriever<TextSegment> retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, 1);
        String information = format(retriever.findRelevant(message)).replace("\n", " ");
//        Create the prompt in format used in training fine-tuned model.
        ChatMessage[] messagesArray = {
                new SystemMessage("You are an A-level Computer Science teacher. You aim to generate various questions for your students."),
                new UserMessage("SCOPE: " + message + ", QUESTION_TYPE: " + questionType + ", QUESTION_AMOUNT: " + questionAmount +
                        ", TEXT: " + information)
        };
        List<ChatMessage> messages = new ArrayList<>(List.of(messagesArray));
        System.out.println(messages.toString());
//        Get response from model (json list of questions and answers)
        Response<AiMessage> response = chatModel.generate(messages);
        String stringResponse = response.content().text();
        System.out.println(stringResponse);
//        Map response to List of QuestionGeneration object for the frontend.
        int startIndex = stringResponse.indexOf("[");
        int endIndex = stringResponse.lastIndexOf("]");
        Gson gson = new Gson();
        Type type = new TypeToken<List<QuestionGeneration>>(){}.getType();
        List<QuestionGeneration> questions = gson.fromJson(stringResponse.substring(startIndex, endIndex + 1), type);
        for (QuestionGeneration question : questions) {
            question.setId((long) questions.indexOf(question));
            question.setQuestionType(questionType);
        }
        return questions;
    }
    // Save Questions in question-generation-db
    public List<Question> saveQuestions(List<QuestionGeneration> questions, String userId) {
        List<Question> questionList = questionMapper.mapQuestionGenerationsToQuestions(questions, userId);
        List<Question> savedQuestions = new ArrayList<>();
        for (Question question: questionList) {
            if (!questionRepository.existsByQuestionAndQuestionType(question.getQuestion(), question.getQuestionType())) {
                 savedQuestions.add(questionRepository.save(question));
            }
        }
        return savedQuestions;
    }
    public List<Question> getAllQuestions(String userId) {
        return questionRepository.findAllByUserId(Long.valueOf(userId));
//        return questionMapper.mapQuestionsToQuestionGenerations(questionList);
    }
}
 | 
	[
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((3990, 4340), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4315), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4271), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4229), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4199), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4159), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4125), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4087), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((3990, 4054), 'com.wxm158.promptgeneration.OpenAI.OpenAiEmbeddingModel.builder'), ((4457, 4727), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4702), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4661), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4628), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4564), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4457, 4531), 'com.wxm158.promptgeneration.weviate.WeaviateEmbeddingStore.builder'), ((4822, 5080), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 5055), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 5021), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 4987), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4822, 4880), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] | 
| 
	package com.gonnect.helpme.config;
import com.gonnect.helpme.agent.ReservationSupportAgent;
import com.gonnect.helpme.service.ReservationToolService;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
@Configuration
public class ReservationHelpMeApplicationConfigurer {
    /**
     * Run ReservationSupportApplicationTest to see simulated conversation with customer support agent
     */
    @Bean
    ReservationSupportAgent reservationSupportAgent(ChatLanguageModel chatLanguageModel,
                                                    ReservationToolService reservationToolService,
                                                    Retriever<TextSegment> retriever) {
        return AiServices.builder(ReservationSupportAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(reservationToolService)
                .retriever(retriever)
                .build();
    }
    @Bean
    Retriever<TextSegment> fetch(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        // You will need to adjust these parameters to find the optimal setting, which will depend on two main factors:
        // - The nature of your data
        // - The embedding model you are using
        int maxResultsRetrieved = 1;
        double minScore = 0.6;
        return EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, maxResultsRetrieved, minScore);
    }
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }
    @Bean
    EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) throws IOException {
        // Embedding Store Setup
        // --------------------
        // For demonstration purposes, the embedding store is populated
        // dynamically instead of being pre-filled with application data.
        // This allows the code to run self-sufficiently for demos.
        // The first step is initializing an embedding store.
        // For this example we use an in-memory implementation.
        // This stores the vector representations of text for similarity lookups.
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        // Load "Gonnect Support Bot" training guidelines as sample
        Resource resource = resourceLoader.getResource("classpath:gonnect-miles-terms-and-condition.txt");
        Document document = loadDocument(resource.getFile().toPath(), new TextDocumentParser());
        // Ingest Sample Document
        // ---------------------
        // 1. Split document into 100-token segments
        // 2. Convert text segments into vector embeddings
        // 3. Save embeddings in the store
        // The EmbeddingStoreIngestor automates this process of
        // analyzing text and populating the embedding store
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        ingestor.ingest(document);
        return embeddingStore;
    }
}
 | 
	[
  "dev.langchain4j.service.AiServices.builder",
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | 
	[((1951, 2237), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2212), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2174), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2127), 'dev.langchain4j.service.AiServices.builder'), ((1951, 2054), 'dev.langchain4j.service.AiServices.builder'), ((4356, 4561), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4356, 4536), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4356, 4488), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4356, 4440), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] | 
| 
	package com.anthonyquere.companionapi.completion.langchain;
import com.anthonyquere.companionapi.completion.langchain.services.Summary;
import com.anthonyquere.companionapi.completion.langchain.services.TalkWithCompanion;
import com.anthonyquere.companionapi.crud.companions.Companion;
import com.anthonyquere.companionapi.crud.message.MessageRepository;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.service.AiServices;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class CompanionAiService {
    @Bean
    public TalkWithCompanion buildAiCompanionService(
            ChatLanguageModel model,
            MessageRepository messageRepository
    ) {
        return AiServices.builder(TalkWithCompanion.class)
                .chatLanguageModel(model)
                .chatMemoryProvider(companion -> new CompanionChatMemory((Companion) companion, messageRepository))
                .build();
    }
    @Bean
    public Summary buildAiSummaryService(
            ChatLanguageModel model
    ) {
        return AiServices.builder(Summary.class)
                .chatLanguageModel(model)
                .build();
    }
}
 | 
	[
  "dev.langchain4j.service.AiServices.builder"
] | 
	[((789, 1015), 'dev.langchain4j.service.AiServices.builder'), ((789, 990), 'dev.langchain4j.service.AiServices.builder'), ((789, 874), 'dev.langchain4j.service.AiServices.builder'), ((1135, 1235), 'dev.langchain4j.service.AiServices.builder'), ((1135, 1210), 'dev.langchain4j.service.AiServices.builder')] | 
| 
	package my.samples;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;
public class InMemoryEmbeddingManualExample {
    public static final String ANSI_GREEN = "\u001B[32m";
    public static final String ANSI_RESET = "\u001B[0m";
    public static final String ANSI_YELLOW = "\u001B[33m";
    public static void main(String[] args) {
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(300, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
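        // The ingestor splits each document into small segments, embeds them and stores the vectors in memory.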
        Path filePath = toPath("example-files/2025_US_F150_Warranty_Guide_ENG_V1.pdf");
        Document document = FileSystemDocumentLoader.loadDocument(filePath, new ApachePdfBoxDocumentParser());
        document.metadata().add("fileName", filePath.getFileName().toString());
        document.metadata().add("filePath", filePath.toString());
        document.metadata().add("company", "FORD");
        document.metadata().add("product", "F150");
        document.metadata().add("language", "ENG");
        document.metadata().add("version", "V1");
        document.metadata().add("year", "2025");
        document.metadata().add("type", "Warranty Guide");
        document.metadata().add("country", "US");
        document.metadata().add("category", "Automotive");
        ingestor.ingest(document);
        Scanner scanner = new Scanner(System.in);
        while (true) {
            System.out.println("Enter your query (or type 'exit' to quit):");
            // Wait for the user to input a query
            String query = scanner.nextLine();
            // Check if the user wants to exit the program
            if ("exit".equalsIgnoreCase(query)) {
                System.out.println("Exiting program.");
                break;
            }
            // Who Pays For Warranty Repairs?
            // What is the warranty period?
            // What is the warranty period for the powertrain?
            // What is the warranty period for the powertrain?
            // Process the query and get an answer
            Embedding queryEmbedding = embeddingModel.embed(query).content();
            List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 5);
            System.out.println("Start ---------   Matching Context from Document: 2025_US_F150_Warranty_Guide_ENG_V1.pdf");
            List<String> answers = new ArrayList<>();
            for (EmbeddingMatch<TextSegment> match : relevant) {
                System.out.println(match.score());
                answers.add(match.embedded().text());
                System.out.println(ANSI_GREEN+match.embedded().text()+ANSI_RESET);
                System.out.println("");
            }
            System.out.println("End ---------   Matching Context from Document: 2025_US_F150_Warranty_Guide_ENG_V1.pdf");
            if(!answers.isEmpty()){
                try {
                    System.out.println(ANSI_YELLOW+ RestClient.getAnswer(query, answers) + ANSI_RESET);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }
        // Close the scanner
        scanner.close();
        // In-memory embedding store can be serialized and deserialized to/from JSON
        String serializedStore = ((InMemoryEmbeddingStore<TextSegment>) embeddingStore).serializeToJson();
        System.out.println(serializedStore);
        // InMemoryEmbeddingStore<TextSegment> deserializedStore = InMemoryEmbeddingStore.fromJson(serializedStore);
        // In-memory embedding store can be serialized and deserialized to/from file
        // String filePath = "/home/me/embedding.store";
        // embeddingStore.serializeToFile(filePath);
        // InMemoryEmbeddingStore<TextSegment> deserializedStore = InMemoryEmbeddingStore.fromFile(filePath);
    }
    private static Path toPath(String fileName) {
        try {
            // Corrected path assuming files are in src/main/resources/example-files
            URL fileUrl = InMemoryEmbeddingManualExample.class.getClassLoader().getResource( fileName);
            if (fileUrl == null) {
                throw new RuntimeException("Resource not found: " + fileName);
            }
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to resolve URI for: " + fileName, e);
        }
    }
}
 | 
	[
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | 
	[((1444, 1668), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1444, 1643), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1444, 1595), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1444, 1547), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] | 
| 
	package org.agoncal.fascicle.langchain4j.accessing.bedrock;
import dev.langchain4j.model.bedrock.BedrockAnthropicChatModel;
import software.amazon.awssdk.regions.Region;
// tag::adocSkip[]
/**
 * @author Antonio Goncalves
 * http://www.antoniogoncalves.org
 * --
 */
// end::adocSkip[]
public class MusicianService {
  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useBedrockLanguageModelBuilder();
  }
  private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
  private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
  private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");
  private static final String PROMPT = "When was the first Beatles album released?";
  // ##############################
  // ### BEDROCK LANGUAGE MODEL ###
  // ##############################
  public void useBedrockLanguageModelBuilder() {
    System.out.println("### useBedrockLanguageModelBuilder");
    // tag::adocSnippet[]
    BedrockAnthropicChatModel model = BedrockAnthropicChatModel.builder()
      .build();
    // end::adocSnippet[]
    String completion = model.generate(PROMPT);
  }
  // ##########################
  // ### BEDROCK CHAT MODEL ###
  // ##########################
  public void useBedrockChatModelRequest() {
    System.out.println("### useBedrockChatModelRequest");
    // tag::adocRequest[]
    BedrockAnthropicChatModel model = BedrockAnthropicChatModel.builder()
      .maxRetries(3)
      .topP(1.0f)
      .temperature(0.9f)
      .model(BedrockAnthropicChatModel.Types.AnthropicClaudeV1)
      .maxTokens(100)
      .anthropicVersion("v2")
      .assistantPrompt("assistant prompt")
      .humanPrompt("human prompt")
      .region(Region.AF_SOUTH_1)
      .build();
    // end::adocRequest[]
    String completion = model.generate("When was the first Rolling Stones album released?");
    System.out.println(completion);
  }
}
 | 
	[
  "dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder"
] | 
	[((1142, 1192), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1880), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1865), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1832), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1797), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1754), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1724), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1702), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1638), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1613), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder'), ((1539, 1595), 'dev.langchain4j.model.bedrock.BedrockAnthropicChatModel.builder')] | 
| 
	package dev.langchain4j.model.qianfan;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.agent.tool.ToolParameters;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.*;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.internal.Utils;
import dev.langchain4j.model.qianfan.client.embedding.EmbeddingResponse;
import dev.langchain4j.model.qianfan.client.chat.Parameters;
import dev.langchain4j.model.output.FinishReason;
import dev.langchain4j.model.output.TokenUsage;
import dev.langchain4j.model.qianfan.client.chat.ChatCompletionResponse;
import dev.langchain4j.model.qianfan.client.chat.Message;
import dev.langchain4j.model.qianfan.client.chat.Role;
import dev.langchain4j.model.qianfan.client.chat.FunctionCall;
import dev.langchain4j.model.qianfan.client.chat.Function;
import dev.langchain4j.model.qianfan.client.completion.CompletionResponse;
import java.util.Collection;
import java.util.List;
import java.util.Optional;
import static dev.langchain4j.data.message.AiMessage.aiMessage;
import static dev.langchain4j.internal.Exceptions.illegalArgument;
import static dev.langchain4j.model.output.FinishReason.*;
import static java.util.stream.Collectors.toList;
public class InternalQianfanHelper {
    public static List<Function> toFunctions(Collection<ToolSpecification> toolSpecifications) {
        return toolSpecifications.stream()
                .map(InternalQianfanHelper::toFunction)
                .collect(toList());
    }
    private static Function toFunction(ToolSpecification toolSpecification) {
        return Function.builder()
                .name(toolSpecification.name())
                .description(toolSpecification.description())
                .parameters(toOpenAiParameters(toolSpecification.parameters()))
                .build();
    }
    private static Parameters toOpenAiParameters(ToolParameters toolParameters) {
        if (toolParameters == null) {
            return Parameters.builder().build();
        }
        return Parameters.builder()
                .properties(toolParameters.properties())
                .required(toolParameters.required())
                .build();
    }
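    // Converts a LangChain4j ChatMessage into a Qianfan Message; AI tool calls and tool results are mapped to Qianfan function calls.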
    public static Message toQianfanMessage(ChatMessage message) {
        if (message instanceof UserMessage) {
            UserMessage userMessage = (UserMessage) message;
            return Message.builder()
                    .role(Role.USER)
                    .content(userMessage.text())
                    .name(userMessage.name())
                    .build();
        }
        if (message instanceof AiMessage) {
            AiMessage aiMessage = (AiMessage) message;
            if (!aiMessage.hasToolExecutionRequests()) {
                return Message.builder()
                        .content(message.text())
                        .role(Role.ASSISTANT)
                        .build();
            }
            ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0);
            if (toolExecutionRequest.id() == null) {
                FunctionCall functionCall = FunctionCall.builder()
                        .name(toolExecutionRequest.name())
                        .arguments(toolExecutionRequest.arguments())
                        .build();
                return Message.builder()
                        .content(message.text())
                        .role(Role.ASSISTANT)
                        .functionCall(functionCall)
                        .build();
            }
        }
        if (message instanceof ToolExecutionResultMessage) {
            ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) message;
            FunctionCall functionCall = FunctionCall.builder()
                    .name(toolExecutionResultMessage.toolName())
                    .arguments(toolExecutionResultMessage.text())
                    .build();
            return Message.builder()
                    .content(message.text())
                    .role(Role.FUNCTION)
                    .name(functionCall.name())
                    .build();
        }
        throw illegalArgument("Unknown message type: " + message.type());
    }
    static TokenUsage tokenUsageFrom(ChatCompletionResponse response) {
        return Optional.of(response)
                .map(ChatCompletionResponse::getUsage)
                .map(usage -> new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens()))
                .orElse(null);
    }
    static TokenUsage tokenUsageFrom(CompletionResponse response) {
        return Optional.of(response)
                .map(CompletionResponse::getUsage)
                .map(usage -> new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens()))
                .orElse(null);
    }
    static TokenUsage tokenUsageFrom(EmbeddingResponse response) {
        return Optional.of(response)
                .map(EmbeddingResponse::getUsage)
                .map(usage -> new TokenUsage(usage.promptTokens(), usage.completionTokens(), usage.totalTokens()))
                .orElse(null);
    }
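    // Maps Qianfan finish-reason strings to LangChain4j FinishReason values; unrecognized values map to null.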
    public static FinishReason finishReasonFrom(String finishReason) {
        if(Utils.isNullOrBlank(finishReason)){
            return null;
        }
        switch (finishReason) {
            case "normal":
                return STOP;
            case "stop":
                return STOP;
            case "length":
                return LENGTH;
            case "content_filter":
                return CONTENT_FILTER;
            case "function_call":
                return TOOL_EXECUTION;
            default:
                return null;
        }
    }
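    // Builds an AiMessage from the chat response; a returned function call is wrapped in a ToolExecutionRequest.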
    public static AiMessage aiMessageFrom(ChatCompletionResponse response) {
        FunctionCall functionCall = response.getFunctionCall();
        if (functionCall != null) {
            ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
                    .name(functionCall.name())
                    .arguments(functionCall.arguments())
                    .build();
            return aiMessage(toolExecutionRequest);
        }
        return aiMessage(response.getResult());
    }
    static String getSystemMessage(List<ChatMessage> messages) {
        List<ChatMessage> systemMessages = messages.stream().filter(message -> message instanceof SystemMessage).collect(toList());
        if (systemMessages.size() > 1) {
            throw new RuntimeException("Multiple system messages are not supported");
        }
        if(Utils.isNullOrEmpty(systemMessages)){
            return  null;
        }
        return ((SystemMessage) systemMessages.get(0)).text();
    }
    public static List<Message> toOpenAiMessages(List<ChatMessage> messages) {
        return messages.stream()
                .filter(chatMessage -> !(chatMessage instanceof SystemMessage))
                .map(InternalQianfanHelper::toQianfanMessage)
                .collect(toList());
    }
}
 | 
	[
  "dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder",
  "dev.langchain4j.model.qianfan.client.chat.Message.builder",
  "dev.langchain4j.agent.tool.ToolExecutionRequest.builder",
  "dev.langchain4j.model.qianfan.client.chat.Parameters.builder",
  "dev.langchain4j.model.qianfan.client.chat.Function.builder",
  "dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests"
] | 
	[((1632, 1865), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((1632, 1840), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((1632, 1760), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((1632, 1698), 'dev.langchain4j.model.qianfan.client.chat.Function.builder'), ((2013, 2041), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2068, 2223), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2068, 2198), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2068, 2145), 'dev.langchain4j.model.qianfan.client.chat.Parameters.builder'), ((2424, 2602), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2424, 2573), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2424, 2527), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2424, 2478), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2797, 2942), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2797, 2909), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((2797, 2863), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3015, 3055), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3154, 3337), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3154, 3304), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3154, 3235), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3363, 3560), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3363, 3527), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3363, 3475), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3363, 3429), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((3799, 3993), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3799, 3960), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((3799, 3890), 'dev.langchain4j.model.qianfan.client.chat.FunctionCall.builder'), ((4019, 4214), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4019, 4181), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4019, 4130), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4019, 4085), 'dev.langchain4j.model.qianfan.client.chat.Message.builder'), ((4394, 4615), 'java.util.Optional.of'), ((4394, 4585), 'java.util.Optional.of'), ((4394, 4470), 'java.util.Optional.of'), ((4707, 4924), 'java.util.Optional.of'), ((4707, 4894), 'java.util.Optional.of'), ((4707, 4779), 'java.util.Optional.of'), ((5016, 5232), 'java.util.Optional.of'), ((5016, 5202), 'java.util.Optional.of'), ((5016, 5087), 'java.util.Optional.of'), ((6045, 6208), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6045, 6179), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6045, 6122), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] | 
| 
	package io.quarkiverse.langchain4j.runtime.tool;
import java.util.List;
import java.util.Map;
import dev.langchain4j.agent.tool.ToolParameters;
import io.quarkus.runtime.ObjectSubstitution;
import io.quarkus.runtime.annotations.RecordableConstructor;
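// Lets Quarkus record ToolParameters at build time by serializing it into a simple DTO
// and rebuilding it through the ToolParameters builder at runtime.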
public class ToolParametersObjectSubstitution
        implements ObjectSubstitution<ToolParameters, ToolParametersObjectSubstitution.Serialized> {
    @Override
    public Serialized serialize(ToolParameters obj) {
        return new Serialized(obj.type(), obj.properties(), obj.required());
    }
    @Override
    public ToolParameters deserialize(Serialized obj) {
        return ToolParameters.builder()
                .type(obj.type)
                .required(obj.required)
                .properties(obj.properties).build();
    }
    public static class Serialized {
        private final String type;
        private final Map<String, Map<String, Object>> properties;
        private final List<String> required;
        @RecordableConstructor
        public Serialized(String type, Map<String, Map<String, Object>> properties, List<String> required) {
            this.type = type;
            this.properties = properties;
            this.required = required;
        }
        public String getType() {
            return type;
        }
        public Map<String, Map<String, Object>> getProperties() {
            return properties;
        }
        public List<String> getRequired() {
            return required;
        }
    }
}
 | 
	[
  "dev.langchain4j.agent.tool.ToolParameters.builder"
] | 
	[((639, 787), 'dev.langchain4j.agent.tool.ToolParameters.builder'), ((639, 779), 'dev.langchain4j.agent.tool.ToolParameters.builder'), ((639, 735), 'dev.langchain4j.agent.tool.ToolParameters.builder'), ((639, 695), 'dev.langchain4j.agent.tool.ToolParameters.builder')] | 
| 
	package me.nzuguem.something.story.configurations.langchain;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.function.Supplier;
@ApplicationScoped
public class StoryRetrievalAugmentor implements Supplier<RetrievalAugmentor> {
    private final StoryContentRetriever retriever;
    public StoryRetrievalAugmentor(StoryContentRetriever retriever) {
        this.retriever = retriever;
    }
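    // Builds LangChain4j's default retrieval augmentor around the injected story content retriever.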
    @Override
    public RetrievalAugmentor get() {
        return DefaultRetrievalAugmentor.builder()
                .contentRetriever(this.retriever)
                .build();
    }
}
 | 
	[
  "dev.langchain4j.rag.DefaultRetrievalAugmentor.builder"
] | 
	[((585, 695), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((585, 670), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] | 
| 
	package org.example;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
public class _09_AIServices_05_ChatMemory {
    public static void main(String[] args) {
        OpenAiChatModel model = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_DEMO);
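        // The chatMemoryProvider creates a separate MessageWindowChatMemory for each @MemoryId value.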
        ChatAssistant assistant = AiServices.builder(ChatAssistant.class)
                .chatLanguageModel(model)
                .chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
                .build();
        System.out.println(assistant.chat(1, "Hello, my name is Michael"));
        System.out.println(assistant.chat(2, "Hello, my name is Karl"));
        System.out.println(assistant.chat(2, "What is my name?"));
        System.out.println(assistant.chat(1, "What is my name?"));
    }
}
interface ChatAssistant {
    String chat(@MemoryId int memoryId, @UserMessage String message);
} | 
	[
  "dev.langchain4j.service.AiServices.builder"
] | 
	[((529, 728), 'dev.langchain4j.service.AiServices.builder'), ((529, 703), 'dev.langchain4j.service.AiServices.builder'), ((529, 610), 'dev.langchain4j.service.AiServices.builder')] | 
| 
	package embedding.model;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.vertexai.VertexAiEmbeddingModel;
public class VertexAiEmbeddingModelExample {
    public static void main(String[] args) {
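        // Example endpoint, project, and location values; replace them with your own Google Cloud settings.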
        EmbeddingModel embeddingModel = VertexAiEmbeddingModel.builder()
                .endpoint("us-central1-aiplatform.googleapis.com:443")
                .project("neat-vent-381323")
                .location("us-central1")
                .publisher("google")
                .modelName("textembedding-gecko@002")
                .build();
        Response<Embedding> response = embeddingModel.embed("Hello, how are you?");
        System.out.println(response);
    }
}
 | 
	[
  "dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder"
] | 
	[((371, 676), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 651), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 597), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 560), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 519), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder'), ((371, 474), 'dev.langchain4j.model.vertexai.VertexAiEmbeddingModel.builder')] | 
| 
	package com.magicrepokit.chat;
import cn.hutool.core.util.StrUtil;
import cn.hutool.json.JSONArray;
import cn.hutool.json.ObjectMapper;
import com.magicrepokit.chat.agent.CustomerSupportAgent;
import com.magicrepokit.chat.component.GoogleSearch;
import com.magicrepokit.chat.component.LangchainComponent;
import com.magicrepokit.chat.service.tool.CalculatorService;
import com.magicrepokit.langchain.ElasticOperation;
import com.magicrepokit.langchain.config.ConfigProperties;
import com.magicrepokit.oss.OssTemplate;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.UrlDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.StreamingResponseHandler;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.test.context.ActiveProfiles;
import org.springframework.test.context.junit4.SpringRunner;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@SpringBootTest(webEnvironment = SpringBootTest.WebEnvironment.RANDOM_PORT)
@RunWith(SpringRunner.class)
@Slf4j
@ActiveProfiles("test")
@ComponentScan(basePackages = {"com.magicrepokit"})
public class ChatTest {
    @Autowired
    private ChatLanguageModel model;
    @Autowired
    CustomerSupportAgent agent;
    @Autowired
    EmbeddingModel embeddingModel;
    @Autowired
    ElasticOperation elasticOperation;
    @Autowired
    OssTemplate ossTemplate;
    @Autowired
    ConfigProperties langchainConfigProperties;
    @Autowired
    LangchainComponent langchainComponent;
    @Autowired
    GoogleSearch googleSearch;
    @Test
    public void testOssTemplate(){
        ossTemplate.makeBucket("test");
    }
    @Test
    public void simpleChat() {
        String response = model.generate("你好");
        System.out.println(response);
    }
    interface Assistant {
        @SystemMessage({"1.你是是一个数学专家,你只能回答数学方面的知识,如果用户内容与数学无关,你会只能回答:不知道!",
                "2.你可以使用工具类,使用之前你需要确定工具类的描述与用户问题相关,如果不相关,你会只能回答:不知道!"})
        String chat(String userMessage);
    }
    @Test
    public void chatWithTool() {
        Assistant build = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .tools(new CalculatorService())
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10)).build();
        String question = "5/4=?";
        String answer = build.chat(question);
        System.out.println(answer);
    }
    @Test
    public void should_provide_booking_details_and_explain_why_cancellation_is_not_possible() {
        // Please define API keys in application.properties before running this test.
        // Tip: Use gpt-4 for this example, as gpt-3.5-turbo tends to hallucinate often and invent name and surname.
        interact(agent, "你好,我忘记我的预订信息");
        interact(agent, "123-457");
//        interact(agent, "I'm sorry I'm so inattentive today. Klaus Heisler.");
//        interact(agent, "My bad, it's 123-456");
//
//        // Here, information about the cancellation policy is automatically retrieved and injected into the prompt.
//        // Although the LLM sometimes attempts to cancel the booking, it fails to do so and will explain
//        // the reason why the booking cannot be cancelled, based on the injected cancellation policy.
//        interact(agent, "My plans have changed, can I cancel my booking?");
    }
    private static void interact(CustomerSupportAgent agent, String userMessage) {
        System.out.println("==========================================================================================");
        System.out.println("[User]: " + userMessage);
        System.out.println("==========================================================================================");
        String agentAnswer = agent.chat(userMessage);
        System.out.println("==========================================================================================");
        System.out.println("[Agent]: " + agentAnswer);
        System.out.println("==========================================================================================");
    }
    private ElasticsearchEmbeddingStore getElasticsearchEmbeddingStore(String indexName){
        if (!langchainConfigProperties.getEnabled()) {
            log.error("Elasticsearch is not enabled");
            return null;
        }
        String elasticHost = langchainConfigProperties.getElasticHost();
        int elasticPort = langchainConfigProperties.getElasticPort();
        String url = StrUtil.format("{}:{}", elasticHost, elasticPort);
        return ElasticsearchEmbeddingStore.builder()
                .serverUrl(url)
                .userName(langchainConfigProperties.getElasticUsername())
                .password(langchainConfigProperties.getElasticPassword())
                .indexName(indexName)
                .dimension(1536)
                .build();
    }
    /**
     * Create an EmbeddingStore backed by Elasticsearch
     */
    @Test
    public void createEmbeddingStoreWithElasticsearch() {
        // 1. Elasticsearch embedding store
        ElasticsearchEmbeddingStore embeddingStore = getElasticsearchEmbeddingStore("c2267fb9-7539-46b7-8aab-c1c8c532cbd5");
        Embedding content = embeddingModel.embed("这里石昊是谁").content();
        List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(content, 1, 0.9);
        System.out.println(relevant.get(0).score());
        System.out.println(relevant.get(0).embedded());
    }
    /**
     * Create the Elasticsearch client
     */
    @Test
    public void createElasticClient() {
        // 1. Check whether the index exists
        System.out.println(elasticOperation.isIndexExist("mrk_gpt_knowledge2"));
        // 2. Query a document by id
        System.out.println(elasticOperation.getDocumentById("mrk_gpt_knowledge2", "62c3470d-6f38-4b52-959e-988dc0721b01"));
        // 3. Delete the index
        System.out.println(elasticOperation.deleteIndex("mrk_gpt_knowledge2"));
    }
    @Test
    public void loadFromURL() {
        Document document = UrlDocumentLoader.load("http://s6ie5kuog.hd-bkt.clouddn.com/raipiot_user.txt", new TextDocumentParser());
        System.out.println(document.text());
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(500, 100, new OpenAiTokenizer(GPT_3_5_TURBO));
        List<TextSegment> split = documentSplitter.split(document);
        ObjectMapper objectMapper = ObjectMapper.of(split);
        JSONArray jsonArray = new JSONArray();
        objectMapper.map(jsonArray,null);
        System.out.println(jsonArray.toString());
    }
    interface Chef {
        String answer(String question);
    }
    @Test
    public void testPoJO() throws InterruptedException {
        StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder().apiKey("sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd").baseUrl("https://api.chatanywhere.tech/").build();
        List<ChatMessage> chatMessages = new ArrayList<>();
        chatMessages.add(new dev.langchain4j.data.message.SystemMessage("你是一个代码专家,你只能回答代码方面的知识,如果用户内容与代码无关,你会只能回答:我是一个代码专家,只能帮你回答有关代码的问题!"));
        chatMessages.add(new UserMessage("帮我写一个简单的计算器的js代码?"));
        model.generate(chatMessages, new StreamingResponseHandler<AiMessage>() {
            @Override
            public void onNext(String token) {
                System.out.println("==========================================================================================");
                System.out.println("[answer]: " + token);
                System.out.println("==========================================================================================");
            }
            @Override
            public void onError(Throwable error) {
                System.out.println("==========================================================================================");
                System.out.println("[error]: " + error);
                System.out.println("==========================================================================================");
            }
            @Override
            public void onComplete(Response<AiMessage> response) {
                System.out.println("==========================================================================================");
                System.out.println("[complate]: " + response);
                System.out.println("==========================================================================================");
            }
        });
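        // Block the test thread so the asynchronous streaming callbacks have time to run before the test exits.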
        Thread.sleep(10000);
    }
    @Test
    public void testTemplate(){
        PromptTemplate promptTemplate = new PromptTemplate("你可以根据知识库内容回答用户相关问题\n" +
                "知识库:\n"+
                "{{knowledge}} \n"+
                "用户问题:\n" +
                "{{question}} \n"
        );
        // 3. Search the knowledge base
        List<TextSegment> relevant = langchainComponent.findRelevant("mrk_gpt_knowledge2","你好,我忘记我的预订信息?");
        String relevantContext = relevant.stream().map(TextSegment::text).collect(joining("\n\n"));
        Map<String,Object> promtMap = new HashMap<>();
        promtMap.put("knowledge",relevantContext);
        promtMap.put("question","H你好,我忘记我的预订信息?");
        Prompt apply = promptTemplate.apply(promtMap);
        System.out.println(apply.text());
    }
    @Test
    public void testCostToken(){
        OpenAiTokenizer openAiTokenizer = new OpenAiTokenizer("gpt-3.5-turbo");
        int i = openAiTokenizer.estimateTokenCountInText("你好,我忘记我的预订信息?");
        String generate = model.generate("你好,我忘记我的预订信息?");
        System.out.println(i);
        System.out.println(generate);
    }
    @Test
    public void searchTest(){
        String s = googleSearch.searchGoogle("完美世界");
        System.out.println(s);
    }
}
 | 
	[
  "dev.langchain4j.service.AiServices.builder",
  "dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
  "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder"
] | 
	[((3786, 3992), 'dev.langchain4j.service.AiServices.builder'), ((3786, 3984), 'dev.langchain4j.service.AiServices.builder'), ((3786, 3911), 'dev.langchain4j.service.AiServices.builder'), ((3786, 3863), 'dev.langchain4j.service.AiServices.builder'), ((6278, 6591), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((6278, 6566), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((6278, 6533), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((6278, 6495), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((6278, 6421), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((6278, 6347), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((8457, 8603), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8457, 8595), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8457, 8553), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder')] | 
| 
	package com.magicrepokit.chat;
import com.magicrepokit.chat.agent.CustomerSupportAgent;
import com.magicrepokit.chat.service.tool.BookingTools;
import com.magicrepokit.chat.vo.knowledge.KnowledgeFileListVO;
import com.magicrepokit.oss.OssTemplate;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.junit.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@Configuration
public class TestConfiguration {
    /**
     * Create the OpenAI chat model
     * @return
     */
    @Bean
    ChatLanguageModel chatLanguageModel(){
        return OpenAiChatModel.builder().apiKey("sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd").baseUrl("https://api.chatanywhere.tech/").build();
    }
    /**
     * Create the customer support agent
     */
    @Bean
    CustomerSupportAgent customerSupportAgent(ChatLanguageModel chatLanguageModel, BookingTools bookingTools, Retriever<TextSegment> retriever) {
        return AiServices.builder(CustomerSupportAgent.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(20))
                .tools(bookingTools)
                .retriever(retriever)
                .build();
    }
    /**
     * Create the retriever
     * @param embeddingStore
     * @param embeddingModel
     * @return
     */
    @Bean
    Retriever<TextSegment> retriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
        // You will need to adjust these parameters to find the optimal setting, which will depend on two main factors:
        // - The nature of your data
        // - The embedding model you are using
        int maxResultsRetrieved = 5;
        double minScore = 0.6;
        return EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, maxResultsRetrieved, minScore);
    }
    /**
     * Create the embedding model
     * @return
     */
    @Bean
    EmbeddingModel embeddingModel() {
        return OpenAiEmbeddingModel.builder().apiKey("sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd").baseUrl("https://api.chatanywhere.tech/v1").build();
    }
    /**
     * Create the embedding store
     * @param embeddingModel
     * @param resourceLoader
     * @return
     * @throws IOException
     */
    @Bean
    EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) throws IOException {
        // Normally, you would already have your embedding store filled with your data.
        // However, for the purpose of this demonstration, we will:
        // 1. Create an in-memory embedding store
        //EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        ElasticsearchEmbeddingStore embeddingStore = ElasticsearchEmbeddingStore.builder()
                .serverUrl("154.204.60.125:9200")
                .userName("elastic")
                .password("123456")
                .indexName("mrk_gpt_knowledge2")
                .dimension(1536)
                .build();
        // 2. Load an example document ("Miles of Smiles" terms of use)
        Resource resource = resourceLoader.getResource("classpath:miles-of-smiles-terms-of-use.txt");
        Document document = loadDocument(resource.getFile().toPath(), new TextDocumentParser());
        // 3. Split the document into segments 100 tokens each
        // 4. Convert segments into embeddings
        // 5. Store embeddings into embedding store
        // All this can be done manually, but we will use EmbeddingStoreIngestor to automate this:
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        ingestor.ingest(document);
        return embeddingStore;
    }
}
 | 
	[
  "dev.langchain4j.service.AiServices.builder",
  "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder",
  "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
  "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
  "dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | 
	[((2216, 2353), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2216, 2345), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2216, 2303), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2577, 2850), 'dev.langchain4j.service.AiServices.builder'), ((2577, 2825), 'dev.langchain4j.service.AiServices.builder'), ((2577, 2787), 'dev.langchain4j.service.AiServices.builder'), ((2577, 2750), 'dev.langchain4j.service.AiServices.builder'), ((2577, 2677), 'dev.langchain4j.service.AiServices.builder'), ((3607, 3751), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3607, 3743), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3607, 3699), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((4393, 4660), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((4393, 4635), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((4393, 4602), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((4393, 4553), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((4393, 4517), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((4393, 4480), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((5355, 5560), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((5355, 5535), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((5355, 5487), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((5355, 5439), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] | 