Dataset Viewer
code
stringlengths 419
102k
| apis
sequencelengths 1
10
| extract_api
stringlengths 67
54.7k
|
---|---|---|
package dev.langchain4j.service;
import dev.langchain4j.agent.tool.DefaultToolExecutor;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.agent.tool.ToolSpecification;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.ToolExecutionResultMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.moderation.Moderation;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.retriever.Retriever;
import dev.langchain4j.spi.services.AiServicesFactory;
import java.lang.reflect.Method;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import static dev.langchain4j.agent.tool.ToolSpecifications.toolSpecificationFrom;
import static dev.langchain4j.exception.IllegalConfigurationException.illegalConfiguration;
import static dev.langchain4j.internal.ValidationUtils.ensureNotNull;
import static dev.langchain4j.spi.ServiceHelper.loadFactories;
import static java.util.stream.Collectors.toList;
/**
* AI Services provide a simpler and more flexible alternative to chains.
* You can define your own API (a Java interface with one or more methods),
* and AiServices will provide an implementation for it (we call this "AI Service").
* <p>
* Currently, AI Services support:
* <pre>
* - Prompt templates for user and system messages using {@link UserMessage} and {@link SystemMessage}
* - Structured prompts as method arguments (see {@link StructuredPrompt})
* - Shared or per-user (see {@link MemoryId}) chat memory
* - RAG (see {@link RetrievalAugmentor})
* - Tools (see {@link Tool})
* - Various return types (output parsers), see below
* - Streaming (use {@link TokenStream} as a return type)
* - Auto-moderation using {@link Moderate}
* </pre>
* <p>
* Here is the simplest example of an AI Service:
*
* <pre>
* interface Assistant {
*
* String chat(String userMessage);
* }
*
* Assistant assistant = AiServices.create(Assistant.class, model);
*
* String answer = assistant.chat("hello");
* System.out.println(answer); // Hello, how can I help you today?
* </pre>
*
* <pre>
* The return type of methods in your AI Service can be any of the following:
* - a {@link String}, an {@link AiMessage} or a {@code Response<AiMessage>}, if you want to get the answer from the LLM as-is
* - a {@code List<String>} or {@code Set<String>}, if you want to receive the answer as a collection of items or bullet points
* - any {@link Enum} or a {@code boolean}, if you want to use the LLM for classification
* - a primitive or boxed Java type: {@code int}, {@code Double}, etc., if you want to use the LLM for data extraction
* - many default Java types: {@code Date}, {@code LocalDateTime}, {@code BigDecimal}, etc., if you want to use the LLM for data extraction
* - any custom POJO, if you want to use the LLM for data extraction.
* For POJOs, it is advisable to use the "json mode" feature if the LLM provider supports it. For OpenAI, this can be enabled by calling {@code responseFormat("json_object")} during model construction.
*
* </pre>
* <p>
* Let's see how we can classify the sentiment of a text:
* <pre>
* enum Sentiment {
* POSITIVE, NEUTRAL, NEGATIVE
* }
*
* interface SentimentAnalyzer {
*
* {@code @UserMessage}("Analyze sentiment of {{it}}")
* Sentiment analyzeSentimentOf(String text);
* }
*
* SentimentAnalyzer assistant = AiServices.create(SentimentAnalyzer.class, model);
*
* Sentiment sentiment = analyzeSentimentOf.chat("I love you");
* System.out.println(sentiment); // POSITIVE
* </pre>
* <p>
* As demonstrated, you can put {@link UserMessage} and {@link SystemMessage} annotations above a method to define
* templates for user and system messages, respectively.
* In this example, the special {@code {{it}}} prompt template variable is used because there's only one method parameter.
* However, you can use more parameters as demonstrated in the following example:
* <pre>
* interface Translator {
*
* {@code @SystemMessage}("You are a professional translator into {{language}}")
* {@code @UserMessage}("Translate the following text: {{text}}")
* String translate(@V("text") String text, @V("language") String language);
* }
* </pre>
* <p>
* See more examples <a href="https://github.com/langchain4j/langchain4j-examples/tree/main/other-examples/src/main/java">here</a>.
*
* @param <T> The interface for which AiServices will provide an implementation.
*/
public abstract class AiServices<T> {

    protected static final String DEFAULT = "default";

    protected final AiServiceContext context;

    // Mutually exclusive RAG configuration flags: only one of
    // [retriever, contentRetriever, retrievalAugmentor] may be set per builder.
    private boolean retrieverSet = false;
    private boolean contentRetrieverSet = false;
    private boolean retrievalAugmentorSet = false;

    protected AiServices(AiServiceContext context) {
        this.context = context;
    }

    /**
     * Creates an AI Service (an implementation of the provided interface), that is backed by the provided chat model.
     * This convenience method can be used to create simple AI Services.
     * For more complex cases, please use {@link #builder}.
     *
     * @param aiService         The class of the interface to be implemented.
     * @param chatLanguageModel The chat model to be used under the hood.
     * @return An instance of the provided interface, implementing all its defined methods.
     */
    public static <T> T create(Class<T> aiService, ChatLanguageModel chatLanguageModel) {
        return builder(aiService)
                .chatLanguageModel(chatLanguageModel)
                .build();
    }

    /**
     * Creates an AI Service (an implementation of the provided interface), that is backed by the provided streaming chat model.
     * This convenience method can be used to create simple AI Services.
     * For more complex cases, please use {@link #builder}.
     *
     * @param aiService                  The class of the interface to be implemented.
     * @param streamingChatLanguageModel The streaming chat model to be used under the hood.
     *                                   The return type of all methods should be {@link TokenStream}.
     * @return An instance of the provided interface, implementing all its defined methods.
     */
    public static <T> T create(Class<T> aiService, StreamingChatLanguageModel streamingChatLanguageModel) {
        return builder(aiService)
                .streamingChatLanguageModel(streamingChatLanguageModel)
                .build();
    }

    /**
     * Begins the construction of an AI Service.
     *
     * @param aiService The class of the interface to be implemented.
     * @return builder
     */
    public static <T> AiServices<T> builder(Class<T> aiService) {
        AiServiceContext context = new AiServiceContext(aiService);
        // If an AiServicesFactory is provided via SPI, the first one found wins;
        // otherwise fall back to the default implementation.
        for (AiServicesFactory factory : loadFactories(AiServicesFactory.class)) {
            return factory.create(context);
        }
        return new DefaultAiServices<>(context);
    }

    /**
     * Configures chat model that will be used under the hood of the AI Service.
     * <p>
     * Either {@link ChatLanguageModel} or {@link StreamingChatLanguageModel} should be configured,
     * but not both at the same time.
     *
     * @param chatLanguageModel Chat model that will be used under the hood of the AI Service.
     * @return builder
     */
    public AiServices<T> chatLanguageModel(ChatLanguageModel chatLanguageModel) {
        context.chatModel = chatLanguageModel;
        return this;
    }

    /**
     * Configures streaming chat model that will be used under the hood of the AI Service.
     * The methods of the AI Service must return a {@link TokenStream} type.
     * <p>
     * Either {@link ChatLanguageModel} or {@link StreamingChatLanguageModel} should be configured,
     * but not both at the same time.
     *
     * @param streamingChatLanguageModel Streaming chat model that will be used under the hood of the AI Service.
     * @return builder
     */
    public AiServices<T> streamingChatLanguageModel(StreamingChatLanguageModel streamingChatLanguageModel) {
        context.streamingChatModel = streamingChatLanguageModel;
        return this;
    }

    /**
     * Configures the chat memory that will be used to preserve conversation history between method calls.
     * <p>
     * Unless a {@link ChatMemory} or {@link ChatMemoryProvider} is configured, all method calls will be independent of each other.
     * In other words, the LLM will not remember the conversation from the previous method calls.
     * <p>
     * The same {@link ChatMemory} instance will be used for every method call.
     * <p>
     * If you want to have a separate {@link ChatMemory} for each user/conversation, configure {@link #chatMemoryProvider} instead.
     * <p>
     * Either a {@link ChatMemory} or a {@link ChatMemoryProvider} can be configured, but not both simultaneously.
     *
     * @param chatMemory An instance of chat memory to be used by the AI Service.
     * @return builder
     */
    public AiServices<T> chatMemory(ChatMemory chatMemory) {
        context.chatMemories = new ConcurrentHashMap<>();
        // The shared memory is stored under the reserved DEFAULT key.
        context.chatMemories.put(DEFAULT, chatMemory);
        return this;
    }

    /**
     * Configures the chat memory provider, which provides a dedicated instance of {@link ChatMemory} for each user/conversation.
     * To distinguish between users/conversations, one of the method's arguments should be a memory ID (of any data type)
     * annotated with {@link MemoryId}.
     * For each new (previously unseen) memoryId, an instance of {@link ChatMemory} will be automatically obtained
     * by invoking {@link ChatMemoryProvider#get(Object id)}.
     * Example:
     * <pre>
     * interface Assistant {
     *
     *     String chat(@MemoryId int memoryId, @UserMessage String message);
     * }
     * </pre>
     * If you prefer to use the same (shared) {@link ChatMemory} for all users/conversations, configure a {@link #chatMemory} instead.
     * <p>
     * Either a {@link ChatMemory} or a {@link ChatMemoryProvider} can be configured, but not both simultaneously.
     *
     * @param chatMemoryProvider The provider of a {@link ChatMemory} for each new user/conversation.
     * @return builder
     */
    public AiServices<T> chatMemoryProvider(ChatMemoryProvider chatMemoryProvider) {
        context.chatMemories = new ConcurrentHashMap<>();
        context.chatMemoryProvider = chatMemoryProvider;
        return this;
    }

    /**
     * Configures a moderation model to be used for automatic content moderation.
     * If a method in the AI Service is annotated with {@link Moderate}, the moderation model will be invoked
     * to check the user content for any inappropriate or harmful material.
     *
     * @param moderationModel The moderation model to be used for content moderation.
     * @return builder
     * @see Moderate
     */
    public AiServices<T> moderationModel(ModerationModel moderationModel) {
        context.moderationModel = moderationModel;
        return this;
    }

    /**
     * Configures the tools that the LLM can use.
     * A {@link ChatMemory} that can hold at least 3 messages is required for the tools to work properly.
     *
     * @param objectsWithTools One or more objects whose methods are annotated with {@link Tool}.
     *                         All these tools (methods annotated with {@link Tool}) will be accessible to the LLM.
     *                         Note that inherited methods are ignored.
     * @return builder
     * @see Tool
     */
    public AiServices<T> tools(Object... objectsWithTools) {
        return tools(Arrays.asList(objectsWithTools));
    }

    /**
     * Configures the tools that the LLM can use.
     * A {@link ChatMemory} that can hold at least 3 messages is required for the tools to work properly.
     *
     * @param objectsWithTools A list of objects whose methods are annotated with {@link Tool}.
     *                         All these tools (methods annotated with {@link Tool}) are accessible to the LLM.
     *                         Note that inherited methods are ignored.
     * @return builder
     * @see Tool
     */
    public AiServices<T> tools(List<Object> objectsWithTools) {
        context.toolSpecifications = new ArrayList<>();
        context.toolExecutors = new HashMap<>();
        for (Object objectWithTool : objectsWithTools) {
            // getDeclaredMethods() intentionally ignores inherited methods (documented above).
            for (Method method : objectWithTool.getClass().getDeclaredMethods()) {
                if (method.isAnnotationPresent(Tool.class)) {
                    ToolSpecification toolSpecification = toolSpecificationFrom(method);
                    context.toolSpecifications.add(toolSpecification);
                    context.toolExecutors.put(toolSpecification.name(), new DefaultToolExecutor(objectWithTool, method));
                }
            }
        }
        return this;
    }

    /**
     * Deprecated. Use {@link #contentRetriever(ContentRetriever)}
     * (e.g. {@link EmbeddingStoreContentRetriever}) instead.
     * <br>
     * Configures a retriever that will be invoked on every method call to fetch relevant information
     * related to the current user message from an underlying source (e.g., embedding store).
     * This relevant information is automatically injected into the message sent to the LLM.
     *
     * @param retriever The retriever to be used by the AI Service.
     * @return builder
     */
    @Deprecated
    public AiServices<T> retriever(Retriever<TextSegment> retriever) {
        if (contentRetrieverSet || retrievalAugmentorSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        if (retriever != null) {
            // Adapt the legacy Retriever to the new ContentRetriever API.
            AiServices<T> withContentRetriever = contentRetriever(retriever.toContentRetriever());
            retrieverSet = true;
            return withContentRetriever;
        }
        return this;
    }

    /**
     * Configures a content retriever to be invoked on every method call for retrieving relevant content
     * related to the user's message from an underlying data source
     * (e.g., an embedding store in the case of an {@link EmbeddingStoreContentRetriever}).
     * The retrieved relevant content is then automatically incorporated into the message sent to the LLM.
     * <br>
     * This method provides a straightforward approach for those who do not require
     * a customized {@link RetrievalAugmentor}.
     * It configures a {@link DefaultRetrievalAugmentor} with the provided {@link ContentRetriever}.
     *
     * @param contentRetriever The content retriever to be used by the AI Service.
     * @return builder
     */
    public AiServices<T> contentRetriever(ContentRetriever contentRetriever) {
        if (retrieverSet || retrievalAugmentorSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        contentRetrieverSet = true;
        context.retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                .contentRetriever(ensureNotNull(contentRetriever, "contentRetriever"))
                .build();
        return this;
    }

    /**
     * Configures a retrieval augmentor to be invoked on every method call.
     *
     * @param retrievalAugmentor The retrieval augmentor to be used by the AI Service.
     * @return builder
     */
    public AiServices<T> retrievalAugmentor(RetrievalAugmentor retrievalAugmentor) {
        if (retrieverSet || contentRetrieverSet) {
            throw illegalConfiguration("Only one out of [retriever, contentRetriever, retrievalAugmentor] can be set");
        }
        retrievalAugmentorSet = true;
        context.retrievalAugmentor = ensureNotNull(retrievalAugmentor, "retrievalAugmentor");
        return this;
    }

    /**
     * Constructs and returns the AI Service.
     *
     * @return An instance of the AI Service implementing the specified interface.
     */
    public abstract T build();

    /**
     * Validates that the builder has a usable model configuration and that
     * tools are only used together with chat memory.
     *
     * @throws dev.langchain4j.exception.IllegalConfigurationException on invalid configuration
     */
    protected void performBasicValidation() {
        if (context.chatModel == null && context.streamingChatModel == null) {
            throw illegalConfiguration("Please specify either chatLanguageModel or streamingChatLanguageModel");
        }
        if (context.toolSpecifications != null && !context.hasChatMemory()) {
            throw illegalConfiguration(
                    "Please set up chatMemory or chatMemoryProvider in order to use tools. "
                            + "A ChatMemory that can hold at least 3 messages is required for the tools to work properly. "
                            + "While the LLM can technically execute a tool without chat memory, if it only receives the " +
                            "result of the tool's execution without the initial message from the user, it won't interpret " +
                            "the result properly."
            );
        }
    }

    /**
     * Removes tool-related messages (tool execution requests and results) from the given list.
     *
     * @param messages the messages to filter
     * @return a new list without tool execution requests/results
     */
    public static List<ChatMessage> removeToolMessages(List<ChatMessage> messages) {
        return messages.stream()
                .filter(it -> !(it instanceof ToolExecutionResultMessage))
                .filter(it -> !(it instanceof AiMessage && ((AiMessage) it).hasToolExecutionRequests()))
                .collect(toList());
    }

    /**
     * Blocks on the given moderation future (if any) and throws if the text was flagged.
     *
     * @param moderationFuture the pending moderation result, or {@code null} if moderation is disabled
     * @throws ModerationException if the moderated text violates the content policy
     * @throws RuntimeException    if waiting for the moderation result fails
     */
    public static void verifyModerationIfNeeded(Future<Moderation> moderationFuture) {
        if (moderationFuture != null) {
            try {
                Moderation moderation = moderationFuture.get();
                if (moderation.flagged()) {
                    throw new ModerationException(String.format("Text \"%s\" violates content policy", moderation.flaggedText()));
                }
            } catch (InterruptedException e) {
                // Fix: restore the interrupt status before wrapping, so callers up the
                // stack can still observe that the thread was interrupted.
                Thread.currentThread().interrupt();
                throw new RuntimeException(e);
            } catch (ExecutionException e) {
                throw new RuntimeException(e);
            }
        }
    }
}
| [
"dev.langchain4j.rag.DefaultRetrievalAugmentor.builder"
] | [((15779, 15926), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((15779, 15901), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')] |
package org.mfusco;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import static java.time.Duration.ofSeconds;
public class MortgageChat {

    private final ChatLanguageModel model;
    private final PersonExtractor extractor;
    private final DroolsMortgageCalculator droolsMortgageCalculator = new DroolsMortgageCalculator();
    private final Assistant assistant;

    public MortgageChat(String openAiApiKey) {
        // Single OpenAI chat model shared by the extractor and the assistant;
        // a 60s timeout leaves headroom for slow completions.
        this.model = OpenAiChatModel.builder()
                .apiKey(openAiApiKey)
                .timeout(ofSeconds(60))
                .build();

        // AI service that pulls Person data out of free-form text.
        this.extractor = AiServices.create(PersonExtractor.class, model);

        // Conversational assistant wired with a sliding-window memory and
        // the Drools mortgage calculator exposed as a tool.
        this.assistant = AiServices.builder(Assistant.class)
                .chatLanguageModel(model)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .tools(droolsMortgageCalculator)
                .build();
    }

    /**
     * Routes the input: text ending with '?' is treated as a question for the
     * assistant; anything else is treated as a person description to extract.
     */
    public String chat(String text) {
        if (text.endsWith("?")) {
            return assistant.chat(text);
        }
        return extractPerson(text);
    }

    // Extracts a person from the text, registers it with the rule engine,
    // and echoes the extracted data back to the caller.
    private String extractPerson(String text) {
        Person extracted = extractor.extractPersonFrom(text);
        droolsMortgageCalculator.register(extracted);
        return extracted.toString();
    }
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((601, 729), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((601, 704), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((601, 664), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((822, 1046), 'dev.langchain4j.service.AiServices.builder'), ((822, 1021), 'dev.langchain4j.service.AiServices.builder'), ((822, 972), 'dev.langchain4j.service.AiServices.builder'), ((822, 899), 'dev.langchain4j.service.AiServices.builder')] |
package gcfv2;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.InputStreamReader;
import java.nio.charset.StandardCharsets;
import java.util.stream.Collectors;

import com.google.cloud.functions.HttpFunction;
import com.google.cloud.functions.HttpRequest;
import com.google.cloud.functions.HttpResponse;
import com.google.gson.Gson;
import com.google.gson.JsonObject;

//Logging packages
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

//LangChain4j packages
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.vertexai.VertexAiChatModel;
public class PredictChatFunction implements HttpFunction {

    // Loggers are effectively constants; declare final.
    private static final Logger logger = LoggerFactory.getLogger(PredictChatFunction.class);

    /**
     * Handles an HTTP request whose JSON body carries a {@code "prompt"} field and
     * writes the Vertex AI chat model's answer to the response.
     * If no prompt is supplied, writes "No prompt provided." instead.
     *
     * @param request  incoming HTTP request with a JSON body
     * @param response HTTP response the model output is written to
     * @throws Exception if reading the request or calling the model fails
     */
    public void service(final HttpRequest request, final HttpResponse response) throws Exception {
        final BufferedWriter writer = response.getWriter();

        // Region/project for the Vertex AI endpoint, injected via environment variables.
        String gcpRegion = System.getenv("GCP_REGION");
        String gcpProject = System.getenv("GCP_PROJECT");

        // Read the raw JSON request body; JSON is defined to be UTF-8, so decode
        // explicitly instead of relying on the platform default charset.
        BufferedReader reader = new BufferedReader(
                new InputStreamReader(request.getInputStream(), StandardCharsets.UTF_8));
        String jsonRequest = reader.lines().collect(Collectors.joining());

        // Parse the JSON data
        Gson gson = new Gson();
        JsonObject jsonRequestObject = gson.fromJson(jsonRequest, JsonObject.class);

        // Fix: guard against an empty body or a missing/null "prompt" field.
        // Previously jsonRequestObject.get("prompt").getAsString() threw a
        // NullPointerException instead of reaching the "No prompt provided." branch.
        String prompt = "";
        if (jsonRequestObject != null
                && jsonRequestObject.has("prompt")
                && !jsonRequestObject.get("prompt").isJsonNull()) {
            prompt = jsonRequestObject.get("prompt").getAsString();
        }

        if (!prompt.isEmpty()) {
            VertexAiChatModel vertexAiChatModel = VertexAiChatModel.builder()
                    .endpoint("us-central1-aiplatform.googleapis.com:443")
                    .project(gcpProject)
                    .location(gcpRegion)
                    .publisher("google")
                    .modelName("chat-bison@001")
                    .temperature(1.0)
                    .maxOutputTokens(50)
                    .topK(0)
                    .topP(0.0)
                    .maxRetries(3)
                    .build();
            Response<AiMessage> modelResponse = vertexAiChatModel.generate(UserMessage.from(prompt));
            logger.info("Result: " + modelResponse.content().text());
            writer.write(modelResponse.content().text());
        } else {
            logger.info("No prompt provided.");
            writer.write("No prompt provided.");
        }
    }
}
| [
"dev.langchain4j.model.vertexai.VertexAiChatModel.builder"
] | [((1716, 2173), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 2146), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 2113), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 2084), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 2057), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 2018), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 1982), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 1935), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 1896), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 1856), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1716, 1816), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder')] |
package com.moyz.adi.common.service;
import com.moyz.adi.common.helper.LLMContext;
import com.moyz.adi.common.interfaces.TriConsumer;
import com.moyz.adi.common.util.AdiPgVectorEmbeddingStore;
import com.moyz.adi.common.vo.AnswerMeta;
import com.moyz.adi.common.vo.PromptMeta;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.tuple.ImmutablePair;
import org.apache.commons.lang3.tuple.Pair;
import org.apache.commons.lang3.tuple.Triple;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.util.stream.Collectors.joining;
@Slf4j
@Service
public class RAGService {

    /**
     * Matches a JDBC PostgreSQL URL and captures host (1), port (2) and database name (3).
     * Compiled once instead of on every {@link #initEmbeddingStore()} call.
     */
    private static final Pattern DB_URL_PATTERN =
            Pattern.compile("jdbc:postgresql://([^:/]+):(\\d+)/(\\w+).+");

    @Value("${spring.datasource.url}")
    private String dataBaseUrl;

    @Value("${spring.datasource.username}")
    private String dataBaseUserName;

    @Value("${spring.datasource.password}")
    private String dataBasePassword;

    private static final PromptTemplate promptTemplate = PromptTemplate.from("尽可能准确地回答下面的问题: {{question}}\n\n根据以下知识库的内容:\n{{information}}");

    private EmbeddingModel embeddingModel;

    private EmbeddingStore<TextSegment> embeddingStore;

    /** Initializes the local embedding model and the pgvector-backed embedding store. */
    public void init() {
        log.info("initEmbeddingModel");
        embeddingModel = new AllMiniLmL6V2EmbeddingModel();
        embeddingStore = initEmbeddingStore();
    }

    /**
     * Builds the pgvector embedding store from the Spring datasource URL.
     *
     * @throws RuntimeException if the JDBC URL cannot be parsed
     */
    private EmbeddingStore<TextSegment> initEmbeddingStore() {
        // Extract host/port/database from the JDBC URL; fail fast if it doesn't match.
        Matcher matcher = DB_URL_PATTERN.matcher(dataBaseUrl);
        String host;
        String port;
        String databaseName;
        if (matcher.matches()) {
            host = matcher.group(1);
            port = matcher.group(2);
            databaseName = matcher.group(3);
            // Fix: route diagnostics through the SLF4J logger instead of System.out.
            log.info("Host: {}, Port: {}, Database: {}", host, port, databaseName);
        } else {
            throw new RuntimeException("parse url error");
        }
        // Local renamed to 'store' to avoid shadowing the 'embeddingStore' field.
        AdiPgVectorEmbeddingStore store = AdiPgVectorEmbeddingStore.builder()
                .host(host)
                .port(Integer.parseInt(port))
                .database(databaseName)
                .user(dataBaseUserName)
                .password(dataBasePassword)
                .dimension(384)   // matches AllMiniLmL6V2's embedding size
                .createTable(true)
                .dropTableFirst(false)
                .table("adi_knowledge_base_embedding")
                .build();
        return store;
    }

    /** Builds an ingestor that splits documents into ~1000-token chunks before embedding. */
    private EmbeddingStoreIngestor getEmbeddingStoreIngestor() {
        DocumentSplitter documentSplitter = DocumentSplitters.recursive(1000, 0, new OpenAiTokenizer(GPT_3_5_TURBO));
        EmbeddingStoreIngestor embeddingStoreIngestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        return embeddingStoreIngestor;
    }

    /**
     * Splits the document into chunks and stores their embeddings.
     *
     * @param document knowledge-base document to ingest
     */
    public void ingest(Document document) {
        getEmbeddingStoreIngestor().ingest(document);
    }

    /**
     * Retrieves content relevant to the question from the given knowledge base
     * and renders it into a prompt.
     *
     * @param kbUuid   knowledge-base id to search in
     * @param question the user's question
     * @return the rendered prompt, or {@code null} when nothing relevant was found
     */
    public Prompt retrieveAndCreatePrompt(String kbUuid, String question) {
        // Embed the question
        Embedding questionEmbedding = embeddingModel.embed(question).content();
        // Find relevant embeddings in embedding store by semantic similarity
        // You can play with parameters below to find a sweet spot for your specific use case
        int maxResults = 3;
        double minScore = 0.6;
        List<EmbeddingMatch<TextSegment>> relevantEmbeddings = ((AdiPgVectorEmbeddingStore) embeddingStore).findRelevantByKbUuid(kbUuid, questionEmbedding, maxResults, minScore);
        // Create a prompt for the model that includes question and relevant embeddings
        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded().text())
                .collect(joining("\n\n"));
        if (StringUtils.isBlank(information)) {
            return null;
        }
        // quoteReplacement prevents '$'/'\' in retrieved text from being treated
        // as regex group references by the template engine.
        return promptTemplate.apply(Map.of("question", question, "information", Matcher.quoteReplacement(information)));
    }

    /**
     * Retrieves relevant content and asks the LLM.
     *
     * @param kbUuid    knowledge-base uuid
     * @param question  the user's question
     * @param modelName LLM model name
     * @return pair of (rendered prompt text, model response), or {@code null} when
     *         no relevant content was found
     */
    public Pair<String, Response<AiMessage>> retrieveAndAsk(String kbUuid, String question, String modelName) {
        Prompt prompt = retrieveAndCreatePrompt(kbUuid, question);
        if (null == prompt) {
            return null;
        }
        Response<AiMessage> response = new LLMContext(modelName).getLLMService().chat(prompt.toUserMessage());
        return new ImmutablePair<>(prompt.text(), response);
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((3196, 3615), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3590), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3535), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3496), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3461), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3429), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3385), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3345), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3305), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3196, 3259), 'com.moyz.adi.common.util.AdiPgVectorEmbeddingStore.builder'), ((3894, 4099), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 4074), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 4026), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3894, 3978), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package dev.zbendhiba.demo.telegram.openapi;
import java.util.List;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import jakarta.enterprise.context.ApplicationScoped;
import static java.time.Duration.ofSeconds;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.telegram.model.IncomingMessage;
import org.eclipse.microprofile.config.inject.ConfigProperty;
@ApplicationScoped
public class Routes extends RouteBuilder {

    @ConfigProperty(name="open-api-key")
    String openApiKey;

    // Local (in-process) embedding model and in-memory vector store shared by all routes.
    private EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
    private EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

    /**
     * Defines the Camel routes: two REST ingestion endpoints (Camel-based and
     * LangChain4j-based splitting), the embedding pipeline, and a Telegram bot
     * route that answers questions via a retrieval chain over the ingested bios.
     */
    @Override
    public void configure() throws Exception {

        // REST endpoint to add a bio
        rest("data")
                .post("/camel-split-ingest/")
                .to("direct:camel-split-ingest")
                .post("/langchain4j-split-ingest/")
                .to("direct:langchain4j-split-ingest");

        // Ingest Data
        // wireTap hands the body off asynchronously, so the caller gets "Thanks" immediately.
        from("direct:camel-split-ingest")
                .wireTap("direct:processBio")
                .transform().simple("Thanks");

        from("direct:processBio")
                // split into paragraphs and use OpenApiTokenizer
                // (paragraphs are separated by one blank line)
                .split(body().tokenize("\\s*\\n\\s*\\n"))
                .setHeader("paragraphNumber", simple("${exchangeProperty.CamelSplitIndex}"))
                // Process each paragraph using the OpenAiTokenizerProcessor
                .process(new CamelSplitterProcessor())
                .to("direct:processTokenizedPart")
                .end();

        // Embed paragraphs into Vector Database
        from("direct:processTokenizedPart")
                .process(exchange -> {
                    embed(exchange.getIn().getBody(List.class));
                });

        // Alternative ingestion path: LangChain4j does the splitting instead of Camel.
        from("direct:process-langchain4j-split-ingest")
                .process(new LangchainSplitterProcessor())
                .to("direct:processTokenizedPart");

        from("direct:langchain4j-split-ingest")
                .wireTap("direct:process-langchain4j-split-ingest")
                .transform().simple("Thanks");

        // Chat model used to answer questions over the retrieved content.
        // NOTE(review): timeout is 3000 seconds (~50 min) — confirm this is intentional.
        ChatLanguageModel model = OpenAiChatModel.builder()
                .apiKey(openApiKey)
                .modelName(GPT_3_5_TURBO)
                .temperature(0.3)
                .timeout(ofSeconds(3000))
                .build();

        // Retrieval chain: embeds the question, pulls similar segments from the
        // store, and injects them into the prompt template below.
        ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
                .chatLanguageModel(model)
                .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .promptTemplate(PromptTemplate
                        .from("Answer the following question to the best of your ability: {{question}}\n\nBase your answer on the following information:\n{{information}}"))
                .build();

        from("telegram:bots?timeout=30000")
                .log("Text received in Telegram : ${body}")
                // this is just a Hello World, we suppose that we receive only text messages from user
                .filter(simple("${body} != '/start'"))
                .process(e->{
                    IncomingMessage incomingMessage = e.getMessage().getBody(IncomingMessage.class);
                    var openapiMessage = chain.execute(incomingMessage.getText());
                    e.getMessage().setBody(openapiMessage);
                })
                .log("Text to send to user based on response from ChatGPT : ${body}")
                .to("telegram:bots")
                .end();
    }

    /**
     * Embeds the given text segments and stores them (with their embeddings)
     * in the in-memory vector store.
     *
     * @param textSegments segments produced by one of the splitters
     */
    public void embed(List<TextSegment> textSegments ) {
        List<Embedding> embeddings = embeddingModel.embedAll(textSegments).content();
        embeddingStore.addAll(embeddings, textSegments);
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2918, 3122), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3097), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3055), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 3021), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2918, 2979), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3171, 3658), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3633), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3413), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3340), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3171, 3251), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package eu.luminis.faqlangchain.service;

import java.io.File;
import java.io.FileNotFoundException;
import java.time.Duration;
import java.util.Arrays;
import java.util.stream.Collectors;

import com.fasterxml.jackson.databind.JsonNode;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType;
import org.springframework.http.client.MultipartBodyBuilder;
import org.springframework.stereotype.Service;
import org.springframework.util.ResourceUtils;
import org.springframework.web.reactive.function.BodyInserters;
import org.springframework.web.reactive.function.client.WebClient;
import reactor.core.publisher.Mono;

/**
 * Ingests the FAQ PDF: the file is sent to the Unstructured API for text
 * extraction, and the extracted text is split, embedded and stored.
 */
@Service
public class IngestService {

    private static final Logger LOGGER = LoggerFactory.getLogger(IngestService.class);

    private final WebClient webClient;
    private final EmbeddingStore<TextSegment> embeddingStore;
    private final EmbeddingModel embeddingModel;

    public IngestService(@Value("${unstructured.apiKey}") String unstructuredApiKey,
                         @Qualifier("openaiModel") EmbeddingModel embeddingModel,
                         @Qualifier("inMemoryEmbeddingStore") EmbeddingStore<TextSegment> embeddingStore) {
        this.embeddingModel = embeddingModel;
        this.embeddingStore = embeddingStore;
        // Client pre-configured for the hosted Unstructured API; the key travels in a header.
        this.webClient = WebClient.builder()
                .baseUrl("https://api.unstructured.io/general/v0/")
                .defaultHeader("unstructured-api-key", unstructuredApiKey)
                .build();
    }

    /**
     * Uploads {@code data/faq.pdf} to the Unstructured API (OCR-only strategy) and,
     * on success, splits/embeds/stores the extracted text.
     *
     * @return {@code true} when extraction and ingestion both succeed
     * @throws FileNotFoundException if the PDF is missing from the classpath
     */
    public boolean ingestPDF() throws FileNotFoundException {
        LOGGER.info("Ingesting PDF");
        File file = ResourceUtils.getFile("classpath:data/faq.pdf");

        MultipartBodyBuilder bodyBuilder = new MultipartBodyBuilder();
        bodyBuilder.part("files", new FileSystemResource(file));
        bodyBuilder.part("strategy", "ocr_only");
        bodyBuilder.part("ocr_languages", "eng");

        Mono<Object> responseMono = webClient.post()
                .uri("general")
                .contentType(MediaType.MULTIPART_FORM_DATA)
                .body(BodyInserters.fromMultipartData(bodyBuilder.build()))
                .exchangeToMono(response -> {
                    // Non-OK: keep the raw JSON error body around so it can be logged below.
                    if (!response.statusCode().equals(HttpStatus.OK)) {
                        LOGGER.error("Something went wrong when uploading file to Unstructured API. Received status code {}", response.statusCode());
                        return response.bodyToMono(JsonNode.class);
                    }
                    return response.bodyToMono(UnstructuredResponse[].class);
                });

        Object response = responseMono.block(Duration.ofMinutes(1));
        if (response instanceof JsonNode jsonNode) {
            LOGGER.error("Response: {}", jsonNode);
            return false;
        }
        if (!(response instanceof UnstructuredResponse[] unstructuredResponses)) {
            // Unexpected/absent payload: nothing to ingest.
            return false;
        }

        // Join the per-element extracted snippets into one document.
        String extractedText = Arrays.stream(unstructuredResponses)
                .map(UnstructuredResponse::getText)
                .collect(Collectors.joining(" "));
        Document document = Document.from(extractedText);

        DocumentSplitter documentSplitter = DocumentSplitters.recursive(300);
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(documentSplitter)
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();
        ingestor.ingest(document);

        LOGGER.info("Ingestion of PDF finished");
        return true;
    }
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1939, 2126), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((1939, 2101), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((1939, 2026), 'org.springframework.web.reactive.function.client.WebClient.builder'), ((3531, 3635), 'java.util.Arrays.stream'), ((3531, 3602), 'java.util.Arrays.stream'), ((3819, 4040), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 4011), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 3959), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3819, 3907), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package my.project;

import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;

import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.ALL_MINILM_L6_V2_EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;

/**
 * Minimal RAG example: ingests a text document into an in-memory embedding
 * store, then answers a question about it via a retrieval-augmented chain.
 */
public class App {

    // Please also check ServiceWithRetrieverExample
    public static void main(String[] args) throws Exception {
        // Local embedding model + volatile in-memory vector store.
        EmbeddingModel embeddingModel = new ALL_MINILM_L6_V2_EmbeddingModel();
        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

        // Split into ~500-token chunks (counted with the GPT-3.5 tokenizer) before embedding.
        EmbeddingStoreIngestor ingestor =
                EmbeddingStoreIngestor.builder()
                        .documentSplitter(DocumentSplitters.recursive(500, new OpenAiTokenizer(GPT_3_5_TURBO)))
                        .embeddingModel(embeddingModel)
                        .embeddingStore(embeddingStore)
                        .build();
        Document document = loadDocument(toPath("example-files/story-about-happy-carrot.txt"));
        ingestor.ingest(document);

        ConversationalRetrievalChain chain =
                ConversationalRetrievalChain.builder()
                        .chatLanguageModel(OpenAiChatModel.withApiKey("my-openai-key"))
                        .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                        // .chatMemory() // you can override default chat memory
                        // .promptTemplate() // you can override default prompt template
                        .build();

        String answer = chain.execute("Who is Charlie?");
        System.out.println(answer); // Charlie is a cheerful carrot living in VeggieVille...
    }

    /**
     * Resolves a classpath resource (relative to this class) to a filesystem path.
     *
     * @param fileName resource name, e.g. {@code "example-files/story.txt"}
     * @return the resource's filesystem path
     * @throws IllegalArgumentException if the resource is absent from the classpath
     *         (previously this surfaced as an opaque NullPointerException)
     */
    private static Path toPath(String fileName) {
        URL fileUrl = App.class.getResource(fileName);
        if (fileUrl == null) {
            // Fail fast with a clear message instead of NPE-ing on fileUrl.toURI().
            throw new IllegalArgumentException("Classpath resource not found: " + fileName);
        }
        try {
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1330, 1571), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1330, 1550), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1330, 1506), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1330, 1462), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1747, 2113), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((1747, 1946), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((1747, 1861), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package org.agoncal.fascicle.langchain4j.accessing.vertexai;

import dev.langchain4j.model.vertexai.VertexAiChatModel;

// tag::adocSkip[]
/**
 * Sample showing how to build and invoke a Google Vertex AI chat model.
 *
 * @author Antonio Goncalves
 * http://www.antoniogoncalves.org
 * --
 */
// end::adocSkip[]
public class MusicianService {
  public static void main(String[] args) {
    MusicianService musicianService = new MusicianService();
    musicianService.useVertexAiLanguageModelBuilder();
  }
  // NOTE(review): these constants are named AZURE_* and read AZURE_* environment
  // variables, yet the model below is Vertex AI — this looks like a copy/paste
  // from an Azure OpenAI sample; confirm the intended provider and env var names.
  // AZURE_OPENAI_KEY and AZURE_OPENAI_DEPLOYMENT_NAME are currently unused here.
  private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
  private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
  private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");
  private static final String PROMPT = "When was the first Beatles album released?";
  // ###################################
  // ### AZURE OPENAI LANGUAGE MODEL ###
  // ###################################
  // Builds a Vertex AI chat model and sends a single hard-coded prompt.
  public void useVertexAiLanguageModelBuilder() {
    System.out.println("### useVertexAiLanguageModelBuilder");
    // tag::adocSnippet[]
    VertexAiChatModel model = VertexAiChatModel.builder()
      .endpoint(AZURE_OPENAI_ENDPOINT)
      .temperature(0.3)
      .build();
    // end::adocSnippet[]
    String completion = model.generate(PROMPT); // NOTE(review): result is computed but never used or printed
  }
}
| [
"dev.langchain4j.model.vertexai.VertexAiChatModel.builder"
] | [((1100, 1205), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1100, 1190), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((1100, 1166), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder')] |
package org.tutorial.yy.langchain.demo.aiservice.memory;

import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import org.tutorial.yy.langchain.demo.aiservice.LangChainBase;

import java.io.IOException;

import static dev.langchain4j.data.message.UserMessage.userMessage;

/**
 * Demonstrates conversational memory: the chain is seeded with a persona and
 * keeps prior turns in a sliding window, so follow-up prompts have context.
 *
 * @author yyHuangfu
 * @create 2024/2/3
 */
public class HelloChatMemory extends LangChainBase {

    public static void main(String[] args) throws IOException {
        ConversationalChain chain = ConversationalChain.builder()
                .chatLanguageModel(getModel())
                .chatMemory(getCustomMemory())
                .build();

        // First turn: asks the model to introduce itself (persona comes from the seeded memory).
        System.out.println(chain.execute("can u introduce yourself?"));
        // Second turn: requests Chinese; the previous exchange is still in memory.
        System.out.println(chain.execute("in chinese plz"));
    }

    /**
     * Builds a 10-message sliding-window memory pre-loaded with a persona
     * for the "yiyu" assistant.
     */
    public static ChatMemory getCustomMemory() {
        ChatMemory memory = MessageWindowChatMemory.builder()
                .maxMessages(10)
                .build();
        memory.add(userMessage("you are the human called Alex HF, u also called yiyu"));
        memory.add(userMessage("u like programming, reading and any sports"));
        memory.add(userMessage("u can speak Chinese and English"));
        return memory;
    }
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder",
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((591, 739), 'dev.langchain4j.chain.ConversationalChain.builder'), ((591, 714), 'dev.langchain4j.chain.ConversationalChain.builder'), ((591, 667), 'dev.langchain4j.chain.ConversationalChain.builder'), ((1129, 1220), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1129, 1195), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package com.example.application;

import com.example.application.services.BookingTools;
import com.example.application.services.CustomerSupportAgent;
import com.vaadin.flow.component.page.AppShellConfigurator;
import com.vaadin.flow.theme.Theme;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.core.io.Resource;
import org.springframework.core.io.ResourceLoader;
import java.io.IOException;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_4;
/**
 * Spring Boot entry point wiring the LangChain4j building blocks — embedding
 * model, in-memory vector store, streaming chat model, retriever and the
 * customer-support AI service — together as beans.
 */
@SpringBootApplication
@Theme(value = "customer-service-chatbot")
public class Application implements AppShellConfigurator {
    public static void main(String[] args) {
        SpringApplication.run(Application.class, args);
    }
    // Local (in-process) embedding model; no external API call needed for embeddings.
    @Bean
    EmbeddingModel embeddingModel() {
        return new AllMiniLmL6V2EmbeddingModel();
    }
    // Volatile store: embeddings live in memory only and are rebuilt at startup
    // by the CommandLineRunner below.
    @Bean
    EmbeddingStore<TextSegment> embeddingStore() {
        return new InMemoryEmbeddingStore<>();
    }
    @Bean
    Tokenizer tokenizer() {
        return new OpenAiTokenizer(GPT_3_5_TURBO);
    }
    // In the real world, ingesting documents would often happen separately, on a CI server or similar
    // Loads terms-of-service.txt from the classpath, splits it into 200-token
    // chunks (no overlap) and embeds them into the store at application startup.
    @Bean
    CommandLineRunner docsToEmbeddings(
            EmbeddingModel embeddingModel,
            EmbeddingStore<TextSegment> embeddingStore,
            Tokenizer tokenizer,
            ResourceLoader resourceLoader
    ) throws IOException {
        return args -> {
            Resource resource =
                    resourceLoader.getResource("classpath:terms-of-service.txt");
            var termsOfUse = loadDocument(resource.getFile().toPath(), new TextDocumentParser());
            DocumentSplitter documentSplitter = DocumentSplitters.recursive(200, 0,
                    tokenizer);
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                    .documentSplitter(documentSplitter)
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();
            ingestor.ingest(termsOfUse);
        };
    }
    // Streaming OpenAI chat model so replies can be pushed to the UI token by token.
    @Bean
    StreamingChatLanguageModel chatLanguageModel() {
        return OpenAiStreamingChatModel.builder()
                .apiKey(ApiKeys.OPENAI_API_KEY)
                .modelName(GPT_3_5_TURBO)
                .build();
    }
    // Retrieval over the embedded docs: at most 2 segments, similarity >= 0.6.
    @Bean
    ContentRetriever retriever(
            EmbeddingStore<TextSegment> embeddingStore,
            EmbeddingModel embeddingModel
    ) {
        return EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(2)
                .minScore(0.6)
                .build();
    }
    // The AI service: per-conversation memory (1000-token window keyed by chat id),
    // RAG via the retriever above, and booking tools the model may invoke.
    @Bean
    CustomerSupportAgent customerSupportAgent(
            StreamingChatLanguageModel chatLanguageModel,
            Tokenizer tokenizer,
            ContentRetriever retriever,
            BookingTools tools
    ) {
        return AiServices.builder(CustomerSupportAgent.class)
                .streamingChatLanguageModel(chatLanguageModel)
                .chatMemoryProvider(chatId -> TokenWindowChatMemory.builder()
                        .id(chatId)
                        .maxTokens(1000, tokenizer)
                        .build())
                .contentRetriever(retriever)
                .tools(tools)
                .build();
    }
}
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.memory.chat.TokenWindowChatMemory.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((3196, 3417), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3388), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3336), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3196, 3284), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((3556, 3705), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3556, 3680), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3556, 3638), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3878, 4101), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4076), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4045), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 4014), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3878, 3966), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4354, 4763), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4738), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4708), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4663), 'dev.langchain4j.service.AiServices.builder'), ((4354, 4463), 'dev.langchain4j.service.AiServices.builder'), ((4510, 4662), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((4510, 4629), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((4510, 4577), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder')] |
package com.johnsosoka.selfdiscover.config;

import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Wires up the chat language model used by the chat agents.
 */
@Configuration
public class LanguageModelConfig {

    @Value("${openai.api-key}")
    String apiKey;

    /**
     * OpenAI-backed chat model configured with the injected API key.
     */
    @Bean
    public ChatLanguageModel chatLanguageModel() {
        var model = OpenAiChatModel.builder().apiKey(apiKey);
        return model.build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((580, 662), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((580, 637), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import lombok.extern.slf4j.Slf4j;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
/**
 * Two-pass text-to-SQL generation: the first model call extracts schema links
 * for the question, the second call generates SQL using those links.
 * Registered under {@link SqlGenerationMode#TWO_PASS_AUTO_COT}.
 */
@Service
@Slf4j
public class TwoPassSqlGeneration implements SqlGeneration, InitializingBean {
    // Dedicated logger that traces prompts/responses of the LLM pipeline.
    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");
    @Autowired
    private ChatLanguageModel chatLanguageModel;
    @Autowired
    private SqlExamplarLoader sqlExamplarLoader;
    @Autowired
    private OptimizationConfig optimizationConfig;
    @Autowired
    private SqlPromptGenerator sqlPromptGenerator;
    /**
     * Generates SQL for the request in two model calls.
     * Pass 1: build a schema-linking prompt (with retrieved few-shot SQL examples)
     * and ask the model for the relevant schema links.
     * Pass 2: build the SQL prompt from those links and ask the model for the SQL.
     *
     * @param llmReq    user question plus schema context
     * @param dataSetId id of the queried data set (used for logging here)
     * @return response carrying the query text and a map of candidate SQL to score
     */
    @Override
    public LLMResp generation(LLMReq llmReq, Long dataSetId) {
        keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
        // Few-shot examples retrieved by similarity to the question text.
        List<Map<String, String>> sqlExamples = sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
                optimizationConfig.getText2sqlExampleNum());
        String linkingPromptStr = sqlPromptGenerator.generateLinkingPrompt(llmReq, sqlExamples);
        // NOTE(review): the prompt string is passed through JsonUtil.toString before
        // templating — presumably to escape braces/quotes; confirm this is intentional.
        Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPromptStr)).apply(new HashMap<>());
        keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
        Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage());
        keyPipelineLog.info("step one model response:{}", response.content().text());
        // Extract the schema-link section from the model's free-text answer.
        String schemaLinkStr = OutputFormat.getSchemaLink(response.content().text());
        String generateSqlPrompt = sqlPromptGenerator.generateSqlPrompt(llmReq, schemaLinkStr, sqlExamples);
        Prompt sqlPrompt = PromptTemplate.from(JsonUtil.toString(generateSqlPrompt)).apply(new HashMap<>());
        keyPipelineLog.info("step two request prompt:{}", sqlPrompt.toSystemMessage());
        Response<AiMessage> sqlResult = chatLanguageModel.generate(sqlPrompt.toSystemMessage());
        String result = sqlResult.content().text();
        keyPipelineLog.info("step two model response:{}", result);
        // Single candidate SQL with a fixed confidence of 1.0.
        Map<String, Double> sqlMap = new HashMap<>();
        sqlMap.put(result, 1D);
        keyPipelineLog.info("schemaLinkStr:{},sqlMap:{}", schemaLinkStr, sqlMap);
        LLMResp llmResp = new LLMResp();
        llmResp.setQuery(llmReq.getQueryText());
        llmResp.setSqlRespMap(OutputFormat.buildSqlRespMap(sqlExamples, sqlMap));
        return llmResp;
    }
    // Registers this strategy in the factory once the Spring bean is initialized.
    @Override
    public void afterPropertiesSet() {
        SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.TWO_PASS_AUTO_COT, this);
    }
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((1891, 1970), 'dev.langchain4j.model.input.PromptTemplate.from'), ((2459, 2539), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package com.sg.chatbot.service;
import org.springframework.http.codec.ServerSentEvent;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
/**
 * Chat service exposing a blocking chat call and a streaming (SSE) variant,
 * both backed by OpenAI models that share one token-window chat memory.
 */
@Service
public class ChatService {
  // SECURITY(review): a real-looking OpenAI API key is hard-coded and committed
  // to source control — revoke this key and inject it from configuration or an
  // environment variable instead.
  private String openaiApiKey = "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";
  private Assistant assistant;
  private StreamingAssistant streamingAssistant;
  // Synchronous assistant: returns the whole reply at once.
  interface Assistant {
    String chat(String message);
  }
  // Streaming assistant: emits the reply token by token.
  interface StreamingAssistant {
    TokenStream chat(String message);
  }
  public ChatService(){
    if (openaiApiKey == null) {
      System.err
          .println("ERROR: OPENAI_API_KEY environment variable is not set. Please set it to your OpenAI API key.");
    }
    // NOTE(review): one shared 2000-token memory backs BOTH assistants, so
    // streaming and non-streaming conversations are interleaved in one history —
    // confirm that is intended.
    var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
    assistant = AiServices.builder(Assistant.class)
        .chatLanguageModel(OpenAiChatModel.withApiKey(openaiApiKey))
        .chatMemory(memory)
        .build();
    streamingAssistant = AiServices.builder(StreamingAssistant.class)
        .streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(openaiApiKey))
        .chatMemory(memory)
        .build();
  }
  /**
   * Sends one message and blocks for the complete reply.
   */
  public String chat(String message) {
    System.out.println(message);
    return assistant.chat(message);
  }
  /**
   * Sends one message and returns the reply as a flux of server-sent events,
   * one event (named "chat") per streamed token.
   */
  public Flux<ServerSentEvent<String>> chatStream(String message) {
    // Unicast sink: a single subscriber per call; tokens are buffered until subscription.
    Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
    streamingAssistant.chat(message)
        .onNext(sink::tryEmitNext)
        .onComplete(c -> sink.tryEmitComplete())
        .onError(sink::tryEmitError)
        .start();
    return sink.asFlux().map(mes -> ServerSentEvent.<String>builder()
        .event("chat")
        .data(mes)
        .build());
  }
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((1177, 1326), 'dev.langchain4j.service.AiServices.builder'), ((1177, 1309), 'dev.langchain4j.service.AiServices.builder'), ((1177, 1281), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1530), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1513), 'dev.langchain4j.service.AiServices.builder'), ((1354, 1485), 'dev.langchain4j.service.AiServices.builder'), ((1748, 1793), 'reactor.core.publisher.Sinks.many'), ((1748, 1770), 'reactor.core.publisher.Sinks.many'), ((2009, 2107), 'org.springframework.http.codec.ServerSentEvent.<String>builder'), ((2009, 2090), 'org.springframework.http.codec.ServerSentEvent.<String>builder'), ((2009, 2065), 'org.springframework.http.codec.ServerSentEvent.<String>builder')] |
package dev.langchain4j.model.azure;

import com.azure.ai.openai.models.*;
import dev.langchain4j.agent.tool.ToolExecutionRequest;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.model.output.TokenUsage;
import java.util.List;
import static dev.langchain4j.model.azure.InternalAzureOpenAiHelper.finishReasonFrom;
import static java.util.Collections.singletonList;
/**
 * This class needs to be thread safe because it is called when a streaming result comes back
 * and there is no guarantee that this thread will be the same as the one that initiated the request,
 * in fact it almost certainly won't be.
 */
class AzureOpenAiStreamingResponseBuilder {
    // StringBuffer (not StringBuilder) because appends may come from a different
    // thread than the one that later builds the response — see class comment.
    private final StringBuffer contentBuilder = new StringBuffer();
    private final StringBuffer toolNameBuilder = new StringBuffer();
    private final StringBuffer toolArgumentsBuilder = new StringBuffer();
    // volatile: written by the streaming callback thread, read in build().
    private volatile CompletionsFinishReason finishReason;
    // Token count of the request, supplied by the caller; used for usage stats.
    private final Integer inputTokenCount;
    public AzureOpenAiStreamingResponseBuilder(Integer inputTokenCount) {
        this.inputTokenCount = inputTokenCount;
    }
    /**
     * Folds one streamed chat-completion chunk into this builder: records the
     * finish reason (when present) and appends either a content delta or a
     * function-call name/arguments delta. Null or empty chunks are ignored.
     */
    public void append(ChatCompletions completions) {
        if (completions == null) {
            return;
        }
        List<ChatChoice> choices = completions.getChoices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        // Only the first choice is consumed for streaming.
        ChatChoice chatCompletionChoice = choices.get(0);
        if (chatCompletionChoice == null) {
            return;
        }
        CompletionsFinishReason finishReason = chatCompletionChoice.getFinishReason();
        if (finishReason != null) {
            this.finishReason = finishReason;
        }
        com.azure.ai.openai.models.ChatResponseMessage delta = chatCompletionChoice.getDelta();
        if (delta == null) {
            return;
        }
        String content = delta.getContent();
        if (content != null) {
            contentBuilder.append(content);
            return;
        }
        FunctionCall functionCall = delta.getFunctionCall();
        if (functionCall != null) {
            if (functionCall.getName() != null) {
                toolNameBuilder.append(functionCall.getName());
            }
            if (functionCall.getArguments() != null) {
                toolArgumentsBuilder.append(functionCall.getArguments());
            }
        }
    }
    /**
     * Folds one streamed text-completion chunk into this builder: records the
     * finish reason (when present) and appends the text delta.
     */
    public void append(Completions completions) {
        if (completions == null) {
            return;
        }
        List<Choice> choices = completions.getChoices();
        if (choices == null || choices.isEmpty()) {
            return;
        }
        Choice completionChoice = choices.get(0);
        if (completionChoice == null) {
            return;
        }
        CompletionsFinishReason completionsFinishReason = completionChoice.getFinishReason();
        if (completionsFinishReason != null) {
            this.finishReason = completionsFinishReason;
        }
        String token = completionChoice.getText();
        if (token != null) {
            contentBuilder.append(token);
        }
    }
    /**
     * Assembles the final response from what has been streamed so far.
     * Accumulated text content takes precedence; otherwise, if a tool
     * (function) call was streamed, that is returned; otherwise {@code null}.
     */
    public Response<AiMessage> build(Tokenizer tokenizer, boolean forcefulToolExecution) {
        String content = contentBuilder.toString();
        if (!content.isEmpty()) {
            return Response.from(
                    AiMessage.from(content),
                    tokenUsage(content, tokenizer),
                    finishReasonFrom(finishReason)
            );
        }
        String toolName = toolNameBuilder.toString();
        if (!toolName.isEmpty()) {
            ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder()
                    .name(toolName)
                    .arguments(toolArgumentsBuilder.toString())
                    .build();
            return Response.from(
                    AiMessage.from(toolExecutionRequest),
                    tokenUsage(toolExecutionRequest, tokenizer, forcefulToolExecution),
                    finishReasonFrom(finishReason)
            );
        }
        return null;
    }
    // Estimated usage for plain text output; null when no tokenizer is available.
    private TokenUsage tokenUsage(String content, Tokenizer tokenizer) {
        if (tokenizer == null) {
            return null;
        }
        int outputTokenCount = tokenizer.estimateTokenCountInText(content);
        return new TokenUsage(inputTokenCount, outputTokenCount);
    }
    // Estimated usage for a tool-call output; null when no tokenizer is available.
    private TokenUsage tokenUsage(ToolExecutionRequest toolExecutionRequest, Tokenizer tokenizer, boolean forcefulToolExecution) {
        if (tokenizer == null) {
            return null;
        }
        int outputTokenCount = 0;
        if (forcefulToolExecution) {
            // OpenAI calculates output tokens differently when tool is executed forcefully
            outputTokenCount += tokenizer.estimateTokenCountInForcefulToolExecutionRequest(toolExecutionRequest);
        } else {
            outputTokenCount = tokenizer.estimateTokenCountInToolExecutionRequests(singletonList(toolExecutionRequest));
        }
        return new TokenUsage(inputTokenCount, outputTokenCount);
    }
}
| [
"dev.langchain4j.agent.tool.ToolExecutionRequest.builder"
] | [((3735, 3894), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3735, 3865), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3735, 3801), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')] |
package dev.nano.sbot.configuration;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.nano.sbot.retriever.EmbeddingStoreLoggingRetriever;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import java.time.Duration;
import java.util.List;
import static dev.nano.sbot.constant.Constants.PROMPT_TEMPLATE_2;
@Configuration
@RequiredArgsConstructor
@Slf4j
public class LangChainConfiguration {
@Value("${langchain.api.key}")
private String apiKey;
@Value("${langchain.timeout}")
private Long timeout;
private final List<Document> documents;
@Bean
public ConversationalRetrievalChain chain() {
EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
.documentSplitter(DocumentSplitters.recursive(500, 0))
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
log.info("Ingesting Spring Boot Resources ...");
ingestor.ingest(documents);
log.info("Ingested {} documents", documents.size());
EmbeddingStoreRetriever retriever = EmbeddingStoreRetriever.from(embeddingStore, embeddingModel);
EmbeddingStoreLoggingRetriever loggingRetriever = new EmbeddingStoreLoggingRetriever(retriever);
/*MessageWindowChatMemory chatMemory = MessageWindowChatMemory.builder()
.maxMessages(10)
.build();*/
log.info("Building ConversationalRetrievalChain ...");
ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
.chatLanguageModel(OpenAiChatModel.builder()
.apiKey(apiKey)
.timeout(Duration.ofSeconds(timeout))
.build()
)
.promptTemplate(PromptTemplate.from(PROMPT_TEMPLATE_2))
//.chatMemory(chatMemory)
.retriever(loggingRetriever)
.build();
log.info("Spring Boot knowledge base is ready!");
return chain;
}
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1682, 1906), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1881), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1833), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1682, 1785), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2530, 2966), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2941), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2854), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2530, 2782), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2604, 2764), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2604, 2731), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2604, 2669), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.nexus.backend.service;
import com.nexus.backend.dto.UserTender;
import com.nexus.backend.entity.Act;
import com.nexus.backend.entity.Tender;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
@Service
public class AiService {
public void testGpt(){
PromptTemplate promptTemplate = PromptTemplate
.from("Tell me a {{adjective}} joke about {{content}}..");
Map<String, Object> variables = new HashMap<>();
variables.put("adjective", "funny");
variables.put("content", "computers");
Prompt prompt = promptTemplate.apply(variables);
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey("KEY").modelName(GPT_3_5_TURBO)
.temperature(0.3)
.build();
String response = model.generate(prompt.text());
System.out.println(response);
}
public String checkIfCompliant(Act act, UserTender userTender) {
PromptTemplate promptTemplate = PromptTemplate
.from("This is a government act with a set of compliances {{act}}, With keeping this above act in mind, tell me if my tender/plan seems broadly compliant or not. " +
"Consider this tender/plan: {{tender}}" +
"Let me know if there are any shortcomings and where the tender/plan is not compliant. Also tell me about penalties.");
Map<String, Object> variables = new HashMap<>();
variables.put("act", act);
variables.put("tender", userTender);
Prompt prompt = promptTemplate.apply(variables);
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey("API_KEY")
.modelName(GPT_3_5_TURBO)
.temperature(0.3)
.build();
String response = model.generate(prompt.text());
System.out.println(response);
return response;
}
public void Summarise(){
}
public String checkIfTenderIsCompliant(Tender tender, String userTender) {
PromptTemplate promptTemplate = PromptTemplate
.from("This is a government Tender with a set of compliances {{tender}}. With keeping this above act in mind, tell me if my tender seems broadly compliant or not. " +
"Consider this tender/plan: {{userTender}}" +
"Let me know if there are any shortcomings and where the tender is not compliant. Also tell me about penalties.");
Map<String, Object> variables = new HashMap<>();
variables.put("tender", tender.toString());
variables.put("userTender", userTender.toString());
Prompt prompt = promptTemplate.apply(variables);
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey("KEY")
.modelName(GPT_3_5_TURBO)
.temperature(0.3)
.build();
String response = model.generate(prompt.text());
System.out.println(response);
return response;
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((957, 1097), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1072), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1038), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((957, 1013), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2109), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2084), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2050), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1948, 2008), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3197), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3163), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3065, 3121), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package eu.luminis.faqlangchain.config;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.inprocess.InProcessEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.inprocess.InProcessEmbeddingModelType.*;
import static dev.langchain4j.model.openai.OpenAiModelName.*;
import static java.time.Duration.*;
@Configuration
public class QuestionAnsweringConfig {
@Value("${openai.apiKey}")
private String openaiApiKey;
@Qualifier("openaiModel")
@Bean
public EmbeddingModel openaiEmbeddingModel() {
return OpenAiEmbeddingModel.builder()
.apiKey(openaiApiKey)
.modelName(TEXT_EMBEDDING_ADA_002)
.build();
}
@Qualifier("inMemoryModel")
@Bean
public EmbeddingModel inMemoryEmbeddingModel() {
return new InProcessEmbeddingModel(ALL_MINILM_L6_V2);
}
@Qualifier("openaiChatModel")
@Bean
public ChatLanguageModel openaiChatModel() {
return OpenAiChatModel.builder()
.apiKey(openaiApiKey)
.modelName(GPT_3_5_TURBO)
.temperature(0.7)
.timeout(ofSeconds(15))
.maxRetries(3)
.logResponses(true)
.logRequests(true)
.build();
}
@Qualifier("inMemoryEmbeddingStore")
@Bean
public EmbeddingStore<TextSegment> inMemoryEmbeddingStore() {
return new InMemoryEmbeddingStore<>();
}
@Qualifier("weaviateEmbeddingStore")
@Bean
public EmbeddingStore<TextSegment> weaviateEmbeddingStore(@Value("${weaviate.apiKey}") String apiKey,
@Value("${weaviate.host}") String host) {
return WeaviateEmbeddingStore.builder()
.apiKey(apiKey)
.scheme("https")
.host(host)
.build();
}
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder"
] | [((1210, 1354), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1210, 1329), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1210, 1278), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1635, 1941), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1916), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1881), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1845), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1814), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1774), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1740), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1635, 1698), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2397, 2547), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2522), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2494), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder'), ((2397, 2461), 'dev.langchain4j.store.embedding.weaviate.WeaviateEmbeddingStore.builder')] |
package com.example.demo;
import java.time.Duration;
import java.time.LocalDate;
import java.util.Arrays;
import java.util.List;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.structured.Description;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.MemoryId;
import dev.langchain4j.service.SystemMessage;
import dev.langchain4j.service.UserMessage;
import dev.langchain4j.service.V;
public class AiServicesExamples {
static Duration duration = Duration.ofSeconds(60);
static ChatLanguageModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).timeout(duration).build();
////////////////// SIMPLE EXAMPLE //////////////////////
static class Simple_AI_Service_Example {
interface Assistant {
String chat(String message);
}
public static void main(String[] args) {
Assistant assistant = AiServices.create(Assistant.class, model);
String userMessage = "Translate 'Plus-Values des cessions de valeurs mobilières, de droits sociaux et gains assimilés'";
String answer = assistant.chat(userMessage);
System.out.println(answer);
}
}
////////////////// WITH MESSAGE AND VARIABLES //////////////////////
static class AI_Service_with_System_and_User_Messages_Example {
interface TextUtils {
@SystemMessage("You are a professional translator into {{language}}")
@UserMessage("Translate the following text: {{text}}")
String translate(@V("text") String text, @V("language") String language);
@SystemMessage("Summarize every message from user in {{n}} bullet points. Provide only bullet points.")
List<String> summarize(@UserMessage String text, @V("n") int n);
}
public static void main(String[] args) {
TextUtils utils = AiServices.create(TextUtils.class, model);
String translation = utils.translate("Hello, how are you?", "italian");
System.out.println(translation); // Ciao, come stai?
String text = "AI, or artificial intelligence, is a branch of computer science that aims to create "
+ "machines that mimic human intelligence. This can range from simple tasks such as recognizing "
+ "patterns or speech to more complex tasks like making decisions or predictions.";
List<String> bulletPoints = utils.summarize(text, 3);
System.out.println(bulletPoints);
}
}
////////////////////EXTRACTING DIFFERENT DATA TYPES ////////////////////
static class Sentiment_Extracting_AI_Service_Example {
enum Sentiment {
POSITIVE, NEUTRAL, NEGATIVE;
}
interface SentimentAnalyzer {
@UserMessage("Analyze sentiment of {{it}}")
Sentiment analyzeSentimentOf(String text);
@UserMessage("Does {{it}} have a positive sentiment?")
boolean isPositive(String text);
}
public static void main(String[] args) {
SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, model);
Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is amazing!");
System.out.println(sentiment); // POSITIVE
boolean positive = sentimentAnalyzer.isPositive("It is bad!");
System.out.println(positive); // false
}
}
static class POJO_Extracting_AI_Service_Example {
static class Person {
private String firstName;
private String lastName;
private LocalDate birthDate;
@Override
public String toString() {
return "Person {" + " firstName = \"" + firstName + "\"" + ", lastName = \"" + lastName + "\""
+ ", birthDate = " + birthDate + " }";
}
}
interface PersonExtractor {
@UserMessage("Extract information about a person from {{it}}")
Person extractPersonFrom(String text);
}
public static void main(String[] args) {
PersonExtractor extractor = AiServices.create(PersonExtractor.class, model);
String text = "In 1968, amidst the fading echoes of Independence Day, "
+ "a child named John arrived under the calm evening sky. "
+ "This newborn, bearing the surname Doe, marked the start of a new journey.";
Person person = extractor.extractPersonFrom(text);
System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 }
}
}
////////////////////// DESCRIPTIONS ////////////////////////
static class POJO_With_Descriptions_Extracting_AI_Service_Example {
static class Recipe {
@Description("short title, 3 words maximum")
private String title;
@Description("short description, 2 sentences maximum")
private String description;
@Description("each step should be described in 6 to 8 words, steps should rhyme with each other")
private List<String> steps;
private Integer preparationTimeMinutes;
@Override
public String toString() {
return "Recipe {" +
" title = \"" + title + "\"" +
", description = \"" + description + "\"" +
", steps = " + steps +
", preparationTimeMinutes = " + preparationTimeMinutes +
" }";
}
}
@StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}")
static class CreateRecipePrompt {
private String dish;
private List<String> ingredients;
}
interface Chef {
Recipe createRecipeFrom(String... ingredients);
Recipe createRecipe(CreateRecipePrompt prompt);
}
public static void main(String[] args) {
Chef chef = AiServices.create(Chef.class, model);
Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives", "lemon");
System.out.println(recipe);
CreateRecipePrompt prompt = new CreateRecipePrompt();
prompt.dish = "oven dish";
prompt.ingredients = Arrays.asList("cucumber", "tomato", "feta", "onion", "olives", "potatoes");
Recipe anotherRecipe = chef.createRecipe(prompt);
System.out.println(anotherRecipe);
}
}
////////////////////////// WITH MEMORY /////////////////////////
static class ServiceWithMemoryExample {
interface Assistant {
String chat(String message);
}
public static void main(String[] args) {
ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(model)
.chatMemory(chatMemory)
.build();
String answer = assistant.chat("Hello! My name is Klaus.");
System.out.println(answer); // Hello Klaus! How can I assist you today?
String answerWithName = assistant.chat("What is my name?");
System.out.println(answerWithName); // Your name is Klaus.
}
}
static class ServiceWithMemoryForEachUserExample {
interface Assistant {
String chat(@MemoryId int memoryId, @UserMessage String userMessage);
}
public static void main(String[] args) {
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(model)
.chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10))
.build();
System.out.println(assistant.chat(1, "Hello, my name is Klaus"));
// Hi Klaus! How can I assist you today?
System.out.println(assistant.chat(2, "Hello, my name is Francine"));
// Hello Francine! How can I assist you today?
System.out.println(assistant.chat(1, "What is my name?"));
// Your name is Klaus.
System.out.println(assistant.chat(2, "What is my name?"));
// Your name is Francine.
}
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((792, 874), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((792, 866), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((792, 848), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((6740, 6894), 'dev.langchain4j.service.AiServices.builder'), ((6740, 6865), 'dev.langchain4j.service.AiServices.builder'), ((6740, 6821), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7685), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7656), 'dev.langchain4j.service.AiServices.builder'), ((7478, 7559), 'dev.langchain4j.service.AiServices.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
public class _04_Agents {
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) {
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
Response<AiMessage> chat(String userMessage);
}
public static void main(String[] args) {
String openAiKey = System.getenv("OPENAI_API_KEY");
var assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(openAiKey))
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.tools(new Calculator())
.build();
var question = "What is the sum of the numbers of letters in the words 'language' and 'model'";
var response = assistant.chat(question);
System.out.println(response.content().text());
System.out.println("\n\n########### TOKEN USAGE ############\n");
System.out.println(response.tokenUsage());
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((821, 1069), 'dev.langchain4j.service.AiServices.builder'), ((821, 1044), 'dev.langchain4j.service.AiServices.builder'), ((821, 1003), 'dev.langchain4j.service.AiServices.builder'), ((821, 930), 'dev.langchain4j.service.AiServices.builder')] |
package me.nzuguem.bot.configurations.llm;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.annotation.PreDestroy;
import jakarta.enterprise.context.RequestScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@RequestScoped
public class ChatMemoryBean implements ChatMemoryProvider {
private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();
@Override
public ChatMemory get(Object memoryId) {
return memories.computeIfAbsent(memoryId, id -> MessageWindowChatMemory.builder()
.maxMessages(20)
.id(memoryId)
.build());
}
@PreDestroy
public void close() {
memories.clear();
}
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((631, 752), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((631, 727), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((631, 697), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package net.savantly.mainbot.config;
import java.time.Duration;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import lombok.RequiredArgsConstructor;
import net.savantly.mainbot.service.replicate.ReplicateClient;
@Configuration
@RequiredArgsConstructor
public class ChatModelConfig {
private final OpenAIConfig openAIConfig;
@Bean
@Primary
@ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "true")
public ChatLanguageModel getChatModel(ReplicateClient replicateClient) {
return getOpenAiChatModel();
// return new ReplicateChatLanguageModel(replicateClient);
}
public ChatLanguageModel getOpenAiChatModel() {
String apiKey = openAIConfig.getApiKey();
return OpenAiChatModel.builder()
.apiKey(apiKey) // https://platform.openai.com/account/api-keys
.modelName(openAIConfig.getChatModelId())
.temperature(0.1)
.logResponses(false)
.logRequests(false)
.timeout(Duration.ofSeconds(openAIConfig.getTimeoutSeconds()))
.build();
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1056, 1430), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1405), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1326), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1290), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1253), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1219), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1056, 1113), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import jakarta.enterprise.context.ApplicationScoped;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
@ApplicationScoped
public class ChatMemoryBean implements ChatMemoryProvider {
private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>();
@Override
public ChatMemory get(Object memoryId) {
return memories.computeIfAbsent(memoryId, id -> MessageWindowChatMemory.builder()
.maxMessages(3)
.id(memoryId)
.build());
}
public void clear(Object session) {
memories.remove(session);
}
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((608, 728), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 703), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 673), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {
/**
* The embedding store (the database).
* The bean is provided by the quarkus-langchain4j-redis extension.
*/
@Inject
RedisEmbeddingStore store;
/**
* The embedding model (how the vector of a document is computed).
* The bean is provided by the LLM (like openai) extension.
*/
@Inject
EmbeddingModel embeddingModel;
public void ingest(@Observes StartupEvent event) {
System.out.printf("Ingesting documents...%n");
List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser());
var ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(500, 0))
.build();
ingestor.ingest(documents);
System.out.printf("Ingested %d documents.%n", documents.size());
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1414, 1611), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1586), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1533), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1485), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.example.demo;
import java.time.Duration;
import dev.langchain4j.chain.ConversationalChain;
import dev.langchain4j.model.openai.OpenAiChatModel;
public class _07_ConversationalChain {
public static void main(String[] args) {
Duration duration = Duration.ofSeconds(60);
OpenAiChatModel model = OpenAiChatModel.builder().apiKey(ApiKeys.OPENAI_API_KEY).timeout(duration).build();
ConversationalChain chain = ConversationalChain.builder().chatLanguageModel(model)
// .chatMemory(...) // you can override default chat memory
.build();
String userMessage1 = "Can you give a brief explanation of the Agile methodology, 3 lines max?";
System.out.println("[User]: " + userMessage1);
String answer1 = chain.execute(userMessage1);
System.out.println("[LLM]: " + answer1);
String userMessage2 = "What are good tools for that? 3 lines max.";
System.out.println("[User]: " + userMessage2);
String answer2 = chain.execute(userMessage2);
System.out.println("[LLM]: " + answer2);
}
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder",
"dev.langchain4j.chain.ConversationalChain.builder"
] | [((313, 395), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((313, 387), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((313, 369), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((428, 559), 'dev.langchain4j.chain.ConversationalChain.builder'), ((428, 482), 'dev.langchain4j.chain.ConversationalChain.builder')] |
package org.mf.langchain.service;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.localai.LocalAiChatModel;
import dev.langchain4j.model.localai.LocalAiStreamingChatModel;
import org.jetbrains.annotations.Nullable;
import org.mf.langchain.util.LanguageModel;
import org.mf.langchain.StreamLanguageModel;
import org.springframework.stereotype.Service;
import java.time.Duration;
import java.util.function.Consumer;
@Service
public class LangChainService {
private final LanguageModel lm;
private final StreamLanguageModel slm;
LangChainService() {
lm = new LanguageModel(LocalAiChatModel.builder()
.modelName("phi-2")
.baseUrl("http://localhost:8080")
.build());
slm = new StreamLanguageModel(LocalAiStreamingChatModel.builder()
.modelName("phi-2")
.baseUrl("http://localhost:8080")
.timeout(Duration.ofDays(1))
.temperature(0.8)
.build());
}
public String Generate(String prompt)
{
return lm.RunBlocking(prompt);
}
public void GenerateStream(String prompt, Consumer<String> onNext, Consumer<Throwable> onError, @Nullable Consumer<AiMessage> onComplete) {
slm.generate(prompt, onNext, onError, onComplete);
}
}
| [
"dev.langchain4j.model.localai.LocalAiChatModel.builder",
"dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder"
] | [((623, 760), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((623, 735), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((623, 685), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((802, 1027), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 1002), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 968), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 923), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder'), ((802, 873), 'dev.langchain4j.model.localai.LocalAiStreamingChatModel.builder')] |
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.service.AiServices;
public class _04_Agents {
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) {
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
Response<AiMessage> chat(String userMessage);
}
public static void main(String[] args) {
String openAiKey = System.getenv("OPENAI_API_KEY");
var assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(openAiKey))
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.tools(new Calculator())
.build();
var question = "What is the sum of the numbers of letters in the words 'language' and 'model'";
var response = assistant.chat(question);
System.out.println(response.content().text());
System.out.println("\n\n########### TOKEN USAGE ############\n");
System.out.println(response.tokenUsage());
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((821, 1069), 'dev.langchain4j.service.AiServices.builder'), ((821, 1044), 'dev.langchain4j.service.AiServices.builder'), ((821, 1003), 'dev.langchain4j.service.AiServices.builder'), ((821, 930), 'dev.langchain4j.service.AiServices.builder')] |
package io.quarkiverse.langchain4j.workshop.chat;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore;
import io.quarkus.runtime.StartupEvent;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.enterprise.event.Observes;
import jakarta.inject.Inject;
import java.io.File;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class DocumentIngestor {
/**
* The embedding store (the database).
* The bean is provided by the quarkus-langchain4j-redis extension.
*/
@Inject
RedisEmbeddingStore store;
/**
* The embedding model (how the vector of a document is computed).
* The bean is provided by the LLM (like openai) extension.
*/
@Inject
EmbeddingModel embeddingModel;
public void ingest(@Observes StartupEvent event) {
System.out.printf("Ingesting documents...%n");
List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser());
var ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(500, 0))
.build();
ingestor.ingest(documents);
System.out.printf("Ingested %d documents.%n", documents.size());
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1414, 1611), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1586), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1533), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1414, 1485), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package io.quarkiverse.langchain4j.samples;
import java.util.function.Supplier;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.store.memory.chat.InMemoryChatMemoryStore;
public class CustomProvider implements Supplier<ChatMemoryProvider> {
private final InMemoryChatMemoryStore store = new InMemoryChatMemoryStore();
@Override
public ChatMemoryProvider get() {
return new ChatMemoryProvider() {
@Override
public ChatMemory get(Object memoryId) {
return MessageWindowChatMemory.builder()
.maxMessages(20)
.id(memoryId)
.chatMemoryStore(store)
.build();
}
};
}
}
| [
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((652, 845), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 812), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 764), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((652, 726), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')] |
package com.moyz.adi.common.service;
import com.moyz.adi.common.cosntant.AdiConstant;
import com.moyz.adi.common.interfaces.AbstractLLMService;
import com.moyz.adi.common.vo.QianFanSetting;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.qianfan.QianfanChatModel;
import dev.langchain4j.model.qianfan.QianfanStreamingChatModel;
import lombok.experimental.Accessors;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.StringUtils;
/**
* QianFan LLM service
*/
@Slf4j
@Accessors(chain = true)
public class QianFanLLMService extends AbstractLLMService<QianFanSetting> {
public QianFanLLMService(String modelName) {
super(modelName, AdiConstant.SysConfigKey.QIANFAN_SETTING, QianFanSetting.class, null);
}
@Override
public boolean isEnabled() {
return StringUtils.isNoneBlank(setting.getApiKey(), setting.getSecretKey());
}
@Override
protected ChatLanguageModel buildChatLLM() {
return QianfanChatModel.builder()
.modelName(modelName)
.temperature(0.7)
.topP(1.0)
.maxRetries(1)
.apiKey(setting.getApiKey())
.secretKey(setting.getSecretKey())
.build();
}
@Override
protected StreamingChatLanguageModel buildStreamingChatLLM() {
return QianfanStreamingChatModel.builder()
.modelName(modelName)
.temperature(0.7)
.topP(1.0)
.apiKey(setting.getApiKey())
.secretKey(setting.getSecretKey())
.build();
}
@Override
protected String parseError(Object error) {
return null;
}
}
| [
"dev.langchain4j.model.qianfan.QianfanChatModel.builder",
"dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder"
] | [((1052, 1329), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1052, 1304), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1052, 1253), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1052, 1208), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1052, 1177), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1052, 1150), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1052, 1116), 'dev.langchain4j.model.qianfan.QianfanChatModel.builder'), ((1434, 1689), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1434, 1664), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1434, 1613), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1434, 1568), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1434, 1541), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder'), ((1434, 1507), 'dev.langchain4j.model.qianfan.QianfanStreamingChatModel.builder')] |
package dev.onurb.travelassistant;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import java.io.IOException;
import java.time.Duration;
import java.util.Scanner;
public class TravelAgency {
public static void main(String[] args) throws IOException {
String apiKey = System.getenv("OPENAPI_KEY");
TravelAssistant assistant = AiServices.builder(TravelAssistant.class)
.chatLanguageModel(OpenAiChatModel.builder().apiKey(apiKey).timeout(Duration.ofMinutes(3)).build())
.tools(new TripServices())
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
String input = readInput();
while (!"bye".equalsIgnoreCase(input)) {
String answer = assistant.chat(input);
System.out.println("\u001B[33m" + answer + "\u001B[37m");
input = readInput();
}
}
private static String readInput() {
Scanner in = new Scanner(System.in);
System.out.print("> ");
return in.nextLine();
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((460, 758), 'dev.langchain4j.service.AiServices.builder'), ((460, 733), 'dev.langchain4j.service.AiServices.builder'), ((460, 660), 'dev.langchain4j.service.AiServices.builder'), ((460, 617), 'dev.langchain4j.service.AiServices.builder'), ((537, 616), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((537, 608), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((537, 577), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
/*
* Copyright 2024 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package gemini.workshop;
import dev.langchain4j.agent.tool.P;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel;
import dev.langchain4j.service.AiServices;
public class Step8b_FunctionCalling {
record WeatherForecast(String location, String forecast, int temperature) {}
static class WeatherForecastService {
@Tool("Get the weather forecast for a location")
WeatherForecast getForecast(@P("Location to get the forecast for") String location) {
if (location.equals("Paris")) {
return new WeatherForecast("Paris", "Sunny", 20);
} else if (location.equals("London")) {
return new WeatherForecast("London", "Rainy", 15);
} else {
return new WeatherForecast("Unknown", "Unknown", 0);
}
}
}
interface WeatherAssistant {
String chat(String userMessage);
}
public static void main(String[] args) {
ChatLanguageModel model = VertexAiGeminiChatModel.builder()
.project(System.getenv("PROJECT_ID"))
.location(System.getenv("LOCATION"))
.modelName("gemini-1.0-pro")
.maxOutputTokens(100)
.build();
WeatherForecastService weatherForecastService = new WeatherForecastService();
WeatherAssistant assistant = AiServices.builder(WeatherAssistant.class)
.chatLanguageModel(model)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.tools(weatherForecastService)
.build();
System.out.println(assistant.chat("What is the weather in Paris?"));
System.out.println(assistant.chat("What is the weather in London?"));
System.out.println(assistant.chat("Is the temperature warmer in Paris or London?"));
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder"
] | [((1743, 1971), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1950), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1916), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1875), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1743, 1826), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((2098, 2311), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2290), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2247), 'dev.langchain4j.service.AiServices.builder'), ((2098, 2178), 'dev.langchain4j.service.AiServices.builder')] |
package com.hillarocket.application.handler;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@BrowserCallable
@AnonymousAllowed
public class OpenApiHandler {
@Value("${openai.api.key}")
private String OPENAI_API_KEY;
private Assistant assistant;
private StreamingAssistant streamingAssistant;
interface Assistant {
String chat(String message);
}
interface StreamingAssistant {
TokenStream chat(String message);
}
@PostConstruct
public void init() {
if (OPENAI_API_KEY == null) {
System.err.println("ERROR: OPENAI_API_KEY environment variable is not set. Please set it to your OpenAI API key.");
}
var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
streamingAssistant = AiServices.builder(StreamingAssistant.class)
.streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
}
public String chat(String message) {
return assistant.chat(message);
}
public Flux<String> chatStream(String message) {
Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
streamingAssistant.chat(message)
.onNext(sink::tryEmitNext)
.onComplete(c -> sink.tryEmitComplete())
.onError(sink::tryEmitError)
.start();
return sink.asFlux();
}
} | [
"dev.langchain4j.service.AiServices.builder"
] | [((1336, 1511), 'dev.langchain4j.service.AiServices.builder'), ((1336, 1486), 'dev.langchain4j.service.AiServices.builder'), ((1336, 1450), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1745), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1720), 'dev.langchain4j.service.AiServices.builder'), ((1543, 1684), 'dev.langchain4j.service.AiServices.builder'), ((1929, 1974), 'reactor.core.publisher.Sinks.many'), ((1929, 1951), 'reactor.core.publisher.Sinks.many')] |
package _Engenharia;
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
//import dev.langchain4j.data.document.splitter.ParagraphSplitter; !!!!!!!!!!!!!!!DANDO ERRO, substitui temporariamente!!!!!!!!!!!!!!!!!!!!!
import dev.langchain4j.data.document.splitter.DocumentSplitters; //Substituição
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.huggingface.HuggingFaceChatModel;
import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument;
import static java.time.Duration.ofSeconds;
import java.io.File;
public class Assistente {
// You can get your own HuggingFace API key here: https://huggingface.co/settings/tokens
public static final String hfApiKey = "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";
private static String pergunta;
public String fazerPergunta() throws Exception {
Document document = loadDocument(toPath("template.txt")); //Usa documento criado com todos os dados do documento selecionado (Esse documento e criado dentro do pacote _Engenharia)
//escolhendo um modelo para vetorizar meu texto
EmbeddingModel embeddingModel = HuggingFaceEmbeddingModel.builder()
.accessToken(hfApiKey)
.modelId("sentence-transformers/all-MiniLM-L6-v2")
.waitForModel(true)
.timeout(ofSeconds(60))
.build();
EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
//estou aplicando o modelo de vetorização escolhido ao meu texto
EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
// .splitter(new ParagraphSplitter()) !!!!!!!!!!!!!!!DANDO ERRO, substitui temporariamente!!!!!!!!!!!!!!!!!!!!!
.documentSplitter(DocumentSplitters.recursive(500)) //Substituição
.embeddingModel(embeddingModel)
.embeddingStore(embeddingStore)
.build();
ingestor.ingest(document);
//aqui eu escolho o modelo da inferência (a pergunta)
ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder()
.chatLanguageModel(HuggingFaceChatModel.withAccessToken(hfApiKey))
.retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
// .chatMemory() // you can override default chat memory
// .promptTemplate() // you can override default prompt template
.build();
//aqui eu faço a inferência
String answer = chain.execute(pergunta);
File delete_file = new File("src/main/java/_Engenharia/template.txt"); //Apaga o documento depois da resposta
delete_file.delete(); //Caso erro na resposta o arquivo NAO e deletado
return answer; // Charlie is a cheerful carrot living in VeggieVille...
//exemplo para continuar a pesquisa
//https://github.com/langchain4j/langchain4j/blob/7307f43d9823af619f1e3196252d212f3df04ddc/langchain4j/src/main/java/dev/langchain4j/model/huggingface/HuggingFaceChatModel.java
}
private static Path toPath(String fileName) {
try {
URL fileUrl = Assistente.class.getResource(fileName);
return Paths.get(fileUrl.toURI());
} catch (URISyntaxException e) {
throw new RuntimeException(e);
}
}
public void setPergunta(String p) {
pergunta = p;
}
}
| [
"dev.langchain4j.chain.ConversationalRetrievalChain.builder",
"dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1706, 1948), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1923), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1883), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1847), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((1706, 1780), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((2162, 2524), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2499), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2451), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2162, 2385), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2675, 3064), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2675, 2885), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2675, 2796), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')] |
package com.kchandrakant;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import java.util.HashMap;
import java.util.Map;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static java.time.Duration.ofSeconds;
public class PromptTemplates {
public static void main(String[] args) {
// Create a prompt template
PromptTemplate promptTemplate = PromptTemplate.from("Tell me a {{adjective}} joke about {{content}}..");
// Generate prompt using the prompt template and user variables
Map<String, Object> variables = new HashMap<>();
variables.put("adjective", "funny");
variables.put("content", "humans");
Prompt prompt = promptTemplate.apply(variables);
System.out.println(prompt.text());
// Create an instance of a model
ChatLanguageModel model = OpenAiChatModel.builder()
.apiKey(ApiKeys.OPENAI_API_KEY)
.modelName(GPT_3_5_TURBO)
.temperature(0.3)
.build();
// Start interacting
String response = model.generate(prompt.text());
System.out.println(response);
}
} | [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((1019, 1193), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1168), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1134), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1019, 1092), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.azure.migration.java.copilot.service;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class Configure {
@Bean
ServiceAnalysisAgent chooseServiceAnalysisAgent(ChatLanguageModel chatLanguageModel) {
return AiServices.builder(ServiceAnalysisAgent.class)
.chatLanguageModel(chatLanguageModel)
.build();
}
@Bean
ConfigureResourceAgent configureResourceAgent(ChatLanguageModel chatLanguageModel,ContentRetriever contentRetriever) {
return AiServices.builder(ConfigureResourceAgent.class)
.chatLanguageModel(chatLanguageModel)
.contentRetriever(contentRetriever)
.build();
}
@Bean
WorkflowChatAgent configureWorkflowChatAgent(ChatLanguageModel chatLanguageModel, ContentRetriever contentRetriever, MigrationWorkflowTools migrationWorkflowTools) {
return AiServices.builder(WorkflowChatAgent.class)
.chatLanguageModel(chatLanguageModel)
.tools(migrationWorkflowTools)
.chatMemory(MessageWindowChatMemory.withMaxMessages(10))
.build();
}
@Bean
ContentRetriever contentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) {
// You will need to adjust these parameters to find the optimal setting, which will depend on two main factors:
// - The nature of your data
// - The embedding model you are using
int maxResults = 5;
double minScore = 0.6;
return EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(maxResults)
.minScore(minScore)
.build();
}
@Bean
EmbeddingModel embeddingModel() {
return new AllMiniLmL6V2EmbeddingModel();
}
}
| [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder"
] | [((846, 971), 'dev.langchain4j.service.AiServices.builder'), ((846, 946), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1307), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1282), 'dev.langchain4j.service.AiServices.builder'), ((1128, 1230), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1753), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1728), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1655), 'dev.langchain4j.service.AiServices.builder'), ((1511, 1608), 'dev.langchain4j.service.AiServices.builder'), ((2167, 2404), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2379), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2343), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2303), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2167, 2255), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder')] |
package com.example.application.services;
import com.vaadin.flow.server.auth.AnonymousAllowed;
import dev.hilla.BrowserCallable;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import jakarta.annotation.PostConstruct;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;
import reactor.core.publisher.Sinks;
@Service
@BrowserCallable
@AnonymousAllowed
public class ChatService {
@Value("${openai.api.key}")
private String OPENAI_API_KEY;
private Assistant assistant;
private StreamingAssistant streamingAssistant;
interface Assistant {
String chat(String message);
}
interface StreamingAssistant {
TokenStream chat(String message);
}
@PostConstruct
public void init() {
var memory = TokenWindowChatMemory.withMaxTokens(2000, new OpenAiTokenizer("gpt-3.5-turbo"));
assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
streamingAssistant = AiServices.builder(StreamingAssistant.class)
.streamingChatLanguageModel(OpenAiStreamingChatModel.withApiKey(OPENAI_API_KEY))
.chatMemory(memory)
.build();
}
public String chat(String message) {
return assistant.chat(message);
}
public Flux<String> chatStream(String message) {
Sinks.Many<String> sink = Sinks.many().unicast().onBackpressureBuffer();
streamingAssistant.chat(message)
.onNext(sink::tryEmitNext)
.onComplete(sink::tryEmitComplete)
.onError(sink::tryEmitError)
.start();
return sink.asFlux();
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((1208, 1383), 'dev.langchain4j.service.AiServices.builder'), ((1208, 1358), 'dev.langchain4j.service.AiServices.builder'), ((1208, 1322), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1617), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1592), 'dev.langchain4j.service.AiServices.builder'), ((1415, 1556), 'dev.langchain4j.service.AiServices.builder'), ((1801, 1846), 'reactor.core.publisher.Sinks.many'), ((1801, 1823), 'reactor.core.publisher.Sinks.many')] |
package org.acme;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import io.quarkus.logging.Log;
import io.quarkus.runtime.Startup;
import jakarta.enterprise.context.ApplicationScoped;
import jakarta.inject.Inject;
import jakarta.json.Json;
import jakarta.json.JsonArray;
import jakarta.json.JsonReader;
import jakarta.json.JsonValue;
import org.eclipse.microprofile.config.inject.ConfigProperty;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;
import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive;
@ApplicationScoped
public class IngestData {
@Inject
EmbeddingStore<TextSegment> store;
@Inject
EmbeddingModel embeddingModel;
@Inject
@ConfigProperty(name = "data.file")
File dataFile;
@Inject
@ConfigProperty(name = "max.entries", defaultValue = "99999")
Integer maxEntries;
@Startup
public void init() {
List<Document> documents = new ArrayList<>();
try(JsonReader reader = Json.createReader(new FileReader(dataFile))) {
JsonArray results = reader.readArray();
Log.info("Ingesting news reports...");
int i = 0;
for (JsonValue newsEntry : results) {
i++;
if(i > maxEntries) {
break;
}
String content = newsEntry.asJsonObject().getString("content", null);
if(content != null && !content.isEmpty()) {
Document doc = new Document(content);
documents.add(doc);
continue;
}
String fullDescription = newsEntry.asJsonObject().getString("full_description", null);
if(fullDescription != null && !fullDescription.isEmpty()) {
Document doc = new Document(fullDescription);
documents.add(doc);
continue;
}
String description = newsEntry.asJsonObject().getString("description", null);
if(description != null && !description.isEmpty()) {
Document doc = new Document(description);
documents.add(doc);
continue;
}
}
var ingestor = EmbeddingStoreIngestor.builder()
.embeddingStore(store)
.embeddingModel(embeddingModel)
.documentSplitter(recursive(1000, 50))
.build();
ingestor.ingest(documents);
Log.infof("Ingested %d news articles.", documents.size());
} catch (FileNotFoundException e) {
throw new RuntimeException(e);
}
}
}
| [
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((2590, 2805), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2776), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2717), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2590, 2665), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')] |
package com.sivalabs.demo.langchain4j;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
public class OllamaChatDemo {
public static void main(String[] args) {
ChatLanguageModel model = OllamaChatModel.builder()
.baseUrl("http://localhost:11434")
.modelName("llama2")
.build();
String answer = model.generate("List all the movies directed by Quentin Tarantino");
System.out.println(answer);
}
}
| [
"dev.langchain4j.model.ollama.OllamaChatModel.builder"
] | [((257, 395), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((257, 370), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((257, 333), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')] |
package org.agoncal.fascicle.langchain4j.accessing.mistralai;
import dev.langchain4j.model.mistralai.MistralAiChatModel;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {
public static void main(String[] args) {
MusicianService musicianService = new MusicianService();
musicianService.useMistralAiChatModel();
}
private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY");
private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT");
private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME");
private static final String PROMPT = "When was the first Beatles album released?";
// ###############################
// ### AZURE OPENAI CHAT MODEL ###
// ###############################
public void useMistralAiChatModel() {
System.out.println("### useMistralAiChatModel");
// tag::adocSnippet[]
MistralAiChatModel model = MistralAiChatModel.builder()
.apiKey(AZURE_OPENAI_KEY)
.temperature(0.3)
.build();
// end::adocSnippet[]
String completion = model.generate("When was the first Rolling Stones album released?");
System.out.println(completion);
}
}
| [
"dev.langchain4j.model.mistralai.MistralAiChatModel.builder"
] | [((1062, 1161), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((1062, 1146), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((1062, 1122), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')] |
package com.ramesh.langchain;
import java.util.Scanner;
import dev.langchain4j.agent.tool.Tool;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
/***
* This project demostrates the use of LangCHain Services which uses custom tools to generate the final output
*/
public class ServiceWithToolsLive {
// Open AI Key and Chat GPT Model to use
public static String OPENAI_API_KEY = "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";
public static String OPENAI_MODEL = "gpt-3.5-turbo";
public static void main(String[] args) {
System.out.println("Using a custom Calculator as LangChain \"tool\"");
// Building a Custom LangChain Assistant using LangChain AiServices
System.out.println("Building a Custom Assistant using LangChain AiServices");
Assistant assistant = AiServices.builder(Assistant.class)
.chatLanguageModel(OpenAiChatModel.withApiKey(OPENAI_API_KEY)).tools(new Calculator())
.chatMemory(MessageWindowChatMemory.withMaxMessages(10)).build();
while (true) {
// get 2 words for which the total characters count is calculated
Scanner scanner = new Scanner(System.in);
System.out.print("Enter Word 1:");
String word1 = scanner.nextLine();
System.out.print("Enter Word 2:");
String word2 = scanner.nextLine();
String question = "What is the sum of the numbers of letters in the words \"" + word1 + "\" and \"" + word2 + "\"?";
System.out.println("Prompting ChatGPT :" + question);
// when a prompt having 2 words are sent LLM via LAngChain Assistant
// the Calcualtor functions are called to get the final answers
System.out.println("Invoking Custom Assistant Class chat() and getting response from ChatGPT...");
String answer = assistant.chat(question);
System.out.println("ChatGPT Response...\n");
System.out.println(answer);
}
}
// a custom tool
static class Calculator {
@Tool("Calculates the length of a string")
int stringLength(String s) {
return s.length();
}
@Tool("Calculates the sum of two numbers")
int add(int a, int b) {
return a + b;
}
}
interface Assistant {
String chat(String userMessage);
}
}
| [
"dev.langchain4j.service.AiServices.builder"
] | [((896, 1091), 'dev.langchain4j.service.AiServices.builder'), ((896, 1083), 'dev.langchain4j.service.AiServices.builder'), ((896, 1022), 'dev.langchain4j.service.AiServices.builder'), ((896, 998), 'dev.langchain4j.service.AiServices.builder')] |
package ${{ values.basePackage }};
import java.io.IOException;
import java.nio.file.Path;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.util.ResourceUtils;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RestController;
@SpringBootApplication
public class DemoApplication {
public static void main(String[] args) {
SpringApplication.run(DemoApplication.class, args);
}
@Bean
ChatAgent chatAgent(ChatLanguageModel chatLanguageModel) {
ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
return AiServices.builder(ChatAgent.class)
.chatLanguageModel(chatLanguageModel)
.chatMemory(chatMemory)
.build();
}
@Bean
DocumentAgent documentAgent(ChatLanguageModel chatLanguageModel, EmbeddingModel embeddingModel, EmbeddingStore<TextSegment> embeddingStore) throws IOException {
Path documentPath = ResourceUtils.getFile("classpath:documents/story.md").toPath();
DocumentParser documentParser = new TextDocumentParser();
Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);
EmbeddingStoreIngestor dataIngestor = EmbeddingStoreIngestor.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.documentSplitter(DocumentSplitters.recursive(300, 10))
.build();
dataIngestor.ingest(document);
ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
.embeddingStore(embeddingStore)
.embeddingModel(embeddingModel)
.maxResults(3)
.minScore(0.5)
.build();
ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10);
return AiServices.builder(DocumentAgent.class)
.chatLanguageModel(chatLanguageModel)
.contentRetriever(contentRetriever)
.chatMemory(chatMemory)
.build();
}
}
@RestController
class ChatController {
private final ChatAgent chatAgent;
ChatController(ChatAgent chatAgent) {
this.chatAgent = chatAgent;
}
@PostMapping("/chat")
String chat(@RequestBody String prompt) {
return chatAgent.answer(prompt);
}
}
@RestController
class DocumentController {
private final DocumentAgent documentAgent;
DocumentController(DocumentAgent documentAgent) {
this.documentAgent = documentAgent;
}
@PostMapping("/chat/doc")
String chat(@RequestBody String prompt) {
return documentAgent.answer(prompt);
}
}
interface ChatAgent {
String answer(String prompt);
}
interface DocumentAgent {
String answer(String prompt);
}
| [
"dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder",
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder"
] | [((1657, 1775), 'dev.langchain4j.service.AiServices.builder'), ((1657, 1762), 'dev.langchain4j.service.AiServices.builder'), ((1657, 1734), 'dev.langchain4j.service.AiServices.builder'), ((1972, 2034), 'org.springframework.util.ResourceUtils.getFile'), ((2228, 2405), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2392), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2332), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2228, 2296), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2479, 2642), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2629), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2610), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2591), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2479, 2555), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2727, 2889), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2876), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2848), 'dev.langchain4j.service.AiServices.builder'), ((2727, 2808), 'dev.langchain4j.service.AiServices.builder')] |
package com.docuverse.backend.configuration;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import io.github.cdimascio.dotenv.Dotenv;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
import static java.time.Duration.ofSeconds;
@Configuration
public class EmbeddingModelConfiguration {

    // Loaded once when the configuration class is instantiated; was package-private
    // and mutable — narrowed to private final (no other access is visible).
    private final Dotenv dotenv = Dotenv.load();

    /**
     * Exposes an OpenAI {@code text-embedding-ada-002} embedding model as a Spring bean.
     * The API key is read from the {@code OPENAI_API_KEY} entry of the .env file.
     *
     * @return the configured {@link EmbeddingModel}
     */
    @Bean
    public EmbeddingModel embeddingModel() {
        return OpenAiEmbeddingModel.builder()
                .apiKey(dotenv.get("OPENAI_API_KEY"))
                .modelName(TEXT_EMBEDDING_ADA_002)
                .timeout(ofSeconds(15))
                .logRequests(false)
                .logResponses(false)
                .build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder"
] | [((784, 1057), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 1032), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 995), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 959), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 919), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((784, 868), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')] |
package io.thomasvitale.langchain4j.spring.core.chat.messages.jackson;
import java.io.IOException;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonAutoDetect;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.core.JsonParser;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.DeserializationContext;
import com.fasterxml.jackson.databind.JsonDeserializer;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import dev.langchain4j.data.message.Content;
import dev.langchain4j.data.message.UserMessage;
import io.thomasvitale.langchain4j.spring.core.json.JsonDeserializationException;
/**
 * Jackson mixin used to serialize / deserialize {@link UserMessage}.
 * <p>
 * Serialization is field-based (any visibility) and ignores getters/setters.
 * Deserialization needs the custom {@link UserMessageDeserializer} because a
 * {@code UserMessage} carries either a plain {@code text} or a list of
 * {@code contents}, plus an optional {@code name}.
 */
@JsonAutoDetect(fieldVisibility = JsonAutoDetect.Visibility.ANY, getterVisibility = JsonAutoDetect.Visibility.NONE,
        isGetterVisibility = JsonAutoDetect.Visibility.NONE, setterVisibility = JsonAutoDetect.Visibility.NONE)
@JsonDeserialize(using = UserMessageMixin.UserMessageDeserializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonIgnoreProperties(ignoreUnknown = true)
abstract class UserMessageMixin {

    static class UserMessageDeserializer extends JsonDeserializer<UserMessage> {

        // Target type used to deserialize the polymorphic "contents" array.
        private static final TypeReference<List<Content>> CONTENT_LIST = new TypeReference<>() {
        };

        @Override
        public UserMessage deserialize(JsonParser jsonParser, DeserializationContext context) throws IOException {
            ObjectMapper objectMapper = (ObjectMapper) jsonParser.getCodec();
            JsonNode node = objectMapper.readTree(jsonParser);

            // Optional author name of the message.
            String name = null;
            if (node.has("name")) {
                name = node.get("name").asText();
            }

            // A message carries either "text" (plain) or "contents" (multimodal).
            String text = null;
            if (node.has("text")) {
                text = node.get("text").asText();
            }

            List<Content> contents = null;
            if (node.has("contents")) {
                contents = objectMapper.readValue(node.get("contents").traverse(objectMapper), CONTENT_LIST);
            }

            // "text" takes precedence if both fields happen to be present.
            if (text != null) {
                return buildUserMessageWithText(name, text);
            }
            else if (contents != null) {
                return buildUserMessageWithContents(name, contents);
            }
            else {
                throw new JsonDeserializationException(
                        "No 'text' or 'contents' fields found in %s".formatted(UserMessage.class.getName()));
            }
        }

        // Picks the UserMessage constructor depending on whether a name was provided.
        private UserMessage buildUserMessageWithText(String name, String text) {
            return name == null ? new UserMessage(text) : new UserMessage(name, text);
        }

        // Picks the UserMessage constructor depending on whether a name was provided.
        private UserMessage buildUserMessageWithContents(String name, List<Content> contents) {
            return name == null ? new UserMessage(contents) : new UserMessage(name, contents);
        }
    }
}
| [
"dev.langchain4j.data.message.UserMessage.class.getName"
] | [((2676, 2703), 'dev.langchain4j.data.message.UserMessage.class.getName')] |
package io.quarkiverse.langchain4j.openai.runtime;
import static io.quarkiverse.langchain4j.runtime.OptionalUtil.firstOrDefault;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Optional;
import java.util.function.Supplier;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.DisabledChatLanguageModel;
import dev.langchain4j.model.chat.DisabledStreamingChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.embedding.DisabledEmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.DisabledImageModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.moderation.DisabledModerationModel;
import dev.langchain4j.model.moderation.ModerationModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.model.openai.OpenAiModerationModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient;
import io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel;
import io.quarkiverse.langchain4j.openai.runtime.config.ChatModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.EmbeddingModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.ImageModelConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.LangChain4jOpenAiConfig;
import io.quarkiverse.langchain4j.openai.runtime.config.ModerationModelConfig;
import io.quarkiverse.langchain4j.runtime.NamedModelUtil;
import io.quarkus.runtime.ShutdownContext;
import io.quarkus.runtime.annotations.Recorder;
import io.smallrye.config.ConfigValidationException;
@Recorder
public class OpenAiRecorder {
private static final String DUMMY_KEY = "dummy";
public Supplier<ChatLanguageModel> chatModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ChatModelConfig chatModelConfig = openAiConfig.chatModel();
var builder = OpenAiChatModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, chatModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, chatModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(chatModelConfig.modelName())
.temperature(chatModelConfig.temperature())
.topP(chatModelConfig.topP())
.presencePenalty(chatModelConfig.presencePenalty())
.frequencyPenalty(chatModelConfig.frequencyPenalty())
.responseFormat(chatModelConfig.responseFormat().orElse(null));
openAiConfig.organizationId().ifPresent(builder::organizationId);
if (chatModelConfig.maxTokens().isPresent()) {
builder.maxTokens(chatModelConfig.maxTokens().get());
}
return new Supplier<>() {
@Override
public ChatLanguageModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public ChatLanguageModel get() {
return new DisabledChatLanguageModel();
}
};
}
}
public Supplier<StreamingChatLanguageModel> streamingChatModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ChatModelConfig chatModelConfig = openAiConfig.chatModel();
var builder = OpenAiStreamingChatModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.logRequests(firstOrDefault(false, chatModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, chatModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(chatModelConfig.modelName())
.temperature(chatModelConfig.temperature())
.topP(chatModelConfig.topP())
.presencePenalty(chatModelConfig.presencePenalty())
.frequencyPenalty(chatModelConfig.frequencyPenalty())
.responseFormat(chatModelConfig.responseFormat().orElse(null));
openAiConfig.organizationId().ifPresent(builder::organizationId);
if (chatModelConfig.maxTokens().isPresent()) {
builder.maxTokens(chatModelConfig.maxTokens().get());
}
return new Supplier<>() {
@Override
public StreamingChatLanguageModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public StreamingChatLanguageModel get() {
return new DisabledStreamingChatLanguageModel();
}
};
}
}
public Supplier<EmbeddingModel> embeddingModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKeyOpt = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKeyOpt)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
EmbeddingModelConfig embeddingModelConfig = openAiConfig.embeddingModel();
var builder = OpenAiEmbeddingModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKeyOpt)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, embeddingModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, embeddingModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(embeddingModelConfig.modelName());
if (embeddingModelConfig.user().isPresent()) {
builder.user(embeddingModelConfig.user().get());
}
openAiConfig.organizationId().ifPresent(builder::organizationId);
return new Supplier<>() {
@Override
public EmbeddingModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public EmbeddingModel get() {
return new DisabledEmbeddingModel();
}
};
}
}
public Supplier<ModerationModel> moderationModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ModerationModelConfig moderationModelConfig = openAiConfig.moderationModel();
var builder = OpenAiModerationModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, moderationModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, moderationModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(moderationModelConfig.modelName());
openAiConfig.organizationId().ifPresent(builder::organizationId);
return new Supplier<>() {
@Override
public ModerationModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public ModerationModel get() {
return new DisabledModerationModel();
}
};
}
}
public Supplier<ImageModel> imageModel(LangChain4jOpenAiConfig runtimeConfig, String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig = correspondingOpenAiConfig(runtimeConfig, modelName);
if (openAiConfig.enableIntegration()) {
String apiKey = openAiConfig.apiKey();
if (DUMMY_KEY.equals(apiKey)) {
throw new ConfigValidationException(createApiKeyConfigProblems(modelName));
}
ImageModelConfig imageModelConfig = openAiConfig.imageModel();
var builder = QuarkusOpenAiImageModel.builder()
.baseUrl(openAiConfig.baseUrl())
.apiKey(apiKey)
.timeout(openAiConfig.timeout())
.maxRetries(openAiConfig.maxRetries())
.logRequests(firstOrDefault(false, imageModelConfig.logRequests(), openAiConfig.logRequests()))
.logResponses(firstOrDefault(false, imageModelConfig.logResponses(), openAiConfig.logResponses()))
.modelName(imageModelConfig.modelName())
.size(imageModelConfig.size())
.quality(imageModelConfig.quality())
.style(imageModelConfig.style())
.responseFormat(imageModelConfig.responseFormat())
.user(imageModelConfig.user());
openAiConfig.organizationId().ifPresent(builder::organizationId);
// we persist if the directory was set explicitly and the boolean flag was not set to false
// or if the boolean flag was set explicitly to true
Optional<Path> persistDirectory = Optional.empty();
if (imageModelConfig.persist().isPresent()) {
if (imageModelConfig.persist().get()) {
persistDirectory = imageModelConfig.persistDirectory().or(new Supplier<>() {
@Override
public Optional<? extends Path> get() {
return Optional.of(Paths.get(System.getProperty("java.io.tmpdir"), "dall-e-images"));
}
});
}
} else {
if (imageModelConfig.persistDirectory().isPresent()) {
persistDirectory = imageModelConfig.persistDirectory();
}
}
builder.persistDirectory(persistDirectory);
return new Supplier<>() {
@Override
public ImageModel get() {
return builder.build();
}
};
} else {
return new Supplier<>() {
@Override
public ImageModel get() {
return new DisabledImageModel();
}
};
}
}
private LangChain4jOpenAiConfig.OpenAiConfig correspondingOpenAiConfig(LangChain4jOpenAiConfig runtimeConfig,
String modelName) {
LangChain4jOpenAiConfig.OpenAiConfig openAiConfig;
if (NamedModelUtil.isDefault(modelName)) {
openAiConfig = runtimeConfig.defaultConfig();
} else {
openAiConfig = runtimeConfig.namedConfig().get(modelName);
}
return openAiConfig;
}
private ConfigValidationException.Problem[] createApiKeyConfigProblems(String modelName) {
return createConfigProblems("api-key", modelName);
}
private ConfigValidationException.Problem[] createConfigProblems(String key, String modelName) {
return new ConfigValidationException.Problem[] { createConfigProblem(key, modelName) };
}
private ConfigValidationException.Problem createConfigProblem(String key, String modelName) {
return new ConfigValidationException.Problem(String.format(
"SRCFG00014: The config property quarkus.langchain4j.openai%s%s is required but it could not be found in any config source",
NamedModelUtil.isDefault(modelName) ? "." : ("." + modelName + "."), key));
}
public void cleanUp(ShutdownContext shutdown) {
shutdown.addShutdownTask(new Runnable() {
@Override
public void run() {
QuarkusOpenAiClient.clearCache();
}
});
}
}
| [
"dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder",
"dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder",
"dev.langchain4j.model.openai.OpenAiModerationModel.builder",
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((2450, 3312), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3229), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3155), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3083), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 3033), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2969), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2909), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2791), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2676), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2617), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2564), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2450, 2528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4555, 5367), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5284), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5210), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5138), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5088), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 5024), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4964), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4846), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4731), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4678), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4555, 4642), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((6642, 7184), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 7119), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6996), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 
6876), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6817), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6764), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((6642, 6725), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((8417, 8960), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8894), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8770), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8649), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8590), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8537), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((8417, 8501), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((10032, 10845), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10794), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10723), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10670), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10613), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10562), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10501), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10382), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10266), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10207), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10154), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder'), ((10032, 10118), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiImageModel.builder')] |
package com.example.demo.configuration;
import dev.langchain4j.model.azure.AzureOpenAiImageModel;
import dev.langchain4j.model.image.ImageModel;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
public class ImageModelConfiguration {

    /**
     * Provides the Azure OpenAI DALL-E 3 image model as a Spring bean.
     * Endpoint and credentials come from the AZURE_OPENAI_ENDPOINT and
     * AZURE_OPENAI_KEY environment variables; request/response logging is enabled.
     */
    @Bean
    ImageModel imageModel() {
        String endpoint = System.getenv("AZURE_OPENAI_ENDPOINT");
        String apiKey = System.getenv("AZURE_OPENAI_KEY");
        return AzureOpenAiImageModel.builder()
                .endpoint(endpoint)
                .apiKey(apiKey)
                .deploymentName("dall-e-3")
                .logRequestsAndResponses(true)
                .build();
    }
}
| [
"dev.langchain4j.model.azure.AzureOpenAiImageModel.builder"
] | [((370, 642), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((370, 617), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((370, 570), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((370, 526), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((370, 467), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder')] |
package io.quarkiverse.langchain4j.sample;
import java.util.function.Supplier;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
public class MyChatModelSupplier implements Supplier<ChatLanguageModel> {

    // Placeholder API key; replace with a real OpenAI key.
    private static final String API_KEY = "...";

    /**
     * Supplies a fresh OpenAI chat model configured with {@link #API_KEY}.
     */
    @Override
    public ChatLanguageModel get() {
        return OpenAiChatModel.builder().apiKey(API_KEY).build();
    }
}
| [
"dev.langchain4j.model.openai.OpenAiChatModel.builder"
] | [((328, 409), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((328, 384), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')] |
package com.tencent.supersonic.headless.core.chat.parser.llm;
import com.tencent.supersonic.common.util.JsonUtil;
import com.tencent.supersonic.headless.core.config.OptimizationConfig;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode;
import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.output.Response;
import org.apache.commons.lang3.tuple.Pair;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.InitializingBean;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.CopyOnWriteArrayList;
@Service
public class TwoPassSCSqlGeneration implements SqlGeneration, InitializingBean {

    private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline");

    @Autowired
    private ChatLanguageModel chatLanguageModel;
    @Autowired
    private SqlExamplarLoader sqlExamplarLoader;
    @Autowired
    private OptimizationConfig optimizationConfig;
    @Autowired
    private SqlPromptGenerator sqlPromptGenerator;

    /**
     * Generates S2SQL in two LLM passes: pass one produces schema linkings, pass two
     * produces SQL, each sampled several times and reduced via
     * {@code OutputFormat.selfConsistencyVote}.
     *
     * @param llmReq    user query plus generation context
     * @param dataSetId id of the data set being queried (logged only)
     * @return response holding the query text and the voted SQL candidates
     */
    @Override
    public LLMResp generation(LLMReq llmReq, Long dataSetId) {
        //1.retriever sqlExamples and generate exampleListPool
        keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq);
        List<Map<String, String>> sqlExamples = sqlExamplarLoader.retrieverSqlExamples(llmReq.getQueryText(),
                optimizationConfig.getText2sqlExampleNum());
        List<List<Map<String, String>>> exampleListPool = sqlPromptGenerator.getExampleCombos(sqlExamples,
                optimizationConfig.getText2sqlFewShotsNum(), optimizationConfig.getText2sqlSelfConsistencyNum());

        //2.generator linking prompt,and parallel generate response.
        List<String> linkingPromptPool = sqlPromptGenerator.generatePromptPool(llmReq, exampleListPool, false);
        // CopyOnWriteArrayList: appended to concurrently from the parallel stream below.
        List<String> linkingResults = new CopyOnWriteArrayList<>();
        linkingPromptPool.parallelStream().forEach(
                linkingPrompt -> {
                    Prompt prompt = PromptTemplate.from(JsonUtil.toString(linkingPrompt)).apply(new HashMap<>());
                    keyPipelineLog.info("step one request prompt:{}", prompt.toSystemMessage());
                    Response<AiMessage> linkingResult = chatLanguageModel.generate(prompt.toSystemMessage());
                    String result = linkingResult.content().text();
                    keyPipelineLog.info("step one model response:{}", result);
                    linkingResults.add(OutputFormat.getSchemaLink(result));
                }
        );
        List<String> sortedList = OutputFormat.formatList(linkingResults);
        Pair<String, Map<String, Double>> linkingMap = OutputFormat.selfConsistencyVote(sortedList);

        //3.generator sql prompt,and parallel generate response.
        List<String> sqlPromptPool = sqlPromptGenerator.generateSqlPromptPool(llmReq, sortedList, exampleListPool);
        List<String> sqlTaskPool = new CopyOnWriteArrayList<>();
        sqlPromptPool.parallelStream().forEach(sqlPrompt -> {
            Prompt linkingPrompt = PromptTemplate.from(JsonUtil.toString(sqlPrompt)).apply(new HashMap<>());
            keyPipelineLog.info("step two request prompt:{}", linkingPrompt.toSystemMessage());
            Response<AiMessage> sqlResult = chatLanguageModel.generate(linkingPrompt.toSystemMessage());
            String result = sqlResult.content().text();
            keyPipelineLog.info("step two model response:{}", result);
            sqlTaskPool.add(result);
        });

        //4.format response.
        // Vote across the sampled SQL answers (see OutputFormat.selfConsistencyVote).
        Pair<String, Map<String, Double>> sqlMapPair = OutputFormat.selfConsistencyVote(sqlTaskPool);
        keyPipelineLog.info("linkingMap:{} sqlMap:{}", linkingMap, sqlMapPair.getRight());
        LLMResp llmResp = new LLMResp();
        llmResp.setQuery(llmReq.getQueryText());
        llmResp.setSqlRespMap(OutputFormat.buildSqlRespMap(sqlExamples, sqlMapPair.getRight()));
        return llmResp;
    }

    @Override
    public void afterPropertiesSet() {
        // Registers this strategy in the factory under TWO_PASS_AUTO_COT_SELF_CONSISTENCY.
        SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.TWO_PASS_AUTO_COT_SELF_CONSISTENCY, this);
    }
}
| [
"dev.langchain4j.model.input.PromptTemplate.from"
] | [((2481, 2557), 'dev.langchain4j.model.input.PromptTemplate.from'), ((3537, 3609), 'dev.langchain4j.model.input.PromptTemplate.from')] |
package dev.langchain4j.model.qianfan.client;
import dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest;
import dev.langchain4j.model.qianfan.client.chat.ChatCompletionResponse;
import dev.langchain4j.model.qianfan.client.chat.ChatTokenResponse;
import dev.langchain4j.model.qianfan.client.completion.CompletionRequest;
import dev.langchain4j.model.qianfan.client.completion.CompletionResponse;
import dev.langchain4j.model.qianfan.client.embedding.EmbeddingRequest;
import dev.langchain4j.model.qianfan.client.embedding.EmbeddingResponse;
import okhttp3.Cache;
import okhttp3.OkHttpClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import retrofit2.Retrofit;
import retrofit2.converter.gson.GsonConverterFactory;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.Proxy;
import java.time.Duration;
public class QianfanClient {
private static final Logger log = LoggerFactory.getLogger(QianfanClient.class);
private final String baseUrl;
private String token;
private final OkHttpClient okHttpClient;
private final QianfanApi qianfanApi;
private final String apiKey;
private final String secretKey;
private final boolean logStreamingResponses;
public static final String GRANT_TYPE = "client_credentials";
public QianfanClient(String apiKey, String secretKey) {
this(builder().apiKey(apiKey).secretKey(secretKey));
}
private QianfanClient(Builder serviceBuilder) {
this.baseUrl = serviceBuilder.baseUrl;
OkHttpClient.Builder okHttpClientBuilder = (new OkHttpClient.Builder()).callTimeout(serviceBuilder.callTimeout)
.connectTimeout(serviceBuilder.connectTimeout).readTimeout(serviceBuilder.readTimeout)
.writeTimeout(serviceBuilder.writeTimeout);
if (serviceBuilder.apiKey == null) {
throw new IllegalArgumentException("apiKey must be defined");
} else if (serviceBuilder.secretKey == null) {
throw new IllegalArgumentException("secretKey must be defined");
} else {
if (serviceBuilder.apiKey != null) {
okHttpClientBuilder.addInterceptor(new AuthorizationHeaderInjector(serviceBuilder.apiKey));
}
if (serviceBuilder.proxy != null) {
okHttpClientBuilder.proxy(serviceBuilder.proxy);
}
if (serviceBuilder.logRequests) {
okHttpClientBuilder.addInterceptor(new RequestLoggingInterceptor());
}
if (serviceBuilder.logResponses) {
okHttpClientBuilder.addInterceptor(new ResponseLoggingInterceptor());
}
this.logStreamingResponses = serviceBuilder.logStreamingResponses;
this.apiKey = serviceBuilder.apiKey;
this.secretKey = serviceBuilder.secretKey;
this.okHttpClient = okHttpClientBuilder.build();
Retrofit retrofit = (new Retrofit.Builder()).baseUrl(serviceBuilder.baseUrl).client(this.okHttpClient)
.addConverterFactory(GsonConverterFactory.create(Json.GSON)).build();
this.qianfanApi = retrofit.create(QianfanApi.class);
}
}
public void shutdown() {
this.okHttpClient.dispatcher().executorService().shutdown();
this.okHttpClient.connectionPool().evictAll();
Cache cache = this.okHttpClient.cache();
if (cache != null) {
try {
cache.close();
} catch (IOException var3) {
log.error("Failed to close cache", var3);
}
}
}
public static Builder builder() {
return new Builder();
}
public SyncOrAsyncOrStreaming<ChatCompletionResponse> chatCompletion(ChatCompletionRequest request,String endpoint) {
refreshToken();
return new RequestExecutor(this.qianfanApi.chatCompletions(endpoint,request, this.token), (r) -> {
return r;
}, this.okHttpClient, this.formatUrl("rpc/2.0/ai_custom/v1/wenxinworkshop/chat/"+endpoint+"?access_token="+this.token), () -> {
return ChatCompletionRequest.builder().from(request).stream(true).build();
}, ChatCompletionResponse.class, (r) -> {
return r;
}, this.logStreamingResponses);
}
public SyncOrAsyncOrStreaming<CompletionResponse> completion(CompletionRequest request, boolean stream,
String endpoint) {
refreshToken();
CompletionRequest syncRequest = CompletionRequest.builder().from(request).stream(stream).build();
return new RequestExecutor(this.qianfanApi.completions(endpoint,request, this.token), (r) -> {
return r;
}, this.okHttpClient, this.formatUrl("rpc/2.0/ai_custom/v1/wenxinworkshop/completions/"+endpoint+"?access_token="+this.token), () -> {
return CompletionRequest.builder().from(request).stream(true).build();
}, CompletionResponse.class, (r) -> {
return r;
}, this.logStreamingResponses);
}
public SyncOrAsync<EmbeddingResponse> embedding(EmbeddingRequest request, String serviceName) {
refreshToken();
return new RequestExecutor(this.qianfanApi.embeddings(serviceName, request, this.token), (r) -> {
return r;
});
}
private void refreshToken() {
RequestExecutor<String, ChatTokenResponse, String> executor = new RequestExecutor<>(
this.qianfanApi.getToken(GRANT_TYPE, this.apiKey,
this.secretKey), ChatTokenResponse::getAccessToken);
this.token = executor.execute();
}
private String formatUrl(String endpoint) {
return this.baseUrl + endpoint;
}
public static class Builder {
private String baseUrl;
private String apiKey;
private String secretKey;
private Duration callTimeout;
private Duration connectTimeout;
private Duration readTimeout;
private Duration writeTimeout;
private Proxy proxy;
private boolean logRequests;
private boolean logResponses;
private boolean logStreamingResponses;
private Builder() {
this.baseUrl = "https://aip.baidubce.com/";
this.callTimeout = Duration.ofSeconds(60L);
this.connectTimeout = Duration.ofSeconds(60L);
this.readTimeout = Duration.ofSeconds(60L);
this.writeTimeout = Duration.ofSeconds(60L);
}
public Builder baseUrl(String baseUrl) {
if (baseUrl != null && !baseUrl.trim().isEmpty()) {
this.baseUrl = baseUrl.endsWith("/") ? baseUrl : baseUrl + "/";
return this;
} else {
throw new IllegalArgumentException("baseUrl cannot be null or empty");
}
}
public Builder apiKey(String apiKey) {
if (apiKey != null && !apiKey.trim().isEmpty()) {
this.apiKey = apiKey;
return this;
} else {
throw new IllegalArgumentException("apiKey cannot be null or empty. ");
}
}
public Builder secretKey(String secretKey) {
if (secretKey != null && !secretKey.trim().isEmpty()) {
this.secretKey = secretKey;
return this;
} else {
throw new IllegalArgumentException("secretKey cannot be null or empty. ");
}
}
public Builder callTimeout(Duration callTimeout) {
if (callTimeout == null) {
throw new IllegalArgumentException("callTimeout cannot be null");
} else {
this.callTimeout = callTimeout;
return this;
}
}
public Builder connectTimeout(Duration connectTimeout) {
if (connectTimeout == null) {
throw new IllegalArgumentException("connectTimeout cannot be null");
} else {
this.connectTimeout = connectTimeout;
return this;
}
}
public Builder readTimeout(Duration readTimeout) {
if (readTimeout == null) {
throw new IllegalArgumentException("readTimeout cannot be null");
} else {
this.readTimeout = readTimeout;
return this;
}
}
public Builder writeTimeout(Duration writeTimeout) {
if (writeTimeout == null) {
throw new IllegalArgumentException("writeTimeout cannot be null");
} else {
this.writeTimeout = writeTimeout;
return this;
}
}
public Builder proxy(Proxy.Type type, String ip, int port) {
this.proxy = new Proxy(type, new InetSocketAddress(ip, port));
return this;
}
public Builder proxy(Proxy proxy) {
this.proxy = proxy;
return this;
}
public Builder logRequests() {
return this.logRequests(true);
}
public Builder logRequests(Boolean logRequests) {
if (logRequests == null) {
logRequests = false;
}
this.logRequests = logRequests;
return this;
}
public Builder logResponses() {
return this.logResponses(true);
}
public Builder logResponses(Boolean logResponses) {
if (logResponses == null) {
logResponses = false;
}
this.logResponses = logResponses;
return this;
}
public Builder logStreamingResponses() {
return this.logStreamingResponses(true);
}
public Builder logStreamingResponses(Boolean logStreamingResponses) {
if (logStreamingResponses == null) {
logStreamingResponses = false;
}
this.logStreamingResponses = logStreamingResponses;
return this;
}
/**
 * Builds a {@link QianfanClient} configured with this builder's settings.
 *
 * @return a new client instance
 */
public QianfanClient build() {
    return new QianfanClient(this);
}
}
}
| [
"dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder",
"dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder"
] | [((4128, 4194), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4128, 4186), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4128, 4173), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4575, 4639), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((4575, 4631), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((4575, 4616), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((4928, 4990), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((4928, 4982), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((4928, 4969), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder')] |
package org.example;
import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.nio.file.StandardOpenOption;
import java.util.ArrayList;
import java.util.List;
/**
 * Demo: an AI-service assistant whose per-user chat memory is persisted
 * through a file-backed {@code ChatMemoryStore}.
 */
public class _09_AIServices_06_ChatMemoryPersisted {

    public static void main(String[] args) {
        OpenAiChatModel chatModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_DEMO);
        FileStore messageStore = new FileStore();

        // One size-limited, file-persisted memory per memoryId.
        ChatMemoryProvider memoryProvider = memoryId -> MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(10)
                .chatMemoryStore(messageStore)
                .build();

        ChatAssistant assistant = AiServices.builder(ChatAssistant.class)
                .chatLanguageModel(chatModel)
                .chatMemoryProvider(memoryProvider)
                .build();

        System.out.println(assistant.chat(1, "Hello my name is Michael"));
        System.out.println(assistant.chat(2, "Hello my name is Karl"));
        // System.out.println(assistant.chat(1, "What is my name?"));
        // System.out.println(assistant.chat(2, "What is my name?"));
    }
}
class FileStore implements ChatMemoryStore {
public static final String PATH = "src/main/resources/messages_%s.txt";
@Override
public List<ChatMessage> getMessages(Object memoryId) {
List<ChatMessage> chatMessages = new ArrayList<>();
String file = PATH.formatted(memoryId);
try {
if (!Files.exists(Paths.get(file))) {
Files.createFile(Paths.get(file));
}
for (String s : Files.readAllLines(Paths.get(file))) {
chatMessages.add(UserMessage.from(s));
}
} catch (IOException e) {
throw new RuntimeException(e);
}
return chatMessages;
}
@Override
public void updateMessages(Object memoryId, List<ChatMessage> messages) {
String file = PATH.formatted(memoryId);
for (ChatMessage message : messages) {
try {
Files.writeString(Paths.get(file), message.text() + "\n", StandardOpenOption.APPEND);
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
@Override
public void deleteMessages(Object memoryId) {
System.out.println("Not implemented");
}
} | [
"dev.langchain4j.service.AiServices.builder",
"dev.langchain4j.memory.chat.MessageWindowChatMemory.builder"
] | [((843, 1004), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 979), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 939), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((843, 906), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1041, 1193), 'dev.langchain4j.service.AiServices.builder'), ((1041, 1168), 'dev.langchain4j.service.AiServices.builder'), ((1041, 1122), 'dev.langchain4j.service.AiServices.builder')] |
package org.agoncal.fascicle.langchain4j.vectordb.pgvector;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore;
import java.util.List;
// tag::adocSkip[]
/**
* @author Antonio Goncalves
* http://www.antoniogoncalves.org
* --
*/
// end::adocSkip[]
public class MusicianService {

    public static void main(String[] args) {
        new MusicianService().usePGVectorToStoreEmbeddings();
    }

    /**
     * Stores two example sentence embeddings in a PGVector-backed store and
     * runs a single nearest-neighbour query against them.
     */
    public void usePGVectorToStoreEmbeddings() {
        System.out.println("### usePGVectorToStoreEmbeddings");
        // tag::adocSnippet[]
        EmbeddingStore<TextSegment> embeddingStore = PgVectorEmbeddingStore.builder()
                .host("localhost")
                .port(5432)
                .createTable(true)
                .dropTableFirst(true)
                .dimension(384)
                .table("langchain4j_collection")
                .user("agoncal")
                .password("agoncal")
                .database("agoncal")
                .build();
        // end::adocSnippet[]
        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        // Embed and store two example sentences.
        TextSegment franceSegment = TextSegment.from("I've been to France twice.");
        embeddingStore.add(embeddingModel.embed(franceSegment).content(), franceSegment);

        TextSegment delhiSegment = TextSegment.from("New Delhi is the capital of India.");
        embeddingStore.add(embeddingModel.embed(delhiSegment).content(), delhiSegment);

        // Query for the single most similar stored segment.
        Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content();
        List<EmbeddingMatch<TextSegment>> matches = embeddingStore.findRelevant(queryEmbedding, 1);

        EmbeddingMatch<TextSegment> bestMatch = matches.get(0);
        System.out.println(bestMatch.score());
        System.out.println(bestMatch.embedded().text());
    }
}
| [
"dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder"
] | [((989, 1290), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1273), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1244), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1215), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1190), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1149), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1125), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1095), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1068), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((989, 1048), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder')] |
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 26