code
stringlengths
419
102k
apis
sequencelengths
1
10
extract_api
stringlengths
67
54.7k
package io.quarkiverse.langchain4j.runtime;

import java.util.function.Function;

import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.Tokenizer;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import io.quarkiverse.langchain4j.runtime.aiservice.ChatMemoryConfig;
import io.quarkus.arc.SyntheticCreationalContext;
import io.quarkus.runtime.annotations.Recorder;

/**
 * Quarkus build-time recorder producing {@link ChatMemoryProvider} beans backed either by a
 * message-count window or a token-count window, persisting to the injected
 * {@link ChatMemoryStore}.
 */
@Recorder
public class ChatMemoryRecorder {

    /**
     * Returns a synthetic-bean factory for a provider whose memories retain at most
     * {@code config.memoryWindow().maxMessages()} messages per memory id.
     */
    public Function<SyntheticCreationalContext<ChatMemoryProvider>, ChatMemoryProvider> messageWindow(ChatMemoryConfig config) {
        return context -> {
            ChatMemoryStore store = context.getInjectedReference(ChatMemoryStore.class);
            int windowSize = config.memoryWindow().maxMessages();
            return memoryId -> MessageWindowChatMemory.builder()
                    .maxMessages(windowSize)
                    .id(memoryId)
                    .chatMemoryStore(store)
                    .build();
        };
    }

    /**
     * Returns a synthetic-bean factory for a provider whose memories retain at most
     * {@code config.tokenWindow().maxTokens()} tokens per memory id, counted with the
     * injected {@link Tokenizer}.
     */
    public Function<SyntheticCreationalContext<ChatMemoryProvider>, ChatMemoryProvider> tokenWindow(ChatMemoryConfig config) {
        return context -> {
            ChatMemoryStore store = context.getInjectedReference(ChatMemoryStore.class);
            Tokenizer tokenizer = context.getInjectedReference(Tokenizer.class);
            int tokenBudget = config.tokenWindow().maxTokens();
            return memoryId -> TokenWindowChatMemory.builder()
                    .maxTokens(tokenBudget, tokenizer)
                    .id(memoryId)
                    .chatMemoryStore(store)
                    .build();
        };
    }
}
[ "dev.langchain4j.memory.chat.TokenWindowChatMemory.builder", "dev.langchain4j.memory.chat.MessageWindowChatMemory.builder" ]
[((1242, 1486), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1242, 1445), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1242, 1379), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1242, 1333), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((2272, 2521), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((2272, 2480), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((2272, 2414), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder'), ((2272, 2368), 'dev.langchain4j.memory.chat.TokenWindowChatMemory.builder')]
package com.genai.tmgenai.service;

import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentLoader;
import dev.langchain4j.data.document.DocumentSegment;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.splitter.ParagraphSplitter;
import dev.langchain4j.data.document.splitter.SentenceSplitter;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.PineconeEmbeddingStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static dev.langchain4j.data.document.DocumentType.PDF;
import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO;
import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002;
import static java.time.Duration.ofSeconds;
import static java.util.stream.Collectors.joining;

/**
 * Stores an uploaded PDF locally, embeds it, and runs a retrieval-augmented
 * question over a Pinecone index using OpenAI models.
 */
@Service
public class FileEmbeddingService {

    // NOTE(review): property key is spelled "opnenapikey" — presumably a typo for
    // "openapikey"; left as-is because application config must match. Confirm.
    @Value("${key.opnenapikey}")
    private String OPENAI_API_KEY;

    /**
     * Persists {@code multipartFile} to disk, embeds it, and asks a fixed question
     * against the Pinecone store, printing the model's answer.
     *
     * @param multipartFile uploaded PDF content
     * @param fileId        identifier used to name the local copy
     * @throws IOException if the upload cannot be written to disk
     */
    public void embedFile(MultipartFile multipartFile, String fileId) throws IOException {
        // BUG FIX: original concatenation was missing the path separator, producing
        // "/Users/amankumar/Downloads<fileId>.pdf" instead of a file inside Downloads.
        File file = new File("/Users/amankumar/Downloads/" + fileId + ".pdf");
        multipartFile.transferTo(file);

        DocumentLoader documentLoader = DocumentLoader.from(Paths.get(file.getPath()), PDF);
        Document document = documentLoader.load();

        // Split document into segments (one paragraph per segment)
        DocumentSplitter splitter = new ParagraphSplitter();
        // List<DocumentSegment> documentSegments = splitter.split(document);

        // Embed segments (convert them into semantic vectors)
        EmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder()
                .apiKey(OPENAI_API_KEY) // https://platform.openai.com/account/api-keys
                .modelName(TEXT_EMBEDDING_ADA_002)
                .timeout(ofSeconds(15))
                .build();
        // List<Embedding> embeddings = embeddingModel.embedAll(documentSegments).get();

        // Store embeddings into embedding store for further search / retrieval.
        // SECURITY(review): the Pinecone API key is hardcoded below — it should be
        // rotated and moved to externalized configuration (e.g. @Value), like the
        // OpenAI key above.
        PineconeEmbeddingStore pinecone = PineconeEmbeddingStore.builder()
                .apiKey("1d0899b3-7abf-40be-a267-ac208d572ed3") // https://app.pinecone.io/organizations/xxx/projects/yyy:zzz/keys
                .environment("asia-southeast1-gcp-free")
                .projectName("bca6a53")
                .index("documents")
                // make sure the dimensions of the Pinecone index match the dimensions of the embedding model (1536 for text-embedding-ada-002)
                .build();
        // pinecone.addAll(embeddings, documentSegments);

        String question = "what is the value for policy no?";
        Embedding questionEmbedding = embeddingModel.embed(question).get();

        // Find relevant embeddings in embedding store by semantic similarity
        List<EmbeddingMatch<DocumentSegment>> relevantEmbeddings = pinecone.findRelevant(questionEmbedding, 2);

        // Create a prompt for the model that includes question and relevant embeddings
        PromptTemplate promptTemplate = PromptTemplate.from(
                "Answer the following question to the best of your ability :\n"
                        + "\n"
                        + "Question:\n"
                        + "{{question}}\n"
                        + "\n"
                        + "Base your answer on the below information from a policy document: \n"
                        + "{{information}}");

        String information = relevantEmbeddings.stream()
                .map(match -> match.embedded().get().text())
                .collect(joining("\n\n"));

        Map<String, Object> variables = new HashMap<>();
        variables.put("question", question);
        variables.put("information", information);

        Prompt prompt = promptTemplate.apply(variables);

        // Send prompt to the model
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey(OPENAI_API_KEY) // https://platform.openai.com/account/api-keys
                .modelName(GPT_3_5_TURBO)
                .temperature(1.0)
                .logResponses(true)
                .logRequests(true)
                .build();
        AiMessage aiMessage = chatModel.sendUserMessage(prompt).get();

        // See an answer from the model
        String answer = aiMessage.text();
        System.out.println(answer);
    }
}
[ "dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder", "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((2341, 2575), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2341, 2550), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2341, 2510), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2341, 2411), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((2792, 3241), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 3088), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 3052), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 3012), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((2792, 2888), 'dev.langchain4j.store.embedding.PineconeEmbeddingStore.builder'), ((4610, 4895), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4870), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4835), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4799), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4765), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4610, 4675), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package io.thomasvitale.langchain4j.spring.core.model.prompt.jackson;

import java.util.Map;

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;

import dev.langchain4j.model.input.Prompt;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.input.structured.StructuredPrompt;
import dev.langchain4j.spi.prompt.structured.StructuredPromptFactory;

import io.thomasvitale.langchain4j.spring.core.json.jackson.LangChain4jJacksonProvider;

/**
 * A factory for creating a {@link Prompt} instance from a structured prompt using
 * Jackson.
 * <p>
 * Adapted from DefaultStructuredPromptFactory in the LangChain4j project.
 */
public class JacksonStructuredPromptFactory implements StructuredPromptFactory {

    // FIX: declared final — a mutable public static field could be reassigned by any
    // caller; the TypeReference itself is immutable and safe to share.
    public static final TypeReference<Map<String, Object>> MAP_TYPE = new TypeReference<>() {
    };

    private final ObjectMapper objectMapper;

    public JacksonStructuredPromptFactory() {
        this.objectMapper = LangChain4jJacksonProvider.getObjectMapper();
    }

    /**
     * Converts an object annotated with {@link StructuredPrompt} into a {@link Prompt}:
     * validates the annotation, joins its template lines, and fills the template with
     * the object's fields (serialized to a map via Jackson).
     */
    @Override
    public Prompt toPrompt(Object structuredPrompt) {
        StructuredPrompt annotation = StructuredPrompt.Util.validateStructuredPrompt(structuredPrompt);
        String promptTemplateString = StructuredPrompt.Util.join(annotation);
        Map<String, Object> variables = extractVariables(structuredPrompt);
        PromptTemplate promptTemplate = PromptTemplate.from(promptTemplateString);
        return promptTemplate.apply(variables);
    }

    /** Serializes the structured prompt's fields into a name → value map. */
    private Map<String, Object> extractVariables(Object structuredPrompt) {
        return objectMapper.convertValue(structuredPrompt, MAP_TYPE);
    }
}
[ "dev.langchain4j.model.input.structured.StructuredPrompt.Util.validateStructuredPrompt", "dev.langchain4j.model.input.structured.StructuredPrompt.Util.join" ]
[((1161, 1225), 'dev.langchain4j.model.input.structured.StructuredPrompt.Util.validateStructuredPrompt'), ((1265, 1303), 'dev.langchain4j.model.input.structured.StructuredPrompt.Util.join')]
package net.savantly.mainbot.dom.embeddingstore;

import java.util.HashMap;
import java.util.Map;

import org.springframework.stereotype.Service;

import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore;
import lombok.RequiredArgsConstructor;
import net.savantly.mainbot.config.PineconeConfig;

/**
 * Caches one Pinecone-backed {@link EmbeddingStore} per id, creating stores lazily.
 */
@Service
@RequiredArgsConstructor
public class EmbeddingStoreProvider {

    private final PineconeConfig pineconeConfig;

    // Cache of id → store. Typed to the Map interface; HashMap stays the implementation.
    // NOTE(review): a singleton @Service may be hit concurrently — consider
    // ConcurrentHashMap if concurrent access is possible here.
    private final Map<String, EmbeddingStore<TextSegment>> stores = new HashMap<>();

    /**
     * Returns the PineconeEmbeddingStore for the given id, creating (and caching) it on
     * first use.
     *
     * @param id store identifier (becomes part of the Pinecone namespace)
     * @return the cached or newly created store
     */
    public EmbeddingStore<TextSegment> embeddingStore(String id) {
        // computeIfAbsent replaces the original containsKey/put check-then-act sequence.
        return stores.computeIfAbsent(id, this::create);
    }

    /** Builds a new Pinecone store namespaced for {@code id} from the injected config. */
    private EmbeddingStore<TextSegment> create(String id) {
        PineconeEmbeddingStore pinecone = PineconeEmbeddingStore.builder()
                .apiKey(pineconeConfig.getApiKey())
                // https://app.pinecone.io/organizations/xxx/projects/yyy:zzz/keys
                .environment(pineconeConfig.getEnvironment())
                .projectId(pineconeConfig.getProjectName())
                .nameSpace(convertIdToPineconeNamespace(id))
                .index(pineconeConfig.getIndex())
                // make sure the dimensions of the Pinecone index match the dimensions
                // of the embedding model (1536 for text-embedding-ada-002)
                .build();
        return pinecone;
    }

    /** Prefixes {@code id} with the configured namespace prefix. */
    private String convertIdToPineconeNamespace(String id) {
        return pineconeConfig.getNamespacePrefix() + id;
    }
}
[ "dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder" ]
[((1069, 1678), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((1069, 1453), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((1069, 1403), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((1069, 1342), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((1069, 1282), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((1069, 1153), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder')]
package my.samples;

import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.store.embedding.EmbeddingMatch;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore;

import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.Scanner;

/**
 * Interactive console demo: ingests a PDF owner's manual into an
 * Elasticsearch-backed embedding store, then answers user queries by semantic
 * retrieval, delegating final answer generation to {@code RestClient}.
 */
public class ElasticSearchEmbeddingManualLoader {

    // ANSI escape codes used to colour console output.
    public static final String ANSI_GREEN = "\u001B[32m";
    public static final String ANSI_RESET = "\u001B[0m";
    public static final String ANSI_YELLOW = "\u001B[33m";

    public static void main(String[] args) {
        // Embeddings are stored in a local Elasticsearch index; dimension 384 matches
        // the AllMiniLmL6V2 model used below.
        EmbeddingStore<TextSegment> embeddingStore = ElasticsearchEmbeddingStore.builder()
                .serverUrl("http://localhost:9200")
                .indexName("car-warranty-guide-embeddings")
                .dimension(384)
                .build();

        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        // Splits the document into ~300-token segments with no overlap before embedding.
        EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder()
                .documentSplitter(DocumentSplitters.recursive(300, 0))
                .embeddingModel(embeddingModel)
                .embeddingStore(embeddingStore)
                .build();

        Path filePath = toPath("example-files/Tesla_Models_Owners_Manual.pdf");
        Document document = FileSystemDocumentLoader.loadDocument(filePath, new ApachePdfBoxDocumentParser());

        // Attach descriptive metadata to the document before ingestion.
        document.metadata().add("fileName", filePath.getFileName().toString());
        document.metadata().add("filePath", filePath.toString());
        document.metadata().add("company", "TESLA");
        document.metadata().add("product", "MODEL S");
        document.metadata().add("language", "ENG");
        document.metadata().add("version", "V1");
        document.metadata().add("year", "2025");
        document.metadata().add("type", "Owner's Manual Guide");
        document.metadata().add("country", "US");
        document.metadata().add("category", "Automotive");

        ingestor.ingest(document);

        // Read queries from the console until the user types 'exit'.
        Scanner scanner = new Scanner(System.in);
        while (true) {
            System.out.println("Enter your query (or type 'exit' to quit):");
            // Wait for the user to input a query
            String query = scanner.nextLine();

            // Check if the user wants to exit the program
            if ("exit".equalsIgnoreCase(query)) {
                System.out.println("Exiting program.");
                break;
            }

            // Sample queries:
            // Who Pays For Warranty Repairs?
            // What is the warranty period?
            // What is the warranty period for the powertrain?
            // What is the warranty period for the powertrain?

            // Process the query and get an answer: embed the query, then retrieve the
            // 5 most similar segments from the store.
            Embedding queryEmbedding = embeddingModel.embed(query).content();
            List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 5);

            // NOTE(review): the log lines below mention "2025_US_F150_Warranty_Guide_ENG_V1.pdf"
            // but the ingested document is the Tesla manual — looks like a stale copy-paste;
            // confirm before changing the output strings.
            System.out.println("Start --------- Matching Context from Document: 2025_US_F150_Warranty_Guide_ENG_V1.pdf");
            List<String> answers = new ArrayList<>();
            for (EmbeddingMatch<TextSegment> match : relevant) {
                System.out.println(match.score());
                answers.add(match.embedded().text());
                System.out.println(ANSI_GREEN + match.embedded().text() + ANSI_RESET);
                System.out.println("");
            }
            System.out.println("End --------- Matching Context from Document: 2025_US_F150_Warranty_Guide_ENG_V1.pdf");

            // Forward the query plus the retrieved context to RestClient for the final
            // answer. NOTE(review): RestClient is defined elsewhere in this project.
            if (!answers.isEmpty()) {
                try {
                    System.out.println(ANSI_YELLOW + RestClient.getAnswer(query, answers) + ANSI_RESET);
                } catch (IOException e) {
                    e.printStackTrace();
                }
            }
        }

        // Close the scanner
        scanner.close();

        // In-memory embedding store can be serialized and deserialized to/from JSON
        // String serializedStore = ((InMemoryEmbeddingStore)embeddingStore).serializeToJson();
        // System.out.println(serializedStore);
        // InMemoryEmbeddingStore<TextSegment> deserializedStore = InMemoryEmbeddingStore.fromJson(serializedStore);

        // In-memory embedding store can be serialized and deserialized to/from file
        // String filePath = "/home/me/embedding.store";
        // embeddingStore.serializeToFile(filePath);
        // InMemoryEmbeddingStore<TextSegment> deserializedStore = InMemoryEmbeddingStore.fromFile(filePath);
    }

    /**
     * Resolves a classpath resource name to a filesystem {@link Path}.
     *
     * @throws RuntimeException if the resource is missing or its URI is invalid
     */
    private static Path toPath(String fileName) {
        try {
            // Corrected path assuming files are in src/main/resources/example-files
            URL fileUrl = ElasticSearchEmbeddingManualLoader.class.getClassLoader().getResource(fileName);
            if (fileUrl == null) {
                throw new RuntimeException("Resource not found: " + fileName);
            }
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException("Failed to resolve URI for: " + fileName, e);
        }
    }
}
[ "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder", "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((1308, 1514), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1308, 1489), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1308, 1457), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1308, 1397), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1634, 1858), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1634, 1833), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1634, 1785), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1634, 1737), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.image.Image;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.image.ImageModel;
import dev.langchain4j.model.input.PromptTemplate;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiImageModel;
import dev.langchain4j.model.output.Response;
import dev.langchain4j.retriever.EmbeddingStoreRetriever;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.EmbeddingStoreIngestor;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;

import java.net.URISyntaxException;
import java.nio.file.Paths;
import java.util.HashMap;
import java.util.Map;
import java.util.Objects;

import static dev.ai4j.openai4j.image.ImageModel.DALL_E_QUALITY_HD;
import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument;

/**
 * Runnable examples for OpenAI image generation: a direct prompt, and a RAG-driven
 * prompt built from a local document.
 */
public class OpenAiImageModelExamples {

    /** Generates an image from a fixed prompt and prints its URL. */
    static class Simple_Prompt {

        public static void main(String[] args) {
            ImageModel model = OpenAiImageModel.withApiKey(System.getenv("OPENAI_API_KEY"));

            Response<Image> response = model.generate("Donald Duck in New York, cartoon style");

            System.out.println(response.content().url()); // Donald Duck is here :)
        }
    }

    /**
     * Ingests a story document into an in-memory embedding store, asks a chat model a
     * question about it via a retrieval chain, then draws an image based on the answer.
     */
    static class Draw_Story_From_My_Document {

        public static void main(String[] args) throws URISyntaxException {
            // HD-quality DALL-E model; withPersisting() stores generated images locally.
            ImageModel model = OpenAiImageModel.builder()
                    .apiKey(System.getenv("OPENAI_API_KEY"))
                    .quality(DALL_E_QUALITY_HD)
                    .logRequests(true)
                    .logResponses(true)
                    .withPersisting()
                    .build();

            EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();
            EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();

            // ~1000-token segments, no overlap.
            EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor
                    .builder()
                    .documentSplitter(DocumentSplitters.recursive(1000, 0))
                    .embeddingModel(embeddingModel)
                    .embeddingStore(embeddingStore)
                    .build();

            // Load the example story from classpath resources.
            Document document = loadDocument(
                    Paths.get(
                            Objects
                                    .requireNonNull(
                                            OpenAiImageModelExamples.class.getResource("example-files/story-about-happy-carrot.txt")
                                    )
                                    .toURI()
                    ),
                    new TextDocumentParser()
            );
            ingestor.ingest(document);

            // Chain: retrieve relevant segments, then answer with an OpenAI chat model.
            ConversationalRetrievalChain chain = ConversationalRetrievalChain
                    .builder()
                    .chatLanguageModel(OpenAiChatModel.builder().apiKey(System.getenv("OPENAI_API_KEY")).build())
                    .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel))
                    .build();

            PromptTemplate drawPromptTemplate = PromptTemplate.from(
                    "Draw {{object}}. Base the picture on following information:\n\n{{information}}"
            );

            // The chain's answer about Charlie becomes context for the drawing prompt.
            Map<String, Object> variables = new HashMap<>();
            variables.put("information", chain.execute("Who is Charlie?"));
            variables.put("object", "Ultra realistic Charlie on the party, cinematic lighting");

            Response<Image> response = model.generate(drawPromptTemplate.apply(variables).text());

            System.out.println(response.content().url()); // Enjoy your locally stored picture of Charlie on the party :)
        }
    }
}
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder", "dev.langchain4j.model.openai.OpenAiImageModel.builder" ]
[((1819, 2100), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 2071), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 2033), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 1993), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 1954), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((1819, 1906), 'dev.langchain4j.model.openai.OpenAiImageModel.builder'), ((3236, 3309), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3236, 3301), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
import dev.langchain4j.chain.ConversationalRetrievalChain;
import dev.langchain4j.data.document.Document;
import dev.langchain4j.data.document.DocumentParser;
import dev.langchain4j.data.document.DocumentSplitter;
import dev.langchain4j.data.document.loader.FileSystemDocumentLoader;
import dev.langchain4j.data.document.parser.TextDocumentParser;
import dev.langchain4j.data.document.splitter.DocumentSplitters;
import dev.langchain4j.data.embedding.Embedding;
import dev.langchain4j.data.segment.TextSegment;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.cohere.CohereScoringModel;
import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel;
import dev.langchain4j.model.embedding.EmbeddingModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.scoring.ScoringModel;
import dev.langchain4j.rag.DefaultRetrievalAugmentor;
import dev.langchain4j.rag.RetrievalAugmentor;
import dev.langchain4j.rag.content.aggregator.ContentAggregator;
import dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator;
import dev.langchain4j.rag.content.retriever.ContentRetriever;
import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.embedding.EmbeddingStore;
import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;

import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Scanner;

public class _04_Advanced_RAG_with_ReRanking {

    /**
     * Please refer to previous examples for basic context.
     * <p>
     * Advanced RAG in LangChain4j is described here: https://github.com/langchain4j/langchain4j/pull/538
     * <p>
     * This example illustrates the implementation of a more advanced RAG application
     * using a technique known as "re-ranking".
     * <p>
     * Frequently, not all results retrieved by {@link ContentRetriever} are truly relevant to the user query.
     * This is because, during the initial retrieval stage, it is often preferable to use faster
     * and more cost-effective models, particularly when dealing with a large volume of data.
     * The trade-off is that the retrieval quality may be lower.
     * Providing irrelevant information to the LLM can be costly and, in the worst case, lead to hallucinations.
     * Therefore, in the second stage, we can perform re-ranking of the results obtained in the first stage
     * and eliminate irrelevant results using a more advanced model (e.g., Cohere Rerank).
     * <p>
     * We will continue using {@link AiServices} for this example,
     * but the same principles apply to {@link ConversationalRetrievalChain}, or you can develop your custom RAG flow.
     */
    public static void main(String[] args) {

        CustomerSupportAgent agent = createCustomerSupportAgent();

        // First, say "Hi". Observe how all segments retrieved in the first stage were filtered out.
        // Then, ask "Can I cancel my reservation?" and observe how all but one segment were filtered out.
        // Console REPL: reads queries until the user types 'exit'.
        try (Scanner scanner = new Scanner(System.in)) {
            while (true) {
                System.out.println("==================================================");
                System.out.print("User: ");
                String userQuery = scanner.nextLine();
                System.out.println("==================================================");

                if ("exit".equalsIgnoreCase(userQuery)) {
                    break;
                }

                String agentAnswer = agent.answer(userQuery);
                System.out.println("==================================================");
                System.out.println("Agent: " + agentAnswer);
            }
        }
    }

    /**
     * Wires the full RAG pipeline: chat model, in-memory embedding store over the
     * terms-of-use document, a retriever, and a Cohere re-ranking aggregator.
     */
    private static CustomerSupportAgent createCustomerSupportAgent() {

        // Check _01_Naive_RAG if you need more details on what is going on here
        ChatLanguageModel chatModel = OpenAiChatModel.builder()
                .apiKey("demo")
                .modelName("gpt-3.5-turbo")
                .build();

        EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel();

        Path documentPath = toPath("miles-of-smiles-terms-of-use.txt");
        EmbeddingStore<TextSegment> embeddingStore = embed(documentPath, embeddingModel);

        // First stage: fast, cheap retrieval — deliberately over-fetch 5 results.
        ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder()
                .embeddingStore(embeddingStore)
                .embeddingModel(embeddingModel)
                .maxResults(5) // let's get more results
                .build();

        // To register and get a free API key for Cohere, please visit the following link:
        // https://dashboard.cohere.com/welcome/register
        ScoringModel scoringModel = CohereScoringModel.withApiKey(System.getenv("COHERE_API_KEY"));

        // Second stage: re-rank with Cohere and drop anything scoring below 0.8.
        ContentAggregator contentAggregator = ReRankingContentAggregator.builder()
                .scoringModel(scoringModel)
                .minScore(0.8) // we want to present the LLM with only the truly relevant segments for the user's query
                .build();

        RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder()
                .contentRetriever(contentRetriever)
                .contentAggregator(contentAggregator)
                .build();

        return AiServices.builder(CustomerSupportAgent.class)
                .chatLanguageModel(chatModel)
                .retrievalAugmentor(retrievalAugmentor)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(10))
                .build();
    }

    /**
     * Splits the document into ~300-token segments, embeds them all, and loads them
     * into an in-memory embedding store.
     */
    private static EmbeddingStore<TextSegment> embed(Path documentPath, EmbeddingModel embeddingModel) {
        DocumentParser documentParser = new TextDocumentParser();
        Document document = FileSystemDocumentLoader.loadDocument(documentPath, documentParser);

        DocumentSplitter splitter = DocumentSplitters.recursive(300, 0);
        List<TextSegment> segments = splitter.split(document);

        List<Embedding> embeddings = embeddingModel.embedAll(segments).content();

        EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>();
        embeddingStore.addAll(embeddings, segments);
        return embeddingStore;
    }

    /** AiServices contract: one method, answers a user query. */
    interface CustomerSupportAgent {

        String answer(String query);
    }

    /** Resolves a classpath resource to a filesystem {@link Path}. */
    private static Path toPath(String fileName) {
        try {
            URL fileUrl = _04_Advanced_RAG_with_ReRanking.class.getResource(fileName);
            return Paths.get(fileUrl.toURI());
        } catch (URISyntaxException e) {
            throw new RuntimeException(e);
        }
    }
}
[ "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder", "dev.langchain4j.rag.DefaultRetrievalAugmentor.builder", "dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((4121, 4247), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4121, 4222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4121, 4178), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4533, 4751), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4533, 4700), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4533, 4669), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((4533, 4621), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((5049, 5275), 'dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder'), ((5049, 5160), 'dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder'), ((5049, 5129), 'dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator.builder'), ((5326, 5492), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5326, 5467), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5326, 5413), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((5510, 5756), 'dev.langchain4j.service.AiServices.builder'), ((5510, 5731), 'dev.langchain4j.service.AiServices.builder'), ((5510, 5658), 'dev.langchain4j.service.AiServices.builder'), ((5510, 5602), 'dev.langchain4j.service.AiServices.builder')]
package me.egaetan.xpchat; import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson; import static dev.langchain4j.data.message.ChatMessageSerializer.messagesToJson; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; import static java.util.stream.Collectors.joining; import static org.mapdb.Serializer.INTEGER; import static org.mapdb.Serializer.STRING; import java.io.IOException; import java.io.InputStream; import java.net.URI; import java.net.http.HttpClient; import java.net.http.HttpRequest; import java.net.http.HttpRequest.BodyPublishers; import java.net.http.HttpResponse; import java.net.http.HttpResponse.BodyHandlers; import java.time.Duration; import java.util.Collections; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ExecutorService; import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; import org.eclipse.jetty.server.session.SessionHandler; import org.jetbrains.annotations.NotNull; import org.mapdb.DB; import org.mapdb.DBMaker; import org.testcontainers.containers.Container.ExecResult; import org.testcontainers.containers.GenericContainer; import org.testcontainers.containers.output.OutputFrame.OutputType; import org.testcontainers.containers.wait.strategy.DockerHealthcheckWaitStrategy; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.dockerjava.api.model.DeviceRequest; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentLoader; import dev.langchain4j.data.document.DocumentSource; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.Metadata; import 
dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.chat.ChatMemoryProvider; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.embedding.E5SmallV2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.ollama.OllamaStreamingChatModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.service.AiServices; import dev.langchain4j.service.MemoryId; import dev.langchain4j.service.SystemMessage; import dev.langchain4j.service.TokenStream; import dev.langchain4j.service.UserMessage; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore; import dev.langchain4j.store.memory.chat.ChatMemoryStore; import io.javalin.Javalin; import io.javalin.community.ssl.SslPlugin; import io.javalin.http.Context; import io.javalin.http.UploadedFile; import io.javalin.http.staticfiles.Location; import io.javalin.websocket.WsConnectContext; public class MyCM { private static final String WHISPER_MODEL = "whisper"; private static final String STABLEDIFFUSION_URL_TXT2IMG = "http://127.0.0.1:7860/sdapi/v1/txt2img"; private static final String PATH_TO_CERT = "C:\\Certbot\\live\\undefined.egaetan.me\\"; static String OLLAMA_MODEL_NAME = "gemma"; static String OLLAMA_DOCKER_IMAGE_NAME = "ollama/ollama"; static Integer OLLAMA_PORT = 11434; static String DOCKER_LOCALAI_IMAGE_NAME = "localai/localai:v2.9.0-ffmpeg-core"; static Integer LOCALAI_PORT = 8080; 
static GenericContainer<?> localai = new GenericContainer<>(DOCKER_LOCALAI_IMAGE_NAME) .withFileSystemBind("whisperModels", "/build/models") .withCommand("whisper-base") .withExposedPorts(8080); static GenericContainer<?> ollama = new GenericContainer<>(OLLAMA_DOCKER_IMAGE_NAME) .withCreateContainerCmdModifier(cmd -> { cmd .getHostConfig() .withDeviceRequests( Collections.singletonList( new DeviceRequest() .withCapabilities(Collections.singletonList(Collections.singletonList("gpu"))) .withCount(-1) ) ); }) .withFileSystemBind("ollama", "/root/.ollama") .withExposedPorts(OLLAMA_PORT); public static class Message { public String message; public Message() { } public Message(String message) { super(); this.message = message; } } public static class MessageService { public String service; public MessageService() { } public MessageService(String message) { super(); this.service = message; } } public static class MessageStop { public String stop; public MessageStop() { } public MessageStop(String message) { super(); this.stop = message; } } public static class Whisper { public String text; } interface Assistant { @SystemMessage("You are a helpful french assistant. Répond uniquement en français, ne parle jamais en anglais. 
Sois précis et juste dans toutes tes réponses") TokenStream chat(@MemoryId String id, @UserMessage String userMessage); } public static void main(String[] args) throws UnsupportedOperationException, IOException, InterruptedException { ollama.start(); ollama.followOutput(x -> System.out.println("OLLAMA>>"+x.getUtf8StringWithoutLineEnding()), OutputType.STDOUT); ollama.followOutput(x -> System.err.println("OLLAMA>>"+x.getUtf8StringWithoutLineEnding()), OutputType.STDERR); ollama.waitingFor(new DockerHealthcheckWaitStrategy()); localai.setCommand("whisper-base"); localai.start(); localai.followOutput(x -> System.out.println("LOCALAI"+x.getUtf8StringWithoutLineEnding()), OutputType.STDOUT); localai.followOutput(x -> System.err.println("LOCALAI"+x.getUtf8StringWithoutLineEnding()), OutputType.STDERR); localai.waitingFor(new DockerHealthcheckWaitStrategy()); System.out.println("Run Ollama"); ExecResult execInContainer = ollama.execInContainer("ollama", "run", "gemma:7b"); System.err.println(execInContainer.getStderr()); System.out.println(execInContainer.getStdout()); System.out.println("Create LanguageModels"); StreamingChatLanguageModel modelStreaming = OllamaStreamingChatModel.builder() .baseUrl(String.format("http://%s:%d", ollama.getHost(), ollama.getMappedPort(OLLAMA_PORT))) .timeout(Duration.ofMinutes(2)) .modelName("gemma:7b") .numPredict(8192) .temperature(0.0).build(); PersistentChatMemoryStore store = new PersistentChatMemoryStore(); DocumentSplitter splitter = DocumentSplitters.recursive(300, 50, new OpenAiTokenizer(GPT_3_5_TURBO)); EmbeddingModel embeddingModel = new E5SmallV2EmbeddingModel(); Map<String, EmbeddingStore<TextSegment>> embeddingStore = new ConcurrentHashMap<>(); ChatMemoryProvider chatMemoryProvider = memoryId -> MessageWindowChatMemory.builder().id(memoryId) .maxMessages(20) .chatMemoryStore(store) .build(); Assistant assistant = AiServices.builder(Assistant.class) .streamingChatLanguageModel(modelStreaming) 
.chatMemoryProvider(chatMemoryProvider) .build(); SslPlugin plugin = new SslPlugin(conf -> { conf.pemFromPath(PATH_TO_CERT + "cert.pem", PATH_TO_CERT + "privkey.pem"); conf.http2 = false; }); Javalin app = Javalin.create(config -> { config.staticFiles.add("src/main/resources/public", Location.EXTERNAL); config.jetty.modifyServletContextHandler(handler -> handler.setSessionHandler(new SessionHandler())); config.registerPlugin(plugin); }) ; app.before(ctx -> { ctx.req().getSession(true); }); Map<String, WsConnectContext> rsp = new ConcurrentHashMap<>(); ExecutorService executor = Executors.newFixedThreadPool(2); app.post("/api/chat2Img", ctx -> { Message msg = ctx.bodyAsClass(Message.class); String sessionId = ctx.req().getSession().getId(); draw(ctx, msg, sessionId); }); app.post("/api/speech", ctx -> { UploadedFile uploadedFile = ctx.uploadedFile("file"); MultiPartBodyPublisher publisher = new MultiPartBodyPublisher() .addPart("model", WHISPER_MODEL) .addPart("file", () -> uploadedFile.content(), "speech", "application/octet-stream"); HttpClient client = HttpClient.newHttpClient(); HttpRequest request = HttpRequest.newBuilder() .uri(URI.create("http://localhost:"+localai.getMappedPort(LOCALAI_PORT)+"/v1/audio/transcriptions")) .header("Content-Type", "multipart/form-data; boundary=" + publisher.getBoundary()) .timeout(Duration.ofMinutes(1)) .POST(publisher.build()) .build(); HttpResponse<String> response = client.send(request, BodyHandlers.ofString()); System.out.println(response.statusCode()); System.out.println(response.body()); ObjectMapper mapperWhisper = new ObjectMapper(); mapperWhisper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false); Whisper value = mapperWhisper.readValue(response.body(), Whisper.class); Message msg = new Message(value.text); String sessionId = ctx.req().getSession().getId(); System.out.println("SessionId : " + sessionId); generateChat(embeddingModel, embeddingStore, assistant, rsp, executor, msg, sessionId); 
ctx.json(msg); }); app.post("/api/chat", ctx -> { Message msg = ctx.bodyAsClass(Message.class); String sessionId = ctx.req().getSession().getId(); System.out.println("SessionId : " + sessionId); generateChat(embeddingModel, embeddingStore, assistant, rsp, executor, msg, sessionId); System.out.println(msg.message); }); app.post("/api/upload", ctx -> { String sessionId = ctx.req().getSession().getId(); System.out.println("Upload"); UploadedFile uploadedFile = ctx.uploadedFile("document"); InputStream content = uploadedFile.content(); Document document = DocumentLoader.load(new DocumentSource() { @Override public Metadata metadata() { return new Metadata(); } @Override public InputStream inputStream() throws IOException { return content; } }, new ApachePdfBoxDocumentParser()); List<TextSegment> segments = splitter.split(document); List<Embedding> embeddings = embeddingModel.embedAll(segments).content(); embeddingStore.computeIfAbsent(sessionId, __ -> new InMemoryEmbeddingStore<>()).addAll(embeddings, segments); System.out.println("OK -pdf"); }); app.ws("/api/canal", ctx -> { ctx.onConnect(r -> { String sessionId = r.getUpgradeCtx$javalin().req().getSession().getId(); System.out.println("Session " + sessionId); rsp.put(sessionId, r); r.sendPing(); }); ctx.onClose(r -> { String sessionId = r.getUpgradeCtx$javalin().req().getSession().getId(); System.out.println("Delete Session " + sessionId); store.deleteMessages(sessionId); embeddingStore.remove(sessionId); rsp.remove(sessionId); }); }); Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(() -> { for (var x : rsp.values()) { x.sendPing(); } }, 1, 1, TimeUnit.SECONDS); app.start(7070); } private static void draw(@NotNull Context ctx, Message msg, String sessionId) throws IOException, InterruptedException { System.out.println("Chat2img : " + msg.message); System.out.println("SessionId : " + sessionId); HttpClient client = HttpClient.newHttpClient(); String body = """ { "prompt": "$$$PROMPT$$$", 
"negative_prompt" : "ugly, bad quality", "steps": 20, "cfg_scale": 5, "sampler_name": "DPM++ 3M SDE Karras", "width": 512, "height": 512, "override_settings": { "sd_model_checkpoint": "icbinpICantBelieveIts_newYear", "CLIP_stop_at_last_layers": 2 }, "extra_generation_params": {"ADetailer model": "face_yolov8n.pt", "ADetailer confidence": 0.3, "ADetailer dilate erode": 4, "ADetailer mask blur": 4, "ADetailer denoising strength": 0.4, "ADetailer inpaint only masked": true, "ADetailer inpaint padding": 32, "ADetailer version": "24.1.2", "Denoising strength": 0.4, "Mask blur": 4, "Inpaint area": "Only masked", "Masked area padding": 32} } """; HttpRequest req = HttpRequest.newBuilder() .uri(URI.create(STABLEDIFFUSION_URL_TXT2IMG)) .POST(BodyPublishers.ofString(body.replace("$$$PROMPT$$$", msg.message))) .build(); HttpResponse<String> reponse = client.send(req, BodyHandlers.ofString()); System.out.println("Done"); ctx.result(reponse.body()); } private static void generateChat(EmbeddingModel embeddingModel, Map<String, EmbeddingStore<TextSegment>> embeddingStore, Assistant assistant, Map<String, WsConnectContext> rsp, ExecutorService executor, Message msg, String sessionId) { System.out.println(">>>" + msg.message); EmbeddingStore<TextSegment> embeddings = embeddingStore.get(sessionId); if (embeddings == null) { executor.execute(() -> speak(assistant, rsp, msg, sessionId)); } else { Embedding questionEmbedding = embeddingModel.embed(msg.message).content(); int maxResults = 10; double minScore = 0.7; List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddings.findRelevant(questionEmbedding, maxResults, minScore); PromptTemplate promptTemplate = PromptTemplate .from("Répond à la question suivante avec la plus grande précisions:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "En te basant sur les informations suivantes:\n" + "{{information}}"); String information = relevantEmbeddings.stream().map(match -> match.embedded().text()) .collect(joining("\n\n")); 
System.out.println("Embeddings:" + information.length() +"\n------------------\n"); Map<String, Object> variables = new HashMap<>(); variables.put("question", msg.message); variables.put("information", information); Prompt prompt = promptTemplate.apply(variables); executor.execute(() -> speak(assistant, rsp, new Message(prompt.text()), sessionId)); } } private static void speak(Assistant assistant, Map<String, WsConnectContext> rsp, Message msg, String sessionId) { TokenStream tokenStream = assistant.chat(sessionId, msg.message); AtomicBoolean receive = new AtomicBoolean(false); tokenStream.onNext(t -> { WsConnectContext x = rsp.get(sessionId); if (x == null) { System.out.println("No session"); tokenStream.onNext(__ -> {}); return; } try { x.send(new ObjectMapper().writeValueAsString(new Message(t))); } catch (JsonProcessingException e) { e.printStackTrace(); } if (!receive.getAndSet(true)) { System.out.println("Début de la réponse"); } }) .onComplete(t -> { WsConnectContext x = rsp.get(sessionId); if (x == null) { return; } try { x.send(new ObjectMapper().writeValueAsString(new MessageService(t.content().text()))); } catch (JsonProcessingException e) { e.printStackTrace(); } System.out.println(t); }) .onError(t-> { WsConnectContext x = rsp.get(sessionId); if (x == null) { return; } try { x.send(new ObjectMapper().writeValueAsString(new MessageStop("ERROR"))); } catch (JsonProcessingException e) { e.printStackTrace(); } System.err.println(t); }) .start(); } static class PersistentChatMemoryStore implements ChatMemoryStore { private final DB db = DBMaker.fileDB("multi-user-chat-memory.db").transactionEnable().make(); private final Map<Integer, String> map = db.hashMap("messages", INTEGER, STRING).createOrOpen(); public PersistentChatMemoryStore() { map.clear(); } @Override public List<ChatMessage> getMessages(Object memoryId) { String json = map.get((int) memoryId.hashCode()); return messagesFromJson(json); } @Override public void updateMessages(Object memoryId, 
List<ChatMessage> messages) { String json = messagesToJson(messages); map.put((int) memoryId.hashCode(), json); db.commit(); } @Override public void deleteMessages(Object memoryId) { map.remove((int) memoryId.hashCode()); db.commit(); } } }
[ "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.memory.chat.MessageWindowChatMemory.builder", "dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder" ]
[((6543, 6789), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6781), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6759), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6737), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6710), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((6543, 6674), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((7173, 7281), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7173, 7268), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7173, 7240), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7173, 7219), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((7308, 7448), 'dev.langchain4j.service.AiServices.builder'), ((7308, 7435), 'dev.langchain4j.service.AiServices.builder'), ((7308, 7391), 'dev.langchain4j.service.AiServices.builder'), ((8608, 8908), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8894), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8864), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8827), 'java.net.http.HttpRequest.newBuilder'), ((8608, 8738), 'java.net.http.HttpRequest.newBuilder'), ((11199, 11355), 'java.util.concurrent.Executors.newSingleThreadScheduledExecutor'), ((12436, 12601), 'java.net.http.HttpRequest.newBuilder'), ((12436, 12588), 'java.net.http.HttpRequest.newBuilder'), ((12436, 12510), 'java.net.http.HttpRequest.newBuilder'), ((15598, 15668), 'org.mapdb.DBMaker.fileDB'), ((15598, 15661), 'org.mapdb.DBMaker.fileDB')]
import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import static java.time.Duration.ofSeconds; public class _01_ModelParameters { public static void main(String[] args) { // OpenAI parameters are explained here: https://platform.openai.com/docs/api-reference/chat/create ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.OPENAI_API_KEY) .modelName("gpt-3.5-turbo") .temperature(0.3) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) .build(); String prompt = "Explain in three lines how to make a beautiful painting"; String response = model.generate(prompt); System.out.println(response); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((377, 664), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 639), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 603), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 568), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 528), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 494), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((377, 450), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package dev.example; import static dev.langchain4j.data.document.UrlDocumentLoader.load; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; import java.io.IOException; import java.net.URL; import jakarta.enterprise.context.ApplicationScoped; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.retriever.EmbeddingStoreRetriever; import dev.langchain4j.retriever.Retriever; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore; public class Beans { @ApplicationScoped Retriever<TextSegment> retriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) { // You will need to adjust these parameters to find the optimal setting, which will depend on two main factors: // - The nature of your data // - The embedding model you are using int maxResultsRetrieved = 1; double minScore = 0.6; return EmbeddingStoreRetriever.from(embeddingStore, embeddingModel, maxResultsRetrieved, minScore); } @ApplicationScoped EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel) throws IOException { // Normally, you would already have your embedding store filled with your data. // However, for the purpose of this demonstration, we will: // 1. Create an in-memory embedding store EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>(); // 2. 
Load an example document ("Miles of Smiles" terms of use) String documentName = "miles-of-smiles-terms-of-use.txt"; URL resource = Thread.currentThread().getContextClassLoader().getResource(documentName); if (resource == null) { throw new IllegalStateException("Unable to locate document: '" + documentName + "' on the classpath"); } Document document = load(resource); // 3. Split the document into segments 100 tokens each // 4. Convert segments into embeddings // 5. Store embeddings into embedding store // All this can be done manually, but we will use EmbeddingStoreIngestor to automate this: DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_3_5_TURBO)); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(documentSplitter) .embeddingModel(embeddingModel) .embeddingStore(embeddingStore) .build(); ingestor.ingest(document); return embeddingStore; } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((2707, 2912), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2707, 2887), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2707, 2839), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2707, 2791), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package com.kchandrakant; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; public class SimpleService { interface Assistant { String chat(String message); } public static void main(String[] args) { ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10); Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY)) .chatMemory(chatMemory) .build(); String answer = assistant.chat("Hello! My name is Klaus."); System.out.println(answer); // Hello Klaus! How can I assist you today? String answerWithName = assistant.chat("What is my name?"); System.out.println(answerWithName); // Your name is Klaus. } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((481, 668), 'dev.langchain4j.service.AiServices.builder'), ((481, 643), 'dev.langchain4j.service.AiServices.builder'), ((481, 603), 'dev.langchain4j.service.AiServices.builder')]
package com.redhat; import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.loader.FileSystemDocumentLoader; import dev.langchain4j.data.document.parser.TextDocumentParser; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore; import io.quarkus.runtime.StartupEvent; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; import java.io.File; import java.util.List; @ApplicationScoped public class IngestorExample { /** * The embedding store (the database). * The bean is provided by the quarkus-langchain4j-redis extension. */ @Inject RedisEmbeddingStore store; /** * The embedding model (how the vector of a document is computed). * The bean is provided by the LLM (like openai) extension. */ @Inject EmbeddingModel embeddingModel; public void ingest(@Observes StartupEvent event) { System.out.printf("Ingesting documents...%n"); // List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/bank").toPath(), // List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/museum").toPath(), // List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/halffoods").toPath(), List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/insurance").toPath(), new TextDocumentParser()); var ingestor = EmbeddingStoreIngestor.builder() .embeddingStore(store) .embeddingModel(embeddingModel) .documentSplitter(recursive(500, 0)) .build(); ingestor.ingest(documents); System.out.printf("Ingested %d documents.%n", documents.size()); } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((1785, 1982), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1957), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1904), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1856), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.structured.StructuredPrompt; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.output.structured.Description; import dev.langchain4j.service.*; import java.math.BigDecimal; import java.math.BigInteger; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.util.List; import static java.util.Arrays.asList; public class OtherServiceExamples { static ChatLanguageModel chatLanguageModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY); static class Sentiment_Extracting_AI_Service_Example { enum Sentiment { POSITIVE, NEUTRAL, NEGATIVE; } interface SentimentAnalyzer { @UserMessage("Analyze sentiment of {{it}}") Sentiment analyzeSentimentOf(String text); @UserMessage("Does {{it}} have a positive sentiment?") boolean isPositive(String text); } public static void main(String[] args) { SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, chatLanguageModel); Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is good!"); System.out.println(sentiment); // POSITIVE boolean positive = sentimentAnalyzer.isPositive("It is bad!"); System.out.println(positive); // false } } static class Number_Extracting_AI_Service_Example { interface NumberExtractor { @UserMessage("Extract number from {{it}}") int extractInt(String text); @UserMessage("Extract number from {{it}}") long extractLong(String text); @UserMessage("Extract number from {{it}}") BigInteger extractBigInteger(String text); @UserMessage("Extract number from {{it}}") float extractFloat(String text); @UserMessage("Extract number from {{it}}") double extractDouble(String text); @UserMessage("Extract number from {{it}}") BigDecimal extractBigDecimal(String text); } public static void main(String[] args) { NumberExtractor extractor = AiServices.create(NumberExtractor.class, chatLanguageModel); String text = "After countless 
millennia of computation, the supercomputer Deep Thought finally announced " + "that the answer to the ultimate question of life, the universe, and everything was forty two."; int intNumber = extractor.extractInt(text); System.out.println(intNumber); // 42 long longNumber = extractor.extractLong(text); System.out.println(longNumber); // 42 BigInteger bigIntegerNumber = extractor.extractBigInteger(text); System.out.println(bigIntegerNumber); // 42 float floatNumber = extractor.extractFloat(text); System.out.println(floatNumber); // 42.0 double doubleNumber = extractor.extractDouble(text); System.out.println(doubleNumber); // 42.0 BigDecimal bigDecimalNumber = extractor.extractBigDecimal(text); System.out.println(bigDecimalNumber); // 42.0 } } static class Date_and_Time_Extracting_AI_Service_Example { interface DateTimeExtractor { @UserMessage("Extract date from {{it}}") LocalDate extractDateFrom(String text); @UserMessage("Extract time from {{it}}") LocalTime extractTimeFrom(String text); @UserMessage("Extract date and time from {{it}}") LocalDateTime extractDateTimeFrom(String text); } public static void main(String[] args) { DateTimeExtractor extractor = AiServices.create(DateTimeExtractor.class, chatLanguageModel); String text = "The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight," + " following the celebrations of Independence Day."; LocalDate date = extractor.extractDateFrom(text); System.out.println(date); // 1968-07-04 LocalTime time = extractor.extractTimeFrom(text); System.out.println(time); // 23:45 LocalDateTime dateTime = extractor.extractDateTimeFrom(text); System.out.println(dateTime); // 1968-07-04T23:45 } } static class POJO_Extracting_AI_Service_Example { static class Person { private String firstName; private String lastName; private LocalDate birthDate; @Override public String toString() { return "Person {" + " firstName = \"" + firstName + "\"" + ", lastName = \"" + lastName + "\"" + ", birthDate = " + birthDate + 
" }"; } } interface PersonExtractor { @UserMessage("Extract information about a person from {{it}}") Person extractPersonFrom(String text); } public static void main(String[] args) { ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder() .apiKey(System.getenv("OPENAI_API_KEY")) // When extracting POJOs with the LLM that supports the "json mode" feature // (e.g., OpenAI, Azure OpenAI, Ollama, etc.), it is advisable to use it to get more reliable results. // When using this feature, LLM will be forced to output a valid JSON. // Please note that this feature is not (yet) supported when using "demo" key. .responseFormat("json_object") .build(); PersonExtractor extractor = AiServices.create(PersonExtractor.class, chatLanguageModel); String text = "In 1968, amidst the fading echoes of Independence Day, " + "a child named John arrived under the calm evening sky. " + "This newborn, bearing the surname Doe, marked the start of a new journey."; Person person = extractor.extractPersonFrom(text); System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 } } } static class POJO_With_Descriptions_Extracting_AI_Service_Example { static class Recipe { @Description("short title, 3 words maximum") private String title; @Description("short description, 2 sentences maximum") private String description; @Description("each step should be described in 4 words, steps should rhyme") private List<String> steps; private Integer preparationTimeMinutes; @Override public String toString() { return "Recipe {" + " title = \"" + title + "\"" + ", description = \"" + description + "\"" + ", steps = " + steps + ", preparationTimeMinutes = " + preparationTimeMinutes + " }"; } } @StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}") static class CreateRecipePrompt { private String dish; private List<String> ingredients; } interface Chef { Recipe createRecipeFrom(String... 
ingredients); Recipe createRecipe(CreateRecipePrompt prompt); } public static void main(String[] args) { Chef chef = AiServices.create(Chef.class, chatLanguageModel); Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives"); System.out.println(recipe); // Recipe { // title = "Greek Salad", // description = "A refreshing mix of veggies and feta cheese in a zesty dressing.", // steps = [ // "Chop cucumber and tomato", // "Add onion and olives", // "Crumble feta on top", // "Drizzle with dressing and enjoy!" // ], // preparationTimeMinutes = 10 // } CreateRecipePrompt prompt = new CreateRecipePrompt(); prompt.dish = "salad"; prompt.ingredients = asList("cucumber", "tomato", "feta", "onion", "olives"); Recipe anotherRecipe = chef.createRecipe(prompt); System.out.println(anotherRecipe); // Recipe ... } } static class AI_Service_with_System_Message_Example { interface Chef { @SystemMessage("You are a professional chef. You are friendly, polite and concise.") String answer(String question); } public static void main(String[] args) { Chef chef = AiServices.create(Chef.class, chatLanguageModel); String answer = chef.answer("How long should I grill chicken?"); System.out.println(answer); // Grilling chicken usually takes around 10-15 minutes per side, depending on ... } } static class AI_Service_with_System_and_User_Messages_Example { interface TextUtils { @SystemMessage("You are a professional translator into {{language}}") @UserMessage("Translate the following text: {{text}}") String translate(@V("text") String text, @V("language") String language); @SystemMessage("Summarize every message from user in {{n}} bullet points. 
Provide only bullet points.") List<String> summarize(@UserMessage String text, @V("n") int n); } public static void main(String[] args) { TextUtils utils = AiServices.create(TextUtils.class, chatLanguageModel); String translation = utils.translate("Hello, how are you?", "italian"); System.out.println(translation); // Ciao, come stai? String text = "AI, or artificial intelligence, is a branch of computer science that aims to create " + "machines that mimic human intelligence. This can range from simple tasks such as recognizing " + "patterns or speech to more complex tasks like making decisions or predictions."; List<String> bulletPoints = utils.summarize(text, 3); System.out.println(bulletPoints); // [ // "- AI is a branch of computer science", // "- It aims to create machines that mimic human intelligence", // "- It can perform simple or complex tasks" // ] } } static class AI_Service_with_UserName_Example { interface Assistant { String chat(@UserName String name, @UserMessage String message); } public static void main(String[] args) { Assistant assistant = AiServices.create(Assistant.class, chatLanguageModel); String answer = assistant.chat("Klaus", "Hi, tell me my name if you see it."); System.out.println(answer); // Hello! Your name is Klaus. How can I assist you today? } } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((5313, 5888), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5859), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5399), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.neo4j.Neo4jEmbeddingStore; import java.util.List; public class Neo4jEmbeddingStoreExample { /** * To run this example, ensure you have Neo4j running locally, * and change uri, username and password strings consistently. * If not, then: * - Execute "docker pull neo4j:latest" * - Execute "docker run -d -p 7687:7687 --env NEO4J_AUTH=neo4j/password1234 neo4j:latest" * - Wait until Neo4j is ready to serve (may take a few minutes) */ public static void main(String[] args) { String uri = "bolt://localhost:7687"; String username = "neo4j"; String password = "password1234"; EmbeddingStore<TextSegment> embeddingStore = Neo4jEmbeddingStore.builder() .withBasicAuth(uri, username, password) .dimension(384) .build(); EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); TextSegment segment1 = TextSegment.from("I like football."); Embedding embedding1 = embeddingModel.embed(segment1).content(); embeddingStore.add(embedding1, segment1); TextSegment segment2 = TextSegment.from("The weather is good today."); Embedding embedding2 = embeddingModel.embed(segment2).content(); embeddingStore.add(embedding2, segment2); Embedding queryEmbedding = embeddingModel.embed("What is your favourite sport?").content(); List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1); EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0); System.out.println(embeddingMatch.score()); // 0.8144289255142212 System.out.println(embeddingMatch.embedded().text()); // I like football. } }
[ "dev.langchain4j.store.embedding.neo4j.Neo4jEmbeddingStore.builder" ]
[((1067, 1209), 'dev.langchain4j.store.embedding.neo4j.Neo4jEmbeddingStore.builder'), ((1067, 1184), 'dev.langchain4j.store.embedding.neo4j.Neo4jEmbeddingStore.builder'), ((1067, 1152), 'dev.langchain4j.store.embedding.neo4j.Neo4jEmbeddingStore.builder')]
package dev.langchain4j; import static dev.langchain4j.ModelProvider.OPEN_AI; import static dev.langchain4j.exception.IllegalConfigurationException.illegalConfiguration; import static dev.langchain4j.internal.Utils.isNullOrBlank; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.embedding.S2OnnxEmbeddingModel; import dev.langchain4j.model.embedding.BgeSmallZhEmbeddingModel; import dev.langchain4j.model.huggingface.HuggingFaceChatModel; import dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel; import dev.langchain4j.model.huggingface.HuggingFaceLanguageModel; import dev.langchain4j.model.language.LanguageModel; import dev.langchain4j.model.localai.LocalAiChatModel; import dev.langchain4j.model.localai.LocalAiEmbeddingModel; import dev.langchain4j.model.localai.LocalAiLanguageModel; import dev.langchain4j.model.moderation.ModerationModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiEmbeddingModel; import dev.langchain4j.model.openai.OpenAiLanguageModel; import dev.langchain4j.model.openai.OpenAiModerationModel; import java.util.Objects; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean; import org.springframework.boot.context.properties.EnableConfigurationProperties; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.context.annotation.Lazy; import org.springframework.context.annotation.Primary; @Configuration @EnableConfigurationProperties(S2LangChain4jProperties.class) public class S2LangChain4jAutoConfiguration { @Autowired private S2LangChain4jProperties properties; @Bean @Lazy @ConditionalOnMissingBean ChatLanguageModel chatLanguageModel(S2LangChain4jProperties properties) { if (properties.getChatModel() == null) { throw 
illegalConfiguration("\n\nPlease define 'langchain4j.chat-model' properties, for example:\n" + "langchain4j.chat-model.provider = openai\n" + "langchain4j.chat-model.openai.api-key = sk-...\n"); } switch (properties.getChatModel().getProvider()) { case OPEN_AI: OpenAi openAi = properties.getChatModel().getOpenAi(); if (openAi == null || isNullOrBlank(openAi.getApiKey())) { throw illegalConfiguration("\n\nPlease define 'langchain4j.chat-model.openai.api-key' property"); } return OpenAiChatModel.builder() .baseUrl(openAi.getBaseUrl()) .apiKey(openAi.getApiKey()) .modelName(openAi.getModelName()) .temperature(openAi.getTemperature()) .topP(openAi.getTopP()) .maxTokens(openAi.getMaxTokens()) .presencePenalty(openAi.getPresencePenalty()) .frequencyPenalty(openAi.getFrequencyPenalty()) .timeout(openAi.getTimeout()) .maxRetries(openAi.getMaxRetries()) .logRequests(openAi.getLogRequests()) .logResponses(openAi.getLogResponses()) .build(); case HUGGING_FACE: HuggingFace huggingFace = properties.getChatModel().getHuggingFace(); if (huggingFace == null || isNullOrBlank(huggingFace.getAccessToken())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.chat-model.huggingface.access-token' property"); } return HuggingFaceChatModel.builder() .accessToken(huggingFace.getAccessToken()) .modelId(huggingFace.getModelId()) .timeout(huggingFace.getTimeout()) .temperature(huggingFace.getTemperature()) .maxNewTokens(huggingFace.getMaxNewTokens()) .returnFullText(huggingFace.getReturnFullText()) .waitForModel(huggingFace.getWaitForModel()) .build(); case LOCAL_AI: LocalAi localAi = properties.getChatModel().getLocalAi(); if (localAi == null || isNullOrBlank(localAi.getBaseUrl())) { throw illegalConfiguration("\n\nPlease define 'langchain4j.chat-model.localai.base-url' property"); } if (isNullOrBlank(localAi.getModelName())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.chat-model.localai.model-name' property"); } return LocalAiChatModel.builder() 
.baseUrl(localAi.getBaseUrl()) .modelName(localAi.getModelName()) .temperature(localAi.getTemperature()) .topP(localAi.getTopP()) .maxTokens(localAi.getMaxTokens()) .timeout(localAi.getTimeout()) .maxRetries(localAi.getMaxRetries()) .logRequests(localAi.getLogRequests()) .logResponses(localAi.getLogResponses()) .build(); default: throw illegalConfiguration("Unsupported chat model provider: %s", properties.getChatModel().getProvider()); } } @Bean @Lazy @ConditionalOnMissingBean LanguageModel languageModel(S2LangChain4jProperties properties) { if (properties.getLanguageModel() == null) { throw illegalConfiguration("\n\nPlease define 'langchain4j.language-model' properties, for example:\n" + "langchain4j.language-model.provider = openai\n" + "langchain4j.language-model.openai.api-key = sk-...\n"); } switch (properties.getLanguageModel().getProvider()) { case OPEN_AI: OpenAi openAi = properties.getLanguageModel().getOpenAi(); if (openAi == null || isNullOrBlank(openAi.getApiKey())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.language-model.openai.api-key' property"); } return OpenAiLanguageModel.builder() .apiKey(openAi.getApiKey()) .baseUrl(openAi.getBaseUrl()) .modelName(openAi.getModelName()) .temperature(openAi.getTemperature()) .timeout(openAi.getTimeout()) .maxRetries(openAi.getMaxRetries()) .logRequests(openAi.getLogRequests()) .logResponses(openAi.getLogResponses()) .build(); case HUGGING_FACE: HuggingFace huggingFace = properties.getLanguageModel().getHuggingFace(); if (huggingFace == null || isNullOrBlank(huggingFace.getAccessToken())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.language-model.huggingface.access-token' property"); } return HuggingFaceLanguageModel.builder() .accessToken(huggingFace.getAccessToken()) .modelId(huggingFace.getModelId()) .timeout(huggingFace.getTimeout()) .temperature(huggingFace.getTemperature()) .maxNewTokens(huggingFace.getMaxNewTokens()) .returnFullText(huggingFace.getReturnFullText()) 
.waitForModel(huggingFace.getWaitForModel()) .build(); case LOCAL_AI: LocalAi localAi = properties.getLanguageModel().getLocalAi(); if (localAi == null || isNullOrBlank(localAi.getBaseUrl())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.language-model.localai.base-url' property"); } if (isNullOrBlank(localAi.getModelName())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.language-model.localai.model-name' property"); } return LocalAiLanguageModel.builder() .baseUrl(localAi.getBaseUrl()) .modelName(localAi.getModelName()) .temperature(localAi.getTemperature()) .topP(localAi.getTopP()) .maxTokens(localAi.getMaxTokens()) .timeout(localAi.getTimeout()) .maxRetries(localAi.getMaxRetries()) .logRequests(localAi.getLogRequests()) .logResponses(localAi.getLogResponses()) .build(); default: throw illegalConfiguration("Unsupported language model provider: %s", properties.getLanguageModel().getProvider()); } } @Bean @Lazy @ConditionalOnMissingBean @Primary EmbeddingModel embeddingModel(S2LangChain4jProperties properties) { if (properties.getEmbeddingModel() == null || properties.getEmbeddingModel().getProvider() == null) { throw illegalConfiguration("\n\nPlease define 'langchain4j.embedding-model' properties, for example:\n" + "langchain4j.embedding-model.provider = openai\n" + "langchain4j.embedding-model.openai.api-key = sk-...\n"); } switch (properties.getEmbeddingModel().getProvider()) { case OPEN_AI: OpenAi openAi = properties.getEmbeddingModel().getOpenAi(); if (openAi == null || isNullOrBlank(openAi.getApiKey())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.embedding-model.openai.api-key' property"); } return OpenAiEmbeddingModel.builder() .apiKey(openAi.getApiKey()) .baseUrl(openAi.getBaseUrl()) .modelName(openAi.getModelName()) .timeout(openAi.getTimeout()) .maxRetries(openAi.getMaxRetries()) .logRequests(openAi.getLogRequests()) .logResponses(openAi.getLogResponses()) .build(); case HUGGING_FACE: HuggingFace 
huggingFace = properties.getEmbeddingModel().getHuggingFace(); if (huggingFace == null || isNullOrBlank(huggingFace.getAccessToken())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.embedding-model.huggingface.access-token' property"); } return HuggingFaceEmbeddingModel.builder() .accessToken(huggingFace.getAccessToken()) .modelId(huggingFace.getModelId()) .waitForModel(huggingFace.getWaitForModel()) .timeout(huggingFace.getTimeout()) .build(); case LOCAL_AI: LocalAi localAi = properties.getEmbeddingModel().getLocalAi(); if (localAi == null || isNullOrBlank(localAi.getBaseUrl())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.embedding-model.localai.base-url' property"); } if (isNullOrBlank(localAi.getModelName())) { throw illegalConfiguration( "\n\nPlease define 'langchain4j.embedding-model.localai.model-name' property"); } return LocalAiEmbeddingModel.builder() .baseUrl(localAi.getBaseUrl()) .modelName(localAi.getModelName()) .timeout(localAi.getTimeout()) .maxRetries(localAi.getMaxRetries()) .logRequests(localAi.getLogRequests()) .logResponses(localAi.getLogResponses()) .build(); case IN_PROCESS: InProcess inProcess = properties.getEmbeddingModel().getInProcess(); if (Objects.isNull(inProcess) || isNullOrBlank(inProcess.getModelPath())) { return new BgeSmallZhEmbeddingModel(); } return new S2OnnxEmbeddingModel(inProcess.getModelPath(), inProcess.getVocabularyPath()); default: throw illegalConfiguration("Unsupported embedding model provider: %s", properties.getEmbeddingModel().getProvider()); } } @Bean @Lazy @ConditionalOnMissingBean ModerationModel moderationModel(S2LangChain4jProperties properties) { if (properties.getModerationModel() == null) { throw illegalConfiguration("\n\nPlease define 'langchain4j.moderation-model' properties, for example:\n" + "langchain4j.moderation-model.provider = openai\n" + "langchain4j.moderation-model.openai.api-key = sk-...\n"); } if (properties.getModerationModel().getProvider() != OPEN_AI) { throw 
illegalConfiguration("Unsupported moderation model provider: %s", properties.getModerationModel().getProvider()); } OpenAi openAi = properties.getModerationModel().getOpenAi(); if (openAi == null || isNullOrBlank(openAi.getApiKey())) { throw illegalConfiguration("\n\nPlease define 'langchain4j.moderation-model.openai.api-key' property"); } return OpenAiModerationModel.builder() .apiKey(openAi.getApiKey()) .modelName(openAi.getModelName()) .timeout(openAi.getTimeout()) .maxRetries(openAi.getMaxRetries()) .logRequests(openAi.getLogRequests()) .logResponses(openAi.getLogResponses()) .build(); } }
[ "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder", "dev.langchain4j.model.openai.OpenAiModerationModel.builder", "dev.langchain4j.model.openai.OpenAiLanguageModel.builder", "dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder", "dev.langchain4j.model.localai.LocalAiLanguageModel.builder", "dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder", "dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder", "dev.langchain4j.model.localai.LocalAiChatModel.builder", "dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((2669, 3441), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3408), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3344), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3282), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3222), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3168), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3096), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 3026), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 2968), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 2920), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 2858), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 2800), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2669, 2748), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3849, 4375), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 4342), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 4273), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 4200), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 4131), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 4064), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 4005), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((3849, 3946), 'dev.langchain4j.model.huggingface.HuggingFaceChatModel.builder'), ((4948, 5536), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5503), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5438), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5375), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5314), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5259), 
'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5200), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5151), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5088), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((4948, 5029), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((6606, 7134), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 7101), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 7037), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 6975), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 6915), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 6861), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 6799), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 6741), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((6606, 6687), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((7550, 8080), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 8047), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 7978), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 7905), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 7836), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 7769), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 7710), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((7550, 7651), 'dev.langchain4j.model.huggingface.HuggingFaceLanguageModel.builder'), ((8694, 9286), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 9253), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 9188), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 9125), 
'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 9064), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 9009), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 8950), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 8901), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 8838), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((8694, 8779), 'dev.langchain4j.model.localai.LocalAiLanguageModel.builder'), ((10443, 10910), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10877), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10813), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10751), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10691), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10637), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10579), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((10443, 10525), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((11328, 11650), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((11328, 11617), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((11328, 11558), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((11328, 11489), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((11328, 11430), 'dev.langchain4j.model.huggingface.HuggingFaceEmbeddingModel.builder'), ((12267, 12689), 'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((12267, 12656), 'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((12267, 12591), 'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((12267, 12528), 'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((12267, 12467), 
'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((12267, 12412), 'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((12267, 12353), 'dev.langchain4j.model.localai.LocalAiEmbeddingModel.builder'), ((14251, 14609), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((14251, 14584), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((14251, 14528), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((14251, 14474), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((14251, 14422), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((14251, 14376), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder'), ((14251, 14326), 'dev.langchain4j.model.openai.OpenAiModerationModel.builder')]
package io.quarkiverse.langchain4j.workshop.chat; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.ChatMemoryProvider; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import jakarta.enterprise.context.ApplicationScoped; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; @ApplicationScoped public class ChatMemoryBean implements ChatMemoryProvider { private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>(); @Override public ChatMemory get(Object memoryId) { return memories.computeIfAbsent(memoryId, id -> MessageWindowChatMemory.builder() .maxMessages(20) .id(memoryId) .build()); } public void clear(Object session) { memories.remove(session); } }
[ "dev.langchain4j.memory.chat.MessageWindowChatMemory.builder" ]
[((608, 729), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 704), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((608, 674), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')]
import dev.langchain4j.chain.ConversationalRetrievalChain; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.parser.TextDocumentParser; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.retriever.EmbeddingStoreRetriever; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; import java.time.Duration; import java.util.HashMap; import java.util.List; import java.util.Map; import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument; import static java.util.stream.Collectors.joining; public class ChatWithDocumentsExamples { // Please also check ServiceWithRetrieverExample static class IfYouNeedSimplicity { public static void main(String[] args) throws Exception { EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>(); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(DocumentSplitters.recursive(300, 0)) .embeddingModel(embeddingModel) .embeddingStore(embeddingStore) .build(); Document 
document = loadDocument(toPath("example-files/story-about-happy-carrot.txt"), new TextDocumentParser()); ingestor.ingest(document); ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY)) .retriever(EmbeddingStoreRetriever.from(embeddingStore, embeddingModel)) // .chatMemory() // you can override default chat memory // .promptTemplate() // you can override default prompt template .build(); String answer = chain.execute("Who is Charlie?"); System.out.println(answer); // Charlie is a cheerful carrot living in VeggieVille... } } static class If_You_Need_More_Control { public static void main(String[] args) { // Load the document that includes the information you'd like to "chat" about with the model. Document document = loadDocument(toPath("example-files/story-about-happy-carrot.txt"), new TextDocumentParser()); // Split document into segments 100 tokens each DocumentSplitter splitter = DocumentSplitters.recursive( 100, 0, new OpenAiTokenizer("gpt-3.5-turbo") ); List<TextSegment> segments = splitter.split(document); // Embed segments (convert them into vectors that represent the meaning) using embedding model EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); List<Embedding> embeddings = embeddingModel.embedAll(segments).content(); // Store embeddings into embedding store for further search / retrieval EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>(); embeddingStore.addAll(embeddings, segments); // Specify the question you want to ask the model String question = "Who is Charlie?"; // Embed the question Embedding questionEmbedding = embeddingModel.embed(question).content(); // Find relevant embeddings in embedding store by semantic similarity // You can play with parameters below to find a sweet spot for your specific use case int maxResults = 3; double minScore = 0.7; List<EmbeddingMatch<TextSegment>> relevantEmbeddings = 
embeddingStore.findRelevant(questionEmbedding, maxResults, minScore); // Create a prompt for the model that includes question and relevant embeddings PromptTemplate promptTemplate = PromptTemplate.from( "Answer the following question to the best of your ability:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "Base your answer on the following information:\n" + "{{information}}"); String information = relevantEmbeddings.stream() .map(match -> match.embedded().text()) .collect(joining("\n\n")); Map<String, Object> variables = new HashMap<>(); variables.put("question", question); variables.put("information", information); Prompt prompt = promptTemplate.apply(variables); // Send the prompt to the OpenAI chat model ChatLanguageModel chatModel = OpenAiChatModel.builder() .apiKey(ApiKeys.OPENAI_API_KEY) .timeout(Duration.ofSeconds(60)) .build(); AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()).content(); // See an answer from the model String answer = aiMessage.text(); System.out.println(answer); // Charlie is a cheerful carrot living in VeggieVille... } } private static Path toPath(String fileName) { try { URL fileUrl = ChatWithDocumentsExamples.class.getResource(fileName); return Paths.get(fileUrl.toURI()); } catch (URISyntaxException e) { throw new RuntimeException(e); } } }
[ "dev.langchain4j.chain.ConversationalRetrievalChain.builder", "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1887, 2127), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1887, 2098), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1887, 2046), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1887, 1994), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2345, 2758), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2345, 2567), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((2345, 2474), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((5749, 5908), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5749, 5879), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5749, 5826), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package ru.vzotov.ai.interfaces.facade.impl; import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.JavaType; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.Lists; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.message.Content; import dev.langchain4j.data.message.TextContent; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.output.structured.Description; import dev.langchain4j.rag.DefaultRetrievalAugmentor; import dev.langchain4j.rag.RetrievalAugmentor; import dev.langchain4j.rag.content.aggregator.ContentAggregator; import dev.langchain4j.rag.content.aggregator.DefaultContentAggregator; import dev.langchain4j.rag.content.injector.ContentInjector; import dev.langchain4j.rag.content.injector.DefaultContentInjector; import dev.langchain4j.rag.content.retriever.ContentRetriever; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.rag.query.Query; import dev.langchain4j.rag.query.transformer.QueryTransformer; import dev.langchain4j.service.AiServices; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import lombok.AllArgsConstructor; import lombok.Builder; import lombok.Data; import lombok.NoArgsConstructor; import org.jetbrains.annotations.NotNull; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.security.access.annotation.Secured; import org.springframework.transaction.annotation.Transactional; import ru.vzotov.accounting.infrastructure.security.SecurityUtils; import ru.vzotov.accounting.interfaces.purchases.PurchasesApi; import 
ru.vzotov.accounting.interfaces.purchases.facade.impl.assembler.PurchaseAssembler; import ru.vzotov.ai.interfaces.facade.AIFacade; import ru.vzotov.cashreceipt.domain.model.PurchaseCategory; import ru.vzotov.cashreceipt.domain.model.PurchaseCategoryId; import ru.vzotov.cashreceipt.domain.model.PurchaseCategoryRepository; import ru.vzotov.purchase.domain.model.Purchase; import ru.vzotov.purchase.domain.model.PurchaseId; import ru.vzotov.purchases.domain.model.PurchaseRepository; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Optional; import java.util.function.Function; import java.util.regex.Matcher; import java.util.regex.Pattern; import java.util.stream.Collectors; import java.util.stream.IntStream; import java.util.stream.Stream; public class AIFacadeImpl implements AIFacade { private static final Logger log = LoggerFactory.getLogger(AIFacadeImpl.class); private final PurchaseCategoryRepository purchaseCategoryRepository; private final PurchaseRepository purchaseRepository; private final EmbeddingStore<TextSegment> embeddingStore; private final EmbeddingModel embeddingModel; private final ChatLanguageModel chatLanguageModel; private final ObjectMapper objectMapper; @Builder public AIFacadeImpl(PurchaseCategoryRepository purchaseCategoryRepository, PurchaseRepository purchaseRepository, EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel, ChatLanguageModel chatLanguageModel, ObjectMapper objectMapper ) { this.purchaseCategoryRepository = purchaseCategoryRepository; this.purchaseRepository = purchaseRepository; this.embeddingStore = embeddingStore; this.embeddingModel = embeddingModel; this.chatLanguageModel = chatLanguageModel; this.objectMapper = objectMapper; } @Override @Transactional(value = "accounting-tx", readOnly = true) @Secured({"ROLE_USER"}) public List<PurchasesApi.Purchase> classifyPurchasesBySimilarity(List<String> purchaseIdList) { final int samples = 5; 
final int threshold = samples - 1; log.debug("Start hybrid classification of purchases {}. Samples={}, threshold={}", purchaseIdList, samples, threshold); try { final List<PurchaseCategory> categories = purchaseCategoryRepository.findAll(SecurityUtils.getCurrentPerson()); final Map<PurchaseCategoryId, PurchaseCategory> purchaseCategoryMap = categories.stream() .collect(Collectors.toMap(PurchaseCategory::categoryId, it -> it)); final List<Purchase> purchases = loadPurchases(purchaseIdList); final List<Purchase> classified = new ArrayList<>(); final List<String> classifyByChatModel = new ArrayList<>(); log.debug("Get embeddings"); final List<Embedding> embeddings = embeddingModel.embedAll( purchases.stream().map(p -> TextSegment.from(p.name())).toList()).content(); IntStream.range(0, purchases.size()) .boxed() .parallel() .forEach(i -> { Purchase purchase = purchases.get(i); Embedding embedding = embeddings.get(i); log.debug("{}:: Find relevant documents for purchase {}, {}", i, purchase.purchaseId(), purchase.name()); List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(embedding, samples, 0.8); Map<PurchaseCategoryId, Long> relevantCategories = relevant.stream() .map(match -> match.embedded().text()) .peek(text -> log.debug("Sample: {}", text)) .map(AIFacadeImpl::extractCategoryId) .filter(Objects::nonNull) .map(PurchaseCategoryId::new) .collect(Collectors.groupingBy(e -> e, Collectors.counting())); Optional<PurchaseCategory> optionalKey = relevantCategories.entrySet().stream() .filter(entry -> entry.getValue() > threshold) .map(Map.Entry::getKey) .findFirst() .map(purchaseCategoryMap::get); optionalKey.ifPresentOrElse(category -> { log.debug("{}:: Relevant category: {}, {}", i, category.categoryId(), category.name()); purchase.assignCategory(category); classified.add(purchase); }, () -> classifyByChatModel.add(purchase.purchaseId().value())); }); final List<PurchasesApi.Purchase> fromChatModel = new ArrayList<>(); if 
(!classifyByChatModel.isEmpty()) { log.debug("This list of purchases is ambiguous. We will use chat model for classification: {}", classifyByChatModel); Lists.partition(classifyByChatModel, 5).stream() .map(this::classifyPurchases) .forEach(fromChatModel::addAll); } return Stream.concat( new PurchaseAssembler().toDTOList(classified).stream(), fromChatModel.stream() ).toList(); } finally { log.debug("Done"); } } private static String extractCategoryId(String document) { Pattern pattern = Pattern.compile("with id '(.+?)'"); Matcher matcher = pattern.matcher(document); if (matcher.find()) { return matcher.group(1); } return null; } @Override @Transactional(value = "accounting-tx", readOnly = true) @Secured({"ROLE_USER"}) public List<PurchasesApi.Purchase> classifyPurchases(List<String> purchaseIdList) { final List<PurchaseCategory> categories = purchaseCategoryRepository.findAll(SecurityUtils.getCurrentPerson()); final Map<PurchaseCategoryId, PurchaseCategory> purchaseCategoryMap = categories.stream() .collect(Collectors.toMap(PurchaseCategory::categoryId, it -> it)); // todo: we can embed multiple queries in one request (use embedAll) // The content retriever is responsible for retrieving relevant content based on a text query. ContentRetriever contentRetriever = EmbeddingStoreContentRetriever.builder() .embeddingStore(embeddingStore) .embeddingModel(embeddingModel) .maxResults(2) // on each interaction we will retrieve the 2 most relevant segments .minScore(0.5) // we want to retrieve segments at least somewhat similar to user query .build(); // Aggregates all Contents retrieved from all ContentRetrievers using all queries. 
ContentAggregator contentAggregator = new DefaultContentAggregator(); // todo: we can use special kind of query (list of simple queries) // Splits collection query to multiple queries: one query for each item QueryTransformer queryTransformer = query -> { UserMessage userMessage = query.metadata().userMessage(); return jsonMessage(userMessage, objectMapper.constructType(AgentRequest.class), (AgentRequest data) -> data.purchases().stream() .map(s -> Query.from(s.purchaseName(), query.metadata())) .toList()); }; ContentInjector defaultContentInjector = DefaultContentInjector.builder().build(); ContentInjector contentInjector = (contents, userMessage) -> defaultContentInjector.inject(contents, UserMessage.from(jsonMessage(userMessage, objectMapper.constructType(AgentRequest.class), (AgentRequest data) -> { try { return """ Please answer which categories the list of purchases belong to: ```json %s ``` The purchase category must be one of this list of possible categories: ```json %s ``` """.formatted( objectMapper.writeValueAsString(data.purchases()), objectMapper.writeValueAsString(categories.stream().map(c -> new CategoryData(c.name(), c.categoryId().value())).toList()) ); } catch (JsonProcessingException e) { throw new RuntimeException(e); } }))); RetrievalAugmentor retrievalAugmentor = DefaultRetrievalAugmentor.builder() .contentRetriever(contentRetriever) .queryTransformer(queryTransformer) .contentAggregator(contentAggregator) .contentInjector(contentInjector) .build(); ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10); PurchaseClassifyingAgent agent = AiServices.builder(PurchaseClassifyingAgent.class) .chatLanguageModel(chatLanguageModel) .retrievalAugmentor(retrievalAugmentor) .chatMemory(chatMemory) .build(); final List<Purchase> purchases = loadPurchases(purchaseIdList); final Map<PurchaseId, Purchase> purchaseMap = purchases.stream() .collect(Collectors.toMap(Purchase::purchaseId, it -> it)); try { AgentResponse response = agent.classify( 
objectMapper.writeValueAsString(new AgentRequest(purchases.stream().map(p -> new IdNameOfPurchase(p.purchaseId().value(), p.name())).toList()))); Optional.ofNullable(response) .map(AgentResponse::classification) .stream().flatMap(List::stream) .forEach(item -> { final Purchase p = Optional.ofNullable(item.getPurchaseId()) .map(PurchaseId::new) .map(purchaseMap::get) .orElse(null); if (p == null) return; final PurchaseCategory targetCategory = Optional.ofNullable(item.getCategoryId()) .map(PurchaseCategoryId::new) .map(purchaseCategoryMap::get) .orElse(null); if (targetCategory == null) return; p.assignCategory(targetCategory); }); return new PurchaseAssembler().toDTOList(purchases); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } @NotNull private List<Purchase> loadPurchases(List<String> purchaseIdList) { return purchaseIdList.stream() .filter(Objects::nonNull) .map(PurchaseId::new) .map(purchaseRepository::find) .filter(Objects::nonNull) .toList(); } record CategoryData(String name, String id) { } @Data @NoArgsConstructor @AllArgsConstructor @Builder static class PurchaseCategoryData { private String purchaseId; private String purchaseName; private String categoryId; private String categoryName; } record IdNameOfPurchase(String purchaseId, String purchaseName) { } interface PurchaseClassifyingAgent { AgentResponse classify(String agentQuery); } record AgentResponse( @Description(""" array of objects {"purchaseId": (type: string), "purchaseName": (type: string), "categoryId": (type: string), "categoryName": (type: string)} """) List<PurchaseCategoryData> classification) { } record AgentRequest(List<IdNameOfPurchase> purchases) { } <T, R> R jsonMessage(UserMessage userMessage, JavaType type, Function<T, R> action) { if (!userMessage.hasSingleText()) throw new IllegalArgumentException("We support only single-text messages"); Content content = userMessage.contents().get(0); if (content instanceof TextContent text) { try { T data = 
objectMapper.readValue(text.text(), type); return action.apply(data); } catch (JsonProcessingException e) { throw new RuntimeException(e); } } else { throw new IllegalArgumentException("Unsupported content type"); } } }
[ "dev.langchain4j.rag.DefaultRetrievalAugmentor.builder", "dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder", "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.rag.content.injector.DefaultContentInjector.builder" ]
[((5182, 7051), 'java.util.stream.IntStream.range'), ((5182, 5279), 'java.util.stream.IntStream.range'), ((5182, 5247), 'java.util.stream.IntStream.range'), ((7335, 7493), 'com.google.common.collect.Lists.partition'), ((7335, 7437), 'com.google.common.collect.Lists.partition'), ((7335, 7383), 'com.google.common.collect.Lists.partition'), ((7530, 7686), 'java.util.stream.Stream.concat'), ((8757, 9121), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 9024), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 8924), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 8893), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((8757, 8845), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((9923, 9963), 'dev.langchain4j.rag.content.injector.DefaultContentInjector.builder'), ((11454, 11722), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11697), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11647), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11593), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11454, 11541), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((11844, 12069), 'dev.langchain4j.service.AiServices.builder'), ((11844, 12044), 'dev.langchain4j.service.AiServices.builder'), ((11844, 12004), 'dev.langchain4j.service.AiServices.builder'), ((11844, 11948), 'dev.langchain4j.service.AiServices.builder'), ((12538, 13421), 'java.util.Optional.ofNullable'), ((12538, 12675), 'java.util.Optional.ofNullable'), ((12538, 12653), 'java.util.Optional.ofNullable'), ((12538, 12623), 'java.util.Optional.ofNullable'), ((12758, 12954), 'java.util.Optional.ofNullable'), ((12758, 12908), 'java.util.Optional.ofNullable'), ((12758, 12853), 'java.util.Optional.ofNullable'), ((13067, 13279), 'java.util.Optional.ofNullable'), 
((13067, 13233), 'java.util.Optional.ofNullable'), ((13067, 13170), 'java.util.Optional.ofNullable')]
package dev.langchain4j.model.qianfan; import dev.langchain4j.internal.Utils; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.language.StreamingLanguageModel; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.qianfan.client.QianfanClient; import dev.langchain4j.model.qianfan.client.QianfanStreamingResponseBuilder; import dev.langchain4j.model.qianfan.client.completion.CompletionRequest; import dev.langchain4j.model.qianfan.client.SyncOrAsyncOrStreaming; import dev.langchain4j.model.qianfan.client.completion.CompletionResponse; import dev.langchain4j.model.qianfan.spi.QianfanStreamingLanguageModelBuilderFactory; import lombok.Builder; import static dev.langchain4j.internal.Utils.getOrDefault; import static dev.langchain4j.spi.ServiceHelper.loadFactories; /** * * see details here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu */ public class QianfanStreamingLanguageModel implements StreamingLanguageModel { private final QianfanClient client; private final String baseUrl; private final Double temperature; private final Double topP; private final String modelName; private final Double penaltyScore; private final Integer maxRetries; private final Integer topK; private final String endpoint; @Builder public QianfanStreamingLanguageModel(String baseUrl, String apiKey, String secretKey, Double temperature, Integer maxRetries, Integer topK, Double topP, String modelName, String endpoint, Double penaltyScore, Boolean logRequests, Boolean logResponses ) { if (Utils.isNullOrBlank(apiKey)||Utils.isNullOrBlank(secretKey)) { throw new IllegalArgumentException(" api key and secret key must be defined. It can be generated here: https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application"); } this.modelName=modelName; this.endpoint=Utils.isNullOrBlank(endpoint)? 
QianfanLanguageModelNameEnum.getEndpoint(modelName):endpoint; if (Utils.isNullOrBlank(this.endpoint) ) { throw new IllegalArgumentException("Qianfan is no such model name. You can see model name here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu"); } this.baseUrl = getOrDefault(baseUrl, "https://aip.baidubce.com"); this.client = QianfanClient.builder() .baseUrl(this.baseUrl) .apiKey(apiKey) .secretKey(secretKey) .logRequests(logRequests) .logStreamingResponses(logResponses) .build(); this.temperature = getOrDefault(temperature, 0.7); this.maxRetries = getOrDefault(maxRetries, 3); this.topP = topP; this.topK = topK; this.penaltyScore = penaltyScore; } @Override public void generate(String prompt, StreamingResponseHandler<String> handler) { CompletionRequest request = CompletionRequest.builder() .prompt(prompt) .topK(topK) .topP(topP) .temperature(temperature) .penaltyScore(penaltyScore) .build(); QianfanStreamingResponseBuilder responseBuilder = new QianfanStreamingResponseBuilder(null); SyncOrAsyncOrStreaming<CompletionResponse> response = client.completion(request, true, endpoint); response.onPartialResponse(partialResponse -> { responseBuilder.append(partialResponse); handle(partialResponse, handler); }) .onComplete(() -> { Response<String> response1 = responseBuilder.build(null); handler.onComplete(response1); }) .onError(handler::onError) .execute(); } private static void handle(CompletionResponse partialResponse, StreamingResponseHandler<String> handler) { String result = partialResponse.getResult(); if (Utils.isNullOrBlank(result)) { return; } handler.onNext(result); } public static QianfanStreamingLanguageModelBuilder builder() { for (QianfanStreamingLanguageModelBuilderFactory factory : loadFactories(QianfanStreamingLanguageModelBuilderFactory.class)) { return factory.get(); } return new QianfanStreamingLanguageModelBuilder(); } public static class QianfanStreamingLanguageModelBuilder { public QianfanStreamingLanguageModelBuilder() { // This is 
public so it can be extended // By default with Lombok it becomes package private } } }
[ "dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder", "dev.langchain4j.model.qianfan.client.QianfanClient.builder" ]
[((2840, 3092), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 3067), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 3014), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 2972), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 2934), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2840, 2902), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((3444, 3670), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3645), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3601), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3559), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3531), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder'), ((3444, 3503), 'dev.langchain4j.model.qianfan.client.completion.CompletionRequest.builder')]
package io.quarkiverse.langchain4j.sample.chatbot; import java.util.function.Supplier; import jakarta.enterprise.context.ApplicationScoped; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.rag.DefaultRetrievalAugmentor; import dev.langchain4j.rag.RetrievalAugmentor; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.store.embedding.EmbeddingStore; @ApplicationScoped public class AugmentorExample implements Supplier<RetrievalAugmentor> { private final EmbeddingStoreContentRetriever retriever; AugmentorExample(EmbeddingStore store, EmbeddingModel model) { retriever = EmbeddingStoreContentRetriever.builder() .embeddingModel(model) .embeddingStore(store) .maxResults(20) .build(); } @Override public RetrievalAugmentor get() { return DefaultRetrievalAugmentor.builder() .contentRetriever(retriever) .build(); } }
[ "dev.langchain4j.rag.DefaultRetrievalAugmentor.builder", "dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder" ]
[((672, 847), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((672, 822), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((672, 790), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((672, 751), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((923, 1028), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((923, 1003), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')]
import dev.langchain4j.data.image.Image; import dev.langchain4j.model.azure.AzureOpenAiImageModel; import dev.langchain4j.model.output.Response; public class AzureOpenAIDallEExample { public static void main(String[] args) { AzureOpenAiImageModel model = AzureOpenAiImageModel.builder() .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT")) .deploymentName(System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME")) .apiKey(System.getenv("AZURE_OPENAI_KEY")) .logRequestsAndResponses(true) .build(); Response<Image> response = model.generate("A coffee mug in Paris, France"); System.out.println(response.toString()); Image image = response.content(); System.out.println("The remote image is here:" + image.url()); } }
[ "dev.langchain4j.model.azure.AzureOpenAiImageModel.builder" ]
[((268, 575), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 550), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 503), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 444), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder'), ((268, 365), 'dev.langchain4j.model.azure.AzureOpenAiImageModel.builder')]
import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.ollama.OllamaStreamingChatModel; import dev.langchain4j.model.output.Response; import org.junit.jupiter.api.Test; import org.testcontainers.containers.GenericContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import java.util.concurrent.CompletableFuture; @Testcontainers class OllamaStreamingChatModelTest { /** * The first time you run this test, it will download a Docker image with Ollama and a model. * It might take a few minutes. * <p> * This test uses modified Ollama Docker images, which already contain models inside them. * All images with pre-packaged models are available here: https://hub.docker.com/repositories/langchain4j * <p> * However, you are not restricted to these images. * You can run any model from https://ollama.ai/library by following these steps: * 1. Run "docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama" * 2. 
Run "docker exec -it ollama ollama run llama2" <- specify the desired model here */ static String MODEL_NAME = "orca-mini"; // try "mistral", "llama2", "codellama" or "phi" static String DOCKER_IMAGE_NAME = "langchain4j/ollama-" + MODEL_NAME + ":latest"; static Integer PORT = 11434; @Container static GenericContainer<?> ollama = new GenericContainer<>(DOCKER_IMAGE_NAME) .withExposedPorts(PORT); @Test void streaming_example() { StreamingChatLanguageModel model = OllamaStreamingChatModel.builder() .baseUrl(String.format("http://%s:%d", ollama.getHost(), ollama.getMappedPort(PORT))) .modelName(MODEL_NAME) .temperature(0.0) .build(); String userMessage = "Write a 100-word poem about Java and AI"; CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(userMessage, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.print(token); } @Override public void onComplete(Response<AiMessage> response) { futureResponse.complete(response); } @Override public void onError(Throwable error) { futureResponse.completeExceptionally(error); } }); futureResponse.join(); } }
[ "dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder" ]
[((1714, 1948), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((1714, 1923), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((1714, 1889), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((1714, 1850), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder')]
package dev.langchain4j.model.openai; import dev.ai4j.openai4j.OpenAiClient; import dev.ai4j.openai4j.image.GenerateImagesRequest; import dev.ai4j.openai4j.image.GenerateImagesResponse; import dev.langchain4j.data.image.Image; import dev.langchain4j.model.image.ImageModel; import dev.langchain4j.model.openai.spi.OpenAiImageModelBuilderFactory; import dev.langchain4j.model.output.Response; import lombok.Builder; import lombok.NonNull; import java.net.Proxy; import java.nio.file.Path; import java.time.Duration; import java.util.List; import java.util.stream.Collectors; import static dev.langchain4j.internal.RetryUtils.withRetry; import static dev.langchain4j.internal.Utils.getOrDefault; import static dev.langchain4j.model.openai.InternalOpenAiHelper.DEFAULT_USER_AGENT; import static dev.langchain4j.model.openai.InternalOpenAiHelper.OPENAI_URL; import static dev.langchain4j.model.openai.OpenAiModelName.DALL_E_2; import static dev.langchain4j.spi.ServiceHelper.loadFactories; import static java.time.Duration.ofSeconds; /** * Represents an OpenAI DALL·E models to generate artistic images. Versions 2 and 3 (default) are supported. */ public class OpenAiImageModel implements ImageModel { private final String modelName; private final String size; private final String quality; private final String style; private final String user; private final String responseFormat; private final OpenAiClient client; private final Integer maxRetries; /** * Instantiates OpenAI DALL·E image processing model. * Find the parameters description <a href="https://platform.openai.com/docs/api-reference/images/create">here</a>. * * @param modelName dall-e-3 is default one * @param persistTo specifies the local path where the generated image will be downloaded to (in case provided). * The URL within <code>dev.ai4j.openai4j.image.GenerateImagesResponse</code> will contain * the URL to local images then. * @param withPersisting generated response will be persisted under <code>java.io.tmpdir</code>. 
* The URL within <code>dev.ai4j.openai4j.image.GenerateImagesResponse</code> will contain * the URL to local images then. */ @Builder @SuppressWarnings("rawtypes") public OpenAiImageModel( String baseUrl, @NonNull String apiKey, String organizationId, String modelName, String size, String quality, String style, String user, String responseFormat, Duration timeout, Integer maxRetries, Proxy proxy, Boolean logRequests, Boolean logResponses, Boolean withPersisting, Path persistTo ) { timeout = getOrDefault(timeout, ofSeconds(60)); OpenAiClient.Builder cBuilder = OpenAiClient .builder() .baseUrl(getOrDefault(baseUrl, OPENAI_URL)) .openAiApiKey(apiKey) .organizationId(organizationId) .callTimeout(timeout) .connectTimeout(timeout) .readTimeout(timeout) .writeTimeout(timeout) .proxy(proxy) .logRequests(getOrDefault(logRequests, false)) .logResponses(getOrDefault(logResponses, false)) .userAgent(DEFAULT_USER_AGENT) .persistTo(persistTo); if (withPersisting != null && withPersisting) { cBuilder.withPersisting(); } this.client = cBuilder.build(); this.maxRetries = getOrDefault(maxRetries, 3); this.modelName = modelName; this.size = size; this.quality = quality; this.style = style; this.user = user; this.responseFormat = responseFormat; } @Override public Response<Image> generate(String prompt) { GenerateImagesRequest request = requestBuilder(prompt).build(); GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute(); return Response.from(fromImageData(response.data().get(0))); } @Override public Response<List<Image>> generate(String prompt, int n) { GenerateImagesRequest request = requestBuilder(prompt).n(n).build(); GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute(); return Response.from( response.data().stream().map(OpenAiImageModel::fromImageData).collect(Collectors.toList()) ); } public static OpenAiImageModelBuilder builder() { for (OpenAiImageModelBuilderFactory 
factory : loadFactories(OpenAiImageModelBuilderFactory.class)) { return factory.get(); } return new OpenAiImageModelBuilder(); } public static class OpenAiImageModelBuilder { public OpenAiImageModelBuilder() { // This is public so it can be extended // By default with Lombok it becomes package private } public OpenAiImageModelBuilder modelName(String modelName) { this.modelName = modelName; return this; } public OpenAiImageModelBuilder modelName(OpenAiImageModelName modelName) { this.modelName = modelName.toString(); return this; } public OpenAiImageModelBuilder withPersisting() { return withPersisting(true); } public OpenAiImageModelBuilder withPersisting(Boolean withPersisting) { this.withPersisting = withPersisting; return this; } } public static OpenAiImageModel withApiKey(String apiKey) { return builder().apiKey(apiKey).build(); } private static Image fromImageData(GenerateImagesResponse.ImageData data) { return Image.builder().url(data.url()).base64Data(data.b64Json()).revisedPrompt(data.revisedPrompt()).build(); } private GenerateImagesRequest.Builder requestBuilder(String prompt) { GenerateImagesRequest.Builder requestBuilder = GenerateImagesRequest .builder() .prompt(prompt) .size(size) .quality(quality) .style(style) .user(user) .responseFormat(responseFormat); if (DALL_E_2.equals(modelName)) { requestBuilder.model(dev.ai4j.openai4j.image.ImageModel.DALL_E_2); } return requestBuilder; } }
[ "dev.langchain4j.data.image.Image.builder" ]
[((6031, 6133), 'dev.langchain4j.data.image.Image.builder'), ((6031, 6125), 'dev.langchain4j.data.image.Image.builder'), ((6031, 6089), 'dev.langchain4j.data.image.Image.builder'), ((6031, 6062), 'dev.langchain4j.data.image.Image.builder')]
package dev.langchain4j.model.openai; import dev.ai4j.openai4j.chat.*; import dev.ai4j.openai4j.shared.Usage; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.agent.tool.ToolParameters; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.image.Image; import dev.langchain4j.data.message.Content; import dev.langchain4j.data.message.SystemMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.data.message.*; import dev.langchain4j.model.output.FinishReason; import dev.langchain4j.model.output.TokenUsage; import java.util.Collection; import java.util.List; import static dev.ai4j.openai4j.chat.ContentType.IMAGE_URL; import static dev.ai4j.openai4j.chat.ContentType.TEXT; import static dev.ai4j.openai4j.chat.ToolType.FUNCTION; import static dev.langchain4j.data.message.AiMessage.aiMessage; import static dev.langchain4j.internal.Exceptions.illegalArgument; import static dev.langchain4j.internal.Utils.isNullOrEmpty; import static dev.langchain4j.model.output.FinishReason.*; import static java.lang.String.format; import static java.util.stream.Collectors.toList; public class InternalOpenAiHelper { static final String OPENAI_URL = "https://api.openai.com/v1"; static final String OPENAI_DEMO_API_KEY = "demo"; static final String OPENAI_DEMO_URL = "http://langchain4j.dev/demo/openai/v1"; static final String DEFAULT_USER_AGENT = "langchain4j-openai"; public static List<Message> toOpenAiMessages(List<ChatMessage> messages) { return messages.stream() .map(InternalOpenAiHelper::toOpenAiMessage) .collect(toList()); } public static Message toOpenAiMessage(ChatMessage message) { if (message instanceof SystemMessage) { return dev.ai4j.openai4j.chat.SystemMessage.from(((SystemMessage) message).text()); } if (message instanceof UserMessage) { UserMessage userMessage = (UserMessage) message; if (userMessage.hasSingleText()) { return dev.ai4j.openai4j.chat.UserMessage.builder() .content(userMessage.text()) 
.name(userMessage.name()) .build(); } else { return dev.ai4j.openai4j.chat.UserMessage.builder() .content(userMessage.contents().stream() .map(InternalOpenAiHelper::toOpenAiContent) .collect(toList())) .name(userMessage.name()) .build(); } } if (message instanceof AiMessage) { AiMessage aiMessage = (AiMessage) message; if (!aiMessage.hasToolExecutionRequests()) { return AssistantMessage.from(aiMessage.text()); } ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0); if (toolExecutionRequest.id() == null) { FunctionCall functionCall = FunctionCall.builder() .name(toolExecutionRequest.name()) .arguments(toolExecutionRequest.arguments()) .build(); return AssistantMessage.builder() .functionCall(functionCall) .build(); } List<ToolCall> toolCalls = aiMessage.toolExecutionRequests().stream() .map(it -> ToolCall.builder() .id(it.id()) .type(FUNCTION) .function(FunctionCall.builder() .name(it.name()) .arguments(it.arguments()) .build()) .build()) .collect(toList()); return AssistantMessage.builder() .toolCalls(toolCalls) .build(); } if (message instanceof ToolExecutionResultMessage) { ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) message; if (toolExecutionResultMessage.id() == null) { return FunctionMessage.from(toolExecutionResultMessage.toolName(), toolExecutionResultMessage.text()); } return ToolMessage.from(toolExecutionResultMessage.id(), toolExecutionResultMessage.text()); } throw illegalArgument("Unknown message type: " + message.type()); } private static dev.ai4j.openai4j.chat.Content toOpenAiContent(Content content) { if (content instanceof TextContent) { return toOpenAiContent((TextContent) content); } else if (content instanceof ImageContent) { return toOpenAiContent((ImageContent) content); } else { throw illegalArgument("Unknown content type: " + content); } } private static dev.ai4j.openai4j.chat.Content toOpenAiContent(TextContent content) { return dev.ai4j.openai4j.chat.Content.builder() 
.type(TEXT) .text(content.text()) .build(); } private static dev.ai4j.openai4j.chat.Content toOpenAiContent(ImageContent content) { return dev.ai4j.openai4j.chat.Content.builder() .type(IMAGE_URL) .imageUrl(ImageUrl.builder() .url(toUrl(content.image())) .detail(toDetail(content.detailLevel())) .build()) .build(); } private static String toUrl(Image image) { if (image.url() != null) { return image.url().toString(); } return format("data:%s;base64,%s", image.mimeType(), image.base64Data()); } private static ImageDetail toDetail(ImageContent.DetailLevel detailLevel) { if (detailLevel == null) { return null; } return ImageDetail.valueOf(detailLevel.name()); } public static List<Tool> toTools(Collection<ToolSpecification> toolSpecifications) { return toolSpecifications.stream() .map(InternalOpenAiHelper::toTool) .collect(toList()); } private static Tool toTool(ToolSpecification toolSpecification) { Function function = Function.builder() .name(toolSpecification.name()) .description(toolSpecification.description()) .parameters(toOpenAiParameters(toolSpecification.parameters())) .build(); return Tool.from(function); } /** * @deprecated Functions are deprecated by OpenAI, use {@link #toTools(Collection)} instead */ @Deprecated public static List<Function> toFunctions(Collection<ToolSpecification> toolSpecifications) { return toolSpecifications.stream() .map(InternalOpenAiHelper::toFunction) .collect(toList()); } /** * @deprecated Functions are deprecated by OpenAI, use {@link #toTool(ToolSpecification)} ()} instead */ @Deprecated private static Function toFunction(ToolSpecification toolSpecification) { return Function.builder() .name(toolSpecification.name()) .description(toolSpecification.description()) .parameters(toOpenAiParameters(toolSpecification.parameters())) .build(); } private static dev.ai4j.openai4j.chat.Parameters toOpenAiParameters(ToolParameters toolParameters) { if (toolParameters == null) { return dev.ai4j.openai4j.chat.Parameters.builder().build(); } 
return dev.ai4j.openai4j.chat.Parameters.builder() .properties(toolParameters.properties()) .required(toolParameters.required()) .build(); } public static AiMessage aiMessageFrom(ChatCompletionResponse response) { AssistantMessage assistantMessage = response.choices().get(0).message(); List<ToolCall> toolCalls = assistantMessage.toolCalls(); if (!isNullOrEmpty(toolCalls)) { List<ToolExecutionRequest> toolExecutionRequests = toolCalls.stream() .filter(toolCall -> toolCall.type() == FUNCTION) .map(InternalOpenAiHelper::toToolExecutionRequest) .collect(toList()); return aiMessage(toolExecutionRequests); } FunctionCall functionCall = assistantMessage.functionCall(); if (functionCall != null) { ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder() .name(functionCall.name()) .arguments(functionCall.arguments()) .build(); return aiMessage(toolExecutionRequest); } return aiMessage(assistantMessage.content()); } private static ToolExecutionRequest toToolExecutionRequest(ToolCall toolCall) { FunctionCall functionCall = toolCall.function(); return ToolExecutionRequest.builder() .id(toolCall.id()) .name(functionCall.name()) .arguments(functionCall.arguments()) .build(); } public static TokenUsage tokenUsageFrom(Usage openAiUsage) { if (openAiUsage == null) { return null; } return new TokenUsage( openAiUsage.promptTokens(), openAiUsage.completionTokens(), openAiUsage.totalTokens() ); } public static FinishReason finishReasonFrom(String openAiFinishReason) { if (openAiFinishReason == null) { return null; } switch (openAiFinishReason) { case "stop": return STOP; case "length": return LENGTH; case "tool_calls": case "function_call": return TOOL_EXECUTION; case "content_filter": return CONTENT_FILTER; default: return null; } } }
[ "dev.langchain4j.agent.tool.ToolExecutionRequest.builder", "dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests" ]
[((2947, 2987), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3462, 3939), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3462, 3900), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((3462, 3504), 'dev.langchain4j.data.message.AiMessage.aiMessage.toolExecutionRequests'), ((8748, 8911), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((8748, 8882), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((8748, 8825), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9379), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9354), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9301), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((9193, 9258), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')]
package com.johnsosoka.selfdiscover.config; import com.johnsosoka.selfdiscover.agent.SelfDiscovery; import com.johnsosoka.selfdiscover.agent.Solving; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.service.AiServices; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class SelfDiscoveryAgentConfig { /** * Provisions an AIService `SelfDiscoverAgent` which contains the prompts * which implement the SELF-DISCOVER algorithm to self-compose reasoning structures. * @param chatLanguageModel * @return */ @Bean public SelfDiscovery selfDiscoveryAgent(ChatLanguageModel chatLanguageModel) { return AiServices.builder(SelfDiscovery.class) .chatLanguageModel(chatLanguageModel) .build(); } /** * Provisions an AIService `SolverAgent` which contains the prompts for solving a given task. * The self-composed reasoning structures generated by the SelfDiscover Agent are used to * solve the task. * * @param chatLanguageModel * @return */ @Bean public Solving solverAgent(ChatLanguageModel chatLanguageModel) { return AiServices.builder(Solving.class) .chatLanguageModel(chatLanguageModel) .build(); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((755, 873), 'dev.langchain4j.service.AiServices.builder'), ((755, 848), 'dev.langchain4j.service.AiServices.builder'), ((1263, 1375), 'dev.langchain4j.service.AiServices.builder'), ((1263, 1350), 'dev.langchain4j.service.AiServices.builder')]
package dev.langchain4j.model.qianfan; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.internal.Utils; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.qianfan.client.QianfanClient; import dev.langchain4j.model.qianfan.client.QianfanStreamingResponseBuilder; import dev.langchain4j.model.qianfan.client.SyncOrAsyncOrStreaming; import dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest; import dev.langchain4j.model.qianfan.client.chat.ChatCompletionResponse; import dev.langchain4j.model.qianfan.spi.QianfanStreamingChatModelBuilderFactory; import lombok.Builder; import static dev.langchain4j.model.qianfan.InternalQianfanHelper.*; import java.util.List; import static dev.langchain4j.internal.Utils.getOrDefault; import static dev.langchain4j.spi.ServiceHelper.loadFactories; /** * * see details here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu */ public class QianfanStreamingChatModel implements StreamingChatLanguageModel { private final QianfanClient client; private final String baseUrl; private final Double temperature; private final Double topP; private final String modelName; private final String endpoint; private final Double penaltyScore; private final String responseFormat; @Builder public QianfanStreamingChatModel(String baseUrl, String apiKey, String secretKey, Double temperature, Double topP, String modelName, String endpoint, String responseFormat, Double penaltyScore, Boolean logRequests, Boolean logResponses ) { if (Utils.isNullOrBlank(apiKey)||Utils.isNullOrBlank(secretKey)) { throw new IllegalArgumentException(" api key and secret key must be defined. 
It can be generated here: https://console.bce.baidu.com/qianfan/ais/console/applicationConsole/application"); } this.modelName=modelName; this.endpoint=Utils.isNullOrBlank(endpoint)? QianfanChatModelNameEnum.getEndpoint(modelName):endpoint; if (Utils.isNullOrBlank(this.endpoint)) { throw new IllegalArgumentException("Qianfan is no such model name. You can see model name here: https://cloud.baidu.com/doc/WENXINWORKSHOP/s/Nlks5zkzu"); } this.baseUrl = getOrDefault(baseUrl, "https://aip.baidubce.com"); this.client = QianfanClient.builder() .baseUrl(this.baseUrl) .apiKey(apiKey) .secretKey(secretKey) .logRequests(logRequests) .logStreamingResponses(logResponses) .build(); this.temperature = getOrDefault(temperature, 0.7); this.topP = topP; this.penaltyScore = penaltyScore; this.responseFormat = responseFormat; } @Override public void generate(List<ChatMessage> messages, StreamingResponseHandler<AiMessage> handler) { generate(messages, null, null, handler); } @Override public void generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications, StreamingResponseHandler<AiMessage> handler) { generate(messages, toolSpecifications, null, handler); } @Override public void generate(List<ChatMessage> messages, ToolSpecification toolSpecification, StreamingResponseHandler<AiMessage> handler) { throw new RuntimeException("Not supported"); } private void generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications, ToolSpecification toolThatMustBeExecuted, StreamingResponseHandler<AiMessage> handler ) { ChatCompletionRequest.Builder builder = ChatCompletionRequest.builder() .messages(InternalQianfanHelper.toOpenAiMessages(messages)) .temperature(temperature) .topP(topP) .system(getSystemMessage(messages)) .responseFormat(responseFormat) .penaltyScore(penaltyScore); if (toolSpecifications != null && !toolSpecifications.isEmpty()) { builder.functions(InternalQianfanHelper.toFunctions(toolSpecifications)); } ChatCompletionRequest request = 
builder.build(); QianfanStreamingResponseBuilder responseBuilder = new QianfanStreamingResponseBuilder(null); SyncOrAsyncOrStreaming<ChatCompletionResponse> response = client.chatCompletion(request, endpoint); response.onPartialResponse(partialResponse -> { responseBuilder.append(partialResponse); handle(partialResponse, handler); }) .onComplete(() -> { Response<AiMessage> messageResponse = responseBuilder.build(); handler.onComplete(messageResponse); }) .onError(handler::onError ) .execute(); } private static void handle(ChatCompletionResponse partialResponse, StreamingResponseHandler<AiMessage> handler) { String result = partialResponse.getResult(); if (Utils.isNullOrBlank(result)) { return; } handler.onNext(result); } public static QianfanStreamingChatModelBuilder builder() { for (QianfanStreamingChatModelBuilderFactory factory : loadFactories(QianfanStreamingChatModelBuilderFactory.class)) { return factory.get(); } return new QianfanStreamingChatModelBuilder(); } public static class QianfanStreamingChatModelBuilder { public QianfanStreamingChatModelBuilder() { // This is public so it can be extended // By default with Lombok it becomes package private } } }
[ "dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder", "dev.langchain4j.model.qianfan.client.QianfanClient.builder" ]
[((2941, 3193), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3168), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3115), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3073), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3035), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((2941, 3003), 'dev.langchain4j.model.qianfan.client.QianfanClient.builder'), ((4305, 4626), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4582), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4534), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4482), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4454), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder'), ((4305, 4412), 'dev.langchain4j.model.qianfan.client.chat.ChatCompletionRequest.builder')]
package dev.langchain4j.data.message; import dev.langchain4j.data.image.Image; import java.net.URI; import java.util.Objects; import static dev.langchain4j.data.message.ContentType.IMAGE; import static dev.langchain4j.data.message.ImageContent.DetailLevel.LOW; import static dev.langchain4j.internal.ValidationUtils.ensureNotBlank; import static dev.langchain4j.internal.ValidationUtils.ensureNotNull; /** * Represents an image with a DetailLevel. */ public class ImageContent implements Content { /** * The detail level of an {@code Image}. */ public enum DetailLevel { /** * Low detail. */ LOW, /** * High detail. */ HIGH, /** * Auto detail. */ AUTO } private final Image image; private final DetailLevel detailLevel; /** * Create a new {@link ImageContent} from the given url. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param url the url of the image. */ public ImageContent(URI url) { this(url, LOW); } /** * Create a new {@link ImageContent} from the given url. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param url the url of the image. */ public ImageContent(String url) { this(URI.create(url)); } /** * Create a new {@link ImageContent} from the given url and detail level. * * @param url the url of the image. * @param detailLevel the detail level of the image. */ public ImageContent(URI url, DetailLevel detailLevel) { this(Image.builder() .url(ensureNotNull(url, "url")) .build(), detailLevel); } /** * Create a new {@link ImageContent} from the given url and detail level. * * @param url the url of the image. * @param detailLevel the detail level of the image. */ public ImageContent(String url, DetailLevel detailLevel) { this(URI.create(url), detailLevel); } /** * Create a new {@link ImageContent} from the given base64 data and mime type. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param base64Data the base64 data of the image. * @param mimeType the mime type of the image. 
*/ public ImageContent(String base64Data, String mimeType) { this(base64Data, mimeType, LOW); } /** * Create a new {@link ImageContent} from the given base64 data and mime type. * * @param base64Data the base64 data of the image. * @param mimeType the mime type of the image. * @param detailLevel the detail level of the image. */ public ImageContent(String base64Data, String mimeType, DetailLevel detailLevel) { this(Image.builder() .base64Data(ensureNotBlank(base64Data, "base64Data")) .mimeType(ensureNotBlank(mimeType, "mimeType")) .build(), detailLevel); } /** * Create a new {@link ImageContent} from the given image. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param image the image. */ public ImageContent(Image image) { this(image, LOW); } /** * Create a new {@link ImageContent} from the given image. * * @param image the image. * @param detailLevel the detail level of the image. */ public ImageContent(Image image, DetailLevel detailLevel) { this.image = ensureNotNull(image, "image"); this.detailLevel = ensureNotNull(detailLevel, "detailLevel"); } /** * Get the {@code Image}. * @return the {@code Image}. */ public Image image() { return image; } /** * Get the {@code DetailLevel}. * @return the {@code DetailLevel}. */ public DetailLevel detailLevel() { return detailLevel; } @Override public ContentType type() { return IMAGE; } @Override public boolean equals(Object o) { if (this == o) return true; if (o == null || getClass() != o.getClass()) return false; ImageContent that = (ImageContent) o; return Objects.equals(this.image, that.image) && Objects.equals(this.detailLevel, that.detailLevel); } @Override public int hashCode() { return Objects.hash(image, detailLevel); } @Override public String toString() { return "ImageContent {" + " image = " + image + " detailLevel = " + detailLevel + " }"; } /** * Create a new {@link ImageContent} from the given url. 
* * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param url the url of the image. * @return the new {@link ImageContent}. */ public static ImageContent from(URI url) { return new ImageContent(url); } /** * Create a new {@link ImageContent} from the given url. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param url the url of the image. * @return the new {@link ImageContent}. */ public static ImageContent from(String url) { return new ImageContent(url); } /** * Create a new {@link ImageContent} from the given url and detail level. * * @param url the url of the image. * @param detailLevel the detail level of the image. * @return the new {@link ImageContent}. */ public static ImageContent from(URI url, DetailLevel detailLevel) { return new ImageContent(url, detailLevel); } /** * Create a new {@link ImageContent} from the given url and detail level. * * @param url the url of the image. * @param detailLevel the detail level of the image. * @return the new {@link ImageContent}. */ public static ImageContent from(String url, DetailLevel detailLevel) { return new ImageContent(url, detailLevel); } /** * Create a new {@link ImageContent} from the given base64 data and mime type. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param base64Data the base64 data of the image. * @param mimeType the mime type of the image. * @return the new {@link ImageContent}. */ public static ImageContent from(String base64Data, String mimeType) { return new ImageContent(base64Data, mimeType); } /** * Create a new {@link ImageContent} from the given base64 data and mime type. * * @param base64Data the base64 data of the image. * @param mimeType the mime type of the image. * @param detailLevel the detail level of the image. * @return the new {@link ImageContent}. 
*/ public static ImageContent from(String base64Data, String mimeType, DetailLevel detailLevel) { return new ImageContent(base64Data, mimeType, detailLevel); } /** * Create a new {@link ImageContent} from the given image. * * <p>The image will be created with {@code DetailLevel.LOW} detail.</p> * * @param image the image. * @return the new {@link ImageContent}. */ public static ImageContent from(Image image) { return new ImageContent(image); } /** * Create a new {@link ImageContent} from the given image. * * @param image the image. * @param detailLevel the detail level of the image. * @return the new {@link ImageContent}. */ public static ImageContent from(Image image, DetailLevel detailLevel) { return new ImageContent(image, detailLevel); } }
[ "dev.langchain4j.data.image.Image.builder" ]
[((1691, 1779), 'dev.langchain4j.data.image.Image.builder'), ((1691, 1754), 'dev.langchain4j.data.image.Image.builder'), ((2889, 3063), 'dev.langchain4j.data.image.Image.builder'), ((2889, 3038), 'dev.langchain4j.data.image.Image.builder'), ((2889, 2974), 'dev.langchain4j.data.image.Image.builder')]
package dev.langchain4j.model.qianfan.client; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.internal.Utils; import dev.langchain4j.model.Tokenizer; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.output.TokenUsage; import dev.langchain4j.model.qianfan.InternalQianfanHelper; import dev.langchain4j.model.qianfan.client.chat.ChatCompletionResponse; import dev.langchain4j.model.qianfan.client.chat.FunctionCall; import dev.langchain4j.model.qianfan.client.completion.CompletionResponse; import java.util.List; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import static java.util.Collections.singletonList; import static java.util.stream.Collectors.toList; /** * This class needs to be thread safe because it is called when a streaming result comes back * and there is no guarantee that this thread will be the same as the one that initiated the request, * in fact it almost certainly won't be. 
*/ public class QianfanStreamingResponseBuilder { private final StringBuffer contentBuilder = new StringBuffer(); private final StringBuffer toolNameBuilder = new StringBuffer(); private final StringBuffer toolArgumentsBuilder = new StringBuffer(); private final Map<Integer, ToolExecutionRequestBuilder> indexToToolExecutionRequestBuilder = new ConcurrentHashMap<>(); private volatile String finishReason; private Integer inputTokenCount; private Integer outputTokenCount; public QianfanStreamingResponseBuilder(Integer inputTokenCount) { this.inputTokenCount = inputTokenCount; } public void append(ChatCompletionResponse partialResponse) { if (partialResponse == null) { return; } String finishReason = partialResponse.getFinishReason(); if (finishReason != null) { this.finishReason = finishReason; } String content = partialResponse.getResult(); if (content != null) { contentBuilder.append(content); } Usage usage = partialResponse.getUsage(); if (usage!=null){ inputTokenCount = usage.promptTokens(); outputTokenCount = usage.completionTokens(); } FunctionCall functionCall = partialResponse.getFunctionCall(); if (functionCall != null) { if (functionCall.name() != null) { toolNameBuilder.append(functionCall.name()); } if (functionCall.arguments() != null) { toolArgumentsBuilder.append(functionCall.arguments()); } } } public void append(CompletionResponse partialResponse) { if (partialResponse == null) { return; } String result = partialResponse.getResult(); if (Utils.isNullOrBlank(result)) { return; } String finishReason = partialResponse.getFinishReason(); if (finishReason != null) { this.finishReason = finishReason; } String token = partialResponse.getResult(); if (token != null) { contentBuilder.append(token); } } public Response<AiMessage> build(Tokenizer tokenizer, boolean forcefulToolExecution) { String content = contentBuilder.toString(); if (!content.isEmpty()) { return Response.from( AiMessage.from(content), tokenUsage(content, tokenizer), 
InternalQianfanHelper.finishReasonFrom(finishReason) ); } String toolName = toolNameBuilder.toString(); if (!toolName.isEmpty()) { ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder() .name(toolName) .arguments(toolArgumentsBuilder.toString()) .build(); return Response.from( AiMessage.from(toolExecutionRequest), tokenUsage(singletonList(toolExecutionRequest), tokenizer, forcefulToolExecution), InternalQianfanHelper.finishReasonFrom(finishReason) ); } if (!indexToToolExecutionRequestBuilder.isEmpty()) { List<ToolExecutionRequest> toolExecutionRequests = indexToToolExecutionRequestBuilder.values().stream() .map(it -> ToolExecutionRequest.builder() .id(it.idBuilder.toString()) .name(it.nameBuilder.toString()) .arguments(it.argumentsBuilder.toString()) .build()) .collect(toList()); return Response.from( AiMessage.from(toolExecutionRequests), tokenUsage(toolExecutionRequests, tokenizer, forcefulToolExecution), InternalQianfanHelper.finishReasonFrom(finishReason) ); } return null; } public Response<String> build(Tokenizer tokenizer) { String content = contentBuilder.toString(); if (!content.isEmpty()) { return Response.from( content, tokenUsage(content, tokenizer), InternalQianfanHelper.finishReasonFrom(finishReason) ); } return null; } private TokenUsage tokenUsage(String content, Tokenizer tokenizer) { if (tokenizer == null) { return null; } int outputTokenCount = tokenizer.estimateTokenCountInText(content); return new TokenUsage(inputTokenCount, outputTokenCount); } private TokenUsage tokenUsage(List<ToolExecutionRequest> toolExecutionRequests, Tokenizer tokenizer, boolean forcefulToolExecution) { if (tokenizer == null) { return null; } int outputTokenCount = 0; if (forcefulToolExecution) { // Qianfan calculates output tokens differently when tool is executed forcefully for (ToolExecutionRequest toolExecutionRequest : toolExecutionRequests) { outputTokenCount += tokenizer.estimateTokenCountInForcefulToolExecutionRequest(toolExecutionRequest); } } 
else { outputTokenCount = tokenizer.estimateTokenCountInToolExecutionRequests(toolExecutionRequests); } return new TokenUsage(inputTokenCount, outputTokenCount); } private static class ToolExecutionRequestBuilder { private final StringBuffer idBuilder = new StringBuffer(); private final StringBuffer nameBuilder = new StringBuffer(); private final StringBuffer argumentsBuilder = new StringBuffer(); } public Response<AiMessage> build() { String content = contentBuilder.toString(); if (!content.isEmpty()) { return Response.from( AiMessage.from(content), new TokenUsage(inputTokenCount, outputTokenCount), InternalQianfanHelper.finishReasonFrom(finishReason) ); } String toolName = toolNameBuilder.toString(); if (!toolName.isEmpty()) { ToolExecutionRequest toolExecutionRequest = ToolExecutionRequest.builder() .name(toolName) .arguments(toolArgumentsBuilder.toString()) .build(); return Response.from( AiMessage.from(toolExecutionRequest), new TokenUsage(inputTokenCount, outputTokenCount), InternalQianfanHelper.finishReasonFrom(finishReason) ); } if (!indexToToolExecutionRequestBuilder.isEmpty()) { List<ToolExecutionRequest> toolExecutionRequests = indexToToolExecutionRequestBuilder.values().stream() .map(it -> ToolExecutionRequest.builder() .id(it.idBuilder.toString()) .name(it.nameBuilder.toString()) .arguments(it.argumentsBuilder.toString()) .build()) .collect(toList()); return Response.from( AiMessage.from(toolExecutionRequests), new TokenUsage(inputTokenCount, outputTokenCount), InternalQianfanHelper.finishReasonFrom(finishReason) ); } return null; } }
[ "dev.langchain4j.agent.tool.ToolExecutionRequest.builder" ]
[((3800, 3959), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3800, 3930), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((3800, 3866), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4463, 4719), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4463, 4682), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4463, 4611), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((4463, 4550), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7336, 7495), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7336, 7466), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7336, 7402), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7967, 8223), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7967, 8186), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7967, 8115), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((7967, 8054), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')]
package org.agoncal.fascicle.langchain4j.vectordb.infinispan; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.infinispan.InfinispanEmbeddingStore; import java.util.List; // tag::adocSkip[] /** * @author Antonio Goncalves * http://www.antoniogoncalves.org * -- */ // end::adocSkip[] public class MusicianService { public static void main(String[] args) { MusicianService musicianService = new MusicianService(); musicianService.useQdrantToStoreEmbeddings(); } public void useQdrantToStoreEmbeddings() { System.out.println("### useQdrantToStoreEmbeddings"); // tag::adocSnippet[] EmbeddingStore<TextSegment> embeddingStore = InfinispanEmbeddingStore.builder() .build(); EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); TextSegment segment1 = TextSegment.from("I've been to France twice."); Embedding embedding1 = embeddingModel.embed(segment1).content(); embeddingStore.add(embedding1, segment1); TextSegment segment2 = TextSegment.from("New Delhi is the capital of India."); Embedding embedding2 = embeddingModel.embed(segment2).content(); embeddingStore.add(embedding2, segment2); Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content(); List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1); EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0); System.out.println(embeddingMatch.score()); System.out.println(embeddingMatch.embedded().text()); // end::adocSnippet[] } }
[ "dev.langchain4j.store.embedding.infinispan.InfinispanEmbeddingStore.builder" ]
[((989, 1040), 'dev.langchain4j.store.embedding.infinispan.InfinispanEmbeddingStore.builder')]
package org.agoncal.fascicle.langchain4j.vectordb.chroma; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore; import java.util.List; // tag::adocSkip[] /** * @author Antonio Goncalves * http://www.antoniogoncalves.org * -- */ // end::adocSkip[] public class MusicianService { public static void main(String[] args) { MusicianService musicianService = new MusicianService(); musicianService.useQdrantToStoreEmbeddings(); } public void useQdrantToStoreEmbeddings() { System.out.println("### useQdrantToStoreEmbeddings"); // tag::adocSnippet[] EmbeddingStore<TextSegment> embeddingStore = ChromaEmbeddingStore.builder() .build(); EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); TextSegment segment1 = TextSegment.from("I've been to France twice."); Embedding embedding1 = embeddingModel.embed(segment1).content(); embeddingStore.add(embedding1, segment1); TextSegment segment2 = TextSegment.from("New Delhi is the capital of India."); Embedding embedding2 = embeddingModel.embed(segment2).content(); embeddingStore.add(embedding2, segment2); Embedding queryEmbedding = embeddingModel.embed("Did you ever travel abroad?").content(); List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1); EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0); System.out.println(embeddingMatch.score()); System.out.println(embeddingMatch.embedded().text()); // end::adocSnippet[] } }
[ "dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder" ]
[((977, 1024), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder')]
package com.kchandrakant; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.structured.StructuredPromptProcessor; import dev.langchain4j.model.openai.OpenAiChatModel; import java.util.List; import static java.time.Duration.ofSeconds; import static java.util.Arrays.asList; public class StructuredPrompt { @dev.langchain4j.model.input.structured.StructuredPrompt({ "Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}.", "Structure your answer in the following way:", "Recipe name: ...", "Description: ...", "Preparation time: ...", "Required ingredients:", "- ...", "- ...", "Instructions:", "- ...", "- ..." }) static class CreateRecipePrompt { String dish; List<String> ingredients; CreateRecipePrompt(String dish, List<String> ingredients) { this.dish = dish; this.ingredients = ingredients; } } public static void main(String[] args) { ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.OPENAI_API_KEY) .timeout(ofSeconds(60)) .build(); CreateRecipePrompt createRecipePrompt = new CreateRecipePrompt( "salad", asList("cucumber", "tomato", "feta", "onion", "olives") ); Prompt prompt = StructuredPromptProcessor.toPrompt(createRecipePrompt); String recipe = model.generate(prompt.text()); System.out.println(recipe); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1201, 1339), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1201, 1314), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1201, 1274), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package io.quarkiverse.langchain4j.bam.deployment; import static com.github.tomakehurst.wiremock.core.WireMockConfiguration.options; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertThrowsExactly; import java.util.List; import jakarta.inject.Inject; import jakarta.inject.Singleton; import org.jboss.shrinkwrap.api.ShrinkWrap; import org.jboss.shrinkwrap.api.spec.JavaArchive; import org.junit.jupiter.api.AfterAll; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import com.fasterxml.jackson.databind.ObjectMapper; import com.github.tomakehurst.wiremock.WireMockServer; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.output.FinishReason; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.output.TokenUsage; import dev.langchain4j.service.SystemMessage; import dev.langchain4j.service.UserMessage; import io.quarkiverse.langchain4j.RegisterAiService; import io.quarkiverse.langchain4j.bam.BamRestApi; import io.quarkiverse.langchain4j.bam.Message; import io.quarkiverse.langchain4j.bam.Parameters; import io.quarkiverse.langchain4j.bam.TextGenerationRequest; import io.quarkiverse.langchain4j.bam.runtime.config.LangChain4jBamConfig; import io.quarkus.test.QuarkusUnitTest; public class AiChatServiceTest { static WireMockServer wireMockServer; static ObjectMapper mapper; static WireMockUtil mockServers; @RegisterExtension static QuarkusUnitTest unitTest = new QuarkusUnitTest() .overrideRuntimeConfigKey("quarkus.langchain4j.bam.base-url", WireMockUtil.URL) .overrideRuntimeConfigKey("quarkus.langchain4j.bam.api-key", WireMockUtil.API_KEY) .setArchiveProducer(() -> ShrinkWrap.create(JavaArchive.class).addClass(WireMockUtil.class)); @BeforeAll static void beforeAll() { wireMockServer 
= new WireMockServer(options().port(WireMockUtil.PORT)); wireMockServer.start(); mapper = BamRestApi.objectMapper(new ObjectMapper()); mockServers = new WireMockUtil(wireMockServer); } @AfterAll static void afterAll() { wireMockServer.stop(); } @RegisterAiService @Singleton interface NewAIService { @SystemMessage("This is a systemMessage") @UserMessage("This is a userMessage {text}") String chat(String text); } @Inject NewAIService service; @Inject ChatLanguageModel chatModel; @Inject LangChain4jBamConfig langchain4jBamConfig; @Test void chat() throws Exception { var config = langchain4jBamConfig.defaultConfig(); var modelId = config.chatModel().modelId(); var parameters = Parameters.builder() .decodingMethod(config.chatModel().decodingMethod()) .temperature(config.chatModel().temperature()) .minNewTokens(config.chatModel().minNewTokens()) .maxNewTokens(config.chatModel().maxNewTokens()) .build(); List<Message> messages = List.of( new Message("system", "This is a systemMessage"), new Message("user", "This is a userMessage Hello")); var body = new TextGenerationRequest(modelId, messages, parameters); mockServers .mockBuilder(WireMockUtil.URL_CHAT_API, 200) .body(mapper.writeValueAsString(body)) .response(""" { "results": [ { "generated_token_count": 20, "input_token_count": 146, "stop_reason": "max_tokens", "seed": 40268626, "generated_text": "AI Response" } ] } """) .build(); assertEquals("AI Response", service.chat("Hello")); } @Test void chat_test_generate_1() throws Exception { var config = langchain4jBamConfig.defaultConfig(); var modelId = config.chatModel().modelId(); var parameters = Parameters.builder() .decodingMethod(config.chatModel().decodingMethod()) .temperature(config.chatModel().temperature()) .minNewTokens(config.chatModel().minNewTokens()) .maxNewTokens(config.chatModel().maxNewTokens()) .build(); List<Message> messages = List.of( new Message("user", "Hello")); var body = new TextGenerationRequest(modelId, messages, parameters); mockServers 
.mockBuilder(WireMockUtil.URL_CHAT_API, 200) .body(mapper.writeValueAsString(body)) .response(""" { "results": [ { "generated_token_count": 20, "input_token_count": 146, "stop_reason": "max_tokens", "seed": 40268626, "generated_text": "AI Response" } ] } """) .build(); assertEquals("AI Response", chatModel.generate("Hello")); } @Test void chat_test_generate_2() throws Exception { var config = langchain4jBamConfig.defaultConfig(); var modelId = config.chatModel().modelId(); var parameters = Parameters.builder() .decodingMethod(config.chatModel().decodingMethod()) .temperature(config.chatModel().temperature()) .minNewTokens(config.chatModel().minNewTokens()) .maxNewTokens(config.chatModel().maxNewTokens()) .build(); List<Message> messages = List.of( new Message("system", "This is a systemMessage"), new Message("user", "This is a userMessage"), new Message("assistant", "This is a assistantMessage")); var body = new TextGenerationRequest(modelId, messages, parameters); mockServers .mockBuilder(WireMockUtil.URL_CHAT_API, 200) .body(mapper.writeValueAsString(body)) .response(""" { "results": [ { "generated_token_count": 20, "input_token_count": 146, "stop_reason": "max_tokens", "seed": 40268626, "generated_text": "AI Response" } ] } """) .build(); var expected = Response.from(AiMessage.from("AI Response"), new TokenUsage(146, 20, 166), FinishReason.LENGTH); assertEquals(expected, chatModel.generate(List.of( new dev.langchain4j.data.message.SystemMessage("This is a systemMessage"), new dev.langchain4j.data.message.UserMessage("This is a userMessage"), new dev.langchain4j.data.message.AiMessage("This is a assistantMessage")))); assertEquals(expected, chatModel.generate( new dev.langchain4j.data.message.SystemMessage("This is a systemMessage"), new dev.langchain4j.data.message.UserMessage("This is a userMessage"), new dev.langchain4j.data.message.AiMessage("This is a assistantMessage"))); } @Test void chat_test_tool_specification() throws Exception { assertThrowsExactly( 
IllegalArgumentException.class, () -> chatModel.generate(List.of(), ToolSpecification.builder().build())); assertThrowsExactly( IllegalArgumentException.class, () -> chatModel.generate(List.of(), List.of(ToolSpecification.builder().build()))); } }
[ "dev.langchain4j.agent.tool.ToolSpecification.builder" ]
[((1945, 2010), 'org.jboss.shrinkwrap.api.ShrinkWrap.create'), ((2915, 3222), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3197), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3132), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3067), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((2915, 3004), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4779), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4754), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4689), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4624), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((4472, 4561), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6254), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6229), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6164), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6099), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((5947, 6036), 'io.quarkiverse.langchain4j.bam.Parameters.builder'), ((8283, 8318), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((8460, 8495), 'dev.langchain4j.agent.tool.ToolSpecification.builder')]
package ai.equity.salt; import dev.langchain4j.model.openai.OpenAiLanguageModel; import dev.langchain4j.model.language.LanguageModel; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import static org.junit.Assert.assertThrows; class OpenAiApiTest { private static final String FAKE_API_KEY = "asdfghjkl"; LanguageModel modelNoApiKey = OpenAiLanguageModel.builder() .apiKey(FAKE_API_KEY) .logRequests(true) .logResponses(true) .build(); @Test void testExceptionMessage() { String prompt = "What is the capital of Germany?"; Exception exception = assertThrows(RuntimeException.class, () -> { modelNoApiKey.generate(prompt); }); String expectedMessage = "Incorrect API key provided: " + FAKE_API_KEY + ". You can find your API key at https://platform.openai.com/account/api-keys."; String actualMessage = exception.getMessage(); Assertions.assertTrue(actualMessage.contains(expectedMessage)); } }
[ "dev.langchain4j.model.openai.OpenAiLanguageModel.builder" ]
[((377, 524), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 503), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 471), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 440), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder')]
package com.exoreaction.quadim; import static java.time.Duration.ofSeconds; import com.exoreaction.quadim.service.ApiKeys; import com.fasterxml.jackson.core.json.JsonReadFeature; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiModelName; import dev.langchain4j.model.output.structured.Description; import dev.langchain4j.service.AiServices; import dev.langchain4j.service.MemoryId; import dev.langchain4j.service.UserMessage; import java.util.List; import java.util.Random; import org.junit.jupiter.api.Test; public class AiAssistedHRAssistantTest { public static final ObjectMapper mapper = new ObjectMapper() .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature(), true) .enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature()) .findAndRegisterModules(); @Test public void testChatWithHRAI() throws Exception { Random r = new Random(); int userNo = r.nextInt(100); // PLan ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.MY_OPENAI_API_KEY) .modelName(OpenAiModelName.GPT_3_5_TURBO) .timeout(ofSeconds(900)) .temperature(0.9) .build(); Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(model) .chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10)) .build(); // a) create types for retrieving skills and experience objects from responses SkillExtractor skillExtractor = AiServices.create(SkillExtractor.class, model); // b) simulate a chat String appendPrompt 
= "Answer acting as a friendly HR Consultant helping the user with his/her competence mapping, focussing on skills and projects."+ "Structure the answer friendly and selling with bullets for discovered or suggested supporting skills and potential typical projects"+ "where the user may have used those skills. " + "Limit answer to the most relevant 5 skills and top 8 projects"; String q1 = "Yes, I do work with Java and java microservices on the backend "; System.out.println("me: " + q1); String res1 = assistant.chat(userNo, q1 + appendPrompt); System.out.println(res1); Skill extractedSkills1 = skillExtractor.extractSkillFrom(res1); System.out.println("\n\n1. Skill mapped:" + mapper.writerWithDefaultPrettyPrinter().writeValueAsString(extractedSkills1) + "\n\n"); } interface Assistant { String chat(@MemoryId int memoryId, @UserMessage String userMessage); } static class Skill { @Description("the name of this skill") private String name; @Description("description of this skill. please make it selling and not more than 10 lines of text") private String description; @Description("list of suggested skills which correlate to this skill") private List<SkillReference> listOfCandidateSkillDefinitions; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } public List<SkillReference> getListOfCandidateSkillDefinitions() { return listOfCandidateSkillDefinitions; } public void setListOfCandidateSkillDefinitions(List<SkillReference> listOfCandidateSkillDefinitions) { this.listOfCandidateSkillDefinitions = listOfCandidateSkillDefinitions; } @Override public String toString() { return "Skill{" + "skillName='" + name + '\'' + ", skillDescription='" + description + '\'' + ", listOfCandidateSkillDefinitions=" + listOfCandidateSkillDefinitions + '}'; } } static class SkillReference { @Description("the name of 
this skill") private String name; @Description("description of this skill. please make it selling and not more than 10 lines of text") private String description; public String getName() { return name; } public void setName(String name) { this.name = name; } public String getDescription() { return description; } public void setDescription(String description) { this.description = description; } @Override public String toString() { return "SkillReference{" + "skillName='" + name + '\'' + ", skillDescription='" + description + '\'' + '}'; } } interface SkillExtractor { @UserMessage("Extract information about a skill from {{it}}") Skill extractSkillFrom(String text); } }
[ "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1181, 1242), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature'), ((1264, 1334), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature'), ((1534, 1728), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1711), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1685), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1652), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1534, 1602), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1756, 1927), 'dev.langchain4j.service.AiServices.builder'), ((1756, 1910), 'dev.langchain4j.service.AiServices.builder'), ((1756, 1825), 'dev.langchain4j.service.AiServices.builder')]
package dev.langchain4j.model.openai; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.*; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.chat.TestStreamingResponseHandler; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.output.TokenUsage; import org.assertj.core.data.Percentage; import org.junit.jupiter.api.Test; import java.util.Base64; import java.util.List; import java.util.concurrent.CompletableFuture; import static dev.langchain4j.agent.tool.JsonSchemaProperty.INTEGER; import static dev.langchain4j.data.message.ToolExecutionResultMessage.from; import static dev.langchain4j.data.message.UserMessage.userMessage; import static dev.langchain4j.internal.Utils.readBytes; import static dev.langchain4j.model.openai.OpenAiChatModelIT.CAT_IMAGE_URL; import static dev.langchain4j.model.openai.OpenAiChatModelIT.DICE_IMAGE_URL; import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO_1106; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_4_VISION_PREVIEW; import static dev.langchain4j.model.output.FinishReason.STOP; import static dev.langchain4j.model.output.FinishReason.TOOL_EXECUTION; import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.data.Percentage.withPercentage; class OpenAiStreamingChatModelIT { StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .temperature(0.0) .logRequests(true) .logResponses(true) 
.build(); StreamingChatLanguageModel visionModel = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .modelName(GPT_4_VISION_PREVIEW) .temperature(0.0) .logRequests(true) .logResponses(true) .build(); ToolSpecification calculator = ToolSpecification.builder() .name("calculator") .description("returns a sum of two numbers") .addParameter("first", INTEGER) .addParameter("second", INTEGER) .build(); Percentage tokenizerPrecision = withPercentage(5); @Test void should_stream_answer() throws Exception { CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate("What is the capital of Germany?", new StreamingResponseHandler<AiMessage>() { private final StringBuilder answerBuilder = new StringBuilder(); @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); answerBuilder.append(token); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureAnswer.completeExceptionally(error); futureResponse.completeExceptionally(error); } }); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); assertThat(answer).contains("Berlin"); assertThat(response.content().text()).isEqualTo(answer); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isEqualTo(14); assertThat(tokenUsage.outputTokenCount()).isGreaterThan(0); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(STOP); } @Test void 
should_execute_a_tool_then_stream_answer() throws Exception { // given UserMessage userMessage = userMessage("2+2=?"); List<ToolSpecification> toolSpecifications = singletonList(calculator); // when CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(singletonList(userMessage), toolSpecifications, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); Exception e = new IllegalStateException("onNext() should never be called when tool is executed"); futureResponse.completeExceptionally(e); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureResponse.completeExceptionally(error); } }); Response<AiMessage> response = futureResponse.get(30, SECONDS); AiMessage aiMessage = response.content(); // then assertThat(aiMessage.text()).isNull(); List<ToolExecutionRequest> toolExecutionRequests = aiMessage.toolExecutionRequests(); assertThat(toolExecutionRequests).hasSize(1); ToolExecutionRequest toolExecutionRequest = toolExecutionRequests.get(0); assertThat(toolExecutionRequest.name()).isEqualTo("calculator"); assertThat(toolExecutionRequest.arguments()).isEqualToIgnoringWhitespace("{\"first\": 2, \"second\": 2}"); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isCloseTo(53, tokenizerPrecision); assertThat(tokenUsage.outputTokenCount()).isCloseTo(14, tokenizerPrecision); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(TOOL_EXECUTION); // given ToolExecutionResultMessage toolExecutionResultMessage = from(toolExecutionRequest, "4"); List<ChatMessage> messages = asList(userMessage, aiMessage, toolExecutionResultMessage); // when 
CompletableFuture<Response<AiMessage>> secondFutureResponse = new CompletableFuture<>(); model.generate(messages, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); secondFutureResponse.complete(response); } @Override public void onError(Throwable error) { secondFutureResponse.completeExceptionally(error); } }); Response<AiMessage> secondResponse = secondFutureResponse.get(30, SECONDS); AiMessage secondAiMessage = secondResponse.content(); // then assertThat(secondAiMessage.text()).contains("4"); assertThat(secondAiMessage.toolExecutionRequests()).isNull(); TokenUsage secondTokenUsage = secondResponse.tokenUsage(); assertThat(secondTokenUsage.inputTokenCount()).isCloseTo(33, tokenizerPrecision); assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0); assertThat(secondTokenUsage.totalTokenCount()) .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount()); assertThat(secondResponse.finishReason()).isEqualTo(STOP); } @Test void should_execute_tool_forcefully_then_stream_answer() throws Exception { // given UserMessage userMessage = userMessage("2+2=?"); // when CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(singletonList(userMessage), calculator, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); Exception e = new IllegalStateException("onNext() should never be called when tool is executed"); futureResponse.completeExceptionally(e); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureResponse.completeExceptionally(error); } }); 
Response<AiMessage> response = futureResponse.get(30, SECONDS); AiMessage aiMessage = response.content(); // then assertThat(aiMessage.text()).isNull(); List<ToolExecutionRequest> toolExecutionRequests = aiMessage.toolExecutionRequests(); assertThat(toolExecutionRequests).hasSize(1); ToolExecutionRequest toolExecutionRequest = toolExecutionRequests.get(0); assertThat(toolExecutionRequest.name()).isEqualTo("calculator"); assertThat(toolExecutionRequest.arguments()).isEqualToIgnoringWhitespace("{\"first\": 2, \"second\": 2}"); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isCloseTo(59, tokenizerPrecision); assertThat(tokenUsage.outputTokenCount()).isCloseTo(9, tokenizerPrecision); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(STOP); // not sure if a bug in OpenAI or stop is expected here // given ToolExecutionResultMessage toolExecutionResultMessage = from(toolExecutionRequest, "4"); List<ChatMessage> messages = asList(userMessage, aiMessage, toolExecutionResultMessage); // when CompletableFuture<Response<AiMessage>> secondFutureResponse = new CompletableFuture<>(); model.generate(messages, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); secondFutureResponse.complete(response); } @Override public void onError(Throwable error) { secondFutureResponse.completeExceptionally(error); } }); Response<AiMessage> secondResponse = secondFutureResponse.get(30, SECONDS); AiMessage secondAiMessage = secondResponse.content(); // then assertThat(secondAiMessage.text()).contains("4"); assertThat(secondAiMessage.toolExecutionRequests()).isNull(); TokenUsage secondTokenUsage = secondResponse.tokenUsage(); 
assertThat(secondTokenUsage.inputTokenCount()).isCloseTo(33, tokenizerPrecision); assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0); assertThat(secondTokenUsage.totalTokenCount()) .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount()); assertThat(secondResponse.finishReason()).isEqualTo(STOP); } @Test void should_execute_multiple_tools_in_parallel_then_stream_answer() throws Exception { // given StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .modelName(GPT_3_5_TURBO_1106) // supports parallel function calling .temperature(0.0) .logRequests(true) .logResponses(true) .build(); UserMessage userMessage = userMessage("2+2=? 3+3=?"); List<ToolSpecification> toolSpecifications = singletonList(calculator); // when CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(singletonList(userMessage), toolSpecifications, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); Exception e = new IllegalStateException("onNext() should never be called when tool is executed"); futureResponse.completeExceptionally(e); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureResponse.completeExceptionally(error); } }); Response<AiMessage> response = futureResponse.get(30, SECONDS); AiMessage aiMessage = response.content(); // then assertThat(aiMessage.text()).isNull(); assertThat(aiMessage.toolExecutionRequests()).hasSize(2); ToolExecutionRequest toolExecutionRequest1 = aiMessage.toolExecutionRequests().get(0); assertThat(toolExecutionRequest1.name()).isEqualTo("calculator"); 
assertThat(toolExecutionRequest1.arguments()).isEqualToIgnoringWhitespace("{\"first\": 2, \"second\": 2}"); ToolExecutionRequest toolExecutionRequest2 = aiMessage.toolExecutionRequests().get(1); assertThat(toolExecutionRequest2.name()).isEqualTo("calculator"); assertThat(toolExecutionRequest2.arguments()).isEqualToIgnoringWhitespace("{\"first\": 3, \"second\": 3}"); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isCloseTo(57, tokenizerPrecision); assertThat(tokenUsage.outputTokenCount()).isCloseTo(51, tokenizerPrecision); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(TOOL_EXECUTION); // given ToolExecutionResultMessage toolExecutionResultMessage1 = from(toolExecutionRequest1, "4"); ToolExecutionResultMessage toolExecutionResultMessage2 = from(toolExecutionRequest2, "6"); List<ChatMessage> messages = asList(userMessage, aiMessage, toolExecutionResultMessage1, toolExecutionResultMessage2); // when CompletableFuture<Response<AiMessage>> secondFutureResponse = new CompletableFuture<>(); model.generate(messages, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); secondFutureResponse.complete(response); } @Override public void onError(Throwable error) { secondFutureResponse.completeExceptionally(error); } }); Response<AiMessage> secondResponse = secondFutureResponse.get(30, SECONDS); AiMessage secondAiMessage = secondResponse.content(); // then assertThat(secondAiMessage.text()).contains("4", "6"); assertThat(secondAiMessage.toolExecutionRequests()).isNull(); TokenUsage secondTokenUsage = secondResponse.tokenUsage(); assertThat(secondTokenUsage.inputTokenCount()).isCloseTo(83, tokenizerPrecision); 
assertThat(secondTokenUsage.outputTokenCount()).isGreaterThan(0); assertThat(secondTokenUsage.totalTokenCount()) .isEqualTo(secondTokenUsage.inputTokenCount() + secondTokenUsage.outputTokenCount()); assertThat(secondResponse.finishReason()).isEqualTo(STOP); } @Test void should_stream_valid_json() throws Exception { //given String userMessage = "Return JSON with two fields: name and surname of Klaus Heisler. " + "Before returning, tell me a joke."; // nudging it to say something additionally to json StreamingChatLanguageModel model = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .modelName(GPT_3_5_TURBO_1106) // supports response_format = 'json_object' .responseFormat("json_object") .logRequests(true) .logResponses(true) .build(); // when CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(userMessage, new StreamingResponseHandler<AiMessage>() { private final StringBuilder answerBuilder = new StringBuilder(); @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); answerBuilder.append(token); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response + "'"); futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureAnswer.completeExceptionally(error); futureResponse.completeExceptionally(error); } }); String json = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); // then assertThat(json).isEqualToIgnoringWhitespace("{\"name\": \"Klaus\", \"surname\": \"Heisler\"}"); assertThat(response.content().text()).isEqualTo(json); } @Test void should_accept_image_url() { // given ImageContent imageContent = 
ImageContent.from(CAT_IMAGE_URL); UserMessage userMessage = UserMessage.from(imageContent); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("cat"); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(92); } @Test void should_accept_base64_image() { // given String base64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL)); ImageContent imageContent = ImageContent.from(base64Data, "image/png"); UserMessage userMessage = UserMessage.from(imageContent); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("cat"); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(92); } @Test void should_accept_text_and_image() { // given UserMessage userMessage = UserMessage.from( TextContent.from("What do you see? Reply in one word."), ImageContent.from(CAT_IMAGE_URL) ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("cat"); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(102); } @Test void should_accept_text_and_multiple_images() { // given UserMessage userMessage = UserMessage.from( TextContent.from("What do you see? 
Reply with one word per image."), ImageContent.from(CAT_IMAGE_URL), ImageContent.from(DICE_IMAGE_URL) ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()) .containsIgnoringCase("cat") .containsIgnoringCase("dice"); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(189); } @Test void should_accept_text_and_multiple_images_from_different_sources() { // given UserMessage userMessage = UserMessage.from( ImageContent.from(CAT_IMAGE_URL), ImageContent.from(Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL)), "image/png"), TextContent.from("What do you see? Reply with one word per image.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()) .containsIgnoringCase("cat") .containsIgnoringCase("dice"); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(189); } @Test void should_use_enum_as_model_name() { // given OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .modelName(GPT_3_5_TURBO) .logRequests(true) .logResponses(true) .build(); String question = "What is the capital of Germany?"; // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); model.generate(question, handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("Berlin"); } }
[ "dev.langchain4j.agent.tool.ToolSpecification.builder" ]
[((2550, 2776), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2755), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2710), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2666), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2550, 2609), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((20008, 20068), 'java.util.Base64.getEncoder'), ((22330, 22391), 'java.util.Base64.getEncoder')]
package dev.langchain4j.model.vertexai; import com.google.cloud.vertexai.api.FunctionCall; import com.google.cloud.vertexai.api.FunctionDeclaration; import com.google.cloud.vertexai.api.Schema; import com.google.cloud.vertexai.api.Tool; import com.google.cloud.vertexai.api.Type; import com.google.protobuf.ListValue; import com.google.protobuf.NullValue; import com.google.protobuf.Struct; import com.google.protobuf.Value; import dev.langchain4j.agent.tool.JsonSchemaProperty; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.agent.tool.ToolSpecification; import org.junit.jupiter.api.Test; import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; import static dev.langchain4j.model.vertexai.FunctionCallHelper.unwrapProtoValue; import static org.assertj.core.api.Assertions.assertThat; public class FunctionCallHelperTest { @Test void should_unwrap_proto_values() { // check basic values assertThat(unwrapProtoValue(Value.newBuilder().setStringValue("hello").build())).isEqualTo("hello"); assertThat(unwrapProtoValue(Value.newBuilder().setBoolValue(false).build())).isEqualTo(false); assertThat(unwrapProtoValue(Value.newBuilder().setNumberValue(1.23).build())).isEqualTo(1.23); assertThat(unwrapProtoValue(Value.newBuilder().setNullValue(NullValue.NULL_VALUE).build())).isEqualTo(null); // check list unwrapping ListValue listValue = ListValue.newBuilder() .addValues(Value.newBuilder().setStringValue("hello")) .addValues(Value.newBuilder().setBoolValue(true)) .addValues(Value.newBuilder().setNumberValue(3.14)) .build(); assertThat(unwrapProtoValue(Value.newBuilder().setListValue(listValue).build())).isEqualTo( Arrays.asList("hello", true, 3.14) ); // check struct unwrapping Struct struct = Struct.newBuilder() .putFields("name", Value.newBuilder().setStringValue("Guillaume").build()) .putFields("numberOfKids", Value.newBuilder().setNumberValue(2).build()) .putFields("kids", 
Value.newBuilder().setListValue(ListValue.newBuilder() .addValues(Value.newBuilder().setStringValue("Marion").build()) .addValues(Value.newBuilder().setStringValue("Érine").build()) .build()).build()) .putFields("flag", Value.newBuilder().setBoolValue(false).build()) .build(); HashMap<Object, Object> map = new HashMap<>(); map.put("name", "Guillaume"); map.put("numberOfKids", 2.0); map.put("kids", Arrays.asList("Marion", "Érine")); map.put("flag", false); assertThat(unwrapProtoValue(Value.newBuilder().setStructValue(struct).build())).isEqualTo(map); } @Test void should_convert_tool_specs() { // given ToolSpecification toolSpec = ToolSpecification.builder() .description("Give the weather forecast for a location") .name("getWeatherForecast") .addParameter("location", JsonSchemaProperty.STRING, JsonSchemaProperty.description("the location to get the weather forecast for")) .addOptionalParameter("days", JsonSchemaProperty.INTEGER, JsonSchemaProperty.description("the number of days in the forecast")) .build(); // when Tool tool = FunctionCallHelper.convertToolSpecifications(Collections.singletonList(toolSpec)); // then assertThat(tool.getFunctionDeclarationsCount()).isEqualTo(1); FunctionDeclaration funDecl = tool.getFunctionDeclarations(0); assertThat(funDecl.getDescription()).isEqualTo("Give the weather forecast for a location"); assertThat(funDecl.getName()).isEqualTo("getWeatherForecast"); Schema parameters = funDecl.getParameters(); assertThat(parameters.getPropertiesCount()).isEqualTo(2); assertThat(parameters.getPropertiesMap().get("location").getType()).isEqualTo(Type.STRING); assertThat(parameters.getPropertiesMap().get("location").getDescription()) .isEqualTo("the location to get the weather forecast for"); assertThat(parameters.getRequiredCount()).isEqualTo(1); assertThat(parameters.getRequired(0)).isEqualTo("location"); assertThat(parameters.getPropertiesMap().get("days").getType()).isEqualTo(Type.INTEGER); } @Test void 
should_convert_function_calls_to_tool_execution_requests_and_back() { // given FunctionCall functionCall = FunctionCall.newBuilder() .setName("getWeatherForecast") .setArgs( Struct.newBuilder() .putFields("location", Value.newBuilder().setStringValue("Paris").build()) .build() ) .build(); // when List<ToolExecutionRequest> toolExecutionRequest = FunctionCallHelper.fromFunctionCalls(Collections.singletonList(functionCall)); FunctionCall sameFunctionCall = FunctionCallHelper.fromToolExecutionRequest(toolExecutionRequest.get(0)); // then assertThat(functionCall).isEqualTo(sameFunctionCall); // given ToolExecutionRequest newExecutionRequest = ToolExecutionRequest.builder() .name("getWeatherForecast") .arguments("{\"location\":\"Paris\"}") .build(); // when FunctionCall newFunctionCall = FunctionCallHelper.fromToolExecutionRequest(newExecutionRequest); ToolExecutionRequest sameExecutionRequest = FunctionCallHelper.fromFunctionCalls(Collections.singletonList(newFunctionCall)).get(0); // then assertThat(newExecutionRequest).isEqualTo(sameExecutionRequest); } }
[ "dev.langchain4j.agent.tool.ToolSpecification.builder", "dev.langchain4j.agent.tool.ToolExecutionRequest.builder" ]
[((1025, 1075), 'com.google.protobuf.Value.newBuilder'), ((1025, 1067), 'com.google.protobuf.Value.newBuilder'), ((1134, 1180), 'com.google.protobuf.Value.newBuilder'), ((1134, 1172), 'com.google.protobuf.Value.newBuilder'), ((1237, 1284), 'com.google.protobuf.Value.newBuilder'), ((1237, 1276), 'com.google.protobuf.Value.newBuilder'), ((1340, 1401), 'com.google.protobuf.Value.newBuilder'), ((1340, 1393), 'com.google.protobuf.Value.newBuilder'), ((1485, 1721), 'com.google.protobuf.ListValue.newBuilder'), ((1485, 1700), 'com.google.protobuf.ListValue.newBuilder'), ((1485, 1636), 'com.google.protobuf.ListValue.newBuilder'), ((1485, 1574), 'com.google.protobuf.ListValue.newBuilder'), ((1531, 1573), 'com.google.protobuf.Value.newBuilder'), ((1598, 1635), 'com.google.protobuf.Value.newBuilder'), ((1660, 1699), 'com.google.protobuf.Value.newBuilder'), ((1759, 1809), 'com.google.protobuf.Value.newBuilder'), ((1759, 1801), 'com.google.protobuf.Value.newBuilder'), ((1941, 2513), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2492), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2413), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2132), 'com.google.protobuf.Struct.newBuilder'), ((1941, 2047), 'com.google.protobuf.Struct.newBuilder'), ((1992, 2046), 'com.google.protobuf.Value.newBuilder'), ((1992, 2038), 'com.google.protobuf.Value.newBuilder'), ((2087, 2131), 'com.google.protobuf.Value.newBuilder'), ((2087, 2123), 'com.google.protobuf.Value.newBuilder'), ((2164, 2412), 'com.google.protobuf.Value.newBuilder'), ((2164, 2404), 'com.google.protobuf.Value.newBuilder'), ((2196, 2403), 'com.google.protobuf.ListValue.newBuilder'), ((2196, 2378), 'com.google.protobuf.ListValue.newBuilder'), ((2196, 2298), 'com.google.protobuf.ListValue.newBuilder'), ((2246, 2297), 'com.google.protobuf.Value.newBuilder'), ((2246, 2289), 'com.google.protobuf.Value.newBuilder'), ((2326, 2377), 'com.google.protobuf.Value.newBuilder'), ((2326, 2369), 'com.google.protobuf.Value.newBuilder'), 
((2445, 2491), 'com.google.protobuf.Value.newBuilder'), ((2445, 2483), 'com.google.protobuf.Value.newBuilder'), ((2774, 2823), 'com.google.protobuf.Value.newBuilder'), ((2774, 2815), 'com.google.protobuf.Value.newBuilder'), ((2952, 3426), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3405), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3249), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3088), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2952, 3048), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((4632, 4917), 'com.google.cloud.vertexai.api.FunctionCall.newBuilder'), ((4632, 4896), 'com.google.cloud.vertexai.api.FunctionCall.newBuilder'), ((4632, 4700), 'com.google.cloud.vertexai.api.FunctionCall.newBuilder'), ((4739, 4882), 'com.google.protobuf.Struct.newBuilder'), ((4739, 4853), 'com.google.protobuf.Struct.newBuilder'), ((4802, 4852), 'com.google.protobuf.Value.newBuilder'), ((4802, 4844), 'com.google.protobuf.Value.newBuilder'), ((5347, 5489), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5347, 5468), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((5347, 5417), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')]
package dev.langchain4j.data.message; import dev.langchain4j.agent.tool.ToolExecutionRequest; import org.assertj.core.api.WithAssertions; import org.junit.jupiter.api.Test; import java.util.Arrays; class AiMessageTest implements WithAssertions { @Test public void test_accessors() { { AiMessage m = new AiMessage("text"); assertThat(m.type()).isEqualTo(ChatMessageType.AI); assertThat(m.text()).isEqualTo("text"); assertThat(m.toolExecutionRequests()).isNull(); assertThat(m.hasToolExecutionRequests()).isFalse(); assertThat(m).hasToString("AiMessage { text = \"text\" toolExecutionRequests = null }"); } { AiMessage m = new AiMessage(Arrays.asList( ToolExecutionRequest.builder() .id("foo") .build(), ToolExecutionRequest.builder() .id("bar") .build())); assertThat(m.type()).isEqualTo(ChatMessageType.AI); assertThat(m.text()).isNull(); assertThat(m.toolExecutionRequests()).hasSize(2); assertThat(m.hasToolExecutionRequests()).isTrue(); assertThat(m).hasToString("AiMessage { text = null toolExecutionRequests = [ToolExecutionRequest { id = \"foo\", name = null, arguments = null }, ToolExecutionRequest { id = \"bar\", name = null, arguments = null }] }"); } } @Test public void test_equals_and_hashCode() { AiMessage m1 = new AiMessage("text"); AiMessage m2 = new AiMessage("text"); assertThat(m1) .isEqualTo(m1) .isNotEqualTo(null) .isNotEqualTo(new Object()) .isEqualTo(m2) .hasSameHashCodeAs(m2); AiMessage m3 = new AiMessage("different"); assertThat(m1) .isNotEqualTo(m3) .doesNotHaveSameHashCodeAs(m3); AiMessage m4 = AiMessage.from( ToolExecutionRequest.builder() .id("foo") .build(), ToolExecutionRequest.builder() .id("bar") .build()); AiMessage m5 = AiMessage.from( ToolExecutionRequest.builder() .id("foo") .build(), ToolExecutionRequest.builder() .id("bar") .build()); assertThat(m4) .isNotEqualTo(m1) .doesNotHaveSameHashCodeAs(m1) .isEqualTo(m5) .hasSameHashCodeAs(m5); } @Test public void test_from() { ToolExecutionRequest[] requests = new ToolExecutionRequest[]{ 
ToolExecutionRequest.builder() .id("foo") .build(), ToolExecutionRequest.builder() .id("bar") .build()}; { AiMessage m = AiMessage.from(requests); assertThat(m.text()).isNull(); assertThat(m.toolExecutionRequests()).containsOnly(requests); } { AiMessage m = AiMessage.aiMessage(requests); assertThat(m.text()).isNull(); assertThat(m.toolExecutionRequests()).containsOnly(requests); } { AiMessage m = AiMessage.from(Arrays.asList(requests)); assertThat(m.text()).isNull(); assertThat(m.toolExecutionRequests()).containsOnly(requests); } { AiMessage m = AiMessage.aiMessage(Arrays.asList(requests)); assertThat(m.text()).isNull(); assertThat(m.toolExecutionRequests()).containsOnly(requests); } { AiMessage m = AiMessage.from("text"); assertThat(m.text()).isEqualTo("text"); assertThat(m.toolExecutionRequests()).isNull(); } { AiMessage m = AiMessage.aiMessage("text"); assertThat(m.text()).isEqualTo("text"); assertThat(m.toolExecutionRequests()).isNull(); } } }
[ "dev.langchain4j.agent.tool.ToolExecutionRequest.builder" ]
[((790, 896), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((790, 859), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((918, 1024), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((918, 987), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2076, 2174), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2076, 2141), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2192, 2290), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2192, 2257), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2348, 2446), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2348, 2413), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2464, 2562), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2464, 2529), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2874, 2972), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2874, 2939), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2990, 3088), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2990, 3055), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')]
package dev.langchain4j.service; import dev.langchain4j.agent.tool.P; import dev.langchain4j.agent.tool.Tool; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.data.message.ToolExecutionResultMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.openai.OpenAiStreamingChatModel; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.output.TokenUsage; import org.assertj.core.data.Percentage; import org.junit.jupiter.api.Test; import java.util.List; import java.util.concurrent.CompletableFuture; import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO_0613; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO_1106; import static dev.langchain4j.model.output.FinishReason.STOP; import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.data.Percentage.withPercentage; import static org.mockito.Mockito.*; public class StreamingAiServicesIT { StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .temperature(0.0) .logRequests(true) .logResponses(true) .build(); Percentage tokenizerPrecision = withPercentage(5); interface Assistant { TokenStream chat(String userMessage); } @Test void should_stream_answer() throws Exception { Assistant assistant = AiServices.create(Assistant.class, streamingChatModel); StringBuilder answerBuilder = new StringBuilder(); CompletableFuture<String> futureAnswer = new CompletableFuture<>(); 
CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); assistant.chat("What is the capital of Germany?") .onNext(answerBuilder::append) .onComplete(response -> { futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); }) .onError(futureAnswer::completeExceptionally) .start(); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); assertThat(answer).contains("Berlin"); assertThat(response.content().text()).isEqualTo(answer); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(14); assertThat(response.tokenUsage().outputTokenCount()).isGreaterThan(1); assertThat(response.tokenUsage().totalTokenCount()).isGreaterThan(15); assertThat(response.finishReason()).isEqualTo(STOP); } @Test void should_stream_answers_with_memory() throws Exception { ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10); Assistant assistant = AiServices.builder(Assistant.class) .streamingChatLanguageModel(streamingChatModel) .chatMemory(chatMemory) .build(); String firstUserMessage = "Hi, my name is Klaus"; CompletableFuture<Response<AiMessage>> firstResultFuture = new CompletableFuture<>(); assistant.chat(firstUserMessage) .onNext(System.out::println) .onComplete(firstResultFuture::complete) .onError(firstResultFuture::completeExceptionally) .start(); Response<AiMessage> firstResponse = firstResultFuture.get(30, SECONDS); assertThat(firstResponse.content().text()).contains("Klaus"); String secondUserMessage = "What is my name?"; CompletableFuture<Response<AiMessage>> secondResultFuture = new CompletableFuture<>(); assistant.chat(secondUserMessage) .onNext(System.out::println) .onComplete(secondResultFuture::complete) .onError(secondResultFuture::completeExceptionally) .start(); Response<AiMessage> secondResponse = secondResultFuture.get(30, SECONDS); assertThat(secondResponse.content().text()).contains("Klaus"); List<ChatMessage> messages = 
chatMemory.messages(); assertThat(messages).hasSize(4); assertThat(messages.get(0)).isInstanceOf(UserMessage.class); assertThat(messages.get(0).text()).isEqualTo(firstUserMessage); assertThat(messages.get(1)).isInstanceOf(AiMessage.class); assertThat(messages.get(1)).isEqualTo(firstResponse.content()); assertThat(messages.get(2)).isInstanceOf(UserMessage.class); assertThat(messages.get(2).text()).isEqualTo(secondUserMessage); assertThat(messages.get(3)).isInstanceOf(AiMessage.class); assertThat(messages.get(3)).isEqualTo(secondResponse.content()); } static class Calculator { @Tool("calculates the square root of the provided number") double squareRoot(@P("number to operate on") double number) { return Math.sqrt(number); } } @Test void should_execute_a_tool_then_stream_answer() throws Exception { Calculator calculator = spy(new Calculator()); ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10); Assistant assistant = AiServices.builder(Assistant.class) .streamingChatLanguageModel(streamingChatModel) .chatMemory(chatMemory) .tools(calculator) .build(); StringBuilder answerBuilder = new StringBuilder(); CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); String userMessage = "What is the square root of 485906798473894056 in scientific notation?"; assistant.chat(userMessage) .onNext(answerBuilder::append) .onComplete(response -> { futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); }) .onError(futureAnswer::completeExceptionally) .start(); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); assertThat(answer).contains("6.97"); assertThat(response.content().text()).isEqualTo(answer); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isCloseTo(72 + 110, tokenizerPrecision); assertThat(tokenUsage.outputTokenCount()).isCloseTo(21 + 28, 
tokenizerPrecision); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(STOP); verify(calculator).squareRoot(485906798473894056.0); verifyNoMoreInteractions(calculator); List<ChatMessage> messages = chatMemory.messages(); assertThat(messages).hasSize(4); assertThat(messages.get(0)).isInstanceOf(UserMessage.class); assertThat(messages.get(0).text()).isEqualTo(userMessage); AiMessage aiMessage = (AiMessage) messages.get(1); assertThat(aiMessage.text()).isNull(); assertThat(aiMessage.toolExecutionRequests()).hasSize(1); ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0); assertThat(toolExecutionRequest.id()).isNotBlank(); assertThat(toolExecutionRequest.name()).isEqualTo("squareRoot"); assertThat(toolExecutionRequest.arguments()) .isEqualToIgnoringWhitespace("{\"arg0\": 485906798473894056}"); ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(2); assertThat(toolExecutionResultMessage.id()).isEqualTo(toolExecutionRequest.id()); assertThat(toolExecutionResultMessage.toolName()).isEqualTo("squareRoot"); assertThat(toolExecutionResultMessage.text()).isEqualTo("6.97070153193991E8"); assertThat(messages.get(3)).isInstanceOf(AiMessage.class); assertThat(messages.get(3).text()).contains("6.97"); } @Test void should_execute_multiple_tools_sequentially_then_answer() throws Exception { StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .modelName(GPT_3_5_TURBO_0613) // this model can only call tools sequentially .temperature(0.0) .logRequests(true) .logResponses(true) .build(); Calculator calculator = spy(new Calculator()); ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10); Assistant assistant = 
AiServices.builder(Assistant.class) .streamingChatLanguageModel(streamingChatModel) .chatMemory(chatMemory) .tools(calculator) .build(); StringBuilder answerBuilder = new StringBuilder(); CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); String userMessage = "What is the square root of 485906798473894056 and 97866249624785 in scientific notation?"; assistant.chat(userMessage) .onNext(answerBuilder::append) .onComplete(response -> { futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); }) .onError(futureAnswer::completeExceptionally) .start(); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); assertThat(answer).contains("6.97", "9.89"); assertThat(response.content().text()).isEqualTo(answer); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isCloseTo(79 + 117 + 152, tokenizerPrecision); assertThat(tokenUsage.outputTokenCount()).isCloseTo(21 + 20 + 53, tokenizerPrecision); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(STOP); verify(calculator).squareRoot(485906798473894056.0); verify(calculator).squareRoot(97866249624785.0); verifyNoMoreInteractions(calculator); List<ChatMessage> messages = chatMemory.messages(); assertThat(messages).hasSize(6); assertThat(messages.get(0)).isInstanceOf(dev.langchain4j.data.message.UserMessage.class); assertThat(messages.get(0).text()).isEqualTo(userMessage); AiMessage aiMessage = (AiMessage) messages.get(1); assertThat(aiMessage.text()).isNull(); assertThat(aiMessage.toolExecutionRequests()).hasSize(1); ToolExecutionRequest toolExecutionRequest = aiMessage.toolExecutionRequests().get(0); assertThat(toolExecutionRequest.id()).isNotBlank(); 
assertThat(toolExecutionRequest.name()).isEqualTo("squareRoot"); assertThat(toolExecutionRequest.arguments()) .isEqualToIgnoringWhitespace("{\"arg0\": 485906798473894056}"); ToolExecutionResultMessage toolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(2); assertThat(toolExecutionResultMessage.id()).isEqualTo(toolExecutionRequest.id()); assertThat(toolExecutionResultMessage.toolName()).isEqualTo("squareRoot"); assertThat(toolExecutionResultMessage.text()).isEqualTo("6.97070153193991E8"); AiMessage secondAiMessage = (AiMessage) messages.get(3); assertThat(secondAiMessage.text()).isNull(); assertThat(secondAiMessage.toolExecutionRequests()).hasSize(1); ToolExecutionRequest secondToolExecutionRequest = secondAiMessage.toolExecutionRequests().get(0); assertThat(secondToolExecutionRequest.id()).isNotBlank(); assertThat(secondToolExecutionRequest.name()).isEqualTo("squareRoot"); assertThat(secondToolExecutionRequest.arguments()) .isEqualToIgnoringWhitespace("{\"arg0\": 97866249624785}"); ToolExecutionResultMessage secondToolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(4); assertThat(secondToolExecutionResultMessage.id()).isEqualTo(secondToolExecutionRequest.id()); assertThat(secondToolExecutionResultMessage.toolName()).isEqualTo("squareRoot"); assertThat(secondToolExecutionResultMessage.text()).isEqualTo("9892737.215997653"); assertThat(messages.get(5)).isInstanceOf(AiMessage.class); assertThat(messages.get(5).text()).contains("6.97", "9.89"); } @Test void should_execute_multiple_tools_in_parallel_then_answer() throws Exception { Calculator calculator = spy(new Calculator()); StreamingChatLanguageModel streamingChatModel = OpenAiStreamingChatModel.builder() .baseUrl(System.getenv("OPENAI_BASE_URL")) .apiKey(System.getenv("OPENAI_API_KEY")) .organizationId(System.getenv("OPENAI_ORGANIZATION_ID")) .modelName(GPT_3_5_TURBO_1106) .temperature(0.0) .logRequests(true) .logResponses(true) .build(); ChatMemory chatMemory = 
MessageWindowChatMemory.withMaxMessages(10); Assistant assistant = AiServices.builder(Assistant.class) .streamingChatLanguageModel(streamingChatModel) .chatMemory(chatMemory) .tools(calculator) .build(); StringBuilder answerBuilder = new StringBuilder(); CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); String userMessage = "What is the square root of 485906798473894056 and 97866249624785 in scientific notation?"; assistant.chat(userMessage) .onNext(answerBuilder::append) .onComplete(response -> { futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); }) .onError(futureAnswer::completeExceptionally) .start(); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); assertThat(answer).contains("6.97", "9.89"); assertThat(response.content().text()).isEqualTo(answer); TokenUsage tokenUsage = response.tokenUsage(); assertThat(tokenUsage.inputTokenCount()).isCloseTo(79 + 160, tokenizerPrecision); assertThat(tokenUsage.outputTokenCount()).isCloseTo(54 + 58, tokenizerPrecision); assertThat(tokenUsage.totalTokenCount()) .isEqualTo(tokenUsage.inputTokenCount() + tokenUsage.outputTokenCount()); assertThat(response.finishReason()).isEqualTo(STOP); verify(calculator).squareRoot(485906798473894056.0); verify(calculator).squareRoot(97866249624785.0); verifyNoMoreInteractions(calculator); List<ChatMessage> messages = chatMemory.messages(); assertThat(messages).hasSize(5); assertThat(messages.get(0)).isInstanceOf(dev.langchain4j.data.message.UserMessage.class); assertThat(messages.get(0).text()).isEqualTo(userMessage); AiMessage aiMessage = (AiMessage) messages.get(1); assertThat(aiMessage.text()).isNull(); assertThat(aiMessage.toolExecutionRequests()).hasSize(2); ToolExecutionRequest firstToolExecutionRequest = aiMessage.toolExecutionRequests().get(0); 
assertThat(firstToolExecutionRequest.id()).isNotBlank(); assertThat(firstToolExecutionRequest.name()).isEqualTo("squareRoot"); assertThat(firstToolExecutionRequest.arguments()) .isEqualToIgnoringWhitespace("{\"arg0\": 485906798473894056}"); ToolExecutionRequest secondToolExecutionRequest = aiMessage.toolExecutionRequests().get(1); assertThat(secondToolExecutionRequest.id()).isNotBlank(); assertThat(secondToolExecutionRequest.name()).isEqualTo("squareRoot"); assertThat(secondToolExecutionRequest.arguments()) .isEqualToIgnoringWhitespace("{\"arg0\": 97866249624785}"); ToolExecutionResultMessage firstToolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(2); assertThat(firstToolExecutionResultMessage.id()).isEqualTo(firstToolExecutionRequest.id()); assertThat(firstToolExecutionResultMessage.toolName()).isEqualTo("squareRoot"); assertThat(firstToolExecutionResultMessage.text()).isEqualTo("6.97070153193991E8"); ToolExecutionResultMessage secondToolExecutionResultMessage = (ToolExecutionResultMessage) messages.get(3); assertThat(secondToolExecutionResultMessage.id()).isEqualTo(secondToolExecutionRequest.id()); assertThat(secondToolExecutionResultMessage.toolName()).isEqualTo("squareRoot"); assertThat(secondToolExecutionResultMessage.text()).isEqualTo("9892737.215997653"); assertThat(messages.get(4)).isInstanceOf(AiMessage.class); assertThat(messages.get(4).text()).contains("6.97", "9.89"); } }
[ "dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder" ]
[((1370, 1695), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1674), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1642), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1611), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1581), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1512), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((1370, 1459), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9266), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9241), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9205), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9170), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9089), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 9042), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 8969), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((8819, 8912), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14102), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14077), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14041), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 14006), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13972), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13925), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13852), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((13702, 13795), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder')]
package com.redhat; import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.loader.FileSystemDocumentLoader; import dev.langchain4j.data.document.parser.TextDocumentParser; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore; import io.quarkus.runtime.StartupEvent; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; import java.io.File; import java.util.List; @ApplicationScoped public class IngestorExample { /** * The embedding store (the database). * The bean is provided by the quarkus-langchain4j-redis extension. */ @Inject RedisEmbeddingStore store; /** * The embedding model (how the vector of a document is computed). * The bean is provided by the LLM (like openai) extension. */ @Inject EmbeddingModel embeddingModel; public void ingest(@Observes StartupEvent event) { System.out.printf("Ingesting documents...%n"); // List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/bank").toPath(), // List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/museum").toPath(), // List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/halffoods").toPath(), List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/insurance").toPath(), new TextDocumentParser()); var ingestor = EmbeddingStoreIngestor.builder() .embeddingStore(store) .embeddingModel(embeddingModel) .documentSplitter(recursive(500, 0)) .build(); ingestor.ingest(documents); System.out.printf("Ingested %d documents.%n", documents.size()); } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((1785, 1982), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1957), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1904), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1785, 1856), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.structured.StructuredPrompt; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.output.structured.Description; import dev.langchain4j.service.*; import java.math.BigDecimal; import java.math.BigInteger; import java.time.LocalDate; import java.time.LocalDateTime; import java.time.LocalTime; import java.util.List; import static java.util.Arrays.asList; public class OtherServiceExamples { static ChatLanguageModel chatLanguageModel = OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY); static class Sentiment_Extracting_AI_Service_Example { enum Sentiment { POSITIVE, NEUTRAL, NEGATIVE; } interface SentimentAnalyzer { @UserMessage("Analyze sentiment of {{it}}") Sentiment analyzeSentimentOf(String text); @UserMessage("Does {{it}} have a positive sentiment?") boolean isPositive(String text); } public static void main(String[] args) { SentimentAnalyzer sentimentAnalyzer = AiServices.create(SentimentAnalyzer.class, chatLanguageModel); Sentiment sentiment = sentimentAnalyzer.analyzeSentimentOf("It is good!"); System.out.println(sentiment); // POSITIVE boolean positive = sentimentAnalyzer.isPositive("It is bad!"); System.out.println(positive); // false } } static class Number_Extracting_AI_Service_Example { interface NumberExtractor { @UserMessage("Extract number from {{it}}") int extractInt(String text); @UserMessage("Extract number from {{it}}") long extractLong(String text); @UserMessage("Extract number from {{it}}") BigInteger extractBigInteger(String text); @UserMessage("Extract number from {{it}}") float extractFloat(String text); @UserMessage("Extract number from {{it}}") double extractDouble(String text); @UserMessage("Extract number from {{it}}") BigDecimal extractBigDecimal(String text); } public static void main(String[] args) { NumberExtractor extractor = AiServices.create(NumberExtractor.class, chatLanguageModel); String text = "After countless 
millennia of computation, the supercomputer Deep Thought finally announced " + "that the answer to the ultimate question of life, the universe, and everything was forty two."; int intNumber = extractor.extractInt(text); System.out.println(intNumber); // 42 long longNumber = extractor.extractLong(text); System.out.println(longNumber); // 42 BigInteger bigIntegerNumber = extractor.extractBigInteger(text); System.out.println(bigIntegerNumber); // 42 float floatNumber = extractor.extractFloat(text); System.out.println(floatNumber); // 42.0 double doubleNumber = extractor.extractDouble(text); System.out.println(doubleNumber); // 42.0 BigDecimal bigDecimalNumber = extractor.extractBigDecimal(text); System.out.println(bigDecimalNumber); // 42.0 } } static class Date_and_Time_Extracting_AI_Service_Example { interface DateTimeExtractor { @UserMessage("Extract date from {{it}}") LocalDate extractDateFrom(String text); @UserMessage("Extract time from {{it}}") LocalTime extractTimeFrom(String text); @UserMessage("Extract date and time from {{it}}") LocalDateTime extractDateTimeFrom(String text); } public static void main(String[] args) { DateTimeExtractor extractor = AiServices.create(DateTimeExtractor.class, chatLanguageModel); String text = "The tranquility pervaded the evening of 1968, just fifteen minutes shy of midnight," + " following the celebrations of Independence Day."; LocalDate date = extractor.extractDateFrom(text); System.out.println(date); // 1968-07-04 LocalTime time = extractor.extractTimeFrom(text); System.out.println(time); // 23:45 LocalDateTime dateTime = extractor.extractDateTimeFrom(text); System.out.println(dateTime); // 1968-07-04T23:45 } } static class POJO_Extracting_AI_Service_Example { static class Person { private String firstName; private String lastName; private LocalDate birthDate; @Override public String toString() { return "Person {" + " firstName = \"" + firstName + "\"" + ", lastName = \"" + lastName + "\"" + ", birthDate = " + birthDate + 
" }"; } } interface PersonExtractor { @UserMessage("Extract information about a person from {{it}}") Person extractPersonFrom(String text); } public static void main(String[] args) { ChatLanguageModel chatLanguageModel = OpenAiChatModel.builder() .apiKey(System.getenv("OPENAI_API_KEY")) // When extracting POJOs with the LLM that supports the "json mode" feature // (e.g., OpenAI, Azure OpenAI, Ollama, etc.), it is advisable to use it to get more reliable results. // When using this feature, LLM will be forced to output a valid JSON. // Please note that this feature is not (yet) supported when using "demo" key. .responseFormat("json_object") .build(); PersonExtractor extractor = AiServices.create(PersonExtractor.class, chatLanguageModel); String text = "In 1968, amidst the fading echoes of Independence Day, " + "a child named John arrived under the calm evening sky. " + "This newborn, bearing the surname Doe, marked the start of a new journey."; Person person = extractor.extractPersonFrom(text); System.out.println(person); // Person { firstName = "John", lastName = "Doe", birthDate = 1968-07-04 } } } static class POJO_With_Descriptions_Extracting_AI_Service_Example { static class Recipe { @Description("short title, 3 words maximum") private String title; @Description("short description, 2 sentences maximum") private String description; @Description("each step should be described in 4 words, steps should rhyme") private List<String> steps; private Integer preparationTimeMinutes; @Override public String toString() { return "Recipe {" + " title = \"" + title + "\"" + ", description = \"" + description + "\"" + ", steps = " + steps + ", preparationTimeMinutes = " + preparationTimeMinutes + " }"; } } @StructuredPrompt("Create a recipe of a {{dish}} that can be prepared using only {{ingredients}}") static class CreateRecipePrompt { private String dish; private List<String> ingredients; } interface Chef { Recipe createRecipeFrom(String... 
ingredients); Recipe createRecipe(CreateRecipePrompt prompt); } public static void main(String[] args) { Chef chef = AiServices.create(Chef.class, chatLanguageModel); Recipe recipe = chef.createRecipeFrom("cucumber", "tomato", "feta", "onion", "olives"); System.out.println(recipe); // Recipe { // title = "Greek Salad", // description = "A refreshing mix of veggies and feta cheese in a zesty dressing.", // steps = [ // "Chop cucumber and tomato", // "Add onion and olives", // "Crumble feta on top", // "Drizzle with dressing and enjoy!" // ], // preparationTimeMinutes = 10 // } CreateRecipePrompt prompt = new CreateRecipePrompt(); prompt.dish = "salad"; prompt.ingredients = asList("cucumber", "tomato", "feta", "onion", "olives"); Recipe anotherRecipe = chef.createRecipe(prompt); System.out.println(anotherRecipe); // Recipe ... } } static class AI_Service_with_System_Message_Example { interface Chef { @SystemMessage("You are a professional chef. You are friendly, polite and concise.") String answer(String question); } public static void main(String[] args) { Chef chef = AiServices.create(Chef.class, chatLanguageModel); String answer = chef.answer("How long should I grill chicken?"); System.out.println(answer); // Grilling chicken usually takes around 10-15 minutes per side, depending on ... } } static class AI_Service_with_System_and_User_Messages_Example { interface TextUtils { @SystemMessage("You are a professional translator into {{language}}") @UserMessage("Translate the following text: {{text}}") String translate(@V("text") String text, @V("language") String language); @SystemMessage("Summarize every message from user in {{n}} bullet points. 
Provide only bullet points.") List<String> summarize(@UserMessage String text, @V("n") int n); } public static void main(String[] args) { TextUtils utils = AiServices.create(TextUtils.class, chatLanguageModel); String translation = utils.translate("Hello, how are you?", "italian"); System.out.println(translation); // Ciao, come stai? String text = "AI, or artificial intelligence, is a branch of computer science that aims to create " + "machines that mimic human intelligence. This can range from simple tasks such as recognizing " + "patterns or speech to more complex tasks like making decisions or predictions."; List<String> bulletPoints = utils.summarize(text, 3); System.out.println(bulletPoints); // [ // "- AI is a branch of computer science", // "- It aims to create machines that mimic human intelligence", // "- It can perform simple or complex tasks" // ] } } static class AI_Service_with_UserName_Example { interface Assistant { String chat(@UserName String name, @UserMessage String message); } public static void main(String[] args) { Assistant assistant = AiServices.create(Assistant.class, chatLanguageModel); String answer = assistant.chat("Klaus", "Hi, tell me my name if you see it."); System.out.println(answer); // Hello! Your name is Klaus. How can I assist you today? } } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((5313, 5888), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5859), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5313, 5399), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package ai.equity.salt; import dev.langchain4j.model.openai.OpenAiLanguageModel; import dev.langchain4j.model.language.LanguageModel; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import static org.junit.Assert.assertThrows; class OpenAiApiTest { private static final String FAKE_API_KEY = "asdfghjkl"; LanguageModel modelNoApiKey = OpenAiLanguageModel.builder() .apiKey(FAKE_API_KEY) .logRequests(true) .logResponses(true) .build(); @Test void testExceptionMessage() { String prompt = "What is the capital of Germany?"; Exception exception = assertThrows(RuntimeException.class, () -> { modelNoApiKey.generate(prompt); }); String expectedMessage = "Incorrect API key provided: " + FAKE_API_KEY + ". You can find your API key at https://platform.openai.com/account/api-keys."; String actualMessage = exception.getMessage(); Assertions.assertTrue(actualMessage.contains(expectedMessage)); } }
[ "dev.langchain4j.model.openai.OpenAiLanguageModel.builder" ]
[((377, 524), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 503), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 471), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((377, 440), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder')]
/**************************************************************************************** Copyright © 2003-2012 hbasesoft Corporation. All rights reserved. Reproduction or <br> transmission in whole or in part, in any form or by any means, electronic, mechanical <br> or otherwise, is prohibited without the prior written consent of the copyright owner. <br> ****************************************************************************************/ package com.hbasesoft.framework.langchain4j.demo; import java.io.IOException; import com.hbasesoft.framework.common.utils.PropertyHolder; import com.hbasesoft.framework.langchain4j.dashscope.QwenChatModel; import dev.langchain4j.chain.ConversationalChain; /** * <Description> <br> * * @author 王伟<br> * @version 1.0<br> * @taskId <br> * @CreateDate 2023年10月26日 <br> * @since V1.0<br> * @see com.hbasesoft.framework.langchain4j.demo <br> */ public class ChatMemoryExamples { /** * Description: <br> * * @author 王伟<br> * @taskId <br> * @param args * @throws IOException <br> */ public static void main(final String[] args) throws IOException { ConversationalChain chain = ConversationalChain.builder() .chatLanguageModel(QwenChatModel.builder().apiKey(PropertyHolder.getProperty("qwen.apikey")).build()) // .chatMemory() // you can override default chat memory .build(); String answer = chain.execute("Hello, my name is Klaus"); System.out.println(answer); // Hello Klaus! How can I assist you today? String answerWithName = chain.execute("What is my name?"); System.out.println(answerWithName); // Your name is Klaus. } }
[ "dev.langchain4j.chain.ConversationalChain.builder" ]
[((1211, 1444), 'dev.langchain4j.chain.ConversationalChain.builder'), ((1211, 1354), 'dev.langchain4j.chain.ConversationalChain.builder'), ((1272, 1353), 'com.hbasesoft.framework.langchain4j.dashscope.QwenChatModel.builder'), ((1272, 1345), 'com.hbasesoft.framework.langchain4j.dashscope.QwenChatModel.builder')]
/* * Copyright 2024 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gemini.workshop; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel; import dev.langchain4j.agent.tool.JsonSchemaProperty; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.data.message.ToolExecutionResultMessage; import dev.langchain4j.model.output.Response; import java.util.List; import java.util.ArrayList; public class Step8_FunctionCalling { public static void main(String[] args) { ChatLanguageModel model = VertexAiGeminiChatModel.builder() .project(System.getenv("PROJECT_ID")) .location(System.getenv("LOCATION")) .modelName("gemini-1.0-pro") .maxOutputTokens(100) .build(); ToolSpecification weatherToolSpec = ToolSpecification.builder() .name("getWeatherForecast") .description("Get the weather forecast for a location") .addParameter("location", JsonSchemaProperty.STRING, JsonSchemaProperty.description("the location to get the weather forecast for")) .build(); List<ChatMessage> allMessages = new ArrayList<>(); // 1) Ask about the weather UserMessage weatherQuestion = UserMessage.from("What is the weather in Paris?"); allMessages.add(weatherQuestion); // 2) The model replies with a function call request 
Response<AiMessage> messageResponse = model.generate(allMessages, weatherToolSpec); ToolExecutionRequest toolExecutionRequest = messageResponse.content().toolExecutionRequests().getFirst(); System.out.println("Tool execution request: " + toolExecutionRequest); allMessages.add(messageResponse.content()); // Here, we would call a real weather forecast service // 3) We send back the result of the function call ToolExecutionResultMessage toolExecResMsg = ToolExecutionResultMessage.from(toolExecutionRequest, "{\"location\":\"Paris\",\"forecast\":\"sunny\", \"temperature\": 20}"); allMessages.add(toolExecResMsg); // 4) The model answers with a sentence describing the weather Response<AiMessage> weatherResponse = model.generate(allMessages); System.out.println("Answer: " + weatherResponse.content().text()); } }
[ "dev.langchain4j.agent.tool.ToolSpecification.builder", "dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder" ]
[((1325, 1553), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1325, 1532), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1325, 1498), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1325, 1457), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1325, 1408), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1600, 1917), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((1600, 1896), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((1600, 1735), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((1600, 1667), 'dev.langchain4j.agent.tool.ToolSpecification.builder')]
package net.savantly.mainbot.config; import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002; import static java.time.Duration.ofSeconds; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiEmbeddingModel; import lombok.RequiredArgsConstructor; @Configuration @RequiredArgsConstructor public class EmbeddingModelConfig { private final OpenAIConfig openAIConfig; @Bean public EmbeddingModel embeddingModel() { EmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder() .apiKey(openAIConfig.getApiKey()) // https://platform.openai.com/account/api-keys .modelName(TEXT_EMBEDDING_ADA_002) .timeout(ofSeconds(30)) .build(); return embeddingModel; } }
[ "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder" ]
[((651, 895), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((651, 870), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((651, 830), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((651, 731), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')]
package org.agoncal.fascicle.langchain4j.context; // tag::adocSnippet[] import dev.langchain4j.chain.ConversationalChain; import dev.langchain4j.model.azure.AzureOpenAiChatModel; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.MessageWindowChatMemory; // tag::adocSkip[] /** * @author Antonio Goncalves * http://www.antoniogoncalves.org * -- */ // end::adocSkip[] public class MusicianService { public static void main(String[] args) throws InterruptedException { MusicianService musicianService = new MusicianService(); // musicianService.useNoMemory(); musicianService.useConversationalChain(); // musicianService.useConversationalChainWithMemory(); } private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY"); private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT"); private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"); private static final String PROMPT = "When was the first Beatles album released?"; // ################# // ### NO MEMORY ### // ################# public void useNoMemory() throws InterruptedException { System.out.println("### useNoMemory"); AzureOpenAiChatModel model = AzureOpenAiChatModel.builder() .apiKey(AZURE_OPENAI_KEY) .endpoint(AZURE_OPENAI_ENDPOINT) .deploymentName(AZURE_OPENAI_DEPLOYMENT_NAME) .temperature(0.7) .logRequestsAndResponses(false) .build(); System.out.println(">>>>" + model.generate("My name is Antonio Goncalves")); Thread.sleep(5000); System.out.println(">>>>" + model.generate("When was the first Rolling Stones album released?")); Thread.sleep(5000); System.out.println(">>>>" + model.generate("What's the name of the singer?")); Thread.sleep(5000); System.out.println(">>>>" + model.generate("What is my name?")); } // ################################ // ### USE CONVERSATIONAL CHAIN ### // ################################ public void useConversationalChain() throws InterruptedException { 
System.out.println("### useConversationalChain"); AzureOpenAiChatModel model = AzureOpenAiChatModel.builder() .apiKey(AZURE_OPENAI_KEY) .endpoint(AZURE_OPENAI_ENDPOINT) .deploymentName(AZURE_OPENAI_DEPLOYMENT_NAME) .temperature(0.7) .logRequestsAndResponses(true) .build(); ConversationalChain chain = ConversationalChain.builder() .chatLanguageModel(model) .build(); System.out.println("\n\n################################"); System.out.println("# My name is Antonio Goncalves #"); System.out.println("################################"); System.out.println(">>>>" + chain.execute("My name is Antonio Goncalves")); Thread.sleep(5000); System.out.println("\n\n#####################################################"); System.out.println("# When was the first Rolling Stones album released? #"); System.out.println("#####################################################"); System.out.println(">>>>" + chain.execute("When was the first Rolling Stones album released?")); Thread.sleep(5000); System.out.println("\n\n##################################"); System.out.println("# What's the name of the singer? #"); System.out.println("##################################"); System.out.println(">>>>" + chain.execute("What's the name of the singer?")); Thread.sleep(5000); System.out.println("\n\n####################"); System.out.println("# What is my name? 
#"); System.out.println("####################"); System.out.println(">>>>" + chain.execute("What is my name?")); } // ################################ // ### USE CONVERSATIONAL CHAIN ### // ################################ public void useConversationalChainWithMemory() throws InterruptedException { System.out.println("### useConversationalChainWithMemory"); AzureOpenAiChatModel model = AzureOpenAiChatModel.builder() .apiKey(AZURE_OPENAI_KEY) .endpoint(AZURE_OPENAI_ENDPOINT) .deploymentName(AZURE_OPENAI_DEPLOYMENT_NAME) .temperature(0.7) .logRequestsAndResponses(true) .build(); ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(2); ConversationalChain chain = ConversationalChain.builder() .chatLanguageModel(model) .chatMemory(chatMemory) .build(); System.out.println("\n\n################################"); System.out.println("# My name is Antonio Goncalves #"); System.out.println("################################"); System.out.println(">>>>" + chain.execute("My name is Antonio Goncalves")); Thread.sleep(5000); System.out.println("\n\n#####################################################"); System.out.println("# When was the first Rolling Stones album released? #"); System.out.println("#####################################################"); System.out.println(">>>>" + chain.execute("When was the first Rolling Stones album released?")); Thread.sleep(5000); System.out.println("\n\n##################################"); System.out.println("# What's the name of the singer? #"); System.out.println("##################################"); System.out.println(">>>>" + chain.execute("What's the name of the singer?")); Thread.sleep(5000); System.out.println("\n\n####################"); System.out.println("# What is my name? #"); System.out.println("####################"); System.out.println(">>>>" + chain.execute("What is my name?")); } } // end::adocSnippet[]
[ "dev.langchain4j.model.azure.AzureOpenAiChatModel.builder", "dev.langchain4j.chain.ConversationalChain.builder" ]
[((1289, 1519), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1504), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1466), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1442), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1390), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((1289, 1351), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2434), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2419), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2382), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2358), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2306), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2205, 2267), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((2469, 2545), 'dev.langchain4j.chain.ConversationalChain.builder'), ((2469, 2530), 'dev.langchain4j.chain.ConversationalChain.builder'), ((4017, 4246), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4231), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4194), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4170), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4118), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4017, 4079), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((4354, 4460), 'dev.langchain4j.chain.ConversationalChain.builder'), ((4354, 4445), 'dev.langchain4j.chain.ConversationalChain.builder'), ((4354, 4415), 'dev.langchain4j.chain.ConversationalChain.builder')]
package ru.vzotov.ai; import com.fasterxml.jackson.databind.ObjectMapper; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.boot.autoconfigure.ImportAutoConfiguration; import org.springframework.boot.autoconfigure.condition.ConditionalOnBean; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import ru.vzotov.accounting.domain.model.PersistentPropertyRepository; import ru.vzotov.ai.application.PurchaseCategoryIndexer; import ru.vzotov.ai.application.PurchaseCategoryProcessor; import ru.vzotov.ai.interfaces.facade.AIFacade; import ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl; import ru.vzotov.cashreceipt.domain.model.PurchaseCategoryRepository; import ru.vzotov.langchain4j.gigachat.spring.AutoConfig; import ru.vzotov.purchases.domain.model.PurchaseRepository; @ConditionalOnProperty(prefix = AIModuleProperties.PREFIX, name = "enabled") @Configuration @ImportAutoConfiguration(AutoConfig.class) public class AIModule { private static final Logger log = LoggerFactory.getLogger(AIModule.class); @Bean AIModuleProperties aiModuleProperties() { return new AIModuleProperties(); } @Bean EmbeddingStore<TextSegment> embeddingStore(AIModuleProperties properties) { PgVectorConfigProperties config = properties.getPgvector(); return PgVectorEmbeddingStore.builder() .host(config.getHost()) .port(config.getPort()) .database(config.getDatabase()) .user(config.getUser()) .password(config.getPassword()) .dimension(config.getDimension()) .table(config.getTable()) .createTable(config.getCreate()) .dropTableFirst(config.getDrop()) 
.useIndex(true) .indexListSize(config.getIndexListSize()) .build(); } @Bean PurchaseCategoryProcessor processor(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel, AIModuleProperties properties) { PurchasesConfigProperties config = properties.getPurchases(); return PurchaseCategoryProcessor.builder() .embeddingStore(embeddingStore) .embeddingModel(embeddingModel) .partitionSize(config.getPartitionSize()) .build(); } @Bean AIFacade facade( PurchaseCategoryRepository purchaseCategoryRepository, PurchaseRepository purchaseRepository, EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel, ChatLanguageModel chatLanguageModel, ObjectMapper objectMapper) { return AIFacadeImpl.builder() .purchaseCategoryRepository(purchaseCategoryRepository) .purchaseRepository(purchaseRepository) .embeddingStore(embeddingStore) .embeddingModel(embeddingModel) .chatLanguageModel(chatLanguageModel) .objectMapper(objectMapper) .build(); } @Bean @ConditionalOnBean(PersistentPropertyRepository.class) PurchaseCategoryIndexer indexer(AIModuleProperties properties, ObjectMapper objectMapper, PurchaseRepository purchaseRepository, PersistentPropertyRepository propertyRepository, PurchaseCategoryProcessor processor) { return PurchaseCategoryIndexer.builder() .modelType(properties.getModelType()) .objectMapper(objectMapper) .purchaseRepository(purchaseRepository) .propertyRepository(propertyRepository) .processor(processor) .build(); } }
[ "dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder" ]
[((1751, 2305), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2280), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2222), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2190), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2140), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2091), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 2049), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1999), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1951), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1911), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1863), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((1751, 1823), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((2637, 2851), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((2637, 2826), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((2637, 2768), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((2637, 2720), 'ru.vzotov.ai.application.PurchaseCategoryProcessor.builder'), ((3213, 3582), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3557), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3513), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3459), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3411), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3363), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((3213, 3307), 'ru.vzotov.ai.interfaces.facade.impl.AIFacadeImpl.builder'), ((4040, 4346), 
'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4321), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4283), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4227), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4171), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder'), ((4040, 4127), 'ru.vzotov.ai.application.PurchaseCategoryIndexer.builder')]
package fr.anthonyquere.talkwithme.core.ai.langchain; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.service.AiServices; import fr.anthonyquere.talkwithme.core.ai.langchain.services.Summary; import fr.anthonyquere.talkwithme.core.ai.langchain.services.TalkWithCompanion; import fr.anthonyquere.talkwithme.core.crud.companions.Companion; import fr.anthonyquere.talkwithme.core.crud.message.MessageRepository; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class CompanionAiService { @Bean public TalkWithCompanion buildAiCompanionService( ChatLanguageModel model, MessageRepository messageRepository ) { return AiServices.builder(TalkWithCompanion.class) .chatLanguageModel(model) .chatMemoryProvider(companion -> new CompanionChatMemory((Companion) companion, messageRepository)) .build(); } @Bean public Summary buildAiSummaryService( ChatLanguageModel model ) { return AiServices.builder(Summary.class) .chatLanguageModel(model) .build(); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((749, 945), 'dev.langchain4j.service.AiServices.builder'), ((749, 930), 'dev.langchain4j.service.AiServices.builder'), ((749, 824), 'dev.langchain4j.service.AiServices.builder'), ((1045, 1125), 'dev.langchain4j.service.AiServices.builder'), ((1045, 1110), 'dev.langchain4j.service.AiServices.builder')]
package com.revolvingSolutions.aicvgeneratorbackend.service; import com.revolvingSolutions.aicvgeneratorbackend.agent.*; import com.revolvingSolutions.aicvgeneratorbackend.constants.StaticValues; import com.revolvingSolutions.aicvgeneratorbackend.model.aimodels.*; import com.revolvingSolutions.aicvgeneratorbackend.model.user.User; import com.revolvingSolutions.aicvgeneratorbackend.request.AI.ChatRequest; import com.revolvingSolutions.aicvgeneratorbackend.request.AI.ExtractionRequest; import com.revolvingSolutions.aicvgeneratorbackend.request.AI.GenerationRequest; import com.revolvingSolutions.aicvgeneratorbackend.request.AI.UrlExtractionRequest; import com.revolvingSolutions.aicvgeneratorbackend.response.AI.ChatResponse; import com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse; import com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse; import dev.langchain4j.classification.TextClassifier; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.data.message.SystemMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.moderation.ModerationModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.retriever.Retriever; import dev.langchain4j.service.AiServices; import dev.langchain4j.store.memory.chat.ChatMemoryStore; import lombok.RequiredArgsConstructor; import org.jsoup.Jsoup; import org.jsoup.nodes.Document; import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.io.IOException; import java.time.Duration; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import static dev.langchain4j.data.message.SystemMessage.systemMessage; @Service 
@RequiredArgsConstructor public class LangChainService { @Value("${app.api.blockAI}") private Boolean block; private final UserService userService; public GenerationResponse GenerateCV( GenerationRequest request ) { if (block) { List<String> mylist = new ArrayList<>(); for (AIEmployment employment : request.getData().getExperience()) { mylist.add(StaticValues.employment_description); } return GenerationResponse.builder() .data( CVData.builder() .firstname(request.getData().getFirstname()) .lastname(request.getData().getLastname()) .phoneNumber(request.getData().getPhoneNumber()) .email(request.getData().getEmail()) .location(request.getData().getLocation()) .description(StaticValues.description) .employmenthistory(request.getData().getExperience()) .qualifications(request.getData().getQualifications()) .links(request.getData().getLinks()) .references(request.getData().getReferences()) .skills(request.getData().getSkills()) .build() ) .build(); } String description = interact(descriptionAgent(chatLanguageModel()),createProfessionalSummaryModel(request.getData()).toString()); if (description == null) description = "Description"; if (request.getData().getExperience() == null) request.getData().setExperience(new ArrayList<>()); if (request.getData().getQualifications() == null) request.getData().setQualifications(new ArrayList<>()); return GenerationResponse.builder() .data( CVData.builder() .firstname(request.getData().getFirstname()) .lastname(request.getData().getLastname()) .phoneNumber(request.getData().getPhoneNumber()) .email(request.getData().getEmail()) .location(request.getData().getLocation()) .description(description) .employmenthistory(request.getData().getExperience()) .qualifications(request.getData().getQualifications()) .links(request.getData().getLinks()) .skills(request.getData().getSkills()) .references(request.getData().getReferences()) .build() ) .build(); } private ProfessionalSummaryModel createProfessionalSummaryModel(AIInputData data) { return 
ProfessionalSummaryModel.builder() .firstname(data.getFirstname()) .lastname(data.getLastname()) .description(data.getDescription()) .location(data.getLocation()) .experience(data.getExperience()) .qualifications(data.getQualifications()) .skills(data.getSkills()) .build(); } public ExtractionResponse extractData( ExtractionRequest request ) throws Exception { if (request.getText().split(" ").length > 1000) { throw new Exception("Word Limit!!",null); } AIInputData data = extractionAgent(extractionChatLanguageModel()).extractPersonFrom(request.getText()); if (data.getFirstname() == null) data.setFirstname("First Name"); if (data.getLastname() == null) data.setLastname("Last Name"); if (data.getEmail() == null) data.setEmail("Email"); if (data.getLocation() == null) data.setLocation("Location"); if (data.getPhoneNumber() == null) data.setPhoneNumber("Phone number"); if (data.getDescription() == null) data.setDescription("Description"); if (data.getExperience() == null) data.setExperience(new ArrayList<>()); if (data.getQualifications() == null) data.setQualifications(new ArrayList<>()); if (data.getLinks() == null) data.setLinks(new ArrayList<>()); if (data.getSkills() == null) data.setReferences(new ArrayList<>()); return ExtractionResponse.builder() .data( data ) .build(); } public ExtractionResponse extractUrlData( UrlExtractionRequest request ) throws IOException { Document doc = Jsoup.connect(request.getUrl()).get(); AIInputData data = urlExtractionAgent(extractionChatLanguageModel()).extractPersonFrom(doc.toString()); if (data.getFirstname() == null) data.setFirstname("First Name"); if (data.getLastname() == null) data.setLastname("Last Name"); if (data.getEmail() == null) data.setEmail("Email"); if (data.getLocation() == null) data.setLocation("Location"); if (data.getPhoneNumber() == null) data.setPhoneNumber("Phone number"); if (data.getDescription() == null) data.setDescription("Description"); if (data.getExperience() == null) data.setExperience(new 
ArrayList<>()); if (data.getQualifications() == null) data.setQualifications(new ArrayList<>()); if (data.getLinks() == null) data.setLinks(new ArrayList<>()); if (data.getSkills() == null) data.setReferences(new ArrayList<>()); return ExtractionResponse.builder() .data( data ) .build(); } public User getAISafeModel() { return userService.getUser().getUser(); } public ChatResponse chatBotInteract(ChatRequest request) { List<String> messages = new ArrayList<>(); ChatBotAgent chatBot = chatBotAgent(chatBotLanguageModel(),request.getMessages()); String response = chatBot.chat(0,request.getUserMessage()); request.getMessages().add(request.getUserMessage()); request.getMessages().add(response); return ChatResponse.builder() .messages(request.getMessages()) .build(); } public static String interact(DescriptionAgent agent, String userMessage) { System.out.println("=========================================================================================="); System.out.println("[User]: " + userMessage); System.out.println("=========================================================================================="); String agentAnswer = agent.chat(userMessage); System.out.println("=========================================================================================="); System.out.println("[DescriptionAgent]: " + agentAnswer); System.out.println("=========================================================================================="); return agentAnswer; } private static String interact(EmploymentHistoryExpander agent, String userMessage) { System.out.println("=========================================================================================="); System.out.println("[User]: " + userMessage); System.out.println("=========================================================================================="); String agentAnswer = agent.chat(userMessage); System.out.println("=========================================================================================="); 
System.out.println("[EmploymentHistoryExpander]: " + agentAnswer); System.out.println("=========================================================================================="); return agentAnswer; } private static String interact(EducationDescriptionAgent agent, String userMessage) { System.out.println("=========================================================================================="); System.out.println("[User]: " + userMessage); System.out.println("=========================================================================================="); String agentAnswer = agent.chat(userMessage); System.out.println("=========================================================================================="); System.out.println("[EducationDescriptionAgent]: " + agentAnswer); System.out.println("=========================================================================================="); return agentAnswer; } private static String interact(ChatBotAgent agent, String userMessage) { System.out.println("=========================================================================================="); System.out.println("[User]: " + userMessage); System.out.println("=========================================================================================="); String agentAnswer = agent.chat(0,userMessage); System.out.println("=========================================================================================="); System.out.println("[EducationDescriptionAgent]: " + agentAnswer); System.out.println("=========================================================================================="); return agentAnswer; } @Value("${langchain4j.chat-model.openai.api-key}") private String apikey; @Value("${langchain4j.chat-model.openai.model-name}") private String modelName; @Value("${langchain4j.chat-model.openai.temperature}") private Double temperature; private final Retriever<TextSegment> retriever; private final ModerationModel moderationModel; private ChatLanguageModel 
chatLanguageModel() { return OpenAiChatModel.builder() .modelName(modelName) .apiKey(apikey) .temperature(temperature) .logRequests(false) .logResponses(false) .maxRetries(2) .maxTokens(1000) .topP(1.0) .timeout(Duration.ofMinutes(2)) .frequencyPenalty(0.0) .presencePenalty(0.0) .build(); } private ChatLanguageModel educationDescriptionChatModel() { return OpenAiChatModel.builder() .modelName(modelName) .apiKey(apikey) .temperature(0.4) .logRequests(false) .logResponses(false) .maxRetries(2) .maxTokens(1000) .topP(1.0) .timeout(Duration.ofMinutes(2)) .frequencyPenalty(0.0) .presencePenalty(0.0) .build(); } private ChatLanguageModel extractionChatLanguageModel() { return OpenAiChatModel.builder() .modelName(modelName) .apiKey(apikey) .temperature(temperature) .logRequests(false) .logResponses(false) .maxRetries(2) .maxTokens(1000) .topP(1.0) .timeout(Duration.ofMinutes(3)) .frequencyPenalty(0.0) .presencePenalty(0.0) .build(); } private ChatLanguageModel chatBotLanguageModel() { return OpenAiChatModel.builder() .modelName("gpt-4") .apiKey(apikey) .temperature(0.0) .logRequests(false) .logResponses(false) .maxRetries(2) .maxTokens(500) .topP(1.0) .timeout(Duration.ofMinutes(3)) .frequencyPenalty(0.0) .presencePenalty(0.0) .build(); } private DescriptionAgent descriptionAgent(ChatLanguageModel chatLanguageModel) { return AiServices.builder(DescriptionAgent.class) .chatLanguageModel(chatLanguageModel) .chatMemory(MessageWindowChatMemory.withMaxMessages(3)) .build(); } private EmploymentHistoryExpander employmentHistoryExpander(ChatLanguageModel chatLanguageModel) { return AiServices.builder(EmploymentHistoryExpander.class) .chatLanguageModel(chatLanguageModel) .chatMemory(MessageWindowChatMemory.withMaxMessages(3)) .build(); } private EducationDescriptionAgent educationDescriptionAgent(ChatLanguageModel chatLanguageModel) { return AiServices.builder(EducationDescriptionAgent.class) .chatLanguageModel(chatLanguageModel) 
.chatMemory(MessageWindowChatMemory.withMaxMessages(3)) .build(); } public ExtractionAgent extractionAgent(ChatLanguageModel extractionChatLanguageModel) { return AiServices.builder(ExtractionAgent.class) .chatLanguageModel(extractionChatLanguageModel) .chatMemory(MessageWindowChatMemory.withMaxMessages(5)) .build(); } public UrlExtractionAgent urlExtractionAgent(ChatLanguageModel extractionChatLanguageModel) { return AiServices.builder(UrlExtractionAgent.class) .chatLanguageModel(extractionChatLanguageModel) .chatMemory(MessageWindowChatMemory.withMaxMessages(5)) .build(); } public ChatBotAgent chatBotAgent(ChatLanguageModel chatLanguageModel, List<String> messages) { List<ChatMessage> messagesOff = new ArrayList<ChatMessage>(); boolean user = true; messagesOff.add( systemMessage( "The user has the following information: "+getAISafeModel().toString() ) ); for (int x=0;x<messages.size();x++) { if (user) { user = false; messagesOff.add(new UserMessage(messages.get(x))); } else { user = true; messagesOff.add(new AiMessage(messages.get(x))); } } PersistentChatMemoryStore store = new PersistentChatMemoryStore(messagesOff); return AiServices.builder(ChatBotAgent.class) .chatLanguageModel(chatLanguageModel) .chatMemoryProvider( memoryId-> MessageWindowChatMemory.builder() .chatMemoryStore(store) .maxMessages(100) .build() ) .moderationModel(moderationModel) .retriever(retriever) .build(); } } class PersistentChatMemoryStore implements ChatMemoryStore { public PersistentChatMemoryStore(List<ChatMessage> messages) { this.messages = messages; } private List<ChatMessage> messages; @Override public List<ChatMessage> getMessages(Object memoryId) { return messages; } @Override public void updateMessages(Object memoryId, List<ChatMessage> messages) { this.messages = messages; } @Override public void deleteMessages(Object memoryId) { messages = new ArrayList<>(); } }
[ "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.memory.chat.MessageWindowChatMemory.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((2489, 3569), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((2489, 3540), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((4022, 5025), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((4022, 5000), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.GenerationResponse.builder'), ((6662, 6781), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((6662, 6756), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((6927, 6964), 'org.jsoup.Jsoup.connect'), ((7847, 7970), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((7847, 7945), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ExtractionResponse.builder'), ((8463, 8559), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ChatResponse.builder'), ((8463, 8534), 'com.revolvingSolutions.aicvgeneratorbackend.response.AI.ChatResponse.builder'), ((12156, 12607), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12582), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12544), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12505), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12457), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12430), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12397), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12366), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12329), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12293), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12251), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12156, 12219), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13138), 
'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13113), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13075), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 13036), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12988), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12961), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12928), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12897), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12860), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12824), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12790), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((12695, 12758), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13675), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13650), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13612), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13573), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13525), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13498), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13465), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13434), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13397), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13361), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13319), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13224, 13287), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14194), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14169), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14131), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14092), 
'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14044), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 14017), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13985), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13954), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13917), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13881), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13847), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((13754, 13815), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((14302, 14495), 'dev.langchain4j.service.AiServices.builder'), ((14302, 14470), 'dev.langchain4j.service.AiServices.builder'), ((14302, 14398), 'dev.langchain4j.service.AiServices.builder'), ((14622, 14824), 'dev.langchain4j.service.AiServices.builder'), ((14622, 14799), 'dev.langchain4j.service.AiServices.builder'), ((14622, 14727), 'dev.langchain4j.service.AiServices.builder'), ((14951, 15153), 'dev.langchain4j.service.AiServices.builder'), ((14951, 15128), 'dev.langchain4j.service.AiServices.builder'), ((14951, 15056), 'dev.langchain4j.service.AiServices.builder'), ((15269, 15471), 'dev.langchain4j.service.AiServices.builder'), ((15269, 15446), 'dev.langchain4j.service.AiServices.builder'), ((15269, 15374), 'dev.langchain4j.service.AiServices.builder'), ((15593, 15798), 'dev.langchain4j.service.AiServices.builder'), ((15593, 15773), 'dev.langchain4j.service.AiServices.builder'), ((15593, 15701), 'dev.langchain4j.service.AiServices.builder'), ((16593, 17069), 'dev.langchain4j.service.AiServices.builder'), ((16593, 17044), 'dev.langchain4j.service.AiServices.builder'), ((16593, 17006), 'dev.langchain4j.service.AiServices.builder'), ((16593, 16956), 'dev.langchain4j.service.AiServices.builder'), ((16593, 16685), 'dev.langchain4j.service.AiServices.builder'), ((16758, 16938), 
'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((16758, 16897), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((16758, 16847), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')]
package com.learning.ai.config; import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; import com.zaxxer.hikari.HikariDataSource; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.rag.content.retriever.ContentRetriever; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.service.AiServices; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore; import java.io.IOException; import java.net.URI; import javax.sql.DataSource; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; @Configuration(proxyBeanMethods = false) public class AIConfig { @Bean AICustomerSupportAgent customerSupportAgent( ChatLanguageModel chatLanguageModel, // ChatTools bookingTools, ContentRetriever contentRetriever) { return AiServices.builder(AICustomerSupportAgent.class) .chatLanguageModel(chatLanguageModel) .chatMemory(MessageWindowChatMemory.withMaxMessages(20)) // .tools(bookingTools) .contentRetriever(contentRetriever) .build(); } @Bean ContentRetriever contentRetriever(EmbeddingStore<TextSegment> 
embeddingStore, EmbeddingModel embeddingModel) { // You will need to adjust these parameters to find the optimal setting, which will depend on two main factors: // - The nature of your data // - The embedding model you are using int maxResults = 1; double minScore = 0.6; return EmbeddingStoreContentRetriever.builder() .embeddingStore(embeddingStore) .embeddingModel(embeddingModel) .maxResults(maxResults) .minScore(minScore) .build(); } @Bean EmbeddingModel embeddingModel() { return new AllMiniLmL6V2EmbeddingModel(); } @Bean EmbeddingStore<TextSegment> embeddingStore( EmbeddingModel embeddingModel, ResourceLoader resourceLoader, DataSource dataSource) throws IOException { // Normally, you would already have your embedding store filled with your data. // However, for the purpose of this demonstration, we will: HikariDataSource hikariDataSource = (HikariDataSource) dataSource; String jdbcUrl = hikariDataSource.getJdbcUrl(); URI uri = URI.create(jdbcUrl.substring(5)); String host = uri.getHost(); int dbPort = uri.getPort(); String path = uri.getPath(); // 1. Create an postgres embedding store // dimension of the embedding is 384 (all-minilm) and 1536 (openai) EmbeddingStore<TextSegment> embeddingStore = PgVectorEmbeddingStore.builder() .host(host) .port(dbPort != -1 ? dbPort : 5432) .user(hikariDataSource.getUsername()) .password(hikariDataSource.getPassword()) .database(path.substring(1)) .table("ai_vector_store") .dimension(384) .build(); // 2. Load an example document (medicaid-wa-faqs.pdf) Resource pdfResource = resourceLoader.getResource("classpath:medicaid-wa-faqs.pdf"); Document document = loadDocument(pdfResource.getFile().toPath(), new ApachePdfBoxDocumentParser()); // 3. Split the document into segments 500 tokens each // 4. Convert segments into embeddings // 5. 
Store embeddings into embedding store // All this can be done manually, but we will use EmbeddingStoreIngestor to automate this: DocumentSplitter documentSplitter = DocumentSplitters.recursive(500, 0, new OpenAiTokenizer(GPT_3_5_TURBO)); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(documentSplitter) .embeddingModel(embeddingModel) .embeddingStore(embeddingStore) .build(); ingestor.ingest(document); return embeddingStore; } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder", "dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder", "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder" ]
[((1815, 2122), 'dev.langchain4j.service.AiServices.builder'), ((1815, 2097), 'dev.langchain4j.service.AiServices.builder'), ((1815, 1990), 'dev.langchain4j.service.AiServices.builder'), ((1815, 1917), 'dev.langchain4j.service.AiServices.builder'), ((2536, 2773), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2748), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2712), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2672), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2536, 2624), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((3692, 4060), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 4035), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 4003), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3961), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3916), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3858), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3804), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((3692, 3752), 'dev.langchain4j.store.embedding.pgvector.PgVectorEmbeddingStore.builder'), ((4747, 4952), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4747, 4927), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4747, 4879), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((4747, 4831), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package com.azure.migration.java.copilot.rag; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import static dev.langchain4j.model.azure.AzureOpenAiModelName.GPT_4_32K; @Configuration public class Configure { @Bean EmbeddingModel embeddingModel() { return new AllMiniLmL6V2EmbeddingModel(); } @Bean EmbeddingStoreIngestor ingestor(AzureAiSearchEmbeddingStore azureAiSearchEmbeddingStore, EmbeddingModel embeddingModel) { DocumentSplitter documentSplitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(GPT_4_32K)); return EmbeddingStoreIngestor.builder() .documentSplitter(documentSplitter) .embeddingStore(azureAiSearchEmbeddingStore) .embeddingModel(embeddingModel) .build(); } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((1088, 1306), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1088, 1281), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1088, 1233), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1088, 1172), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package com.example.demo.configuration; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class EmbeddingStoreConfiguration { @Bean EmbeddingStore<TextSegment> embeddingStore() { return AzureAiSearchEmbeddingStore.builder() .endpoint(System.getenv("AZURE_SEARCH_ENDPOINT")) .apiKey(System.getenv("AZURE_SEARCH_KEY")) .dimensions(1536) .build(); } }
[ "dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore.builder" ]
[((475, 696), 'dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore.builder'), ((475, 671), 'dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore.builder'), ((475, 637), 'dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore.builder'), ((475, 578), 'dev.langchain4j.store.embedding.azure.search.AzureAiSearchEmbeddingStore.builder')]
package net.herhoffer.mensabot.rag; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.loader.FileSystemDocumentLoader; import dev.langchain4j.data.document.loader.UrlDocumentLoader; import dev.langchain4j.data.document.parser.TextDocumentParser; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore; import io.quarkus.runtime.StartupEvent; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import java.io.File; import java.util.List; import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive; @ApplicationScoped public class MensaIngestor { private static final Logger LOG = LoggerFactory.getLogger(MensaIngestor.class); @Inject RedisEmbeddingStore store; @Inject EmbeddingModel embeddingModel; public void ingest(@Observes StartupEvent event) { LOG.info("ingesting documents"); Document docs = UrlDocumentLoader.load("https://neuland.app/api/mensa/", new TextDocumentParser()); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .embeddingStore(store) .embeddingModel(embeddingModel) .documentSplitter(recursive(500, 0)) .build(); ingestor.ingest(docs); LOG.info("Documents successfully ingested"); } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((1248, 1393), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1248, 1381), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1248, 1341), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1248, 1306), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package my.samples; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.loader.FileSystemDocumentLoader; import dev.langchain4j.data.document.parser.apache.pdfbox.ApachePdfBoxDocumentParser; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; public class LoadFINRARuletoES { public static void main(String[] args) { EmbeddingStore<TextSegment> embeddingStore = ElasticsearchEmbeddingStore.builder() .serverUrl("http://localhost:9200") .indexName("finra-rules-embeddings") .dimension(384) .build(); EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(DocumentSplitters.recursive(300, 0)) .embeddingModel(embeddingModel) .embeddingStore(embeddingStore) .build(); Path filePath = toPath("example-files/FINRARULES.pdf"); Document document = FileSystemDocumentLoader.loadDocument(filePath, new ApachePdfBoxDocumentParser()); document.metadata().add("fileName", filePath.getFileName().toString()); document.metadata().add("filePath", filePath.toString()); document.metadata().add("source", "FINRA"); document.metadata().add("category", "FINANCE"); ingestor.ingest(document); System.out.println( "Document ingested successfully" ); } private static Path toPath(String fileName) { try { // Corrected path assuming files are in src/main/resources/example-files URL fileUrl = LoadFINRARuletoES.class.getClassLoader().getResource( fileName); if (fileUrl == null) { throw new 
RuntimeException("Resource not found: " + fileName); } return Paths.get(fileUrl.toURI()); } catch (URISyntaxException e) { throw new RuntimeException("Failed to resolve URI for: " + fileName, e); } } }
[ "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder", "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((909, 1108), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((909, 1083), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((909, 1051), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((909, 998), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1228, 1452), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1228, 1427), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1228, 1379), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1228, 1331), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package dev.langchain4j.model.zhipu; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.agent.tool.ToolParameters; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.data.message.SystemMessage; import dev.langchain4j.data.message.ToolExecutionResultMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.internal.Utils; import dev.langchain4j.model.output.FinishReason; import dev.langchain4j.model.output.TokenUsage; import dev.langchain4j.model.zhipu.chat.AssistantMessage; import dev.langchain4j.model.zhipu.chat.ChatCompletionResponse; import dev.langchain4j.model.zhipu.chat.Function; import dev.langchain4j.model.zhipu.chat.FunctionCall; import dev.langchain4j.model.zhipu.chat.Message; import dev.langchain4j.model.zhipu.chat.Parameters; import dev.langchain4j.model.zhipu.chat.Tool; import dev.langchain4j.model.zhipu.chat.ToolCall; import dev.langchain4j.model.zhipu.chat.ToolMessage; import dev.langchain4j.model.zhipu.chat.ToolType; import dev.langchain4j.model.zhipu.embedding.EmbeddingResponse; import dev.langchain4j.model.zhipu.shared.Usage; import java.util.ArrayList; import java.util.List; import java.util.stream.Collectors; import static dev.langchain4j.internal.Exceptions.illegalArgument; import static dev.langchain4j.internal.Utils.isNullOrEmpty; import static dev.langchain4j.model.output.FinishReason.LENGTH; import static dev.langchain4j.model.output.FinishReason.OTHER; import static dev.langchain4j.model.output.FinishReason.STOP; import static dev.langchain4j.model.output.FinishReason.TOOL_EXECUTION; class DefaultZhipuAiHelper { public static List<Embedding> toEmbed(EmbeddingResponse response) { return response.getData().stream() .map(zhipuAiEmbedding -> 
Embedding.from(zhipuAiEmbedding.getEmbedding())) .collect(Collectors.toList()); } public static String toEmbedTexts(List<TextSegment> textSegments) { List<String> embedText = textSegments.stream() .map(TextSegment::text) .collect(Collectors.toList()); if (Utils.isNullOrEmpty(embedText)) { return null; } return embedText.get(0); } public static List<Tool> toTools(List<ToolSpecification> toolSpecifications) { return toolSpecifications.stream() .map(toolSpecification -> Tool.from(toFunction(toolSpecification))) .collect(Collectors.toList()); } private static Function toFunction(ToolSpecification toolSpecification) { return Function.builder() .name(toolSpecification.name()) .description(toolSpecification.description()) .parameters(toFunctionParameters(toolSpecification.parameters())) .build(); } private static Parameters toFunctionParameters(ToolParameters toolParameters) { return Parameters.builder() .properties(toolParameters.properties()) .required(toolParameters.required()) .build(); } public static List<Message> toZhipuAiMessages(List<ChatMessage> messages) { return messages.stream() .map(DefaultZhipuAiHelper::toZhipuAiMessage) .collect(Collectors.toList()); } private static Message toZhipuAiMessage(ChatMessage message) { if (message instanceof SystemMessage) { SystemMessage systemMessage = (SystemMessage) message; return dev.langchain4j.model.zhipu.chat.SystemMessage.builder() .content(systemMessage.text()) .build(); } if (message instanceof UserMessage) { UserMessage userMessage = (UserMessage) message; return dev.langchain4j.model.zhipu.chat.UserMessage.builder() .content(userMessage.text()) .build(); } if (message instanceof AiMessage) { AiMessage aiMessage = (AiMessage) message; if (!aiMessage.hasToolExecutionRequests()) { return AssistantMessage.builder() .content(aiMessage.text()) .build(); } List<ToolCall> toolCallArrayList = new ArrayList<>(); for (ToolExecutionRequest executionRequest : aiMessage.toolExecutionRequests()) { 
toolCallArrayList.add(ToolCall.builder() .function( FunctionCall.builder() .name(executionRequest.name()) .arguments(executionRequest.arguments()) .build() ) .type(ToolType.FUNCTION) .id(executionRequest.id()) .build() ); } return AssistantMessage.builder() .content(aiMessage.text()) .toolCalls(toolCallArrayList) .build(); } if (message instanceof ToolExecutionResultMessage) { ToolExecutionResultMessage resultMessage = (ToolExecutionResultMessage) message; return ToolMessage.builder() .content(resultMessage.text()) .build(); } throw illegalArgument("Unknown message type: " + message.type()); } public static AiMessage aiMessageFrom(ChatCompletionResponse response) { Message message = response.getChoices().get(0).getMessage(); AssistantMessage assistantMessage = (AssistantMessage) message; if (isNullOrEmpty(assistantMessage.getToolCalls())) { return AiMessage.from(assistantMessage.getContent()); } return AiMessage.from(specificationsFrom(assistantMessage.getToolCalls())); } public static List<ToolExecutionRequest> specificationsFrom(List<ToolCall> toolCalls) { List<ToolExecutionRequest> specifications = new ArrayList<>(toolCalls.size()); for (ToolCall toolCall : toolCalls) { specifications.add( ToolExecutionRequest.builder() .id(toolCall.getId()) .name(toolCall.getFunction().getName()) .arguments(toolCall.getFunction().getArguments()) .build() ); } return specifications; } public static TokenUsage tokenUsageFrom(Usage zhipuUsage) { if (zhipuUsage == null) { return null; } return new TokenUsage( zhipuUsage.getPromptTokens(), zhipuUsage.getCompletionTokens(), zhipuUsage.getTotalTokens() ); } public static FinishReason finishReasonFrom(String finishReason) { if (finishReason == null) { return null; } switch (finishReason) { case "stop": return STOP; case "length": return LENGTH; case "tool_calls": return TOOL_EXECUTION; default: return OTHER; } } }
[ "dev.langchain4j.model.zhipu.chat.ToolMessage.builder", "dev.langchain4j.agent.tool.ToolExecutionRequest.builder", "dev.langchain4j.model.zhipu.chat.Function.builder", "dev.langchain4j.model.zhipu.chat.AssistantMessage.builder", "dev.langchain4j.model.zhipu.chat.FunctionCall.builder", "dev.langchain4j.model.zhipu.chat.Parameters.builder", "dev.langchain4j.model.zhipu.chat.ToolCall.builder" ]
[((2790, 3025), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((2790, 3000), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((2790, 2918), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((2790, 2856), 'dev.langchain4j.model.zhipu.chat.Function.builder'), ((3133, 3288), 'dev.langchain4j.model.zhipu.chat.Parameters.builder'), ((3133, 3263), 'dev.langchain4j.model.zhipu.chat.Parameters.builder'), ((3133, 3210), 'dev.langchain4j.model.zhipu.chat.Parameters.builder'), ((4327, 4437), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((4327, 4404), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((4651, 5119), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4651, 5086), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4651, 5035), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4651, 4986), 'dev.langchain4j.model.zhipu.chat.ToolCall.builder'), ((4737, 4960), 'dev.langchain4j.model.zhipu.chat.FunctionCall.builder'), ((4737, 4911), 'dev.langchain4j.model.zhipu.chat.FunctionCall.builder'), ((4737, 4830), 'dev.langchain4j.model.zhipu.chat.FunctionCall.builder'), ((5172, 5324), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((5172, 5295), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((5172, 5245), 'dev.langchain4j.model.zhipu.chat.AssistantMessage.builder'), ((5510, 5611), 'dev.langchain4j.model.zhipu.chat.ToolMessage.builder'), ((5510, 5582), 'dev.langchain4j.model.zhipu.chat.ToolMessage.builder'), ((6430, 6693), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6430, 6656), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6430, 6578), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((6430, 6510), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')]
import dev.langchain4j.model.azure.AzureOpenAiChatModel; public class AzureOpenAiChatModelExamples { static class Simple_Prompt { public static void main(String[] args) { AzureOpenAiChatModel model = AzureOpenAiChatModel.builder() .apiKey(System.getenv("AZURE_OPENAI_KEY")) .endpoint(System.getenv("AZURE_OPENAI_ENDPOINT")) .deploymentName(System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME")) .temperature(0.3) .logRequestsAndResponses(true) .build(); String response = model.generate("Provide 3 short bullet points explaining why Java is awesome"); System.out.println(response); } } }
[ "dev.langchain4j.model.azure.AzureOpenAiChatModel.builder" ]
[((228, 592), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 563), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 512), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 474), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 391), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder'), ((228, 321), 'dev.langchain4j.model.azure.AzureOpenAiChatModel.builder')]
package dev.langchain4j.rag.content.retriever; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.rag.content.Content; import dev.langchain4j.rag.query.Query; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingSearchRequest; import dev.langchain4j.store.embedding.EmbeddingSearchResult; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.filter.Filter; import lombok.Builder; import java.util.List; import java.util.function.Function; import static dev.langchain4j.internal.Utils.getOrDefault; import static dev.langchain4j.internal.ValidationUtils.*; import static java.util.stream.Collectors.toList; /** * A {@link ContentRetriever} that retrieves from an {@link EmbeddingStore}. * <br> * By default, it retrieves the 3 most similar {@link Content}s to the provided {@link Query}, * without any {@link Filter}ing. * <br> * <br> * Configurable parameters (optional): * <br> * - {@code maxResults}: The maximum number of {@link Content}s to retrieve. * <br> * - {@code dynamicMaxResults}: It is a {@link Function} that accepts a {@link Query} and returns a {@code maxResults} value. * It can be used to dynamically define {@code maxResults} value, depending on factors such as the query, * the user (using Metadata#chatMemoryId()} from {@link Query#metadata()}), etc. * <br> * - {@code minScore}: The minimum relevance score for the returned {@link Content}s. * {@link Content}s scoring below {@code #minScore} are excluded from the results. * <br> * - {@code dynamicMinScore}: It is a {@link Function} that accepts a {@link Query} and returns a {@code minScore} value. * It can be used to dynamically define {@code minScore} value, depending on factors such as the query, * the user (using Metadata#chatMemoryId()} from {@link Query#metadata()}), etc. 
* <br> * - {@code filter}: The {@link Filter} that will be applied to a {@link dev.langchain4j.data.document.Metadata} in the * {@link Content#textSegment()}. * <br> * - {@code dynamicFilter}: It is a {@link Function} that accepts a {@link Query} and returns a {@code filter} value. * It can be used to dynamically define {@code filter} value, depending on factors such as the query, * the user (using Metadata#chatMemoryId()} from {@link Query#metadata()}), etc. */ public class EmbeddingStoreContentRetriever implements ContentRetriever { public static final Function<Query, Integer> DEFAULT_MAX_RESULTS = (query) -> 3; public static final Function<Query, Double> DEFAULT_MIN_SCORE = (query) -> 0.0; public static final Function<Query, Filter> DEFAULT_FILTER = (query) -> null; private final EmbeddingStore<TextSegment> embeddingStore; private final EmbeddingModel embeddingModel; private final Function<Query, Integer> maxResultsProvider; private final Function<Query, Double> minScoreProvider; private final Function<Query, Filter> filterProvider; public EmbeddingStoreContentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel) { this( embeddingStore, embeddingModel, DEFAULT_MAX_RESULTS, DEFAULT_MIN_SCORE, DEFAULT_FILTER ); } public EmbeddingStoreContentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel, int maxResults) { this( embeddingStore, embeddingModel, (query) -> maxResults, DEFAULT_MIN_SCORE, DEFAULT_FILTER ); } public EmbeddingStoreContentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel, Integer maxResults, Double minScore) { this( embeddingStore, embeddingModel, (query) -> maxResults, (query) -> minScore, DEFAULT_FILTER ); } @Builder private EmbeddingStoreContentRetriever(EmbeddingStore<TextSegment> embeddingStore, EmbeddingModel embeddingModel, Function<Query, Integer> dynamicMaxResults, Function<Query, Double> dynamicMinScore, Function<Query, Filter> dynamicFilter) { 
this.embeddingStore = ensureNotNull(embeddingStore, "embeddingStore"); this.embeddingModel = ensureNotNull(embeddingModel, "embeddingModel"); this.maxResultsProvider = getOrDefault(dynamicMaxResults, DEFAULT_MAX_RESULTS); this.minScoreProvider = getOrDefault(dynamicMinScore, DEFAULT_MIN_SCORE); this.filterProvider = getOrDefault(dynamicFilter, DEFAULT_FILTER); } public static class EmbeddingStoreContentRetrieverBuilder { public EmbeddingStoreContentRetrieverBuilder maxResults(Integer maxResults) { if (maxResults != null) { dynamicMaxResults = (query) -> ensureGreaterThanZero(maxResults, "maxResults"); } return this; } public EmbeddingStoreContentRetrieverBuilder minScore(Double minScore) { if (minScore != null) { dynamicMinScore = (query) -> ensureBetween(minScore, 0, 1, "minScore"); } return this; } public EmbeddingStoreContentRetrieverBuilder filter(Filter filter) { if (filter != null) { dynamicFilter = (query) -> filter; } return this; } } @Override public List<Content> retrieve(Query query) { Embedding embeddedQuery = embeddingModel.embed(query.text()).content(); EmbeddingSearchRequest searchRequest = EmbeddingSearchRequest.builder() .queryEmbedding(embeddedQuery) .maxResults(maxResultsProvider.apply(query)) .minScore(minScoreProvider.apply(query)) .filter(filterProvider.apply(query)) .build(); EmbeddingSearchResult<TextSegment> searchResult = embeddingStore.search(searchRequest); return searchResult.matches().stream() .map(EmbeddingMatch::embedded) .map(Content::from) .collect(toList()); } }
[ "dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder" ]
[((6197, 6472), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6197, 6447), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6197, 6394), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6197, 6337), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder'), ((6197, 6276), 'dev.langchain4j.store.embedding.EmbeddingSearchRequest.builder')]
package ma.enset.projet.Dao; import dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore; import java.time.Duration; public class VectorBd implements Dao<ChromaEmbeddingStore>{ private String CHROMA_BASE_URL; private String CHROMA_COLLECTION_NAME; public VectorBd(String CHROMA_BASE_URL, String CHROMA_COLLECTION_NAME) { this.CHROMA_BASE_URL = CHROMA_BASE_URL; this.CHROMA_COLLECTION_NAME = CHROMA_COLLECTION_NAME; } public VectorBd(){ } public String getCHROMA_BASE_URL() { return CHROMA_BASE_URL; } public void setCHROMA_BASE_URL(String CHROMA_BASE_URL) { this.CHROMA_BASE_URL = CHROMA_BASE_URL; } public String getCHROMA_COLLECTION_NAME() { return CHROMA_COLLECTION_NAME; } public void setCHROMA_COLLECTION_NAME(String CHROMA_COLLECTION_NAME) { this.CHROMA_COLLECTION_NAME = CHROMA_COLLECTION_NAME; } @Override public ChromaEmbeddingStore getConnection() { return ChromaEmbeddingStore.builder() .baseUrl(CHROMA_BASE_URL) .collectionName(CHROMA_COLLECTION_NAME) .timeout(Duration.ofSeconds(6)) .build(); } }
[ "dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder" ]
[((1005, 1206), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((1005, 1181), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((1005, 1133), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder'), ((1005, 1077), 'dev.langchain4j.store.embedding.chroma.ChromaEmbeddingStore.builder')]
package de.htwg.rag.ingestor; import dev.langchain4j.data.document.Document; import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import io.quarkiverse.langchain4j.pgvector.PgVectorEmbeddingStore; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; import java.util.List; import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive; @ApplicationScoped public class DocumentIngestor { @Inject PgVectorEmbeddingStore store; @Inject AllMiniLmL6V2QuantizedEmbeddingModel embeddingModel; // creates the Ingestor and ingests the documents into the store. // Maybe adapt the overlapSize or change the Documentsplitter for better performance public void ingest(List<Document> documents) { EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .embeddingStore(store) .embeddingModel(embeddingModel) .documentSplitter(recursive(300, 20)) .build(); if (documents.isEmpty()) { throw new IllegalArgumentException("No documents to ingest"); } else { ingestor.ingest(documents); } } public void setStore(PgVectorEmbeddingStore store) { this.store = store; } public void setEmbeddingModel(AllMiniLmL6V2QuantizedEmbeddingModel embeddingModel) { this.embeddingModel = embeddingModel; } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((896, 1094), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((896, 1069), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((896, 1015), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((896, 967), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package io.quarkiverse.langchain4j.samples; import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive; import java.util.List; import jakarta.enterprise.context.ApplicationScoped; import jakarta.inject.Inject; import dev.langchain4j.data.document.Document; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import io.quarkiverse.langchain4j.chroma.ChromaEmbeddingStore; @ApplicationScoped public class IngestorExampleWithChroma { /** * The embedding store (the database). * The bean is provided by the quarkus-langchain4j-chroma extension. */ @Inject ChromaEmbeddingStore store; /** * The embedding model (how is computed the vector of a document). * The bean is provided by the LLM (like openai) extension. */ @Inject EmbeddingModel embeddingModel; public void ingest(List<Document> documents) { EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .embeddingStore(store) .embeddingModel(embeddingModel) .documentSplitter(recursive(500, 0)) .build(); // Warning - this can take a long time... ingestor.ingest(documents); } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((995, 1192), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((995, 1167), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((995, 1114), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((995, 1066), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
package com.magicrepokit.chat.component; import cn.hutool.core.util.StrUtil; import com.magicrepokit.chat.constant.GptModel; import com.magicrepokit.langchain.config.ConfigProperties; import dev.langchain4j.data.message.SystemMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.Tokenizer; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiEmbeddingModel; import dev.langchain4j.model.openai.OpenAiStreamingChatModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.retriever.EmbeddingStoreRetriever; import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore; import lombok.AllArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.springframework.stereotype.Component; import java.util.List; @Component @AllArgsConstructor @Slf4j public class LangchainComponent { private ConfigProperties langchainConfigProperties; /** * 获取elasticsearch存储 * * @param indexName 索引名称 * @return ElasticsearchEmbeddingStore */ public ElasticsearchEmbeddingStore getDefaultElasticsearchEmbeddingStore(String indexName) { if (!langchainConfigProperties.getEnabled()) { log.error("未开启elasticsearch"); return null; } String elasticHost = langchainConfigProperties.getElasticHost(); int elasticPort = langchainConfigProperties.getElasticPort(); String url = StrUtil.format("{}:{}", elasticHost, elasticPort); return ElasticsearchEmbeddingStore.builder() .serverUrl(url) .userName(langchainConfigProperties.getElasticUsername()) .password(langchainConfigProperties.getElasticPassword()) .indexName(indexName) .dimension(1536) .build(); } /** * 向量检索 * * @param indexName 索引名称 * @param question 问题 * @return List<TextSegment> */ public List<TextSegment> findRelevant(String indexName, String question) { EmbeddingStoreRetriever embeddingStoreRetriever = new 
EmbeddingStoreRetriever(getDefaultElasticsearchEmbeddingStore(indexName), getDefaultEmbeddingModel(), 5, 0.8 ); return embeddingStoreRetriever.findRelevant(question); } /** * 向量检索 * * @param indexName 索引名称 * @param question 问题 * @param maxResult 最大结果 * @param minScore 最小分数 * @return List<TextSegment> */ public List<TextSegment> findRelevant(String indexName, String question, int maxResult, double minScore) { if(maxResult<=0){ maxResult=5; } if(minScore<=0){ minScore=0.7; } EmbeddingStoreRetriever embeddingStoreRetriever = new EmbeddingStoreRetriever(getDefaultElasticsearchEmbeddingStore(indexName), getDefaultEmbeddingModel(), maxResult, minScore ); return embeddingStoreRetriever.findRelevant(question); } /** * 获取分词模型 */ public EmbeddingModel getDefaultEmbeddingModel() { return OpenAiEmbeddingModel.builder().apiKey("sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd").baseUrl("https://api.chatanywhere.tech/v1").build(); } /** * 获取分词器 * * @param gptModel * @return */ public Tokenizer getOpenAiTokenizer(GptModel gptModel) { return new OpenAiTokenizer(gptModel.getAcutualModelName()); } /** * 获取默认聊天模型 * * @return StreamingChatLanguageModel */ private StreamingChatLanguageModel getStreamingDefaultChatLanguageModel(GptModel gptModel) { return OpenAiStreamingChatModel.builder() .apiKey("sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd") .baseUrl("https://api.chatanywhere.tech/") .modelName(gptModel.getAcutualModelName()) .build(); } private StreamingChatLanguageModel getStreamingDefaultChatLanguageModel(GptModel gptModel,Double temperature) { return OpenAiStreamingChatModel.builder() .apiKey("sk-gRbZ9FJz2E7c7mwO5JOvp2u2rtoWoAbg12CxDy3Y25eLeDvd") .baseUrl("https://api.chatanywhere.tech/") .modelName(gptModel.getAcutualModelName()) .temperature(temperature) .build(); } /** * 获取聊天模型 * * @return StreamingChatLanguageModel */ public StreamingChatLanguageModel getStreamingChatLanguageModel(GptModel gptModel) { //TODO 获取用户信息 1.查询用户key 2.如果有使用用户,如果没有使用默认 return 
getStreamingDefaultChatLanguageModel(gptModel); } public StreamingChatLanguageModel getStreamingChatLanguageModel(GptModel gptModel,Double temperature) { //TODO 获取用户信息 1.查询用户key 2.如果有使用用户,如果没有使用默认 return getStreamingDefaultChatLanguageModel(gptModel,temperature); } public SystemMessage getDefalutSystemMessage(GptModel gptModel){ String modelName = gptModel.getModelName(); String prompt = "你是MagicRepoKit研发的"+modelName+"模型。别人问你有关你的身份信息,你可以回答:我是MagicRepoKit研发的"+modelName+"模型。"; return new SystemMessage(prompt); } }
[ "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder", "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder", "dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder" ]
[((1628, 1941), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1916), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1883), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1845), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1771), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1628, 1697), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((3351, 3495), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3351, 3487), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3351, 3443), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((3933, 4189), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3933, 4164), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3933, 4105), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((3933, 4046), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4627), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4602), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4560), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4501), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder'), ((4329, 4442), 'dev.langchain4j.model.openai.OpenAiStreamingChatModel.builder')]
package com.example.demo; import java.time.Duration; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.structured.StructuredPrompt; import dev.langchain4j.model.input.structured.StructuredPromptProcessor; import dev.langchain4j.model.openai.OpenAiChatModel; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.PropertySource; import org.springframework.stereotype.Component; public class _03_StructuredPromptTemplates { @StructuredPrompt({ "Design a medical insurance plan for {{coverageType}} with the following features:", "Structure your answer as follows:", "Insurance Plan Name: ...", "Description: ...", "Coverage Duration: ...", "Covered Medical Services:", "- ...", "- ...", "Financial Details:", "Premium: ...", "Deductible: ...", "Claims Process:", "- ...", "- ..." }) static class CreateMedicalInsurancePlan{ String coverageType; CreateMedicalInsurancePlan(String coverageType){ this.coverageType = coverageType; } } public static void main(String[] args) { Duration duration = Duration.ofSeconds(60); ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.OPENAI_API_KEY) .timeout(duration) .build(); ///ComprehensiveFamilyPlan - DentalAndVisionInsurance - MaternityCoverage CreateMedicalInsurancePlan createMedicalInsurancePlan = new CreateMedicalInsurancePlan("BasicHealthCoverage"); Prompt prompt = StructuredPromptProcessor.toPrompt(createMedicalInsurancePlan); String plan = model.generate(prompt.text()); System.out.println(plan); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1297, 1394), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1297, 1381), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1297, 1358), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package gcfv2; import java.io.BufferedWriter; import com.google.gson.Gson; import com.google.gson.JsonObject; import java.io.BufferedReader; import java.io.InputStreamReader; import java.util.stream.Collectors; //Logging packages import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.cloud.functions.HttpFunction; import com.google.cloud.functions.HttpRequest; import com.google.cloud.functions.HttpResponse; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.vertexai.VertexAiLanguageModel; public class TextPredictFunction implements HttpFunction { private static Logger logger = LoggerFactory.getLogger(TextPredictFunction.class); public void service(final HttpRequest request, final HttpResponse response) throws Exception { final BufferedWriter writer = response.getWriter(); //Read the environment variables which will be passed to the Vertex AI Model for initialization String GCP_REGION = System.getenv("GCP_REGION"); String GCP_PROJECT = System.getenv("GCP_PROJECT"); //Fetch the prompt from the JSON body in the request BufferedReader reader = new BufferedReader(new InputStreamReader(request.getInputStream())); String jsonRequest = reader.lines().collect(Collectors.joining()); // Parse the JSON data Gson gson = new Gson(); JsonObject jsonRequestObject = gson.fromJson(jsonRequest, JsonObject.class); // Get the data from the JSON object String prompt = jsonRequestObject.get("prompt").getAsString(); if (prompt.length() > 0) { VertexAiLanguageModel vertexAiLanguageModel = VertexAiLanguageModel.builder() .endpoint("us-central1-aiplatform.googleapis.com:443") .project(GCP_PROJECT) .location(GCP_REGION) .publisher("google") .modelName("text-bison@001") .temperature(1.0) .maxOutputTokens(50) .topK(0) .topP(0.0) .maxRetries(3) .build(); Response<String> modelResponse = vertexAiLanguageModel.generate(prompt); logger.info("Result: " + modelResponse.content()); writer.write(modelResponse.content()); } else { logger.info("No prompt 
provided."); writer.write("No prompt provided."); } } }
[ "dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder" ]
[((1609, 1982), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1963), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1938), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1917), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1898), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1867), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1839), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1800), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1769), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1737), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((1609, 1705), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder')]
package com.docuverse.backend.configuration; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiEmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore; import io.github.cdimascio.dotenv.Dotenv; import org.springframework.context.annotation.Bean; import org.springframework.core.io.Resource; import org.springframework.core.io.ResourceLoader; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.UrlDocumentLoader; import java.net.URISyntaxException; import java.net.URL; import java.nio.file.Path; import java.nio.file.Paths; import java.util.List; import java.net.URL; import static dev.langchain4j.model.openai.OpenAiModelName.TEXT_EMBEDDING_ADA_002; import static dev.langchain4j.data.document.UrlDocumentLoader.load; public class PineconeConfiguration { private static Path toPath(String fileName) { try { URL fileUrl = PineconeConfiguration.class.getResource(fileName); return Paths.get(fileUrl.toURI()); } catch (URISyntaxException e) { throw new RuntimeException(e); } } Dotenv dotenv = Dotenv.load(); @Bean EmbeddingStore<TextSegment> embeddingStore(EmbeddingModel embeddingModel, ResourceLoader resourceLoader) { EmbeddingStore<TextSegment> embeddingStore = PineconeEmbeddingStore .builder() .apiKey(dotenv.get("PINECONE_API_KEY")) .environment(dotenv.get("PINECONE_ENVIRONMENT")) .projectId(dotenv.get("PINECONE_PROJECTID")) .index(dotenv.get("PINECONE_INDEX")) .build(); // URL fileUrl = "some"; // Document document = load() return embeddingStore; } OpenAiEmbeddingModel embeddingModel = OpenAiEmbeddingModel.builder() .apiKey(dotenv.get("OPENAI_API_KEY")) .modelName(TEXT_EMBEDDING_ADA_002) .build(); String url = ""; }
[ "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder" ]
[((1984, 2132), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1984, 2111), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1984, 2064), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')]
package ai.equity.salt.openai.model; import ai.equity.salt.openai.OpenAiProperties; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiLanguageModel; import lombok.RequiredArgsConstructor; import org.springframework.stereotype.Component; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO_1106; @Component @RequiredArgsConstructor public class OpenAiModelFactory { private final OpenAiProperties properties; public OpenAiLanguageModel create() { return OpenAiLanguageModel.builder() .apiKey(properties.key()) .logRequests(true) .logResponses(true) .build(); } public OpenAiChatModel createDefaultChatModel() { return OpenAiChatModel.builder() .modelName(GPT_3_5_TURBO_1106) .apiKey(properties.key()) .maxTokens(1024) .temperature(0.0) .topP(0.0) .logRequests(true) .logResponses(true) .build(); } }
[ "dev.langchain4j.model.openai.OpenAiLanguageModel.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((540, 707), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((540, 682), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((540, 646), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((540, 611), 'dev.langchain4j.model.openai.OpenAiLanguageModel.builder'), ((785, 1089), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 1064), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 1028), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 993), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 966), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 932), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 899), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((785, 857), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package io.quarkiverse.langchain4j.openai; import static dev.langchain4j.internal.RetryUtils.withRetry; import static dev.langchain4j.model.openai.OpenAiModelName.DALL_E_2; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.time.Duration; import java.util.Base64; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.stream.Collectors; import dev.ai4j.openai4j.image.GenerateImagesRequest; import dev.ai4j.openai4j.image.GenerateImagesResponse; import dev.langchain4j.data.image.Image; import dev.langchain4j.model.image.ImageModel; import dev.langchain4j.model.output.Response; @SuppressWarnings("OptionalUsedAsFieldOrParameterType") public class QuarkusOpenAiImageModel implements ImageModel { private final String modelName; private final String size; private final String quality; private final String style; private final Optional<String> user; private final String responseFormat; private final Integer maxRetries; private final Optional<Path> persistDirectory; private final QuarkusOpenAiClient client; public QuarkusOpenAiImageModel(String baseUrl, String apiKey, String organizationId, String modelName, String size, String quality, String style, Optional<String> user, String responseFormat, Duration timeout, Integer maxRetries, Boolean logRequests, Boolean logResponses, Optional<Path> persistDirectory) { this.modelName = modelName; this.size = size; this.quality = quality; this.style = style; this.user = user; this.responseFormat = responseFormat; this.maxRetries = maxRetries; this.persistDirectory = persistDirectory; this.client = QuarkusOpenAiClient.builder() .baseUrl(baseUrl) .openAiApiKey(apiKey) .organizationId(organizationId) .callTimeout(timeout) .connectTimeout(timeout) .readTimeout(timeout) 
.writeTimeout(timeout) .logRequests(logRequests) .logResponses(logResponses) .build(); } @Override public Response<Image> generate(String prompt) { GenerateImagesRequest request = requestBuilder(prompt).build(); GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute(); persistIfNecessary(response); return Response.from(fromImageData(response.data().get(0))); } @Override public Response<List<Image>> generate(String prompt, int n) { GenerateImagesRequest request = requestBuilder(prompt).n(n).build(); GenerateImagesResponse response = withRetry(() -> client.imagesGeneration(request), maxRetries).execute(); persistIfNecessary(response); return Response.from( response.data().stream().map(QuarkusOpenAiImageModel::fromImageData).collect(Collectors.toList())); } private void persistIfNecessary(GenerateImagesResponse response) { if (persistDirectory.isEmpty()) { return; } Path persistTo = persistDirectory.get(); try { Files.createDirectories(persistTo); } catch (IOException e) { throw new UncheckedIOException(e); } for (GenerateImagesResponse.ImageData data : response.data()) { try { data.url( data.url() != null ? 
FilePersistor.persistFromUri(data.url(), persistTo).toUri() : FilePersistor.persistFromBase64String(data.b64Json(), persistTo).toUri()); } catch (IOException e) { throw new UncheckedIOException(e); } } } private static Image fromImageData(GenerateImagesResponse.ImageData data) { return Image.builder().url(data.url()).base64Data(data.b64Json()).revisedPrompt(data.revisedPrompt()).build(); } private GenerateImagesRequest.Builder requestBuilder(String prompt) { var builder = GenerateImagesRequest .builder() .prompt(prompt) .size(size) .quality(quality) .style(style) .responseFormat(responseFormat); if (DALL_E_2.equals(modelName)) { builder.model(dev.ai4j.openai4j.image.ImageModel.DALL_E_2); } if (user.isPresent()) { builder.user(user.get()); } return builder; } public static Builder builder() { return new Builder(); } public static class Builder { private String baseUrl; private String apiKey; private String organizationId; private String modelName; private String size; private String quality; private String style; private Optional<String> user; private String responseFormat; private Duration timeout; private Integer maxRetries; private Boolean logRequests; private Boolean logResponses; private Optional<Path> persistDirectory; public Builder baseUrl(String baseUrl) { this.baseUrl = baseUrl; return this; } public Builder apiKey(String apiKey) { this.apiKey = apiKey; return this; } public Builder organizationId(String organizationId) { this.organizationId = organizationId; return this; } public Builder timeout(Duration timeout) { this.timeout = timeout; return this; } public Builder maxRetries(Integer maxRetries) { this.maxRetries = maxRetries; return this; } public Builder logRequests(Boolean logRequests) { this.logRequests = logRequests; return this; } public Builder logResponses(Boolean logResponses) { this.logResponses = logResponses; return this; } public Builder modelName(String modelName) { this.modelName = modelName; return this; } public Builder size(String 
size) { this.size = size; return this; } public Builder quality(String quality) { this.quality = quality; return this; } public Builder style(String style) { this.style = style; return this; } public Builder user(Optional<String> user) { this.user = user; return this; } public Builder responseFormat(String responseFormat) { this.responseFormat = responseFormat; return this; } public Builder persistDirectory(Optional<Path> persistDirectory) { this.persistDirectory = persistDirectory; return this; } public QuarkusOpenAiImageModel build() { return new QuarkusOpenAiImageModel(baseUrl, apiKey, organizationId, modelName, size, quality, style, user, responseFormat, timeout, maxRetries, logRequests, logResponses, persistDirectory); } } /** * Copied from {@code dev.ai4j.openai4j.FilePersistor} */ private static class FilePersistor { static Path persistFromUri(URI uri, Path destinationFolder) { try { Path fileName = Paths.get(uri.getPath()).getFileName(); Path destinationFilePath = destinationFolder.resolve(fileName); try (InputStream inputStream = uri.toURL().openStream()) { java.nio.file.Files.copy(inputStream, destinationFilePath, StandardCopyOption.REPLACE_EXISTING); } return destinationFilePath; } catch (IOException e) { throw new RuntimeException("Error persisting file from URI: " + uri, e); } } public static Path persistFromBase64String(String base64EncodedString, Path destinationFolder) throws IOException { byte[] decodedBytes = Base64.getDecoder().decode(base64EncodedString); Path destinationFile = destinationFolder.resolve(randomFileName()); Files.write(destinationFile, decodedBytes, StandardOpenOption.CREATE); return destinationFile; } private static String randomFileName() { return UUID.randomUUID().toString().replaceAll("-", "").substring(0, 20); } } }
[ "dev.langchain4j.data.image.Image.builder" ]
[((4164, 4266), 'dev.langchain4j.data.image.Image.builder'), ((4164, 4258), 'dev.langchain4j.data.image.Image.builder'), ((4164, 4222), 'dev.langchain4j.data.image.Image.builder'), ((4164, 4195), 'dev.langchain4j.data.image.Image.builder'), ((7813, 7851), 'java.nio.file.Paths.get'), ((8498, 8545), 'java.util.Base64.getDecoder'), ((8827, 8892), 'java.util.UUID.randomUUID'), ((8827, 8875), 'java.util.UUID.randomUUID'), ((8827, 8855), 'java.util.UUID.randomUUID')]
package org.acme; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.ollama.OllamaStreamingChatModel; import dev.langchain4j.model.output.Response; import org.acme.persona.Persona; import java.util.List; import java.util.Scanner; import java.util.concurrent.CountDownLatch; public class StreamingChatRunner { public static final String MODEL_NAME = "dolphin-mixtral"; public static final String BASE_URL = "http://localhost:11434"; public static void main(String[] args) throws InterruptedException { // Build the ChatLanguageModel StreamingChatLanguageModel model = OllamaStreamingChatModel.builder().baseUrl(BASE_URL).modelName(MODEL_NAME) .temperature(0.7).repeatPenalty(1.5).build(); var input = ""; var scanner = new Scanner(System.in); var latch = new CountDownLatch(1); do { System.out.print("> "); input = scanner.nextLine(); converse(model, input, latch); latch.await(); } while(!input.equals("bye")); } private static void converse(StreamingChatLanguageModel model, String message, CountDownLatch latch) { model.generate( List.of(Persona.ABUSIVE.getSystemMessage(), UserMessage.from(message)), new StreamingResponseHandler<>() { @Override public void onComplete(Response<AiMessage> response) { System.out.println(); latch.countDown(); } @Override public void onNext(String token) { System.out.print(token); } @Override public void onError(Throwable error) { error.printStackTrace(); } } ); } }
[ "dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder" ]
[((791, 926), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 918), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 899), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 865), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((791, 843), 'dev.langchain4j.model.ollama.OllamaStreamingChatModel.builder'), ((1411, 1445), 'org.acme.persona.Persona.ABUSIVE.getSystemMessage')]
package org.feuyeux.ai.langchain.hellolangchain; import static org.feuyeux.ai.langchain.hellolangchain.OpenApi.getKey; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.openai.OpenAiChatModel; import java.util.Map; import lombok.extern.slf4j.Slf4j; /** * @author feuyeux */ @Slf4j public class Hello { public static void main(String[] args) { Prompt prompt = PromptTemplate.from("你是顶级的短片作家,请根据{{title}}的内容,写一篇50字的精品短文,然后翻译成英文。") .apply(Map.of("title", "窗外")); ChatLanguageModel model = OpenAiChatModel.builder().apiKey(getKey()).build(); String response = model.generate(prompt.text()); log.info("{}", response); } }
[ "dev.langchain4j.model.input.PromptTemplate.from", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((504, 689), 'dev.langchain4j.model.input.PromptTemplate.from'), ((721, 771), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((721, 763), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package net.savantly.mainbot.service.languagetools; import java.time.Duration; import java.util.List; import org.springframework.beans.factory.annotation.Value; import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.output.Response; import dev.langchain4j.service.AiServices; import lombok.RequiredArgsConstructor; import net.savantly.mainbot.config.OpenAIConfig; @Configuration @RequiredArgsConstructor public class LanguageToolsConfiguration { private final OpenAIConfig openAIConfig; @Value("${language-tools.log-requests:false}") private boolean logRequests = false; @Value("${language-tools.log-responses:true}") private boolean logResponses = true; @Bean @ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "true") public LanguageToolModel getLanguageToolModel() { String apiKey = openAIConfig.getApiKey(); var chatModel = OpenAiChatModel.builder() .apiKey(apiKey) // https://platform.openai.com/account/api-keys .modelName(openAIConfig.getChatModelId()) .temperature(openAIConfig.getTemperature()) .logResponses(logResponses) .logRequests(logRequests) .timeout(Duration.ofSeconds(openAIConfig.getTimeoutSeconds())) .build(); return new LanguageToolModel() { @Override public Response<AiMessage> generate(List<ChatMessage> messages) { return chatModel.generate(messages); } @Override public Response<AiMessage> generate(List<ChatMessage> messages, List<ToolSpecification> toolSpecifications) { return chatModel.generate(messages, toolSpecifications); } @Override public Response<AiMessage> generate(List<ChatMessage> messages, ToolSpecification toolSpecification) { return chatModel.generate(messages, 
toolSpecification); } }; } @Bean @ConditionalOnProperty(prefix = "openai", name = "enabled", havingValue = "false", matchIfMissing = true) public LanguageToolModel getMockLanguageToolModel() { return new MockChatLanguageModel(); } @Bean public LanguageTools getLanguageTools(LanguageToolModel model) { return AiServices.builder(LanguageTools.class) .chatLanguageModel(model) .build(); } }
[ "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1275, 1688), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1663), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1584), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1542), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1498), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1438), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1275, 1332), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2717, 2823), 'dev.langchain4j.service.AiServices.builder'), ((2717, 2798), 'dev.langchain4j.service.AiServices.builder')]
package de.htwg.rag.dataTools; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; import jakarta.enterprise.context.ApplicationScoped; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.List; @ApplicationScoped public class Summarizer { public String summarize(String text) { if (text == null || text.isEmpty()) { throw new IllegalArgumentException("Text is empty or null"); } // Summarize the text and ingest it // Use of the @RegisterAiService annotation is not possible on TextSummarizer because no option to say chatMemory = null ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(System.getenv("OPENAI_APIKEY")) .modelName("gpt-3.5-turbo") .timeout(Duration.ofMinutes(5)) .logRequests(true) .logResponses(true) .build(); TextSummarizerService summarizer = AiServices.create(TextSummarizerService.class, model); List<String> textList = Arrays.asList(text.split("\n")); // Create list of always 30 texts together List<List<String>> chunk = new ArrayList<>(); for (int i = 0; i < textList.size(); i += 30) { chunk.add(textList.subList(i, Math.min(i + 30, textList.size()))); } // Summarize the chunks and add them to the document StringBuilder summarizedText = new StringBuilder(); for (List<String> strings : chunk) { summarizedText.append(summarizer.summarize(String.join("\n", strings))); } return summarizedText.toString(); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((764, 1033), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 1008), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 972), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 937), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 889), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((764, 845), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package org.example; import dev.langchain4j.chain.ConversationalRetrievalChain; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentParser; import dev.langchain4j.data.document.parser.TextDocumentParser; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.message.SystemMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.TokenWindowChatMemory; import dev.langchain4j.model.Tokenizer; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiChatModelName; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore; import org.apache.poi.xssf.extractor.XSSFExcelExtractor; import org.apache.poi.xssf.usermodel.XSSFWorkbook; import java.io.FileNotFoundException; import java.io.IOException; import java.io.InputStream; import java.nio.file.Paths; import java.util.Scanner; import static dev.langchain4j.data.document.loader.FileSystemDocumentLoader.loadDocument; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; public class _11_RAG { public static void main(String[] args) throws FileNotFoundException { // Debugger op info // In process model EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>(); EmbeddingStoreIngestor ingestor = EmbeddingStoreIngestor.builder() .documentSplitter(DocumentSplitters.recursive(500, 0)) .embeddingModel(embeddingModel) .embeddingStore(embeddingStore) .build(); 
EmbeddingStoreContentRetriever retriever = EmbeddingStoreContentRetriever.builder() .embeddingModel(embeddingModel) .embeddingStore(embeddingStore) .build(); OpenAiChatModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.OPENAI_PAID) .modelName(OpenAiChatModelName.GPT_3_5_TURBO) .temperature(0d) .build(); Document document = loadDocument(Paths.get("src/main/resources/Martin-and-Donny.txt"), new TextDocumentParser()); // Document document = loadDocument(Paths.get("src/main/resources/All the books.xlsx"), new ApacheExcelParser()); ingestor.ingest(document); // Opzet chatmemory, om antwoorden kort te houden Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(1000, tokenizer); chatMemory.add(SystemMessage.from("Keep your answers short.")); ConversationalRetrievalChain chain = ConversationalRetrievalChain.builder() .chatLanguageModel(model) .contentRetriever(retriever) .chatMemory(chatMemory) .build(); Scanner scanner = new Scanner(System.in); System.out.println("Embedding store ready. Start asking questions."); System.out.println(); while (scanner.hasNext()) { String in = scanner.nextLine(); String answer = chain.execute(in); System.out.println(answer); } } } class ApacheExcelParser implements DocumentParser { @Override public Document parse(InputStream inputStream) { try { XSSFWorkbook workbook = new XSSFWorkbook(inputStream); XSSFExcelExtractor extractor = new XSSFExcelExtractor(workbook); return Document.from(extractor.getText()); } catch (IOException e) { throw new RuntimeException(e); } } }
[ "dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder", "dev.langchain4j.chain.ConversationalRetrievalChain.builder", "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1885, 2109), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1885, 2084), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1885, 2036), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1885, 1988), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((2163, 2324), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2163, 2299), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2163, 2251), 'dev.langchain4j.rag.content.retriever.EmbeddingStoreContentRetriever.builder'), ((2359, 2549), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2359, 2524), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2359, 2491), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2359, 2429), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((3159, 3349), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3159, 3324), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3159, 3284), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder'), ((3159, 3239), 'dev.langchain4j.chain.ConversationalRetrievalChain.builder')]
import dev.langchain4j.agent.tool.Tool; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; public class _10_ServiceWithToolsExample { // Please also check CustomerSupportApplication and CustomerSupportApplicationTest // from spring-boot-example module static class Calculator { @Tool("Calculates the length of a string") int stringLength(String s) { System.out.println("Called stringLength() with s='" + s + "'"); return s.length(); } @Tool("Calculates the sum of two numbers") int add(int a, int b) { System.out.println("Called add() with a=" + a + ", b=" + b); return a + b; } @Tool("Calculates the square root of a number") double sqrt(int x) { System.out.println("Called sqrt() with x=" + x); return Math.sqrt(x); } } interface Assistant { String chat(String userMessage); } public static void main(String[] args) { ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.OPENAI_API_KEY) .logRequests(false) .build(); Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(model) .tools(new Calculator()) .chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); String question = "What is the square root of the sum of the numbers of letters in the words \"hello\" and \"world\"?"; String answer = assistant.chat(question); System.out.println(answer); // The square root of the sum of the number of letters in the words "hello" and "world" is approximately 3.162. } }
[ "dev.langchain4j.service.AiServices.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1202, 1336), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1202, 1311), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1202, 1275), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1369, 1585), 'dev.langchain4j.service.AiServices.builder'), ((1369, 1560), 'dev.langchain4j.service.AiServices.builder'), ((1369, 1487), 'dev.langchain4j.service.AiServices.builder'), ((1369, 1446), 'dev.langchain4j.service.AiServices.builder')]
package com.tencent.supersonic.headless.core.chat.parser.llm; import com.tencent.supersonic.common.util.JsonUtil; import com.tencent.supersonic.headless.core.config.OptimizationConfig; import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq; import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMReq.SqlGenerationMode; import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMResp; import com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.output.Response; import lombok.extern.slf4j.Slf4j; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.InitializingBean; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.stereotype.Service; import java.util.HashMap; import java.util.List; import java.util.Map; @Service @Slf4j public class OnePassSqlGeneration implements SqlGeneration, InitializingBean { private static final Logger keyPipelineLog = LoggerFactory.getLogger("keyPipeline"); @Autowired private ChatLanguageModel chatLanguageModel; @Autowired private SqlExamplarLoader sqlExampleLoader; @Autowired private OptimizationConfig optimizationConfig; @Autowired private SqlPromptGenerator sqlPromptGenerator; @Override public LLMResp generation(LLMReq llmReq, Long dataSetId) { //1.retriever sqlExamples keyPipelineLog.info("dataSetId:{},llmReq:{}", dataSetId, llmReq); List<Map<String, String>> sqlExamples = sqlExampleLoader.retrieverSqlExamples(llmReq.getQueryText(), optimizationConfig.getText2sqlExampleNum()); //2.generator linking and sql prompt by sqlExamples,and generate response. 
String promptStr = sqlPromptGenerator.generatorLinkingAndSqlPrompt(llmReq, sqlExamples); Prompt prompt = PromptTemplate.from(JsonUtil.toString(promptStr)).apply(new HashMap<>()); keyPipelineLog.info("request prompt:{}", prompt.toSystemMessage()); Response<AiMessage> response = chatLanguageModel.generate(prompt.toSystemMessage()); String result = response.content().text(); keyPipelineLog.info("model response:{}", result); //3.format response. String schemaLinkStr = OutputFormat.getSchemaLinks(response.content().text()); String sql = OutputFormat.getSql(response.content().text()); Map<String, LLMSqlResp> sqlRespMap = new HashMap<>(); sqlRespMap.put(sql, LLMSqlResp.builder().sqlWeight(1D).fewShots(sqlExamples).build()); keyPipelineLog.info("schemaLinkStr:{},sqlRespMap:{}", schemaLinkStr, sqlRespMap); LLMResp llmResp = new LLMResp(); llmResp.setQuery(llmReq.getQueryText()); llmResp.setSqlRespMap(sqlRespMap); return llmResp; } @Override public void afterPropertiesSet() { SqlGenerationFactory.addSqlGenerationForFactory(SqlGenerationMode.ONE_PASS_AUTO_COT, this); } }
[ "dev.langchain4j.model.input.PromptTemplate.from" ]
[((2083, 2155), 'dev.langchain4j.model.input.PromptTemplate.from'), ((2710, 2774), 'com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp.builder'), ((2710, 2766), 'com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp.builder'), ((2710, 2744), 'com.tencent.supersonic.headless.core.chat.query.llm.s2sql.LLMSqlResp.builder')]
package com.ramesh.langchain; import static dev.langchain4j.data.document.FileSystemDocumentLoader.loadDocument; import static java.time.Duration.ofSeconds; import static java.util.stream.Collectors.joining; import java.util.HashMap; import java.util.List; import java.util.Map; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.splitter.DocumentSplitters; import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.embedding.AllMiniLmL6V2QuantizedEmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore; /*** * This project demonstrates how to use LangChain embeddings and receive LLM response based * on the relevant embeddings of the input prompt * Note that no LangChain chain is created here. 
So the response comes in one single shot */ public class ChatWithDocumentLive { // Open AI Key and Chat GPT Model to use public static String OPENAI_API_KEY = "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"; public static String OPENAI_MODEL = "gpt-3.5-turbo"; public static void main(String[] args) { System.out.println("Loading sample document and splitting into words..."); // loading input document and splitting into segments Document document = loadDocument(".\\simpsons_adventures.txt"); DocumentSplitter splitter = DocumentSplitters.recursive(100, 0, new OpenAiTokenizer(OPENAI_MODEL)); List<TextSegment> segments = splitter.split(document); // Generating embeddings for the words in document and storing in memory System.out.println("Generating embeddings for the words in document and storing in memory..."); EmbeddingModel embeddingModel = new AllMiniLmL6V2QuantizedEmbeddingModel(); List<Embedding> embeddings = embeddingModel.embedAll(segments) .content(); EmbeddingStore<TextSegment> embeddingStore = new InMemoryEmbeddingStore<>(); embeddingStore.addAll(embeddings, segments); // Generating embeddings for prompt \"Who is Simpson?\ System.out.println("\nGenerating embeddings for prompt \"Who is Simpson?\""); String question = "Who is Simpson?"; Embedding questionEmbedding = embeddingModel.embed(question) .content(); int maxResults = 3; double minScore = 0.7; // Getting relavant embeddings or words for prompt \"Who is Simpson?\" from the embeddings stored in memory") System.out.println("Getting relavant embeddings or words for prompt \"Who is Simpson?\" from the embeddings stored in memory"); List<EmbeddingMatch<TextSegment>> relevantEmbeddings = embeddingStore.findRelevant(questionEmbedding, maxResults, minScore); // Sending relevant embeddings and prompt \"Who is Simpson?\" to chat gpt System.out.println("Sending relevant embeddings and prompt \"Who is Simpson?\" to chat gpt"); // creating a LangChain PromptTemplate PromptTemplate promptTemplate = 
PromptTemplate.from("Answer the following question to the best of your ability:\n" + "\n" + "Question:\n" + "{{question}}\n" + "\n" + "Base your answer on the following information:\n" + "{{information}}"); // streaming the responses from chatGPT and joining in the end String information = relevantEmbeddings.stream() .map(match -> match.embedded() .text()) .collect(joining("\n\n")); Map<String, Object> variables = new HashMap<>(); variables.put("question", question); variables.put("information", information); Prompt prompt = promptTemplate.apply(variables); // creating the chatmodel ChatLanguageModel chatModel = OpenAiChatModel.builder() .apiKey(OPENAI_API_KEY) .timeout(ofSeconds(60)) .build(); // calling chatgpt and generating the response AiMessage aiMessage = chatModel.generate(prompt.toUserMessage()) .content(); System.out.println("response from ChatGPT for prompt \"Who is Simpson?\"\n"); System.out.println(aiMessage.text()); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((4321, 4439), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4321, 4418), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((4321, 4382), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore; import java.util.List; public class ElasticsearchEmbeddingStoreExample { /** * To run this example, ensure you have Elasticsearch running locally. If not, then: * - Execute "docker pull docker.elastic.co/elasticsearch/elasticsearch:8.9.0" * - Execute "docker run -d -p 9200:9200 -p 9300:9300 -e discovery.type=single-node -e xpack.security.enabled=false docker.elastic.co/elasticsearch/elasticsearch:8.9.0" * - Wait until Elasticsearch is ready to serve (may take a few minutes) */ public static void main(String[] args) throws InterruptedException { EmbeddingStore<TextSegment> embeddingStore = ElasticsearchEmbeddingStore.builder() .serverUrl("http://localhost:9200") .build(); EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); TextSegment segment1 = TextSegment.from("I like football."); Embedding embedding1 = embeddingModel.embed(segment1).content(); embeddingStore.add(embedding1, segment1); TextSegment segment2 = TextSegment.from("The weather is good today."); Embedding embedding2 = embeddingModel.embed(segment2).content(); embeddingStore.add(embedding2, segment2); Thread.sleep(1000); // to be sure that embeddings were persisted Embedding queryEmbedding = embeddingModel.embed("What is your favourite sport?").content(); List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1); EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0); System.out.println(embeddingMatch.score()); // 0.81442887 System.out.println(embeddingMatch.embedded().text()); // I like football. } }
[ "dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder" ]
[((1055, 1169), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder'), ((1055, 1144), 'dev.langchain4j.store.embedding.elasticsearch.ElasticsearchEmbeddingStore.builder')]
package org.acme; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; import static dev.langchain4j.data.message.UserMessage.userMessage; import dev.langchain4j.chain.ConversationalChain; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.TokenWindowChatMemory; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.Tokenizer; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.openai.OpenAiTokenizer; import dev.langchain4j.model.output.Response; import io.quarkus.logging.Log; import io.smallrye.mutiny.Multi; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import jakarta.ws.rs.Produces; import jakarta.ws.rs.core.MediaType; import org.eclipse.microprofile.reactive.messaging.Channel; import org.eclipse.microprofile.reactive.messaging.Emitter; @Path("/stream") public class StreamingResource { @StreamingOpenAi StreamingChatLanguageModel streamModel; @OpenAi ChatLanguageModel model; @Channel("response") Multi<String> response; @Channel("response") Emitter<String> emitter; @GET @Path("/") @Produces(MediaType.SERVER_SENT_EVENTS) public Multi<String> stream() { return response; } @GET @Path("/memory") @Produces(MediaType.TEXT_PLAIN) public void memory() { Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); ChatMemory chatMemory = TokenWindowChatMemory.withMaxTokens(1000, tokenizer); /**SystemMessage systemMessage = SystemMessage.from( "You are a senior developer explaining to another senior Java developer " + "using developing in Quarkus framework"); chatMemory.add(systemMessage);**/ UserMessage userMessage1 = userMessage( "How to write a REST endpoint in Java? 
"); chatMemory.add(userMessage1); emitter.send("[User]: " + userMessage1.text()); final Response<AiMessage> response1 = model.generate(chatMemory.messages()); chatMemory.add(response1.content()); emitter.send("[LLM]: "+ response1.content().text()); UserMessage userMessage2 = userMessage( "Create a test of the first point? " + "Be short, 15 lines of code maximum."); chatMemory.add(userMessage2); emitter.send("[User]: " + userMessage2.text()); final Response<AiMessage> response2 = model.generate(chatMemory.messages()); emitter.send("[LLM]: " + response2.content().text()); } @GET @Path("/chain") @Produces(MediaType.TEXT_PLAIN) public void chain() { ConversationalChain chain = ConversationalChain.builder() .chatLanguageModel(model) .build(); String userMessage1 = "Can you give a brief explanation of Kubernetes, 3 lines max?"; emitter.send("[User]: " + userMessage1); String answer1 = chain.execute(userMessage1); emitter.send("[LLM]: " + answer1); String userMessage2 = "Can you give me a YAML example to deploy an application for that?"; emitter.send("[User]: " + userMessage2); String answer2 = chain.execute(userMessage2); emitter.send("[LLM]: " + answer2); } @GET @Path("/ask") @Produces(MediaType.TEXT_PLAIN) public void hello() { String prompt = "Explain me why earth is flat"; streamModel.generate(prompt, new StreamingResponseHandler<>() { @Override public void onNext(String token) { emitter.send(token); } @Override public void onError(Throwable error) { Log.error(error.getMessage()); } }); } }
[ "dev.langchain4j.chain.ConversationalChain.builder" ]
[((2851, 2939), 'dev.langchain4j.chain.ConversationalChain.builder'), ((2851, 2918), 'dev.langchain4j.chain.ConversationalChain.builder')]
package com.redhat.hackathon; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.loader.FileSystemDocumentLoader; import dev.langchain4j.data.document.parser.TextDocumentParser; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingStoreIngestor; import io.quarkiverse.langchain4j.redis.RedisEmbeddingStore; import io.quarkus.runtime.StartupEvent; import jakarta.enterprise.context.ApplicationScoped; import jakarta.enterprise.event.Observes; import jakarta.inject.Inject; import java.io.File; import java.util.List; import static dev.langchain4j.data.document.splitter.DocumentSplitters.recursive; @ApplicationScoped public class DocumentIngestor { /** * The embedding store (the database). * The bean is provided by the quarkus-langchain4j-redis extension. */ @Inject RedisEmbeddingStore store; /** * The embedding model (how the vector of a document is computed). * The bean is provided by the LLM (like openai) extension. */ @Inject EmbeddingModel embeddingModel; public void ingest(@Observes StartupEvent event) { if ("disabled".equals("disabled")) { System.out.println("Skipping ingesting document for RAG; seems not really applicable for our hackathon scope.\n"); return; } System.out.printf("Ingesting documents...%n"); List<Document> documents = FileSystemDocumentLoader.loadDocuments(new File("src/main/resources/catalog").toPath(), new TextDocumentParser()); var ingestor = EmbeddingStoreIngestor.builder() .embeddingStore(store) .embeddingModel(embeddingModel) .documentSplitter(recursive(500, 0)) .build(); ingestor.ingest(documents); System.out.printf("Ingested %d documents.%n", documents.size()); } }
[ "dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder" ]
[((1596, 1793), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1596, 1768), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1596, 1715), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder'), ((1596, 1667), 'dev.langchain4j.store.embedding.EmbeddingStoreIngestor.builder')]
/* * Copyright 2024 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package gemini.workshop; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.vertexai.VertexAiGeminiChatModel; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.output.Response; import java.util.Map; public class Step6_TextClassification { public static void main(String[] args) { ChatLanguageModel model = VertexAiGeminiChatModel.builder() .project(System.getenv("PROJECT_ID")) .location(System.getenv("LOCATION")) .modelName("gemini-1.0-pro") .maxOutputTokens(10) .maxRetries(3) .build(); PromptTemplate promptTemplate = PromptTemplate.from(""" Analyze the sentiment of the text below. Respond only with one word to describe the sentiment. INPUT: This is fantastic news! OUTPUT: POSITIVE INPUT: Pi is roughly equal to 3.14 OUTPUT: NEUTRAL INPUT: I really disliked the pizza. Who would use pineapples as a pizza topping? OUTPUT: NEGATIVE INPUT: {{text}} OUTPUT: """); Prompt prompt = promptTemplate.apply( Map.of("text", "I love strawberries!")); Response<AiMessage> response = model.generate(prompt.toUserMessage()); System.out.println(response.content().text()); } }
[ "dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder" ]
[((1068, 1322), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1301), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1274), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1241), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1200), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder'), ((1068, 1151), 'dev.langchain4j.model.vertexai.VertexAiGeminiChatModel.builder')]
import dev.langchain4j.data.embedding.Embedding; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.embedding.AllMiniLmL6V2EmbeddingModel; import dev.langchain4j.model.embedding.EmbeddingModel; import dev.langchain4j.store.embedding.EmbeddingMatch; import dev.langchain4j.store.embedding.EmbeddingStore; import dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore; import java.util.List; public class PineconeEmbeddingStoreExample { public static void main(String[] args) { EmbeddingStore<TextSegment> embeddingStore = PineconeEmbeddingStore.builder() .apiKey(System.getenv("PINECONE_API_KEY")) .environment("northamerica-northeast1-gcp") // Project ID can be found in the Pinecone url: // https://app.pinecone.io/organizations/{organization}/projects/{environment}:{projectId}/indexes .projectId("19a129b") // Make sure the dimensions of the Pinecone index match the dimensions of the embedding model // (384 for all-MiniLM-L6-v2, 1536 for text-embedding-ada-002, etc.) .index("test") .build(); EmbeddingModel embeddingModel = new AllMiniLmL6V2EmbeddingModel(); TextSegment segment1 = TextSegment.from("I like football."); Embedding embedding1 = embeddingModel.embed(segment1).content(); embeddingStore.add(embedding1, segment1); TextSegment segment2 = TextSegment.from("The weather is good today."); Embedding embedding2 = embeddingModel.embed(segment2).content(); embeddingStore.add(embedding2, segment2); Embedding queryEmbedding = embeddingModel.embed("What is your favourite sport?").content(); List<EmbeddingMatch<TextSegment>> relevant = embeddingStore.findRelevant(queryEmbedding, 1); EmbeddingMatch<TextSegment> embeddingMatch = relevant.get(0); System.out.println(embeddingMatch.score()); // 0.8144288515898701 System.out.println(embeddingMatch.embedded().text()); // I like football. } }
[ "dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder" ]
[((573, 1192), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 1167), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 941), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 724), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder'), ((573, 664), 'dev.langchain4j.store.embedding.pinecone.PineconeEmbeddingStore.builder')]
package org.agoncal.fascicle.langchain4j.testingdebugging; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; import static java.time.Duration.ofSeconds; // tag::adocSkip[] /** * @author Antonio Goncalves * http://www.antoniogoncalves.org * -- */ // end::adocSkip[] public class AuthorService { private static final String OPENAI_API_KEY = System.getenv("OPENAI_API_KEY"); private String[] scifiAuthors = {"Isaac Asimov", "Nora Jemisin", "Douglas Adams"}; String url; public AuthorService(/*String url*/) { this.url = url; } public String getAuthorBiography(int index) { ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(OPENAI_API_KEY) //.baseUrl(this.url) //.proxy("http://localhost:3128") .modelName(GPT_3_5_TURBO) .temperature(0.3) .timeout(ofSeconds(60)) .logRequests(true) .logResponses(true) .build(); String prompt = "Write a short biography about " + scifiAuthors[index]; String biography = model.generate(prompt); return biography; } public static void main(String[] args) { AuthorService authorService = new AuthorService(); System.out.println(authorService.getAuthorBiography(0)); } } // end::adocSnippet[]
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((758, 1032), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 1017), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 991), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 966), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 936), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 912), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((758, 813), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package org.agoncal.fascicle.langchain4j.accessing.localai; import dev.langchain4j.model.localai.LocalAiChatModel; // tag::adocSkip[] /** * @author Antonio Goncalves * http://www.antoniogoncalves.org * -- */ // end::adocSkip[] public class MusicianService { public static void main(String[] args) { MusicianService musicianService = new MusicianService(); musicianService.useLocalAiChatModel(); } private static final String AZURE_OPENAI_KEY = System.getenv("AZURE_OPENAI_KEY"); private static final String AZURE_OPENAI_ENDPOINT = System.getenv("AZURE_OPENAI_ENDPOINT"); private static final String AZURE_OPENAI_DEPLOYMENT_NAME = System.getenv("AZURE_OPENAI_DEPLOYMENT_NAME"); private static final String PROMPT = "When was the first Beatles album released?"; // ############################### // ### AZURE OPENAI CHAT MODEL ### // ############################### public void useLocalAiChatModel() { System.out.println("### useLocalAiChatModel"); // tag::adocSnippet[] LocalAiChatModel model = LocalAiChatModel.builder() .temperature(0.3) .build(); // end::adocSnippet[] String completion = model.generate("When was the first Rolling Stones album released?"); System.out.println(completion); } }
[ "dev.langchain4j.model.localai.LocalAiChatModel.builder" ]
[((1048, 1113), 'dev.langchain4j.model.localai.LocalAiChatModel.builder'), ((1048, 1098), 'dev.langchain4j.model.localai.LocalAiChatModel.builder')]
package io.quarkiverse.langchain4j.azure.openai; import static dev.langchain4j.internal.RetryUtils.withRetry; import static dev.langchain4j.internal.ValidationUtils.ensureNotBlank; import static dev.langchain4j.model.openai.OpenAiModelName.DALL_E_2; import java.io.IOException; import java.io.InputStream; import java.io.UncheckedIOException; import java.net.URI; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; import java.nio.file.StandardCopyOption; import java.nio.file.StandardOpenOption; import java.time.Duration; import java.util.Base64; import java.util.List; import java.util.Optional; import java.util.UUID; import java.util.concurrent.Callable; import java.util.stream.Collectors; import dev.ai4j.openai4j.SyncOrAsync; import dev.ai4j.openai4j.image.GenerateImagesRequest; import dev.ai4j.openai4j.image.GenerateImagesResponse; import dev.langchain4j.data.image.Image; import dev.langchain4j.model.image.ImageModel; import dev.langchain4j.model.output.Response; import io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient; @SuppressWarnings("OptionalUsedAsFieldOrParameterType") public class AzureOpenAiImageModel implements ImageModel { private final String modelName; private final String size; private final String quality; private final String style; private final Optional<String> user; private final String responseFormat; private final Integer maxRetries; private final Optional<Path> persistDirectory; private final QuarkusOpenAiClient client; public AzureOpenAiImageModel(String endpoint, String apiKey, String apiVersion, String modelName, String size, String quality, String style, Optional<String> user, String responseFormat, Duration timeout, Integer maxRetries, Boolean logRequests, Boolean logResponses, Optional<Path> persistDirectory) { this.modelName = modelName; this.size = size; this.quality = quality; this.style = style; this.user = user; this.responseFormat = responseFormat; this.maxRetries = maxRetries; 
this.persistDirectory = persistDirectory; this.client = QuarkusOpenAiClient.builder() .baseUrl(ensureNotBlank(endpoint, "endpoint")) .azureApiKey(apiKey) .apiVersion(apiVersion) .callTimeout(timeout) .connectTimeout(timeout) .readTimeout(timeout) .writeTimeout(timeout) .logRequests(logRequests) .logResponses(logResponses) .build(); } @Override public Response<Image> generate(String prompt) { var request = requestBuilder(prompt).build(); var response = withRetry(new ImageGenerator(request), maxRetries).execute(); persistIfNecessary(response); return Response.from(fromImageData(response.data().get(0))); } @Override public Response<List<Image>> generate(String prompt, int n) { var request = requestBuilder(prompt).n(n).build(); var response = withRetry(new ImageGenerator(request), maxRetries).execute(); persistIfNecessary(response); return Response.from( response.data().stream().map(AzureOpenAiImageModel::fromImageData).collect(Collectors.toList())); } private void persistIfNecessary(GenerateImagesResponse response) { if (persistDirectory.isEmpty()) { return; } var persistTo = persistDirectory.get(); try { Files.createDirectories(persistTo); } catch (IOException e) { throw new UncheckedIOException(e); } for (GenerateImagesResponse.ImageData data : response.data()) { try { data.url( data.url() != null ? 
FilePersistor.persistFromUri(data.url(), persistTo).toUri() : FilePersistor.persistFromBase64String(data.b64Json(), persistTo).toUri()); } catch (IOException e) { throw new UncheckedIOException(e); } } } private static Image fromImageData(GenerateImagesResponse.ImageData data) { return Image.builder().url(data.url()).base64Data(data.b64Json()).revisedPrompt(data.revisedPrompt()).build(); } private GenerateImagesRequest.Builder requestBuilder(String prompt) { var builder = GenerateImagesRequest.builder() .prompt(prompt) .size(size) .quality(quality) .style(style) .responseFormat(responseFormat); if (DALL_E_2.equals(modelName)) { builder.model(dev.ai4j.openai4j.image.ImageModel.DALL_E_2); } if (user.isPresent()) { builder.user(user.get()); } return builder; } public static Builder builder() { return new Builder(); } public static class Builder { private String endpoint; private String apiKey; private String apiVersion; private String modelName; private String size; private String quality; private String style; private Optional<String> user; private String responseFormat; private Duration timeout; private Integer maxRetries; private Boolean logRequests; private Boolean logResponses; private Optional<Path> persistDirectory; public Builder endpoint(String endpoint) { this.endpoint = endpoint; return this; } public Builder apiKey(String apiKey) { this.apiKey = apiKey; return this; } public Builder apiVersion(String apiVersion) { this.apiVersion = apiVersion; return this; } public Builder timeout(Duration timeout) { this.timeout = timeout; return this; } public Builder maxRetries(Integer maxRetries) { this.maxRetries = maxRetries; return this; } public Builder logRequests(Boolean logRequests) { this.logRequests = logRequests; return this; } public Builder logResponses(Boolean logResponses) { this.logResponses = logResponses; return this; } public Builder modelName(String modelName) { this.modelName = modelName; return this; } public Builder size(String size) { this.size = 
size; return this; } public Builder quality(String quality) { this.quality = quality; return this; } public Builder style(String style) { this.style = style; return this; } public Builder user(Optional<String> user) { this.user = user; return this; } public Builder responseFormat(String responseFormat) { this.responseFormat = responseFormat; return this; } public Builder persistDirectory(Optional<Path> persistDirectory) { this.persistDirectory = persistDirectory; return this; } public AzureOpenAiImageModel build() { return new AzureOpenAiImageModel(endpoint, apiKey, apiVersion, modelName, size, quality, style, user, responseFormat, timeout, maxRetries, logRequests, logResponses, persistDirectory); } } private class ImageGenerator implements Callable<SyncOrAsync<GenerateImagesResponse>> { private final GenerateImagesRequest request; private ImageGenerator(GenerateImagesRequest request) { this.request = request; } @Override public SyncOrAsync<GenerateImagesResponse> call() { return client.imagesGeneration(request); } } /** * Copied from {@code dev.ai4j.openai4j.FilePersistor} */ private static class FilePersistor { static Path persistFromUri(URI uri, Path destinationFolder) { try { Path fileName = Paths.get(uri.getPath()).getFileName(); Path destinationFilePath = destinationFolder.resolve(fileName); try (InputStream inputStream = uri.toURL().openStream()) { Files.copy(inputStream, destinationFilePath, StandardCopyOption.REPLACE_EXISTING); } return destinationFilePath; } catch (IOException e) { throw new RuntimeException("Error persisting file from URI: " + uri, e); } } public static Path persistFromBase64String(String base64EncodedString, Path destinationFolder) throws IOException { byte[] decodedBytes = Base64.getDecoder().decode(base64EncodedString); Path destinationFile = destinationFolder.resolve(randomFileName()); Files.write(destinationFile, decodedBytes, StandardOpenOption.CREATE); return destinationFile; } private static String randomFileName() { return 
UUID.randomUUID().toString().replaceAll("-", "").substring(0, 20); } } }
[ "dev.langchain4j.data.image.Image.builder" ]
[((2190, 2626), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2601), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2557), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2515), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2476), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2438), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2397), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2359), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2319), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((2190, 2282), 'io.quarkiverse.langchain4j.openai.QuarkusOpenAiClient.builder'), ((4295, 4397), 'dev.langchain4j.data.image.Image.builder'), ((4295, 4389), 'dev.langchain4j.data.image.Image.builder'), ((4295, 4353), 'dev.langchain4j.data.image.Image.builder'), ((4295, 4326), 'dev.langchain4j.data.image.Image.builder'), ((4502, 4705), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4657), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4627), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4593), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((4502, 4565), 'dev.ai4j.openai4j.image.GenerateImagesRequest.builder'), ((8309, 8347), 'java.nio.file.Paths.get'), ((8980, 9027), 'java.util.Base64.getDecoder'), ((9309, 9374), 'java.util.UUID.randomUUID'), ((9309, 9357), 'java.util.UUID.randomUUID'), ((9309, 9337), 'java.util.UUID.randomUUID')]
package org.agoncal.fascicle.langchain4j.accessing.ollama; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.SystemMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.ollama.OllamaChatModel; import dev.langchain4j.model.ollama.OllamaLanguageModel; import dev.langchain4j.model.output.FinishReason; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.output.TokenUsage; // tag::adocSkip[] /** * @author Antonio Goncalves * http://www.antoniogoncalves.org * -- */ // end::adocSkip[] public class MusicianService { public static void main(String[] args) { MusicianService musicianService = new MusicianService(); musicianService.useOllamaLanguageModel(); // musicianService.useOllamaLanguageModelPrompt(); // musicianService.useOllamaLanguageModelBuilder(); // musicianService.useOllamaChatModel(); // musicianService.useOllamaChatModelBuilder(); } private static final String PROMPT = "When was the first Beatles album released?"; // ############################# // ### OPENAI LANGUAGE MODEL ### // ############################# public void useOllamaLanguageModel() { System.out.println("### useOpenAiLanguageModel"); // tag::adocSnippet[] OllamaLanguageModel model = OllamaLanguageModel.builder() .baseUrl("http://localhost:11434") .modelName("orca-mini") .build(); // end::adocSnippet[] Response<String> completion = model.generate("When was the first Beatles album released?"); String content = completion.content(); TokenUsage tokenUsage = completion.tokenUsage(); System.out.println(content); System.out.println(tokenUsage.inputTokenCount()); System.out.println(tokenUsage.outputTokenCount()); System.out.println(tokenUsage.totalTokenCount()); } public void useOllamaLanguageModelPrompt() { System.out.println("### useOpenAiLanguageModelPrompt"); OllamaLanguageModel model = OllamaLanguageModel.builder() .baseUrl("http://localhost:11434") 
.modelName("orca-mini") .build(); Prompt prompt = new Prompt("When was the first Beatles album released?"); Response<String> completion = model.generate(prompt); String content = completion.content(); FinishReason finishReason = completion.finishReason(); TokenUsage tokenUsage = completion.tokenUsage(); System.out.println(content); System.out.println(finishReason.name()); System.out.println(tokenUsage.inputTokenCount()); System.out.println(tokenUsage.outputTokenCount()); System.out.println(tokenUsage.totalTokenCount()); } public void useOllamaLanguageModelBuilder() { System.out.println("### useOpenAiLanguageModelBuilder"); OllamaLanguageModel model = OllamaLanguageModel.builder() .baseUrl("http://localhost:11434") .modelName("orca-mini") .temperature(0.3) .build(); Response<String> completion = model.generate(PROMPT); System.out.println(completion.content()); System.out.println(completion.finishReason()); System.out.println(completion.tokenUsage()); } // ######################### // ### OPENAI CHAT MODEL ### // ######################### public void useOllamaChatModel() { System.out.println("### useOpenAiChatModel"); OllamaChatModel model = OllamaChatModel.builder().build(); String completion = model.generate("When was the first Rolling Stones album released?"); System.out.println(completion); } public void useOllamaChatModelBuilder() { System.out.println("### useOpenAiChatModelBuilder"); OllamaChatModel model = OllamaChatModel.builder() // .modelName(GPT_3_5_TURBO) .temperature(0.9) .build(); String completion = model.generate("When was the first Rolling Stones album released?"); System.out.println(completion); } public void useOllamaChatModelAiMessage() { System.out.println("### useOpenAiChatModelAiMessage"); OllamaChatModel model = OllamaChatModel.builder().build(); SystemMessage sysMsg = new SystemMessage("You are a music expert."); UserMessage userMsg = new UserMessage("When was the first Rolling Stones album released?"); Response<AiMessage> completion = 
model.generate(sysMsg, userMsg); System.out.println(completion); } }
[ "dev.langchain4j.model.ollama.OllamaLanguageModel.builder", "dev.langchain4j.model.ollama.OllamaChatModel.builder" ]
[((1359, 1474), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((1359, 1459), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((1359, 1429), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2038, 2153), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2038, 2138), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2038, 2108), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2976), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2961), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2937), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((2837, 2907), 'dev.langchain4j.model.ollama.OllamaLanguageModel.builder'), ((3398, 3431), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((3699, 3797), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((3699, 3782), 'dev.langchain4j.model.ollama.OllamaChatModel.builder'), ((4069, 4102), 'dev.langchain4j.model.ollama.OllamaChatModel.builder')]
package com.baeldung.langchain; import static org.assertj.core.api.Assertions.assertThat; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import dev.langchain4j.agent.tool.Tool; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; public class ServiceWithToolsLiveTest { private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class); static class Calculator { @Tool("Calculates the length of a string") int stringLength(String s) { return s.length(); } @Tool("Calculates the sum of two numbers") int add(int a, int b) { return a + b; } } interface Assistant { String chat(String userMessage); } @Test public void givenServiceWithTools_whenPrompted_thenValidResponse() { Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY)) .tools(new Calculator()) .chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); String question = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?"; String answer = assistant.chat(question); logger.info(answer); assertThat(answer).contains("13"); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((987, 1234), 'dev.langchain4j.service.AiServices.builder'), ((987, 1213), 'dev.langchain4j.service.AiServices.builder'), ((987, 1144), 'dev.langchain4j.service.AiServices.builder'), ((987, 1107), 'dev.langchain4j.service.AiServices.builder')]
package io.stargate.test.data; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.input.Prompt; import dev.langchain4j.model.input.PromptTemplate; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiEmbeddingModel; import dev.langchain4j.model.openai.OpenAiModelName; import dev.langchain4j.model.output.Response; import io.stargate.sdk.data.DataApiClient; import io.stargate.sdk.data.CollectionRepository; import io.stargate.sdk.data.NamespaceClient; import io.stargate.sdk.data.domain.query.Filter; import io.stargate.sdk.data.domain.odm.Document; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.MethodOrderer; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.TestMethodOrder; import java.io.File; import java.io.FileNotFoundException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Scanner; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; /** * Philosopher Demo with Vector Client. 
*/ @TestMethodOrder(MethodOrderer.OrderAnnotation.class) public class VectorClientPhilosopherTest { @Data @AllArgsConstructor @NoArgsConstructor private static class Quote { private String philosopher; private String quote; private Set<String> tags; } // OpenAI Usual Suspects static OpenAiEmbeddingModel openaiVectorizer = OpenAiEmbeddingModel.builder() .apiKey(System.getenv("OPENAI_API_KEY")) .modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002) .timeout(Duration.ofSeconds(15)) .logRequests(true) .logResponses(true) .build(); public static float[] vectorize(String inputText) { return openaiVectorizer.embed(inputText).content().vector(); } public static CollectionRepository<Quote> vectorStore; @Test @Order(1) @DisplayName("01. Create a namespace and a collection") public void init() { // Initialization DataApiClient jsonApiClient = new DataApiClient(); NamespaceClient nsClient = jsonApiClient.createNamespace("vector_openai"); nsClient.deleteCollection("philosophers"); nsClient.createCollection("philosophers", 1536); // Low level client jsonApiClient.namespace("vector_openai").collection("philosophers"); // Crud Repository on a Collection jsonApiClient.namespace("vector_openai").collectionRepository("philosophers", Quote.class); // Vector = crud repository + vector native vectorStore = jsonApiClient .namespace("vector_openai") .collectionRepository("philosophers", Quote.class); } @Test @Order(2) @DisplayName("02. Loading DataSet") public void shouldLoadDataset() { // Ingest CSV AtomicInteger rowId = new AtomicInteger(); loadQuotesFromCsv("/philosopher-quotes.csv").forEach(quote -> { System.out.println("Inserting " + rowId.get() + ") " + quote.getQuote()); vectorStore.insert(new Document<Quote>( String.valueOf(rowId.incrementAndGet()), quote, vectorize(quote.getQuote()))); }); } @Test @Order(3) @DisplayName("03. 
Should Similarity Search") public void shouldSimilaritySearch() { vectorStore = new DataApiClient() .namespace("vector_openai") .collectionRepository("philosophers", Quote.class); float[] embeddings = vectorize("We struggle all our life for nothing"); vectorStore.findVector(embeddings, null,3) .stream() .map(Document::getData) .map(Quote::getQuote) .forEach(System.out::println); } @Test @Order(4) @DisplayName("04. Should filter with meta data") public void shouldMetaDataFiltering() { new DataApiClient() .namespace("vector_openai") .collectionRepository("philosophers", Quote.class) .findVector( vectorize("We struggle all our life for nothing"), new Filter().where("philosopher").isEqualsTo("plato"), 2) .forEach(r -> System.out.println(r.getSimilarity() + " - " + r.getData().getQuote())); } @Test @Order(5) @DisplayName("05. Should filter with meta data tags") public void shouldMetaDataFilteringWithTags() { vectorStore = new DataApiClient() .namespace("vector_openai") .collectionRepository("philosophers", Quote.class); vectorStore.count(new Filter().where("tags").isAnArrayContaining("love")); } static ChatLanguageModel openaiGenerator = OpenAiChatModel.builder() .apiKey(System.getenv("OPENAI_API_KEY")) .modelName(OpenAiModelName.GPT_3_5_TURBO) .temperature(0.7) .timeout(Duration.ofSeconds(15)) .maxRetries(3) .logResponses(true) .logRequests(true) .build(); @Test @Order(6) @DisplayName("06. 
Should Generate new quotes") public void should_generate_new_quotes() { vectorStore = new DataApiClient() .namespace("vector_openai") .collectionRepository("philosophers", Quote.class); // === Params == String topic = "politics and virtue"; String author = "nietzsche"; int records = 4; // ==== RAG === List<String> ragQuotes = vectorStore .findVector( vectorize(topic), new Filter().where("philosopher").isEqualsTo(author),2) .stream() .map(r -> r.getData().getQuote()) .collect(Collectors.toList()); // === Completion === PromptTemplate promptTemplate = PromptTemplate.from( "Generate a single short philosophical quote on the given topic,\n" + "similar in spirit and form to the provided actual example quotes.\n" + "Do not exceed 20-30 words in your quote.\n" + "REFERENCE TOPIC: \n{{topic}}" + "\nACTUAL EXAMPLES:\n{{rag}}"); Map<String, Object> variables = new HashMap<>(); variables.put("topic", topic); variables.put("information", String.join(", ", ragQuotes)); Prompt prompt = promptTemplate.apply(variables); Response<AiMessage> aiMessage = openaiGenerator.generate(prompt.toUserMessage()); String answer = aiMessage.content().text(); System.out.println(answer); } // --- Utilities (loading CSV) --- private List<Quote> loadQuotesFromCsv(String filePath) { List<Quote> quotes = new ArrayList<>(); File csvFile = new File(VectorClientPhilosopherTest.class.getResource(filePath).getFile()); try (Scanner scanner = new Scanner(csvFile)) { while (scanner.hasNextLine()) { Quote q = mapCsvLine(scanner.nextLine()); if (q != null) quotes.add(q); } } catch (FileNotFoundException fex) { throw new IllegalArgumentException("file is not in the classpath", fex); } return quotes; } private Quote mapCsvLine(String line) { String[] parts = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); if (parts.length >= 3) { String author = parts[0]; String quote = parts[1].replaceAll("\"", ""); Set<String> tags = new HashSet<>(Arrays.asList(parts[2].split("\\;"))); return new Quote(author, quote, 
tags); } return null; } }
[ "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder", "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1750, 2031), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 2009), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1976), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1944), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1898), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1750, 1834), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((5155, 5473), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5452), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5421), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5389), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5362), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5317), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5287), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((5155, 5233), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package dev.langchain4j.model.vertexai; import com.google.cloud.vertexai.VertexAI; import com.google.cloud.vertexai.api.GenerationConfig; import com.google.cloud.vertexai.generativeai.GenerativeModel; import dev.langchain4j.agent.tool.JsonSchemaProperty; import dev.langchain4j.agent.tool.ToolExecutionRequest; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.*; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.chat.TestStreamingResponseHandler; import dev.langchain4j.model.output.Response; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.Base64; import java.util.List; import java.util.concurrent.CompletableFuture; import static dev.langchain4j.internal.Utils.readBytes; import static dev.langchain4j.model.output.FinishReason.LENGTH; import static dev.langchain4j.model.output.FinishReason.STOP; import static dev.langchain4j.model.vertexai.VertexAiGeminiChatModelIT.CAT_IMAGE_URL; import static dev.langchain4j.model.vertexai.VertexAiGeminiChatModelIT.DICE_IMAGE_URL; import static java.util.Arrays.asList; import static java.util.Collections.singletonList; import static java.util.concurrent.TimeUnit.SECONDS; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatThrownBy; class VertexAiGeminiStreamingChatModelIT { StreamingChatLanguageModel model = VertexAiGeminiStreamingChatModel.builder() .project(System.getenv("GCP_PROJECT_ID")) .location(System.getenv("GCP_LOCATION")) .modelName("gemini-pro") .build(); StreamingChatLanguageModel visionModel = VertexAiGeminiStreamingChatModel.builder() .project(System.getenv("GCP_PROJECT_ID")) .location(System.getenv("GCP_LOCATION")) .modelName("gemini-pro-vision") .build(); @Test void should_stream_answer() throws Exception { // given String userMessage = "What is the capital of Germany?"; // when 
CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(userMessage, new StreamingResponseHandler<AiMessage>() { private final StringBuilder answerBuilder = new StringBuilder(); @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); answerBuilder.append(token); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response.content() + "'"); futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureAnswer.completeExceptionally(error); futureResponse.completeExceptionally(error); } }); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); // then assertThat(answer).contains("Berlin"); assertThat(response.content().text()).isEqualTo(answer); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(7); assertThat(response.tokenUsage().outputTokenCount()).isGreaterThan(0); assertThat(response.tokenUsage().totalTokenCount()) .isEqualTo(response.tokenUsage().inputTokenCount() + response.tokenUsage().outputTokenCount()); assertThat(response.finishReason()).isEqualTo(STOP); } @Test void should_deny_system_message() { // given SystemMessage systemMessage = SystemMessage.from("Be polite"); UserMessage userMessage = UserMessage.from("Tell me a joke"); // when-then assertThatThrownBy(() -> model.generate(asList(systemMessage, userMessage), null)) .isExactlyInstanceOf(IllegalArgumentException.class) .hasMessage("SystemMessage is currently not supported by Gemini"); } @Test void should_respect_maxOutputTokens() throws Exception { // given StreamingChatLanguageModel model = VertexAiGeminiStreamingChatModel.builder() .project(System.getenv("GCP_PROJECT_ID")) .location(System.getenv("GCP_LOCATION")) .modelName("gemini-pro") .maxOutputTokens(1) 
.build(); String userMessage = "Tell me a joke"; // when CompletableFuture<String> futureAnswer = new CompletableFuture<>(); CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); model.generate(userMessage, new StreamingResponseHandler<AiMessage>() { private final StringBuilder answerBuilder = new StringBuilder(); @Override public void onNext(String token) { System.out.println("onNext: '" + token + "'"); answerBuilder.append(token); } @Override public void onComplete(Response<AiMessage> response) { System.out.println("onComplete: '" + response.content() + "'"); futureAnswer.complete(answerBuilder.toString()); futureResponse.complete(response); } @Override public void onError(Throwable error) { futureAnswer.completeExceptionally(error); futureResponse.completeExceptionally(error); } }); String answer = futureAnswer.get(30, SECONDS); Response<AiMessage> response = futureResponse.get(30, SECONDS); // then assertThat(answer).isNotBlank(); assertThat(response.content().text()).isEqualTo(answer); assertThat(response.tokenUsage().inputTokenCount()).isEqualTo(4); assertThat(response.tokenUsage().outputTokenCount()).isEqualTo(1); assertThat(response.tokenUsage().totalTokenCount()) .isEqualTo(response.tokenUsage().inputTokenCount() + response.tokenUsage().outputTokenCount()); assertThat(response.finishReason()).isEqualTo(LENGTH); } @Test void should_allow_custom_generativeModel_and_generationConfig() throws Exception { // given VertexAI vertexAi = new VertexAI(System.getenv("GCP_PROJECT_ID"), System.getenv("GCP_LOCATION")); GenerativeModel generativeModel = new GenerativeModel("gemini-pro", vertexAi); GenerationConfig generationConfig = GenerationConfig.getDefaultInstance(); StreamingChatLanguageModel model = new VertexAiGeminiStreamingChatModel(generativeModel, generationConfig); String userMessage = "What is the capital of Germany?"; // when CompletableFuture<Response<AiMessage>> futureResponse = new CompletableFuture<>(); 
model.generate(userMessage, new StreamingResponseHandler<AiMessage>() { @Override public void onNext(String token) { } @Override public void onComplete(Response<AiMessage> response) { futureResponse.complete(response); } @Override public void onError(Throwable error) { futureResponse.completeExceptionally(error); } }); Response<AiMessage> response = futureResponse.get(30, SECONDS); // then assertThat(response.content().text()).contains("Berlin"); } @Test void should_accept_text_and_image_from_public_url() { // given UserMessage userMessage = UserMessage.from( ImageContent.from(CAT_IMAGE_URL), TextContent.from("What do you see? Reply in one word.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("cat"); } @Test void should_accept_text_and_image_from_google_storage_url() { // given UserMessage userMessage = UserMessage.from( ImageContent.from("gs://langchain4j-test/cat.png"), TextContent.from("What do you see? Reply in one word.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("cat"); } @Test void should_accept_text_and_base64_image() { // given String base64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL)); UserMessage userMessage = UserMessage.from( ImageContent.from(base64Data, "image/png"), TextContent.from("What do you see? 
Reply in one word.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()).containsIgnoringCase("cat"); } @Test void should_accept_text_and_multiple_images_from_public_urls() { // given UserMessage userMessage = UserMessage.from( ImageContent.from(CAT_IMAGE_URL), ImageContent.from(DICE_IMAGE_URL), TextContent.from("What do you see? Reply with one word per image.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()) .containsIgnoringCase("cat") .containsIgnoringCase("dice"); } @Test void should_accept_text_and_multiple_images_from_google_storage_urls() { // given UserMessage userMessage = UserMessage.from( ImageContent.from("gs://langchain4j-test/cat.png"), ImageContent.from("gs://langchain4j-test/dice.png"), TextContent.from("What do you see? Reply with one word per image.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()) .containsIgnoringCase("cat") .containsIgnoringCase("dice"); } @Test void should_accept_text_and_multiple_base64_images() { // given String catBase64Data = Base64.getEncoder().encodeToString(readBytes(CAT_IMAGE_URL)); String diceBase64Data = Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL)); UserMessage userMessage = UserMessage.from( ImageContent.from(catBase64Data, "image/png"), ImageContent.from(diceBase64Data, "image/png"), TextContent.from("What do you see? 
Reply with one word per image.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()) .containsIgnoringCase("cat") .containsIgnoringCase("dice"); } @Test void should_accept_text_and_multiple_images_from_different_sources() { // given UserMessage userMessage = UserMessage.from( ImageContent.from(CAT_IMAGE_URL), ImageContent.from("gs://langchain4j-test/dog.jpg"), ImageContent.from(Base64.getEncoder().encodeToString(readBytes(DICE_IMAGE_URL)), "image/png"), TextContent.from("What do you see? Reply with one word per image.") ); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); visionModel.generate(singletonList(userMessage), handler); Response<AiMessage> response = handler.get(); // then assertThat(response.content().text()) .containsIgnoringCase("cat") .containsIgnoringCase("dog") .containsIgnoringCase("dice"); } @Test void should_accept_function_call() { // given VertexAiGeminiStreamingChatModel model = VertexAiGeminiStreamingChatModel.builder() .project(System.getenv("GCP_PROJECT_ID")) .location(System.getenv("GCP_LOCATION")) .modelName("gemini-pro") .build(); ToolSpecification weatherToolSpec = ToolSpecification.builder() .name("getWeatherForecast") .description("Get the weather forecast for a location") .addParameter("location", JsonSchemaProperty.STRING, JsonSchemaProperty.description("the location to get the weather forecast for")) .build(); List<ChatMessage> allMessages = new ArrayList<>(); UserMessage weatherQuestion = UserMessage.from("What is the weather in Paris?"); System.out.println("Question: " + weatherQuestion.text()); allMessages.add(weatherQuestion); // when TestStreamingResponseHandler<AiMessage> handler = new TestStreamingResponseHandler<>(); model.generate(allMessages, weatherToolSpec, handler); 
Response<AiMessage> messageResponse = handler.get(); // then assertThat(messageResponse.content().hasToolExecutionRequests()).isTrue(); ToolExecutionRequest toolExecutionRequest = messageResponse.content().toolExecutionRequests().get(0); assertThat(toolExecutionRequest.arguments()).contains("Paris"); assertThat(toolExecutionRequest.name()).isEqualTo("getWeatherForecast"); allMessages.add(messageResponse.content()); // when (feeding the function return value back) ToolExecutionResultMessage toolExecResMsg = ToolExecutionResultMessage.from(toolExecutionRequest, "{\"location\":\"Paris\",\"forecast\":\"sunny\", \"temperature\": 20}"); allMessages.add(toolExecResMsg); handler = new TestStreamingResponseHandler<>(); model.generate(allMessages, handler); Response<AiMessage> weatherResponse = handler.get(); // then System.out.println("Answer: " + weatherResponse.content().text()); assertThat(weatherResponse.content().text()).containsIgnoringCase("sunny"); } }
[ "dev.langchain4j.agent.tool.ToolSpecification.builder" ]
[((9104, 9164), 'java.util.Base64.getEncoder'), ((11338, 11398), 'java.util.Base64.getEncoder'), ((11432, 11493), 'java.util.Base64.getEncoder'), ((12472, 12533), 'java.util.Base64.getEncoder'), ((13457, 13774), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13457, 13753), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13457, 13592), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((13457, 13524), 'dev.langchain4j.agent.tool.ToolSpecification.builder')]
package com.exoreaction.quadim; import static java.time.Duration.ofSeconds; import com.exoreaction.quadim.resource.util.SkillDefinitionHelper; import com.exoreaction.quadim.service.ApiKeys; import com.fasterxml.jackson.core.json.JsonReadFeature; import com.fasterxml.jackson.databind.DeserializationFeature; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.SerializationFeature; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.model.openai.OpenAiModelName; import dev.langchain4j.service.MemoryId; import dev.langchain4j.service.UserMessage; import java.util.Arrays; import org.junit.jupiter.api.Test; public class AiAssistedTranslationTest { public static final ObjectMapper mapper = new ObjectMapper() .configure(SerializationFeature.WRITE_DATES_AS_TIMESTAMPS, false) .configure(SerializationFeature.FAIL_ON_EMPTY_BEANS, false) .configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false) .configure(JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature(), true) .enable(JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature()) .findAndRegisterModules(); SkillDefinitionHelper helper; @Test public void testAIAssistedTranslationFromEnglishToNorwegian() throws Exception { // PLan ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.MY_OPENAI_API_KEY) .modelName(OpenAiModelName.GPT_3_5_TURBO_16K) .timeout(ofSeconds(900)) .temperature(0.2) .build(); String initialProjectDescription = "Chief Developer, Technical Architect: Developed several core modules in PasientSky's platform, " + "including communication module against Norwegian public health authorities, topology module for clinical diagnosis " + "(ICPC2, CDC10 and SNOWMED), product module (medicines and prescription goods) m.m. Technical architect, Quality assurer. 
"; int n = 343; try { String res0 = model.generate( "Translate " + initialProjectDescription + " from English to Norwegian"); System.out.println(n + " Translated descriptions:" + n++ + "\n\n" + res0 + "\n\n"); } catch (Exception e) { System.out.println("Exception handling - Stacktrace:" + Arrays.toString(e.getStackTrace())); } } @Test public void testAIAssistedTranslationFromEnglishToSpanish() throws Exception { // PLan ChatLanguageModel model = OpenAiChatModel.builder() .apiKey(ApiKeys.MY_OPENAI_API_KEY) .modelName(OpenAiModelName.GPT_3_5_TURBO) .timeout(ofSeconds(20)) .temperature(0.2) .maxTokens(1000) .build(); String initialProjectDescription = "Chief Developer, Technical Architect: Developed several core modules in PasientSky's platform, " + "including communication module against Norwegian public health authorities, topology module for clinical diagnosis " + "(ICPC2, CDC10 and SNOWMED), product module (medicines and prescription goods) m.m. Technical architect, Quality assurer. "; int n = 343; try { String res0 = model.generate( "Translate " + initialProjectDescription + " from English to Spanish"); System.out.println(n + " Translated descriptions:" + n++ + "\n\n" + res0 + "\n\n"); } catch (Exception e) { System.out.println("Exception handling - Stacktrace:" + Arrays.toString(e.getStackTrace())); } } interface Assistant { String chat(@MemoryId int memoryId, @UserMessage String userMessage); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1091, 1152), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_UNESCAPED_CONTROL_CHARS.mappedFeature'), ((1180, 1250), 'com.fasterxml.jackson.core.json.JsonReadFeature.ALLOW_BACKSLASH_ESCAPING_ANY_CHARACTER.mappedFeature'), ((1475, 1693), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1672), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1642), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1605), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1475, 1547), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2903), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2882), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2853), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2823), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2787), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((2661, 2733), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package dev.langchain4j.data.message; import dev.langchain4j.agent.tool.ToolExecutionRequest; import org.junit.jupiter.api.Test; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.Arguments; import org.junit.jupiter.params.provider.MethodSource; import java.util.List; import java.util.stream.Stream; import static dev.langchain4j.data.message.ChatMessageDeserializer.messageFromJson; import static dev.langchain4j.data.message.ChatMessageDeserializer.messagesFromJson; import static dev.langchain4j.data.message.ChatMessageSerializer.messageToJson; import static dev.langchain4j.data.message.UserMessage.userMessage; import static java.util.Collections.emptyList; import static java.util.Collections.singletonList; import static org.assertj.core.api.Assertions.assertThat; class ChatMessageSerializerTest { @ParameterizedTest @MethodSource void should_serialize_and_deserialize_chat_message(ChatMessage message, String expectedJson) { String json = messageToJson(message); assertThat(json).isEqualToIgnoringWhitespace(expectedJson); ChatMessage deserializedMessage = messageFromJson(json); assertThat(deserializedMessage).isEqualTo(message); } static Stream<Arguments> should_serialize_and_deserialize_chat_message() { return Stream.of( Arguments.of( SystemMessage.from("hello"), "{\"text\":\"hello\",\"type\":\"SYSTEM\"}" ), Arguments.of( UserMessage.from("hello"), "{\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}" ), Arguments.of( UserMessage.from("Klaus", "hello"), "{\"name\":\"Klaus\",\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}" ), Arguments.of( UserMessage.from(ImageContent.from("http://image.url")), "{\"contents\":[{\"image\":{\"url\":\"http://image.url\"},\"detailLevel\":\"LOW\",\"type\":\"IMAGE\"}],\"type\":\"USER\"}" ), Arguments.of( UserMessage.from(ImageContent.from("aGVsbG8=", "image/png")), 
"{\"contents\":[{\"image\":{\"base64Data\":\"aGVsbG8\\u003d\",\"mimeType\":\"image/png\"},\"detailLevel\":\"LOW\",\"type\":\"IMAGE\"}],\"type\":\"USER\"}" ), Arguments.of( AiMessage.from("hello"), "{\"text\":\"hello\",\"type\":\"AI\"}" ), Arguments.of( AiMessage.from(ToolExecutionRequest.builder() .name("weather") .arguments("{\"city\": \"Munich\"}") .build()), "{\"toolExecutionRequests\":[{\"name\":\"weather\",\"arguments\":\"{\\\"city\\\": \\\"Munich\\\"}\"}],\"type\":\"AI\"}" ), Arguments.of( ToolExecutionResultMessage.from("12345", "weather", "sunny"), "{\"id\":\"12345\",\"toolName\":\"weather\",\"text\":\"sunny\",\"type\":\"TOOL_EXECUTION_RESULT\"}" ) ); } @Test void should_deserialize_user_message_in_old_schema() { String json = "{\"text\":\"hello\",\"type\":\"USER\"}"; ChatMessage deserializedMessage = messageFromJson(json); assertThat(deserializedMessage).isEqualTo(UserMessage.from("hello")); } @Test void should_serialize_and_deserialize_empty_list() { List<ChatMessage> messages = emptyList(); String json = ChatMessageSerializer.messagesToJson(messages); List<ChatMessage> deserializedMessages = messagesFromJson(json); assertThat(deserializedMessages).isEmpty(); } @Test void should_deserialize_null_as_empty_list() { assertThat(messagesFromJson(null)).isEmpty(); } @Test void should_serialize_and_deserialize_list_with_one_message() { List<ChatMessage> messages = singletonList(userMessage("hello")); String json = ChatMessageSerializer.messagesToJson(messages); assertThat(json).isEqualTo("[{\"contents\":[{\"text\":\"hello\",\"type\":\"TEXT\"}],\"type\":\"USER\"}]"); List<ChatMessage> deserializedMessages = messagesFromJson(json); assertThat(deserializedMessages).isEqualTo(messages); } @Test void should_serialize_and_deserialize_list_with_one_message_in_old_schema() { String json = "[{\"text\":\"hello\",\"type\":\"USER\"}]"; List<ChatMessage> deserializedMessages = messagesFromJson(json); 
assertThat(deserializedMessages).containsExactly(UserMessage.from("hello")); } }
[ "dev.langchain4j.agent.tool.ToolExecutionRequest.builder" ]
[((2765, 2954), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2765, 2913), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder'), ((2765, 2844), 'dev.langchain4j.agent.tool.ToolExecutionRequest.builder')]
package com.dtsx.astra.sdk.vector; import com.dtsx.astra.sdk.AstraDB; import com.dtsx.astra.sdk.AstraDBAdmin; import com.dtsx.astra.sdk.AstraDBRepository; import com.dtsx.astra.sdk.utils.AstraRc; import dev.langchain4j.model.openai.OpenAiEmbeddingModel; import dev.langchain4j.model.openai.OpenAiModelName; import io.stargate.sdk.data.domain.odm.Document; import lombok.AllArgsConstructor; import lombok.Data; import lombok.NoArgsConstructor; import lombok.extern.slf4j.Slf4j; import org.junit.jupiter.api.BeforeAll; import org.junit.jupiter.api.DisplayName; import org.junit.jupiter.api.Order; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.condition.EnabledIfEnvironmentVariable; import java.io.File; import java.io.FileNotFoundException; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Scanner; import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import static org.junit.jupiter.api.Assertions.assertTrue; @Slf4j class VectorClientPhilosopherTest { static final String DBNAME_VECTOR_CLIENT = "test_java_astra_db_client"; static final String VECTOR_STORE_NAME = "demo_philosophers"; static final String DATASET = "/philosopher-quotes.csv"; @Data @AllArgsConstructor @NoArgsConstructor static class Quote { private String philosopher; private String quote; private Set<String> tags; } static AstraDBRepository<Quote> quoteRepository; static OpenAiEmbeddingModel openaiVectorizer = OpenAiEmbeddingModel.builder() .apiKey(System.getenv("OPENAI_API_KEY")) .modelName(OpenAiModelName.TEXT_EMBEDDING_ADA_002) .timeout(Duration.ofSeconds(15)) .logRequests(true) .logResponses(true) .build(); static float[] vectorize(String inputText) { return openaiVectorizer.embed(inputText).content().vector(); } @BeforeAll public static void setup() { if (System.getenv(AstraRc.ASTRA_DB_APPLICATION_TOKEN) == null) { throw new IllegalStateException("Please setup 
'ASTRA_DB_APPLICATION_TOKEN' env variable"); } new AstraDBAdmin().createDatabase(DBNAME_VECTOR_CLIENT); log.info("db is created and active"); } @Test @Order(1) @DisplayName("01. Import Data") @EnabledIfEnvironmentVariable(named = "ASTRA_DB_APPLICATION_TOKEN", matches = "Astra.*") public void shouldIngestCsv() { // Init the Store AstraDB dbClient = new AstraDBAdmin().getDatabase(DBNAME_VECTOR_CLIENT); dbClient.deleteCollection(VECTOR_STORE_NAME); quoteRepository = dbClient.createCollection(VECTOR_STORE_NAME, 1536, Quote.class); log.info("store {} is created ", VECTOR_STORE_NAME); assertTrue(dbClient.isCollectionExists(VECTOR_STORE_NAME)); // Populate the Store AtomicInteger rowId = new AtomicInteger(); loadQuotesFromCsv(DATASET).forEach(quote -> { log.info("Inserting {}: {}", rowId.get(), quote.getQuote()); Document<Quote> quoteDoc = new Document<Quote>( String.valueOf(rowId.incrementAndGet()), quote, vectorize(quote.getQuote())); quoteRepository.insert(quoteDoc); }); } @Test @Order(2) @DisplayName("02. 
Should Similarity Search") public void shouldSimilaritySearch() { quoteRepository = new AstraDBAdmin() .getDatabase(DBNAME_VECTOR_CLIENT) .getCollection(VECTOR_STORE_NAME, Quote.class); float[] embeddings = vectorize("We struggle all our life for nothing"); quoteRepository.findVector(embeddings,3) .stream() .map(Document::getData) .map(Quote::getQuote) .forEach(System.out::println); } // --- Utilities (loading CSV) --- private List<Quote> loadQuotesFromCsv(String filePath) { List<Quote> quotes = new ArrayList<>(); File csvFile = new File(VectorClientPhilosopherTest.class.getResource(filePath).getFile()); try (Scanner scanner = new Scanner(csvFile)) { while (scanner.hasNextLine()) { Quote q = mapCsvLine(scanner.nextLine()); if (q != null) quotes.add(q); } } catch (FileNotFoundException fex) { throw new IllegalArgumentException("file is not in the classpath", fex); } return quotes; } private Quote mapCsvLine(String line) { String[] parts = line.split(",(?=(?:[^\"]*\"[^\"]*\")*[^\"]*$)", -1); if (parts.length >= 3) { String author = parts[0]; String quote = parts[1].replaceAll("\"", ""); Set<String> tags = new HashSet<>(Arrays.asList(parts[2].split("\\;"))); return new Quote(author, quote, tags); } return null; } }
[ "dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder" ]
[((1581, 1856), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1835), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1803), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1772), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1727), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder'), ((1581, 1664), 'dev.langchain4j.model.openai.OpenAiEmbeddingModel.builder')]
package dev.langchain4j.model.image; import dev.langchain4j.data.image.Image; import dev.langchain4j.model.output.Response; import org.assertj.core.api.WithAssertions; import org.junit.jupiter.api.Test; import java.net.URI; class ImageModelTest implements WithAssertions { public static class FixedImageModel implements ImageModel { private final Image image; public FixedImageModel(Image image) { this.image = image; } @Override public Response<Image> generate(String prompt) { return Response.from(image); } } public static final Image PLACEHOLDER_IMAGE; static { try { PLACEHOLDER_IMAGE = Image.builder().url(new URI("https://foo.bar")).build(); } catch (Exception e) { throw new RuntimeException(e); } } @Test public void test_not_supported() { ImageModel model = new FixedImageModel(PLACEHOLDER_IMAGE); assertThatThrownBy(() -> model.generate("prompt", 1)) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Operation is not supported"); assertThatThrownBy(() -> model.edit(null, "prompt")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Operation is not supported"); assertThatThrownBy(() -> model.edit(null, null, "prompt")) .isInstanceOf(IllegalArgumentException.class) .hasMessage("Operation is not supported"); } @Test public void test_trivial() { ImageModel model = new FixedImageModel(PLACEHOLDER_IMAGE); Response<Image> response = model.generate("prompt"); assertThat(response).isNotNull(); assertThat(response.content()).isEqualTo(PLACEHOLDER_IMAGE); } }
[ "dev.langchain4j.data.image.Image.builder" ]
[((719, 774), 'dev.langchain4j.data.image.Image.builder'), ((719, 766), 'dev.langchain4j.data.image.Image.builder')]
package org.acme.example; import java.util.ArrayList; import java.util.List; import java.util.function.Supplier; import jakarta.inject.Singleton; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.output.Response; import dev.langchain4j.model.scoring.ScoringModel; import dev.langchain4j.rag.DefaultRetrievalAugmentor; import dev.langchain4j.rag.RetrievalAugmentor; import dev.langchain4j.rag.content.Content; import dev.langchain4j.rag.content.aggregator.ReRankingContentAggregator; import dev.langchain4j.rag.content.retriever.ContentRetriever; import dev.langchain4j.rag.query.Query; import io.quarkiverse.langchain4j.RegisterAiService; @RegisterAiService(retrievalAugmentor = AiServiceWithReranking.AugmentorWithReranking.class) public interface AiServiceWithReranking { String chat(String message); @Singleton class AugmentorWithReranking implements Supplier<RetrievalAugmentor> { ContentRetriever retriever = new ContentRetriever() { @Override public List<Content> retrieve(Query query) { if (query.text().equals("What is the fastest car?")) { return List.of(new Content("Ferrari goes 350"), new Content("Bugatti goes 450")); } else { throw new UnsupportedOperationException(); } } }; ScoringModel scoringModel = new ScoringModel() { @Override public Response<List<Double>> scoreAll(List<TextSegment> documents, String query) { List<Double> scores = new ArrayList<>(); for (TextSegment document : documents) { if (document.text().equals("Ferrari goes 350")) { scores.add(0.5); } else if (document.text().equals("Bugatti goes 450")) { scores.add(0.9); } else { scores.add(0.0); } } return new Response<>(scores); } }; @Override public RetrievalAugmentor get() { return DefaultRetrievalAugmentor.builder() .contentRetriever(retriever) .contentAggregator(new ReRankingContentAggregator(scoringModel, ReRankingContentAggregator.DEFAULT_QUERY_SELECTOR, 0.8)) .build(); } } }
[ "dev.langchain4j.rag.DefaultRetrievalAugmentor.builder" ]
[((2186, 2496), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((2186, 2467), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder'), ((2186, 2270), 'dev.langchain4j.rag.DefaultRetrievalAugmentor.builder')]
package org.acme.example.openai.aiservices; import java.util.Map; import java.util.concurrent.ConcurrentHashMap; import jakarta.annotation.PreDestroy; import jakarta.enterprise.context.RequestScoped; import jakarta.inject.Singleton; import jakarta.ws.rs.GET; import jakarta.ws.rs.Path; import org.jboss.resteasy.reactive.RestQuery; import dev.langchain4j.agent.tool.Tool; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.ChatMemoryProvider; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import io.quarkiverse.langchain4j.RegisterAiService; @Path("assistant-with-tool") public class AssistantWithToolsResource { private final Assistant assistant; public AssistantWithToolsResource(Assistant assistant) { this.assistant = assistant; } @GET public String get(@RestQuery String message) { return assistant.chat(message); } @RegisterAiService(tools = Calculator.class, chatMemoryProviderSupplier = RegisterAiService.BeanChatMemoryProviderSupplier.class) public interface Assistant { String chat(String userMessage); } @Singleton public static class Calculator { @Tool("Calculates the length of a string") int stringLength(String s) { return s.length(); } @Tool("Calculates the sum of two numbers") int add(int a, int b) { return a + b; } @Tool("Calculates the square root of a number") double sqrt(int x) { return Math.sqrt(x); } } @RequestScoped public static class ChatMemoryBean implements ChatMemoryProvider { private final Map<Object, ChatMemory> memories = new ConcurrentHashMap<>(); @Override public ChatMemory get(Object memoryId) { return memories.computeIfAbsent(memoryId, id -> MessageWindowChatMemory.builder() .maxMessages(20) .id(memoryId) .build()); } @PreDestroy public void close() { memories.clear(); } } }
[ "dev.langchain4j.memory.chat.MessageWindowChatMemory.builder" ]
[((1867, 2000), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1867, 1971), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder'), ((1867, 1937), 'dev.langchain4j.memory.chat.MessageWindowChatMemory.builder')]
package dev.langchain4j.model.chat; import dev.langchain4j.agent.tool.ToolSpecification; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.data.message.UserMessage; import dev.langchain4j.model.StreamingResponseHandler; import dev.langchain4j.model.output.Response; import org.assertj.core.api.WithAssertions; import org.junit.jupiter.api.Test; import java.util.ArrayList; import java.util.List; import java.util.Locale; class StreamingChatLanguageModelTest implements WithAssertions { public static class StreamingUpperCaseEchoModel implements StreamingChatLanguageModel { @Override public void generate(List<ChatMessage> messages, StreamingResponseHandler<AiMessage> handler) { ChatMessage lastMessage = messages.get(messages.size() - 1); Response<AiMessage> response = new Response<>(new AiMessage(lastMessage.text().toUpperCase(Locale.ROOT))); handler.onComplete(response); } } public static final class CollectorResponseHandler<T> implements StreamingResponseHandler<T> { private final List<Response<T>> responses = new ArrayList<>(); public List<Response<T>> responses() { return responses; } @Override public void onNext(String token) {} @Override public void onError(Throwable error) {} @Override public void onComplete(Response<T> response) { responses.add(response); } } @Test public void test_not_supported() { StreamingUpperCaseEchoModel model = new StreamingUpperCaseEchoModel(); CollectorResponseHandler<AiMessage> handler = new CollectorResponseHandler<>(); List<ChatMessage> messages = new ArrayList<>(); assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> model.generate(messages, new ArrayList<>(), handler)) .withMessageContaining("Tools are currently not supported by this model"); assertThatExceptionOfType(IllegalArgumentException.class) .isThrownBy(() -> model.generate(messages, ToolSpecification.builder().name("foo").build(), handler)) .withMessageContaining("Tools are currently 
not supported by this model"); } @Test public void test_generate() { StreamingChatLanguageModel model = new StreamingUpperCaseEchoModel(); { List<ChatMessage> messages = new ArrayList<>(); messages.add(new UserMessage("Hello")); messages.add(new AiMessage("Hi")); messages.add(new UserMessage("How are you?")); CollectorResponseHandler<AiMessage> handler = new CollectorResponseHandler<>(); model.generate(messages, handler); Response<AiMessage> response = handler.responses().get(0); assertThat(response.content().text()).isEqualTo("HOW ARE YOU?"); assertThat(response.tokenUsage()).isNull(); assertThat(response.finishReason()).isNull(); } { CollectorResponseHandler<AiMessage> handler = new CollectorResponseHandler<>(); model.generate("How are you?", handler); Response<AiMessage> response = handler.responses().get(0); assertThat(response.content().text()).isEqualTo("HOW ARE YOU?"); assertThat(response.tokenUsage()).isNull(); assertThat(response.finishReason()).isNull(); } } }
[ "dev.langchain4j.agent.tool.ToolSpecification.builder" ]
[((2183, 2230), 'dev.langchain4j.agent.tool.ToolSpecification.builder'), ((2183, 2222), 'dev.langchain4j.agent.tool.ToolSpecification.builder')]
package dev.langchain4j.data.document.splitter; import dev.langchain4j.data.document.Document; import dev.langchain4j.data.document.DocumentSplitter; import dev.langchain4j.data.document.Metadata; import dev.langchain4j.data.segment.TextSegment; import dev.langchain4j.model.Tokenizer; import dev.langchain4j.model.openai.OpenAiTokenizer; import org.junit.jupiter.api.Test; import java.util.List; import static dev.langchain4j.data.document.Metadata.metadata; import static dev.langchain4j.data.segment.TextSegment.textSegment; import static dev.langchain4j.model.openai.OpenAiModelName.GPT_3_5_TURBO; import static java.lang.String.format; import static org.assertj.core.api.Assertions.assertThat; class DocumentByParagraphSplitterTest { @Test void should_split_into_segments_with_one_paragraph_per_segment() { int maxSegmentSize = 30; String firstParagraph = "This is a first paragraph."; assertThat(firstParagraph).hasSizeLessThan(maxSegmentSize); String secondParagraph = "This is a second paragraph."; assertThat(secondParagraph).hasSizeLessThan(maxSegmentSize); assertThat(firstParagraph + "\n \n" + secondParagraph).hasSizeGreaterThan(maxSegmentSize); Document document = Document.from( format(" %s \n \n %s ", firstParagraph, secondParagraph), metadata("document", "0") ); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, 0); List<TextSegment> segments = splitter.split(document); segments.forEach(segment -> assertThat(segment.text().length()).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( textSegment(firstParagraph, metadata("index", "0").add("document", "0")), textSegment(secondParagraph, metadata("index", "1").add("document", "0")) ); } @Test void should_split_into_segments_with_multiple_paragraphs_per_segment() { int maxSegmentSize = 60; String firstParagraph = "This is a first paragraph."; String secondParagraph = "This is a second paragraph."; assertThat(firstParagraph + 
secondParagraph).hasSizeLessThan(maxSegmentSize); String thirdParagraph = "This is a third paragraph."; assertThat(thirdParagraph).hasSizeLessThan(maxSegmentSize); assertThat(firstParagraph + secondParagraph + thirdParagraph) .hasSizeGreaterThan(maxSegmentSize); Document document = Document.from( format(" %s \n \n %s \n \n %s ", firstParagraph, secondParagraph, thirdParagraph), metadata("document", "0") ); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, 0); List<TextSegment> segments = splitter.split(document); segments.forEach(segment -> assertThat(segment.text().length()).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( textSegment(firstParagraph + "\n\n" + secondParagraph, metadata("index", "0").add("document", "0")), textSegment(thirdParagraph, metadata("index", "1").add("document", "0")) ); } @Test void should_split_paragraph_into_sentences_if_it_does_not_fit_into_segment() { int maxSegmentSize = 50; String firstParagraph = "This is a first paragraph."; assertThat(firstParagraph).hasSizeLessThan(maxSegmentSize); String firstSentenceOfSecondParagraph = "This is a fist sentence of a second paragraph."; assertThat(firstSentenceOfSecondParagraph).hasSizeLessThan(maxSegmentSize); String secondSentenceOfSecondParagraph = "This is a second sentence of a second paragraph."; assertThat(secondSentenceOfSecondParagraph).hasSizeLessThan(maxSegmentSize); String secondParagraph = firstSentenceOfSecondParagraph + " " + secondSentenceOfSecondParagraph; assertThat(secondParagraph).hasSizeGreaterThan(maxSegmentSize); String thirdParagraph = "This is a third paragraph."; assertThat(thirdParagraph).hasSizeLessThan(maxSegmentSize); Document document = Document.from( format(" %s \n \n %s \n \n %s ", firstParagraph, secondParagraph, thirdParagraph), metadata("document", "0") ); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, 0); List<TextSegment> segments = splitter.split(document); 
segments.forEach(segment -> assertThat(segment.text().length()).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( textSegment(firstParagraph, metadata("index", "0").add("document", "0")), textSegment(firstSentenceOfSecondParagraph, metadata("index", "1").add("document", "0")), textSegment(secondSentenceOfSecondParagraph, metadata("index", "2").add("document", "0")), textSegment(thirdParagraph, metadata("index", "3").add("document", "0")) ); } @Test void should_split_sample_text_containing_multiple_paragraphs() { int maxSegmentSize = 65; Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); String p1 = "In a small town nestled between two vast mountains, there was a shop unlike any other. " + "A unique haven. " + "Visitors would often comment on its peculiar charm, always slightly different from what they " + "remembered on their previous visits. " + "The store stood as a testament to the passage of time and the ever-changing landscape of tales."; assertThat(tokenizer.estimateTokenCountInText(p1)).isEqualTo(62); String p2p1 = "Upon entering, the first thing to strike you was the enormity of it all. " + "Every inch of space was occupied with books. " + "Some stood tall and regal on the highest shelves, looking as if they had witnessed epochs come and go. " + "They were leather-bound, with pages yellowed by age."; assertThat(tokenizer.estimateTokenCountInText(p2p1)).isEqualTo(60); String p2p2 = "Others, smaller and brightly adorned, were reminiscent of summer days and childhood laughter. " + "But these physical objects were mere vessels. " + "It was the stories inside that held power."; assertThat(tokenizer.estimateTokenCountInText(p2p2)).isEqualTo(33); String p3 = "Mrs. Jenkins ran the shop. " + "A mystery in her own right. " + "Her silver hair cascaded like a waterfall, and her eyes seemed to see more than most. 
" + "With just a glance, she'd find the perfect story for you."; assertThat(tokenizer.estimateTokenCountInText(p3)).isEqualTo(47); String p4p1 = "One wet afternoon, Eli entered. " + "He was just a boy, lost in the vastness of the store. " + "Between the aisles, his small fingers danced on the spines of books, feeling the heartbeat of " + "countless tales. " + "Then, a simple brown-covered book whispered to him."; assertThat(tokenizer.estimateTokenCountInText(p4p1)).isEqualTo(56); String p4p2 = "Without grandeur or pretense, it beckoned. " + "And he listened."; assertThat(tokenizer.estimateTokenCountInText(p4p2)).isEqualTo(15); String p5 = "He read. " + "And read. " + "The world around him melted."; assertThat(tokenizer.estimateTokenCountInText(p5)).isEqualTo(12); String p6 = "When Mrs. Jenkins approached, night had fallen. " + "She gently remarked, \"Books have a way of finding their reader.\" " + "Eli simply nodded, understanding the profound truth in her words."; assertThat(tokenizer.estimateTokenCountInText(p6)).isEqualTo(36); String p7 = "Some places and stories remain etched in our souls, offering lessons and moments of sheer wonder. 
" + "They defy definition."; assertThat(tokenizer.estimateTokenCountInText(p7)).isEqualTo(23); Document document = Document.from( format("%s\n\n%s %s\n\n%s\n\n%s %s\n\n%s\n\n%s\n\n%s", p1, p2p1, p2p2, p3, p4p1, p4p2, p5, p6, p7), metadata("document", "0") ); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, 0, tokenizer); List<TextSegment> segments = splitter.split(document); segments.forEach(segment -> assertThat(tokenizer.estimateTokenCountInText(segment.text())).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( textSegment(p1, metadata("index", "0").add("document", "0")), textSegment(p2p1, metadata("index", "1").add("document", "0")), textSegment(p2p2, metadata("index", "2").add("document", "0")), textSegment(p3, metadata("index", "3").add("document", "0")), textSegment(p4p1, metadata("index", "4").add("document", "0")), textSegment(p4p2, metadata("index", "5").add("document", "0")), textSegment(p5 + "\n\n" + p6, metadata("index", "6").add("document", "0")), textSegment(p7, metadata("index", "7").add("document", "0")) ); } @Test void should_split_sample_text_containing_multiple_paragraphs_with_overlap() { int maxSegmentSize = 65; int maxOverlapSize = 15; Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); String s1 = "In a small town nestled between two vast mountains, there was a shop unlike any other."; String s2 = "A unique haven."; String s3 = "Visitors would often comment on its peculiar charm, always slightly different from what they remembered on their previous visits."; String s4 = "The store stood as a testament to the passage of time and the ever-changing landscape of tales."; String s5 = "Upon entering, the first thing to strike you was the enormity of it all."; String s6 = "Every inch of space was occupied with books."; String s7 = "Some stood tall and regal on the highest shelves, looking as if they had witnessed epochs come and go."; String s8 = "They were leather-bound, with pages yellowed 
by age."; String s9 = "Others, smaller and brightly adorned, were reminiscent of summer days and childhood laughter."; String s10 = "But these physical objects were mere vessels."; String s11 = "It was the stories inside that held power."; String s12 = "Mrs. Jenkins ran the shop."; String s13 = "A mystery in her own right."; String s14 = "Her silver hair cascaded like a waterfall, and her eyes seemed to see more than most."; String s15 = "With just a glance, she'd find the perfect story for you."; String s16 = "One wet afternoon, Eli entered."; String s17 = "He was just a boy, lost in the vastness of the store."; String s18 = "Between the aisles, his small fingers danced on the spines of books, feeling the heartbeat of countless tales."; String s19 = "Then, a simple brown-covered book whispered to him."; String s20 = "Without grandeur or pretense, it beckoned."; String s21 = "And he listened."; String s22 = "He read."; String s23 = "And read."; String s24 = "The world around him melted."; String s25 = "When Mrs. 
Jenkins approached, night had fallen."; String s26 = "She gently remarked, \"Books have a way of finding their reader.\""; String s27 = "Eli simply nodded, understanding the profound truth in her words."; String s28 = "Some places and stories remain etched in our souls, offering lessons and moments of sheer wonder."; String s29 = "They defy definition."; Document document = Document.from( format("%s %s %s %s\n\n%s %s %s %s %s %s %s\n\n%s %s %s %s\n\n%s %s %s %s %s %s %s %s %s\n\n%s %s %s %s %s", s1, s2, s3, s4, s5, s6, s7, s8, s9, s10, s11, s12, s13, s14, s15, s16, s17, s18, s19, s20, s21, s22, s23, s24, s25, s26, s27, s28, s29 ), metadata("document", "0") ); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, maxOverlapSize, tokenizer); List<TextSegment> segments = splitter.split(document); segments.forEach(segment -> assertThat(tokenizer.estimateTokenCountInText(segment.text())).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( textSegment(format("%s %s %s %s", s1, s2, s3, s4), metadata("index", "0").add("document", "0")), textSegment(format("%s %s %s %s", s5, s6, s7, s8), metadata("index", "1").add("document", "0")), textSegment(format("%s %s %s %s", s8, s9, s10, s11), metadata("index", "2").add("document", "0")), textSegment(format("%s\n\n%s %s %s %s", s11, s12, s13, s14, s15), metadata("index", "3").add("document", "0")), textSegment(format("%s %s %s %s", s15, s16, s17, s18), metadata("index", "4").add("document", "0")), textSegment(format("%s %s %s %s %s %s", s19, s20, s21, s22, s23, s24), metadata("index", "5").add("document", "0")), textSegment(format("%s %s %s %s %s %s", s22, s23, s24, s25, s26, s27), metadata("index", "6").add("document", "0")), textSegment(format("%s %s %s", s27, s28, s29), metadata("index", "7").add("document", "0")) ); } @Test void should_split_sample_text_without_paragraphs() { int maxSegmentSize = 100; Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); String segment1 = "In a 
small town nestled between two vast mountains, there was a shop unlike any other. " + "A unique haven. " + "Visitors would often comment on its peculiar charm, always slightly different from what they " + "remembered on their previous visits. " + "The store stood as a testament to the passage of time and the ever-changing landscape of tales. " + "Upon entering, the first thing to strike you was the enormity of it all. " + "Every inch of space was occupied with books."; String segment2 = "Some stood tall and regal on the highest shelves, " + "looking as if they had witnessed epochs come and go. " + "They were leather-bound, with pages yellowed by age. " + "Others, smaller and brightly adorned, were reminiscent of summer days and childhood laughter. " + "But these physical objects were mere vessels. " + "It was the stories inside that held power. " + "Mrs. Jenkins ran the shop. " + "A mystery in her own right."; String segment3 = "Her silver hair cascaded like a waterfall, and her eyes seemed to see more than most. " + "With just a glance, she'd find the perfect story for you. " + "One wet afternoon, Eli entered. " + "He was just a boy, lost in the vastness of the store. " + "Between the aisles, his small fingers danced on the spines of books, feeling the heartbeat of " + "countless tales. " + "Then, a simple brown-covered book whispered to him."; String segment4 = "Without grandeur or pretense, it beckoned. " + "And he listened. " + "He read. " + "And read. " + "The world around him melted. " + "When Mrs. Jenkins approached, night had fallen. " + "She gently remarked, \"Books have a way of finding their reader.\" " + "Eli simply nodded, understanding the profound truth in her words. " + "Some places and stories remain etched in our souls, offering lessons and moments of sheer wonder. 
" + "They defy definition."; Document document = Document.from( format("%s %s %s %s", segment1, segment2, segment3, segment4), metadata("document", "0") ); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, 0, tokenizer); List<TextSegment> segments = splitter.split(document); segments.forEach(segment -> assertThat(tokenizer.estimateTokenCountInText(segment.text())).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( textSegment(segment1, metadata("index", "0").add("document", "0")), textSegment(segment2, metadata("index", "1").add("document", "0")), textSegment(segment3, metadata("index", "2").add("document", "0")), textSegment(segment4, metadata("index", "3").add("document", "0")) ); } @Test void should_split_sample_text_without_paragraphs_with_small_overlap() { // given int maxSegmentSize = 100; int maxOverlapSize = 25; Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, maxOverlapSize, tokenizer); Document document = Document.from(sentences(0, 28), Metadata.from("document", "0")); // when List<TextSegment> segments = splitter.split(document); // then segments.forEach(segment -> assertThat(tokenizer.estimateTokenCountInText(segment.text())).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( TextSegment.from(sentences(0, 5), Metadata.from("index", "0").add("document", "0")), TextSegment.from(sentences(5, 12), Metadata.from("index", "1").add("document", "0")), TextSegment.from(sentences(10, 16), Metadata.from("index", "2").add("document", "0")), TextSegment.from(sentences(15, 24), Metadata.from("index", "3").add("document", "0")), TextSegment.from(sentences(21, 28), Metadata.from("index", "4").add("document", "0")) ); assertThat(tokenizer.estimateTokenCountInText(sentences(5, 5))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(10, 
12))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(15, 16))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(21, 24))).isLessThanOrEqualTo(maxOverlapSize); } @Test void should_split_sample_text_without_paragraphs_with_big_overlap() { // given int maxSegmentSize = 100; int maxOverlapSize = 80; Tokenizer tokenizer = new OpenAiTokenizer(GPT_3_5_TURBO); DocumentSplitter splitter = new DocumentByParagraphSplitter(maxSegmentSize, maxOverlapSize, tokenizer); Document document = Document.from(sentences(0, 28), Metadata.from("document", "0")); // when List<TextSegment> segments = splitter.split(document); // then segments.forEach(segment -> assertThat(tokenizer.estimateTokenCountInText(segment.text())).isLessThanOrEqualTo(maxSegmentSize)); assertThat(segments).containsExactly( TextSegment.from(sentences(0, 5), Metadata.from("index", "0").add("document", "0")), TextSegment.from(sentences(1, 6), Metadata.from("index", "1").add("document", "0")), TextSegment.from(sentences(3, 8), Metadata.from("index", "2").add("document", "0")), // TODO fix chopped "Mrs." 
TextSegment.from(sentences(4, 10) + " Mrs.", Metadata.from("index", "3").add("document", "0")), TextSegment.from(sentences(5, 12), Metadata.from("index", "4").add("document", "0")), TextSegment.from(sentences(7, 15), Metadata.from("index", "5").add("document", "0")), TextSegment.from(sentences(9, 16), Metadata.from("index", "6").add("document", "0")), // TODO fix chopped s18 // TODO splitter should prioritize progressing forward instead of maximizing overlap TextSegment.from(sentences(10, 16) + " " + sentences[17].replace(" countless tales.", ""), Metadata.from("index", "7").add("document", "0")), // TODO this segment should not be present, there is s14-s19 below TextSegment.from(sentences(13, 17), Metadata.from("index", "8").add("document", "0")), TextSegment.from(sentences(13, 18), Metadata.from("index", "9").add("document", "0")), TextSegment.from(sentences(14, 23), Metadata.from("index", "10").add("document", "0")), TextSegment.from(sentences(16, 24), Metadata.from("index", "11").add("document", "0")), TextSegment.from(sentences(17, 26), Metadata.from("index", "12").add("document", "0")), TextSegment.from(sentences(18, 28), Metadata.from("index", "13").add("document", "0")) ); assertThat(tokenizer.estimateTokenCountInText(sentences(1, 5))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(3, 6))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(4, 8))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(5, 10))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(7, 12))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(9, 15))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(10, 16))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(13, 
16))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(13, 17))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(14, 18))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(16, 23))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(17, 24))).isLessThanOrEqualTo(maxOverlapSize); assertThat(tokenizer.estimateTokenCountInText(sentences(18, 26))).isLessThanOrEqualTo(maxOverlapSize); } static String[] sentences = { "In a small town nestled between two vast mountains, there was a shop unlike any other.", "A unique haven.", "Visitors would often comment on its peculiar charm, always slightly different from what they remembered on their previous visits.", "The store stood as a testament to the passage of time and the ever-changing landscape of tales.", "Upon entering, the first thing to strike you was the enormity of it all.", "Every inch of space was occupied with books.", "Some stood tall and regal on the highest shelves, looking as if they had witnessed epochs come and go.", "They were leather-bound, with pages yellowed by age.", "Others, smaller and brightly adorned, were reminiscent of summer days and childhood laughter.", "But these physical objects were mere vessels.", "It was the stories inside that held power.", "Mrs. 
Jenkins ran the shop.", "A mystery in her own right.", "Her silver hair cascaded like a waterfall, and her eyes seemed to see more than most.", "With just a glance, she'd find the perfect story for you.", "One wet afternoon, Eli entered.", "He was just a boy, lost in the vastness of the store.", "Between the aisles, his small fingers danced on the spines of books, feeling the heartbeat of countless tales.", "Then, a simple brown-covered book whispered to him.", "Without grandeur or pretense, it beckoned.", "And he listened.", "He read.", "And read.", "The world around him melted.", "When Mrs. Jenkins approached, night had fallen.", "She gently remarked, \"Books have a way of finding their reader.\"", "Eli simply nodded, understanding the profound truth in her words.", "Some places and stories remain etched in our souls, offering lessons and moments of sheer wonder.", "They defy definition." }; private static String sentences(int fromInclusive, int toInclusive) { StringBuilder sb = new StringBuilder(); for (int i = fromInclusive; i <= toInclusive; i++) { if (sb.length() > 0) { sb.append(" "); } sb.append(sentences[i]); } return sb.toString(); } }
[ "dev.langchain4j.data.document.Metadata.from" ]
[((18156, 18204), 'dev.langchain4j.data.document.Metadata.from'), ((18258, 18306), 'dev.langchain4j.data.document.Metadata.from'), ((18361, 18409), 'dev.langchain4j.data.document.Metadata.from'), ((18464, 18512), 'dev.langchain4j.data.document.Metadata.from'), ((18567, 18615), 'dev.langchain4j.data.document.Metadata.from'), ((19866, 19914), 'dev.langchain4j.data.document.Metadata.from'), ((19967, 20015), 'dev.langchain4j.data.document.Metadata.from'), ((20068, 20116), 'dev.langchain4j.data.document.Metadata.from'), ((20223, 20271), 'dev.langchain4j.data.document.Metadata.from'), ((20325, 20373), 'dev.langchain4j.data.document.Metadata.from'), ((20427, 20475), 'dev.langchain4j.data.document.Metadata.from'), ((20529, 20577), 'dev.langchain4j.data.document.Metadata.from'), ((20828, 20876), 'dev.langchain4j.data.document.Metadata.from'), ((21014, 21062), 'dev.langchain4j.data.document.Metadata.from'), ((21117, 21165), 'dev.langchain4j.data.document.Metadata.from'), ((21220, 21269), 'dev.langchain4j.data.document.Metadata.from'), ((21324, 21373), 'dev.langchain4j.data.document.Metadata.from'), ((21428, 21477), 'dev.langchain4j.data.document.Metadata.from'), ((21532, 21581), 'dev.langchain4j.data.document.Metadata.from')]
package com.baeldung.langchain; import static org.assertj.core.api.Assertions.assertThat; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import dev.langchain4j.agent.tool.Tool; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; public class ServiceWithToolsLiveTest { private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class); static class Calculator { @Tool("Calculates the length of a string") int stringLength(String s) { return s.length(); } @Tool("Calculates the sum of two numbers") int add(int a, int b) { return a + b; } } interface Assistant { String chat(String userMessage); } @Test public void givenServiceWithTools_whenPrompted_thenValidResponse() { Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY)) .tools(new Calculator()) .chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); String question = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?"; String answer = assistant.chat(question); logger.info(answer); assertThat(answer).contains("13"); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((987, 1234), 'dev.langchain4j.service.AiServices.builder'), ((987, 1213), 'dev.langchain4j.service.AiServices.builder'), ((987, 1144), 'dev.langchain4j.service.AiServices.builder'), ((987, 1107), 'dev.langchain4j.service.AiServices.builder')]
package com.baeldung.langchain; import static org.assertj.core.api.Assertions.assertThat; import org.junit.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import dev.langchain4j.agent.tool.Tool; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; public class ServiceWithToolsLiveTest { private static final Logger logger = LoggerFactory.getLogger(ServiceWithToolsLiveTest.class); static class Calculator { @Tool("Calculates the length of a string") int stringLength(String s) { return s.length(); } @Tool("Calculates the sum of two numbers") int add(int a, int b) { return a + b; } } interface Assistant { String chat(String userMessage); } @Test public void givenServiceWithTools_whenPrompted_thenValidResponse() { Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(OpenAiChatModel.withApiKey(Constants.OPENAI_API_KEY)) .tools(new Calculator()) .chatMemory(MessageWindowChatMemory.withMaxMessages(10)) .build(); String question = "What is the sum of the numbers of letters in the words \"language\" and \"model\"?"; String answer = assistant.chat(question); logger.info(answer); assertThat(answer).contains("13"); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((987, 1234), 'dev.langchain4j.service.AiServices.builder'), ((987, 1213), 'dev.langchain4j.service.AiServices.builder'), ((987, 1144), 'dev.langchain4j.service.AiServices.builder'), ((987, 1107), 'dev.langchain4j.service.AiServices.builder')]
package com.moyz.adi.common.interfaces; import com.moyz.adi.common.exception.BaseException; import com.moyz.adi.common.util.JsonUtil; import com.moyz.adi.common.util.LocalCache; import com.moyz.adi.common.vo.AnswerMeta; import com.moyz.adi.common.vo.ChatMeta; import com.moyz.adi.common.vo.PromptMeta; import com.moyz.adi.common.vo.SseAskParams; import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.ChatMessage; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.chat.StreamingChatLanguageModel; import dev.langchain4j.model.output.Response; import dev.langchain4j.service.AiServices; import dev.langchain4j.service.TokenStream; import lombok.extern.slf4j.Slf4j; import org.apache.commons.lang3.StringUtils; import org.springframework.web.servlet.mvc.method.annotation.SseEmitter; import java.io.IOException; import java.net.Proxy; import java.util.UUID; import static com.moyz.adi.common.enums.ErrorEnum.B_LLM_SERVICE_DISABLED; @Slf4j public abstract class AbstractLLMService<T> { protected Proxy proxy; protected String modelName; protected T setting; protected StreamingChatLanguageModel streamingChatLanguageModel; protected ChatLanguageModel chatLanguageModel; public AbstractLLMService(String modelName, String settingName, Class<T> clazz, Proxy proxy) { this.modelName = modelName; this.proxy = proxy; String st = LocalCache.CONFIGS.get(settingName); setting = JsonUtil.fromJson(st, clazz); } /** * 检测该service是否可用(不可用的情况通常是没有配置key) * * @return */ public abstract boolean isEnabled(); public ChatLanguageModel getChatLLM() { if (null != chatLanguageModel) { return chatLanguageModel; } chatLanguageModel = buildChatLLM(); return chatLanguageModel; } public StreamingChatLanguageModel getStreamingChatLLM() { if (null != streamingChatLanguageModel) { return streamingChatLanguageModel; } streamingChatLanguageModel = buildStreamingChatLLM(); return streamingChatLanguageModel; } protected abstract ChatLanguageModel buildChatLLM(); 
protected abstract StreamingChatLanguageModel buildStreamingChatLLM(); protected abstract String parseError(Object error); public Response<AiMessage> chat(ChatMessage chatMessage) { if(!isEnabled()){ log.error("llm service is disabled"); throw new BaseException(B_LLM_SERVICE_DISABLED); } return getChatLLM().generate(chatMessage); } public void sseChat(SseAskParams params, TriConsumer<String, PromptMeta, AnswerMeta> consumer) { if(!isEnabled()){ log.error("llm service is disabled"); throw new BaseException(B_LLM_SERVICE_DISABLED); } //create chat assistant AiServices<IChatAssistant> serviceBuilder = AiServices.builder(IChatAssistant.class) .streamingChatLanguageModel(getStreamingChatLLM()); if (null != params.getChatMemory()) { serviceBuilder.chatMemory(params.getChatMemory()); } IChatAssistant chatAssistant = serviceBuilder.build(); TokenStream tokenStream; if (StringUtils.isNotBlank(params.getSystemMessage())) { tokenStream = chatAssistant.chat(params.getSystemMessage(), params.getUserMessage()); } else { tokenStream = chatAssistant.chat(params.getUserMessage()); } tokenStream.onNext((content) -> { log.info("get content:{}", content); //加空格配合前端的fetchEventSource进行解析,见https://github.com/Azure/fetch-event-source/blob/45ac3cfffd30b05b79fbf95c21e67d4ef59aa56a/src/parse.ts#L129-L133 try { params.getSseEmitter().send(" " + content); } catch (IOException e) { log.error("stream onNext error", e); } }) .onComplete((response) -> { log.info("返回数据结束了:{}", response); String questionUuid = StringUtils.isNotBlank(params.getRegenerateQuestionUuid()) ? 
params.getRegenerateQuestionUuid() : UUID.randomUUID().toString().replace("-", ""); PromptMeta questionMeta = new PromptMeta(response.tokenUsage().inputTokenCount(), questionUuid); AnswerMeta answerMeta = new AnswerMeta(response.tokenUsage().outputTokenCount(), UUID.randomUUID().toString().replace("-", "")); ChatMeta chatMeta = new ChatMeta(questionMeta, answerMeta); String meta = JsonUtil.toJson(chatMeta).replaceAll("\r\n", ""); log.info("meta:" + meta); try { params.getSseEmitter().send(" [META]" + meta); } catch (IOException e) { log.error("stream onComplete error", e); throw new RuntimeException(e); } // close eventSourceEmitter after tokens was calculated params.getSseEmitter().complete(); consumer.accept(response.content().text(), questionMeta, answerMeta); }) .onError((error) -> { log.error("stream error", error); try { String errorMsg = parseError(error); if(StringUtils.isBlank(errorMsg)){ errorMsg = error.getMessage(); } params.getSseEmitter().send(SseEmitter.event().name("error").data(errorMsg)); } catch (IOException e) { log.error("sse error", e); } params.getSseEmitter().complete(); }) .start(); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((1439, 1474), 'com.moyz.adi.common.util.LocalCache.CONFIGS.get'), ((2985, 3092), 'dev.langchain4j.service.AiServices.builder'), ((4357, 4402), 'java.util.UUID.randomUUID'), ((4357, 4385), 'java.util.UUID.randomUUID'), ((4622, 4667), 'java.util.UUID.randomUUID'), ((4622, 4650), 'java.util.UUID.randomUUID'), ((4784, 4832), 'com.moyz.adi.common.util.JsonUtil.toJson'), ((5780, 5827), 'org.springframework.web.servlet.mvc.method.annotation.SseEmitter.event'), ((5780, 5812), 'org.springframework.web.servlet.mvc.method.annotation.SseEmitter.event')]
/* * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package services; import java.util.ArrayList; import java.util.Arrays; import java.util.List; import java.util.Map; import java.util.concurrent.ExecutionException; import java.io.IOException; import java.text.SimpleDateFormat; import java.util.stream.*; import javax.annotation.PostConstruct; import com.google.cloud.aiplatform.v1.Endpoint; import com.google.cloud.aiplatform.v1.EndpointName; import com.google.cloud.aiplatform.v1.EndpointServiceClient; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.RequestBody; import org.springframework.web.bind.annotation.RequestHeader; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; import services.actuator.StartupCheck; // Vision API packages import com.google.cloud.vision.v1.*; import com.google.cloud.vision.v1.Feature.Type; import com.google.cloud.MetadataConfig; import com.google.cloud.firestore.*; import com.google.api.core.ApiFuture; //LangChain4j packages import dev.langchain4j.data.message.AiMessage; import dev.langchain4j.data.message.UserMessage; import 
dev.langchain4j.model.output.Response; import dev.langchain4j.model.vertexai.VertexAiChatModel; import dev.langchain4j.model.vertexai.VertexAiLanguageModel; // Vertex AI packages @RestController public class EventController { private static final Logger logger = LoggerFactory.getLogger(EventController.class); private static final String projectID = MetadataConfig.getProjectId(); private static final String zone = MetadataConfig.getZone(); private static final List<String> requiredFields = Arrays.asList("ce-id", "ce-source", "ce-type", "ce-specversion"); @Autowired private EventService eventService; @PostConstruct public void init() { logger.info("ImageAnalysisApplication: EventController Post Construct Initializer " + new SimpleDateFormat("HH:mm:ss.SSS").format(new java.util.Date(System.currentTimeMillis()))); logger.info("ImageAnalysisApplication: EventController Post Construct - StartupCheck can be enabled"); StartupCheck.up(); } @GetMapping("start") String start(){ logger.info("ImageAnalysisApplication: EventController - Executed start endpoint request " + new SimpleDateFormat("HH:mm:ss.SSS").format(new java.util.Date(System.currentTimeMillis()))); return "EventController started"; } @RequestMapping(value = "/", method = RequestMethod.POST) public ResponseEntity<String> receiveMessage( @RequestBody Map<String, Object> body, @RequestHeader Map<String, String> headers) throws IOException, InterruptedException, ExecutionException { // Validate the number of available processors logger.info("EventController: Active processors: " + Runtime.getRuntime().availableProcessors()); System.out.println("Header elements"); for (String field : requiredFields) { if (headers.get(field) == null) { String msg = String.format("Missing expected header: %s.", field); System.out.println(msg); return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST); } else { System.out.println(field + " : " + headers.get(field)); } } System.out.println("Body elements"); for (String bodyField : 
body.keySet()) { System.out.println(bodyField + " : " + body.get(bodyField)); } if (headers.get("ce-subject") == null) { String msg = "Missing expected header: ce-subject."; System.out.println(msg); return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST); } String ceSubject = headers.get("ce-subject"); String msg = "Detected change in Cloud Storage bucket: (ce-subject) : " + ceSubject; System.out.println(msg); String fileName = (String)body.get("name"); String bucketName = (String)body.get("bucket"); logger.info("New picture uploaded " + fileName); if(fileName == null){ msg = "Missing expected body element: file name"; System.out.println(msg); return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST); } try (ImageAnnotatorClient vision = ImageAnnotatorClient.create()) { List<AnnotateImageRequest> requests = new ArrayList<>(); ImageSource imageSource = ImageSource.newBuilder() .setGcsImageUri("gs://" + bucketName + "/" + fileName) .build(); Image image = Image.newBuilder() .setSource(imageSource) .build(); Feature featureLabel = Feature.newBuilder() .setType(Type.LABEL_DETECTION) .build(); Feature featureImageProps = Feature.newBuilder() .setType(Type.IMAGE_PROPERTIES) .build(); Feature featureSafeSearch = Feature.newBuilder() .setType(Type.SAFE_SEARCH_DETECTION) .build(); Feature featureTextDetection = Feature.newBuilder() .setType(Type.TEXT_DETECTION) .build(); Feature featureLogoDetection = Feature.newBuilder() .setType(Type.LOGO_DETECTION) .build(); AnnotateImageRequest request = AnnotateImageRequest.newBuilder() .addFeatures(featureLabel) .addFeatures(featureImageProps) .addFeatures(featureSafeSearch) .addFeatures(featureTextDetection) .addFeatures(featureLogoDetection) .setImage(image) .build(); requests.add(request); logger.info("Calling the Vision API..."); BatchAnnotateImagesResponse result = vision.batchAnnotateImages(requests); List<AnnotateImageResponse> responses = result.getResponsesList(); if (responses.size() == 0) { logger.info("No 
response received from Vision API."); return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST); } AnnotateImageResponse response = responses.get(0); if (response.hasError()) { logger.info("Error: " + response.getError().getMessage()); return new ResponseEntity<String>(msg, HttpStatus.BAD_REQUEST); } List<String> labels = response.getLabelAnnotationsList().stream() .map(annotation -> annotation.getDescription()) .collect(Collectors.toList()); logger.info("Annotations found by Vision API:"); for (String label: labels) { logger.info("- " + label); } String mainColor = "#FFFFFF"; ImageProperties imgProps = response.getImagePropertiesAnnotation(); if (imgProps.hasDominantColors()) { DominantColorsAnnotation colorsAnn = imgProps.getDominantColors(); ColorInfo colorInfo = colorsAnn.getColors(0); mainColor = rgbHex( colorInfo.getColor().getRed(), colorInfo.getColor().getGreen(), colorInfo.getColor().getBlue()); logger.info("Color: " + mainColor); } boolean isSafe = false; if (response.hasSafeSearchAnnotation()) { SafeSearchAnnotation safeSearch = response.getSafeSearchAnnotation(); isSafe = Stream.of( safeSearch.getAdult(), safeSearch.getMedical(), safeSearch.getRacy(), safeSearch.getSpoof(), safeSearch.getViolence()) .allMatch( likelihood -> likelihood != Likelihood.LIKELY && likelihood != Likelihood.VERY_LIKELY ); logger.info("Is Image Safe? 
" + isSafe); } logger.info("Logo Annotations:"); for (EntityAnnotation annotation : response.getLogoAnnotationsList()) { logger.info("Logo: " + annotation.getDescription()); List<Property> properties = annotation.getPropertiesList(); logger.info("Logo property list:"); for (Property property : properties) { logger.info(String.format("Name: %s, Value: %s"), property.getName(), property.getValue()); } } String prompt = "Explain the text "; String textElements = ""; logger.info("Text Annotations:"); for (EntityAnnotation annotation : response.getTextAnnotationsList()) { textElements = annotation.getDescription(); prompt += textElements + " "; logger.info("Text: " + textElements); // if(textElements.matches("^[a-zA-Z0-9]+$")) prompt += textElements; } // build alternative prompt using Vertex AI // extractTextFromImage(bucketName, fileName); Response<AiMessage> modelResponse = null; if (prompt.length() > 0) { VertexAiChatModel vertexAiChatModel = VertexAiChatModel.builder() .endpoint("us-central1-aiplatform.googleapis.com:443") .project(projectID) .location(zone) .publisher("google") .modelName("chat-bison@001") .temperature(0.1) .maxOutputTokens(50) .topK(0) .topP(0.0) .maxRetries(3) .build(); modelResponse = vertexAiChatModel.generate(UserMessage.from(prompt)); logger.info("Result Chat Model: " + modelResponse.content().text()); } if (prompt.length() > 0) { VertexAiLanguageModel vertexAiTextModel = VertexAiLanguageModel.builder() .endpoint("us-central1-aiplatform.googleapis.com:443") .project(projectID) .location(zone) .publisher("google") .modelName("text-bison@001") .temperature(0.1) .maxOutputTokens(50) .topK(0) .topP(0.0) .maxRetries(3) .build(); Response<String> textResponse = vertexAiTextModel.generate(prompt); logger.info("Result Text Model: " + textResponse.content()); } // Saving result to Firestore if (isSafe && modelResponse != null) { ApiFuture<WriteResult> writeResult = eventService.storeImage(fileName, labels, mainColor, modelResponse.content().text()); 
logger.info("Picture metadata saved in Firestore at " + writeResult.get().getUpdateTime()); } } return new ResponseEntity<String>(msg, HttpStatus.OK); } // private void extractTextFromImage(String bucketName, String fileName) throws IOException { // try (EndpointServiceClient endpointServiceClient = EndpointServiceClient.create()) { // EndpointName name = // EndpointName.ofProjectLocationEndpointName("[PROJECT]", "[LOCATION]", "[ENDPOINT]"); // Endpoint response = endpointServiceClient.getEndpoint(name); // logger.info("Endpoint description: " +response.getDescription()); // } // } private static String rgbHex(float red, float green, float blue) { return String.format("#%02x%02x%02x", (int)red, (int)green, (int)blue); } } // [END eventarc_audit_storage_handler]
[ "dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder", "dev.langchain4j.model.vertexai.VertexAiChatModel.builder" ]
[((9657, 10150), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10119), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10082), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10049), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 10018), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9975), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9935), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9884), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9841), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9803), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((9657, 9761), 'dev.langchain4j.model.vertexai.VertexAiChatModel.builder'), ((10409, 10906), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10875), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10838), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10805), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10774), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10731), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10691), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10640), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10597), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10559), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder'), ((10409, 10517), 'dev.langchain4j.model.vertexai.VertexAiLanguageModel.builder')]
package com.roy.langchainjavachat.controller; import com.roy.langchainjavachat.annotation.ReWriteBody; import com.roy.langchainjavachat.model.req.ChatMsgReq; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.openai.OpenAiChatModel; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import lombok.extern.slf4j.Slf4j; import org.springframework.beans.factory.annotation.Value; import org.springframework.web.bind.annotation.*; import java.util.List; import static dev.langchain4j.model.openai.OpenAiChatModelName.GPT_3_5_TURBO; /** * 对话管理 * * @author roy */ @Slf4j @ReWriteBody @RestController @Api(tags = "对话管理") @RequestMapping("/v1/chat/") public class ChatController { @Value("${OPENAI_API_KEY}") String OPENAI_API_KEY; @GetMapping("qa") @ApiOperation(value = "与大模型对话(单轮问答)") public String llmQA(@ApiParam(value = "问句", required = true) @RequestParam String question) { ChatLanguageModel model = OpenAiChatModel.builder() .baseUrl("https://dgr.life/v1") .apiKey(OPENAI_API_KEY) // Please use your own OpenAI API key .modelName(GPT_3_5_TURBO) .build(); return model.generate(question); } @GetMapping("chat") @ApiOperation(value = "与大模型对话(多轮问答)") public void llm(@ApiParam(value = "问句", required = true) @RequestBody List<ChatMsgReq> req) { } @GetMapping("knowledge_base_chat") @ApiOperation(value = "与知识库对话") public String knowledgeBaseChat(@ApiParam(value = "问句", required = true) @RequestParam String question) { ChatLanguageModel model = OpenAiChatModel.builder() .baseUrl("https://dgr.life/v1") .apiKey(System.getenv("OPENAI_API_KEY")) // Please use your own OpenAI API key .modelName(GPT_3_5_TURBO) .build(); return model.generate(question); } }
[ "dev.langchain4j.model.openai.OpenAiChatModel.builder" ]
[((1077, 1295), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1077, 1270), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1077, 1190), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1077, 1150), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 2011), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 1986), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 1906), 'dev.langchain4j.model.openai.OpenAiChatModel.builder'), ((1776, 1849), 'dev.langchain4j.model.openai.OpenAiChatModel.builder')]
package fr.anthonyquere.talkwithme.core.ai.langchain.models; import dev.langchain4j.model.chat.ChatLanguageModel; import dev.langchain4j.model.mistralai.MistralAiChatModel; import org.springframework.beans.factory.annotation.Value; import org.springframework.context.annotation.Bean; import org.springframework.context.annotation.Configuration; @Configuration public class MistralAIConfiguration { @Value("${mistral-ia.api-key}") private String mistralApiKey; @Bean public ChatLanguageModel buildMistralModel() { return MistralAiChatModel.builder() .apiKey(mistralApiKey) .maxTokens(100) .modelName("mistral-tiny") .temperature(1.0d) .logRequests(true) .logResponses(true) .maxRetries(1) .build(); } }
[ "dev.langchain4j.model.mistralai.MistralAiChatModel.builder" ]
[((535, 759), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 744), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 723), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 697), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 672), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 647), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 614), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder'), ((535, 592), 'dev.langchain4j.model.mistralai.MistralAiChatModel.builder')]
package com.example.block18springai.controller; import com.example.block18springai.ai_config.ApiKeys; import com.example.block18springai.ai_config.Assistant; import dev.langchain4j.memory.ChatMemory; import dev.langchain4j.memory.chat.MessageWindowChatMemory; import dev.langchain4j.model.openai.OpenAiChatModel; import dev.langchain4j.service.AiServices; import org.springframework.web.bind.annotation.PostMapping; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; @RequestMapping("/ai") @RestController public class AIController_LangChain4j { ChatMemory chatMemory = MessageWindowChatMemory.withMaxMessages(10); Assistant assistant = AiServices.builder(Assistant.class) .chatLanguageModel(OpenAiChatModel.withApiKey(ApiKeys.OPENAI_API_KEY)) .chatMemory(chatMemory) .build(); @PostMapping("/message") public String chat(@RequestParam String message) { return assistant.chat(message); } }
[ "dev.langchain4j.service.AiServices.builder" ]
[((784, 959), 'dev.langchain4j.service.AiServices.builder'), ((784, 938), 'dev.langchain4j.service.AiServices.builder'), ((784, 902), 'dev.langchain4j.service.AiServices.builder')]