Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
24 commits
Select commit Hold shift + click to select a range
a3a30ee
feat: upgrade LangChain4j from 1.0.0-beta3 to 1.4.0
rokon12 Sep 19, 2025
fcdc2ee
feat: update step-00 for LangChain4j 1.4.0 API compatibility
rokon12 Sep 19, 2025
ffb9be5
feat: update step-01-prompts for LangChain4j 1.4.0 API compatibility
rokon12 Sep 19, 2025
16e3d9f
feat: update step-02-chat-memory for LangChain4j 1.4.0 API compatibility
rokon12 Sep 19, 2025
1199b90
feat: enhance step-00 with better error handling and model parameter …
rokon12 Sep 20, 2025
f263f16
feat: improve dependency management and API compatibility
rokon12 Sep 20, 2025
6787e8a
feat: enhance step-01 development experience and logging
rokon12 Sep 20, 2025
4df3c5b
fix: improve Maven archetype generation and error handling
rokon12 Sep 20, 2025
bdc6fac
refactor: improve step-02 code quality and configuration
rokon12 Sep 20, 2025
94beaf1
feat: enhance Maven archetype generation with process utilities
rokon12 Sep 20, 2025
e7e0031
feat: update step-03-tools for LangChain4j 1.4.0 API compatibility
rokon12 Sep 20, 2025
4ae8897
feat: add GitHubTool for repository management and code analysis
rokon12 Sep 20, 2025
f94b57d
feat: add GPT-5 models support to step-03-tools
rokon12 Sep 20, 2025
fc828e6
feat: enhance JakartaEEAgent with advanced personality and comedy fea…
rokon12 Sep 20, 2025
9bd4ca1
feat: upgrade to full GPT-5 model for maximum capabilities
rokon12 Sep 20, 2025
e21e777
feat: upgrade step-04-inmemory-rag to LangChain4j 1.4.0 with GPT-5 su…
rokon12 Sep 20, 2025
a123267
feat: upgrade step-05-easy-rag to LangChain4j 1.4.0 with GPT-5 support
rokon12 Sep 20, 2025
3843296
fix: complete step-04 and step-05 LangChain4j 1.4.0 dependencies update
rokon12 Sep 21, 2025
4d13228
feat: complete LangChain4j 1.4.0 upgrade for step-06, step-07, step-0…
rokon12 Sep 21, 2025
1cf8eca
feat: finalize LangChain4j 1.4.0 upgrade for all remaining modules
rokon12 Sep 21, 2025
de7159e
feat: implement complete children's book creation agent system
rokon12 Sep 21, 2025
d3460b9
feat: enhance children's book agent with character consistency and di…
rokon12 Sep 21, 2025
2a79fa4
feat: add parallel image generation for faster book creation
rokon12 Sep 22, 2025
0ff0887
feat: implement true LangChain4j agentic architecture
rokon12 Sep 22, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
21 changes: 18 additions & 3 deletions pom.xml
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@

<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
Expand Down Expand Up @@ -46,7 +47,9 @@
<wildfly.version>35.0.1.Final</wildfly.version>

<!-- LangChain4j -->
<langchain4j.version>1.0.0-beta3</langchain4j.version>
<langchain4j.version>1.4.0</langchain4j.version>
<langchain4j.embedding.all.minilm.version>${langchain4j-document-parser-apache-tika.version}</langchain4j.embedding.all.minilm.version>
<langchain4j.mcp.version>${langchain4j-document-parser-apache-tika.version}</langchain4j.mcp.version>

<!-- Logging -->
<logback.version>1.5.12</logback.version>
Expand All @@ -72,6 +75,8 @@
<surefire-plugin.version>3.5.2</surefire-plugin.version>
<failsafe-plugin.version>3.5.2</failsafe-plugin.version>
<dependency-check-plugin.version>9.0.9</dependency-check-plugin.version>
<langchain4j-document-parser-apache-tika.version>1.5.0-beta11</langchain4j-document-parser-apache-tika.version>
<langchain4j-pgvector.version>1.5.0-beta11</langchain4j-pgvector.version>
</properties>

<dependencyManagement>
Expand Down Expand Up @@ -118,12 +123,17 @@
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-mcp</artifactId>
<version>${langchain4j.version}</version>
<version>${langchain4j.mcp.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-embeddings-all-minilm-l6-v2</artifactId>
<version>${langchain4j.version}</version>
<version>${langchain4j.embedding.all.minilm.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-document-parser-apache-tika</artifactId>
<version>${langchain4j-document-parser-apache-tika.version}</version>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
Expand Down Expand Up @@ -188,6 +198,11 @@
<version>${rest-assured.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-pgvector</artifactId>
<version>${langchain4j-pgvector.version}</version>
</dependency>
</dependencies>
</dependencyManagement>

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,6 @@
import jakarta.inject.Named;
import lombok.Getter;
import lombok.Setter;
import lombok.extern.slf4j.Slf4j;

import java.util.List;

Expand Down Expand Up @@ -34,7 +33,7 @@ public void init() {
this.modelName = config.getModelName();
this.temperature = config.getTemperature();
this.topP = config.getTopP();
this.maxTokens = config.getMaxTokens();
this.maxTokens = config.getMaxCompletionToken();
this.frequencyPenalty = config.getFrequencyPenalty();
this.logRequests = config.isLogRequests();
this.logResponses = config.isLogResponses();
Expand All @@ -53,7 +52,7 @@ public String updateConfiguration() {
config.setModelName(modelName);
config.setTemperature(temperature);
config.setTopP(topP);
config.setMaxTokens(maxTokens);
config.setMaxCompletionToken(maxTokens);
config.setFrequencyPenalty(frequencyPenalty);
config.setLogRequests(logRequests);
config.setLogResponses(logResponses);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,12 @@ public List<String> getAllowedModelsList() {

@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.max-tokens")
private int maxTokens;
private int maxCompletionToken;

@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.frequency-penalty")
private double frequencyPenalty;

@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.log-requests")
private boolean logRequests;
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,37 +16,79 @@ public class LangChainService {

@Inject
public LangChainService(LangChain4JConfig config) {
    // Delegate to createModel so construction and runtime reconfiguration
    // share a single builder path (model-capability gating included).
    chatModel = createModel(config);
}

/**
 * Sends a user message to the chat model and hands the reply to the consumer.
 * Never throws: model errors and null replies are converted into
 * "Error: ..." strings so the caller's UI always receives something.
 *
 * @param message  user prompt forwarded verbatim to the model
 * @param consumer callback receiving the model reply or an error string
 */
public void sendMessage(String message, Consumer<String> consumer) {
    log.info("User message: {}", message);
    try {
        // Single guarded call — the model must not be invoked outside the
        // try block, or failures would escape the error handling below.
        String response = chatModel.chat(message);
        if (response != null) {
            consumer.accept(response);
        } else {
            log.error("Received null response from chat model for message: {}", message);
            consumer.accept("Error: No response from model");
        }
    } catch (Exception e) {
        log.error("Error calling chat model: ", e);
        consumer.accept("Error: " + e.getMessage());
    }
}

// Rebuilds the chat model from the supplied configuration.
// NOTE(review): this writer is synchronized, but sendMessage reads the
// chatModel field without synchronization — the new reference may not be
// promptly visible to other threads; consider making the field volatile.
public synchronized void updateConfiguration(LangChain4JConfig config) {
log.info("Updating configuration with new settings : {}", config);
chatModel = createModel(config);
log.info("Configuration updated successfully");
}

/**
 * Builds an OpenAiChatModel from the configuration, applying sampling
 * controls (temperature, frequency penalty, top-p) only for model families
 * that accept them — o1 and gpt-5 reject some of these parameters.
 *
 * @param config source of API key, model name, limits and logging flags
 * @return a fully configured chat model
 */
private static OpenAiChatModel createModel(LangChain4JConfig config) {
    OpenAiChatModel.OpenAiChatModelBuilder builder = OpenAiChatModel.builder()
            .apiKey(config.getApiKey())
            .modelName(config.getModelName());

    String model = safeLower(config.getModelName());

    // Sampling controls that some model families don't accept — set them
    // only when the selected model supports them.
    if (supportsTemperature(model)) {
        builder.temperature(config.getTemperature());
    }
    if (supportsFrequencyPenalty(model)) {
        builder.frequencyPenalty(config.getFrequencyPenalty());
    }
    if (supportsTopP(model)) {
        builder.topP(config.getTopP());
    }

    return builder
            .timeout(config.getTimeout())
            // maxCompletionTokens replaces the deprecated maxTokens setting.
            .maxCompletionTokens(config.getMaxCompletionToken())
            .logRequests(config.isLogRequests())
            .logResponses(config.isLogResponses())
            .build();
}

// Temperature is rejected by both the o1 and gpt-5 model families.
private static boolean supportsTemperature(String model) {
    boolean restrictedFamily = isO1(model) || isGpt5(model);
    return !restrictedFamily;
}

// Frequency penalty is rejected by both the o1 and gpt-5 model families.
private static boolean supportsFrequencyPenalty(String model) {
    boolean restrictedFamily = isO1(model) || isGpt5(model);
    return !restrictedFamily;
}

// Top-p is rejected only by the o1 family (gpt-5 accepts it).
private static boolean supportsTopP(String model) {
    return !isO1(model);
}

// Matches the bare "o1" model id as well as any "o1-" variant
// (o1-preview, o1-mini, ...). Expects an already-lowercased id.
private static boolean isO1(String model) {
    return model.equals("o1") || model.startsWith("o1-");
}

// True for any id in the gpt-5 family (gpt-5, gpt-5-mini, gpt-5-nano, ...).
// regionMatches(0, prefix, 0, len) is equivalent to startsWith(prefix).
private static boolean isGpt5(String model) {
    return model.regionMatches(0, "gpt-5", 0, 5);
}

// Null-safe lowercasing of a model id. Locale.ROOT avoids locale-sensitive
// case mapping (e.g. the Turkish dotless-i) when matching ASCII model names.
private static String safeLower(String s) {
    return s == null ? "" : s.toLowerCase(java.util.Locale.ROOT);
}

}
Original file line number Diff line number Diff line change
Expand Up @@ -2,10 +2,10 @@
langchain4j.open-ai.api-key=${OPENAI_API_KEY}

# Allowed models list
langchain4j.open-ai.chat-model.allowed-models=gpt-4,gpt-4o, gpt-4.1, gpt-4.1-mini, gpt-4.1-nano, gpt-4o-mini,o1-preview,o1-mini,gpt-4-turbo,gpt-3.5-turbo,gpt-4.5-preview
langchain4j.open-ai.chat-model.allowed-models=gpt-5, gpt-5-mini, gpt-5-nano, gpt-4,gpt-4o, gpt-4.1, gpt-4.1-mini, gpt-4.1-nano, gpt-4o-mini,o1-preview,o1-mini,gpt-4-turbo,gpt-3.5-turbo,gpt-4.5-preview

# Default model
langchain4j.open-ai.chat-model.model-name=gpt-4.1-nano
langchain4j.open-ai.chat-model.model-name=gpt-5
langchain4j.open-ai.chat-model.temperature=0.7
langchain4j.open-ai.chat-model.top-p=1.0
langchain4j.open-ai.chat-model.timeout=PT60S
Expand Down
2 changes: 1 addition & 1 deletion step-00-chatbot-first-step/src/main/webapp/chat.xhtml
Original file line number Diff line number Diff line change
Expand Up @@ -276,7 +276,7 @@ Feel free to ask any questions!`;
</div>

<div class="form-group">
<h:outputLabel for="main-maxTokens" value="Max Tokens:"/>
<h:outputLabel for="main-maxTokens" value="Max Completion Tokens:"/>
<h:inputText id="main-maxTokens" value="#{configurationBean.maxTokens}"
required="true" styleClass="config-input">
<f:convertNumber integerOnly="true"/>
Expand Down
9 changes: 9 additions & 0 deletions step-01-prompts/.claude/settings.local.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
{
"permissions": {
"allow": [
"Read(//Users/bazlur/projects/llm-jakarta/step-02-chat-memory/**)"
],
"deny": [],
"ask": []
}
}
41 changes: 13 additions & 28 deletions step-01-prompts/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -3,70 +3,58 @@
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>

<groupId>learning.jakarta.ai</groupId>
<parent>
<groupId>learning.jakarta.ai</groupId>
<artifactId>llm-jakarta</artifactId>
<version>0.1-SNAPSHOT</version>
</parent>
<artifactId>llm-jakarta-step-01</artifactId>
<version>0.1-SNAPSHOT</version>
<packaging>war</packaging>

<name>llm-jakarta-step-01</name>
<packaging>war</packaging>
<description>
This is a very simple Jakarta EE application generated by the official Eclipse Starter.
</description>


<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<project.report.sourceEncoding>UTF-8</project.report.sourceEncoding>
<project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
<maven.compiler.release>21</maven.compiler.release>
<jakartaee-api.version>10.0.0</jakartaee-api.version>
<wildfly.version>33.0.2.Final</wildfly.version>
<compiler-plugin.version>3.13.0</compiler-plugin.version>
<war-plugin.version>3.4.0</war-plugin.version>
<wildfly-plugin.version>5.0.1.Final</wildfly-plugin.version>
<!-- Rely on parent for dependency and plugin versions -->
</properties>

<dependencies>
<dependency>
<groupId>jakarta.platform</groupId>
<artifactId>jakarta.jakartaee-api</artifactId>
<version>${jakartaee-api.version}</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>jakarta.faces</groupId>
<artifactId>jakarta.faces-api</artifactId>
<version>4.0.0</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>org.eclipse.microprofile.config</groupId>
<artifactId>microprofile-config-api</artifactId>
<version>3.1</version>
<scope>provided</scope>
</dependency>

<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j-open-ai</artifactId>
<version>0.36.2</version>
</dependency>

<dependency>
<groupId>dev.langchain4j</groupId>
<artifactId>langchain4j</artifactId>
<version>0.36.2</version>
</dependency>

<!-- Logback Classic -->
<!-- Logback for application logging -->
<dependency>
<groupId>ch.qos.logback</groupId>
<artifactId>logback-classic</artifactId>
<version>1.5.12</version>
</dependency>

<dependency>
<groupId>org.projectlombok</groupId>
<artifactId>lombok</artifactId>
<version>1.18.36</version>
<scope>provided</scope>
</dependency>
</dependencies>
Expand All @@ -77,11 +65,9 @@
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>${compiler-plugin.version}</version>
</plugin>
<plugin>
<artifactId>maven-war-plugin</artifactId>
<version>${war-plugin.version}</version>
<configuration>
<failOnMissingWebXml>false</failOnMissingWebXml>
</configuration>
Expand All @@ -91,9 +77,8 @@
<plugin>
<groupId>org.wildfly.plugins</groupId>
<artifactId>wildfly-maven-plugin</artifactId>
<version>${wildfly-plugin.version}</version>
<configuration>
<version>${wildfly.version}</version>
<!-- Inherit WildFly version from parent -->
<server-config>standalone.xml</server-config>
</configuration>
</plugin>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,9 +28,17 @@ public class LangChain4JConfig {
@ConfigProperty(name = "langchain4j.open-ai.chat-model.timeout")
private Duration timeout;

@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.top-p")
private double topP;

@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.max-tokens")
private int maxTokens;
private int maxCompletionToken;

@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.allowed-models")
private String allowedModels;
@Inject
@ConfigProperty(name = "langchain4j.open-ai.chat-model.frequency-penalty")
private double frequencyPenalty;
Expand All @@ -46,5 +54,12 @@ public class LangChain4JConfig {
@ConfigProperty(name = "langchain4j.personality-type")
private PersonalityType personalityType;

/**
 * Parses the comma-separated allowed-models config value.
 * Entries are trimmed and blank entries (e.g. from double commas) dropped.
 *
 * @return unmodifiable list of model names in configured order
 */
public java.util.List<String> getAllowedModelsList() {
    return java.util.Arrays.stream(allowedModels.split(","))
        .map(String::trim)
        .filter(name -> !name.isEmpty())
        .toList();
}

}

Loading
Loading