Enable CI, fix LogLevel NPE (#12)
LangChain4j authored Jan 14, 2024
1 parent db1b821 commit f193c92
Showing 16 changed files with 125 additions and 29 deletions.
29 changes: 29 additions & 0 deletions .github/workflows/main.yaml
@@ -0,0 +1,29 @@
name: Java CI

on:
  push:
    branches:
      - main
  pull_request:
    branches:
      - main

jobs:
  java_build:
    strategy:
      matrix:
        java_version: [ 8, 11, 17, 21 ]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Set up JDK ${{ matrix.java_version }}
        uses: actions/setup-java@v4
        with:
          java-version: ${{ matrix.java_version }}
          distribution: 'temurin'
          cache: 'maven'
      - name: Build with JDK ${{ matrix.java_version }}
        run: mvn -B clean test ${{ matrix.included_modules }}
        env:
          OPENAI_BASE_URL: 'http://langchain4j.dev:8082/v1'
          OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
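Note: the OPENAI_BASE_URL and OPENAI_API_KEY values exported by this workflow are exactly what the updated test fixtures further down read via System.getenv. A minimal sketch of that wiring, using only builder methods that appear elsewhere in this commit (the class and main method are illustrative scaffolding):

```
// Sketch: how the CI-provided environment variables reach the client under test.
import dev.ai4j.openai4j.OpenAiClient;

public class CiEnvWiringSketch {

    public static void main(String[] args) {
        OpenAiClient client = OpenAiClient.builder()
                // base URL of the proxy configured in the workflow's env block
                .baseUrl(System.getenv("OPENAI_BASE_URL"))
                // injected from the repository secret OPENAI_API_KEY
                .openAiApiKey(System.getenv("OPENAI_API_KEY"))
                .build();
    }
}
```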
2 changes: 1 addition & 1 deletion .github/workflows/release.yml
@@ -24,7 +24,7 @@ jobs:
gpg-passphrase: GPG_PASSPHRASE

- name: release
run: mvn -B clean deploy -Psign -DskipTests
run: mvn -B clean deploy -Psign
env:
OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
GPG_PASSPHRASE: ${{ secrets.GPG_PASSPHRASE }}
7 changes: 4 additions & 3 deletions README.md
@@ -64,6 +64,7 @@ Customizable way:
String apiKey = System.getenv("OPENAI_API_KEY");
OpenAiClient client = OpenAiClient.builder()
.baseUrl(baseUrl)
.openAiApiKey(apiKey)
.organizationId(orgId)
.callTimeout(ofSeconds(60))
@@ -91,7 +92,7 @@ Customizable way:

```
CompletionRequest request = CompletionRequest.builder()
.model(TEXT_DAVINCI_003)
.model(GPT_3_5_TURBO_INSTRUCT)
.prompt("Write a poem about ChatGPT")
.temperature(0.9)
...
@@ -115,7 +116,7 @@ Customizable way:

```
CompletionRequest request = CompletionRequest.builder()
.model(TEXT_DAVINCI_003)
.model(GPT_3_5_TURBO_INSTRUCT)
.prompt("Write a poem about ChatGPT")
.temperature(0.9)
...
@@ -143,7 +144,7 @@ Customizable way:

```
CompletionRequest request = CompletionRequest.builder()
.model(TEXT_DAVINCI_003)
.model(GPT_3_5_TURBO_INSTRUCT)
.prompt("Write a poem about ChatGPT")
.temperature(0.9)
...
6 changes: 4 additions & 2 deletions src/main/java/dev/ai4j/openai4j/OpenAiClient.java
@@ -21,6 +21,8 @@
import dev.ai4j.openai4j.spi.OpenAiClientBuilderFactory;
import dev.ai4j.openai4j.spi.ServiceHelper;

import static dev.ai4j.openai4j.LogLevel.DEBUG;

public abstract class OpenAiClient {

public abstract SyncOrAsyncOrStreaming<CompletionResponse> completion(CompletionRequest request);
@@ -67,7 +69,7 @@ public abstract static class Builder<T extends OpenAiClient, B extends Builder<T
public Proxy proxy;
public boolean logRequests;
public boolean logResponses;
public LogLevel logLevel;
public LogLevel logLevel = DEBUG;
public boolean logStreamingResponses;
public Path persistTo;

@@ -190,7 +192,7 @@ public B logRequests(Boolean logRequests) {

public B logLevel(LogLevel logLevel) {
if (logLevel == null) {
logLevel = LogLevel.DEBUG;
logLevel = DEBUG;
}
this.logLevel = logLevel;
return (B) this;
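For context on the "fix LogLevel NPE" half of this commit: the builder's logLevel field now defaults to DEBUG, and logLevel(null) falls back to DEBUG as well, so enabling request/response logging without ever calling logLevel(...) no longer leaves a null level behind. A minimal sketch of the call pattern this covers (builder methods are taken from the diffs in this commit; that the logging path dereferences this field is inferred from the fix rather than shown here):

```
// Sketch of the previously NPE-prone pattern: logging enabled, logLevel never set.
import dev.ai4j.openai4j.OpenAiClient;

public class LogLevelDefaultSketch {

    public static void main(String[] args) {
        OpenAiClient client = OpenAiClient.builder()
                .openAiApiKey(System.getenv("OPENAI_API_KEY"))
                .logRequests()   // logging is switched on...
                .logResponses()  // ...but logLevel(...) is never called
                .build();        // builder.logLevel is now DEBUG instead of null
    }
}
```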
CompletionModel.java
@@ -2,7 +2,7 @@

public enum CompletionModel {

TEXT_DAVINCI_003("text-davinci-003");
GPT_3_5_TURBO_INSTRUCT("gpt-3.5-turbo-instruct");

private final String value;

CompletionRequest.java
@@ -4,7 +4,7 @@
import java.util.Map;
import java.util.Objects;

import static dev.ai4j.openai4j.completion.CompletionModel.TEXT_DAVINCI_003;
import static dev.ai4j.openai4j.completion.CompletionModel.GPT_3_5_TURBO_INSTRUCT;
import static java.util.Collections.unmodifiableList;
import static java.util.Collections.unmodifiableMap;

@@ -186,7 +186,7 @@ public static Builder builder() {

public static final class Builder {

private String model = TEXT_DAVINCI_003.toString();
private String model = GPT_3_5_TURBO_INSTRUCT.toString();
private String prompt;
private String suffix;
private Integer maxTokens;
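The two files above replace the retired text-davinci-003 with gpt-3.5-turbo-instruct, both as the only CompletionModel constant and as the CompletionRequest builder's default model. A small sketch of what this means for callers (the terminal build() call and the import path for CompletionRequest are assumptions; that the enum's toString() yields the wire value is inferred from the builder default above):

```
import dev.ai4j.openai4j.completion.CompletionModel;
import dev.ai4j.openai4j.completion.CompletionRequest; // assumed package, mirroring CompletionModel

public class DefaultModelSketch {

    public static void main(String[] args) {
        // The renamed constant carries the new wire value.
        System.out.println(CompletionModel.GPT_3_5_TURBO_INSTRUCT); // expected: gpt-3.5-turbo-instruct

        // A request built without an explicit .model(...) now defaults to that model.
        CompletionRequest request = CompletionRequest.builder()
                .prompt("Write a poem about ChatGPT")
                .temperature(0.9)
                .build(); // assumed terminal call, elided as "..." in the README examples
    }
}
```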
ChatCompletionAsyncTest.java
@@ -25,6 +25,7 @@
class ChatCompletionAsyncTest extends RateLimitAwareTest {

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
ChatCompletionStreamingTest.java
@@ -28,6 +28,7 @@
class ChatCompletionStreamingTest extends RateLimitAwareTest {

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
@@ -754,34 +755,51 @@ void testGpt4Vision() throws Exception {
@Test
void testCancelStreamingAfterStreamingStarted() throws Exception {

OpenAiClient client = OpenAiClient.builder()
// without caching
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
.logStreamingResponses()
.build();

AtomicBoolean streamingStarted = new AtomicBoolean(false);
AtomicBoolean streamingCancelled = new AtomicBoolean(false);
AtomicBoolean cancellationSucceeded = new AtomicBoolean(true);

ResponseHandle responseHandle = client.chatCompletion("Write a poem about AI in 10 words")
.onPartialResponse(partialResponse -> {
streamingStarted.set(true);
System.out.println("[[streaming started]]");
if (streamingCancelled.get()) {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
}
})
.onComplete(() -> cancellationSucceeded.set(false))
.onError(e -> cancellationSucceeded.set(false))
.onComplete(() -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.onError(e -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.execute();

while (!streamingStarted.get()) {
Thread.sleep(200);
Thread.sleep(10);
}

newSingleThreadExecutor().execute(() -> {
responseHandle.cancel();
streamingCancelled.set(true);
System.out.println("[[streaming cancelled]]");
});

while (!streamingCancelled.get()) {
Thread.sleep(200);
Thread.sleep(10);
}
Thread.sleep(5000);
Thread.sleep(2000);

assertThat(cancellationSucceeded).isTrue();
}
@@ -792,22 +810,32 @@ void testCancelStreamingBeforeStreamingStarted() throws Exception {
AtomicBoolean cancellationSucceeded = new AtomicBoolean(true);

ResponseHandle responseHandle = client.chatCompletion("Write a poem about AI in 10 words")
.onPartialResponse(partialResponse -> cancellationSucceeded.set(false))
.onComplete(() -> cancellationSucceeded.set(false))
.onError(e -> cancellationSucceeded.set(false))
.onPartialResponse(partialResponse -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.onComplete(() -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.onError(e -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.execute();

AtomicBoolean streamingCancelled = new AtomicBoolean(false);

newSingleThreadExecutor().execute(() -> {
responseHandle.cancel();
streamingCancelled.set(true);
System.out.println("[[streaming cancelled]]");
});

while (!streamingCancelled.get()) {
Thread.sleep(200);
Thread.sleep(10);
}
Thread.sleep(5000);
Thread.sleep(2000);

assertThat(cancellationSucceeded).isTrue();
}
ChatCompletionTest.java
@@ -35,6 +35,7 @@ class ChatCompletionTest extends RateLimitAwareTest {
static final Tool WEATHER_TOOL = Tool.from(WEATHER_FUNCTION);

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
CompletionAsyncTest.java
@@ -14,6 +14,7 @@ class CompletionAsyncTest extends RateLimitAwareTest {
private static final String PROMPT = "write exactly the following 2 words: 'hello world'";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
CompletionStreamingTest.java
@@ -17,6 +17,7 @@ class CompletionStreamingTest extends RateLimitAwareTest {
private static final String PROMPT = "write exactly the following 2 words: 'hello world'";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
@@ -66,34 +67,51 @@ void testCustomizableApi() throws Exception {
@Test
void testCancelStreamingAfterStreamingStarted() throws InterruptedException {

OpenAiClient client = OpenAiClient.builder()
// without caching
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
.logStreamingResponses()
.build();

AtomicBoolean streamingStarted = new AtomicBoolean(false);
AtomicBoolean streamingCancelled = new AtomicBoolean(false);
AtomicBoolean cancellationSucceeded = new AtomicBoolean(true);

ResponseHandle responseHandle = client.completion("Write a poem about AI in 10 words")
.onPartialResponse(partialResponse -> {
streamingStarted.set(true);
System.out.println("[[streaming started]]");
if (streamingCancelled.get()) {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
}
})
.onComplete(() -> cancellationSucceeded.set(false))
.onError(e -> cancellationSucceeded.set(false))
.onComplete(() -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.onError(e -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.execute();

while (!streamingStarted.get()) {
Thread.sleep(200);
Thread.sleep(10);
}

newSingleThreadExecutor().execute(() -> {
responseHandle.cancel();
streamingCancelled.set(true);
System.out.println("[[streaming cancelled]]");
});

while (!streamingCancelled.get()) {
Thread.sleep(200);
Thread.sleep(10);
}
Thread.sleep(5000);
Thread.sleep(2000);

assertThat(cancellationSucceeded).isTrue();
}
@@ -104,22 +122,32 @@ void testCancelStreamingBeforeStreamingStarted() throws InterruptedException {
AtomicBoolean cancellationSucceeded = new AtomicBoolean(true);

ResponseHandle responseHandle = client.completion("Write a poem about AI in 10 words")
.onPartialResponse(partialResponse -> cancellationSucceeded.set(false))
.onComplete(() -> cancellationSucceeded.set(false))
.onError(e -> cancellationSucceeded.set(false))
.onPartialResponse(partialResponse -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.onComplete(() -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.onError(e -> {
cancellationSucceeded.set(false);
System.out.println("[[cancellation failed]]");
})
.execute();

AtomicBoolean streamingCancelled = new AtomicBoolean(false);

newSingleThreadExecutor().execute(() -> {
responseHandle.cancel();
streamingCancelled.set(true);
System.out.println("[[streaming cancelled]]");
});

while (!streamingCancelled.get()) {
Thread.sleep(200);
Thread.sleep(10);
}
Thread.sleep(5000);
Thread.sleep(2000);

assertThat(cancellationSucceeded).isTrue();
}
CompletionTest.java
@@ -11,6 +11,7 @@ class CompletionTest extends RateLimitAwareTest {
private static final String PROMPT = "write exactly the following 2 words: 'hello world'";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
EmbeddingsAsyncTest.java
@@ -15,6 +15,7 @@ public class EmbeddingsAsyncTest extends RateLimitAwareTest {
private static final String INPUT = "hello";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
EmbeddingsTest.java
@@ -18,6 +18,7 @@ public class EmbeddingsTest extends RateLimitAwareTest {
private static final String INPUT = "hello";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
ModerationAsyncTest.java
@@ -15,6 +15,7 @@ public class ModerationAsyncTest extends RateLimitAwareTest {
private static final String INPUT = "hello";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
ModerationTest.java
@@ -11,6 +11,7 @@ public class ModerationTest extends RateLimitAwareTest {
private static final String INPUT = "hello";

private final OpenAiClient client = OpenAiClient.builder()
.baseUrl(System.getenv("OPENAI_BASE_URL"))
.openAiApiKey(System.getenv("OPENAI_API_KEY"))
.logRequests()
.logResponses()
