Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -483,7 +483,7 @@ private Mono<List<ProcessingResult>> postProcessFailureMessages(List<ProcessingR
var nonRetryable = failures.stream()
.filter(failure -> !isRetryableException(failure.error()))
.map(failure -> {
log.error("Non-retryable error for messageId '{}', removing from stream",
log.warn("Non-retryable error for messageId '{}', removing from stream",
failure.messageId(), failure.error());
return failure.messageId();
})
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -99,12 +99,9 @@ protected void score(@NonNull TraceThreadToScoreLlmAsJudge message) {
.subscribe(
unused -> log.info("Processed trace threads for projectId '{}', ruleId '{}' for workspace '{}'",
message.projectId(), message.ruleId(), message.workspaceId()),
error -> {
log.error(
"Error processing trace thread for projectId '{}', ruleId '{}' for workspace '{}': {}",
message.projectId(), message.ruleId(), message.workspaceId(), error.getMessage());
log.error("Error processing trace thread scoring", error);
});
error -> log.error(
"Error processing trace thread for projectId '{}', ruleId '{}' for workspace '{}'",
message.projectId(), message.ruleId(), message.workspaceId(), error));
}

private Mono<Void> processThreadScores(TraceThreadToScoreLlmAsJudge message, String currentThreadId) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -66,7 +66,7 @@ public ChatCompletionResponse create(@NonNull ChatCompletionRequest rawRequest,
providerError
.ifPresent(llmProviderError -> failHandlingLLMProviderError(runtimeException, llmProviderError));

log.error(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, runtimeException);
log.warn(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, runtimeException);
Copy link
Copy Markdown
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Sorry, I didn't notice this on the first review.

I'd keep these two changes in this file at error level, as they're real errors and are re-thrown.

Even better, these logs can probably be removed, since the exception thrown below should already be printed somewhere — either when uncaught or when logged by a Dropwizard error handler.

I'd say the best scenario would be removing these two if you confirm logs are duplicated.

throw new InternalServerErrorException(buildDetailedErrorMessage(runtimeException), runtimeException);
}

Expand Down Expand Up @@ -117,7 +117,7 @@ public ChatResponse scoreTrace(@NonNull ChatRequest chatRequest,
providerError
.ifPresent(llmProviderError -> failHandlingLLMProviderError(runtimeException, llmProviderError));

log.error(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, runtimeException);
log.warn(UNEXPECTED_ERROR_CALLING_LLM_PROVIDER, runtimeException);
throw new InternalServerErrorException(buildDetailedErrorMessage(runtimeException), runtimeException);
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,9 @@ private Integer getCode(OpenAiError error) {
case "invalid_api_key" -> 401;
case "internal_error" -> 500;
case "invalid_request_error" -> 400;
case "rate_limit_exceeded" -> 429;
case "insufficient_quota" -> 402;
case "model_not_found" -> 404;
default -> null;
};
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,73 @@
package com.comet.opik.infrastructure.llm.openai;

import io.dropwizard.jersey.errors.ErrorMessage;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.MethodSource;

import java.util.stream.Stream;

import static org.assertj.core.api.Assertions.assertThat;

/**
 * Unit tests for {@code OpenAiErrorMessage#toErrorMessage()}, covering the mapping of
 * OpenAI error codes to HTTP status codes, the fallback for unknown codes, and the
 * null-message short-circuit.
 */
class OpenAiErrorMessageTest {

    private static final String SOME_TYPE = "some_type";

    static Stream<Arguments> errorCodeMappings() {
        // Each known OpenAI error code must map to its dedicated HTTP status.
        return Stream.of(
                Arguments.of("invalid_api_key", 401, "some error message"),
                Arguments.of("internal_error", 500, "some error message"),
                Arguments.of("invalid_request_error", 400, "some error message"),
                Arguments.of("rate_limit_exceeded", 429, "some error message"),
                Arguments.of("insufficient_quota", 402, "some error message"),
                Arguments.of("model_not_found", 404, "some error message"));
    }

    @ParameterizedTest
    @MethodSource("errorCodeMappings")
    void toErrorMessageWhenErrorCodeReturnsExpectedHttpStatus(String errorCode, int expectedHttpStatus,
            String message) {
        var openAiError = new OpenAiErrorMessage.OpenAiError(message, errorCode, SOME_TYPE);
        var errorMessage = new OpenAiErrorMessage(openAiError);

        var actual = errorMessage.toErrorMessage();

        assertThat(actual)
                .usingRecursiveComparison()
                .isEqualTo(new ErrorMessage(expectedHttpStatus, message));
    }

    static Stream<Arguments> unknownErrorCodeCases() {
        // Codes outside the known mapping fall back to HTTP 500.
        return Stream.of(
                Arguments.of("unknown_code", "unknown error"),
                Arguments.of("some_other_error", "another error"));
    }

    @ParameterizedTest
    @MethodSource("unknownErrorCodeCases")
    void toErrorMessageWhenErrorCodeUnknownReturns500WithDetails(String errorCode, String message) {
        var openAiError = new OpenAiErrorMessage.OpenAiError(message, errorCode, SOME_TYPE);
        var errorMessage = new OpenAiErrorMessage(openAiError);

        var actual = errorMessage.toErrorMessage();

        // Unknown codes carry the original error code through as the details field.
        assertThat(actual)
                .usingRecursiveComparison()
                .isEqualTo(new ErrorMessage(500, message, errorCode));
    }

    static Stream<Arguments> nullMessageCases() {
        return Stream.of(
                Arguments.of("rate_limit_exceeded"),
                Arguments.of("unknown_code"));
    }

    @ParameterizedTest
    @MethodSource("nullMessageCases")
    void toErrorMessageWhenMessageNullReturnsNull(String errorCode) {
        // A null provider message yields no ErrorMessage at all, regardless of the code.
        var errorMessage = new OpenAiErrorMessage(
                new OpenAiErrorMessage.OpenAiError(null, errorCode, SOME_TYPE));

        assertThat(errorMessage.toErrorMessage()).isNull();
    }
}
Loading