Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
import com.google.genai.types.Part;
import java.util.Base64;
import java.util.Map;
import java.util.Objects;
import org.jspecify.annotations.Nullable;

/** Shared models for Chat Completions Request and Response. */
Expand All @@ -45,6 +46,50 @@ private ChatCompletionsCommon() {}
public static final String METADATA_KEY_SYSTEM_FINGERPRINT = "system_fingerprint";
public static final String METADATA_KEY_SERVICE_TIER = "service_tier";

/**
 * Sentinel prefix used to mark refusal content inside a text Part, since the Gemini API has no
 * dedicated field for refusal content. Text parts that begin with this prefix (or contain it
 * immediately after a newline) are treated as carrying refusal text; see
 * {@code parseRefusalPrefix} for the exact splitting rules.
 */
static final String REFUSAL_PREFIX = "[[REFUSAL]]: ";

/**
 * Result of splitting a text part into its non-refusal content and refusal content. Either
 * component may be {@code null} when that portion is absent; both are never {@code null}
 * simultaneously for non-empty input.
 */
record RefusalSplit(@Nullable String content, @Nullable String refusal) {}

/**
 * Splits a text Part value into a content portion and a refusal portion keyed off the {@link
 * #REFUSAL_PREFIX} sentinel:
 *
 * <ul>
 *   <li>Text beginning with the prefix is entirely refusal; content is {@code null}.
 *   <li>Text containing {@code "\n" + REFUSAL_PREFIX} is split at the first such occurrence:
 *       everything before the newline is content, everything after the prefix is refusal.
 *   <li>Any other text is pure content with a {@code null} refusal. A prefix appearing mid-line
 *       (not immediately after a newline) is deliberately left untouched.
 * </ul>
 *
 * @param text the raw text from a {@link Part#text()}.
 * @return a {@link RefusalSplit} holding the content and refusal portions.
 */
static RefusalSplit parseRefusalPrefix(String text) {
  Objects.requireNonNull(text, "text cannot be null");
  if (text.startsWith(REFUSAL_PREFIX)) {
    // The whole part is refusal content; strip the sentinel.
    return new RefusalSplit(null, text.substring(REFUSAL_PREFIX.length()));
  }
  int markerAt = text.indexOf("\n" + REFUSAL_PREFIX);
  if (markerAt < 0) {
    // No line-leading sentinel anywhere: plain content.
    return new RefusalSplit(text, null);
  }
  String contentPortion = text.substring(0, markerAt);
  // Skip past the newline plus the sentinel itself to reach the refusal text.
  String refusalPortion = text.substring(markerAt + 1 + REFUSAL_PREFIX.length());
  return new RefusalSplit(contentPortion.isEmpty() ? null : contentPortion, refusalPortion);
}

/**
* See
* https://developers.openai.com/api/reference/resources/chat#(resource)%20chat.completions%20%3E%20(model)%20chat_completion_message_tool_call%20%3E%20(schema)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -350,14 +350,26 @@ private static List<Message> processContent(Content content) {
List<ContentPart> contentParts = new ArrayList<>();
List<ChatCompletionsCommon.ToolCall> toolCalls = new ArrayList<>();
List<Message> toolResponses = new ArrayList<>();
List<String> refusals = new ArrayList<>();

content
.parts()
.ifPresent(
parts -> {
for (Part part : parts) {
if (part.text().isPresent()) {
contentParts.add(processTextPart(part));
// Text Parts may carry refusal content prefixed with REFUSAL_PREFIX.
ChatCompletionsCommon.RefusalSplit split =
ChatCompletionsCommon.parseRefusalPrefix(part.text().get());
if (split.content() != null) {
ContentPart textPart = new ContentPart();
textPart.type = "text";
textPart.text = split.content();
contentParts.add(textPart);
}
if (split.refusal() != null) {
refusals.add(split.refusal());
}
} else if (part.inlineData().isPresent()) {
contentParts.add(processInlineDataPart(part));
} else if (part.fileData().isPresent()) {
Expand All @@ -381,6 +393,9 @@ private static List<Message> processContent(Content content) {
if (!toolCalls.isEmpty()) {
msg.toolCalls = ImmutableList.copyOf(toolCalls);
}
if (!refusals.isEmpty()) {
msg.refusal = String.join("\n", refusals);
}
if (!contentParts.isEmpty()) {
if (contentParts.size() == 1 && Objects.equals(contentParts.get(0).type, "text")) {
msg.content = new MessageContent(contentParts.get(0).text);
Expand All @@ -394,19 +409,6 @@ private static List<Message> processContent(Content content) {
}
}

/**
 * Maps a simple text {@link Part} onto an OpenAI-style {@code ContentPart} of type
 * {@code "text"}.
 *
 * @param part the input part; {@link Part#text()} must be present.
 * @return the mapped text content part.
 */
private static ContentPart processTextPart(Part part) {
  ContentPart mapped = new ContentPart();
  mapped.type = "text";
  mapped.text = part.text().get();
  return mapped;
}

/**
* Processes an inline data part and returns a mapped ContentPart.
*
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ private ImmutableList<Part> mapMessageToParts(Message message) {
parts.add(Part.fromText(message.content));
}
if (message.refusal != null) {
parts.add(Part.fromText(message.refusal));
parts.add(Part.fromText(ChatCompletionsCommon.REFUSAL_PREFIX + message.refusal));
}
if (message.toolCalls != null) {
parts.addAll(mapToolCallsToParts(message.toolCalls));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -245,6 +245,157 @@ public void testFromLlmRequest_basic() throws Exception {
assertThat(request.messages.get(0).content.getValue()).isEqualTo("Hello");
}

@Test
public void testFromLlmRequest_withRefusal() throws Exception {
  // A model turn carrying a plain text part plus a separate refusal-prefixed part.
  Content modelTurn =
      Content.builder()
          .role("model")
          .parts(
              ImmutableList.of(
                  Part.fromText("Regular text response"),
                  Part.fromText(ChatCompletionsCommon.REFUSAL_PREFIX + "I cannot do that.")))
          .build();
  LlmRequest input =
      LlmRequest.builder().model("gemini-1.5-pro").contents(ImmutableList.of(modelTurn)).build();

  ChatCompletionsRequest converted = ChatCompletionsRequest.fromLlmRequest(input, false);

  assertThat(converted.messages).hasSize(1);
  ChatCompletionsRequest.Message assistant = converted.messages.get(0);
  assertThat(assistant.role).isEqualTo("assistant");
  // The sentinel is stripped into the refusal field; the plain part remains the content.
  assertThat(assistant.refusal).isEqualTo("I cannot do that.");
  assertThat(assistant.content.getValue()).isEqualTo("Regular text response");
}

@Test
public void testFromLlmRequest_withRefusalEmbeddedAfterNewline() throws Exception {
  // A single Part containing both content and refusal, separated by "\n[[REFUSAL]]: ".
  String combined =
      "Partial text answer\n" + ChatCompletionsCommon.REFUSAL_PREFIX + "System error or refusal";
  Content modelTurn =
      Content.builder()
          .role("model")
          .parts(ImmutableList.of(Part.fromText(combined)))
          .build();
  LlmRequest input =
      LlmRequest.builder().model("gemini-1.5-pro").contents(ImmutableList.of(modelTurn)).build();

  ChatCompletionsRequest converted = ChatCompletionsRequest.fromLlmRequest(input, false);

  assertThat(converted.messages).hasSize(1);
  ChatCompletionsRequest.Message assistant = converted.messages.get(0);
  assertThat(assistant.role).isEqualTo("assistant");
  // The text before the newline becomes content; the remainder becomes the refusal.
  assertThat(assistant.content.getValue()).isEqualTo("Partial text answer");
  assertThat(assistant.refusal).isEqualTo("System error or refusal");
}

@Test
public void testFromLlmRequest_withMultipleRefusalsJoinedWithNewline() throws Exception {
  // Two refusal-only parts in one model turn should be merged into a single refusal string.
  Content modelTurn =
      Content.builder()
          .role("model")
          .parts(
              ImmutableList.of(
                  Part.fromText(ChatCompletionsCommon.REFUSAL_PREFIX + "First"),
                  Part.fromText(ChatCompletionsCommon.REFUSAL_PREFIX + "Second")))
          .build();
  LlmRequest input =
      LlmRequest.builder().model("gemini-1.5-pro").contents(ImmutableList.of(modelTurn)).build();

  ChatCompletionsRequest converted = ChatCompletionsRequest.fromLlmRequest(input, false);

  assertThat(converted.messages).hasSize(1);
  ChatCompletionsRequest.Message assistant = converted.messages.get(0);
  assertThat(assistant.role).isEqualTo("assistant");
  // Refusals are joined with a newline; no text part means no content at all.
  assertThat(assistant.refusal).isEqualTo("First\nSecond");
  assertThat(assistant.content).isNull();
}

@Test
public void testFromLlmRequest_withRefusalOnlyHasNullContent() throws Exception {
  // A turn whose only part is a refusal must produce a message with no content.
  Content modelTurn =
      Content.builder()
          .role("model")
          .parts(
              ImmutableList.of(
                  Part.fromText(ChatCompletionsCommon.REFUSAL_PREFIX + "Only a refusal")))
          .build();
  LlmRequest input =
      LlmRequest.builder().model("gemini-1.5-pro").contents(ImmutableList.of(modelTurn)).build();

  ChatCompletionsRequest converted = ChatCompletionsRequest.fromLlmRequest(input, false);

  assertThat(converted.messages).hasSize(1);
  ChatCompletionsRequest.Message assistant = converted.messages.get(0);
  assertThat(assistant.role).isEqualTo("assistant");
  assertThat(assistant.refusal).isEqualTo("Only a refusal");
  assertThat(assistant.content).isNull();
}

@Test
public void testFromLlmRequest_withRefusalPrefixAfterEmptyContentLine() throws Exception {
  // Edge case: text begins with "\n[[REFUSAL]]: ..." -- empty content before the prefix.
  // Expectation: no content part, refusal populated.
  String text = "\n" + ChatCompletionsCommon.REFUSAL_PREFIX + "Refusal only";
  Content modelTurn =
      Content.builder().role("model").parts(ImmutableList.of(Part.fromText(text))).build();
  LlmRequest input =
      LlmRequest.builder().model("gemini-1.5-pro").contents(ImmutableList.of(modelTurn)).build();

  ChatCompletionsRequest converted = ChatCompletionsRequest.fromLlmRequest(input, false);

  assertThat(converted.messages).hasSize(1);
  ChatCompletionsRequest.Message assistant = converted.messages.get(0);
  // The empty pre-newline segment is dropped rather than emitted as an empty content part.
  assertThat(assistant.refusal).isEqualTo("Refusal only");
  assertThat(assistant.content).isNull();
}

@Test
public void testFromLlmRequest_withRefusalPrefixMidLineIsNotSplit() throws Exception {
  // The prefix is intentionally NOT recognized mid-line without a preceding newline.
  String inlineText = "foo " + ChatCompletionsCommon.REFUSAL_PREFIX + "bar";
  Content modelTurn =
      Content.builder()
          .role("model")
          .parts(ImmutableList.of(Part.fromText(inlineText)))
          .build();
  LlmRequest input =
      LlmRequest.builder().model("gemini-1.5-pro").contents(ImmutableList.of(modelTurn)).build();

  ChatCompletionsRequest converted = ChatCompletionsRequest.fromLlmRequest(input, false);

  assertThat(converted.messages).hasSize(1);
  ChatCompletionsRequest.Message assistant = converted.messages.get(0);
  // The whole string, sentinel included, survives as content; no refusal is extracted.
  assertThat(assistant.refusal).isNull();
  assertThat(assistant.content.getValue()).isEqualTo(inlineText);
}

@Test
public void testFromLlmRequest_withSystemInstruction() throws Exception {
LlmRequest llmRequest =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -504,6 +504,7 @@ public void testToLlmResponse_withRefusal() throws Exception {
"index": 0,
"message": {
"role": "assistant",
"content": "Partial text answer",
"refusal": "System error or refusal"
},
"finish_reason": "stop"
Expand All @@ -521,8 +522,11 @@ public void testToLlmResponse_withRefusal() throws Exception {

// Content
assertThat(response.content().get().role()).hasValue("model");
assertThat(response.content().get().parts().get()).hasSize(2);
assertThat(response.content().get().parts().get().get(0).text())
.hasValue("System error or refusal");
.hasValue("Partial text answer");
assertThat(response.content().get().parts().get().get(1).text())
.hasValue("[[REFUSAL]]: System error or refusal");

// Custom Metadata
List<CustomMetadata> metadata = response.customMetadata().get();
Expand Down
Loading