Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
59 changes: 57 additions & 2 deletions core/src/main/java/com/google/adk/events/Event.java
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,7 @@
import com.google.genai.types.FunctionResponse;
import com.google.genai.types.GenerateContentResponseUsageMetadata;
import com.google.genai.types.GroundingMetadata;
import com.google.genai.types.Transcription;
import java.time.Instant;
import java.util.List;
import java.util.Objects;
Expand All @@ -42,6 +43,7 @@
import org.jspecify.annotations.Nullable;

// TODO - b/413761119 update Agent.java when resolved.

/** Represents an event in a session. */
@JsonDeserialize(builder = Event.Builder.class)
public class Event extends JsonBaseModel {
Expand All @@ -64,6 +66,9 @@ public class Event extends JsonBaseModel {
private @Nullable GroundingMetadata groundingMetadata;
private @Nullable List<CustomMetadata> customMetadata;
private @Nullable String modelVersion;
private @Nullable Transcription inputTranscription;
private @Nullable Transcription outputTranscription;

private long timestamp;

private Event() {}
Expand Down Expand Up @@ -266,6 +271,32 @@ public void setModelVersion(@Nullable String modelVersion) {
this.modelVersion = modelVersion;
}

/**
 * Returns the input (user audio) transcription attached to this event, if any.
 *
 * <p>The transcription is independent of the model turn, i.e. its presence does not imply any
 * ordering between the transcription and the model turn.
 */
@JsonProperty("inputTranscription")
public Optional<Transcription> inputTranscription() {
  return Optional.ofNullable(inputTranscription);
}

/** Sets the input transcription for this event; passing {@code null} clears it. */
public void setInputTranscription(@Nullable Transcription inputTranscription) {
  this.inputTranscription = inputTranscription;
}

/**
 * Returns the output (model audio) transcription attached to this event, if any.
 *
 * <p>The transcription is independent of the model turn, i.e. its presence does not imply any
 * ordering between the transcription and the model turn.
 */
@JsonProperty("outputTranscription")
public Optional<Transcription> outputTranscription() {
  return Optional.ofNullable(outputTranscription);
}

/** Sets the output transcription for this event; passing {@code null} clears it. */
public void setOutputTranscription(@Nullable Transcription outputTranscription) {
  this.outputTranscription = outputTranscription;
}

/** The timestamp of the event. */
@JsonProperty("timestamp")
public long timestamp() {
Expand Down Expand Up @@ -362,6 +393,8 @@ public static class Builder {
private @Nullable GroundingMetadata groundingMetadata;
private @Nullable List<CustomMetadata> customMetadata;
private @Nullable String modelVersion;
private @Nullable Transcription inputTranscription;
private @Nullable Transcription outputTranscription;
private @Nullable Long timestamp;

@JsonCreator
Expand Down Expand Up @@ -520,6 +553,20 @@ public Builder modelVersion(@Nullable String value) {
return this;
}

/** Sets the input transcription carried by the event being built; {@code null} leaves it unset. */
@CanIgnoreReturnValue
@JsonProperty("inputTranscription")
public Builder inputTranscription(@Nullable Transcription value) {
  this.inputTranscription = value;
  return this;
}

/** Sets the output transcription carried by the event being built; {@code null} leaves it unset. */
@CanIgnoreReturnValue
@JsonProperty("outputTranscription")
public Builder outputTranscription(@Nullable Transcription value) {
  this.outputTranscription = value;
  return this;
}

public Event build() {
Event event = new Event();
event.setId(id);
Expand All @@ -541,6 +588,8 @@ public Event build() {
event.setModelVersion(modelVersion);
event.setActions(actions().orElseGet(() -> EventActions.builder().build()));
event.setTimestamp(timestamp().orElseGet(() -> Instant.now().toEpochMilli()));
event.setInputTranscription(inputTranscription);
event.setOutputTranscription(outputTranscription);
return event;
}
}
Expand Down Expand Up @@ -575,7 +624,9 @@ public Builder toBuilder() {
.branch(this.branch)
.groundingMetadata(this.groundingMetadata)
.customMetadata(this.customMetadata)
.modelVersion(this.modelVersion);
.modelVersion(this.modelVersion)
.inputTranscription(this.inputTranscription)
.outputTranscription(this.outputTranscription);
if (this.timestamp != 0) {
builder.timestamp(this.timestamp);
}
Expand Down Expand Up @@ -608,7 +659,9 @@ public boolean equals(Object obj) {
&& Objects.equals(branch, other.branch)
&& Objects.equals(groundingMetadata, other.groundingMetadata)
&& Objects.equals(customMetadata, other.customMetadata)
&& Objects.equals(modelVersion, other.modelVersion);
&& Objects.equals(modelVersion, other.modelVersion)
&& Objects.equals(inputTranscription, other.inputTranscription)
&& Objects.equals(outputTranscription, other.outputTranscription);
}

@Override
Expand Down Expand Up @@ -637,6 +690,8 @@ public int hashCode() {
groundingMetadata,
customMetadata,
modelVersion,
inputTranscription,
outputTranscription,
timestamp);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -677,7 +677,9 @@ private Flowable<Event> buildPostprocessingEvents(
&& updatedResponse.errorCode().isEmpty()
&& !updatedResponse.interrupted().orElse(false)
&& !updatedResponse.turnComplete().orElse(false)
&& updatedResponse.usageMetadata().isEmpty()) {
&& updatedResponse.usageMetadata().isEmpty()
&& updatedResponse.inputTranscription().isEmpty()
&& updatedResponse.outputTranscription().isEmpty()) {
return processorEvents;
}

Expand Down Expand Up @@ -740,7 +742,9 @@ private Event buildModelResponseEvent(
.avgLogprobs(llmResponse.avgLogprobs().orElse(null))
.finishReason(llmResponse.finishReason().orElse(null))
.usageMetadata(llmResponse.usageMetadata().orElse(null))
.modelVersion(llmResponse.modelVersion().orElse(null));
.modelVersion(llmResponse.modelVersion().orElse(null))
.inputTranscription(llmResponse.inputTranscription().orElse(null))
.outputTranscription(llmResponse.outputTranscription().orElse(null));

Event event = eventBuilder.build();

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,8 @@ private static LlmResponse createServerContentResponse(LiveServerContent serverC
.partial(serverContent.turnComplete().map(completed -> !completed).orElse(false))
.turnComplete(serverContent.turnComplete().orElse(false))
.interrupted(serverContent.interrupted().orElse(null))
.inputTranscription(serverContent.inputTranscription().orElse(null))
.outputTranscription(serverContent.outputTranscription().orElse(null))
.build();
}

Expand Down
21 changes: 21 additions & 0 deletions core/src/main/java/com/google/adk/models/LlmResponse.java
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,7 @@
import com.google.genai.types.GenerateContentResponsePromptFeedback;
import com.google.genai.types.GenerateContentResponseUsageMetadata;
import com.google.genai.types.GroundingMetadata;
import com.google.genai.types.Transcription;
import java.util.List;
import java.util.Optional;
import org.jspecify.annotations.Nullable;
Expand Down Expand Up @@ -115,6 +116,20 @@ public abstract class LlmResponse extends JsonBaseModel {
@JsonProperty("modelVersion")
public abstract Optional<String> modelVersion();

/**
 * Input transcription, if the live server attached one to this response.
 *
 * <p>The transcription is independent of the model turn, i.e. its presence does not imply any
 * ordering between the transcription and the model turn.
 */
@JsonProperty("inputTranscription")
public abstract Optional<Transcription> inputTranscription();

/**
 * Output transcription, if the live server attached one to this response.
 *
 * <p>The transcription is independent of the model turn, i.e. its presence does not imply any
 * ordering between the transcription and the model turn.
 */
@JsonProperty("outputTranscription")
public abstract Optional<Transcription> outputTranscription();

public abstract Builder toBuilder();

/** Builder for constructing {@link LlmResponse} instances. */
Expand Down Expand Up @@ -164,6 +179,12 @@ public abstract Builder usageMetadata(
@JsonProperty("modelVersion")
public abstract Builder modelVersion(@Nullable String modelVersion);

/** Sets the input transcription; {@code null} leaves it unset. */
@JsonProperty("inputTranscription")
public abstract Builder inputTranscription(@Nullable Transcription inputTranscription);

/** Sets the output transcription; {@code null} leaves it unset. */
@JsonProperty("outputTranscription")
public abstract Builder outputTranscription(@Nullable Transcription outputTranscription);

@CanIgnoreReturnValue
public final Builder response(GenerateContentResponse response) {
Optional<List<Candidate>> candidatesOpt = response.candidates();
Expand Down
76 changes: 76 additions & 0 deletions core/src/test/java/com/google/adk/events/EventTest.java
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import com.google.genai.types.FunctionCall;
import com.google.genai.types.GenerateContentResponseUsageMetadata;
import com.google.genai.types.Part;
import com.google.genai.types.Transcription;
import java.time.Instant;
import java.util.concurrent.ConcurrentHashMap;
import org.junit.Test;
Expand Down Expand Up @@ -192,6 +193,81 @@ public void event_json_serialization_works() throws Exception {
assertThat(deserializedEvent).isEqualTo(EVENT);
}

@Test
public void event_builder_with_transcriptions_works() {
  // Both transcriptions set through the builder must round-trip through the getters.
  Transcription userTranscription =
      Transcription.builder().text("user said hello").finished(true).build();
  Transcription modelTranscription =
      Transcription.builder().text("model said hi").finished(false).build();

  Event builtEvent =
      Event.builder()
          .id("event_id")
          .invocationId("invocation_id")
          .author("agent")
          .timestamp(123456789L)
          .inputTranscription(userTranscription)
          .outputTranscription(modelTranscription)
          .build();

  assertThat(builtEvent.inputTranscription()).hasValue(userTranscription);
  assertThat(builtEvent.outputTranscription()).hasValue(modelTranscription);
}

@Test
public void event_transcriptions_empty_by_default() {
  // An event built without transcriptions must expose empty Optionals, never null.
  Event bareEvent =
      Event.builder().id("event_id").invocationId("invocation_id").author("agent").build();

  assertThat(bareEvent.inputTranscription()).isEmpty();
  assertThat(bareEvent.outputTranscription()).isEmpty();
}

@Test
public void event_equals_differentiates_transcriptions() {
  // Two otherwise-identical events must not compare equal when only one carries a transcription.
  Transcription sampleTranscription =
      Transcription.builder().text("hello").finished(true).build();

  Event withTranscription =
      Event.builder()
          .id("event_id")
          .invocationId("invocation_id")
          .author("agent")
          .timestamp(123456789L)
          .inputTranscription(sampleTranscription)
          .build();

  Event withoutTranscription =
      Event.builder()
          .id("event_id")
          .invocationId("invocation_id")
          .author("agent")
          .timestamp(123456789L)
          .build();

  assertThat(withTranscription).isNotEqualTo(withoutTranscription);
}

@Test
public void event_json_serialization_with_transcriptions_works() throws Exception {
  // Round-trip through JSON must preserve both transcription fields and overall equality.
  Transcription userTranscription =
      Transcription.builder().text("user said hello").finished(true).build();
  Transcription modelTranscription =
      Transcription.builder().text("model said hi").finished(false).build();

  Event original =
      Event.builder()
          .id("event_id")
          .invocationId("invocation_id")
          .author("agent")
          .timestamp(123456789L)
          .inputTranscription(userTranscription)
          .outputTranscription(modelTranscription)
          .build();

  Event roundTripped = Event.fromJson(original.toJson());

  assertThat(roundTripped.inputTranscription()).hasValue(userTranscription);
  assertThat(roundTripped.outputTranscription()).hasValue(modelTranscription);
  assertThat(roundTripped).isEqualTo(original);
}

@Test
public void finalResponse_returnsTrueIfNoToolCalls() {
Event event =
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,6 +43,7 @@
import com.google.genai.types.FunctionDeclaration;
import com.google.genai.types.GenerateContentResponseUsageMetadata;
import com.google.genai.types.Part;
import com.google.genai.types.Transcription;
import io.opentelemetry.context.Context;
import io.opentelemetry.context.ContextKey;
import io.opentelemetry.context.Scope;
Expand Down Expand Up @@ -641,6 +642,94 @@ public void run_contextPropagation() {
assertThat(events.get(0).content()).hasValue(content);
}

@Test
public void postprocess_onlyInputTranscription_returnsEvent() {
  // A response carrying nothing but an input transcription must still yield an event.
  Transcription userTranscription =
      Transcription.builder().text("user said hello").finished(true).build();
  LlmResponse transcriptionOnlyResponse =
      LlmResponse.builder().inputTranscription(userTranscription).build();
  InvocationContext context =
      createInvocationContext(createTestAgent(createTestLlm(transcriptionOnlyResponse)));
  BaseLlmFlow flow = createBaseLlmFlowWithoutProcessors();
  Event seedEvent =
      Event.builder()
          .invocationId(context.invocationId())
          .author(context.agent().name())
          .build();

  List<Event> producedEvents =
      flow.postprocess(
              context,
              seedEvent,
              LlmRequest.builder().build(),
              transcriptionOnlyResponse,
              Context.current())
          .toList()
          .blockingGet();

  assertThat(producedEvents).hasSize(1);
  Event produced = getOnlyElement(producedEvents);
  assertThat(produced.inputTranscription()).hasValue(userTranscription);
  assertThat(produced.outputTranscription()).isEmpty();
}

@Test
public void postprocess_onlyOutputTranscription_returnsEvent() {
  // A response carrying nothing but an output transcription must still yield an event.
  Transcription modelTranscription =
      Transcription.builder().text("model replied hi").finished(false).build();
  LlmResponse transcriptionOnlyResponse =
      LlmResponse.builder().outputTranscription(modelTranscription).build();
  InvocationContext context =
      createInvocationContext(createTestAgent(createTestLlm(transcriptionOnlyResponse)));
  BaseLlmFlow flow = createBaseLlmFlowWithoutProcessors();
  Event seedEvent =
      Event.builder()
          .invocationId(context.invocationId())
          .author(context.agent().name())
          .build();

  List<Event> producedEvents =
      flow.postprocess(
              context,
              seedEvent,
              LlmRequest.builder().build(),
              transcriptionOnlyResponse,
              Context.current())
          .toList()
          .blockingGet();

  assertThat(producedEvents).hasSize(1);
  Event produced = getOnlyElement(producedEvents);
  assertThat(produced.outputTranscription()).hasValue(modelTranscription);
  assertThat(produced.inputTranscription()).isEmpty();
}

@Test
public void run_responseWithTranscriptions_propagatesTranscriptionsToEvent() {
  // End-to-end: both transcriptions on the model response must surface on the emitted event.
  Transcription userTranscription =
      Transcription.builder().text("user said hello").finished(true).build();
  Transcription modelTranscription =
      Transcription.builder().text("model replied hi").finished(true).build();
  Content replyContent = Content.fromParts(Part.fromText("model replied hi"));
  LlmResponse modelResponse =
      LlmResponse.builder()
          .content(replyContent)
          .inputTranscription(userTranscription)
          .outputTranscription(modelTranscription)
          .build();
  InvocationContext context =
      createInvocationContext(createTestAgent(createTestLlm(modelResponse)));
  BaseLlmFlow flow = createBaseLlmFlowWithoutProcessors();

  List<Event> producedEvents = flow.run(context).toList().blockingGet();

  assertThat(producedEvents).hasSize(1);
  Event produced = getOnlyElement(producedEvents);
  assertThat(produced.inputTranscription()).hasValue(userTranscription);
  assertThat(produced.outputTranscription()).hasValue(modelTranscription);
}

@Test
public void postprocess_noResponseProcessors_onlyUsageMetadata_returnsEvent() {
GenerateContentResponseUsageMetadata usageMetadata =
Expand Down
Loading