diff --git a/README.md b/README.md
index 962efba..4dc306b 100644
--- a/README.md
+++ b/README.md
@@ -5,7 +5,7 @@
SimDeck is a developer tool built for streamlining mobile app development for coding agents.
- Drive Simulator from the CLI using agents, browser, and automated tests on macOS.
+ Drive iOS Simulators and Android emulators from the CLI using agents, browser, and automated tests on macOS.
@@ -35,8 +35,9 @@ view inside the editor.
## Features
-- Local simulator video stream over browser-native WebRTC H.264
-- Full simulator control & inspection using private accessibility APIs - available using `simdeck` CLI
+- Local iOS Simulator and Android emulator video over browser-native WebRTC H.264
+- Android emulator frames are sourced from emulator gRPC and encoded through macOS VideoToolbox
+- Full simulator control & inspection using private iOS accessibility APIs and Android UIAutomator - available using `simdeck` CLI
- Real-time screen `describe` command using accessibility view tree - available in token-efficient format for agents
- CoreSimulator chrome asset rendering for device bezels
- NativeScript, React Native, UIKit and SwiftUI runtime inspector plugins to view app's view hierarchy live
@@ -133,6 +134,7 @@ simdeck boot
simdeck shutdown
simdeck erase
simdeck install /path/to/App.app
+simdeck install android: /path/to/app.apk
simdeck uninstall com.example.App
simdeck open-url https://example.com
simdeck launch com.apple.Preferences
@@ -172,6 +174,14 @@ simdeck logs --seconds 30 --limit 200
without launching Simulator.app, then falls back to `xcrun simctl` when private
booting is unavailable.
+Android emulators appear in `simdeck list` with IDs like
+`android:SimDeck_Pixel_8_API_36`. For Android IDs, lifecycle, install, launch,
+URL, screenshot, logs, UIAutomator `describe`, tap, swipe, text, key, home, app
+switcher, rotation, pasteboard, and browser live view route through the Android
+SDK tools (`emulator` and `adb`) plus the emulator gRPC screenshot stream for
+live video. `simdeck stream` remains iOS-only because it writes the iOS H.264
+elementary stream.
+
`stream` writes an Annex B H.264 elementary stream to stdout for diagnostics or
external tools such as `ffplay`.
diff --git a/cli/native/XCWNativeBridge.h b/cli/native/XCWNativeBridge.h
index bc02dae..768a3fe 100644
--- a/cli/native/XCWNativeBridge.h
+++ b/cli/native/XCWNativeBridge.h
@@ -84,6 +84,11 @@ bool xcw_native_session_rotate_right(void * _Nonnull handle, char * _Nullable *
bool xcw_native_session_rotate_left(void * _Nonnull handle, char * _Nullable * _Nullable error_message);
void xcw_native_session_set_frame_callback(void * _Nonnull handle, xcw_native_frame_callback _Nullable callback, void * _Nullable user_data);
+void * _Nullable xcw_native_h264_encoder_create(xcw_native_frame_callback _Nullable callback, void * _Nullable user_data, char * _Nullable * _Nullable error_message);
+void xcw_native_h264_encoder_destroy(void * _Nullable handle);
+bool xcw_native_h264_encoder_encode_rgba(void * _Nonnull handle, const uint8_t * _Nonnull rgba, size_t length, uint32_t width, uint32_t height, uint64_t timestamp_us, char * _Nullable * _Nullable error_message);
+void xcw_native_h264_encoder_request_keyframe(void * _Nonnull handle);
+
void xcw_native_free_string(char * _Nullable value);
void xcw_native_free_bytes(xcw_native_owned_bytes bytes);
void xcw_native_release_shared_bytes(xcw_native_shared_bytes bytes);
diff --git a/cli/native/XCWNativeBridge.m b/cli/native/XCWNativeBridge.m
index bdbaa45..4f73f96 100644
--- a/cli/native/XCWNativeBridge.m
+++ b/cli/native/XCWNativeBridge.m
@@ -3,11 +3,13 @@
#import "DFPrivateSimulatorDisplayBridge.h"
#import "XCWAccessibilityBridge.h"
#import "XCWChromeRenderer.h"
+#import "XCWH264Encoder.h"
#import "XCWNativeSession.h"
#import "XCWSimctl.h"
#import
#import
+#import <CoreVideo/CoreVideo.h>
#include
#include
@@ -63,10 +65,190 @@ static xcw_native_owned_bytes XCWOwnedBytesFromData(NSData *data) {
return bytes;
}
+static xcw_native_shared_bytes XCWSharedBytesFromData(NSData *data) {
+ if (data.length == 0) {
+ return (xcw_native_shared_bytes){0};
+ }
+
+ CFTypeRef owner = CFRetain((__bridge CFTypeRef)data);
+ return (xcw_native_shared_bytes){
+ .data = data.bytes,
+ .length = data.length,
+ .owner = (const void *)owner,
+ };
+}
+
static XCWNativeSession *XCWNativeSessionFromHandle(void *handle) {
return (__bridge XCWNativeSession *)handle;
}
+@interface XCWNativeH264Encoder : NSObject
+
+- (instancetype)initWithFrameCallback:(xcw_native_frame_callback)callback
+ userData:(void *)userData;
+- (BOOL)encodeRGBA:(const uint8_t *)rgba
+ length:(size_t)length
+ width:(uint32_t)width
+ height:(uint32_t)height
+ error:(NSError * _Nullable __autoreleasing *)error;
+- (void)requestKeyFrame;
+- (void)invalidate;
+
+@end
+
+@implementation XCWNativeH264Encoder {
+ XCWH264Encoder *_encoder;
+ xcw_native_frame_callback _callback;
+ void *_callbackUserData;
+ uint64_t _frameSequence;
+}
+
+- (instancetype)initWithFrameCallback:(xcw_native_frame_callback)callback
+ userData:(void *)userData {
+ self = [super init];
+ if (self == nil) {
+ return nil;
+ }
+
+ _callback = callback;
+ _callbackUserData = userData;
+ __weak typeof(self) weakSelf = self;
+ @synchronized (XCWNativeH264Encoder.class) {
+ const char *previousCodec = getenv("SIMDECK_VIDEO_CODEC");
+ char *previousCodecCopy = previousCodec != NULL ? strdup(previousCodec) : NULL;
+ const char *androidCodec = getenv("SIMDECK_ANDROID_VIDEO_CODEC");
+ if (androidCodec == NULL || strlen(androidCodec) == 0) {
+ androidCodec = "software";
+ }
+ setenv("SIMDECK_VIDEO_CODEC", androidCodec, 1);
+ _encoder = [[XCWH264Encoder alloc] initWithOutputHandler:^(NSData *sampleData,
+ uint64_t timestampUs,
+ BOOL isKeyFrame,
+ NSString * _Nullable codec,
+ NSData * _Nullable decoderConfig,
+ CGSize dimensions) {
+ __strong typeof(weakSelf) strongSelf = weakSelf;
+ if (strongSelf == nil || strongSelf->_callback == NULL || sampleData.length == 0) {
+ return;
+ }
+ strongSelf->_frameSequence += 1;
+ xcw_native_frame frame = {
+ .frame_sequence = strongSelf->_frameSequence,
+ .timestamp_us = timestampUs,
+ .is_keyframe = isKeyFrame,
+ .width = (uint32_t)llround(dimensions.width),
+ .height = (uint32_t)llround(dimensions.height),
+ .codec = codec.UTF8String,
+ .description = XCWSharedBytesFromData(decoderConfig),
+ .data = XCWSharedBytesFromData(sampleData),
+ };
+ strongSelf->_callback(&frame, strongSelf->_callbackUserData);
+ }];
+ if (previousCodecCopy != NULL) {
+ setenv("SIMDECK_VIDEO_CODEC", previousCodecCopy, 1);
+ free(previousCodecCopy);
+ } else {
+ unsetenv("SIMDECK_VIDEO_CODEC");
+ }
+ }
+ return self;
+}
+
+- (void)dealloc {
+ [self invalidate];
+}
+
+- (BOOL)encodeRGBA:(const uint8_t *)rgba
+ length:(size_t)length
+ width:(uint32_t)width
+ height:(uint32_t)height
+ error:(NSError * _Nullable __autoreleasing *)error {
+ if (rgba == NULL || width == 0 || height == 0) {
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:1
+ userInfo:@{ NSLocalizedDescriptionKey: @"RGBA frame input was empty." }];
+ }
+ return NO;
+ }
+ size_t expectedLength = (size_t)width * (size_t)height * 4;
+ if (length < expectedLength) {
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:2
+ userInfo:@{ NSLocalizedDescriptionKey: @"RGBA frame input was truncated." }];
+ }
+ return NO;
+ }
+
+ NSDictionary *attributes = @{
+ (__bridge NSString *)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA),
+ (__bridge NSString *)kCVPixelBufferWidthKey: @(width),
+ (__bridge NSString *)kCVPixelBufferHeightKey: @(height),
+ (__bridge NSString *)kCVPixelBufferIOSurfacePropertiesKey: @{},
+ };
+ CVPixelBufferRef pixelBuffer = NULL;
+ CVReturn createStatus = CVPixelBufferCreate(kCFAllocatorDefault,
+ (size_t)width,
+ (size_t)height,
+ kCVPixelFormatType_32BGRA,
+ (__bridge CFDictionaryRef)attributes,
+ &pixelBuffer);
+ if (createStatus != kCVReturnSuccess || pixelBuffer == NULL) {
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:createStatus
+ userInfo:@{ NSLocalizedDescriptionKey: @"Unable to allocate a VideoToolbox pixel buffer." }];
+ }
+ return NO;
+ }
+
+ CVReturn lockStatus = CVPixelBufferLockBaseAddress(pixelBuffer, 0);
+ if (lockStatus != kCVReturnSuccess) {
+ CVPixelBufferRelease(pixelBuffer);
+ if (error != NULL) {
+ *error = [NSError errorWithDomain:@"SimDeck.NativeH264Encoder"
+ code:lockStatus
+ userInfo:@{ NSLocalizedDescriptionKey: @"Unable to lock a VideoToolbox pixel buffer." }];
+ }
+ return NO;
+ }
+
+ uint8_t *dst = CVPixelBufferGetBaseAddress(pixelBuffer);
+ size_t dstRowBytes = CVPixelBufferGetBytesPerRow(pixelBuffer);
+ size_t srcRowBytes = (size_t)width * 4;
+ for (uint32_t y = 0; y < height; y += 1) {
+ const uint8_t *srcRow = rgba + ((size_t)y * srcRowBytes);
+ uint8_t *dstRow = dst + ((size_t)y * dstRowBytes);
+ for (uint32_t x = 0; x < width; x += 1) {
+ const uint8_t *src = srcRow + ((size_t)x * 4);
+ uint8_t *pixel = dstRow + ((size_t)x * 4);
+ pixel[0] = src[2];
+ pixel[1] = src[1];
+ pixel[2] = src[0];
+ pixel[3] = src[3];
+ }
+ }
+ CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
+ [_encoder encodePixelBuffer:pixelBuffer];
+ CVPixelBufferRelease(pixelBuffer);
+ return YES;
+}
+
+- (void)requestKeyFrame {
+ [_encoder requestKeyFrame];
+}
+
+- (void)invalidate {
+ [_encoder invalidate];
+}
+
+@end
+
+static XCWNativeH264Encoder *XCWNativeH264EncoderFromHandle(void *handle) {
+ return (__bridge XCWNativeH264Encoder *)handle;
+}
+
static BOOL XCWPerformSimctlAction(char **errorMessage, BOOL (^action)(XCWSimctl *simctl, NSError **error)) {
XCWSimctl *simctl = [[XCWSimctl alloc] init];
NSError *error = nil;
@@ -803,6 +985,58 @@ void xcw_native_session_set_frame_callback(void *handle, xcw_native_frame_callba
}
}
+void *xcw_native_h264_encoder_create(xcw_native_frame_callback callback, void *user_data, char **error_message) {
+ @autoreleasepool {
+ XCWNativeH264Encoder *encoder = [[XCWNativeH264Encoder alloc] initWithFrameCallback:callback
+ userData:user_data];
+ if (encoder == nil) {
+ if (error_message != NULL) {
+ *error_message = XCWCopyCString(@"Unable to create the native H.264 encoder.");
+ }
+ return NULL;
+ }
+ return (__bridge_retained void *)encoder;
+ }
+}
+
+void xcw_native_h264_encoder_destroy(void *handle) {
+ if (handle == NULL) {
+ return;
+ }
+ @autoreleasepool {
+ XCWNativeH264Encoder *encoder = CFBridgingRelease(handle);
+ [encoder invalidate];
+ }
+}
+
+bool xcw_native_h264_encoder_encode_rgba(void *handle,
+ const uint8_t *rgba,
+ size_t length,
+ uint32_t width,
+ uint32_t height,
+ uint64_t timestamp_us,
+ char **error_message) {
+ (void)timestamp_us;
+ @autoreleasepool {
+ NSError *error = nil;
+ BOOL ok = [XCWNativeH264EncoderFromHandle(handle) encodeRGBA:rgba
+ length:length
+ width:width
+ height:height
+ error:&error];
+ if (!ok) {
+ XCWSetErrorMessage(error_message, error);
+ }
+ return ok;
+ }
+}
+
+void xcw_native_h264_encoder_request_keyframe(void *handle) {
+ @autoreleasepool {
+ [XCWNativeH264EncoderFromHandle(handle) requestKeyFrame];
+ }
+}
+
void xcw_native_free_string(char *value) {
if (value != NULL) {
free(value);
diff --git a/client/src/api/types.ts b/client/src/api/types.ts
index 44a65b5..2092319 100644
--- a/client/src/api/types.ts
+++ b/client/src/api/types.ts
@@ -10,11 +10,17 @@ export interface PrivateDisplayInfo {
export interface SimulatorMetadata {
udid: string;
name: string;
+ platform?: "ios-simulator" | "android-emulator" | string;
runtimeName?: string;
runtimeIdentifier?: string;
deviceTypeName?: string;
deviceTypeIdentifier?: string;
isBooted: boolean;
+ android?: {
+ avdName?: string;
+ grpcPort?: number;
+ serial?: string;
+ };
privateDisplay?: PrivateDisplayInfo;
}
@@ -43,6 +49,7 @@ export interface ChromeProfile {
screenWidth: number;
screenHeight: number;
cornerRadius: number;
+ chromeStyle?: "asset" | "css-android" | string;
hasScreenMask?: boolean;
}
diff --git a/client/src/app/AppShell.tsx b/client/src/app/AppShell.tsx
index b3f9580..56e45df 100644
--- a/client/src/app/AppShell.tsx
+++ b/client/src/app/AppShell.tsx
@@ -164,6 +164,9 @@ function shouldUseRemoteStreamDefault(apiRoot: string): boolean {
}
function shouldRenderNativeChrome(simulator: SimulatorMetadata): boolean {
+ if (simulator.platform === "android-emulator") {
+ return true;
+ }
const identifier = simulator.deviceTypeIdentifier ?? "";
const name = simulator.name ?? "";
return (
@@ -604,9 +607,12 @@ export function AppShell({
const chromeUrl = selectedSimulator
? buildChromeUrl(selectedSimulator.udid, streamStamp)
: "";
+ const chromeUsesAsset = Boolean(
+ viewportChromeProfile && viewportChromeProfile.chromeStyle !== "css-android",
+ );
const chromeRequired = Boolean(
(shouldRenderChrome && !chromeProfileReady) ||
- (viewportChromeProfile && chromeUrl),
+ (chromeUsesAsset && chromeUrl),
);
const simulatorRotationQuarterTurns =
normalizeSimulatorRotationQuarterTurns(selectedSimulator);
@@ -1665,6 +1671,7 @@ export function AppShell({
chromeProfile={viewportChromeProfile}
chromeRequired={chromeRequired}
chromeScreenStyle={viewportScreenStyle}
+ chromeStyle={viewportChromeProfile?.chromeStyle}
chromeUrl={chromeUrl}
debugPanel={
debugVisible ? (
diff --git a/client/src/features/stream/streamWorkerClient.ts b/client/src/features/stream/streamWorkerClient.ts
index 6c0f4b2..63cfe2f 100644
--- a/client/src/features/stream/streamWorkerClient.ts
+++ b/client/src/features/stream/streamWorkerClient.ts
@@ -26,7 +26,7 @@ let activeWebRtcControlChannel: RTCDataChannel | null = null;
let activeWebRtcTelemetryChannel: RTCDataChannel | null = null;
let activeStreamClient: StreamWorkerClient | null = null;
-export type StreamBackend = "webrtc";
+export type StreamBackend = "screenshot" | "webrtc";
export function sendWebRtcControlMessage(encoded: string): boolean {
return sendDataChannelMessage(activeWebRtcControlChannel, encoded);
diff --git a/client/src/features/stream/useLiveStream.ts b/client/src/features/stream/useLiveStream.ts
index d3eccf5..293f093 100644
--- a/client/src/features/stream/useLiveStream.ts
+++ b/client/src/features/stream/useLiveStream.ts
@@ -1,6 +1,6 @@
import { useEffect, useRef, useState } from "react";
-import { apiHeaders } from "../../api/client";
+import { accessTokenFromLocation, apiHeaders } from "../../api/client";
import { apiUrl } from "../../api/config";
import type { SimulatorMetadata } from "../../api/types";
import type { Size } from "../viewport/types";
@@ -26,6 +26,10 @@ const CLIENT_TELEMETRY_INTERVAL_MS = 1000;
const REMOTE_CLIENT_TELEMETRY_INTERVAL_MS = 5000;
const CLIENT_TELEMETRY_ID_STORAGE_KEY = "simdeck.streamClientId";
const VISUAL_ARTIFACT_TELEMETRY_INTERVAL_MS = 1000;
+const SCREENSHOT_POLL_INTERVAL_MS = 500;
+const ANDROID_GRPC_STREAM_MAX_EDGE = 960;
+const ANDROID_GRPC_STREAM_MAX_FPS = 30;
+const ANDROID_FRAME_HEADER_BYTES = 32;
interface UseLiveStreamOptions {
canvasElement: HTMLCanvasElement | null;
@@ -88,6 +92,31 @@ function buildClientTelemetryUrl(): string {
).toString();
}
+function isAndroidSimulator(simulator: SimulatorMetadata | null): boolean {
+ return simulator?.platform === "android-emulator";
+}
+
+function clearCanvas(canvasElement: HTMLCanvasElement | null): void {
+ if (!canvasElement) {
+ return;
+ }
+ const context = canvasElement.getContext("2d");
+ if (!context) {
+ return;
+ }
+ context.clearRect(0, 0, canvasElement.width, canvasElement.height);
+}
+
+function buildWebSocketUrl(path: string): string {
+ const url = new URL(apiUrl(path), window.location.href);
+ url.protocol = url.protocol === "https:" ? "wss:" : "ws:";
+ const token = accessTokenFromLocation();
+ if (token) {
+ url.searchParams.set("simdeckToken", token);
+ }
+ return url.toString();
+}
+
export function useLiveStream({
canvasElement,
paused = false,
@@ -114,6 +143,8 @@ export function useLiveStream({
const [fps, setFps] = useState(0);
const [streamCanvasRevision, setStreamCanvasRevision] = useState(0);
const [runtimeInfo] = useState(detectRuntimeInfo);
+ const androidSimulator = isAndroidSimulator(simulator);
+ const androidRawFrameStream = false;
if (!clientTelemetryIdRef.current) {
clientTelemetryIdRef.current = createClientTelemetryId();
@@ -144,6 +175,12 @@ export function useLiveStream({
}, []);
useEffect(() => {
+ if (androidRawFrameStream) {
+ workerClientRef.current?.destroy();
+ workerClientRef.current = null;
+ return;
+ }
+
if (paused || !canvasElement || workerClientRef.current) {
return;
}
@@ -202,7 +239,7 @@ export function useLiveStream({
workerClient.destroy();
workerClientRef.current = null;
};
- }, [canvasElement, paused]);
+ }, [androidRawFrameStream, canvasElement, paused]);
useEffect(() => {
latestDecodedFramesRef.current = stats.decodedFrames;
@@ -255,6 +292,10 @@ export function useLiveStream({
}, [simulator?.udid]);
useEffect(() => {
+ if (androidRawFrameStream) {
+ return;
+ }
+
const workerClient = workerClientRef.current;
if (!workerClient) {
return;
@@ -290,10 +331,264 @@ export function useLiveStream({
return () => {
workerClient.disconnect();
};
- }, [canvasElement, simulator?.isBooted, simulator?.udid, paused, remote]);
+ }, [
+ androidRawFrameStream,
+ canvasElement,
+ simulator?.isBooted,
+ simulator?.udid,
+ paused,
+ remote,
+ ]);
+
+ useEffect(() => {
+ if (!androidRawFrameStream) {
+ return;
+ }
+
+ setDeviceNaturalSize(null);
+ setStats(createEmptyStreamStats());
+ setStatus({ state: "idle" });
+ setError("");
+ setFps(0);
+
+ if (paused || !canvasElement || !simulator?.isBooted) {
+ clearCanvas(canvasElement);
+ return;
+ }
+
+ const context = canvasElement.getContext("2d", { alpha: false });
+ if (!context) {
+ const message = "Unable to attach the screenshot stream canvas.";
+ setError(message);
+ setStatus({ error: message, state: "error" });
+ return;
+ }
+
+ let cancelled = false;
+ let frameSequence = 0;
+ let lastFrameRenderedAt = 0;
+ const controller = new AbortController();
+ let fallbackStarted = false;
+
+ const noteRenderedFrame = (
+ width: number,
+ height: number,
+ renderStartedAt: number,
+ codec: string,
+ ) => {
+ const frameRenderedAt = performance.now();
+ const renderMs = frameRenderedAt - renderStartedAt;
+ const latestFrameGapMs =
+ lastFrameRenderedAt > 0 ? frameRenderedAt - lastFrameRenderedAt : 0;
+ lastFrameRenderedAt = frameRenderedAt;
+ frameSequence += 1;
+ setDeviceNaturalSize({ height, width });
+ setStatus({ state: "streaming" });
+ setError("");
+ setStats((current) => ({
+ ...current,
+ averageRenderMs:
+ (current.averageRenderMs * Math.max(0, frameSequence - 1) +
+ renderMs) /
+ frameSequence,
+ codec,
+ decodedFrames: frameSequence,
+ frameSequence,
+ height,
+ latestFrameGapMs,
+ latestRenderMs: renderMs,
+ maxRenderMs: Math.max(current.maxRenderMs, renderMs),
+ receivedPackets: frameSequence,
+ renderedFrames: frameSequence,
+ width,
+ }));
+ };
+
+ const renderRgbaFrame = (buffer: ArrayBuffer) => {
+ if (buffer.byteLength <= ANDROID_FRAME_HEADER_BYTES) {
+ throw new Error("Android frame was truncated.");
+ }
+ const view = new DataView(buffer);
+ if (
+ view.getUint8(0) !== 0x53 ||
+ view.getUint8(1) !== 0x44 ||
+ view.getUint8(2) !== 0x41 ||
+ view.getUint8(3) !== 0x46
+ ) {
+ throw new Error("Android frame had an invalid header.");
+ }
+ const width = view.getUint32(8, true);
+ const height = view.getUint32(12, true);
+ const expectedBytes = ANDROID_FRAME_HEADER_BYTES + width * height * 4;
+ if (width <= 0 || height <= 0 || buffer.byteLength < expectedBytes) {
+ throw new Error("Android frame dimensions were invalid.");
+ }
+ const renderStartedAt = performance.now();
+ if (canvasElement.width !== width || canvasElement.height !== height) {
+ canvasElement.width = width;
+ canvasElement.height = height;
+ }
+ context.putImageData(
+ new ImageData(
+ new Uint8ClampedArray(
+ buffer,
+ ANDROID_FRAME_HEADER_BYTES,
+ width * height * 4,
+ ),
+ width,
+ height,
+ ),
+ 0,
+ 0,
+ );
+ noteRenderedFrame(width, height, renderStartedAt, "rgba-grpc");
+ };
+
+ const renderScreenshot = async () => {
+ const url = new URL(
+ apiUrl(
+ `/api/simulators/${encodeURIComponent(simulator.udid)}/screenshot.png`,
+ ),
+ window.location.href,
+ );
+ url.searchParams.set("stamp", String(Date.now()));
+ const response = await fetch(url.toString(), {
+ cache: "no-store",
+ headers: apiHeaders({ Accept: "image/png" }),
+ signal: controller.signal,
+ });
+ if (!response.ok) {
+ throw new Error(`Screenshot request failed with ${response.status}.`);
+ }
+
+ const renderStartedAt = performance.now();
+ const bitmap = await createImageBitmap(await response.blob());
+ try {
+ if (
+ canvasElement.width !== bitmap.width ||
+ canvasElement.height !== bitmap.height
+ ) {
+ canvasElement.width = bitmap.width;
+ canvasElement.height = bitmap.height;
+ }
+ context.drawImage(bitmap, 0, 0);
+ } finally {
+ bitmap.close?.();
+ }
+
+ noteRenderedFrame(canvasElement.width, canvasElement.height, renderStartedAt, "png");
+ };
+
+ const pollScreenshots = async () => {
+ setStatus({ detail: "Waiting for Android screenshots.", state: "connecting" });
+ while (!cancelled) {
+ const startedAt = performance.now();
+ try {
+ await renderScreenshot();
+ } catch (pollError) {
+ if (cancelled || controller.signal.aborted) {
+ return;
+ }
+ const message =
+ pollError instanceof Error
+ ? pollError.message
+ : "Unable to load Android screenshot.";
+ setError(message);
+ setStatus({ error: message, state: "error" });
+ }
+ const elapsedMs = performance.now() - startedAt;
+ const waitMs = Math.max(80, SCREENSHOT_POLL_INTERVAL_MS - elapsedMs);
+ await new Promise((resolve) => window.setTimeout(resolve, waitMs));
+ }
+ };
+
+ const startScreenshotFallback = () => {
+ if (fallbackStarted || cancelled) {
+ return;
+ }
+ fallbackStarted = true;
+ void pollScreenshots();
+ };
+
+ const streamUrl = new URL(
+ buildWebSocketUrl(
+ `/api/simulators/${encodeURIComponent(simulator.udid)}/android/frames`,
+ ),
+ );
+ streamUrl.searchParams.set(
+ "maxEdge",
+ String(streamConfig?.maxEdge ?? ANDROID_GRPC_STREAM_MAX_EDGE),
+ );
+ streamUrl.searchParams.set(
+ "maxFps",
+ String(Math.min(60, streamConfig?.fps ?? ANDROID_GRPC_STREAM_MAX_FPS)),
+ );
+ const frameSocket = new WebSocket(streamUrl.toString());
+ frameSocket.binaryType = "arraybuffer";
+ frameSocket.onopen = () => {
+ setStatus({ detail: "Waiting for Android emulator frames.", state: "connecting" });
+ };
+ frameSocket.onmessage = (event) => {
+ if (cancelled) {
+ return;
+ }
+ if (typeof event.data === "string") {
+ try {
+ const message = JSON.parse(event.data) as { error?: string; type?: string };
+ if (message.error) {
+ throw new Error(message.error);
+ }
+ } catch (streamError) {
+ const message =
+ streamError instanceof Error
+ ? streamError.message
+ : "Android emulator frame stream failed.";
+ setError(message);
+ setStatus({ error: message, state: "error" });
+ startScreenshotFallback();
+ }
+ return;
+ }
+ try {
+ renderRgbaFrame(event.data as ArrayBuffer);
+ } catch (streamError) {
+ const message =
+ streamError instanceof Error
+ ? streamError.message
+ : "Unable to render Android emulator frame.";
+ setError(message);
+ setStatus({ error: message, state: "error" });
+ startScreenshotFallback();
+ }
+ };
+ frameSocket.onerror = () => {
+ if (frameSequence === 0) {
+ startScreenshotFallback();
+ }
+ };
+ frameSocket.onclose = () => {
+ if (!cancelled && frameSequence === 0) {
+ startScreenshotFallback();
+ }
+ };
+ return () => {
+ cancelled = true;
+ controller.abort();
+ frameSocket.close();
+ };
+ }, [
+ androidRawFrameStream,
+ canvasElement,
+ simulator?.isBooted,
+ simulator?.udid,
+ paused,
+ streamConfig?.fps,
+ streamConfig?.maxEdge,
+ ]);
useEffect(() => {
if (
+ androidRawFrameStream ||
streamConfigApplyKey <= 0 ||
paused ||
!simulator?.isBooted ||
@@ -310,6 +605,7 @@ export function useLiveStream({
streamConfig?.fps,
streamConfig?.maxEdge,
streamConfig?.quality,
+ androidRawFrameStream,
]);
useEffect(() => {
@@ -378,15 +674,18 @@ export function useLiveStream({
};
}, [remote, simulator?.udid]);
+ const effectiveRuntimeInfo = runtimeInfo;
+ const streamBackend: StreamBackend = "webrtc";
+
return {
deviceNaturalSize,
error,
fps,
hasFrame: status.state === "streaming" || stats.decodedFrames > 0,
- runtimeInfo,
+ runtimeInfo: effectiveRuntimeInfo,
stats,
status,
- streamBackend: "webrtc",
- streamCanvasKey: `webrtc-${streamCanvasRevision}`,
+ streamBackend,
+ streamCanvasKey: `${streamBackend}-${streamCanvasRevision}`,
};
}
diff --git a/client/src/features/viewport/DeviceChrome.tsx b/client/src/features/viewport/DeviceChrome.tsx
index be333aa..a00a935 100644
--- a/client/src/features/viewport/DeviceChrome.tsx
+++ b/client/src/features/viewport/DeviceChrome.tsx
@@ -12,6 +12,7 @@ interface DeviceChromeProps {
accessibilityRoots: AccessibilityNode[];
accessibilitySelectedId: string;
chromeScreenStyle: CSSProperties | null;
+ chromeStyle?: string;
chromeUrl: string;
hasFrame: boolean;
isBooted: boolean;
@@ -47,6 +48,7 @@ export function DeviceChrome({
accessibilityRoots,
accessibilitySelectedId,
chromeScreenStyle,
+ chromeStyle,
chromeUrl,
hasFrame,
isBooted,
@@ -76,23 +78,28 @@ export function DeviceChrome({
useChromeProfile,
}: DeviceChromeProps) {
if (useChromeProfile) {
+ const useCssAndroidChrome = chromeStyle === "css-android";
return (
-
+ {useCssAndroidChrome ? (
+
+ ) : (
+
+ )}
simdeck erase
```
-`list` returns the same simulator inventory the browser UI renders. Lifecycle commands return JSON and use the native bridge, preferring private CoreSimulator paths when available and falling back to `xcrun simctl`.
+`list` returns the same simulator inventory the browser UI renders, including
+Android AVDs as IDs like `android:Pixel_8_API_36`. iOS lifecycle commands use
+the native bridge, preferring private CoreSimulator paths when available and
+falling back to `xcrun simctl`. Android lifecycle commands use the Android SDK
+`emulator` and `adb` tools.
## Apps And URLs
```sh
simdeck install /path/to/App.app
+simdeck install android: /path/to/app.apk
simdeck uninstall com.example.App
simdeck launch com.example.App
simdeck open-url https://example.com
@@ -246,9 +251,13 @@ simdeck chrome-profile
`stream` writes Annex B H.264 samples to stdout and runs until interrupted, or
until `--frames` samples have been written. It is intended for diagnostics and
-external tools.
+external tools, and is iOS-only. Android live viewing in the browser uses the
+WebRTC H.264 endpoint; raw frames come from emulator gRPC and are encoded
+through VideoToolbox.
-`logs` fetches recent simulator logs. `chrome-profile` returns the CoreSimulator chrome layout used by the browser viewport.
+`logs` fetches recent simulator logs or Android `logcat` output. `chrome-profile`
+returns the CoreSimulator chrome layout for iOS and a screen-sized profile for
+Android.
## HTTP Fast Path
diff --git a/docs/extensions/browser-client.md b/docs/extensions/browser-client.md
index b9124c3..938d818 100644
--- a/docs/extensions/browser-client.md
+++ b/docs/extensions/browser-client.md
@@ -44,7 +44,7 @@ client/
| `api/` | Typed wrappers around the SimDeck REST API and shared TypeScript types. |
| `features/simulators/` | Sidebar list of simulators plus boot/shutdown affordances. |
| `features/viewport/` | Frame canvas, chrome compositing, hit testing. |
-| `features/stream/` | WebRTC client, receiver stats, and video frame plumbing. |
+| `features/stream/` | WebRTC H.264 client for iOS and Android, receiver stats, and frame plumbing. |
| `features/input/` | Touch / keyboard / hardware-button affordances. |
| `features/accessibility/` | Accessibility tree pane and source switcher. |
| `features/toolbar/` | Top toolbar (rotate, home, app switcher, dark mode toggle, refresh). |
@@ -55,8 +55,8 @@ client/
2. `main.tsx` mounts the React tree at `#root`.
3. `AppShell` calls `GET /api/health` to learn the active encoder mode.
4. The simulator sidebar fetches `GET /api/simulators` and renders the list.
-5. Selecting a simulator posts an SDP offer to `/api/simulators/<udid>/webrtc/offer`.
-6. The browser renders the H.264 video track through native WebRTC playback.
+5. Selecting a device posts an SDP offer to `/api/simulators/<udid>/webrtc/offer`.
+6. The browser renders the H.264 video track through native WebRTC playback. Android emulator frames are sourced from emulator gRPC on the server and encoded through VideoToolbox before WebRTC delivery.
7. Touch and key events round-trip through `POST /api/simulators/<udid>/touch` and `/key`.
## Dev workflow
diff --git a/docs/guide/architecture.md b/docs/guide/architecture.md
index f84794b..ee75fd7 100644
--- a/docs/guide/architecture.md
+++ b/docs/guide/architecture.md
@@ -4,13 +4,13 @@ SimDeck is intentionally split into a small number of clearly-scoped layers. Eve
## High-level layout
-SimDeck has three layers stacked between the browser and the iOS Simulator:
+SimDeck has three layers stacked between the browser and the target device:
-1. **Browser / VS Code** runs the React client from `client/`. It speaks HTTP for control and WebRTC for live video, served by the Rust server.
+1. **Browser / VS Code** runs the React client from `client/`. It speaks HTTP for control and WebRTC H.264 for live video, served by the Rust server.
2. **The Rust server** (`server/`, built on `axum` + `tokio`) owns the CLI entrypoint, project daemon lifecycle, REST routes (`api/`), the stream transports (`transport/`), the inspector WebSocket hub (`inspector.rs`), the per-UDID session registry (`simulators/`), metrics, and log streaming.
-3. **The Objective-C bridge** (`cli/`) is reached through a narrow C ABI in `cli/native/XCWNativeBridge.*`. It wraps `xcrun simctl`, the private `CoreSimulator` direct-boot path, the per-session hardware/software H.264 encoder, the headless display bridge that produces frames and accepts HID input, and the device-chrome renderer.
+3. **Native device bridges** own platform-specific work. The Objective-C bridge (`cli/`) is reached through a narrow C ABI in `cli/native/XCWNativeBridge.*` for iOS. The Rust Android bridge (`server/src/android.rs`) shells out to the Android SDK for AVD discovery, emulator lifecycle, ADB input, screenshots, UIAutomator, and logcat.
-Underneath all of that is the iOS Simulator itself — `CoreSimulator` for lifecycle, `SimulatorKit` for chrome assets.
+Underneath all of that are the iOS Simulator (`CoreSimulator` and `SimulatorKit`) and the Android emulator (`emulator` and `adb`).
## Layer responsibilities
@@ -24,6 +24,7 @@ Key modules:
| ----------------------------------- | -------------------------------------------------------------------------------------------- |
| `server/src/main.rs` | CLI entrypoint, project daemon management, AppKit main-thread shim, tokio runtime bootstrap. |
| `server/src/api/routes.rs` | Every `/api/*` route, including simulator control, accessibility, and inspector proxy. |
+| `server/src/android.rs` | Android AVD discovery, emulator lifecycle, ADB input, screenshots, UIAutomator, and logcat. |
| `server/src/transport/webrtc.rs` | WebRTC offer/answer endpoint for H.264 browser video. |
| `server/src/transport/packet.rs` | Shared encoded frame type used between simulator sessions and transports. |
| `server/src/inspector.rs` | WebSocket hub for the NativeScript runtime inspector. |
@@ -53,13 +54,16 @@ Inside the bridge:
### `client/` — React browser UI
-The React app served at `/` is a thin shell that calls the REST API and consumes live video over WebRTC H.264.
+The React app served at `/` is a thin shell that calls the REST API. It consumes
+live device video over WebRTC H.264. iOS frames come from the native simulator
+display bridge; Android frames come from emulator gRPC `streamScreenshot` and
+are encoded through VideoToolbox on the server.
Layout under `client/src/`:
- `app/AppShell.tsx` — top-level shell.
- `api/` — typed wrappers around `/api/*` (`client.ts`, `controls.ts`, `simulators.ts`, `types.ts`).
-- `features/stream/` — WebRTC client, receiver stats, and video frame plumbing.
+- `features/stream/` — WebRTC client, receiver stats, and frame plumbing.
- `features/viewport/` — frame canvas, hit testing, chrome compositing.
- `features/input/` — touch/keyboard/hardware button affordances.
- `features/accessibility/` — accessibility tree pane and source switcher.
@@ -84,7 +88,7 @@ Most control endpoints follow the same path: a typed Rust handler in `server/src
### Live video
-The browser posts an SDP offer to `/api/simulators/{udid}/webrtc/offer`. The handler in `transport::webrtc` ensures the per-UDID `SimulatorSession` is started, waits up to ~3 s for the first H.264 keyframe, returns an SDP answer, and writes the simulator frame source to a WebRTC video track.
+The browser posts an SDP offer to `/api/simulators/{udid}/webrtc/offer`. The handler in `transport::webrtc` starts the selected frame source, waits for the first H.264 keyframe, returns an SDP answer, and writes H.264 samples to a WebRTC video track. For Android, that source is emulator gRPC raw pixels passed through the shared VideoToolbox encoder path.
### Input
diff --git a/docs/guide/installation.md b/docs/guide/installation.md
index 43e3392..0fabaa3 100644
--- a/docs/guide/installation.md
+++ b/docs/guide/installation.md
@@ -10,6 +10,7 @@ SimDeck only runs on macOS. The native bridge links private `CoreSimulator` and
| ---------------------------------- | ------------------------------------------------------------------------------------ |
| **macOS 13+** | Required for current `CoreSimulator` and Apple's VideoToolbox H.264 encoder. |
| **Xcode + iOS Simulator runtimes** | The native bridge invokes `xcrun simctl` and the Simulator app. |
+| **Android SDK tools** | Optional. Required for Android emulator support (`emulator`, `adb`, and AVD images). |
| **Node.js ≥ 18** | The launcher (`bin/simdeck.mjs`) and the bundled client tooling. |
| **Rust (stable)** | Required only when building from source. Installed via [rustup](https://rustup.rs/). |
diff --git a/server/Cargo.lock b/server/Cargo.lock
index 3b8e980..a015723 100644
--- a/server/Cargo.lock
+++ b/server/Cargo.lock
@@ -150,6 +150,28 @@ dependencies = [
"syn",
]
+[[package]]
+name = "async-stream"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476"
+dependencies = [
+ "async-stream-impl",
+ "futures-core",
+ "pin-project-lite",
+]
+
+[[package]]
+name = "async-stream-impl"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "async-trait"
version = "0.1.89"
@@ -173,13 +195,40 @@ version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
+[[package]]
+name = "axum"
+version = "0.7.9"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f"
+dependencies = [
+ "async-trait",
+ "axum-core 0.4.5",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "itoa",
+ "matchit 0.7.3",
+ "memchr",
+ "mime",
+ "percent-encoding",
+ "pin-project-lite",
+ "rustversion",
+ "serde",
+ "sync_wrapper",
+ "tower 0.5.3",
+ "tower-layer",
+ "tower-service",
+]
+
[[package]]
name = "axum"
version = "0.8.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "31b698c5f9a010f6573133b09e0de5408834d0c82f8d7475a89fc1867a71cd90"
dependencies = [
- "axum-core",
+ "axum-core 0.5.6",
"base64",
"bytes",
"form_urlencoded",
@@ -190,7 +239,7 @@ dependencies = [
"hyper",
"hyper-util",
"itoa",
- "matchit",
+ "matchit 0.8.4",
"memchr",
"mime",
"percent-encoding",
@@ -203,12 +252,32 @@ dependencies = [
"sync_wrapper",
"tokio",
"tokio-tungstenite",
- "tower",
+ "tower 0.5.3",
"tower-layer",
"tower-service",
"tracing",
]
+[[package]]
+name = "axum-core"
+version = "0.4.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199"
+dependencies = [
+ "async-trait",
+ "bytes",
+ "futures-util",
+ "http",
+ "http-body",
+ "http-body-util",
+ "mime",
+ "pin-project-lite",
+ "rustversion",
+ "sync_wrapper",
+ "tower-layer",
+ "tower-service",
+]
+
[[package]]
name = "axum-core"
version = "0.5.6"
@@ -561,6 +630,12 @@ dependencies = [
"spki",
]
+[[package]]
+name = "either"
+version = "1.15.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719"
+
[[package]]
name = "elliptic-curve"
version = "0.13.8"
@@ -620,6 +695,12 @@ version = "0.1.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5baebc0774151f905a1a2cc41989300b1e6fbb29aff0ceffa1064fdd3088d582"
+[[package]]
+name = "fnv"
+version = "1.0.7"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1"
+
[[package]]
name = "foldhash"
version = "0.1.5"
@@ -791,6 +872,31 @@ dependencies = [
"subtle",
]
+[[package]]
+name = "h2"
+version = "0.4.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "171fefbc92fe4a4de27e0698d6a5b392d6a0e333506bc49133760b3bcf948733"
+dependencies = [
+ "atomic-waker",
+ "bytes",
+ "fnv",
+ "futures-core",
+ "futures-sink",
+ "http",
+ "indexmap 2.14.0",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tracing",
+]
+
+[[package]]
+name = "hashbrown"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888"
+
[[package]]
name = "hashbrown"
version = "0.15.5"
@@ -897,6 +1003,7 @@ dependencies = [
"bytes",
"futures-channel",
"futures-core",
+ "h2",
"http",
"http-body",
"httparse",
@@ -905,6 +1012,20 @@ dependencies = [
"pin-project-lite",
"smallvec",
"tokio",
+ "want",
+]
+
+[[package]]
+name = "hyper-timeout"
+version = "0.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0"
+dependencies = [
+ "hyper",
+ "hyper-util",
+ "pin-project-lite",
+ "tokio",
+ "tower-service",
]
[[package]]
@@ -914,12 +1035,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96547c2556ec9d12fb1578c4eaf448b04993e7fb79cbaad930a656880a6bdfa0"
dependencies = [
"bytes",
+ "futures-channel",
+ "futures-util",
"http",
"http-body",
"hyper",
+ "libc",
"pin-project-lite",
+ "socket2 0.6.3",
"tokio",
"tower-service",
+ "tracing",
]
[[package]]
@@ -1031,6 +1157,16 @@ dependencies = [
"icu_properties",
]
+[[package]]
+name = "indexmap"
+version = "1.9.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bd070e393353796e801d209ad339e89596eb4c8d430d18ede6a1cced8fafbd99"
+dependencies = [
+ "autocfg",
+ "hashbrown 0.12.3",
+]
+
[[package]]
name = "indexmap"
version = "2.14.0"
@@ -1085,6 +1221,15 @@ version = "1.70.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a6cb138bb79a146c1bd460005623e142ef0181e3d0219cb493e02f7d08a35695"
+[[package]]
+name = "itertools"
+version = "0.14.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2b192c782037fadd9cfa75548310488aabdbf3d2da73885b31bd0abd03351285"
+dependencies = [
+ "either",
+]
+
[[package]]
name = "itoa"
version = "1.0.18"
@@ -1149,6 +1294,12 @@ dependencies = [
"regex-automata",
]
+[[package]]
+name = "matchit"
+version = "0.7.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
+
[[package]]
name = "matchit"
version = "0.8.4"
@@ -1378,6 +1529,26 @@ version = "2.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220"
+[[package]]
+name = "pin-project"
+version = "1.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "cbf0d9e68100b3a7989b4901972f265cd542e560a3a8a724e1e20322f4d06ce9"
+dependencies = [
+ "pin-project-internal",
+]
+
+[[package]]
+name = "pin-project-internal"
+version = "1.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a990e22f43e84855daf260dded30524ef4a9021cc7541c26540500a50b624389"
+dependencies = [
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "pin-project-lite"
version = "0.2.17"
@@ -1470,6 +1641,29 @@ dependencies = [
"unicode-ident",
]
+[[package]]
+name = "prost"
+version = "0.13.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5"
+dependencies = [
+ "bytes",
+ "prost-derive",
+]
+
+[[package]]
+name = "prost-derive"
+version = "0.13.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d"
+dependencies = [
+ "anyhow",
+ "itertools",
+ "proc-macro2",
+ "quote",
+ "syn",
+]
+
[[package]]
name = "quote"
version = "1.0.45"
@@ -1626,6 +1820,12 @@ dependencies = [
"windows-sys 0.52.0",
]
+[[package]]
+name = "roxmltree"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "6c20b6793b5c2fa6553b250154b78d6d0db37e72700ae35fad9387a46f487c97"
+
[[package]]
name = "rtcp"
version = "0.12.0"
@@ -1882,7 +2082,7 @@ name = "simdeck-server"
version = "0.1.0"
dependencies = [
"anyhow",
- "axum",
+ "axum 0.8.9",
"base64",
"bytes",
"cc",
@@ -1891,12 +2091,15 @@ dependencies = [
"hex",
"http",
"libc",
+ "prost",
+ "roxmltree",
"serde",
"serde_json",
"sha2",
"thiserror 2.0.18",
"tokio",
"tokio-stream",
+ "tonic",
"tower-http",
"tracing",
"tracing-subscriber",
@@ -2182,6 +2385,56 @@ dependencies = [
"tokio",
]
+[[package]]
+name = "tonic"
+version = "0.12.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52"
+dependencies = [
+ "async-stream",
+ "async-trait",
+ "axum 0.7.9",
+ "base64",
+ "bytes",
+ "h2",
+ "http",
+ "http-body",
+ "http-body-util",
+ "hyper",
+ "hyper-timeout",
+ "hyper-util",
+ "percent-encoding",
+ "pin-project",
+ "prost",
+ "socket2 0.5.10",
+ "tokio",
+ "tokio-stream",
+ "tower 0.4.13",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
+[[package]]
+name = "tower"
+version = "0.4.13"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c"
+dependencies = [
+ "futures-core",
+ "futures-util",
+ "indexmap 1.9.3",
+ "pin-project",
+ "pin-project-lite",
+ "rand 0.8.6",
+ "slab",
+ "tokio",
+ "tokio-util",
+ "tower-layer",
+ "tower-service",
+ "tracing",
+]
+
[[package]]
name = "tower"
version = "0.5.3"
@@ -2298,6 +2551,12 @@ dependencies = [
"tracing-log",
]
+[[package]]
+name = "try-lock"
+version = "0.2.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b"
+
[[package]]
name = "tungstenite"
version = "0.29.0"
@@ -2431,6 +2690,15 @@ dependencies = [
"atomic-waker",
]
+[[package]]
+name = "want"
+version = "0.3.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bfa7760aed19e106de2c7c0b581b509f2f25d3dacaf737cb82ac61bc6d760b0e"
+dependencies = [
+ "try-lock",
+]
+
[[package]]
name = "wasi"
version = "0.11.1+wasi-snapshot-preview1"
@@ -2517,7 +2785,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bb0e353e6a2fbdc176932bbaab493762eb1255a7900fe0fea1a2f96c296cc909"
dependencies = [
"anyhow",
- "indexmap",
+ "indexmap 2.14.0",
"wasm-encoder",
"wasmparser",
]
@@ -2530,7 +2798,7 @@ checksum = "47b807c72e1bac69382b3a6fb3dbe8ea4c0ed87ff5629b8685ae6b9a611028fe"
dependencies = [
"bitflags 2.11.1",
"hashbrown 0.15.5",
- "indexmap",
+ "indexmap 2.14.0",
"semver",
]
@@ -2887,7 +3155,7 @@ checksum = "b7c566e0f4b284dd6561c786d9cb0142da491f46a9fbed79ea69cdad5db17f21"
dependencies = [
"anyhow",
"heck",
- "indexmap",
+ "indexmap 2.14.0",
"prettyplease",
"syn",
"wasm-metadata",
@@ -2918,7 +3186,7 @@ checksum = "9d66ea20e9553b30172b5e831994e35fbde2d165325bec84fc43dbf6f4eb9cb2"
dependencies = [
"anyhow",
"bitflags 2.11.1",
- "indexmap",
+ "indexmap 2.14.0",
"log",
"serde",
"serde_derive",
@@ -2937,7 +3205,7 @@ checksum = "ecc8ac4bc1dc3381b7f59c34f00b67e18f910c2c0f50015669dde7def656a736"
dependencies = [
"anyhow",
"id-arena",
- "indexmap",
+ "indexmap 2.14.0",
"log",
"semver",
"serde",
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 37fd0c4..e1ebe5b 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -14,12 +14,15 @@ futures = "0.3"
hex = "0.4"
http = "1.1"
libc = "0.2"
+prost = "0.13"
+roxmltree = "0.20"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
sha2 = "0.10"
thiserror = "2.0"
tokio = { version = "1.42", features = ["fs", "io-util", "macros", "process", "rt-multi-thread", "signal", "sync", "time"] }
tokio-stream = "0.1"
+tonic = { version = "0.12", features = ["transport"] }
tower-http = { version = "0.6", features = ["cors", "fs", "trace"] }
tracing = "0.1"
tracing-subscriber = { version = "0.3", features = ["env-filter", "fmt"] }
diff --git a/server/src/android.rs b/server/src/android.rs
new file mode 100644
index 0000000..05cf00f
--- /dev/null
+++ b/server/src/android.rs
@@ -0,0 +1,1355 @@
+use crate::error::AppError;
+use bytes::BytesMut;
+use http::uri::PathAndQuery;
+use serde_json::{json, Value};
+use std::collections::{HashMap, HashSet};
+use std::env;
+use std::ffi::OsString;
+use std::future::Future;
+use std::path::{Path, PathBuf};
+use std::process::{Command, Stdio};
+use std::sync::{Mutex, OnceLock};
+use std::thread;
+use std::time::{Duration, Instant};
+use tonic::metadata::MetadataValue;
+use tonic::transport::{Channel, Endpoint};
+
+const ANDROID_ID_PREFIX: &str = "android:";
+const DEFAULT_GRPC_PORT_BASE: u16 = 8554;
+const DEFAULT_ANDROID_STREAM_MAX_EDGE: u32 = 960;
+const ANDROID_TOUCH_IDENTIFIER: i32 = 1;
+const RUNNING_EMULATOR_CACHE_TTL: Duration = Duration::from_secs(2);
+const AVD_GRPC_PORT_CACHE_TTL: Duration = Duration::from_secs(60);
+const SCREEN_SIZE_CACHE_TTL: Duration = Duration::from_secs(60);
+
+#[derive(Clone, Default)]
+pub struct AndroidBridge;
+
+#[derive(Clone, Debug)]
+pub struct AndroidDevice {
+ pub avd_name: String,
+ pub serial: Option,
+ pub is_booted: bool,
+ pub grpc_port: u16,
+}
+
+#[derive(Debug)]
+pub struct AndroidFrame {
+ pub width: u32,
+ pub height: u32,
+ pub seq: u32,
+ pub timestamp_us: u64,
+ pub rgba: Vec<u8>,
+}
+
+pub struct AndroidGrpcFrameStream {
+ inner: tonic::Streaming<grpc::Image>,
+}
+
+pub fn is_android_id(id: &str) -> bool {
+ id.starts_with(ANDROID_ID_PREFIX)
+}
+
+pub fn avd_from_id(id: &str) -> Result<String, AppError> {
+ id.strip_prefix(ANDROID_ID_PREFIX)
+ .filter(|value| !value.trim().is_empty())
+ .map(ToOwned::to_owned)
+ .ok_or_else(|| AppError::bad_request(format!("Invalid Android emulator id `{id}`.")))
+}
+
+pub fn id_for_avd(avd_name: &str) -> String {
+ format!("{ANDROID_ID_PREFIX}{avd_name}")
+}
+
+impl AndroidBridge {
+ pub fn list_devices(&self) -> Result<Vec<AndroidDevice>, AppError> {
+ if !self.emulator_path().exists() {
+ return Ok(Vec::new());
+ }
+
+ let avds = self
+ .run_emulator(["-list-avds"])?
+ .lines()
+ .map(str::trim)
+ .filter(|line| !line.is_empty())
+ .map(ToOwned::to_owned)
+ .collect::<Vec<_>>();
+ if avds.is_empty() {
+ return Ok(Vec::new());
+ }
+
+ let running = self.running_emulators().unwrap_or_default();
+ Ok(avds
+ .into_iter()
+ .enumerate()
+ .map(|(index, avd_name)| AndroidDevice {
+ serial: running.get(&avd_name).cloned(),
+ is_booted: running.contains_key(&avd_name),
+ grpc_port: DEFAULT_GRPC_PORT_BASE + index as u16,
+ avd_name,
+ })
+ .collect())
+ }
+
+ pub fn enrich_devices(&self, devices: Vec<AndroidDevice>) -> Vec<Value> {
+ devices
+ .into_iter()
+ .map(|device| self.device_value(device))
+ .collect()
+ }
+
+ pub fn boot(&self, id: &str) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ if self.resolve_serial(&avd_name).is_ok() {
+ return Ok(());
+ }
+ let grpc_port = self.grpc_port_for_avd(&avd_name)?;
+ Command::new(self.emulator_path())
+ .args([
+ "-avd",
+ &avd_name,
+ "-no-window",
+ "-no-audio",
+ "-gpu",
+ "swiftshader_indirect",
+ "-grpc",
+ &grpc_port.to_string(),
+ ])
+ .stdin(Stdio::null())
+ .stdout(Stdio::null())
+ .stderr(Stdio::null())
+ .spawn()
+ .map_err(|error| {
+ AppError::native(format!(
+ "Unable to start Android emulator `{avd_name}`: {error}"
+ ))
+ })?;
+ Ok(())
+ }
+
+ pub fn shutdown(&self, id: &str) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ let serial = self.resolve_serial(&avd_name)?;
+ let _ = self.run_adb(["-s", &serial, "emu", "kill"])?;
+ Ok(())
+ }
+
+ pub fn erase(&self, id: &str) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ if self.resolve_serial(&avd_name).is_ok() {
+ return Err(AppError::bad_request(
+ "Shutdown the Android emulator before erasing it.",
+ ));
+ }
+ let avd_dir = self.avd_dir(&avd_name);
+ for file_name in [
+ "userdata-qemu.img",
+ "cache.img",
+ "data.img",
+ "sdcard.img",
+ "snapshots.img",
+ ] {
+ let path = avd_dir.join(file_name);
+ if path.exists() {
+ std::fs::remove_file(&path).map_err(|error| {
+ AppError::native(format!("Unable to remove {}: {error}", path.display()))
+ })?;
+ }
+ }
+ Ok(())
+ }
+
+ pub fn wait_until_booted(&self, id: &str, timeout_duration: Duration) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ let deadline = Instant::now() + timeout_duration;
+ loop {
+ if let Ok(serial) = self.resolve_serial(&avd_name) {
+ if self
+ .run_adb(["-s", &serial, "shell", "getprop", "sys.boot_completed"])
+ .unwrap_or_default()
+ .trim()
+ == "1"
+ {
+ return Ok(());
+ }
+ }
+ if Instant::now() >= deadline {
+ return Err(AppError::native(format!(
+ "Android emulator `{avd_name}` did not finish booting in time."
+ )));
+ }
+ thread::sleep(Duration::from_millis(500));
+ }
+ }
+
+ pub fn screenshot_png(&self, id: &str) -> Result<Vec<u8>, AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb_bytes(["-s", &serial, "exec-out", "screencap", "-p"])
+ }
+
+ pub fn install_app(&self, id: &str, app_path: &str) -> Result<(), AppError> {
+ if !app_path.ends_with(".apk") {
+ return Err(AppError::bad_request(
+ "Android install expects an `.apk` path.",
+ ));
+ }
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "install", "-r", app_path])?;
+ Ok(())
+ }
+
+ pub fn uninstall_app(&self, id: &str, package_name: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "uninstall", package_name])?;
+ Ok(())
+ }
+
+ pub fn open_url(&self, id: &str, url: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "am",
+ "start",
+ "-a",
+ "android.intent.action.VIEW",
+ "-d",
+ url,
+ ])?;
+ Ok(())
+ }
+
+ pub fn launch_package(&self, id: &str, package: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "monkey",
+ "-p",
+ package,
+ "-c",
+ "android.intent.category.LAUNCHER",
+ "1",
+ ])?;
+ Ok(())
+ }
+
+ pub fn set_pasteboard_text(&self, id: &str, text: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb_shell(&serial, &format!("cmd clipboard set {}", shell_quote(text)))?;
+ Ok(())
+ }
+
+ pub fn pasteboard_text(&self, id: &str) -> Result<String, AppError> {
+ let serial = self.serial_for_id(id)?;
+ Ok(self.run_adb_shell(&serial, "cmd clipboard get")?)
+ }
+
+ pub fn send_touch(&self, id: &str, x: f64, y: f64, phase: &str) -> Result<(), AppError> {
+ if self.send_touch_grpc(id, x, y, phase).is_ok() {
+ return Ok(());
+ }
+ if phase != "ended" && phase != "cancelled" {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let px = (x.clamp(0.0, 1.0) * (width - 1.0)).round().max(0.0);
+ let py = (y.clamp(0.0, 1.0) * (height - 1.0)).round().max(0.0);
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "input",
+ "tap",
+ &px.to_string(),
+ &py.to_string(),
+ ])?;
+ Ok(())
+ }
+
+ pub fn send_swipe(
+ &self,
+ id: &str,
+ start_x: f64,
+ start_y: f64,
+ end_x: f64,
+ end_y: f64,
+ duration_ms: u64,
+ ) -> Result<(), AppError> {
+ if self
+ .send_swipe_grpc(id, start_x, start_y, end_x, end_y, duration_ms)
+ .is_ok()
+ {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let coords = [start_x, start_y, end_x, end_y]
+ .into_iter()
+ .enumerate()
+ .map(|(index, value)| {
+ let max = if index % 2 == 0 {
+ width - 1.0
+ } else {
+ height - 1.0
+ };
+ (value.clamp(0.0, 1.0) * max).round().max(0.0).to_string()
+ })
+ .collect::<Vec<_>>();
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "input",
+ "swipe",
+ &coords[0],
+ &coords[1],
+ &coords[2],
+ &coords[3],
+ &duration_ms.to_string(),
+ ])?;
+ Ok(())
+ }
+
+ pub fn send_key(&self, id: &str, key_code: u16, _modifiers: u32) -> Result<(), AppError> {
+ if self
+ .send_key_grpc(id, grpc::KeyboardEvent::usb_keypress(i32::from(key_code)))
+ .is_ok()
+ {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let android_key = android_key_code(key_code);
+ self.run_adb([
+ "-s",
+ &serial,
+ "shell",
+ "input",
+ "keyevent",
+ &android_key.to_string(),
+ ])?;
+ Ok(())
+ }
+
+ pub fn type_text(&self, id: &str, text: &str) -> Result<(), AppError> {
+ if self
+ .send_key_grpc(id, grpc::KeyboardEvent::text(text.to_owned()))
+ .is_ok()
+ {
+ return Ok(());
+ }
+ let serial = self.serial_for_id(id)?;
+ let escaped = text.replace('%', "%25").replace(' ', "%s");
+ self.run_adb(["-s", &serial, "shell", "input", "text", &escaped])?;
+ Ok(())
+ }
+
+ pub fn press_home(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "3"])?;
+ Ok(())
+ }
+
+ pub fn open_app_switcher(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "187"])?;
+ Ok(())
+ }
+
+ pub fn press_button(&self, id: &str, button: &str, duration_ms: u32) -> Result<(), AppError> {
+ match button {
+ "home" => self.press_home(id),
+ "lock" | "side-button" => {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "26"])?;
+ if duration_ms > 500 {
+ thread::sleep(Duration::from_millis(u64::from(duration_ms)));
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "26"])?;
+ }
+ Ok(())
+ }
+ "back" => {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "shell", "input", "keyevent", "4"])?;
+ Ok(())
+ }
+ _ => Err(AppError::bad_request(format!(
+ "Unsupported Android hardware button `{button}`."
+ ))),
+ }
+ }
+
+ pub fn rotate_right(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ self.run_adb(["-s", &serial, "emu", "rotate"])?;
+ Ok(())
+ }
+
+ pub fn toggle_appearance(&self, id: &str) -> Result<(), AppError> {
+ let serial = self.serial_for_id(id)?;
+ let current = self.run_adb_shell(&serial, "cmd uimode night")?;
+ let mode = if current.to_lowercase().contains("yes") {
+ "no"
+ } else {
+ "yes"
+ };
+ self.run_adb(["-s", &serial, "shell", "cmd", "uimode", "night", mode])?;
+ Ok(())
+ }
+
+ pub fn logs(&self, id: &str, limit: usize) -> Result<Vec<Value>, AppError> {
+ let serial = self.serial_for_id(id)?;
+ let raw = self.run_adb([
+ "-s",
+ &serial,
+ "logcat",
+ "-d",
+ "-v",
+ "threadtime",
+ "-t",
+ &limit.max(1).to_string(),
+ ])?;
+ Ok(raw
+ .lines()
+ .map(|line| {
+ json!({
+ "timestamp": "",
+ "level": android_log_level(line),
+ "process": "",
+ "pid": Value::Null,
+ "subsystem": "android",
+ "category": "logcat",
+ "message": line,
+ })
+ })
+ .collect())
+ }
+
+ pub fn chrome_profile(&self, id: &str) -> Result<Value, AppError> {
+ let serial = self.serial_for_id(id)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let horizontal_bezel = (width * 0.055).clamp(48.0, 80.0);
+ let vertical_bezel = (height * 0.04).clamp(64.0, 104.0);
+ Ok(json!({
+ "totalWidth": width + horizontal_bezel * 2.0,
+ "totalHeight": height + vertical_bezel * 2.0,
+ "screenX": horizontal_bezel,
+ "screenY": vertical_bezel,
+ "screenWidth": width,
+ "screenHeight": height,
+ "cornerRadius": (width * 0.055).clamp(32.0, 56.0),
+ "hasScreenMask": false,
+ "chromeStyle": "css-android",
+ }))
+ }
+
+ pub async fn grpc_frame_stream(
+ &self,
+ id: &str,
+ max_edge: Option<u32>,
+ ) -> Result<AndroidGrpcFrameStream, AppError> {
+ let avd_name = avd_from_id(id)?;
+ let port = self.grpc_port_for_avd(&avd_name)?;
+ let mut format = grpc::ImageFormat {
+ format: grpc::image_format::ImgFormat::Rgba8888 as i32,
+ width: 0,
+ height: 0,
+ display: 0,
+ transport: None,
+ };
+ if let Ok(serial) = self.resolve_serial(&avd_name) {
+ if let Ok((width, height)) = self.screen_size_for_serial(&serial) {
+ let max_edge = max_edge
+ .unwrap_or(DEFAULT_ANDROID_STREAM_MAX_EDGE)
+ .clamp(240, 2400) as f64;
+ let largest = width.max(height);
+ if largest > max_edge {
+ let scale = max_edge / largest;
+ format.width = (width * scale).round().max(1.0) as u32;
+ format.height = (height * scale).round().max(1.0) as u32;
+ }
+ }
+ }
+
+ let endpoint = Endpoint::from_shared(format!("http://127.0.0.1:{port}"))
+ .map_err(|error| AppError::native(format!("Invalid Android gRPC endpoint: {error}")))?
+ .connect()
+ .await
+ .map_err(|error| {
+ AppError::native(format!(
+ "Unable to connect to Android emulator gRPC: {error}"
+ ))
+ })?;
+ let mut grpc = tonic::client::Grpc::new(endpoint);
+ grpc.ready().await.map_err(|error| {
+ AppError::native(format!("Android emulator gRPC is not ready: {error}"))
+ })?;
+ let path = PathAndQuery::from_static(
+ "/android.emulation.control.EmulatorController/streamScreenshot",
+ );
+ let mut request = tonic::Request::new(format);
+ if let Some(token) = emulator_grpc_token(port) {
+ let value = MetadataValue::try_from(format!("Bearer {token}")).map_err(|error| {
+ AppError::native(format!("Invalid Android emulator gRPC token: {error}"))
+ })?;
+ request.metadata_mut().insert("authorization", value);
+ }
+ let response = grpc
+ .server_streaming(request, path, tonic::codec::ProstCodec::default())
+ .await
+ .map_err(|error| {
+ AppError::native(format!(
+ "Android emulator screenshot stream failed: {error}"
+ ))
+ })?;
+ Ok(AndroidGrpcFrameStream {
+ inner: response.into_inner(),
+ })
+ }
+
+ pub fn accessibility_tree(
+ &self,
+ id: &str,
+ max_depth: Option<usize>,
+ ) -> Result<Value, AppError> {
+ let serial = self.serial_for_id(id)?;
+ let raw = self.run_adb_shell(
+ &serial,
+ "uiautomator dump /sdcard/simdeck_ui.xml >/dev/null && cat /sdcard/simdeck_ui.xml",
+ )?;
+ let xml = extract_xml(&raw);
+ let document = roxmltree::Document::parse(xml).map_err(|error| {
+ AppError::native(format!("Unable to parse UIAutomator XML: {error}"))
+ })?;
+ let mut roots = Vec::new();
+ let root = document.root_element();
+ let max_depth = max_depth.unwrap_or(80).min(80);
+ for child in root.children().filter(|node| node.has_tag_name("node")) {
+ roots.push(android_node_value(child, 0, max_depth));
+ }
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ if roots.is_empty() {
+ roots.push(json!({
+ "type": "screen",
+ "role": "screen",
+ "frame": frame_value(0.0, 0.0, width, height),
+ "children": [],
+ }));
+ }
+ Ok(json!({
+ "source": "android-uiautomator",
+ "availableSources": ["android-uiautomator"],
+ "roots": roots,
+ }))
+ }
+
+ fn send_touch_grpc(&self, id: &str, x: f64, y: f64, phase: &str) -> Result<(), AppError> {
+ self.block_on_grpc(self.send_touch_grpc_async(id, x, y, phase))
+ }
+
+ async fn send_touch_grpc_async(
+ &self,
+ id: &str,
+ x: f64,
+ y: f64,
+ phase: &str,
+ ) -> Result<(), AppError> {
+ let avd_name = avd_from_id(id)?;
+ let serial = self.resolve_serial(&avd_name)?;
+ let (width, height) = self.screen_size_for_serial(&serial)?;
+ let pressure = match phase {
+ "began" | "moved" => 1,
+ "ended" | "cancelled" => 0,
+ _ => return Ok(()),
+ };
+ let event = grpc::TouchEvent {
+ touches: vec![grpc::Touch {
+ x: normalized_to_pixel(x, width),
+ y: normalized_to_pixel(y, height),
+ identifier: ANDROID_TOUCH_IDENTIFIER,
+ pressure,
+ touch_major: 8,
+ touch_minor: 8,
+ expiration: grpc::touch::EventExpiration::NeverExpire as i32,
+ orientation: 0,
+ }],
+ display: 0,
+ };
+ self.grpc_unary_for_avd::<_, grpc::Empty>(
+ &avd_name,
+ "/android.emulation.control.EmulatorController/sendTouch",
+ event,
+ )
+ .await?;
+ Ok(())
+ }
+
+ fn send_swipe_grpc(
+ &self,
+ id: &str,
+ start_x: f64,
+ start_y: f64,
+ end_x: f64,
+ end_y: f64,
+ duration_ms: u64,
+ ) -> Result<(), AppError> {
+ let duration_ms = duration_ms.clamp(50, 1500);
+ let steps = (duration_ms / 8).clamp(4, 120);
+ self.send_touch_grpc(id, start_x, start_y, "began")?;
+ for step in 1..steps {
+ let t = step as f64 / steps as f64;
+ self.send_touch_grpc(
+ id,
+ start_x + (end_x - start_x) * t,
+ start_y + (end_y - start_y) * t,
+ "moved",
+ )?;
+ thread::sleep(Duration::from_millis((duration_ms / steps).max(1)));
+ }
+ self.send_touch_grpc(id, end_x, end_y, "ended")
+ }
+
+ fn send_key_grpc(&self, id: &str, event: grpc::KeyboardEvent) -> Result<(), AppError> {
+ self.block_on_grpc(async {
+ let avd_name = avd_from_id(id)?;
+ self.grpc_unary_for_avd::<_, grpc::Empty>(
+ &avd_name,
+ "/android.emulation.control.EmulatorController/sendKey",
+ event,
+ )
+ .await?;
+ Ok(())
+ })
+ }
+
+ fn block_on_grpc<T, F>(&self, future: F) -> Result<T, AppError>
+ where
+ F: Future<Output = Result<T, AppError>>,
+ {
+ if let Ok(handle) = tokio::runtime::Handle::try_current() {
+ return handle.block_on(future);
+ }
+ tokio::runtime::Builder::new_current_thread()
+ .enable_all()
+ .build()
+ .map_err(|error| AppError::internal(format!("Unable to create gRPC runtime: {error}")))?
+ .block_on(future)
+ }
+
+ async fn grpc_unary_for_avd<Req, Resp>(
+ &self,
+ avd_name: &str,
+ path: &'static str,
+ request: Req,
+ ) -> Result<Resp, AppError>
+ where
+ Req: prost::Message + Default + Send + 'static,
+ Resp: prost::Message + Default + Send + 'static,
+ {
+ let port = self.grpc_port_for_avd(avd_name)?;
+ let channel = grpc_channel_for_port(port)?;
+ let mut grpc = tonic::client::Grpc::new(channel);
+ grpc.ready().await.map_err(|error| {
+ AppError::native(format!("Android emulator gRPC is not ready: {error}"))
+ })?;
+ let mut request = tonic::Request::new(request);
+ if let Some(token) = emulator_grpc_token(port) {
+ let value = MetadataValue::try_from(format!("Bearer {token}")).map_err(|error| {
+ AppError::native(format!("Invalid Android emulator gRPC token: {error}"))
+ })?;
+ request.metadata_mut().insert("authorization", value);
+ }
+ let response = grpc
+ .unary(
+ request,
+ PathAndQuery::from_static(path),
+ tonic::codec::ProstCodec::default(),
+ )
+ .await
+ .map_err(|error| {
+ AppError::native(format!("Android emulator gRPC input failed: {error}"))
+ })?;
+ Ok(response.into_inner())
+ }
+
+ fn device_value(&self, device: AndroidDevice) -> Value {
+ let id = id_for_avd(&device.avd_name);
+ let private_display = if let Some(serial) = device.serial.as_deref() {
+ let (width, height) = self.screen_size_for_serial(serial).unwrap_or((0.0, 0.0));
+ json!({
+ "displayReady": width > 0.0 && height > 0.0,
+ "displayStatus": "Ready",
+ "displayWidth": width,
+ "displayHeight": height,
+ "frameSequence": 0,
+ "rotationQuarterTurns": 0,
+ })
+ } else {
+ json!({
+ "displayReady": false,
+ "displayStatus": "Boot required",
+ "displayWidth": 0,
+ "displayHeight": 0,
+ "frameSequence": 0,
+ "rotationQuarterTurns": 0,
+ })
+ };
+ json!({
+ "udid": id,
+ "id": id,
+ "platform": "android-emulator",
+ "name": device.avd_name,
+ "state": if device.is_booted { "Booted" } else { "Shutdown" },
+ "isBooted": device.is_booted,
+ "isAvailable": true,
+ "lastBootedAt": Value::Null,
+ "dataPath": self.avd_dir(&device.avd_name),
+ "logPath": Value::Null,
+ "deviceTypeIdentifier": "android-emulator",
+ "deviceTypeName": "Android Emulator",
+ "runtimeIdentifier": "android",
+ "runtimeName": "Android",
+ "android": {
+ "avdName": device.avd_name,
+ "serial": device.serial,
+ "grpcPort": device.grpc_port,
+ },
+ "privateDisplay": private_display,
+ })
+ }
+
+ fn serial_for_id(&self, id: &str) -> Result {
+ self.resolve_serial(&avd_from_id(id)?)
+ }
+
+ fn resolve_serial(&self, avd_name: &str) -> Result {
+ self.running_emulators()?.remove(avd_name).ok_or_else(|| {
+ AppError::native(format!("Android emulator `{avd_name}` is not running."))
+ })
+ }
+
+ fn running_emulators(&self) -> Result, AppError> {
+ static CACHE: OnceLock)>>> = OnceLock::new();
+ let cache = CACHE.get_or_init(|| Mutex::new(None));
+ if let Some((updated_at, running)) = cache.lock().unwrap().as_ref() {
+ if updated_at.elapsed() < RUNNING_EMULATOR_CACHE_TTL {
+ return Ok(running.clone());
+ }
+ }
+ if !self.adb_path().exists() {
+ return Ok(HashMap::new());
+ }
+ let output = self.run_adb(["devices"])?;
+ let mut result = HashMap::new();
+ for line in output.lines().skip(1) {
+ let mut parts = line.split_whitespace();
+ let Some(serial) = parts.next() else { continue };
+ let Some(state) = parts.next() else { continue };
+ if state != "device" || !serial.starts_with("emulator-") {
+ continue;
+ }
+ if let Ok(name_output) = self.run_adb(["-s", serial, "emu", "avd", "name"]) {
+ if let Some(name) = name_output
+ .lines()
+ .map(str::trim)
+ .find(|line| !line.is_empty() && *line != "OK")
+ {
+ result.insert(name.to_owned(), serial.to_owned());
+ }
+ }
+ }
+ *cache.lock().unwrap() = Some((Instant::now(), result.clone()));
+ Ok(result)
+ }
+
+ fn grpc_port_for_avd(&self, avd_name: &str) -> Result {
+ static CACHE: OnceLock)>>> = OnceLock::new();
+ let cache = CACHE.get_or_init(|| Mutex::new(None));
+ if let Some((updated_at, ports)) = cache.lock().unwrap().as_ref() {
+ if updated_at.elapsed() < AVD_GRPC_PORT_CACHE_TTL {
+ if let Some(port) = ports.get(avd_name) {
+ return Ok(*port);
+ }
+ }
+ }
+
+ let ports = self
+ .run_emulator(["-list-avds"])?
+ .lines()
+ .map(str::trim)
+ .filter(|line| !line.is_empty())
+ .enumerate()
+ .map(|(index, name)| (name.to_owned(), DEFAULT_GRPC_PORT_BASE + index as u16))
+ .collect::>();
+ let port = ports
+ .get(avd_name)
+ .copied()
+ .ok_or_else(|| AppError::not_found(format!("Unknown Android AVD `{avd_name}`.")))?;
+ *cache.lock().unwrap() = Some((Instant::now(), ports));
+ Ok(port)
+ }
+
+ fn screen_size_for_serial(&self, serial: &str) -> Result<(f64, f64), AppError> {
+ static CACHE: OnceLock>> = OnceLock::new();
+ let cache = CACHE.get_or_init(|| Mutex::new(HashMap::new()));
+ if let Some((updated_at, size)) = cache.lock().unwrap().get(serial) {
+ if updated_at.elapsed() < SCREEN_SIZE_CACHE_TTL {
+ return Ok(*size);
+ }
+ }
+ let output = self.run_adb(["-s", serial, "shell", "wm", "size"])?;
+ let size = output
+ .split_whitespace()
+ .find(|part| part.contains('x'))
+ .ok_or_else(|| AppError::native("Android emulator did not report a screen size."))?;
+ let (width, height) = size
+ .split_once('x')
+ .ok_or_else(|| AppError::native("Android emulator reported an invalid screen size."))?;
+ let width = width
+ .parse::()
+ .map_err(|_| AppError::native("Android emulator reported an invalid width."))?;
+ let height = height
+ .parse::()
+ .map_err(|_| AppError::native("Android emulator reported an invalid height."))?;
+ cache
+ .lock()
+ .unwrap()
+ .insert(serial.to_owned(), (Instant::now(), (width, height)));
+ Ok((width, height))
+ }
+
+ fn run_adb_shell(&self, serial: &str, script: &str) -> Result {
+ self.run_adb(["-s", serial, "shell", script])
+ }
+
+ fn run_adb(&self, args: [&str; N]) -> Result {
+ run_command_text(self.adb_path(), args)
+ }
+
+ fn run_adb_bytes(&self, args: [&str; N]) -> Result, AppError> {
+ run_command_bytes(self.adb_path(), args)
+ }
+
+ fn run_emulator(&self, args: [&str; N]) -> Result {
+ run_command_text(self.emulator_path(), args)
+ }
+
+ fn adb_path(&self) -> PathBuf {
+ sdk_root().join("platform-tools/adb")
+ }
+
+ fn emulator_path(&self) -> PathBuf {
+ sdk_root().join("emulator/emulator")
+ }
+
+ fn avd_dir(&self, avd_name: &str) -> PathBuf {
+ home_dir().join(format!(".android/avd/{avd_name}.avd"))
+ }
+}
+
+impl AndroidGrpcFrameStream {
+ pub async fn next_frame(&mut self) -> Result, AppError> {
+ let Some(image) = self.inner.message().await.map_err(|error| {
+ AppError::native(format!(
+ "Android emulator screenshot stream failed: {error}"
+ ))
+ })?
+ else {
+ return Ok(None);
+ };
+ let format = image.format.ok_or_else(|| {
+ AppError::native("Android emulator screenshot did not include an image format.")
+ })?;
+ let width = if format.width > 0 {
+ format.width
+ } else {
+ image.width
+ };
+ let height = if format.height > 0 {
+ format.height
+ } else {
+ image.height
+ };
+ if width == 0 || height == 0 {
+ return Err(AppError::native(
+ "Android emulator screenshot did not include dimensions.",
+ ));
+ }
+ let rgba = rgba_display_order(
+ &image.image,
+ width,
+ height,
+ grpc::image_format::ImgFormat::try_from(format.format)
+ .unwrap_or(grpc::image_format::ImgFormat::Rgba8888),
+ )?;
+ Ok(Some(AndroidFrame {
+ width,
+ height,
+ seq: image.seq,
+ timestamp_us: image.timestamp_us,
+ rgba,
+ }))
+ }
+}
+
+fn run_command_text(program: PathBuf, args: [&str; N]) -> Result {
+ let output = run_command(program, args)?;
+ String::from_utf8(output)
+ .map_err(|error| AppError::native(format!("Command returned non-UTF8 output: {error}")))
+}
+
+fn run_command_bytes(
+ program: PathBuf,
+ args: [&str; N],
+) -> Result, AppError> {
+ run_command(program, args)
+}
+
+fn run_command(program: PathBuf, args: [&str; N]) -> Result, AppError> {
+ if !program.exists() {
+ return Err(AppError::native(format!(
+ "Android SDK binary not found at {}.",
+ program.display()
+ )));
+ }
+ let output = Command::new(&program)
+ .args(args)
+ .env("ANDROID_HOME", sdk_root())
+ .env("ANDROID_SDK_ROOT", sdk_root())
+ .env("JAVA_HOME", java_home())
+ .output()
+ .map_err(|error| {
+ AppError::native(format!("Unable to run {}: {error}", program.display()))
+ })?;
+ if output.status.success() {
+ return Ok(output.stdout);
+ }
+ let stderr = String::from_utf8_lossy(&output.stderr);
+ let stdout = String::from_utf8_lossy(&output.stdout);
+ Err(AppError::native(format!(
+ "{} failed: {}{}",
+ program
+ .file_name()
+ .and_then(|name| name.to_str())
+ .unwrap_or("Android command"),
+ stderr.trim(),
+ if stdout.trim().is_empty() {
+ String::new()
+ } else {
+ format!(" {}", stdout.trim())
+ }
+ )))
+}
+
+fn grpc_channel_for_port(port: u16) -> Result {
+ static CHANNELS: OnceLock>> = OnceLock::new();
+ let channels = CHANNELS.get_or_init(|| Mutex::new(HashMap::new()));
+ let mut channels = channels.lock().unwrap();
+ if let Some(channel) = channels.get(&port) {
+ return Ok(channel.clone());
+ }
+ let endpoint = Endpoint::from_shared(format!("http://127.0.0.1:{port}"))
+ .map_err(|error| AppError::native(format!("Invalid Android gRPC endpoint: {error}")))?;
+ let channel = endpoint.connect_lazy();
+ channels.insert(port, channel.clone());
+ Ok(channel)
+}
+
/// Maps a normalized [0, 1] coordinate onto a pixel index in `0..extent`,
/// so 0.0 hits the first pixel and 1.0 hits the last.
fn normalized_to_pixel(value: f64, extent: f64) -> i32 {
    let last_pixel = (extent - 1.0).max(0.0);
    let scaled = value.clamp(0.0, 1.0) * last_pixel;
    scaled.round().max(0.0) as i32
}
+
+fn sdk_root() -> PathBuf {
+ env::var_os("ANDROID_HOME")
+ .or_else(|| env::var_os("ANDROID_SDK_ROOT"))
+ .map(PathBuf::from)
+ .filter(|path| path.exists())
+ .unwrap_or_else(|| home_dir().join("Library/Android/sdk"))
+}
+
+fn java_home() -> OsString {
+ env::var_os("JAVA_HOME").unwrap_or_else(|| OsString::from("/opt/homebrew/opt/openjdk"))
+}
+
+fn home_dir() -> PathBuf {
+ env::var_os("HOME")
+ .map(PathBuf::from)
+ .unwrap_or_else(|| Path::new("/").to_path_buf())
+}
+
+fn emulator_grpc_token(port: u16) -> Option {
+ per_instance_grpc_token(port).or_else(global_grpc_token)
+}
+
+fn per_instance_grpc_token(port: u16) -> Option {
+ let running_dir = home_dir().join("Library/Caches/TemporaryItems/avd/running");
+ let entries = std::fs::read_dir(running_dir).ok()?;
+ let port_value = port.to_string();
+ for entry in entries.flatten() {
+ let path = entry.path();
+ if path.extension().and_then(|ext| ext.to_str()) != Some("ini") {
+ continue;
+ }
+ let contents = std::fs::read_to_string(path).ok()?;
+ let fields = parse_ini(&contents);
+ if fields.get("grpc.port") == Some(&port_value) {
+ if let Some(token) = fields.get("grpc.token").filter(|token| !token.is_empty()) {
+ return Some(token.to_owned());
+ }
+ }
+ }
+ None
+}
+
+fn global_grpc_token() -> Option {
+ std::fs::read_to_string(home_dir().join(".emulator_console_auth_token"))
+ .ok()
+ .map(|token| token.trim().to_owned())
+ .filter(|token| !token.is_empty())
+}
+
/// Parses flat `key=value` lines into a map, trimming whitespace around both
/// sides and skipping lines without an `=`. Sections and comments are not
/// handled; the emulator's runtime ini files are flat key/value lists.
fn parse_ini(contents: &str) -> HashMap<String, String> {
    contents
        .lines()
        .filter_map(|line| {
            let line = line.trim();
            let (key, value) = line.split_once('=')?;
            Some((key.trim().to_owned(), value.trim().to_owned()))
        })
        .collect()
}
+
+fn rgba_display_order(
+ image: &[u8],
+ width: u32,
+ height: u32,
+ format: grpc::image_format::ImgFormat,
+) -> Result, AppError> {
+ let width = width as usize;
+ let height = height as usize;
+ match format {
+ grpc::image_format::ImgFormat::Rgba8888 => {
+ let row_len = width * 4;
+ if image.len() < row_len * height {
+ return Err(AppError::native(
+ "Android emulator returned a truncated RGBA frame.",
+ ));
+ }
+ Ok(image[..row_len * height].to_vec())
+ }
+ grpc::image_format::ImgFormat::Rgb888 => {
+ let src_row_len = width * 3;
+ if image.len() < src_row_len * height {
+ return Err(AppError::native(
+ "Android emulator returned a truncated RGB frame.",
+ ));
+ }
+ let mut out = BytesMut::with_capacity(width * height * 4);
+ out.resize(width * height * 4, 255);
+ for y in 0..height {
+ let src_row = y * src_row_len;
+ let dst_row = y * width * 4;
+ for x in 0..width {
+ let src = src_row + x * 3;
+ let dst = dst_row + x * 4;
+ out[dst] = image[src];
+ out[dst + 1] = image[src + 1];
+ out[dst + 2] = image[src + 2];
+ out[dst + 3] = 255;
+ }
+ }
+ Ok(out.to_vec())
+ }
+ grpc::image_format::ImgFormat::Png => Err(AppError::native(
+ "Android emulator gRPC returned PNG instead of raw pixels.",
+ )),
+ }
+}
+
+fn extract_xml(output: &str) -> &str {
+ output
+ .find(", depth: usize, max_depth: usize) -> Value {
+ let bounds = parse_bounds(node.attribute("bounds").unwrap_or(""));
+ let class_name = node.attribute("class").unwrap_or("");
+ let short_class = class_name.rsplit('.').next().unwrap_or(class_name);
+ let text = node.attribute("text").unwrap_or("");
+ let content_desc = node.attribute("content-desc").unwrap_or("");
+ let label = if !text.is_empty() { text } else { content_desc };
+ let mut children = Vec::new();
+ if depth < max_depth {
+ for child in node.children().filter(|child| child.has_tag_name("node")) {
+ children.push(android_node_value(child, depth + 1, max_depth));
+ }
+ }
+ json!({
+ "source": "android-uiautomator",
+ "type": map_android_class(short_class),
+ "role": map_android_class(short_class),
+ "className": class_name,
+ "AXIdentifier": node.attribute("resource-id").unwrap_or(""),
+ "AXLabel": label,
+ "AXValue": text,
+ "text": text,
+ "title": label,
+ "enabled": node.attribute("enabled") == Some("true"),
+ "isHidden": node.attribute("visible-to-user") == Some("false"),
+ "frame": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
+ "frameInScreen": frame_value(bounds.0, bounds.1, bounds.2, bounds.3),
+ "children": children,
+ })
+}
+
/// Parses a uiautomator bounds string like `[x1,y1][x2,y2]` into
/// `(x, y, width, height)`. Malformed input yields an all-zero frame, and
/// inverted rectangles are clamped to non-negative width/height.
fn parse_bounds(value: &str) -> (f64, f64, f64, f64) {
    let numbers = value
        .replace("][", ",")
        .replace(['[', ']'], "")
        .split(',')
        .filter_map(|part| part.parse::<f64>().ok())
        .collect::<Vec<f64>>();
    if numbers.len() != 4 {
        return (0.0, 0.0, 0.0, 0.0);
    }
    (
        numbers[0],
        numbers[1],
        (numbers[2] - numbers[0]).max(0.0),
        (numbers[3] - numbers[1]).max(0.0),
    )
}
+
/// Builds the `{x, y, width, height}` JSON object used for frame fields in
/// the accessibility-tree output.
fn frame_value(x: f64, y: f64, width: f64, height: f64) -> Value {
    json!({ "x": x, "y": y, "width": width, "height": height })
}
+
/// Maps a short Android widget class name (package stripped) onto the role
/// vocabulary shared with the iOS accessibility output. Unknown classes
/// fall back to "other".
fn map_android_class(class_name: &str) -> &'static str {
    match class_name {
        // Text widgets.
        "TextView" => "staticText",
        "EditText" => "textField",
        // Tappable controls.
        "Button" | "ImageButton" | "FloatingActionButton" => "button",
        "CheckBox" => "checkBox",
        "RadioButton" => "radioButton",
        "Switch" | "ToggleButton" => "switch",
        "SeekBar" => "slider",
        // Containers and media.
        "ImageView" => "image",
        "RecyclerView" | "ListView" => "table",
        "ScrollView" | "HorizontalScrollView" | "NestedScrollView" => "scrollView",
        "WebView" => "webView",
        _ => "other",
    }
}
+
/// Translates a USB HID usage ID into the Android key code the emulator
/// expects. Usage IDs without an explicit mapping pass through unchanged.
fn android_key_code(hid: u16) -> u16 {
    // (USB HID usage, Android key code) pairs for the non-identity cases:
    // Enter, Escape, Backspace, Tab, Space, and the four arrow keys.
    const HID_TO_ANDROID: [(u16, u16); 9] = [
        (40, 66),
        (41, 111),
        (42, 67),
        (43, 61),
        (44, 62),
        (79, 22),
        (80, 21),
        (81, 20),
        (82, 19),
    ];
    HID_TO_ANDROID
        .iter()
        .find(|(usage, _)| *usage == hid)
        .map(|(_, android)| *android)
        .unwrap_or(hid)
}
+
/// Classifies a logcat line by its level marker (a single letter flanked by
/// spaces). Severity markers are checked from most to least severe; lines
/// without any marker are treated as info.
fn android_log_level(line: &str) -> &'static str {
    for (marker, level) in [(" E ", "error"), (" W ", "warning"), (" D ", "debug")] {
        if line.contains(marker) {
            return level;
        }
    }
    "info"
}
+
/// Wraps `value` in single quotes for safe interpolation into an adb shell
/// command, escaping embedded single quotes with the standard `'\''` idiom.
fn shell_quote(value: &str) -> String {
    let escaped = value.replace('\'', "'\\''");
    let mut quoted = String::with_capacity(escaped.len() + 2);
    quoted.push('\'');
    quoted.push_str(&escaped);
    quoted.push('\'');
    quoted
}
+
/// Removes duplicate values while preserving first-seen order.
#[allow(dead_code)]
fn _dedupe<T: Clone + Eq + Hash>(values: impl IntoIterator<Item = T>) -> Vec<T> {
    let mut seen = HashSet::new();
    values
        .into_iter()
        // `insert` returns false for values already seen, filtering repeats.
        .filter(|value| seen.insert(value.clone()))
        .collect()
}
+
+mod grpc {
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct Empty {}
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct Touch {
+ #[prost(int32, tag = "1")]
+ pub x: i32,
+ #[prost(int32, tag = "2")]
+ pub y: i32,
+ #[prost(int32, tag = "3")]
+ pub identifier: i32,
+ #[prost(int32, tag = "4")]
+ pub pressure: i32,
+ #[prost(int32, tag = "5")]
+ pub touch_major: i32,
+ #[prost(int32, tag = "6")]
+ pub touch_minor: i32,
+ #[prost(enumeration = "touch::EventExpiration", tag = "7")]
+ pub expiration: i32,
+ #[prost(int32, tag = "8")]
+ pub orientation: i32,
+ }
+
+ pub mod touch {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum EventExpiration {
+ Unspecified = 0,
+ NeverExpire = 1,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct TouchEvent {
+ #[prost(message, repeated, tag = "1")]
+ pub touches: Vec,
+ #[prost(int32, tag = "2")]
+ pub display: i32,
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct KeyboardEvent {
+ #[prost(enumeration = "keyboard_event::KeyCodeType", tag = "1")]
+ pub code_type: i32,
+ #[prost(enumeration = "keyboard_event::KeyEventType", tag = "2")]
+ pub event_type: i32,
+ #[prost(int32, tag = "3")]
+ pub key_code: i32,
+ #[prost(string, tag = "4")]
+ pub key: String,
+ #[prost(string, tag = "5")]
+ pub text: String,
+ }
+
+ impl KeyboardEvent {
+ pub fn usb_keypress(key_code: i32) -> Self {
+ Self {
+ code_type: keyboard_event::KeyCodeType::Usb as i32,
+ event_type: keyboard_event::KeyEventType::Keypress as i32,
+ key_code,
+ key: String::new(),
+ text: String::new(),
+ }
+ }
+
+ pub fn text(text: String) -> Self {
+ Self {
+ code_type: keyboard_event::KeyCodeType::Usb as i32,
+ event_type: keyboard_event::KeyEventType::Keypress as i32,
+ key_code: 0,
+ key: String::new(),
+ text,
+ }
+ }
+ }
+
+ pub mod keyboard_event {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum KeyCodeType {
+ Usb = 0,
+ Evdev = 1,
+ Xkb = 2,
+ Win = 3,
+ Mac = 4,
+ }
+
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum KeyEventType {
+ Keydown = 0,
+ Keyup = 1,
+ Keypress = 2,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct ImageTransport {
+ #[prost(enumeration = "image_transport::TransportChannel", tag = "1")]
+ pub channel: i32,
+ #[prost(string, tag = "2")]
+ pub handle: String,
+ }
+
+ pub mod image_transport {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum TransportChannel {
+ Unspecified = 0,
+ Mmap = 1,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct ImageFormat {
+ #[prost(enumeration = "image_format::ImgFormat", tag = "1")]
+ pub format: i32,
+ #[prost(uint32, tag = "3")]
+ pub width: u32,
+ #[prost(uint32, tag = "4")]
+ pub height: u32,
+ #[prost(uint32, tag = "5")]
+ pub display: u32,
+ #[prost(message, optional, tag = "6")]
+ pub transport: Option,
+ }
+
+ pub mod image_format {
+ #[derive(
+ Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration,
+ )]
+ #[repr(i32)]
+ pub enum ImgFormat {
+ Png = 0,
+ Rgba8888 = 1,
+ Rgb888 = 2,
+ }
+ }
+
+ #[derive(Clone, PartialEq, ::prost::Message)]
+ pub struct Image {
+ #[prost(message, optional, tag = "1")]
+ pub format: Option,
+ #[prost(uint32, tag = "2")]
+ pub width: u32,
+ #[prost(uint32, tag = "3")]
+ pub height: u32,
+ #[prost(bytes = "vec", tag = "4")]
+ pub image: Vec,
+ #[prost(uint32, tag = "5")]
+ pub seq: u32,
+ #[prost(uint64, tag = "6")]
+ pub timestamp_us: u64,
+ }
+}
diff --git a/server/src/api/routes.rs b/server/src/api/routes.rs
index 6d908f9..d61c827 100644
--- a/server/src/api/routes.rs
+++ b/server/src/api/routes.rs
@@ -1,3 +1,4 @@
+use crate::android::{self, AndroidBridge};
use crate::api::json::json;
use crate::auth;
use crate::config::Config;
@@ -43,6 +44,7 @@ pub struct AppState {
pub inspectors: InspectorHub,
pub metrics: Arc,
pub simulator_inventory: SimulatorInventoryCache,
+ pub android: AndroidBridge,
}
#[derive(Clone, Default)]
@@ -382,6 +384,13 @@ struct AccessibilityPointQuery {
y: f64,
}
+#[derive(Deserialize)]
+#[serde(rename_all = "camelCase")]
+struct AndroidFrameQuery {
+ max_edge: Option,
+ max_fps: Option,
+}
+
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct AccessibilityTreeQuery {
@@ -476,6 +485,10 @@ pub fn router(state: AppState) -> Router {
.route("/api/simulators/{udid}/batch", post(run_batch))
.route("/api/simulators/{udid}/touch", post(send_touch))
.route("/api/simulators/{udid}/control", get(control_socket))
+ .route(
+ "/api/simulators/{udid}/android/frames",
+ get(android_frame_socket),
+ )
.route("/api/simulators/{udid}/webrtc/offer", post(webrtc_offer))
.route(
"/api/simulators/{udid}/touch-sequence",
@@ -904,9 +917,9 @@ async fn inspector_response(
}
async fn list_simulators(State(state): State) -> Result, AppError> {
- let simulators = list_simulators_cached(state.clone(), false).await?;
+ let simulators = all_device_values(state.clone(), false).await?;
Ok(json(json_value!({
- "simulators": state.registry.enrich_simulators(simulators),
+ "simulators": simulators,
})))
}
@@ -914,6 +927,15 @@ async fn boot_simulator(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state.clone(), move |android| {
+ android.boot(&action_udid)?;
+ android.wait_until_booted(&action_udid, Duration::from_secs(120))
+ })
+ .await?;
+ return simulator_payload(state, udid).await;
+ }
forget_lifecycle_session(&state, &udid);
let action_udid = udid.clone();
run_bridge_action(state.clone(), move |bridge| {
@@ -927,6 +949,11 @@ async fn shutdown_simulator(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state.clone(), move |android| android.shutdown(&action_udid)).await?;
+ return simulator_payload(state, udid).await;
+ }
forget_lifecycle_session(&state, &udid);
let action_udid = udid.clone();
run_bridge_action(state.clone(), move |bridge| {
@@ -940,6 +967,11 @@ async fn erase_simulator(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| android.erase(&action_udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
forget_lifecycle_session(&state, &udid);
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| bridge.erase_simulator(&action_udid)).await?;
@@ -964,6 +996,14 @@ async fn install_app(
"Request body must include `appPath`.",
));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.install_app(&action_udid, &payload.app_path)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.install_app(&action_udid, &payload.app_path)
@@ -982,6 +1022,14 @@ async fn uninstall_app(
"Request body must include `bundleId`.",
));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.uninstall_app(&action_udid, &payload.bundle_id)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.uninstall_app(&action_udid, &payload.bundle_id)
@@ -994,6 +1042,10 @@ async fn get_pasteboard(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ let text = run_android_action(state, move |android| android.pasteboard_text(&udid)).await?;
+ return Ok(json(json_value!({ "text": text })));
+ }
let text = run_bridge_action(state, move |bridge| bridge.pasteboard_text(&udid)).await?;
Ok(json(json_value!({ "text": text })))
}
@@ -1003,6 +1055,13 @@ async fn set_pasteboard(
Path(udid): Path,
Json(payload): Json,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.set_pasteboard_text(&udid, &payload.text)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
bridge.set_pasteboard_text(&udid, &payload.text)
})
@@ -1014,7 +1073,11 @@ async fn screenshot_png(
State(state): State,
Path(udid): Path,
) -> Result<(StatusCode, HeaderMap, Vec), AppError> {
- let png = run_bridge_action(state, move |bridge| bridge.screenshot_png(&udid)).await?;
+ let png = if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.screenshot_png(&udid)).await?
+ } else {
+ run_bridge_action(state, move |bridge| bridge.screenshot_png(&udid)).await?
+ };
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, "image/png".parse().unwrap());
headers.insert(
@@ -1028,6 +1091,10 @@ async fn toggle_appearance(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.toggle_appearance(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| bridge.toggle_appearance(&action_udid)).await?;
Ok(json(json_value!({ "ok": true })))
@@ -1037,6 +1104,9 @@ async fn refresh_stream(
State(state): State,
Path(udid): Path,
) -> Result, AppError> {
+ if android::is_android_id(&udid) {
+ return Ok(json(json_value!({ "ok": true, "stream": "screenshot" })));
+ }
let session = state.registry.get_or_create_async(&udid).await?;
if let Err(error) = session.ensure_started_async().await {
state.registry.remove(&udid);
@@ -1054,6 +1124,14 @@ async fn open_url(
if payload.url.trim().is_empty() {
return Err(AppError::bad_request("Request body must include `url`."));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.open_url(&action_udid, &payload.url)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.open_url(&action_udid, &payload.url)
@@ -1072,6 +1150,14 @@ async fn launch_bundle(
"Request body must include `bundleId`.",
));
}
+ if android::is_android_id(&udid) {
+ let action_udid = udid.clone();
+ run_android_action(state, move |android| {
+ android.launch_package(&action_udid, &payload.bundle_id)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
let action_udid = udid.clone();
run_bridge_action(state, move |bridge| {
bridge.launch_bundle(&action_udid, &payload.bundle_id)
@@ -1202,6 +1288,13 @@ async fn send_touch(
let x = payload.x.clamp(0.0, 1.0);
let y = payload.y.clamp(0.0, 1.0);
let phase = payload.phase;
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_touch(&udid, x, y, &phase)
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
input.send_touch(x, y, &phase)
@@ -1232,6 +1325,24 @@ async fn send_touch_sequence(
));
}
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ for event in payload.events {
+ android.send_touch(
+ &udid,
+ event.x.clamp(0.0, 1.0),
+ event.y.clamp(0.0, 1.0),
+ &event.phase,
+ )?;
+ if let Some(delay_ms) = event.delay_ms_after.filter(|delay_ms| *delay_ms > 0) {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
for event in payload.events {
@@ -1255,9 +1366,279 @@ async fn control_socket(
Path(udid): Path,
websocket: WebSocketUpgrade,
) -> impl IntoResponse {
+ if android::is_android_id(&udid) {
+ return websocket
+ .on_upgrade(move |socket| handle_android_control_socket(state, udid, socket));
+ }
websocket.on_upgrade(move |socket| handle_control_socket(state, udid, socket))
}
+async fn android_frame_socket(
+ State(state): State,
+ Path(udid): Path,
+ Query(query): Query,
+ websocket: WebSocketUpgrade,
+) -> impl IntoResponse {
+ websocket.on_upgrade(move |socket| {
+ handle_android_frame_socket(state, udid, query.max_edge, query.max_fps, socket)
+ })
+}
+
+async fn handle_android_frame_socket(
+ state: AppState,
+ udid: String,
+ max_edge: Option,
+ max_fps: Option,
+ socket: WebSocket,
+) {
+ let (mut sender, mut receiver) = socket.split();
+ if !android::is_android_id(&udid) {
+ let _ = sender
+ .send(Message::Text(
+ json_value!({
+ "type": "error",
+ "error": "Android frame streaming only supports Android emulator IDs."
+ })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ return;
+ }
+
+ let mut stream = match state.android.grpc_frame_stream(&udid, max_edge).await {
+ Ok(stream) => stream,
+ Err(error) => {
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "error", "error": error.to_string() })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ return;
+ }
+ };
+
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "ready", "udid": udid, "platform": "android-emulator" })
+ .to_string()
+ .into(),
+ ))
+ .await;
+
+ let min_frame_gap = max_fps
+ .filter(|fps| *fps > 0)
+ .map(|fps| Duration::from_millis(1000 / u64::from(fps.min(60))))
+ .unwrap_or_else(|| Duration::from_millis(83));
+ let mut last_sent_at = Instant::now() - min_frame_gap;
+
+ loop {
+ tokio::select! {
+ message = receiver.next() => {
+ match message {
+ Some(Ok(Message::Close(_))) | None => break,
+ Some(Ok(_)) => {}
+ Some(Err(_)) => break,
+ }
+ }
+ frame = stream.next_frame() => {
+ let frame = match frame {
+ Ok(Some(frame)) => frame,
+ Ok(None) => break,
+ Err(error) => {
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "error", "error": error.to_string() })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ break;
+ }
+ };
+ let now = Instant::now();
+ if now.duration_since(last_sent_at) < min_frame_gap {
+ continue;
+ }
+ last_sent_at = now;
+ if sender
+ .send(Message::Binary(encode_android_frame(frame).into()))
+ .await
+ .is_err()
+ {
+ break;
+ }
+ }
+ }
+ }
+}
+
+fn encode_android_frame(frame: android::AndroidFrame) -> Vec {
+ const HEADER_LEN: usize = 32;
+ let mut bytes = Vec::with_capacity(HEADER_LEN + frame.rgba.len());
+ bytes.extend_from_slice(b"SDAF");
+ bytes.push(1);
+ bytes.push(1);
+ bytes.extend_from_slice(&[0, 0]);
+ bytes.extend_from_slice(&frame.width.to_le_bytes());
+ bytes.extend_from_slice(&frame.height.to_le_bytes());
+ bytes.extend_from_slice(&frame.seq.to_le_bytes());
+ bytes.extend_from_slice(&0u32.to_le_bytes());
+ bytes.extend_from_slice(&frame.timestamp_us.to_le_bytes());
+ bytes.extend_from_slice(&frame.rgba);
+ bytes
+}
+
+async fn handle_android_control_socket(state: AppState, udid: String, socket: WebSocket) {
+ let (mut sender, mut receiver) = socket.split();
+ let mut active_touch: Option = None;
+ let _ = sender
+ .send(Message::Text(
+ json_value!({ "type": "ready", "udid": udid, "platform": "android-emulator" })
+ .to_string()
+ .into(),
+ ))
+ .await;
+ while let Some(message) = receiver.next().await {
+ let text = match message {
+ Ok(Message::Text(text)) => text,
+ Ok(Message::Binary(bytes)) => match String::from_utf8(bytes.to_vec()) {
+ Ok(text) => text.into(),
+ Err(_) => continue,
+ },
+ Ok(Message::Close(_)) => break,
+ Ok(Message::Ping(_)) | Ok(Message::Pong(_)) => continue,
+ Err(_) => break,
+ };
+ let Ok(control_message) = serde_json::from_str::(&text) else {
+ continue;
+ };
+ let state = state.clone();
+ let udid = udid.clone();
+ let _ = run_android_control_message(state, udid, control_message, &mut active_touch).await;
+ }
+}
+
/// State for an in-flight pointer gesture on the Android control socket,
/// letting the `ended` phase distinguish a tap from a swipe.
struct AndroidControlTouch {
    // When the `began` phase arrived; its elapsed time becomes the swipe
    // duration.
    started_at: Instant,
    // Normalized [0, 1] coordinates captured at the `began` phase.
    start_x: f64,
    start_y: f64,
    // Most recent `moved` coordinates. NOTE(review): written on `moved` but
    // not read by the visible handlers — confirm whether still needed.
    latest_x: f64,
    latest_y: f64,
}
+
/// Dispatches a decoded control-socket message to the Android bridge.
/// Touch messages get stateful gesture handling; every other message maps
/// directly to a single bridge call.
async fn run_android_control_message(
    state: AppState,
    udid: String,
    message: ControlMessage,
    active_touch: &mut Option<AndroidControlTouch>,
) -> Result<(), AppError> {
    match message {
        ControlMessage::Touch { x, y, phase } => {
            handle_android_control_touch(state, udid, x, y, phase, active_touch).await
        }
        other => {
            run_android_action(state, move |android| match other {
                ControlMessage::Key {
                    key_code,
                    modifiers,
                } => android.send_key(&udid, key_code, modifiers.unwrap_or(0)),
                // Key code 41 (HID usage for Escape) doubles as the
                // keyboard-dismiss gesture on Android.
                ControlMessage::DismissKeyboard => android.send_key(&udid, 41, 0),
                ControlMessage::Home => android.press_home(&udid),
                ControlMessage::AppSwitcher => android.open_app_switcher(&udid),
                // Both directions map to a single rotate on Android.
                ControlMessage::RotateLeft | ControlMessage::RotateRight => {
                    android.rotate_right(&udid)
                }
                ControlMessage::ToggleAppearance => android.toggle_appearance(&udid),
                // Unreachable: Touch is handled by the outer match arm above.
                ControlMessage::Touch { .. } => Ok(()),
            })
            .await
        }
    }
}
+
/// Handles one touch-phase message from the control socket, tracking gesture
/// state so `ended` can choose between forwarding a plain touch-up and
/// falling back to an adb swipe.
async fn handle_android_control_touch(
    state: AppState,
    udid: String,
    x: f64,
    y: f64,
    phase: String,
    active_touch: &mut Option<AndroidControlTouch>,
) -> Result<(), AppError> {
    // Reject NaN/infinity before clamping; clamp silently bounds the rest.
    if !x.is_finite() || !y.is_finite() {
        return Err(AppError::bad_request(
            "`x` and `y` must be finite normalized numbers.",
        ));
    }
    let x = x.clamp(0.0, 1.0);
    let y = y.clamp(0.0, 1.0);
    match phase.as_str() {
        "began" => {
            // Start a new gesture record (replacing any stale one) and
            // forward the touch-down.
            *active_touch = Some(AndroidControlTouch {
                started_at: Instant::now(),
                start_x: x,
                start_y: y,
                latest_x: x,
                latest_y: y,
            });
            run_android_action(state, move |android| {
                android.send_touch(&udid, x, y, "began")
            })
            .await
        }
        "moved" => {
            if let Some(touch) = active_touch.as_mut() {
                touch.latest_x = x;
                touch.latest_y = y;
            }
            run_android_action(state, move |android| {
                android.send_touch(&udid, x, y, "moved")
            })
            .await
        }
        "ended" => {
            // If no `began` was seen (e.g. reconnect mid-gesture), synthesize
            // a zero-length gesture at the end point.
            let touch = active_touch.take().unwrap_or(AndroidControlTouch {
                started_at: Instant::now(),
                start_x: x,
                start_y: y,
                latest_x: x,
                latest_y: y,
            });
            let end_x = x;
            let end_y = y;
            // Euclidean distance in normalized units decides tap vs swipe.
            let distance =
                ((end_x - touch.start_x).powi(2) + (end_y - touch.start_y).powi(2)).sqrt();
            // Swipe duration mirrors the real gesture, bounded to 80-1500ms.
            let duration_ms = touch.started_at.elapsed().as_millis().clamp(80, 1500) as u64;
            run_android_action(state, move |android| {
                if distance >= 0.025 {
                    // Prefer a proper touch-up; only when that fails, replay
                    // the whole gesture as an adb swipe.
                    // NOTE(review): the swipe fallback re-sends the full
                    // motion after a possibly partial touch stream — confirm
                    // this is the intended recovery path.
                    android
                        .send_touch(&udid, end_x, end_y, "ended")
                        .or_else(|_| {
                            android.send_swipe(
                                &udid,
                                touch.start_x,
                                touch.start_y,
                                end_x,
                                end_y,
                                duration_ms,
                            )
                        })
                } else {
                    android.send_touch(&udid, end_x, end_y, "ended")
                }
            })
            .await
        }
        "cancelled" => {
            // Drop the gesture without forwarding anything to the device.
            *active_touch = None;
            Ok(())
        }
        // Unknown phases are ignored rather than erroring the socket.
        _ => Ok(()),
    }
}
+
async fn webrtc_offer(
State(state): State,
Path(udid): Path,
@@ -1391,6 +1772,13 @@ async fn send_key(
Path(udid): Path<String>,
Json(payload): Json,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_key(&udid, payload.key_code, payload.modifiers.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
bridge.send_key(&udid, payload.key_code, payload.modifiers.unwrap_or(0))
})
@@ -1413,6 +1801,21 @@ async fn send_key_sequence(
"Key sequence cannot contain more than 512 key codes.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ let delay_ms = payload.delay_ms.unwrap_or(0);
+ let key_count = payload.key_codes.len();
+ for (index, key_code) in payload.key_codes.into_iter().enumerate() {
+ android.send_key(&udid, key_code, 0)?;
+ if delay_ms > 0 && index + 1 < key_count {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
let delay_ms = payload.delay_ms.unwrap_or(0);
@@ -1433,6 +1836,10 @@ async fn dismiss_keyboard(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.send_key(&udid, 41, 0)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.send_key(&udid, 41, 0)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -1445,6 +1852,13 @@ async fn press_button(
if payload.button.trim().is_empty() {
return Err(AppError::bad_request("Request body must include `button`."));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.press_button(&udid, &payload.button, payload.duration_ms.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| {
bridge.press_button(&udid, &payload.button, payload.duration_ms.unwrap_or(0))
})
@@ -1456,6 +1870,10 @@ async fn press_home(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.press_home(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.press_home(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -1464,6 +1882,10 @@ async fn open_app_switcher(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.open_app_switcher(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.open_app_switcher(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -1472,6 +1894,10 @@ async fn rotate_right(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_right(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -1480,6 +1906,10 @@ async fn rotate_left(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json(json_value!({ "ok": true })));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_left(&udid)).await?;
Ok(json(json_value!({ "ok": true })))
}
@@ -1488,6 +1918,11 @@ async fn chrome_profile(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<Json<Value>, AppError> {
+ if android::is_android_id(&udid) {
+ let profile =
+ run_android_action(state, move |android| android.chrome_profile(&udid)).await?;
+ return Ok(json(profile));
+ }
let profile = run_bridge_action(state, move |bridge| bridge.chrome_profile(&udid)).await?;
Ok(json(json_value!(profile)))
}
@@ -1496,6 +1931,11 @@ async fn chrome_png(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<(StatusCode, HeaderMap, Vec<u8>), AppError> {
+ if android::is_android_id(&udid) {
+ return Err(AppError::not_found(
+ "Android emulators do not expose device chrome assets.",
+ ));
+ }
let png = run_bridge_action(state, move |bridge| bridge.chrome_png(&udid)).await?;
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, "image/png".parse().unwrap());
@@ -1510,6 +1950,11 @@ async fn screen_mask_png(
State(state): State<AppState>,
Path(udid): Path<String>,
) -> Result<(StatusCode, HeaderMap, Vec<u8>), AppError> {
+ if android::is_android_id(&udid) {
+ return Err(AppError::not_found(
+ "Android emulators do not expose screen mask assets.",
+ ));
+ }
let png = run_bridge_action(state, move |bridge| bridge.screen_mask_png(&udid)).await?;
let mut headers = HeaderMap::new();
headers.insert(header::CONTENT_TYPE, "image/png".parse().unwrap());
@@ -1544,6 +1989,22 @@ async fn accessibility_tree_value(
max_depth: Option,
include_hidden: bool,
) -> Result<Value, AppError> {
+ if android::is_android_id(&udid) {
+ let requested_source = source
+ .filter(|source| *source != "auto")
+ .map(|source| source.to_owned());
+ return run_android_action(state, move |android| {
+ let mut tree = android.accessibility_tree(&udid, max_depth)?;
+ if include_hidden {
+ tree["includeHidden"] = Value::Bool(true);
+ }
+ if let Some(source) = requested_source {
+ tree["requestedSource"] = Value::String(source);
+ }
+ Ok(tree)
+ })
+ .await;
+ }
let requested_source = AccessibilityHierarchySource::parse(source)?;
let max_depth = max_depth.map(|depth| depth.min(80));
@@ -1677,6 +2138,13 @@ async fn accessibility_point(
));
}
+ if android::is_android_id(&udid) {
+ let snapshot = run_android_action(state, move |android| {
+ android.accessibility_tree(&udid, None)
+ })
+ .await?;
+ return Ok(json(snapshot));
+ }
let snapshot = accessibility_snapshot(state, udid, Some((query.x, query.y)), None).await?;
Ok(json(snapshot))
}
@@ -1725,6 +2193,17 @@ async fn perform_tap_payload(
tap_point_from_snapshot(&snapshot, &payload.selector)?
};
+ if android::is_android_id(&udid) {
+ return run_android_action(state, move |android| {
+ android.send_touch(&udid, x, y, "began")?;
+ if duration_ms > 0 {
+ std::thread::sleep(Duration::from_millis(duration_ms));
+ }
+ android.send_touch(&udid, x, y, "ended")
+ })
+ .await;
+ }
+
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
input.send_touch(x, y, "began")?;
@@ -1807,6 +2286,13 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
key_code,
modifiers,
} => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_key(&udid, key_code, modifiers.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json_value!({ "action": "key" }));
+ }
run_bridge_action(state, move |bridge| {
bridge.send_key(&udid, key_code, modifiers.unwrap_or(0))
})
@@ -1825,6 +2311,21 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"keySequence cannot contain more than 512 key codes.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ let delay_ms = delay_ms.unwrap_or(0);
+ let key_count = key_codes.len();
+ for (index, key_code) in key_codes.into_iter().enumerate() {
+ android.send_key(&udid, key_code, 0)?;
+ if delay_ms > 0 && index + 1 < key_count {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json_value!({ "action": "keySequence" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
let delay_ms = delay_ms.unwrap_or(0);
@@ -1853,6 +2354,28 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"touch requires finite normalized x and y.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ let x = x.clamp(0.0, 1.0);
+ let y = y.clamp(0.0, 1.0);
+ if down.unwrap_or(false) || up.unwrap_or(false) {
+ if down.unwrap_or(false) {
+ android.send_touch(&udid, x, y, "began")?;
+ }
+ if down.unwrap_or(false) && up.unwrap_or(false) {
+ std::thread::sleep(Duration::from_millis(delay_ms.unwrap_or(100)));
+ }
+ if up.unwrap_or(false) {
+ android.send_touch(&udid, x, y, "ended")?;
+ }
+ } else {
+ android.send_touch(&udid, x, y, phase.as_deref().unwrap_or("began"))?;
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json_value!({ "action": "touch" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
let x = x.clamp(0.0, 1.0);
@@ -1884,6 +2407,31 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"touchSequence cannot contain more than 64 events.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ for event in events {
+ if !event.x.is_finite() || !event.y.is_finite() {
+ return Err(AppError::bad_request(
+ "touchSequence requires finite normalized x and y.",
+ ));
+ }
+ android.send_touch(
+ &udid,
+ event.x.clamp(0.0, 1.0),
+ event.y.clamp(0.0, 1.0),
+ &event.phase,
+ )?;
+ if let Some(delay_ms) =
+ event.delay_ms_after.filter(|delay_ms| *delay_ms > 0)
+ {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ })
+ .await?;
+ return Ok(json_value!({ "action": "touchSequence" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
for event in events {
@@ -1923,6 +2471,20 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
"swipe requires finite normalized coordinates.",
));
}
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_swipe(
+ &udid,
+ start_x,
+ start_y,
+ end_x,
+ end_y,
+ duration_ms.unwrap_or(350),
+ )
+ })
+ .await?;
+ return Ok(json_value!({ "action": "swipe" }));
+ }
run_bridge_action(state, move |bridge| {
let step_count = steps.unwrap_or(12).max(1);
let delay =
@@ -1955,6 +2517,20 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
} => {
let (start_x, start_y, end_x, end_y, default_duration_ms) =
normalized_gesture_coordinates(&preset, delta)?;
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.send_swipe(
+ &udid,
+ start_x,
+ start_y,
+ end_x,
+ end_y,
+ duration_ms.unwrap_or(default_duration_ms),
+ )
+ })
+ .await?;
+ return Ok(json_value!({ "action": "gesture", "preset": preset }));
+ }
run_bridge_action(state, move |bridge| {
let step_count = steps.unwrap_or(12).max(1);
let delay = Duration::from_millis(
@@ -1977,6 +2553,23 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
Ok(json_value!({ "action": "gesture", "preset": preset }))
}
BatchStep::Type { text, delay_ms } => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ if delay_ms.is_some() {
+ for character in text.chars() {
+ android.type_text(&udid, &character.to_string())?;
+ if let Some(delay_ms) = delay_ms.filter(|delay_ms| *delay_ms > 0) {
+ std::thread::sleep(Duration::from_millis(delay_ms));
+ }
+ }
+ Ok(())
+ } else {
+ android.type_text(&udid, &text)
+ }
+ })
+ .await?;
+ return Ok(json_value!({ "action": "type" }));
+ }
run_bridge_action(state, move |bridge| {
let input = bridge.create_input_session(&udid)?;
for character in text.chars() {
@@ -1999,6 +2592,13 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
button,
duration_ms,
} => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.press_button(&udid, &button, duration_ms.unwrap_or(0))
+ })
+ .await?;
+ return Ok(json_value!({ "action": "button" }));
+ }
run_bridge_action(state, move |bridge| {
bridge.press_button(&udid, &button, duration_ms.unwrap_or(0))
})
@@ -2006,34 +2606,69 @@ async fn run_batch_step(state: AppState, udid: String, step: BatchStep) -> Resul
Ok(json_value!({ "action": "button" }))
}
BatchStep::Launch { bundle_id } => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| {
+ android.launch_package(&udid, &bundle_id)
+ })
+ .await?;
+ return Ok(json_value!({ "action": "launch" }));
+ }
run_bridge_action(state, move |bridge| bridge.launch_bundle(&udid, &bundle_id)).await?;
Ok(json_value!({ "action": "launch" }))
}
BatchStep::OpenUrl { url } => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.open_url(&udid, &url)).await?;
+ return Ok(json_value!({ "action": "openUrl" }));
+ }
run_bridge_action(state, move |bridge| bridge.open_url(&udid, &url)).await?;
Ok(json_value!({ "action": "openUrl" }))
}
BatchStep::Home => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.press_home(&udid)).await?;
+ return Ok(json_value!({ "action": "home" }));
+ }
run_bridge_action(state, move |bridge| bridge.press_home(&udid)).await?;
Ok(json_value!({ "action": "home" }))
}
BatchStep::DismissKeyboard => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.send_key(&udid, 41, 0)).await?;
+ return Ok(json_value!({ "action": "dismissKeyboard" }));
+ }
run_bridge_action(state, move |bridge| bridge.send_key(&udid, 41, 0)).await?;
Ok(json_value!({ "action": "dismissKeyboard" }))
}
BatchStep::AppSwitcher => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.open_app_switcher(&udid)).await?;
+ return Ok(json_value!({ "action": "appSwitcher" }));
+ }
run_bridge_action(state, move |bridge| bridge.open_app_switcher(&udid)).await?;
Ok(json_value!({ "action": "appSwitcher" }))
}
BatchStep::RotateLeft => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json_value!({ "action": "rotateLeft" }));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_left(&udid)).await?;
Ok(json_value!({ "action": "rotateLeft" }))
}
BatchStep::RotateRight => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.rotate_right(&udid)).await?;
+ return Ok(json_value!({ "action": "rotateRight" }));
+ }
run_bridge_action(state, move |bridge| bridge.rotate_right(&udid)).await?;
Ok(json_value!({ "action": "rotateRight" }))
}
BatchStep::ToggleAppearance => {
+ if android::is_android_id(&udid) {
+ run_android_action(state, move |android| android.toggle_appearance(&udid)).await?;
+ return Ok(json_value!({ "action": "toggleAppearance" }));
+ }
run_bridge_action(state, move |bridge| bridge.toggle_appearance(&udid)).await?;
Ok(json_value!({ "action": "toggleAppearance" }))
}
@@ -2461,6 +3096,10 @@ async fn simulator_logs(
Query(query): Query,
) -> Result<Json<Value>, AppError> {
let limit = query.limit.unwrap_or(250).clamp(1, 1000);
+ if android::is_android_id(&udid) {
+ let entries = run_android_action(state, move |android| android.logs(&udid, limit)).await?;
+ return Ok(json(json_value!({ "entries": entries })));
+ }
let filters = LogFilters::new(
split_filter_values(query.levels.as_deref()),
split_filter_values(query.processes.as_deref()),
@@ -3426,6 +4065,28 @@ where
})?
}
+async fn run_android_action<F, T>(state: AppState, action: F) -> Result<T, AppError>
+where
+ F: FnOnce(AndroidBridge) -> Result<T, AppError> + Send + 'static,
+ T: Send + 'static,
+{
+ let android = state.android.clone();
+ task::spawn_blocking(move || action(android))
+ .await
+ .map_err(|error| {
+ AppError::internal(format!("Failed to join Android bridge task: {error}"))
+ })?
+}
+
+async fn all_device_values(state: AppState, force_refresh: bool) -> Result<Vec<Value>, AppError> {
+ let ios = list_simulators_cached(state.clone(), force_refresh).await?;
+ let mut values = state.registry.enrich_simulators(ios);
+ let android_devices =
+ run_android_action(state.clone(), |android| android.list_devices()).await?;
+ values.extend(state.android.enrich_devices(android_devices));
+ Ok(values)
+}
+
async fn list_simulators_cached(
state: AppState,
force_refresh: bool,
@@ -3458,8 +4119,7 @@ async fn accessibility_snapshot(
}
async fn simulator_payload(state: AppState, udid: String) -> Result<Json<Value>, AppError> {
- let simulators = list_simulators_cached(state.clone(), true).await?;
- let enriched = state.registry.enrich_simulators(simulators);
+ let enriched = all_device_values(state.clone(), true).await?;
let simulator = enriched
.into_iter()
.find(|entry| entry.get("udid").and_then(Value::as_str) == Some(udid.as_str()))
diff --git a/server/src/main.rs b/server/src/main.rs
index 7992a0c..ca759ff 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -1,3 +1,4 @@
+mod android;
mod api;
mod auth;
mod config;
@@ -2061,7 +2062,16 @@ fn main() -> anyhow::Result<()> {
up,
delay_ms,
} => {
- if let Some(server_url) = service_url.as_deref().filter(|_| normalized) {
+ let android_device = android::is_android_id(&udid);
+ if android_device && !normalized {
+ anyhow::bail!("Android touch coordinates require --normalized.");
+ }
+ let command_server_url = if android_device {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| normalized) {
if down || up {
let mut events = Vec::new();
if down {
@@ -2116,8 +2126,13 @@ fn main() -> anyhow::Result<()> {
pre_delay_ms,
post_delay_ms,
} => {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
if let (Some(server_url), Some(x), Some(y), true, None, None, None, None) = (
- service_url.as_deref(),
+ command_server_url.as_deref(),
x,
y,
normalized,
@@ -2129,7 +2144,7 @@ fn main() -> anyhow::Result<()> {
sleep_ms(pre_delay_ms);
service_tap(server_url, &udid, x, y, duration_ms)?;
sleep_ms(post_delay_ms);
- } else if let Some(server_url) = service_url.as_deref() {
+ } else if let Some(server_url) = command_server_url.as_deref() {
sleep_ms(pre_delay_ms);
service_tap_element(
server_url,
@@ -2191,18 +2206,44 @@ fn main() -> anyhow::Result<()> {
pre_delay_ms,
post_delay_ms,
} => {
- if let Some(server_url) = service_url.as_deref().filter(|_| normalized) {
+ let android_device = android::is_android_id(&udid);
+ if android_device && !normalized {
+ anyhow::bail!("Android swipe coordinates require --normalized.");
+ }
+ let command_server_url = if android_device {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| normalized) {
sleep_ms(pre_delay_ms);
- service_swipe(
- server_url,
- &udid,
- start_x,
- start_y,
- end_x,
- end_y,
- duration_ms,
- steps,
- )?;
+ if android_device {
+ service_batch(
+ server_url,
+ &udid,
+ vec![serde_json::json!({
+ "action": "swipe",
+ "startX": start_x,
+ "startY": start_y,
+ "endX": end_x,
+ "endY": end_y,
+ "durationMs": duration_ms,
+ "steps": steps,
+ })],
+ false,
+ )?;
+ } else {
+ service_swipe(
+ server_url,
+ &udid,
+ start_x,
+ start_y,
+ end_x,
+ end_y,
+ duration_ms,
+ steps,
+ )?;
+ }
sleep_ms(post_delay_ms);
} else {
let (start_x, start_y) =
@@ -2237,7 +2278,36 @@ fn main() -> anyhow::Result<()> {
pre_delay_ms,
post_delay_ms,
} => {
- if let Some(server_url) = service_url.as_deref().filter(|_| normalized) {
+ let android_device = android::is_android_id(&udid);
+ let command_server_url = if android_device {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if android_device {
+ let server_url = command_server_url
+ .as_deref()
+ .ok_or_else(|| anyhow::anyhow!("Android command requires SimDeck daemon."))?;
+ sleep_ms(pre_delay_ms);
+ service_batch(
+ server_url,
+ &udid,
+ vec![serde_json::json!({
+ "action": "gesture",
+ "preset": preset,
+ "durationMs": duration_ms,
+ "delta": delta,
+ "steps": 4,
+ })],
+ false,
+ )?;
+ sleep_ms(post_delay_ms);
+ println_json(
+ &serde_json::json!({ "ok": true, "udid": udid, "action": "gesture", "preset": preset }),
+ )?;
+ return Ok(());
+ }
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| normalized) {
let gesture = gesture_coordinates(
&bridge,
&udid,
@@ -2300,6 +2370,9 @@ fn main() -> anyhow::Result<()> {
duration_ms,
steps,
} => {
+ if android::is_android_id(&udid) {
+ anyhow::bail!("Android pinch gestures are not supported by the ADB input bridge.");
+ }
let frames = pinch_frames(
&bridge,
&udid,
@@ -2325,6 +2398,9 @@ fn main() -> anyhow::Result<()> {
duration_ms,
steps,
} => {
+ if android::is_android_id(&udid) {
+ anyhow::bail!("Android rotate gestures are not supported by the ADB input bridge.");
+ }
let frames = rotate_gesture_frames(
&bridge,
&udid,
@@ -2353,7 +2429,12 @@ fn main() -> anyhow::Result<()> {
} => {
let key_code = parse_hid_key(&key)?;
sleep_ms(pre_delay_ms);
- if let Some(server_url) = service_url.as_deref().filter(|_| duration_ms == 0) {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref().filter(|_| duration_ms == 0) {
service_key(server_url, &udid, key_code, modifiers)?;
} else if duration_ms > 0 && modifiers == 0 {
let input = bridge.create_input_session(&udid)?;
@@ -2373,7 +2454,12 @@ fn main() -> anyhow::Result<()> {
delay_ms,
} => {
let keys = parse_key_list(&keycodes)?;
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_key_sequence(server_url, &udid, &keys, delay_ms)?;
} else {
let input = bridge.create_input_session(&udid)?;
@@ -2396,7 +2482,12 @@ fn main() -> anyhow::Result<()> {
} => {
let modifier_mask = parse_modifier_mask(&modifiers)?;
let key_code = parse_hid_key(&key)?;
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_key(server_url, &udid, key_code, modifier_mask)?;
} else {
bridge.send_key(&udid, key_code, modifier_mask)?;
@@ -2412,7 +2503,21 @@ fn main() -> anyhow::Result<()> {
delay_ms,
} => {
let text = read_text_input(text, stdin, file)?;
- type_text(&bridge, &udid, &text, delay_ms)?;
+ if android::is_android_id(&udid) {
+ let server_url = command_service_url(explicit_server_url.clone())?;
+ service_batch(
+ &server_url,
+ &udid,
+ vec![serde_json::json!({
+ "action": "type",
+ "text": text,
+ "delayMs": delay_ms,
+ })],
+ false,
+ )?;
+ } else {
+ type_text(&bridge, &udid, &text, delay_ms)?;
+ }
println_json(&serde_json::json!({ "ok": true, "udid": udid, "action": "type" }))?;
Ok(())
}
@@ -2421,7 +2526,12 @@ fn main() -> anyhow::Result<()> {
button,
duration_ms,
} => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_button(server_url, &udid, &button, duration_ms)?;
} else {
bridge.press_button(&udid, &button, duration_ms)?;
@@ -2438,7 +2548,12 @@ fn main() -> anyhow::Result<()> {
stdin,
continue_on_error,
} => {
- let report = if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ let report = if let Some(server_url) = command_server_url.as_deref() {
let step_lines = read_batch_steps(steps, file, stdin)?;
service_batch(
server_url,
@@ -2453,7 +2568,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::DismissKeyboard { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "dismiss-keyboard", &Value::Null)?;
} else {
bridge.send_key(&udid, 41, 0)?;
@@ -2467,7 +2587,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::Home { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "home", &Value::Null)?;
} else {
bridge.press_home(&udid)?;
@@ -2476,7 +2601,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::AppSwitcher { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "app-switcher", &Value::Null)?;
} else {
bridge.open_app_switcher(&udid)?;
@@ -2487,7 +2617,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::RotateLeft { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "rotate-left", &Value::Null)?;
} else {
bridge.rotate_left(&udid)?;
@@ -2498,7 +2633,12 @@ fn main() -> anyhow::Result<()> {
Ok(())
}
Command::RotateRight { udid } => {
- if let Some(server_url) = service_url.as_deref() {
+ let command_server_url = if android::is_android_id(&udid) {
+ Some(command_service_url(explicit_server_url.clone())?)
+ } else {
+ service_url.clone()
+ };
+ if let Some(server_url) = command_server_url.as_deref() {
service_post_ok(server_url, &udid, "rotate-right", &Value::Null)?;
} else {
bridge.rotate_right(&udid)?;
@@ -4820,6 +4960,7 @@ async fn serve(
inspectors,
metrics,
simulator_inventory: Default::default(),
+ android: Default::default(),
};
let http_router = app_router(
diff --git a/server/src/native/ffi.rs b/server/src/native/ffi.rs
index e87d8fb..6a5b8bf 100644
--- a/server/src/native/ffi.rs
+++ b/server/src/native/ffi.rs
@@ -220,6 +220,23 @@ unsafe extern "C" {
error_message: *mut *mut c_char,
) -> bool;
+ pub fn xcw_native_h264_encoder_create(
+ callback: Option,
+ user_data: *mut c_void,
+ error_message: *mut *mut c_char,
+ ) -> *mut c_void;
+ pub fn xcw_native_h264_encoder_destroy(handle: *mut c_void);
+ pub fn xcw_native_h264_encoder_encode_rgba(
+ handle: *mut c_void,
+ rgba: *const u8,
+ length: usize,
+ width: u32,
+ height: u32,
+ timestamp_us: u64,
+ error_message: *mut *mut c_char,
+ ) -> bool;
+ pub fn xcw_native_h264_encoder_request_keyframe(handle: *mut c_void);
+
pub fn xcw_native_free_string(value: *mut c_char);
pub fn xcw_native_free_bytes(bytes: xcw_native_owned_bytes);
pub fn xcw_native_release_shared_bytes(bytes: xcw_native_shared_bytes);
diff --git a/server/src/transport/webrtc.rs b/server/src/transport/webrtc.rs
index 3da6616..251f18e 100644
--- a/server/src/transport/webrtc.rs
+++ b/server/src/transport/webrtc.rs
@@ -1,15 +1,19 @@
+use crate::android;
use crate::api::routes::{run_control_message, AppState, ControlMessage};
use crate::error::AppError;
use crate::metrics::counters::ClientStreamStats;
+use crate::native::ffi;
+use crate::transport::packet::{FramePacket, SharedFrame};
use bytes::Bytes;
use serde::{Deserialize, Serialize};
use std::collections::{HashMap, VecDeque};
-use std::sync::atomic::Ordering;
-use std::sync::{Arc, Mutex, OnceLock};
+use std::ffi::{c_void, CStr};
+use std::sync::atomic::{AtomicUsize, Ordering};
+use std::sync::{Arc, Mutex, OnceLock, RwLock, Weak};
use std::time::Duration;
use tokio::sync::{broadcast, mpsc};
use tokio::task;
-use tokio::time;
+use tokio::time::{self, Instant};
use tracing::{info, warn};
use webrtc::api::interceptor_registry::register_default_interceptors;
use webrtc::api::media_engine::{MediaEngine, MIME_TYPE_H264};
@@ -48,6 +52,10 @@ const WEBRTC_REALTIME_KEYFRAME_WRITE_TIMEOUT: Duration = Duration::from_millis(9
const WEBRTC_INITIAL_KEYFRAME_TIMEOUT: Duration = Duration::from_secs(5);
const WEBRTC_RTP_OUTBOUND_MTU: usize = 1200;
const WEBRTC_PEER_DISCONNECTED_TIMEOUT: Duration = Duration::from_secs(12);
+const ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY: usize = 128;
+const DEFAULT_ANDROID_WEBRTC_MAX_EDGE: u32 = 1280;
+const DEFAULT_ANDROID_WEBRTC_FPS: u64 = 60;
+const MAX_ANDROID_WEBRTC_FPS: u64 = 120;
static WEBRTC_MEDIA_STREAMS: OnceLock>>> =
OnceLock::new();
const MAX_WEBRTC_MEDIA_STREAMS_PER_UDID: usize = 16;
@@ -96,17 +104,31 @@ pub async fn create_answer(
"WebRTC payload must include type `offer`.",
));
}
-
- let session = state.registry.get_or_create_async(&udid).await?;
- if let Err(error) = session.ensure_started_async().await {
- state.registry.remove(&udid);
- return Err(error);
- }
if payload.transport.is_some() {
return Err(AppError::bad_request(
"WebRTC preview supports media tracks only.",
));
}
+
+ let source = if android::is_android_id(&udid) {
+ WebRtcVideoSource::Android(
+ AndroidWebRtcSource::start(
+ state.android.clone(),
+ state.metrics.clone(),
+ udid.clone(),
+ android_webrtc_max_edge(),
+ )
+ .await?,
+ )
+ } else {
+ let session = state.registry.get_or_create_async(&udid).await?;
+ if let Err(error) = session.ensure_started_async().await {
+ state.registry.remove(&udid);
+ return Err(error);
+ }
+ WebRtcVideoSource::Simulator(session)
+ };
+
info!(
"WebRTC offer for {udid}: remote_candidates={} remote_candidate_types={} ice_servers={} ice_transport_policy={}",
count_sdp_candidates(&payload.sdp),
@@ -118,9 +140,9 @@ pub async fn create_answer(
ice_transport_policy_label()
);
- let first_frame = wait_for_h264_sync_keyframe(&session, WEBRTC_INITIAL_KEYFRAME_TIMEOUT)
+ let first_frame = wait_for_h264_sync_keyframe(&source, WEBRTC_INITIAL_KEYFRAME_TIMEOUT)
.await
- .ok_or_else(|| AppError::native("Timed out waiting for a simulator H.264 keyframe."))?;
+ .ok_or_else(|| AppError::native("Timed out waiting for a device H.264 keyframe."))?;
let codec = first_frame
.codec
.as_deref()
@@ -169,13 +191,22 @@ pub async fn create_answer(
);
register_diagnostics(&peer_connection, &udid);
let (stream_control_tx, stream_control_rx) = mpsc::unbounded_channel();
- register_control_data_channel(
- &peer_connection,
- session.clone(),
- state.clone(),
- udid.clone(),
- stream_control_tx,
- );
+ match &source {
+ WebRtcVideoSource::Simulator(session) => register_control_data_channel(
+ &peer_connection,
+ session.clone(),
+ state.clone(),
+ udid.clone(),
+ stream_control_tx,
+ ),
+ WebRtcVideoSource::Android(source) => register_android_data_channel(
+ &peer_connection,
+ source.clone(),
+ state.clone(),
+ udid.clone(),
+ stream_control_tx,
+ ),
+ }
let video_track = Arc::new(TrackLocalStaticRTP::new(
RTCRtpCodecCapability {
@@ -193,7 +224,7 @@ pub async fn create_answer(
.add_track(video_track.clone() as Arc<dyn TrackLocal + Send + Sync>)
.await
.map_err(|error| AppError::internal(format!("add WebRTC video track: {error}")))?;
- let rtcp_session = session.clone();
+ let rtcp_source = source.clone();
let rtcp_udid = udid.clone();
tokio::spawn(async move {
while let Ok((packets, _attributes)) = rtp_sender.read_rtcp().await {
@@ -202,7 +233,7 @@ pub async fn create_answer(
.any(|packet| rtcp_packet_requests_keyframe(packet.as_ref()))
{
info!("WebRTC RTCP requested keyframe for {rtcp_udid}");
- rtcp_session.request_keyframe();
+ rtcp_source.request_keyframe();
}
}
});
@@ -240,7 +271,7 @@ pub async fn create_answer(
WebRtcMediaStream {
state,
udid,
- session,
+ source,
first_frame,
peer_connection,
video_track,
@@ -439,6 +470,157 @@ fn attach_control_data_channel(
}));
}
+fn register_android_data_channel(
+ peer_connection: &Arc,
+ source: AndroidWebRtcSource,
+ state: AppState,
+ udid: String,
+ stream_control_tx: mpsc::UnboundedSender,
+) {
+ peer_connection.on_data_channel(Box::new(move |channel: Arc| {
+ let source = source.clone();
+ let state = state.clone();
+ let udid = udid.clone();
+ let stream_control_tx = stream_control_tx.clone();
+ Box::pin(async move {
+ let label = channel.label();
+ if label != WEBRTC_CONTROL_CHANNEL_LABEL && label != WEBRTC_TELEMETRY_CHANNEL_LABEL {
+ return;
+ }
+ attach_android_data_channel(channel, source, state, udid, stream_control_tx);
+ })
+ }));
+}
+
+fn attach_android_data_channel(
+ channel: Arc,
+ source: AndroidWebRtcSource,
+ state: AppState,
+ udid: String,
+ stream_control_tx: mpsc::UnboundedSender,
+) {
+ let (control_tx, control_rx) = mpsc::unbounded_channel::();
+ task::spawn(run_android_webrtc_control_queue(
+ state.clone(),
+ udid.clone(),
+ control_rx,
+ ));
+ channel.on_message(Box::new(move |message: DataChannelMessage| {
+ let source = source.clone();
+ let state = state.clone();
+ let udid = udid.clone();
+ let stream_control_tx = stream_control_tx.clone();
+ let control_tx = control_tx.clone();
+ Box::pin(async move {
+ let Ok(text) = std::str::from_utf8(&message.data) else {
+ warn!("Invalid Android WebRTC control message bytes for {udid}");
+ return;
+ };
+ if let Ok(message) = serde_json::from_str::(text) {
+ match message {
+ WebRtcDataChannelMessage::ClientStats { stats } => {
+ if !stats.client_id.trim().is_empty() && !stats.kind.trim().is_empty() {
+ state.metrics.record_client_stream_stats(*stats);
+ }
+ }
+ WebRtcDataChannelMessage::StreamControl {
+ force_keyframe,
+ snapshot,
+ } => {
+ let command = WebRtcStreamCommand {
+ force_keyframe: force_keyframe.unwrap_or(false),
+ snapshot: snapshot.unwrap_or(false),
+ };
+ if command.force_keyframe || command.snapshot {
+ source.request_keyframe();
+ }
+ let _ = stream_control_tx.send(command);
+ }
+ }
+ return;
+ }
+
+ let control_message = match serde_json::from_str::(text) {
+ Ok(message) => message,
+ Err(error) => {
+ warn!("Invalid Android WebRTC control message for {udid}: {error}");
+ return;
+ }
+ };
+ if control_tx.send(control_message).is_err() {
+ warn!("Android WebRTC control queue closed for {udid}");
+ }
+ })
+ }));
+}
+
+async fn run_android_webrtc_control_queue(
+ state: AppState,
+ udid: String,
+ mut receiver: mpsc::UnboundedReceiver,
+) {
+ let mut pending = VecDeque::new();
+ loop {
+ let mut message = match pending.pop_front() {
+ Some(message) => message,
+ None => match receiver.recv().await {
+ Some(message) => message,
+ None => break,
+ },
+ };
+ if webrtc_control_message_is_move(&message) {
+ while let Ok(next_message) = receiver.try_recv() {
+ if webrtc_control_message_is_move(&next_message) {
+ message = next_message;
+ } else {
+ pending.push_back(next_message);
+ break;
+ }
+ }
+ }
+
+ if let Err(error) =
+ run_android_webrtc_control_message(state.clone(), udid.clone(), message).await
+ {
+ warn!("Android WebRTC control message failed for {udid}: {error}");
+ }
+ }
+}
+
+async fn run_android_webrtc_control_message(
+ state: AppState,
+ udid: String,
+ message: ControlMessage,
+) -> Result<(), AppError> {
+ task::spawn_blocking(move || match message {
+ ControlMessage::Touch { x, y, phase } => {
+ if !x.is_finite() || !y.is_finite() {
+ return Err(AppError::bad_request(
+ "`x` and `y` must be finite normalized numbers.",
+ ));
+ }
+ state
+ .android
+ .send_touch(&udid, x.clamp(0.0, 1.0), y.clamp(0.0, 1.0), &phase)
+ }
+ ControlMessage::Key {
+ key_code,
+ modifiers,
+ } => state
+ .android
+ .send_key(&udid, key_code, modifiers.unwrap_or(0)),
+ ControlMessage::DismissKeyboard => state.android.send_key(&udid, 41, 0),
+ ControlMessage::Home => state.android.press_home(&udid),
+ ControlMessage::AppSwitcher => state.android.open_app_switcher(&udid),
+ ControlMessage::RotateLeft | ControlMessage::RotateRight => {
+ state.android.rotate_right(&udid)
+ }
+ ControlMessage::ToggleAppearance => state.android.toggle_appearance(&udid),
+ })
+ .await
+ .map_err(|error| AppError::internal(format!("Failed to join Android control task: {error}")))?
+}
+
async fn run_webrtc_control_queue(
session: crate::simulators::session::SimulatorSession,
state: AppState,
@@ -796,26 +978,383 @@ fn ice_transport_policy() -> RTCIceTransportPolicy {
}
}
+#[derive(Clone)]
+struct AndroidWebRtcSource {
+ inner: Arc,
+}
+
+struct AndroidWebRtcSourceInner {
+ udid: String,
+ encoder_handle: AtomicUsize,
+ callback_user_data: AtomicUsize,
+ shutdown_tx: broadcast::Sender<()>,
+ sender: broadcast::Sender,
+ latest_keyframe: RwLock>,
+ metrics: Arc,
+}
+
+unsafe impl Send for AndroidWebRtcSourceInner {}
+unsafe impl Sync for AndroidWebRtcSourceInner {}
+
+impl AndroidWebRtcSource {
+ async fn start(
+ bridge: android::AndroidBridge,
+ metrics: Arc,
+ udid: String,
+ max_edge: u32,
+ ) -> Result {
+ let mut frame_stream = bridge.grpc_frame_stream(&udid, Some(max_edge)).await?;
+ let (sender, _) = broadcast::channel(ANDROID_WEBRTC_FRAME_BROADCAST_CAPACITY);
+ let (shutdown_tx, mut shutdown_rx) = broadcast::channel(1);
+ let inner = Arc::new(AndroidWebRtcSourceInner {
+ udid: udid.clone(),
+ encoder_handle: AtomicUsize::new(0),
+ callback_user_data: AtomicUsize::new(0),
+ shutdown_tx,
+ sender,
+ latest_keyframe: RwLock::new(None),
+ metrics,
+ });
+ let user_data = Weak::into_raw(Arc::downgrade(&inner)) as *mut c_void;
+ let mut error = std::ptr::null_mut();
+ let handle = unsafe {
+ ffi::xcw_native_h264_encoder_create(
+ Some(android_h264_encoder_frame_callback),
+ user_data,
+ &mut error,
+ )
+ };
+ if handle.is_null() {
+ unsafe {
+ let _ = Weak::from_raw(user_data as *const AndroidWebRtcSourceInner);
+ }
+ return Err(unsafe { take_native_error(error) }
+ .unwrap_or_else(|| AppError::native("Unable to create Android H.264 encoder.")));
+ }
+ inner
+ .encoder_handle
+ .store(handle as usize, Ordering::Release);
+ inner
+ .callback_user_data
+ .store(user_data as usize, Ordering::Release);
+
+ let source = Self { inner };
+ let task_inner = Arc::downgrade(&source.inner);
+ tokio::spawn(async move {
+ let min_frame_gap = android_webrtc_frame_interval();
+ let mut last_encoded_at = Instant::now() - min_frame_gap;
+ loop {
+ tokio::select! {
+ _ = shutdown_rx.recv() => break,
+ frame = frame_stream.next_frame() => {
+ let frame = match frame {
+ Ok(Some(frame)) => frame,
+ Ok(None) => break,
+ Err(error) => {
+ let udid = task_inner
+ .upgrade()
+ .map(|inner| inner.udid.clone())
+ .unwrap_or_else(|| "android".to_owned());
+ warn!("Android WebRTC raw frame stream failed for {udid}: {error}");
+ break;
+ }
+ };
+ let Some(inner) = task_inner.upgrade() else {
+ break;
+ };
+ let now = Instant::now();
+ if now.duration_since(last_encoded_at) < min_frame_gap {
+ continue;
+ }
+ last_encoded_at = now;
+ let handle = inner.encoder_handle.load(Ordering::Acquire);
+ let udid = inner.udid.clone();
+ let encode_result = task::spawn_blocking(move || {
+ encode_android_rgba_frame(handle, &frame)
+ })
+ .await
+ .map_err(|error| AppError::internal(format!("Failed to join Android encoder task: {error}")))
+ .and_then(|result| result);
+ if let Err(error) = encode_result {
+ warn!("Android VideoToolbox encode failed for {udid}: {error}");
+ }
+ }
+ }
+ }
+ });
+ source.request_keyframe();
+ Ok(source)
+ }
+
+ fn subscribe(&self) -> broadcast::Receiver {
+ self.inner.sender.subscribe()
+ }
+
+ async fn wait_for_keyframe(&self, timeout_duration: Duration) -> Option {
+ let deadline = Instant::now() + timeout_duration;
+ let baseline_sequence = self
+ .inner
+ .latest_keyframe
+ .read()
+ .unwrap()
+ .as_ref()
+ .map_or(0, |frame| frame.frame_sequence);
+ let mut receiver = self.inner.sender.subscribe();
+ self.request_keyframe();
+
+ loop {
+ if let Some(frame) = self.inner.latest_keyframe.read().unwrap().clone() {
+ if frame.frame_sequence > baseline_sequence {
+ return Some(frame);
+ }
+ }
+ let remaining = deadline.checked_duration_since(Instant::now())?;
+ match time::timeout(remaining, receiver.recv()).await {
+ Ok(Ok(frame)) if frame.is_keyframe && frame.frame_sequence > baseline_sequence => {
+ return Some(frame)
+ }
+ Ok(Ok(_)) | Ok(Err(broadcast::error::RecvError::Lagged(_))) => {
+ self.request_keyframe();
+ }
+ Ok(Err(_)) | Err(_) => return None,
+ }
+ }
+ }
+
+ fn request_refresh(&self) {}
+
+ fn request_keyframe(&self) {
+ self.inner
+ .metrics
+ .keyframe_requests
+ .fetch_add(1, Ordering::Relaxed);
+ unsafe {
+ ffi::xcw_native_h264_encoder_request_keyframe(
+ self.inner.encoder_handle.load(Ordering::Acquire) as *mut c_void,
+ );
+ }
+ }
+}
+
+impl Drop for AndroidWebRtcSourceInner {
+ fn drop(&mut self) {
+ let _ = self.shutdown_tx.send(());
+ let encoder_handle = self.encoder_handle.load(Ordering::Acquire);
+ let callback_user_data = self.callback_user_data.load(Ordering::Acquire);
+ unsafe {
+ if encoder_handle != 0 {
+ ffi::xcw_native_h264_encoder_destroy(encoder_handle as *mut c_void);
+ }
+ if callback_user_data != 0 {
+ let _ = Weak::from_raw(callback_user_data as *const AndroidWebRtcSourceInner);
+ }
+ }
+ }
+}
+
+unsafe extern "C" fn android_h264_encoder_frame_callback(
+ frame: *const ffi::xcw_native_frame,
+ user_data: *mut c_void,
+) {
+ if frame.is_null() || user_data.is_null() {
+ return;
+ }
+
+ let weak = unsafe { Weak::from_raw(user_data as *const AndroidWebRtcSourceInner) };
+ if let Some(inner) = weak.upgrade() {
+ unsafe {
+ inner.handle_encoded_frame(&*frame);
+ }
+ }
+ let _ = Weak::into_raw(weak);
+}
+
+impl AndroidWebRtcSourceInner {
+ fn handle_encoded_frame(&self, frame: &ffi::xcw_native_frame) {
+ let description = unsafe { copy_native_shared_bytes(frame.description) };
+ let Some(data) = (unsafe { copy_native_shared_bytes(frame.data) }) else {
+ return;
+ };
+ let packet = Arc::new(FramePacket {
+ frame_sequence: frame.frame_sequence,
+ timestamp_us: frame.timestamp_us,
+ is_keyframe: frame.is_keyframe,
+ width: frame.width,
+ height: frame.height,
+ codec: native_c_string(frame.codec),
+ description,
+ data,
+ });
+ self.metrics.frames_encoded.fetch_add(1, Ordering::Relaxed);
+ if packet.is_keyframe {
+ self.metrics
+ .keyframes_encoded
+ .fetch_add(1, Ordering::Relaxed);
+ *self.latest_keyframe.write().unwrap() = Some(packet.clone());
+ }
+ let _ = self.sender.send(packet);
+ }
+}
+
+fn encode_android_rgba_frame(
+ encoder_handle: usize,
+ frame: &android::AndroidFrame,
+) -> Result<(), AppError> {
+ unsafe {
+ let mut error = std::ptr::null_mut();
+ let ok = ffi::xcw_native_h264_encoder_encode_rgba(
+ encoder_handle as *mut c_void,
+ frame.rgba.as_ptr(),
+ frame.rgba.len(),
+ frame.width,
+ frame.height,
+ frame.timestamp_us,
+ &mut error,
+ );
+ if ok {
+ Ok(())
+ } else {
+ Err(take_native_error(error)
+ .unwrap_or_else(|| AppError::native("Android VideoToolbox encode failed.")))
+ }
+ }
+}
+
+unsafe fn copy_native_shared_bytes(bytes: ffi::xcw_native_shared_bytes) -> Option {
+ if bytes.data.is_null() || bytes.length == 0 {
+ if !bytes.owner.is_null() {
+ unsafe {
+ ffi::xcw_native_release_shared_bytes(bytes);
+ }
+ }
+ return None;
+ }
+
+ let copied =
+ unsafe { Bytes::copy_from_slice(std::slice::from_raw_parts(bytes.data, bytes.length)) };
+ unsafe {
+ ffi::xcw_native_release_shared_bytes(bytes);
+ }
+ Some(copied)
+}
+
+fn native_c_string(ptr: *const i8) -> Option {
+ if ptr.is_null() {
+ return None;
+ }
+ let value = unsafe { CStr::from_ptr(ptr) }
+ .to_string_lossy()
+ .trim()
+ .to_owned();
+ if value.is_empty() {
+ None
+ } else {
+ Some(value)
+ }
+}
+
+unsafe fn take_native_error(raw: *mut i8) -> Option {
+ if raw.is_null() {
+ return None;
+ }
+ let message = unsafe { CStr::from_ptr(raw) }
+ .to_string_lossy()
+ .into_owned();
+ unsafe {
+ ffi::xcw_native_free_string(raw);
+ }
+ Some(AppError::native(message))
+}
+
+fn android_webrtc_max_edge() -> u32 {
+ std::env::var("SIMDECK_ANDROID_WEBRTC_MAX_EDGE")
+ .ok()
+ .and_then(|value| value.parse::().ok())
+ .unwrap_or(DEFAULT_ANDROID_WEBRTC_MAX_EDGE)
+ .clamp(360, 2400)
+}
+
+fn android_webrtc_frame_interval() -> Duration {
+ let fps = std::env::var("SIMDECK_REALTIME_FPS")
+ .or_else(|_| std::env::var("SIMDECK_LOCAL_STREAM_FPS"))
+ .or_else(|_| std::env::var("SIMDECK_ANDROID_WEBRTC_FPS"))
+ .ok()
+ .and_then(|value| value.parse::().ok())
+ .unwrap_or(DEFAULT_ANDROID_WEBRTC_FPS)
+ .clamp(15, MAX_ANDROID_WEBRTC_FPS);
+ Duration::from_micros(1_000_000 / fps)
+}
+
+#[derive(Clone)]
+enum WebRtcVideoSource {
+ Simulator(crate::simulators::session::SimulatorSession),
+ Android(AndroidWebRtcSource),
+}
+
+impl WebRtcVideoSource {
+ fn subscribe(&self) -> WebRtcFrameReceiver {
+ match self {
+ Self::Simulator(session) => WebRtcFrameReceiver::Simulator(session.subscribe()),
+ Self::Android(source) => WebRtcFrameReceiver::Android(source.subscribe()),
+ }
+ }
+
+ async fn wait_for_keyframe(&self, timeout_duration: Duration) -> Option {
+ match self {
+ Self::Simulator(session) => session.wait_for_keyframe(timeout_duration).await,
+ Self::Android(source) => source.wait_for_keyframe(timeout_duration).await,
+ }
+ }
+
+ fn request_refresh(&self) {
+ match self {
+ Self::Simulator(session) => session.request_refresh(),
+ Self::Android(source) => source.request_refresh(),
+ }
+ }
+
+ fn request_keyframe(&self) {
+ match self {
+ Self::Simulator(session) => session.request_keyframe(),
+ Self::Android(source) => source.request_keyframe(),
+ }
+ }
+}
+
+enum WebRtcFrameReceiver {
+ Simulator(crate::simulators::session::FrameSubscription),
+ Android(broadcast::Receiver),
+}
+
+impl WebRtcFrameReceiver {
+ async fn recv(&mut self) -> Result {
+ match self {
+ Self::Simulator(receiver) => receiver.recv().await,
+ Self::Android(receiver) => receiver.recv().await,
+ }
+ }
+}
+
async fn wait_for_h264_sync_keyframe(
- session: &crate::simulators::session::SimulatorSession,
+ source: &WebRtcVideoSource,
timeout_duration: Duration,
-) -> Option {
+) -> Option {
let deadline = time::Instant::now() + timeout_duration;
loop {
let remaining = deadline.checked_duration_since(time::Instant::now())?;
- let frame = session.wait_for_keyframe(remaining).await?;
+ let frame = source.wait_for_keyframe(remaining).await?;
if h264_frame_is_decoder_sync(&frame) {
return Some(frame);
}
- session.request_keyframe();
+ source.request_keyframe();
}
}
struct WebRtcMediaStream {
state: AppState,
- session: crate::simulators::session::SimulatorSession,
+ source: WebRtcVideoSource,
udid: String,
- first_frame: crate::transport::packet::SharedFrame,
+ first_frame: SharedFrame,
peer_connection: Arc,
video_track: Arc,
cancellation_token: broadcast::Sender<()>,
@@ -827,7 +1366,7 @@ impl WebRtcMediaStream {
async fn run(self) {
let Self {
state,
- session,
+ source,
udid,
first_frame,
peer_connection,
@@ -836,7 +1375,7 @@ impl WebRtcMediaStream {
mut cancellation,
mut stream_control_rx,
} = self;
- let mut rx = session.subscribe();
+ let mut rx = source.subscribe();
let mut send_timing = WebRtcSendTiming::new();
let mut peer_state_interval = time::interval(Duration::from_millis(250));
let realtime_stream = realtime_stream_enabled();
@@ -873,10 +1412,10 @@ impl WebRtcMediaStream {
if recovery_action_for_write_timeout(realtime_stream)
== FrameRecoveryAction::Refresh
{
- session.request_refresh();
+ source.request_refresh();
} else {
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
}
}
Err(error) => {
@@ -915,9 +1454,9 @@ impl WebRtcMediaStream {
};
if command.force_keyframe || command.snapshot {
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
} else {
- session.request_refresh();
+ source.request_refresh();
}
}
frame = rx.recv() => {
@@ -929,7 +1468,7 @@ impl WebRtcMediaStream {
.frames_dropped_server
.fetch_add(skipped, Ordering::Relaxed);
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
continue;
}
Err(broadcast::error::RecvError::Closed) => {
@@ -945,7 +1484,7 @@ impl WebRtcMediaStream {
waiting_for_keyframe = false;
} else if frame.is_keyframe {
waiting_for_keyframe = true;
- session.request_keyframe();
+ source.request_keyframe();
state.metrics.frames_dropped_server.fetch_add(1, Ordering::Relaxed);
continue;
}
@@ -970,9 +1509,9 @@ impl WebRtcMediaStream {
let recovery_action = recovery_action_for_write_timeout(realtime_stream);
waiting_for_keyframe = recovery_action == FrameRecoveryAction::Keyframe;
if recovery_action == FrameRecoveryAction::Refresh {
- session.request_refresh();
+ source.request_refresh();
} else {
- session.request_keyframe();
+ source.request_keyframe();
}
}
Err(error) => {
diff --git a/skills/simdeck/SKILL.md b/skills/simdeck/SKILL.md
index f6791cc..a5cef25 100644
--- a/skills/simdeck/SKILL.md
+++ b/skills/simdeck/SKILL.md
@@ -5,7 +5,7 @@ description: Use for simulator lifecycle, app install/launch, live viewing, UI i
# SimDeck Agent Guide
-SimDeck automates iOS Simulators. Use the CLI for automation and the browser UI for live human visibility. Works with UIKit, SwiftUI, React Native, Expo, and NativeScript apps.
+SimDeck automates iOS Simulators and Android emulators. Use the CLI for automation and the browser UI for live human visibility. iOS works with UIKit, SwiftUI, React Native, Expo, and NativeScript apps; Android works through ADB, emulator lifecycle, screenshots, logs, and UIAutomator hierarchy dumps.
SimDeck uses one warm daemon per project. Check it with `simdeck daemon status`; start it or open the browser UI when needed:
@@ -40,6 +40,7 @@ simdeck shutdown
simdeck erase
simdeck core-simulator restart
simdeck install /path/to/App.app
+simdeck install android: /path/to/app.apk
simdeck launch com.example.App
simdeck uninstall com.example.App
simdeck open-url myapp://route
@@ -49,6 +50,12 @@ simdeck toggle-appearance
Build apps with project tooling.
+Android devices use IDs like `android:SimDeck_Pixel_8_API_36`. `simdeck list`
+discovers AVDs from the Android SDK, `boot` starts `emulator -avd ... -no-window`,
+and live browser viewing uses the same WebRTC H.264 endpoint as iOS. Android
+frames come from the emulator gRPC stream and are encoded through VideoToolbox.
+`simdeck stream` is still iOS-only.
+
## Fast Agent Inspection
Use targeted checks for test loops. `describe` is a diagnostic snapshot of the whole hierarchy; it is useful for planning, but it is expensive. For verification, prefer the daemon APIs exposed by `simdeck/test`: `query`, `waitFor`, `assert`, selector `tap`, and `batch`.
@@ -66,6 +73,8 @@ simdeck describe --direct
```
Use `--source auto` with the project daemon. Use `--direct` or `--source native-ax` for the private CoreSimulator accessibility bridge. NativeScript inspector runtime can add richer hierarchy data.
+For Android IDs, `describe` uses `uiautomator dump`; use `--format agent` or
+`--format compact-json` the same way as iOS.
Prefer selectors, coordinates only when needed. Selector taps go through the daemon and wait for the element server-side.