diff --git a/internal/webcodecs/webcodecs.go b/internal/webcodecs/webcodecs.go
new file mode 100644
index 000000000..57eecbb7c
--- /dev/null
+++ b/internal/webcodecs/webcodecs.go
@@ -0,0 +1,45 @@
+package webcodecs
+
+import (
+ "errors"
+
+ "github.com/AlexxIT/go2rtc/internal/api"
+ "github.com/AlexxIT/go2rtc/internal/api/ws"
+ "github.com/AlexxIT/go2rtc/internal/app"
+ "github.com/AlexxIT/go2rtc/internal/streams"
+ "github.com/AlexxIT/go2rtc/pkg/webcodecs"
+ "github.com/rs/zerolog"
+)
+
+func Init() {
+ log = app.GetLogger("webcodecs")
+
+ ws.HandleFunc("webcodecs", handlerWS)
+}
+
+var log zerolog.Logger
+
+func handlerWS(tr *ws.Transport, msg *ws.Message) error { // msg unused; signature required by ws.HandleFunc
+ stream, _ := streams.GetOrPatch(tr.Request.URL.Query())
+ if stream == nil {
+ return errors.New(api.StreamNotFound)
+ }
+
+ cons := webcodecs.NewConsumer(nil) // nil => default medias: H264/H265 video, AAC/OPUS/PCMA/PCMU audio
+ cons.WithRequest(tr.Request)
+
+ if err := stream.AddConsumer(cons); err != nil {
+ log.Debug().Err(err).Msg("[webcodecs] add consumer")
+ return err
+ }
+
+ tr.Write(&ws.Message{Type: "webcodecs", Value: cons.GetInitInfo()}) // JSON init info goes out before any binary frame
+
+ go cons.WriteTo(tr.Writer()) // pumps binary frames until consumer closes; WriteTo error is dropped — TODO confirm intentional
+
+ tr.OnClose(func() {
+ stream.RemoveConsumer(cons)
+ })
+
+ return nil
+}
diff --git a/main.go b/main.go
index 00c059e3e..3c3a159a1 100644
--- a/main.go
+++ b/main.go
@@ -42,6 +42,7 @@ import (
"github.com/AlexxIT/go2rtc/internal/tapo"
"github.com/AlexxIT/go2rtc/internal/tuya"
"github.com/AlexxIT/go2rtc/internal/v4l2"
+ "github.com/AlexxIT/go2rtc/internal/webcodecs"
"github.com/AlexxIT/go2rtc/internal/webrtc"
"github.com/AlexxIT/go2rtc/internal/webtorrent"
"github.com/AlexxIT/go2rtc/internal/wyoming"
@@ -70,9 +71,10 @@ func main() {
{"rtsp", rtsp.Init}, // rtsp source, RTSP server
{"webrtc", webrtc.Init}, // webrtc source, WebRTC server
// Main API
- {"mp4", mp4.Init}, // MP4 API
- {"hls", hls.Init}, // HLS API
- {"mjpeg", mjpeg.Init}, // MJPEG API
+ {"mp4", mp4.Init}, // MP4 API
+ {"webcodecs", webcodecs.Init}, // WebCodecs API
+ {"hls", hls.Init}, // HLS API
+ {"mjpeg", mjpeg.Init}, // MJPEG API
// Other sources and servers
{"hass", hass.Init}, // hass source, Hass API server
{"homekit", homekit.Init}, // homekit source, HomeKit server
diff --git a/pkg/webcodecs/consumer.go b/pkg/webcodecs/consumer.go
new file mode 100644
index 000000000..e67c1de7d
--- /dev/null
+++ b/pkg/webcodecs/consumer.go
@@ -0,0 +1,267 @@
+package webcodecs
+
+import (
+ "encoding/binary"
+ "errors"
+ "io"
+ "sync"
+
+ "github.com/AlexxIT/go2rtc/pkg/aac"
+ "github.com/AlexxIT/go2rtc/pkg/core"
+ "github.com/AlexxIT/go2rtc/pkg/h264"
+ "github.com/AlexxIT/go2rtc/pkg/h264/annexb"
+ "github.com/AlexxIT/go2rtc/pkg/h265"
+ "github.com/pion/rtp"
+)
+
+// Binary frame header (9 bytes):
+// Byte 0: flags (bit7=video, bit6=keyframe, bits0-5=trackID)
+// Byte 1-8: timestamp in microseconds (uint64 BE)
+// Byte 9+: payload
+
+const headerSize = 9
+
+type Consumer struct {
+ core.Connection
+ wr *core.WriteBuffer
+ mu sync.Mutex
+ start bool // gate: drop frames until first video keyframe. NOTE(review): written in handlers and WriteTo outside mu — possible data race, confirm
+
+ UseGOP bool
+}
+
+type InitInfo struct {
+ Video *VideoInfo `json:"video,omitempty"`
+ Audio *AudioInfo `json:"audio,omitempty"`
+}
+
+type VideoInfo struct {
+ Codec string `json:"codec"`
+}
+
+type AudioInfo struct {
+ Codec string `json:"codec"`
+ SampleRate int `json:"sampleRate"`
+ Channels int `json:"channels"`
+}
+
+func NewConsumer(medias []*core.Media) *Consumer {
+ if medias == nil {
+ medias = []*core.Media{
+ {
+ Kind: core.KindVideo,
+ Direction: core.DirectionSendonly,
+ Codecs: []*core.Codec{
+ {Name: core.CodecH264},
+ {Name: core.CodecH265},
+ },
+ },
+ {
+ Kind: core.KindAudio,
+ Direction: core.DirectionSendonly,
+ Codecs: []*core.Codec{
+ {Name: core.CodecAAC},
+ {Name: core.CodecOpus},
+ {Name: core.CodecPCMA},
+ {Name: core.CodecPCMU},
+ },
+ },
+ }
+ }
+
+ wr := core.NewWriteBuffer(nil)
+ return &Consumer{
+ Connection: core.Connection{
+ ID: core.NewID(),
+ FormatName: "webcodecs",
+ Medias: medias,
+ Transport: wr,
+ },
+ wr: wr,
+ }
+}
+
+func (c *Consumer) AddTrack(media *core.Media, _ *core.Codec, track *core.Receiver) error {
+ trackID := byte(len(c.Senders))
+
+ codec := track.Codec.Clone()
+ handler := core.NewSender(media, codec)
+
+ switch track.Codec.Name {
+ case core.CodecH264:
+ clockRate := codec.ClockRate
+ handler.Handler = func(packet *rtp.Packet) {
+ keyframe := h264.IsKeyframe(packet.Payload)
+ if !c.start {
+ if !keyframe {
+ return
+ }
+ c.start = true
+ }
+
+ payload := annexb.DecodeAVCC(packet.Payload, true)
+ flags := byte(0x80) | trackID // video flag
+ if keyframe {
+ flags |= 0x40 // keyframe flag
+ }
+
+ c.mu.Lock()
+ msg := buildFrame(flags, rtpToMicroseconds(packet.Timestamp, clockRate), payload)
+ if n, err := c.wr.Write(msg); err == nil {
+ c.Send += n
+ }
+ c.mu.Unlock()
+ }
+
+ if track.Codec.IsRTP() {
+ handler.Handler = h264.RTPDepay(track.Codec, handler.Handler)
+ } else {
+ handler.Handler = h264.RepairAVCC(track.Codec, handler.Handler)
+ }
+
+ case core.CodecH265:
+ clockRate := codec.ClockRate
+ handler.Handler = func(packet *rtp.Packet) {
+ keyframe := h265.IsKeyframe(packet.Payload)
+ if !c.start {
+ if !keyframe {
+ return
+ }
+ c.start = true
+ }
+
+ payload := annexb.DecodeAVCC(packet.Payload, true)
+ flags := byte(0x80) | trackID // video flag
+ if keyframe {
+ flags |= 0x40 // keyframe flag
+ }
+
+ c.mu.Lock()
+ msg := buildFrame(flags, rtpToMicroseconds(packet.Timestamp, clockRate), payload)
+ if n, err := c.wr.Write(msg); err == nil {
+ c.Send += n
+ }
+ c.mu.Unlock()
+ }
+
+ if track.Codec.IsRTP() {
+ handler.Handler = h265.RTPDepay(track.Codec, handler.Handler)
+ } else {
+ handler.Handler = h265.RepairAVCC(track.Codec, handler.Handler)
+ }
+
+ default:
+ clockRate := codec.ClockRate
+ handler.Handler = func(packet *rtp.Packet) {
+ if !c.start {
+ return
+ }
+
+ flags := trackID // audio flag (bit7=0)
+
+ c.mu.Lock()
+ msg := buildFrame(flags, rtpToMicroseconds(packet.Timestamp, clockRate), packet.Payload)
+ if n, err := c.wr.Write(msg); err == nil {
+ c.Send += n
+ }
+ c.mu.Unlock()
+ }
+
+ switch track.Codec.Name {
+ case core.CodecAAC:
+ if track.Codec.IsRTP() {
+ handler.Handler = aac.RTPDepay(handler.Handler)
+ }
+ case core.CodecOpus, core.CodecPCMA, core.CodecPCMU:
+ // pass through directly — WebCodecs decodes these natively
+ default:
+ handler.Handler = nil
+ }
+ }
+
+ if handler.Handler == nil {
+ s := "webcodecs: unsupported codec: " + track.Codec.String()
+ println(s)
+ return errors.New(s)
+ }
+
+ handler.HandleRTP(track)
+ c.Senders = append(c.Senders, handler)
+
+ return nil
+}
+
+func (c *Consumer) GetInitInfo() *InitInfo {
+ info := &InitInfo{}
+
+ for _, sender := range c.Senders {
+ codec := sender.Codec
+ switch codec.Name {
+ case core.CodecH264:
+ info.Video = &VideoInfo{
+ Codec: "avc1." + h264.GetProfileLevelID(codec.FmtpLine),
+ }
+ case core.CodecH265:
+ info.Video = &VideoInfo{
+ Codec: "hvc1.1.6.L153.B0",
+ }
+ case core.CodecAAC:
+ channels := int(codec.Channels)
+ if channels == 0 {
+ channels = 1
+ }
+ info.Audio = &AudioInfo{
+ Codec: "mp4a.40.2",
+ SampleRate: int(codec.ClockRate),
+ Channels: channels,
+ }
+ case core.CodecOpus:
+ channels := int(codec.Channels)
+ if channels == 0 {
+ channels = 2
+ }
+ info.Audio = &AudioInfo{
+ Codec: "opus",
+ SampleRate: int(codec.ClockRate),
+ Channels: channels,
+ }
+ case core.CodecPCMA:
+ info.Audio = &AudioInfo{
+ Codec: "alaw",
+ SampleRate: int(codec.ClockRate),
+ Channels: 1,
+ }
+ case core.CodecPCMU:
+ info.Audio = &AudioInfo{
+ Codec: "ulaw",
+ SampleRate: int(codec.ClockRate),
+ Channels: 1,
+ }
+ }
+ }
+
+ return info
+}
+
+func (c *Consumer) WriteTo(wr io.Writer) (int64, error) {
+ if len(c.Senders) == 1 && c.Senders[0].Codec.IsAudio() {
+ c.start = true
+ }
+
+ return c.wr.WriteTo(wr)
+}
+
+func buildFrame(flags byte, timestamp uint64, payload []byte) []byte { // serializes one wire frame: 9-byte header + payload
+ msg := make([]byte, headerSize+len(payload))
+ msg[0] = flags // bit7=video, bit6=keyframe, bits0-5=trackID (see header comment above)
+ binary.BigEndian.PutUint64(msg[1:9], timestamp) // microseconds, big-endian
+ copy(msg[headerSize:], payload)
+ return msg
+}
+
+func rtpToMicroseconds(timestamp uint32, clockRate uint32) uint64 { // RTP ticks -> microseconds; no overflow: 2^32 * 1e6 < 2^64
+ if clockRate == 0 {
+ return uint64(timestamp) // defensive: pass ticks through rather than divide by zero
+ }
+ return uint64(timestamp) * 1_000_000 / uint64(clockRate)
+}
diff --git a/www/links.html b/www/links.html
index 13e08edfd..7984b9d49 100644
--- a/www/links.html
+++ b/www/links.html
@@ -55,6 +55,7 @@
Any codec in source
H264/H265 source
stream.html WebRTC stream / browsers: all / codecs: H264, PCMU, PCMA, OPUS / +H265 in Safari
stream.html MSE stream / browsers: Chrome, Firefox, Safari Mac/iPad / codecs: H264, H265*, AAC, PCMA*, PCMU*, PCM* / +OPUS in Chrome and Firefox
+ stream.html WebCodecs stream / browsers: Chrome, Edge, Firefox, Safari Mac/iPad / codecs: H264, H265, AAC, OPUS, PCMA, PCMU
stream.mp4 legacy MP4 stream with AAC audio / browsers: Chrome, Firefox / codecs: H264, H265*, AAC
stream.mp4 modern MP4 stream with common audio / browsers: Chrome, Firefox / codecs: H264, H265*, AAC, FLAC (PCMA, PCMU, PCM)
stream.mp4 MP4 stream with any audio / browsers: Chrome / codecs: H264, H265*, AAC, OPUS, MP3, FLAC (PCMA, PCMU, PCM)
diff --git a/www/stream.html b/www/stream.html
index de7ad1235..9f44c87ca 100644
--- a/www/stream.html
+++ b/www/stream.html
@@ -50,12 +50,14 @@
const background = params.get('background') !== 'false';
const width = '1 0 ' + (params.get('width') || '320px');
+ const renderer = params.get('renderer');
for (let i = 0; i < streams.length; i++) {
/** @type {VideoStream} */
const video = document.createElement('video-stream');
video.background = background;
video.mode = modes[i] || video.mode;
+ if (renderer) video.renderer = renderer;
video.style.flex = width;
video.src = new URL('api/ws?src=' + encodeURIComponent(streams[i]), location.href);
document.body.appendChild(video);
diff --git a/www/video-renderer.js b/www/video-renderer.js
new file mode 100644
index 000000000..2b4249bd9
--- /dev/null
+++ b/www/video-renderer.js
@@ -0,0 +1,431 @@
+/**
+ * VideoRenderer — Cascading VideoFrame renderer: WebGPU → WebGL2 → Canvas 2D.
+ *
+ * Each tier uses its own canvas to avoid context-type locking.
+ * Automatically initializes on first frame and downgrades on errors.
+ *
+ * Usage:
+ * const renderer = new VideoRenderer(container, {cascade: 'webgpu,webgl,2d'});
+ * // in VideoDecoder output callback:
+ * renderer.draw(frame);
+ * frame.close();
+ * // cleanup:
+ * renderer.destroy();
+ */
+
+const TIER_WEBGPU = 0;
+const TIER_WEBGL2 = 1;
+const TIER_CANVAS2D = 2;
+const TIER_NAMES = ['WebGPU', 'WebGL2', 'Canvas2D'];
+const TIER_MAP = {webgpu: TIER_WEBGPU, webgl: TIER_WEBGL2, '2d': TIER_CANVAS2D};
+
+const WGSL_VERTEX = `
+struct Out { @builtin(position) pos: vec4f, @location(0) uv: vec2f }
+@vertex fn main(@builtin(vertex_index) i: u32) -> Out {
+ var p = array(
+ vec2f( 1, 1), vec2f( 1,-1), vec2f(-1,-1),
+ vec2f( 1, 1), vec2f(-1,-1), vec2f(-1, 1));
+ var u = array(
+ vec2f(1,0), vec2f(1,1), vec2f(0,1),
+ vec2f(1,0), vec2f(0,1), vec2f(0,0));
+ return Out(vec4f(p[i],0,1), u[i]);
+}`;
+
+const WGSL_FRAGMENT = `
+@group(0) @binding(0) var s: sampler;
+@group(0) @binding(1) var t: texture_external;
+@fragment fn main(@location(0) uv: vec2f) -> @location(0) vec4f {
+ return textureSampleBaseClampToEdge(t, s, uv);
+}`;
+
+class WebGPUTier {
+ constructor(canvas) {
+ this.canvas = canvas;
+ this.device = null;
+ this.ctx = null;
+ this.pipeline = null;
+ this.sampler = null;
+ this.format = null;
+ }
+
+ async init() {
+ if (!navigator.gpu) { VideoRenderer.log('WebGPU: API not available'); return false; }
+ try {
+ VideoRenderer.log('WebGPU: requesting adapter...');
+ const adapter = await navigator.gpu.requestAdapter();
+ if (!adapter) { VideoRenderer.log('WebGPU: no adapter available'); return false; }
+ const info = adapter.info || {};
+ VideoRenderer.log('WebGPU: adapter:', info.vendor || '?', info.architecture || '?', info.description || '');
+
+ this.device = await adapter.requestDevice();
+ this.format = navigator.gpu.getPreferredCanvasFormat();
+
+ this.ctx = this.canvas.getContext('webgpu');
+ if (!this.ctx) {
+ VideoRenderer.log('WebGPU: getContext("webgpu") returned null');
+ this.device.destroy(); this.device = null; return false;
+ }
+ this.ctx.configure({device: this.device, format: this.format, alphaMode: 'opaque'});
+
+ this.pipeline = this.device.createRenderPipeline({
+ layout: 'auto',
+ vertex: {
+ module: this.device.createShaderModule({code: WGSL_VERTEX}),
+ entryPoint: 'main',
+ },
+ fragment: {
+ module: this.device.createShaderModule({code: WGSL_FRAGMENT}),
+ entryPoint: 'main',
+ targets: [{format: this.format}],
+ },
+ primitive: {topology: 'triangle-list'},
+ });
+
+ this.sampler = this.device.createSampler({magFilter: 'linear', minFilter: 'linear'});
+ VideoRenderer.log('WebGPU: initialized, format:', this.format);
+ return true;
+ } catch (e) {
+ VideoRenderer.log('WebGPU: init failed:', e.message || e);
+ this.device = null; this.ctx = null;
+ return false;
+ }
+ }
+
+ draw(frame, w, h) {
+ if (this.canvas.width !== w || this.canvas.height !== h) {
+ this.canvas.width = w; this.canvas.height = h;
+ }
+ const bind = this.device.createBindGroup({
+ layout: this.pipeline.getBindGroupLayout(0),
+ entries: [
+ {binding: 0, resource: this.sampler},
+ {binding: 1, resource: this.device.importExternalTexture({source: frame})},
+ ],
+ });
+ const enc = this.device.createCommandEncoder();
+ const pass = enc.beginRenderPass({colorAttachments: [{
+ view: this.ctx.getCurrentTexture().createView(),
+ loadOp: 'clear', storeOp: 'store',
+ }]});
+ pass.setPipeline(this.pipeline);
+ pass.setBindGroup(0, bind);
+ pass.draw(6);
+ pass.end();
+ this.device.queue.submit([enc.finish()]);
+ }
+
+ destroy() {
+ try { if (this.device) this.device.destroy(); } catch {}
+ this.device = null; this.ctx = null; this.pipeline = null; this.sampler = null;
+ }
+}
+
+const GLSL_VERTEX = `#version 300 es
+out vec2 vUV;
+void main() {
+ float x = float(gl_VertexID & 1) * 2.0;
+ float y = float(gl_VertexID & 2);
+ vUV = vec2(x, 1.0 - y);
+ gl_Position = vec4(vUV * 2.0 - 1.0, 0.0, 1.0);
+ vUV.y = 1.0 - vUV.y;
+}`;
+
+const GLSL_FRAGMENT = `#version 300 es
+precision mediump float;
+in vec2 vUV;
+uniform sampler2D uTex;
+out vec4 c;
+void main() { c = texture(uTex, vUV); }`;
+
+class WebGL2Tier {
+ constructor(canvas) {
+ this.canvas = canvas;
+ this.gl = null;
+ this.program = null;
+ this.texture = null;
+ this.lastW = 0;
+ this.lastH = 0;
+ }
+
+ init() {
+ try {
+ this.gl = this.canvas.getContext('webgl2', {
+ alpha: false, desynchronized: true, antialias: false,
+ powerPreference: 'high-performance',
+ });
+ } catch (e) { VideoRenderer.log('WebGL2: getContext threw:', e.message || e); this.gl = null; }
+ if (!this.gl) { VideoRenderer.log('WebGL2: not available'); return false; }
+
+ const gl = this.gl;
+
+ const vs = gl.createShader(gl.VERTEX_SHADER);
+ gl.shaderSource(vs, GLSL_VERTEX);
+ gl.compileShader(vs);
+ if (!gl.getShaderParameter(vs, gl.COMPILE_STATUS)) {
+ VideoRenderer.log('WebGL2: vertex shader error:', gl.getShaderInfoLog(vs));
+ gl.deleteShader(vs); this.gl = null; return false;
+ }
+
+ const fs = gl.createShader(gl.FRAGMENT_SHADER);
+ gl.shaderSource(fs, GLSL_FRAGMENT);
+ gl.compileShader(fs);
+ if (!gl.getShaderParameter(fs, gl.COMPILE_STATUS)) {
+ VideoRenderer.log('WebGL2: fragment shader error:', gl.getShaderInfoLog(fs));
+ gl.deleteShader(vs); gl.deleteShader(fs); this.gl = null; return false;
+ }
+
+ this.program = gl.createProgram();
+ gl.attachShader(this.program, vs);
+ gl.attachShader(this.program, fs);
+ gl.linkProgram(this.program);
+ gl.deleteShader(vs);
+ gl.deleteShader(fs);
+
+ if (!gl.getProgramParameter(this.program, gl.LINK_STATUS)) {
+ VideoRenderer.log('WebGL2: program link error:', gl.getProgramInfoLog(this.program));
+ gl.deleteProgram(this.program);
+ this.gl = null; this.program = null; return false;
+ }
+
+ gl.useProgram(this.program);
+ this.texture = gl.createTexture();
+ gl.activeTexture(gl.TEXTURE0);
+ gl.bindTexture(gl.TEXTURE_2D, this.texture);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
+ gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
+ gl.uniform1i(gl.getUniformLocation(this.program, 'uTex'), 0);
+
+ const dbg = gl.getExtension('WEBGL_debug_renderer_info');
+ const gpuName = dbg ? gl.getParameter(dbg.UNMASKED_RENDERER_WEBGL) : gl.getParameter(gl.RENDERER);
+ VideoRenderer.log('WebGL2: initialized, GPU:', gpuName);
+ return true;
+ }
+
+ draw(frame, w, h) {
+ const gl = this.gl;
+ if (w !== this.lastW || h !== this.lastH) {
+ this.canvas.width = w; this.canvas.height = h;
+ this.lastW = w; this.lastH = h;
+ gl.viewport(0, 0, w, h);
+ }
+ gl.useProgram(this.program);
+ gl.bindTexture(gl.TEXTURE_2D, this.texture);
+ gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, frame);
+ gl.drawArrays(gl.TRIANGLES, 0, 3);
+ }
+
+ destroy() {
+ try {
+ if (this.gl) {
+ if (this.texture) this.gl.deleteTexture(this.texture);
+ if (this.program) this.gl.deleteProgram(this.program);
+ const ext = this.gl.getExtension('WEBGL_lose_context');
+ if (ext) ext.loseContext();
+ }
+ } catch {}
+ this.gl = null; this.program = null; this.texture = null;
+ }
+}
+
+class Canvas2DTier {
+ constructor(canvas) {
+ this.canvas = canvas;
+ this.ctx = null;
+ }
+
+ init() { return true; }
+
+ draw(frame, w, h) {
+ if (this.canvas.width !== w) this.canvas.width = w;
+ if (this.canvas.height !== h) this.canvas.height = h;
+ if (!this.ctx) this.ctx = this.canvas.getContext('2d');
+ if (this.ctx) this.ctx.drawImage(frame, 0, 0, w, h);
+ }
+
+ destroy() { this.ctx = null; }
+}
+
+export class VideoRenderer {
+ static log(msg, ...args) {
+ console.debug('[WebCodecs]', msg, ...args);
+ }
+
+ /**
+ * @param {HTMLElement} container — element to insert canvases into
+ * @param {object} [options]
+ * @param {string} [options.cascade='webgpu,webgl,2d'] — comma-separated tier names
+ * @param {string} [options.canvasStyle] — CSS for created canvases
+ */
+ constructor(container, options = {}) {
+ const cascade = options.cascade || 'webgpu,webgl,2d';
+ const canvasStyle = options.canvasStyle || 'display:block;max-width:100%;max-height:100%';
+
+ this._container = container;
+ this._allowedTiers = cascade.split(',').map(s => TIER_MAP[s.trim()]).filter(t => t !== undefined);
+ this._tier = -1; // not initialized
+ this._initPromise = null;
+ this._destroyed = false;
+
+ const makeCanvas = () => {
+ const c = document.createElement('canvas');
+ c.style.cssText = canvasStyle;
+ return c;
+ };
+
+ // Each tier gets its own canvas
+ this._tiers = {
+ [TIER_WEBGPU]: new WebGPUTier(makeCanvas()),
+ [TIER_WEBGL2]: new WebGL2Tier(makeCanvas()),
+ [TIER_CANVAS2D]: new Canvas2DTier(makeCanvas()),
+ };
+
+ this._activeCanvas = null;
+
+ // WebGL2 context loss/restore
+ const gl2Canvas = this._tiers[TIER_WEBGL2].canvas;
+ gl2Canvas.addEventListener('webglcontextlost', (e) => {
+ e.preventDefault();
+ if (this._tier === TIER_WEBGL2) {
+ this._tiers[TIER_WEBGL2].destroy();
+ this._tier = this._nextAllowed(TIER_CANVAS2D);
+ this._swapCanvas(this._tier);
+ VideoRenderer.log('WebGL2 context lost, fallback to', TIER_NAMES[this._tier]);
+ }
+ });
+ gl2Canvas.addEventListener('webglcontextrestored', () => {
+ if (this._tier > TIER_WEBGL2 && this._allowedTiers.includes(TIER_WEBGL2)) {
+ if (this._tiers[TIER_WEBGL2].init()) {
+ this._tier = TIER_WEBGL2;
+ this._swapCanvas(TIER_WEBGL2);
+ VideoRenderer.log('WebGL2 context restored');
+ }
+ }
+ });
+
+ VideoRenderer.log('cascade:', this._allowedTiers.map(t => TIER_NAMES[t]).join(' → '));
+ }
+
+ /** @returns {string} current tier name or 'none' */
+ get currentTier() {
+ return this._tier >= 0 ? TIER_NAMES[this._tier] : 'none';
+ }
+
+ /** @returns {HTMLCanvasElement|null} currently active canvas */
+ get canvas() {
+ return this._activeCanvas;
+ }
+
+ /**
+ * Draw a VideoFrame. Does NOT close the frame — caller is responsible.
+ * @param {VideoFrame} frame
+ */
+ draw(frame) {
+ if (this._destroyed) return;
+ const w = frame.displayWidth, h = frame.displayHeight;
+ if (w === 0 || h === 0) return;
+
+ // Already initialized — fast path
+ if (this._tier >= 0) {
+ try {
+ this._tiers[this._tier].draw(frame, w, h);
+ } catch (e) {
+ this._downgrade(e);
+ try {
+ this._tiers[this._tier].draw(frame, w, h);
+ } catch (e2) {
+ this._tier = TIER_CANVAS2D;
+ this._swapCanvas(TIER_CANVAS2D);
+ VideoRenderer.log('renderer error, fallback to Canvas2D:', e2.message || e2);
+ this._tiers[TIER_CANVAS2D].draw(frame, w, h);
+ }
+ }
+ return;
+ }
+
+ // Async init in progress — use Canvas2D temporarily
+ if (this._initPromise) {
+ this._tiers[TIER_CANVAS2D].draw(frame, w, h);
+ if (!this._activeCanvas) this._swapCanvas(TIER_CANVAS2D);
+ return;
+ }
+
+ // First frame — initialize
+ VideoRenderer.log('first frame, resolution:', w + 'x' + h, '— initializing...');
+ const first = this._allowedTiers[0] ?? TIER_CANVAS2D;
+
+ if (first === TIER_WEBGPU && navigator.gpu) {
+ this._initPromise = this._tiers[TIER_WEBGPU].init().then(ok => {
+ if (this._destroyed) return;
+ if (ok) {
+ this._tier = TIER_WEBGPU;
+ this._swapCanvas(TIER_WEBGPU);
+ } else {
+ this._initSync(TIER_WEBGL2);
+ }
+ VideoRenderer.log('renderer ready:', TIER_NAMES[this._tier]);
+ this._initPromise = null;
+ });
+ // Render first frames with Canvas2D while WebGPU inits
+ this._tiers[TIER_CANVAS2D].draw(frame, w, h);
+ this._swapCanvas(TIER_CANVAS2D);
+ } else {
+ this._initSync(first === TIER_WEBGPU ? TIER_WEBGL2 : first);
+ VideoRenderer.log('renderer ready:', TIER_NAMES[this._tier]);
+ this._tiers[this._tier].draw(frame, w, h);
+ this._swapCanvas(this._tier);
+ }
+ }
+
+ destroy() {
+ this._destroyed = true;
+ for (const tier of Object.values(this._tiers)) {
+ tier.destroy();
+ if (tier.canvas.parentElement) tier.canvas.remove();
+ }
+ this._activeCanvas = null;
+ this._tier = -1;
+ }
+
+ _nextAllowed(minTier) {
+ for (const t of this._allowedTiers) { if (t >= minTier) return t; }
+ return TIER_CANVAS2D;
+ }
+
+ _initSync(startTier) {
+ const tryGL = this._allowedTiers.includes(TIER_WEBGL2);
+ if (startTier <= TIER_WEBGL2 && tryGL && this._tiers[TIER_WEBGL2].init()) {
+ this._tier = TIER_WEBGL2;
+ } else {
+ this._tier = this._nextAllowed(TIER_CANVAS2D);
+ }
+ }
+
+ _downgrade(error) {
+ const oldTier = this._tier;
+ this._tier = this._nextAllowed(oldTier + 1);
+ if (this._tier === TIER_WEBGL2 && !this._tiers[TIER_WEBGL2].gl && !this._tiers[TIER_WEBGL2].init()) {
+ this._tier = this._nextAllowed(TIER_CANVAS2D);
+ }
+ this._swapCanvas(this._tier);
+ VideoRenderer.log('renderer error, downgrade:', TIER_NAMES[oldTier], '→', TIER_NAMES[this._tier], error.message || error);
+ }
+
+ _swapCanvas(tier) {
+ const target = this._tiers[tier]?.canvas;
+ if (!target || target === this._activeCanvas) return;
+
+ if (this._activeCanvas) {
+ this._activeCanvas.style.display = 'none';
+ }
+
+ if (target.parentElement !== this._container) {
+ // Insert before the first hidden canvas or at the end
+ const ref = this._activeCanvas || null;
+ this._container.insertBefore(target, ref);
+ }
+ target.style.display = '';
+ this._activeCanvas = target;
+ }
+}
diff --git a/www/video-rtc.js b/www/video-rtc.js
index 953fdae66..d488b2739 100644
--- a/www/video-rtc.js
+++ b/www/video-rtc.js
@@ -1,3 +1,5 @@
+import {WebCodecsPlayer} from './video-webcodecs.js';
+
/**
* VideoRTC v1.6.0 - Video player for go2rtc streaming application.
*
@@ -33,11 +35,18 @@ export class VideoRTC extends HTMLElement {
];
/**
- * [config] Supported modes (webrtc, webrtc/tcp, mse, hls, mp4, mjpeg).
+ * [config] Supported modes (webrtc, webrtc/tcp, mse, hls, mp4, mjpeg, webcodecs).
* @type {string}
*/
this.mode = 'webrtc,mse,hls,mjpeg';
+ /**
+ * [config] Renderer cascade for WebCodecs (webgpu, webgl, 2d).
+ * Order defines priority. Default: try all in order.
+ * @type {string}
+ */
+ this.renderer = 'webgpu,webgl,2d';
+
/**
* [Config] Requested medias (video, audio, microphone).
* @type {string}
@@ -338,6 +347,13 @@ export class VideoRTC extends HTMLElement {
this.pc = null;
}
+ // cleanup WebCodecs resources
+ if (this._wc) {
+ this._wc.destroy();
+ this._wc = null;
+ }
+ this.video.style.display = 'block';
+
this.video.src = '';
this.video.srcObject = null;
}
@@ -365,7 +381,10 @@ export class VideoRTC extends HTMLElement {
const modes = [];
- if (this.mode.includes('mse') && ('MediaSource' in window || 'ManagedMediaSource' in window)) {
+ if (this.mode.includes('webcodecs') && 'VideoDecoder' in window) {
+ modes.push('webcodecs');
+ this.onwebcodecs();
+ } else if (this.mode.includes('mse') && ('MediaSource' in window || 'ManagedMediaSource' in window)) {
modes.push('mse');
this.onmse();
} else if (this.mode.includes('hls') && this.video.canPlayType('application/vnd.apple.mpegurl')) {
@@ -501,6 +520,35 @@ export class VideoRTC extends HTMLElement {
};
}
+ onwebcodecsready() {}
+
+ onwebcodecserror(error) {}
+
+ onwebcodecs() {
+ this._wc = new WebCodecsPlayer(this, {
+ cascade: this.renderer,
+ media: this.media,
+ });
+
+ this.onmessage['webcodecs'] = msg => {
+ if (msg.type !== 'webcodecs') return;
+ this._wc.init(msg.value).then(result => {
+ if (result.error) {
+ this.onwebcodecserror(result.error);
+ } else {
+ this.video.style.display = 'none';
+ this.onwebcodecsready();
+ }
+ });
+ };
+
+ this.ondata = data => {
+ this._wc.feed(data);
+ };
+
+ this.send({type: 'webcodecs', value: ''});
+ }
+
onwebrtc() {
const pc = new RTCPeerConnection(this.pcConfig);
diff --git a/www/video-stream.js b/www/video-stream.js
index 5b7c1ea58..03b0c6841 100644
--- a/www/video-stream.js
+++ b/www/video-stream.js
@@ -79,6 +79,8 @@ class VideoStream extends VideoRTC {
case 'mjpeg':
this.divMode = msg.type.toUpperCase();
break;
+ case 'webcodecs':
+ break;
}
};
@@ -98,6 +100,14 @@ class VideoStream extends VideoRTC {
this.divMode = 'RTC';
}
}
+
+ onwebcodecsready() {
+ this.divMode = 'WEBCODECS';
+ }
+
+ onwebcodecserror(error) {
+ this.divError = error;
+ }
}
customElements.define('video-stream', VideoStream);
diff --git a/www/video-webcodecs.js b/www/video-webcodecs.js
new file mode 100644
index 000000000..a082b938f
--- /dev/null
+++ b/www/video-webcodecs.js
@@ -0,0 +1,341 @@
+import {VideoRenderer} from './video-renderer.js';
+
+/**
+ * WebCodecsPlayer — handles video/audio decoding, controls, and rendering.
+ *
+ * Usage:
+ * const player = new WebCodecsPlayer(parentElement, {cascade, media});
+ * const result = player.init(info); // {error?: string}
+ * player.feed(data); // binary frame
+ * player.unmute(); // start audio (user gesture)
+ * player.destroy();
+ */
+
// Per-frame binary header size: 1 flag byte + 8-byte big-endian timestamp
// (layout consumed in WebCodecsPlayer.feed()).
const HEADER_SIZE = 9;
+
+// SVG icon paths (24x24 viewBox)
+const ICONS = {
+ play: 'M8 5v14l11-7z',
+ pause: 'M6 19h4V5H6v14zm8-14v14h4V5h-4z',
+ volume: 'M3 9v6h4l5 5V4L7 9H3zm13.5 3c0-1.77-1.02-3.29-2.5-4.03v8.05c1.48-.73 2.5-2.25 2.5-4.02zM14 3.23v2.06c2.89.86 5 3.54 5 6.71s-2.11 5.85-5 6.71v2.06c4.01-.91 7-4.49 7-8.77s-2.99-7.86-7-8.77z',
+ muted: 'M16.5 12c0-1.77-1.02-3.29-2.5-4.03v2.21l2.45 2.45c.03-.2.05-.41.05-.63zm2.5 0c0 .94-.2 1.82-.54 2.64l1.51 1.51C20.63 14.91 21 13.5 21 12c0-4.28-2.99-7.86-7-8.77v2.06c2.89.86 5 3.54 5 6.71zM4.27 3L3 4.27 7.73 9H3v6h4l5 5v-6.73l4.25 4.25c-.67.52-1.42.93-2.25 1.18v2.06c1.38-.31 2.63-.95 3.69-1.81L19.73 21 21 19.73l-9-9L4.27 3zM12 4L9.91 6.09 12 8.18V4z',
+ fs: 'M7 14H5v5h5v-2H7v-3zm-2-4h2V7h3V5H5v5zm12 7h-3v2h5v-5h-2v3zM14 5v2h3v3h2V5h-5z',
+ fsExit: 'M5 16h3v3h2v-5H5v2zm3-8H5v2h5V5H8v3zm6 11h2v-3h3v-2h-5v5zm2-11V5h-2v5h5V8h-3z',
+};
+
/**
 * Wrap a 24x24-viewBox path definition in a white inline SVG element.
 * NOTE(review): the original body returned an empty template literal and
 * ignored `path` — the markup was evidently lost; every control button
 * would have rendered blank. Reconstructed from the ICONS comment above.
 * @param {string} path — SVG path `d` attribute data
 * @returns {string} inline SVG markup
 */
function svgIcon(path) {
    return `<svg viewBox="0 0 24 24" width="24" height="24" fill="#fff"><path d="${path}"/></svg>`;
}
+
export class WebCodecsPlayer {
    /**
     * @param {HTMLElement} parent — element to insert container into
     * @param {object} [options]
     * @param {string} [options.cascade='webgpu,webgl,2d'] — renderer cascade
     * @param {string} [options.media='video,audio'] — requested media types
     */
    constructor(parent, options = {}) {
        this._parent = parent;
        this._media = options.media || 'video,audio';
        this._paused = false;
        this._muted = true;

        // Decoders / audio graph. The audio decoder is created lazily in
        // _startAudio() because AudioContext requires a user gesture.
        this._videoDecoder = null;
        this._audioDecoder = null;
        this._audioCtx = null;
        this._gainNode = null;
        this._audioInfo = null;
        this._audioStarted = false;

        // Document-level fullscreenchange listener; removed in destroy()
        // so destroyed players don't leak handlers (fix: original never removed it).
        this._onFSChange = null;

        // Build DOM
        this._container = this._createContainer();
        this._renderer = new VideoRenderer(this._container, {
            cascade: options.cascade || 'webgpu,webgl,2d',
            canvasStyle: 'display:block;max-width:100%;max-height:100%',
        });
    }

    /**
     * Initialize decoders with server info.
     * @param {{video?: {codec: string}, audio?: {codec: string, sampleRate: number, channels: number}}} info
     * @returns {Promise<{error?: string}>} resolves with {error} on failure, {} on success
     */
    async init(info) {
        VideoRenderer.log('init:', info.video ? 'video=' + info.video.codec : 'no video',
            info.audio ? 'audio=' + info.audio.codec + ' ' + info.audio.sampleRate + 'Hz' : 'no audio');

        if (info.video) {
            const config = {codec: info.video.codec, optimizeForLatency: true};
            try {
                const support = await VideoDecoder.isConfigSupported(config);
                if (!support.supported) {
                    VideoRenderer.log('VideoDecoder: codec not supported:', info.video.codec);
                    return {error: 'video codec not supported: ' + info.video.codec};
                }
                this._videoDecoder = new VideoDecoder({
                    output: frame => {
                        this._renderer.draw(frame);
                        frame.close(); // release immediately — decoded frames hold GPU/CPU memory
                    },
                    error: err => VideoRenderer.log('VideoDecoder error:', err),
                });
                this._videoDecoder.configure(support.config);
                VideoRenderer.log('VideoDecoder: configured', info.video.codec);
            } catch (err) {
                VideoRenderer.log('VideoDecoder: config check failed:', err.message || err);
                return {error: err.message || String(err)};
            }
        }

        if (info.audio && this._media.includes('audio')) {
            // Just remember the config; the decoder starts on first unmute (user gesture).
            this._audioInfo = info.audio;
            this._audioStarted = false;
        } else {
            this._hideAudioControls();
        }

        this._parent.insertBefore(this._container, this._parent.firstChild);
        return {};
    }

    /**
     * Feed a binary frame from WebSocket.
     * Layout: byte 0 = flags (0x80 video, 0x40 keyframe), bytes 1-8 =
     * big-endian u64 timestamp, remainder = encoded payload.
     * @param {ArrayBuffer} data
     */
    feed(data) {
        if (this._paused || data.byteLength < HEADER_SIZE) return;

        const view = new DataView(data);
        const flags = view.getUint8(0);
        const isVideo = (flags & 0x80) !== 0;
        const isKeyframe = (flags & 0x40) !== 0;
        const timestamp = Number(view.getBigUint64(1));
        const payload = new Uint8Array(data, HEADER_SIZE);

        if (isVideo && this._videoDecoder?.state === 'configured') {
            this._videoDecoder.decode(new EncodedVideoChunk({
                type: isKeyframe ? 'key' : 'delta',
                timestamp,
                data: payload,
            }));
        } else if (!isVideo && this._audioDecoder?.state === 'configured') {
            // Audio chunks are always decodable independently — mark as key.
            this._audioDecoder.decode(new EncodedAudioChunk({
                type: 'key',
                timestamp,
                data: payload,
            }));
        }
    }

    /** Start audio playback. Call from a user gesture (click handler). */
    unmute() {
        this._muted = false;
        this._startAudio();
        this._updateVolume();
    }

    /** Stop audio playback. */
    mute() {
        this._muted = true;
        this._updateVolume();
    }

    /** @returns {boolean} */
    get paused() { return this._paused; }

    /** @returns {boolean} */
    get muted() { return this._muted; }

    /** @returns {HTMLElement} */
    get container() { return this._container; }

    /** Tear down decoders, audio graph, renderer, DOM, and document listeners. */
    destroy() {
        if (this._onFSChange) {
            // Fix: remove the fullscreenchange listener added in _createContainer();
            // otherwise every destroyed player leaks a handler (and its DOM tree).
            document.removeEventListener('fullscreenchange', this._onFSChange);
            this._onFSChange = null;
        }
        if (this._videoDecoder) {
            try { this._videoDecoder.close(); } catch {}
            this._videoDecoder = null;
        }
        if (this._audioDecoder) {
            try { this._audioDecoder.close(); } catch {}
            this._audioDecoder = null;
        }
        this._gainNode = null;
        if (this._audioCtx) {
            try { this._audioCtx.close(); } catch {}
            this._audioCtx = null;
        }
        this._audioInfo = null;
        this._audioStarted = false;
        if (this._renderer) {
            this._renderer.destroy();
            this._renderer = null;
        }
        if (this._container?.parentElement) {
            this._container.remove();
        }
    }

    /**
     * Build the container element with the overlay control bar
     * (play/pause, LIVE label, mute, volume, fullscreen).
     * @returns {HTMLElement}
     */
    _createContainer() {
        const container = document.createElement('div');
        container.style.cssText = 'position:relative;width:100%;height:100%;background:#000;' +
            'display:flex;align-items:center;justify-content:center;overflow:hidden';

        // Controls fade in on hover (mouse) or toggle on tap (touch).
        const controls = document.createElement('div');
        controls.style.cssText = 'position:absolute;bottom:0;left:0;right:0;display:flex;' +
            'align-items:center;gap:4px;padding:4px 8px;background:rgba(23,23,23,.85);' +
            'opacity:0;transition:opacity .3s;user-select:none;z-index:1;height:36px;box-sizing:border-box';
        container.addEventListener('mouseenter', () => { controls.style.opacity = '1'; });
        container.addEventListener('mouseleave', () => { controls.style.opacity = '0'; });
        container.addEventListener('touchstart', ev => {
            if (ev.target === container || ev.target.tagName === 'CANVAS') {
                controls.style.opacity = controls.style.opacity === '1' ? '0' : '1';
            }
        }, {passive: true});

        const btnStyle = 'background:none;border:none;cursor:pointer;padding:4px;display:flex;' +
            'align-items:center;justify-content:center;opacity:.85';

        // Play/Pause
        const btnPlay = document.createElement('button');
        btnPlay.style.cssText = btnStyle;
        btnPlay.innerHTML = svgIcon(ICONS.pause);
        btnPlay.title = 'Pause';
        btnPlay.addEventListener('click', () => this._togglePause());

        // Live label
        const timeLabel = document.createElement('span');
        timeLabel.style.cssText = 'color:#fff;font-size:12px;font-family:Arial,sans-serif;padding:0 4px;min-width:36px';
        timeLabel.textContent = 'LIVE';

        const spacer = document.createElement('div');
        spacer.style.flex = '1';

        // Mute
        const btnMute = document.createElement('button');
        btnMute.style.cssText = btnStyle;
        btnMute.innerHTML = svgIcon(ICONS.muted);
        btnMute.title = 'Unmute';
        btnMute.addEventListener('click', () => {
            this._muted = !this._muted;
            if (!this._muted) this._startAudio();
            this._updateVolume();
        });

        // Volume slider — dragging it also implies unmute.
        const volume = document.createElement('input');
        volume.type = 'range';
        volume.min = '0';
        volume.max = '1';
        volume.step = '0.05';
        volume.value = '1';
        volume.style.cssText = 'width:60px;cursor:pointer;accent-color:#fff;height:4px';
        volume.addEventListener('input', () => {
            this._muted = false;
            this._startAudio();
            this._updateVolume();
        });

        // Fullscreen
        const btnFS = document.createElement('button');
        btnFS.style.cssText = btnStyle;
        btnFS.innerHTML = svgIcon(ICONS.fs);
        btnFS.title = 'Fullscreen';
        btnFS.addEventListener('click', () => {
            if (document.fullscreenElement) {
                document.exitFullscreen();
            } else {
                container.requestFullscreen().catch(() => {});
            }
        });
        // Keep a reference so destroy() can unregister this document-level listener.
        this._onFSChange = () => {
            const isFS = document.fullscreenElement === container;
            btnFS.innerHTML = svgIcon(isFS ? ICONS.fsExit : ICONS.fs);
            btnFS.title = isFS ? 'Exit fullscreen' : 'Fullscreen';
        };
        document.addEventListener('fullscreenchange', this._onFSChange);

        controls.append(btnPlay, timeLabel, spacer, btnMute, volume, btnFS);
        container.append(controls);

        // Store refs for updates
        this._btnPlay = btnPlay;
        this._btnMute = btnMute;
        this._volume = volume;

        return container;
    }

    /** Hide the mute button and volume slider when the stream has no audio. */
    _hideAudioControls() {
        if (this._btnMute) this._btnMute.style.display = 'none';
        if (this._volume) this._volume.style.display = 'none';
    }

    /** Toggle pause: stop feeding decoders and suspend/resume the audio clock. */
    _togglePause() {
        this._paused = !this._paused;
        this._btnPlay.innerHTML = svgIcon(this._paused ? ICONS.play : ICONS.pause);
        this._btnPlay.title = this._paused ? 'Play' : 'Pause';
        if (this._paused && this._audioCtx) this._audioCtx.suspend();
        if (!this._paused && this._audioCtx) {
            // Reset the scheduling cursor so playback resumes at "now"
            // instead of queueing behind the paused gap.
            this._audioCtx._nextTime = 0;
            this._audioCtx.resume();
        }
    }

    /** Sync gain, context state, and the mute button icon with current settings. */
    _updateVolume() {
        if (this._gainNode) {
            this._gainNode.gain.value = this._muted ? 0 : parseFloat(this._volume.value);
        }
        if (this._audioCtx?.state === 'suspended') {
            this._audioCtx.resume();
        }
        const isMuted = this._muted || parseFloat(this._volume.value) === 0;
        this._btnMute.innerHTML = svgIcon(isMuted ? ICONS.muted : ICONS.volume);
        this._btnMute.title = isMuted ? 'Unmute' : 'Mute';
    }

    /**
     * Lazily create the AudioContext + AudioDecoder. Must run inside a user
     * gesture so the context is allowed to start. Idempotent via _audioStarted.
     */
    _startAudio() {
        if (this._audioStarted || !this._audioInfo) return;
        this._audioStarted = true;

        const info = this._audioInfo;
        const config = {codec: info.codec, sampleRate: info.sampleRate, numberOfChannels: info.channels};

        AudioDecoder.isConfigSupported(config).then(support => {
            if (!support.supported) {
                VideoRenderer.log('AudioDecoder: codec not supported:', info.codec);
                return;
            }

            const actx = new AudioContext({sampleRate: info.sampleRate});
            this._audioCtx = actx;
            this._gainNode = actx.createGain();
            this._gainNode.connect(actx.destination);

            this._audioDecoder = new AudioDecoder({
                output: data => {
                    if (actx.state === 'closed') { data.close(); return; }
                    const buf = actx.createBuffer(data.numberOfChannels, data.numberOfFrames, data.sampleRate);
                    for (let ch = 0; ch < data.numberOfChannels; ch++) {
                        data.copyTo(buf.getChannelData(ch), {planeIndex: ch, format: 'f32-planar'});
                    }
                    const src = actx.createBufferSource();
                    src.buffer = buf;
                    src.connect(this._gainNode);
                    // Schedule buffers back-to-back; _nextTime is a scheduling
                    // cursor stashed on the context, snapped forward if we fell behind.
                    const now = actx.currentTime;
                    if ((actx._nextTime || 0) < now) {
                        actx._nextTime = now;
                    }
                    src.start(actx._nextTime);
                    actx._nextTime += buf.duration;
                    data.close();
                },
                error: () => { this._audioDecoder = null; },
            });
            this._audioDecoder.configure(support.config);

            VideoRenderer.log('audio started:', info.codec, info.sampleRate + 'Hz', info.channels + 'ch');
            this._updateVolume();
        }).catch(err => {
            VideoRenderer.log('AudioDecoder: config check failed:', err.message || err);
        });
    }
}