frontend: video streaming now works!!

- changed the backend to append the buffers together and dispatch them
  as such (this is what moonlight-embedded does)
- fixed the frontend trying to play back an empty buffer because types
  are hard (this is why I should have used protobuf...)
This commit is contained in:
2025-07-21 02:06:55 -06:00
parent 7a2b0fd4d6
commit a11a12828c
2 changed files with 49 additions and 33 deletions
+14 -4
View File
@@ -39,10 +39,16 @@ type SetupPacket = {
Setup: Setup Setup: Setup
} }
type DecodeBuffer = {
buffer_bype: string,
data: Array<number>,
}
type DecodeUnit = { type DecodeUnit = {
frame_number: number, frame_number: number,
frame_type: string, frame_type: string,
buffer: Array<number>, buffer: DecodeBuffer,
receieve_time_ms: number,
} }
type DecodeUnitPacket = { type DecodeUnitPacket = {
@@ -209,7 +215,8 @@ export async function connectToStream(url: string, cert_hash: Array<number>) {
let config: VideoDecoderConfig | undefined = undefined; let config: VideoDecoderConfig | undefined = undefined;
if (packet.Setup.video_format == "H264") { if (packet.Setup.video_format == "H264") {
config = { config = {
codec: 'avc1.42E01E', // H.264 codec //codec: 'avc1.42E01E', // H.264 codec
codec: 'avc1.4D002A', // H.264 codec
codedWidth: packet.Setup.width, codedWidth: packet.Setup.width,
codedHeight: packet.Setup.height, codedHeight: packet.Setup.height,
}; };
@@ -220,6 +227,8 @@ export async function connectToStream(url: string, cert_hash: Array<number>) {
const codecSupport = await VideoDecoder.isConfigSupported(config); const codecSupport = await VideoDecoder.isConfigSupported(config);
if (codecSupport.supported) { if (codecSupport.supported) {
videoDecoder.configure(config); videoDecoder.configure(config);
} else {
throw new Error(`Could not configure decoder`);
} }
} else if (Object.hasOwn(packets[i], "DecodeUnit")) { } else if (Object.hasOwn(packets[i], "DecodeUnit")) {
@@ -231,10 +240,11 @@ export async function connectToStream(url: string, cert_hash: Array<number>) {
frame_type = "key"; frame_type = "key";
} }
const chunk = new EncodedVideoChunk({ const chunk = new EncodedVideoChunk({
timestamp: 0, timestamp: packet.DecodeUnit.receieve_time_ms,
type: frame_type, type: frame_type,
data: new Uint8Array(packet.DecodeUnit.buffer), data: new Uint8Array(packet.DecodeUnit.buffer.data),
}); });
console.log(chunk);
videoDecoder.decode(chunk); videoDecoder.decode(chunk);
@@ -155,8 +155,10 @@ impl RendererMessage {
}) })
} }
fn from_decode_unit(decode_unit: _DECODE_UNIT) -> Result<Vec<Self>> { fn from_decode_unit(decode_unit: _DECODE_UNIT) -> Result<Self> {
let mut buffers = Vec::new(); //fn from_decode_unit(decode_unit: _DECODE_UNIT) -> Result<Vec<Self>> {
let mut buffer = Vec::new();
//let mut buffers = Vec::new();
if decode_unit.bufferList.is_null() { if decode_unit.bufferList.is_null() {
return Err(anyhow!("DecodeUnit bufferList is null")); return Err(anyhow!("DecodeUnit bufferList is null"));
@@ -165,32 +167,35 @@ impl RendererMessage {
let mut index = 0; let mut index = 0;
loop { loop {
let mut b = Buffer::try_from(next)?;
buffer.append(&mut b.data);
//buffers.push(msg);
index = index + 1;
if next.next.is_null() { if next.next.is_null() {
break; break;
} }
let buffer = Buffer::try_from(next)?;
let msg = RendererMessage::DecodeUnit {
frame_number: <u64>::try_from(decode_unit.frameNumber)?,
frame_type: FrameType::try_from(decode_unit.frameType)?,
host_processing_latency: decode_unit.frameHostProcessingLatency,
receieve_time_ms: decode_unit.receiveTimeMs,
enqueue_time_ms: decode_unit.enqueueTimeMs,
presentation_time: decode_unit.presentationTimeMs as u64,
full_length: <usize>::try_from(decode_unit.fullLength)?,
buffer,
index,
hdr_active: decode_unit.hdrActive,
colorspace: decode_unit.colorspace,
};
buffers.push(msg);
index = index + 1;
next = unsafe { *next.next }; next = unsafe { *next.next };
} }
Ok(buffers) Ok(RendererMessage::DecodeUnit {
frame_number: <u64>::try_from(decode_unit.frameNumber)?,
frame_type: FrameType::try_from(decode_unit.frameType)?,
host_processing_latency: decode_unit.frameHostProcessingLatency,
receieve_time_ms: decode_unit.receiveTimeMs,
enqueue_time_ms: decode_unit.enqueueTimeMs,
presentation_time: decode_unit.presentationTimeMs as u64,
full_length: <usize>::try_from(decode_unit.fullLength)?,
buffer: Buffer {
data: buffer,
buffer_type: BufferType::PICDATA,
},
index,
hdr_active: decode_unit.hdrActive,
colorspace: decode_unit.colorspace,
})
} }
} }
@@ -258,7 +263,7 @@ extern "C" fn submit_decode_unit_cb(decode_unit: PDECODE_UNIT) -> std::os::raw::
} }
let decode_unit = unsafe { *decode_unit }; let decode_unit = unsafe { *decode_unit };
let messages = match RendererMessage::from_decode_unit(decode_unit) { let message = match RendererMessage::from_decode_unit(decode_unit) {
Ok(m) => m, Ok(m) => m,
Err(e) => { Err(e) => {
error!("Cannot construct RendererMessage: {e}"); error!("Cannot construct RendererMessage: {e}");
@@ -266,13 +271,14 @@ extern "C" fn submit_decode_unit_cb(decode_unit: PDECODE_UNIT) -> std::os::raw::
} }
}; };
for msg in messages { send_message(message)
let ret = send_message(msg); //for msg in messages {
if ret != 0 { // let ret = send_message(msg);
return ret; // if ret != 0 {
} // return ret;
} // }
0 //}
//0
} }
pub fn decoder_callbacks() -> Result<(DECODER_RENDERER_CALLBACKS, mpsc::Receiver<RendererMessage>)> pub fn decoder_callbacks() -> Result<(DECODER_RENDERER_CALLBACKS, mpsc::Receiver<RendererMessage>)>