149 lines
4.0 KiB
TypeScript
149 lines
4.0 KiB
TypeScript
// Stream setup parameters sent by the server before any video data.
// Field names must match the server's JSON exactly.
type Setup = {
    // Codec name; "H264" is the only value handled downstream.
    video_format: string,
    // Coded frame width in pixels.
    width: number,
    // Coded frame height in pixels.
    height: number,
    // NOTE(review): presumably the target refresh rate in Hz — unused in this
    // file, confirm semantics with the sender.
    redraw_rate: number,
    // NOTE(review): opaque flag bits; semantics not visible here — unused in
    // this file, confirm with the sender.
    dr_flags: number,
}
|
|
|
|
// Envelope for a Setup message; the presence of the "Setup" key is how
// streamVideoFromReader discriminates packet types.
type SetupPacket = {
    Setup: Setup
}
|
|
|
|
// Raw encoded payload for one decode unit.
type DecodeBuffer = {
    // NOTE(review): "buffer_bype" looks like a typo for "buffer_type", but the
    // name must match the server's JSON field — fix on both ends or not at all.
    buffer_bype: string,
    // Encoded bytes as a plain number array (JSON has no binary type);
    // converted to a Uint8Array before decoding.
    data: Array<number>,
}
|
|
|
|
// One unit of encoded video to feed to the decoder.
type DecodeUnit = {
    // Sequence number of the frame (not used in this file).
    frame_number: number,
    // "IDR" marks a keyframe; any other value is treated as a delta frame.
    frame_type: string,
    // Encoded payload bytes.
    buffer: DecodeBuffer,
    // NOTE(review): "receieve" is a typo for "receive", but the name must
    // match the server's JSON field — fix on both ends or not at all.
    // Per the suffix this is in milliseconds.
    receieve_time_ms: number,
}
|
|
|
|
// Envelope for a DecodeUnit message; the presence of the "DecodeUnit" key is
// how streamVideoFromReader discriminates packet types.
type DecodeUnitPacket = {
    DecodeUnit: DecodeUnit
}
|
|
|
|
function parseData(newBuffer: Uint8Array, oldBuffer: Uint8Array): [Array<Object>, Uint8Array<ArrayBuffer>] {
|
|
let packets = new Array<Object>();
|
|
let unparsedData = new Uint8Array();
|
|
|
|
let data = new Uint8Array([...oldBuffer, ...newBuffer]);
|
|
let index = 0;
|
|
while (true) {
|
|
if (index >= data.length) {
|
|
break
|
|
}
|
|
const view = new DataView(data.buffer.slice(index, index + 4));
|
|
const dataLength = view.getUint32(0, true);
|
|
|
|
const slice_start_index = index + 4;
|
|
const slice_end_index = index + 4 + dataLength;
|
|
|
|
if (data.length < slice_end_index) {
|
|
unparsedData = new Uint8Array(data.buffer.slice(index, data.length));
|
|
break;
|
|
}
|
|
|
|
const dataToParse = data.buffer.slice(slice_start_index, slice_end_index);
|
|
const decoder = new TextDecoder('utf-8');
|
|
const jsonString = decoder.decode(dataToParse);
|
|
|
|
packets.push(JSON.parse(jsonString));
|
|
|
|
index += 4 + dataLength;
|
|
}
|
|
return [packets, unparsedData];
|
|
}
|
|
|
|
|
|
export async function streamVideoFromReader(reader: ReadableStreamDefaultReader, canvasElement: OffscreenCanvas) {
|
|
const canvasCtx: OffscreenCanvasRenderingContext2D | null = canvasElement.getContext('2d');
|
|
if (canvasCtx == null) {
|
|
throw new Error(`Could not get 2d canvas context`);
|
|
}
|
|
|
|
try {
|
|
let unparsedData = new Uint8Array();
|
|
|
|
const videoDecoder = new VideoDecoder({
|
|
output: (frame) => {
|
|
// Set canvas dimensions to match the frame
|
|
canvasElement.width = frame.displayWidth;
|
|
canvasElement.height = frame.displayHeight;
|
|
|
|
// Draw the decoded frame to canvas
|
|
canvasCtx.drawImage(frame, 0, 0);
|
|
|
|
// Important: close the frame to free memory
|
|
frame.close();
|
|
},
|
|
error: (e) => {
|
|
console.error('Decode error:', e);
|
|
}
|
|
});
|
|
|
|
while (true) {
|
|
const { value, done } = await reader.read();
|
|
if (done) break;
|
|
|
|
let [packets, remainingData] = parseData(value, unparsedData);
|
|
unparsedData = remainingData;
|
|
|
|
for (let i = 0; i < packets.length; i++) {
|
|
if (Object.hasOwn(packets[i], "Setup")) {
|
|
let packet = packets[i] as SetupPacket;
|
|
|
|
let config: VideoDecoderConfig | undefined = undefined;
|
|
if (packet.Setup.video_format == "H264") {
|
|
config = {
|
|
//codec: 'avc1.42E01E', // H.264 codec
|
|
codec: 'avc1.4D002A', // H.264 codec
|
|
codedWidth: packet.Setup.width,
|
|
codedHeight: packet.Setup.height,
|
|
};
|
|
} else {
|
|
throw new Error(`Unsupported video codec ${packet.Setup.video_format}`);
|
|
}
|
|
|
|
const codecSupport = await VideoDecoder.isConfigSupported(config);
|
|
if (codecSupport.supported) {
|
|
videoDecoder.configure(config);
|
|
} else {
|
|
throw new Error(`Could not configure decoder`);
|
|
}
|
|
|
|
} else if (Object.hasOwn(packets[i], "DecodeUnit")) {
|
|
let packet = packets[i] as DecodeUnitPacket;
|
|
|
|
|
|
let frame_type: EncodedAudioChunkType = "delta";
|
|
if (packet.DecodeUnit.frame_type == "IDR") {
|
|
frame_type = "key";
|
|
}
|
|
const chunk = new EncodedVideoChunk({
|
|
timestamp: packet.DecodeUnit.receieve_time_ms,
|
|
type: frame_type,
|
|
data: new Uint8Array(packet.DecodeUnit.buffer.data),
|
|
});
|
|
|
|
videoDecoder.decode(chunk);
|
|
|
|
} else {
|
|
throw new Error(`Got packet of unknown type`);
|
|
}
|
|
}
|
|
|
|
}
|
|
|
|
} catch (e) {
|
|
var error = <Error>e;
|
|
console.error('Error connecting to stream:', error);
|
|
alert('Failed to connect to stream: ' + error.message);
|
|
}
|
|
}
|
|
|
|
|