@@ -0,0 +1,3 @@
{
  "liveServer.settings.port": 5501
}
@@ -3,47 +3,202 @@
<html lang="en" xmlns="http://www.w3.org/1999/xhtml">
<head>
    <meta charset="utf-8" />
    <title>fmp4 demo</title>
    <title>WebSocket MSE Fmp4 demo</title>
    <script type="text/javascript" src="signalr.min.js"></script>
</head>
<body>
    <h1>MSE FMp4 Demo</h1>
    <video autoplay controls></video>
    <video id="stream_live" width="640" height="480" controls="false" autoplay="true"
           muted="muted"
           preload="auto">
        Your browser does not support the video element.
    </video>
    <ul id="messagesList"></ul>
    <!--<video src="/JT1078_4.mp4" autoplay controls></video>-->
    <script>
        var video = document.querySelector('video');
        var mimeCodec = 'video/mp4;codecs="avc1.42E01E, mp4a.40.2"';
        if ('MediaSource' in window && MediaSource.isTypeSupported(mimeCodec)) {
            var mediaSource = new MediaSource();
            console.log(mediaSource.readyState);
            video.src = URL.createObjectURL(mediaSource);
            mediaSource.addEventListener('sourceopen', sourceOpen);
        } else {
            console.log('Unsupported MIME type or codec:', mimeCodec);
        }
        function sourceOpen(_) {
            URL.revokeObjectURL(video.src);
            console.log(this.readyState);
            var mediaSource = this;
            var sourceBuffer = mediaSource.addSourceBuffer(mimeCodec);
            fetchMedia("/JT1078_4.mp4", function (buf) {
                sourceBuffer.addEventListener("updateend", function (_) {
                    mediaSource.endOfStream();
                    video.play();
                    console.log(mediaSource.readyState);
                });
                sourceBuffer.appendBuffer(buf);
            //var mimeCodec = 'video/mp4;codecs="avc1.4D0014, mp4a.40.2"';
            // *** USER PARAMETERS ***
            var verbose = true;
            // var verbose = true; // enable for saturating the console ..
            var buffering_sec = 1; // use some reasonable value
            var buffering_sec_seek = buffering_sec * 0.9;
            // ..seek the stream if it's this much or more away
            // from the last available timestamp
            var buffering_sec_seek_distance = buffering_sec * 0.5;
            // .. jump to this distance from the last avail. timestamp
            // *** INTERNAL PARAMETERS ***
            // set mimetype and codec
            var mimeType = "video/mp4";
            var codecs = "avc1.4D0014"; // https://wiki.whatwg.org/wiki/Video_type_parameters
            // if your stream has audio, remember to include it in these definitions.. otherwise your mse goes sour
            var codecPars = mimeType + ';codecs="' + codecs + '"';
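            // For reference: in "avc1.4D0014" the hex pairs after "avc1." are profile_idc 0x4D
            // (77, Main profile), constraint flags 0x00, and level_idc 0x14 (20, i.e. level 2.0);
            // they are expected to roughly match the SPS of the JT1078 feed, whose real SPS/PPS
            // travel in the moov box built on the server.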
            var stream_started = false; // is the source_buffer updateend callback active or not
            // create media source instance
            var ms = new MediaSource();
            // queue for incoming media packets
            var queue = [];
            var stream_live; // the HTMLMediaElement (i.e. <video> element)
            var ws; // websocket
            var seeked = false; // have we seeked manually once ..
            var cc = 0;
            var source_buffer; // source_buffer instance
            var pass = 0;
            // *** MP4 Box manipulation functions ***
            // taken from here: https://stackoverflow.com/questions/54186634/sending-periodic-metadata-in-fragmented-live-mp4-stream/
            function toInt(arr, index) { // From bytes to big-endian 32-bit integer. Input: Uint8Array, index
                var dv = new DataView(arr.buffer, 0);
                return dv.getInt32(index, false); // big endian
            }
            function toString(arr, fr, to) { // From bytes to string. Input: Uint8Array, start index, stop index.
                // https://developers.google.com/web/updates/2012/06/How-to-convert-ArrayBuffer-to-and-from-String
                return String.fromCharCode.apply(null, arr.slice(fr, to));
            }
            function getBox(arr, i) { // input Uint8Array, start index
                return [toInt(arr, i), toString(arr, i + 4, i + 8)]
            }
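            // Worked example with hypothetical bytes: a packet starting 00 00 00 18 66 74 79 70 ...
            // carries the standard ISO-BMFF box header (4-byte big-endian size, then a 4-character
            // type), so getBox(arr, 0) would return [24, "ftyp"].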
            function getSubBox(arr, box_name) { // input Uint8Array, box name
                var i = 0;
                res = getBox(arr, i);
                main_length = res[0]; name = res[1]; // this boxes length and name
                i = i + 8;
                var sub_box = null;
                while (i < main_length) {
                    res = getBox(arr, i);
                    l = res[0]; name = res[1];
                    if (box_name == name) {
                        sub_box = arr.slice(i, i + l)
                    }
                    i = i + l;
                }
                return sub_box;
            }
            function hasFirstSampleFlag(arr) { // input Uint8Array
                // [moof [mfhd] [traf [tfhd] [tfdt] [trun]]]
                var traf = getSubBox(arr, "traf");
                if (traf == null) { return false; }
                var trun = getSubBox(traf, "trun");
                if (trun == null) { return false; }
                // ISO/IEC 14496-12:2012(E) .. pages 5 and 57
                // bytes: (size 4), (name 4), (version 1 + tr_flags 3)
                var flags = trun.slice(10, 13); // console.log(flags);
                f = flags[1] & 4; // console.log(f);
                return f == 4;
            }
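            // The tested bit (0x04 in the low tr_flags byte) is "first-sample-flags-present"
            // (ISO/IEC 14496-12); the server-side encoder in this demo is assumed to set it only
            // on its first moof, so this picks the fragment playback can safely start from.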
            // consider these callbacks:
            // - putPacket : called when websocket receives data
            // - loadPacket : called when source_buffer is ready for more data
            // Both operate on a common fifo
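            // Handoff between the two: once the queue drains, loadPacket clears stream_started so
            // the next websocket packet is appended to the source_buffer directly instead of queued.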
            function putPacket(arr) {
                // receives ArrayBuffer. Called when websocket gets more data
                // first packet ever to arrive: write directly to source_buffer
                // source_buffer ready to accept: write directly to source_buffer
                // otherwise insert it to queue
                var memview = new Uint8Array(arr);
                if (verbose) { console.log("got", arr.byteLength, "bytes. Values=", memview[0], memview[1], memview[2], memview[3], memview[4]); }
                res = getBox(memview, 0);
                main_length = res[0]; name = res[1]; // this boxes length and name
                if ((name == "ftyp") && (pass == 0)) {
                    pass = pass + 1;
                    console.log("got ftyp");
                }
                else if ((name == "moov") && (pass == 1)) {
                    pass = pass + 1;
                    console.log("got moov");
                }
                else if ((name == "moof") && (pass == 2)) {
                    if (hasFirstSampleFlag(memview)) {
                        pass = pass + 1;
                        console.log("got that special moof");
                    }
                    else {
                        return;
                    }
                }
                else if (pass < 3) {
                    return;
                }
                // keep the latency to minimum
                let latest = stream_live.duration;
                if ((stream_live.duration >= buffering_sec) &&
                    ((latest - stream_live.currentTime) > buffering_sec_seek)) {
                    console.log("seek from ", stream_live.currentTime, " to ", latest);
                    df = (stream_live.duration - stream_live.currentTime); // this much away from the last available frame
                    if ((df > buffering_sec_seek)) {
                        seek_to = stream_live.duration - buffering_sec_seek_distance;
                        stream_live.currentTime = seek_to;
                    }
                }
                data = arr;
                if (!stream_started) {
                    if (verbose) { console.log("Streaming started: ", memview[0], memview[1], memview[2], memview[3], memview[4]); }
                    source_buffer.appendBuffer(data);
                    stream_started = true;
                    cc = cc + 1;
                    return;
                }
                queue.push(data); // add to the end
                if (verbose) { console.log("queue push:", queue.length); }
            }
            function loadPacket() { // called when source_buffer is ready for more
                if (!source_buffer.updating) { // really, really ready
                    if (queue.length > 0) {
                        inp = queue.shift(); // pop from the beginning
                        if (verbose) { console.log("queue pop:", queue.length); }
                        var memview = new Uint8Array(inp);
                        if (verbose) { console.log(" ==> writing buffer with", memview[0], memview[1], memview[2], memview[3]); }
                        source_buffer.appendBuffer(inp);
                        cc = cc + 1;
                    }
                    else { // the queue runs empty, so the next packet is fed directly
                        stream_started = false;
                    }
                }
                else { // so it was not?
                }
            }
            function opened() { // MediaSource object is ready to go
                // https://developer.mozilla.org/en-US/docs/Web/API/MediaSource/duration
                ms.duration = buffering_sec;
                source_buffer = ms.addSourceBuffer(codecPars);
                // https://developer.mozilla.org/en-US/docs/Web/API/source_buffer/mode
                var myMode = source_buffer.mode;
                source_buffer.mode = 'sequence';
                // source_buffer.mode = 'segments';
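                // 'sequence' mode roughly makes the SourceBuffer ignore the timestamps inside each
                // fragment and lay appends out back-to-back, so the live feed can start at an
                // arbitrary baseMediaDecodeTime; 'segments' mode would instead place frames by
                // their embedded timestamps.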
                source_buffer.addEventListener("updateend", loadPacket);
                ws = new signalR.HubConnectionBuilder()
                    .withUrl("http://127.0.0.1:5000/FMp4Hub")
                    .build();
                ws.on("video", (message) => {
                    var buff = base64ToArrayBuffer(message);
                    console.log(buff);
                    putPacket(buff);
                });
                ws.start().catch(err => console.error(err));
            }
            function fetchMedia(url, callback) {
                console.log(url);
                var xhr = new XMLHttpRequest;
                xhr.open('get', url);
                xhr.responseType = 'arraybuffer';
                xhr.onload = function () {
                    callback(xhr.response);
                };
                xhr.send();
            function startup() {
                ms.addEventListener('sourceopen', opened, false);
                // get reference to video
                stream_live = document.getElementById('stream_live');
                // set mediasource as source of video
                stream_live.src = window.URL.createObjectURL(ms);
            }
            function base64ToArrayBuffer(base64) {
                var binary_string = window.atob(base64);
                var len = binary_string.length;
                var bytes = new Uint8Array(len);
                for (var i = 0; i < len; i++) {
                    bytes[i] = binary_string.charCodeAt(i);
                }
                return bytes.buffer;
            }
            window.onload = function () {
                startup();
            }
    </script>
</body>
</html>
@@ -277,7 +277,7 @@ namespace JT1078.FMp4.Test
            fragmentBox.MediaDataBox = new MediaDataBox();
            fragmentBox.MediaDataBox.Data = nalus.Select(s => s.RawData).ToList();
            moofs.Add(fragmentBox);
            foreach(var moof in moofs)
            foreach (var moof in moofs)
            {
                moof.ToBuffer(ref writer);
            }
@@ -437,6 +437,7 @@ namespace JT1078.FMp4.Test
        public void Test4()
        {
            FMp4Encoder fMp4Encoder = new FMp4Encoder();
            H264Decoder h264Decoder = new H264Decoder();
            var packages = ParseNALUTests();
            var filepath = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "H264", "JT1078_4.mp4");
            if (File.Exists(filepath))
@@ -444,15 +445,20 @@ namespace JT1078.FMp4.Test
                File.Delete(filepath);
            }
            using var fileStream = new FileStream(filepath, FileMode.OpenOrCreate, FileAccess.Write);
            var ftyp = fMp4Encoder.EncoderFtypBox();
            fileStream.Write(ftyp);
            var package1 = packages[0];
            var buffer1 = fMp4Encoder.EncoderFirstVideoBox(package1);
            fileStream.Write(buffer1);
            int moofOffset = buffer1.Length;
            foreach (var package in packages.Take(2))
            var nalus1 = h264Decoder.ParseNALU(package1);
            var moov = fMp4Encoder.EncoderMoovBox(nalus1, package1.Bodies.Length);
            fileStream.Write(moov);
            int moofOffset = ftyp.Length + moov.Length;
            var flag = package1.Label3.DataType == Protocol.Enums.JT1078DataType.视频I帧 ? 1u : 0u;
            var otherMoofBuffer = fMp4Encoder.EncoderMoofBox(nalus1, package1.Bodies.Length, package1.Timestamp, flag);
            foreach (var package in packages)
            {
                var otherBuffer = fMp4Encoder.EncoderOtherVideoBox(package, (ulong)moofOffset);
                moofOffset += otherBuffer.Length;
                fileStream.Write(otherBuffer);
                var otherNalus = h264Decoder.ParseNALU(package);
                var otherMdatBuffer = fMp4Encoder.EncoderMdatBox(otherNalus, package.Bodies.Length);
                fileStream.Write(otherMdatBuffer);
            }
            fileStream.Close();
        }
@@ -464,7 +470,7 @@ namespace JT1078.FMp4.Test
            //01 20 00 00
            var a = BinaryPrimitives.ReadUInt32LittleEndian(new byte[] { 0x01, 0x60, 0, 0 });
            var b = BinaryPrimitives.ReadUInt32LittleEndian(new byte[] { 0x01, 0x20, 0, 0 });
            //00 00 01 60
            //00 00 01 20
            var c = BinaryPrimitives.ReadUInt32BigEndian(new byte[] { 0, 0, 0x01, 0x20 });
@@ -495,15 +501,15 @@ namespace JT1078.FMp4.Test
        {
            var filepath1 = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "H264", "JT1078_1.mp4");
            var filepath2 = Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "H264", "jt1078_1_fragmented.mp4");
            var byte1=File.ReadAllBytes(filepath1);
            var byte2=File.ReadAllBytes(filepath2);
            if(byte1.Length== byte2.Length)
            var byte1 = File.ReadAllBytes(filepath1);
            var byte2 = File.ReadAllBytes(filepath2);
            if (byte1.Length == byte2.Length)
            {
                for(var i=0;i< byte1.Length; i++)
                for (var i = 0; i < byte1.Length; i++)
                {
                    if (byte1[i] != byte2[i])
                    {
                    }
                }
            }
@@ -40,7 +40,10 @@ namespace JT1078.FMp4
            Start(ref writer);
            MovieHeaderBox.ToBuffer(ref writer);
            TrackBox.ToBuffer(ref writer);
            MovieExtendsBox.ToBuffer(ref writer);
            if (MovieExtendsBox != null)
            {
                MovieExtendsBox.ToBuffer(ref writer);
            }
            if (UserDataBox != null)
            {
                UserDataBox.ToBuffer(ref writer);
@@ -2,6 +2,7 @@
using JT1078.FMp4.MessagePack;
using JT1078.FMp4.Samples;
using JT1078.Protocol;
using JT1078.Protocol.Enums;
using JT1078.Protocol.H264;
using JT1078.Protocol.MessagePack;
using System;
@@ -23,6 +24,7 @@ namespace JT1078.FMp4
    /// moof n
    /// mdat n
    /// mfra
    /// ref: https://www.w3.org/TR/mse-byte-stream-format-isobmff/#movie-fragment-relative-addressing
    /// </summary>
    public class FMp4Encoder
    {
@@ -35,6 +37,233 @@ namespace JT1078.FMp4
            h264Decoder = new H264Decoder();
        }
        /// <summary>
        /// Encode the ftyp box
        /// </summary>
        /// <returns></returns>
        public byte[] EncoderFtypBox()
        {
            byte[] buffer = FMp4ArrayPool.Rent(4096);
            FMp4MessagePackWriter writer = new FMp4MessagePackWriter(buffer);
            try
            {
                //ftyp
                FileTypeBox fileTypeBox = new FileTypeBox();
                fileTypeBox.MajorBrand = "msdh";
                fileTypeBox.MinorVersion = "\0\0\0\0";
                fileTypeBox.CompatibleBrands.Add("isom");
                fileTypeBox.CompatibleBrands.Add("mp42");
                fileTypeBox.CompatibleBrands.Add("msdh");
                fileTypeBox.CompatibleBrands.Add("nsix");
                fileTypeBox.CompatibleBrands.Add("iso5");
                fileTypeBox.CompatibleBrands.Add("iso6");
                fileTypeBox.ToBuffer(ref writer);
                var data = writer.FlushAndGetArray();
                return data;
            }
            finally
            {
                FMp4ArrayPool.Return(buffer);
            }
        }
        /// <summary>
        /// Encode the moov box
        /// </summary>
        /// <returns></returns>
        public byte[] EncoderMoovBox(List<H264NALU> nalus, int naluLength)
        {
            byte[] buffer = FMp4ArrayPool.Rent(naluLength + 4096);
            FMp4MessagePackWriter writer = new FMp4MessagePackWriter(buffer);
            try
            {
                var spsNALU = nalus.FirstOrDefault(n => n.NALUHeader.NalUnitType == 7);
                //SPS
                spsNALU.RawData = h264Decoder.DiscardEmulationPreventionBytes(spsNALU.RawData);
                var ppsNALU = nalus.FirstOrDefault(n => n.NALUHeader.NalUnitType == 8);
                ppsNALU.RawData = h264Decoder.DiscardEmulationPreventionBytes(ppsNALU.RawData);
                ExpGolombReader h264GolombReader = new ExpGolombReader(spsNALU.RawData);
                var spsInfo = h264GolombReader.ReadSPS();
                //moov
                MovieBox movieBox = new MovieBox();
                movieBox.MovieHeaderBox = new MovieHeaderBox(0, 2);
                movieBox.MovieHeaderBox.CreationTime = 0;
                movieBox.MovieHeaderBox.ModificationTime = 0;
                movieBox.MovieHeaderBox.Duration = 0;
                movieBox.MovieHeaderBox.Timescale = 1000;
                movieBox.MovieHeaderBox.NextTrackID = 99;
                movieBox.TrackBox = new TrackBox();
                movieBox.TrackBox.TrackHeaderBox = new TrackHeaderBox(0, 3);
                movieBox.TrackBox.TrackHeaderBox.CreationTime = 0;
                movieBox.TrackBox.TrackHeaderBox.ModificationTime = 0;
                movieBox.TrackBox.TrackHeaderBox.TrackID = 1;
                movieBox.TrackBox.TrackHeaderBox.Duration = 0;
                movieBox.TrackBox.TrackHeaderBox.TrackIsAudio = false;
                movieBox.TrackBox.TrackHeaderBox.Width = (uint)spsInfo.width;
                movieBox.TrackBox.TrackHeaderBox.Height = (uint)spsInfo.height;
                movieBox.TrackBox.MediaBox = new MediaBox();
                movieBox.TrackBox.MediaBox.MediaHeaderBox = new MediaHeaderBox();
                movieBox.TrackBox.MediaBox.MediaHeaderBox.CreationTime = 0;
                movieBox.TrackBox.MediaBox.MediaHeaderBox.ModificationTime = 0;
                movieBox.TrackBox.MediaBox.MediaHeaderBox.Timescale = 1200000;
                movieBox.TrackBox.MediaBox.MediaHeaderBox.Duration = 0;
                movieBox.TrackBox.MediaBox.HandlerBox = new HandlerBox();
                movieBox.TrackBox.MediaBox.HandlerBox.HandlerType = HandlerType.vide;
                movieBox.TrackBox.MediaBox.HandlerBox.Name = "VideoHandler";
                movieBox.TrackBox.MediaBox.MediaInformationBox = new MediaInformationBox();
                movieBox.TrackBox.MediaBox.MediaInformationBox.VideoMediaHeaderBox = new VideoMediaHeaderBox();
                movieBox.TrackBox.MediaBox.MediaInformationBox.DataInformationBox = new DataInformationBox();
                movieBox.TrackBox.MediaBox.MediaInformationBox.DataInformationBox.DataReferenceBox = new DataReferenceBox();
                movieBox.TrackBox.MediaBox.MediaInformationBox.DataInformationBox.DataReferenceBox.DataEntryBoxes = new List<DataEntryBox>();
                movieBox.TrackBox.MediaBox.MediaInformationBox.DataInformationBox.DataReferenceBox.DataEntryBoxes.Add(new DataEntryUrlBox(1));
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox = new SampleTableBox();
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.SampleDescriptionBox = new SampleDescriptionBox(movieBox.TrackBox.MediaBox.HandlerBox.HandlerType);
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.SampleDescriptionBox.SampleEntries = new List<SampleEntry>();
                AVC1SampleEntry avc1 = new AVC1SampleEntry();
                avc1.AVCConfigurationBox = new AVCConfigurationBox();
                //h264
                avc1.Width = (ushort)movieBox.TrackBox.TrackHeaderBox.Width;
                avc1.Height = (ushort)movieBox.TrackBox.TrackHeaderBox.Height;
                avc1.AVCConfigurationBox.AVCLevelIndication = spsInfo.levelIdc;
                avc1.AVCConfigurationBox.AVCProfileIndication = spsInfo.profileIdc;
                avc1.AVCConfigurationBox.ProfileCompatibility = (byte)spsInfo.profileCompat;
                avc1.AVCConfigurationBox.PPSs = new List<byte[]>() { ppsNALU.RawData };
                avc1.AVCConfigurationBox.SPSs = new List<byte[]>() { spsNALU.RawData };
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.SampleDescriptionBox.SampleEntries.Add(avc1);
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.TimeToSampleBox = new TimeToSampleBox()
                {
                    TimeToSampleInfos = new List<TimeToSampleBox.TimeToSampleInfo>
                    {
                        new TimeToSampleBox.TimeToSampleInfo
                        {
                            SampleCount = 0,
                            SampleDelta = 0
                        }
                    }
                };
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.SampleToChunkBox = new SampleToChunkBox()
                {
                    SampleToChunkInfos = new List<SampleToChunkBox.SampleToChunkInfo>()
                    {
                        new SampleToChunkBox.SampleToChunkInfo
                        {
                        }
                    }
                };
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.SampleSizeBox = new SampleSizeBox()
                {
                    EntrySize = new List<uint>()
                    {
                        0
                    }
                };
                movieBox.TrackBox.MediaBox.MediaInformationBox.SampleTableBox.ChunkOffsetBox = new ChunkOffsetBox()
                {
                    ChunkOffset = new List<uint>()
                    {
                        0
                    }
                };
                movieBox.MovieExtendsBox = new MovieExtendsBox();
                movieBox.MovieExtendsBox.TrackExtendsBoxs = new List<TrackExtendsBox>();
                TrackExtendsBox trex = new TrackExtendsBox();
                trex.TrackID = 1;
                trex.DefaultSampleDescriptionIndex = 1;
                trex.DefaultSampleDuration = 0;
                trex.DefaultSampleSize = 0;
                trex.DefaultSampleFlags = 0;
                movieBox.MovieExtendsBox.TrackExtendsBoxs.Add(trex);
                movieBox.ToBuffer(ref writer);
                var data = writer.FlushAndGetArray();
                return data;
            }
            finally
            {
                FMp4ArrayPool.Return(buffer);
            }
        }
        /// <summary>
        /// Encode the moof box
        /// </summary>
        /// <returns></returns>
        public byte[] EncoderMoofBox(List<H264NALU> nalus, int naluLength, ulong timestamp, uint keyframeFlag, uint moofOffset = 0)
        {
            byte[] buffer = FMp4ArrayPool.Rent(naluLength + 4096);
            FMp4MessagePackWriter writer = new FMp4MessagePackWriter(buffer);
            try
            {
                var movieFragmentBox = new MovieFragmentBox();
                movieFragmentBox.MovieFragmentHeaderBox = new MovieFragmentHeaderBox();
                movieFragmentBox.MovieFragmentHeaderBox.SequenceNumber = sn++;
                movieFragmentBox.TrackFragmentBox = new TrackFragmentBox();
                //0x39 when writing to a file
                //0x02 for fragmented (streaming) output
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox = new TrackFragmentHeaderBox(2);
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.TrackID = 1;
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.DefaultSampleDuration = 48000;
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.DefaultSampleSize = (uint)naluLength;
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.DefaultSampleFlags = 0x1010000;
                movieFragmentBox.TrackFragmentBox.TrackFragmentBaseMediaDecodeTimeBox = new TrackFragmentBaseMediaDecodeTimeBox();
                //trun
                //0x39 when writing to a file
                //0x02 for fragmented (streaming) output
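                // tr_flags per ISO/IEC 14496-12: 0x000004 = first-sample-flags-present (set only for
                // the first fragment below, which is what the demo page's hasFirstSampleFlag() looks
                // for); 0x000400 = sample-flags-present, used for every later fragment.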
                uint flag = 0u;
                if (!first)
                {
                    flag = 4u;
                    movieFragmentBox.TrackFragmentBox.TrackFragmentBaseMediaDecodeTimeBox.BaseMediaDecodeTime = 0;
                    movieFragmentBox.TrackFragmentBox.TrackRunBox = new TrackRunBox(flags: flag);
                    first = true;
                }
                else
                {
                    flag = 0x000400;
                    movieFragmentBox.TrackFragmentBox.TrackFragmentBaseMediaDecodeTimeBox.BaseMediaDecodeTime = timestamp * 1000;
                    movieFragmentBox.TrackFragmentBox.TrackRunBox = new TrackRunBox(flags: flag);
                }
                movieFragmentBox.TrackFragmentBox.TrackRunBox.FirstSampleFlags = 0;
                movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos = new List<TrackRunBox.TrackRunInfo>();
                movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos.Add(new TrackRunBox.TrackRunInfo());
                movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos.Add(new TrackRunBox.TrackRunInfo()
                {
                    SampleSize = (uint)naluLength,
                    //SampleCompositionTimeOffset = package.Label3.DataType == JT1078DataType.视频I帧 ? package.LastIFrameInterval : package.LastFrameInterval,
                    SampleFlags = flag
                });
                movieFragmentBox.ToBuffer(ref writer);
                var data = writer.FlushAndGetArray();
                return data;
            }
            finally
            {
                FMp4ArrayPool.Return(buffer);
            }
        }
        /// <summary>
        /// Encode the mdat box
        /// </summary>
        /// <returns></returns>
        public byte[] EncoderMdatBox(List<H264NALU> nalus, int naluLength)
        {
            byte[] buffer = FMp4ArrayPool.Rent(naluLength + 4096);
            FMp4MessagePackWriter writer = new FMp4MessagePackWriter(buffer);
            try
            {
                var mediaDataBox = new MediaDataBox();
                mediaDataBox.Data = nalus.Select(s => s.RawData).ToList();
                mediaDataBox.ToBuffer(ref writer);
                var data = writer.FlushAndGetArray();
                return data;
            }
            finally
            {
                FMp4ArrayPool.Return(buffer);
            }
        }
        /// <summary>
        /// Encode the first video box
        /// </summary>
@@ -43,7 +272,7 @@ namespace JT1078.FMp4
        public byte[] EncoderFirstVideoBox(JT1078Package package)
        {
            byte[] buffer = FMp4ArrayPool.Rent(package.Bodies.Length + 4096);
            FMp4MessagePackWriter writer = new FMp4MessagePackWriter(new byte[10 * 1024 * 1024]);
            FMp4MessagePackWriter writer = new FMp4MessagePackWriter(buffer);
            try
            {
                var nalus = h264Decoder.ParseNALU(package);
@@ -133,6 +362,10 @@ namespace JT1078.FMp4
            }
        }
        uint sn = 1;
        bool first = false;
        /// <summary>
        /// Encode subsequent video data boxes
        /// </summary>
@@ -148,9 +381,11 @@ namespace JT1078.FMp4
                var nalus = h264Decoder.ParseNALU(package);
                var movieFragmentBox = new MovieFragmentBox();
                movieFragmentBox.MovieFragmentHeaderBox = new MovieFragmentHeaderBox();
                movieFragmentBox.MovieFragmentHeaderBox.SequenceNumber = package.SN;
                movieFragmentBox.MovieFragmentHeaderBox.SequenceNumber = sn++;
                movieFragmentBox.TrackFragmentBox = new TrackFragmentBox();
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox = new TrackFragmentHeaderBox(0x39);
                //0x39 when writing to a file
                //0x02 for fragmented (streaming) output
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox = new TrackFragmentHeaderBox(2);
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.TrackID = 1;
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.BaseDataOffset = moofOffset;
                movieFragmentBox.TrackFragmentBox.TrackFragmentHeaderBox.DefaultSampleDuration = 48000;
@@ -159,10 +394,22 @@ namespace JT1078.FMp4
                movieFragmentBox.TrackFragmentBox.TrackFragmentBaseMediaDecodeTimeBox = new TrackFragmentBaseMediaDecodeTimeBox();
                movieFragmentBox.TrackFragmentBox.TrackFragmentBaseMediaDecodeTimeBox.BaseMediaDecodeTime = package.Timestamp * 1000;
                //trun
                movieFragmentBox.TrackFragmentBox.TrackRunBox = new TrackRunBox(flags: 0x5);
                //0x39 when writing to a file
                //0x02 for fragmented (streaming) output
                uint flag = package.Label3.DataType == JT1078DataType.视频I帧 ? 1u : 0u;
                movieFragmentBox.TrackFragmentBox.TrackRunBox = new TrackRunBox(flags: 0x000400);
                movieFragmentBox.TrackFragmentBox.TrackRunBox.FirstSampleFlags = 0;
                movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos = new List<TrackRunBox.TrackRunInfo>();
                movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos.Add(new TrackRunBox.TrackRunInfo());
                //movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos.Add(new TrackRunBox.TrackRunInfo());
                foreach (var nalu in nalus)
                {
                    movieFragmentBox.TrackFragmentBox.TrackRunBox.TrackRunInfos.Add(new TrackRunBox.TrackRunInfo()
                    {
                        SampleSize = (uint)nalu.RawData.Length,
                        SampleCompositionTimeOffset = package.Label3.DataType == JT1078DataType.视频I帧 ? package.LastIFrameInterval : package.LastFrameInterval,
                        SampleFlags = flag
                    });
                }
                movieFragmentBox.ToBuffer(ref writer);
                var mediaDataBox = new MediaDataBox();
                mediaDataBox.Data = nalus.Select(s => s.RawData).ToList();
@@ -1219,6 +1219,7 @@
            moof n
            mdat n
            mfra
            ref: https://www.w3.org/TR/mse-byte-stream-format-isobmff/#movie-fragment-relative-addressing
            </summary>
        </member>
        <member name="M:JT1078.FMp4.FMp4Encoder.#ctor">
@@ -1226,6 +1227,30 @@
            </summary>
        </member>
<member name="M:JT1078.FMp4.FMp4Encoder.EncoderFtypBox"> | |||||
<summary> | |||||
编码ftyp盒子 | |||||
</summary> | |||||
<returns></returns> | |||||
</member> | |||||
<member name="M:JT1078.FMp4.FMp4Encoder.EncoderMoovBox(System.Collections.Generic.List{JT1078.Protocol.H264.H264NALU},System.Int32)"> | |||||
<summary> | |||||
编码moov盒子 | |||||
</summary> | |||||
<returns></returns> | |||||
</member> | |||||
<member name="M:JT1078.FMp4.FMp4Encoder.EncoderMoofBox(System.Collections.Generic.List{JT1078.Protocol.H264.H264NALU},System.Int32,System.UInt64,System.UInt32,System.UInt32)"> | |||||
<summary> | |||||
编码Moof盒子 | |||||
</summary> | |||||
<returns></returns> | |||||
</member> | |||||
<member name="M:JT1078.FMp4.FMp4Encoder.EncoderMdatBox(System.Collections.Generic.List{JT1078.Protocol.H264.H264NALU},System.Int32)"> | |||||
<summary> | |||||
编码Mdat盒子 | |||||
</summary> | |||||
<returns></returns> | |||||
</member> | |||||
<member name="M:JT1078.FMp4.FMp4Encoder.EncoderFirstVideoBox(JT1078.Protocol.JT1078Package)"> | <member name="M:JT1078.FMp4.FMp4Encoder.EncoderFirstVideoBox(JT1078.Protocol.JT1078Package)"> | ||||
<summary> | <summary> | ||||
编码首个视频盒子 | 编码首个视频盒子 | ||||
@@ -0,0 +1,48 @@
using JT1078.SignalR.Test.Services;
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Text.Json;
using System.Threading.Tasks;
namespace JT1078.SignalR.Test.Hubs
{
    public class FMp4Hub : Hub
    {
        private readonly ILogger logger;
        private readonly WsSession wsSession;
        public FMp4Hub(
            WsSession wsSession,
            ILoggerFactory loggerFactory)
        {
            this.wsSession = wsSession;
            logger = loggerFactory.CreateLogger<FMp4Hub>();
        }
        public override Task OnConnectedAsync()
        {
            if (logger.IsEnabled(LogLevel.Debug))
            {
logger.LogDebug($"链接上:{Context.ConnectionId}"); | |||||
} | |||||
wsSession.TryAdd(Context.ConnectionId); | |||||
return base.OnConnectedAsync(); | |||||
} | |||||
public override Task OnDisconnectedAsync(Exception exception) | |||||
{ | |||||
if (logger.IsEnabled(LogLevel.Debug)) | |||||
{ | |||||
logger.LogDebug($"断开链接:{Context.ConnectionId}"); | |||||
            }
            wsSession.TryRemove(Context.ConnectionId);
            return base.OnDisconnectedAsync(exception);
        }
    }
}
@@ -0,0 +1,24 @@
<Project Sdk="Microsoft.NET.Sdk.Web">
  <PropertyGroup>
    <TargetFramework>net5.0</TargetFramework>
  </PropertyGroup>
  <ItemGroup>
    <ProjectReference Include="..\JT1078.FMp4\JT1078.FMp4.csproj" />
  </ItemGroup>
  <ItemGroup>
    <None Include="..\..\doc\video\jt1078_3.txt" Link="H264\jt1078_3.txt">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Include="..\..\doc\video\jt1078_1.txt" Link="H264\jt1078_1.txt">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Include="..\..\doc\video\jt1078_2.txt" Link="H264\jt1078_2.txt">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
    <None Include="..\..\doc\video\jt1078_1_fragmented.mp4" Link="H264\jt1078_1_fragmented.mp4">
      <CopyToOutputDirectory>Always</CopyToOutputDirectory>
    </None>
  </ItemGroup>
</Project>
@@ -0,0 +1,26 @@
using Microsoft.AspNetCore.Hosting;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace JT1078.SignalR.Test
{
    public class Program
    {
        public static void Main(string[] args)
        {
            CreateHostBuilder(args).Build().Run();
        }
        public static IHostBuilder CreateHostBuilder(string[] args) =>
            Host.CreateDefaultBuilder(args)
                .ConfigureWebHostDefaults(webBuilder =>
                {
                    webBuilder.UseStartup<Startup>();
                });
    }
}
@@ -0,0 +1,108 @@
using Microsoft.AspNetCore.SignalR;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System;
using System.Text;
using System.Text.Json;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Options;
using System.Collections.Generic;
using System.Linq;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Http;
using JT1078.SignalR.Test.Hubs;
using JT1078.FMp4;
using JT1078.Protocol;
using System.IO;
using JT1078.Protocol.Extensions;
using JT1078.Protocol.H264;
namespace JT1078.SignalR.Test.Services
{
    public class ToWebSocketService : BackgroundService
    {
        private readonly ILogger<ToWebSocketService> logger;
        private readonly IHubContext<FMp4Hub> _hubContext;
        private readonly FMp4Encoder fMp4Encoder;
        private readonly WsSession wsSession;
        private readonly H264Decoder h264Decoder;
        public ToWebSocketService(
            H264Decoder h264Decoder,
            WsSession wsSession,
            FMp4Encoder fMp4Encoder,
            ILoggerFactory loggerFactory,
            IHubContext<FMp4Hub> hubContext)
        {
            this.h264Decoder = h264Decoder;
            logger = loggerFactory.CreateLogger<ToWebSocketService>();
            this.fMp4Encoder = fMp4Encoder;
            _hubContext = hubContext;
            this.wsSession = wsSession;
        }
        public Queue<byte[]> q = new Queue<byte[]>();
        public void a()
        {
            List<JT1078Package> packages = new List<JT1078Package>();
            var lines = File.ReadAllLines(Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "H264", "jt1078_3.txt"));
            int mergeBodyLength = 0;
            foreach (var line in lines)
            {
                var data = line.Split(',');
                var bytes = data[6].ToHexBytes();
                JT1078Package package = JT1078Serializer.Deserialize(bytes);
                mergeBodyLength += package.DataBodyLength;
                var packageMerge = JT1078Serializer.Merge(package);
                if (packageMerge != null)
                {
                    packages.Add(packageMerge);
                }
            }
            var ftyp = fMp4Encoder.EncoderFtypBox();
            q.Enqueue(ftyp);
            var package1 = packages[0];
            var nalus1 = h264Decoder.ParseNALU(package1);
            var moov = fMp4Encoder.EncoderMoovBox(nalus1, package1.Bodies.Length);
            q.Enqueue(moov);
            var flag = package1.Label3.DataType == Protocol.Enums.JT1078DataType.视频I帧 ? 1u : 0u;
            var moofBuffer = fMp4Encoder.EncoderMoofBox(nalus1, package1.Bodies.Length, package1.Timestamp, flag);
            q.Enqueue(moofBuffer);
            foreach (var package in packages)
            {
                var otherNalus = h264Decoder.ParseNALU(package);
                var otherMdatBuffer = fMp4Encoder.EncoderMdatBox(otherNalus, package.Bodies.Length);
                q.Enqueue(otherMdatBuffer);
            }
        }
        protected async override Task ExecuteAsync(CancellationToken stoppingToken)
        {
            a();
            while (!stoppingToken.IsCancellationRequested)
            {
                try
                {
                    if (wsSession.GetCount() > 0)
                    {
                        if (q.Count > 0)
                        {
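                            // Note: with the default JSON hub protocol the byte[] payload goes out as a
                            // Base64 string, which is why the demo page decodes each "video" message with
                            // base64ToArrayBuffer() before appending it to the SourceBuffer.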
                            await _hubContext.Clients.All.SendAsync("video", q.Dequeue(), stoppingToken);
                        }
                    }
                }
                catch (Exception ex)
                {
                    logger.LogError(ex, "");
                }
                await Task.Delay(1000);
            }
        }
    }
}
@@ -0,0 +1,33 @@
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace JT1078.SignalR.Test.Services
{
    public class WsSession
    {
        private ConcurrentDictionary<string, string> sessions;
        public WsSession()
        {
            sessions = new ConcurrentDictionary<string, string>();
        }
        public void TryAdd(string connectionId)
        {
            sessions.TryAdd(connectionId, connectionId);
        }
        public int GetCount()
        {
            return sessions.Count;
        }
        public void TryRemove(string connectionId)
        {
            sessions.TryRemove(connectionId, out _);
        }
    }
}
@@ -0,0 +1,66 @@
using JT1078.FMp4;
using JT1078.Protocol.H264;
using JT1078.SignalR.Test.Hubs;
using JT1078.SignalR.Test.Services;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Hosting;
using Microsoft.Extensions.Logging;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace JT1078.SignalR.Test
{
    public class Startup
    {
        public Startup(IConfiguration configuration)
        {
            Configuration = configuration;
        }
        public IConfiguration Configuration { get; }
        // This method gets called by the runtime. Use this method to add services to the container.
        public void ConfigureServices(IServiceCollection services)
        {
            services.AddControllers();
            services.AddSignalR();
            services.AddSingleton<FMp4Encoder>();
            services.AddSingleton<H264Decoder>();
            services.AddSingleton<WsSession>();
            services.AddHostedService<ToWebSocketService>();
            services.AddCors(options => options.AddPolicy("CorsPolicy", builder =>
            {
                builder.AllowAnyMethod()
                    .AllowAnyHeader()
                    .AllowCredentials()
                    .SetIsOriginAllowed(o => true);
            }));
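            // The demo page is served by Live Server on port 5501 (see the liveServer setting above)
            // while this host listens on port 5000, so the SignalR negotiate/WebSocket requests are
            // cross-origin; hence the permissive CORS policy with credentials allowed.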
        }
        // This method gets called by the runtime. Use this method to configure the HTTP request pipeline.
        public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
        {
            if (env.IsDevelopment())
            {
                app.UseDeveloperExceptionPage();
            }
            app.UseRouting();
            app.UseCors("CorsPolicy");
            app.UseAuthorization();
            app.UseEndpoints(endpoints =>
            {
                endpoints.MapHub<FMp4Hub>("/FMp4Hub");
            });
            app.UseEndpoints(endpoints =>
            {
                endpoints.MapControllers();
            });
        }
    }
}
@@ -0,0 +1,9 @@
{
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft": "Warning",
      "Microsoft.Hosting.Lifetime": "Information"
    }
  }
}
@@ -0,0 +1,10 @@
{
  "Logging": {
    "LogLevel": {
      "Default": "Information",
      "Microsoft": "Warning",
      "Microsoft.Hosting.Lifetime": "Information"
    }
  },
  "AllowedHosts": "*"
}
@@ -40,6 +40,8 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Benchmarks", "Benchmarks",
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JT1078.AV.Benchmark", "JT1078.AV.Benchmark\JT1078.AV.Benchmark.csproj", "{93D6C094-5A3A-4DFA-B52B-605FDFFB6094}"
EndProject
Project("{9A19103F-16F7-4668-BE54-9A1E7A4F7556}") = "JT1078.SignalR.Test", "JT1078.SignalR.Test\JT1078.SignalR.Test.csproj", "{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}"
EndProject
Global
	GlobalSection(SolutionConfigurationPlatforms) = preSolution
		Debug|Any CPU = Debug|Any CPU
@@ -218,6 +220,18 @@ Global
		{93D6C094-5A3A-4DFA-B52B-605FDFFB6094}.Release|x64.Build.0 = Release|Any CPU
		{93D6C094-5A3A-4DFA-B52B-605FDFFB6094}.Release|x86.ActiveCfg = Release|Any CPU
		{93D6C094-5A3A-4DFA-B52B-605FDFFB6094}.Release|x86.Build.0 = Release|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Debug|Any CPU.Build.0 = Debug|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Debug|x64.ActiveCfg = Debug|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Debug|x64.Build.0 = Debug|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Debug|x86.ActiveCfg = Debug|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Debug|x86.Build.0 = Debug|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Release|Any CPU.ActiveCfg = Release|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Release|Any CPU.Build.0 = Release|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Release|x64.ActiveCfg = Release|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Release|x64.Build.0 = Release|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Release|x86.ActiveCfg = Release|Any CPU
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014}.Release|x86.Build.0 = Release|Any CPU
	EndGlobalSection
	GlobalSection(SolutionProperties) = preSolution
		HideSolutionNode = FALSE
@@ -231,6 +245,7 @@ Global
		{5564C20B-BFF4-4A2A-BDF2-C7427E93E993} = {0655AF84-E578-409F-AB0E-B47E0D2F6814}
		{56E76D56-4CCC-401F-B25D-9AB41D58A10A} = {0655AF84-E578-409F-AB0E-B47E0D2F6814}
		{93D6C094-5A3A-4DFA-B52B-605FDFFB6094} = {807ADB1F-FED4-4A56-82D2-F08F1FB7C886}
		{6A063AF3-611F-4A1C-ACCF-BF903B7C7014} = {0655AF84-E578-409F-AB0E-B47E0D2F6814}
	EndGlobalSection
	GlobalSection(ExtensibilityGlobals) = postSolution
		SolutionGuid = {FAE1656D-226F-4B4B-8C33-615D7E632B26}