可用于 RTSP 流检测、独立视频解码、音频解码
namespace RtspClient_Decode
{
    /// <summary>
    /// RTSP client demo form: connects to an RTSP stream, decodes video frames
    /// into a Bitmap shown in a PictureBox, and plays decoded audio through WaveOut.
    /// Can be used for RTSP stream probing, standalone video decoding, and audio decoding.
    /// </summary>
    public partial class MainFrom : Form
    {
        // ---- Video ----
        // Dispatcher of the thread constructing the form (the UI thread); used to
        // marshal bitmap updates from the RTSP receive thread.
        // NOTE(review): this is the WPF Dispatcher inside a WinForms app — it works
        // because both share the Win32 message pump, but Control.Invoke would be the
        // conventional WinForms choice; confirm before changing.
        Dispatcher _dispatcher = Dispatcher.CurrentDispatcher;
        Bitmap _videoBitmap;                      // target bitmap the video decoder writes into
        TransformParameters _transformParameters; // scaling/conversion parameters derived from the bitmap
        // One decoder instance per video codec, created lazily on first frame.
        Dictionary<FFmpegVideoCodecId, FFmpegVideoDecoder> _videoDecodersMap = new Dictionary<FFmpegVideoCodecId, FFmpegVideoDecoder>();

        // ---- Audio ----
        BufferedWaveProvider _audioOut; // PCM buffer feeding WaveOut; created on first decoded audio frame
        WaveOut _waveOut;
        Dictionary<FFmpegAudioCodecId, FFmpegAudioDecoder> _audioDecodersMap = new Dictionary<FFmpegAudioCodecId, FFmpegAudioDecoder>();

        // ---- Connection ----
        CancellationTokenSource _cancellationTokenSource;
        int _msgLine = 1;  // lines appended to the message box since the last clear
        bool _checkVideo;  // snapshot of the "video" checkbox, taken when playback starts
        bool _checkAudio;  // snapshot of the "audio" checkbox, taken when playback starts

        public MainFrom()
        {
            InitializeComponent();
            cbxProtocol.SelectedIndex = 0; // default RTP transport protocol
        }

        /// <summary>
        /// Toggles between play ("播放") and stop ("停止") based on the button caption.
        /// </summary>
        void btnControl_Click(object sender, EventArgs e)
        {
            _checkVideo = chbVideo.Checked;
            _checkAudio = chbAudio.Checked;
            switch (btnControl.Text)
            {
                case "播放":
                    btnControl.Text = "停止";
                    Connect();
                    break;
                case "停止":
                    _cancellationTokenSource.Cancel();
                    //_connectTask.Wait(CancellationToken.None);
                    // FIX: restore the caption so playback can be started again;
                    // previously the button stayed on "停止" forever after the first stop.
                    btnControl.Text = "播放";
                    break;
            }
        }

        /// <summary>
        /// Prepares the video bitmap (when video is enabled), builds connection
        /// parameters from the UI fields, and starts the connect loop fire-and-forget.
        /// </summary>
        void Connect()
        {
            if (_checkVideo)
            {
                _videoBitmap = new Bitmap(video.Width, video.Height);
                _transformParameters = _videoBitmap.GetTransformParameters();
            }

            var serverUri = new Uri(txtAddress.Text);
            var credentials = new NetworkCredential(txtUsername.Text, txtPassword.Text);
            var connectionParameters = new ConnectionParameters(serverUri, credentials);
            // Combo box item order must match the RtpTransportProtocol enum values.
            connectionParameters.RtpTransport = (RtpTransportProtocol)(cbxProtocol.SelectedIndex);

            _cancellationTokenSource = new CancellationTokenSource();
            // Fire-and-forget: ConnectAsync reports every error through UpdateMessage
            // and only exits on cancellation, so the task is intentionally discarded.
            _ = ConnectAsync(connectionParameters, _cancellationTokenSource.Token);
        }

        /// <summary>
        /// Connect/receive loop: keeps reconnecting every 5 seconds on RTSP errors
        /// until the token is cancelled. Frames arrive via RtspClient_FrameReceived.
        /// </summary>
        async Task ConnectAsync(ConnectionParameters connectionParameters, CancellationToken token)
        {
            try
            {
                TimeSpan delay = TimeSpan.FromSeconds(5);

                using (var rtspClient = new RtspClient(connectionParameters))
                {
                    rtspClient.FrameReceived += RtspClient_FrameReceived;

                    while (true)
                    {
                        UpdateMessage("[Info] Connecting...");
                        try
                        {
                            await rtspClient.ConnectAsync(token);
                        }
                        catch (OperationCanceledException e)
                        {
                            UpdateMessage("[Error] ConnectAsync,Canceled1:" + e.ToString());
                            return;
                        }
                        catch (RtspClientException e)
                        {
                            // Connection failed: wait, then retry.
                            UpdateMessage("[Error] ConnectAsync,Errmsg:" + e.ToString());
                            await Task.Delay(delay, token);
                            continue;
                        }

                        UpdateMessage("[Info] Connected.");
                        try
                        {
                            await rtspClient.ReceiveAsync(token);
                        }
                        catch (OperationCanceledException e)
                        {
                            UpdateMessage("[Error] ReceiveAsync,Canceled:" + e.ToString());
                            return;
                        }
                        catch (RtspClientException e)
                        {
                            // Receive dropped: wait, then loop back and reconnect.
                            UpdateMessage("[Error] ReceiveAsync,Errmsg:" + e.ToString());
                            await Task.Delay(delay, token);
                        }
                    }
                }
            }
            catch (OperationCanceledException e)
            {
                // Task.Delay was cancelled while waiting to retry.
                UpdateMessage("[Error] ConnectAsync Task,Canceled:" + e.ToString());
            }
        }

        /// <summary>
        /// Called on the RTSP receive thread for every raw frame; decodes and renders
        /// video frames, decodes and queues audio frames for playback.
        /// </summary>
        void RtspClient_FrameReceived(object sender, RtspClientSharp.RawFrames.RawFrame rawFrame)
        {
            //UpdateMessage($"[Info] New frame {rawFrame.Timestamp}: {rawFrame.GetType().Name}");
            switch (rawFrame.Type)
            {
                case FrameType.Video:
                    {
                        // Video decoding
                        if (!_checkVideo) return;
                        if (!(rawFrame is RawVideoFrame rawVideoFrame)) return;

                        FFmpegVideoDecoder decoder = GetVideoDecoderForFrame(rawVideoFrame);
                        IDecodedVideoFrame decodedFrame = decoder.TryDecode(rawVideoFrame);

                        // Marshal the bitmap update onto the UI thread.
                        _dispatcher.Invoke(() =>
                        {
                            _videoBitmap.UpdateBitmap(decodedFrame, _transformParameters);
                            video.Image = _videoBitmap;
                        }, DispatcherPriority.Send);
                    }
                    break;
                case FrameType.Audio:
                    {
                        // Audio decoding (e.g. G711A)
                        if (!_checkAudio) return;
                        if (!(rawFrame is RawAudioFrame rawAudioFrame)) return;

                        FFmpegAudioDecoder decoder = GetAudioDecoderForFrame(rawAudioFrame);
                        if (!decoder.TryDecode(rawAudioFrame)) return;

                        IDecodedAudioFrame decodedFrame = decoder.GetDecodedFrame(
                            new AudioConversionParameters() { OutBitsPerSample = 16 });

                        // Lazily create the output chain from the first decoded frame's format.
                        if (_audioOut == null)
                        {
                            _audioOut = new BufferedWaveProvider(new WaveFormat(
                                decodedFrame.Format.SampleRate,
                                decodedFrame.Format.BitPerSample,
                                decodedFrame.Format.Channels));
                            _audioOut.BufferLength = 2560 * 16;
                            // Drop data instead of throwing when the buffer fills up.
                            _audioOut.DiscardOnBufferOverflow = true;
                            _waveOut = new WaveOut();
                            _waveOut.Init(_audioOut);
                            _waveOut.Volume = 1.0f;
                        }

                        _audioOut.AddSamples(
                            decodedFrame.DecodedBytes.Array,
                            decodedFrame.DecodedBytes.Offset,
                            decodedFrame.DecodedBytes.Count);

                        if (_waveOut.PlaybackState != PlaybackState.Playing)
                        {
                            _waveOut.Play();
                        }
                    }
                    break;
            }
        }

        /// <summary>
        /// Returns the decoder for the frame's audio codec, creating and caching it on first use.
        /// </summary>
        FFmpegAudioDecoder GetAudioDecoderForFrame(RawAudioFrame audioFrame)
        {
            FFmpegAudioCodecId codecId = DetectAudioCodecId(audioFrame);
            if (!_audioDecodersMap.TryGetValue(codecId, out FFmpegAudioDecoder decoder))
            {
                int bitsPerCodedSample = 0;
                // G.726 needs the coded sample width to select the right bitrate variant.
                if (audioFrame is RawG726Frame g726Frame)
                    bitsPerCodedSample = g726Frame.BitsPerCodedSample;

                decoder = FFmpegAudioDecoder.CreateDecoder(codecId, bitsPerCodedSample);
                _audioDecodersMap.Add(codecId, decoder);
            }
            return decoder;
        }

        /// <summary>Maps a raw audio frame type to its FFmpeg codec id.</summary>
        /// <exception cref="ArgumentOutOfRangeException">Unrecognized audio frame type.</exception>
        FFmpegAudioCodecId DetectAudioCodecId(RawAudioFrame audioFrame)
        {
            if (audioFrame is RawAACFrame) return FFmpegAudioCodecId.AAC;
            if (audioFrame is RawG711AFrame) return FFmpegAudioCodecId.G711A;
            if (audioFrame is RawG711UFrame) return FFmpegAudioCodecId.G711U;
            if (audioFrame is RawG726Frame) return FFmpegAudioCodecId.G726;
            throw new ArgumentOutOfRangeException(nameof(audioFrame));
        }

        /// <summary>
        /// Returns the decoder for the frame's video codec, creating and caching it on first use.
        /// </summary>
        FFmpegVideoDecoder GetVideoDecoderForFrame(RawVideoFrame videoFrame)
        {
            FFmpegVideoCodecId codecId = DetectVideoCodecId(videoFrame);
            if (!_videoDecodersMap.TryGetValue(codecId, out FFmpegVideoDecoder decoder))
            {
                decoder = FFmpegVideoDecoder.CreateDecoder(codecId);
                _videoDecodersMap.Add(codecId, decoder);
            }
            return decoder;
        }

        /// <summary>Maps a raw video frame type to its FFmpeg codec id.</summary>
        /// <exception cref="ArgumentOutOfRangeException">Unrecognized video frame type.</exception>
        FFmpegVideoCodecId DetectVideoCodecId(RawVideoFrame videoFrame)
        {
            if (videoFrame is RawJpegFrame) return FFmpegVideoCodecId.MJPEG;
            if (videoFrame is RawH264Frame) return FFmpegVideoCodecId.H264;
            throw new ArgumentOutOfRangeException(nameof(videoFrame));
        }

        /// <summary>
        /// Appends a timestamped message to the message box (thread-safe via BeginInvoke).
        /// The box is cleared every 30 lines so it does not grow without bound.
        /// </summary>
        void UpdateMessage(string msg)
        {
            this.BeginInvoke((EventHandler)(delegate
            {
                msg = DateTime.Now.ToString("yyyy-MM-dd HH:mm:ss") + msg;
                if (_msgLine++ > 30)
                {
                    rtbMsg.Clear();
                    // FIX: reset the counter; previously it kept incrementing, so once
                    // it passed 30 the box was cleared on every single message.
                    _msgLine = 1;
                }
                rtbMsg.AppendText(msg + "\n");
                Console.WriteLine(msg);
            }));
        }
    }
}
标签: return, RtspClient, 解码, decodedFrame, 音视频, audioFrame, UpdateMessage, decoder, new
来源(From): https://www.cnblogs.com/chen1880/p/16917788.html