The code and project are available at: https://download.csdn.net/download/daqinzl/88156926
Development tool: Visual Studio 2019
To play the stream, you can use ffplay.exe from the FFmpeg tool set, e.g. ffplay udp://238.1.1.10:6016 (the address and port must match what the pusher sends to; the code below pushes to udp://224.1.1.1:5001, so the matching command is ffplay udp://224.1.1.1:5001).
See also (C# FFmpeg example (API): pulling and playing a UDP multicast stream with FFmpeg): https://blog.csdn.net/daqinzl/article/details/132112075
There are plenty of C/C++ examples of calling the FFmpeg API online.
Using ffmpeg.autogen from C# is straightforward: copy the C/C++ FFmpeg API code into C# and prefix each FFmpeg function call with ffmpeg. (the static class exposed by FFmpeg.AutoGen).
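For example, the C call av_dict_set(&opts, "framerate", "25", 0) becomes the following in an unsafe C# context (a minimal sketch; opts is just an illustrative variable name):

AVDictionary* opts = null;
ffmpeg.av_dict_set(&opts, "framerate", "25", 0);

Constants move the same way, e.g. AV_DICT_MATCH_CASE becomes ffmpeg.AV_DICT_MATCH_CASE.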
For a C/C++ example that pushes a UDP multicast stream through the FFmpeg API, see: https://blog.csdn.net/daqinzl/article/details/132080204
Main reference (C# FFmpeg example (API): pulling and playing an RTMP stream with FFmpeg): https://blog.csdn.net/vanjoge/article/details/79657874
The reference implements pulling and playing an RTMP stream. Starting from its source code, combined with the C/C++ FFmpeg API examples above, this article makes some changes so that C# with ffmpeg.autogen pushes a UDP multicast stream instead.
The main code follows. The overall pipeline: capture the desktop with the gdigrab input device, decode, convert to YUV420P with sws_scale, encode to H.264, and mux to MPEG-TS over UDP.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using System.Windows.Forms;
using FFmpeg.AutoGen;
namespace FFmpegDemo
{
static unsafe class Program
{
/// <summary>
/// The main entry point for the application.
/// </summary>
[STAThread]
static void Main()
{
//Application.EnableVisualStyles();
//Application.SetCompatibleTextRenderingDefault(false);
//Application.Run(new frmPlayer());
//Locate and register the FFmpeg DLL directory
FFmpegBinariesHelper.RegisterFFmpegBinaries();
ffmpeg.av_register_all();
ffmpeg.avdevice_register_all();
ffmpeg.avcodec_register_all();
ffmpeg.avformat_network_init();
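//Note: av_register_all()/avcodec_register_all() are required on FFmpeg 3.x and are
//deprecated no-ops from FFmpeg 4.0 on; avdevice_register_all() is still needed so the
//"gdigrab" input device used below can be found, and avformat_network_init() is needed
//for the UDP network output.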
AVFormatContext* m_fmt_ctx = null;
AVInputFormat* m_input_fmt = null;
int video_stream = -1;
//ffmpeg.avcodec_register_all();
string deviceName = "desktop";
string inputformat = "gdigrab";
int FPS = 23; //15
m_fmt_ctx = ffmpeg.avformat_alloc_context();
m_input_fmt = ffmpeg.av_find_input_format(inputformat);
AVDictionary* deoptions = null;
ffmpeg.av_dict_set_int(&deoptions, "framerate", FPS, ffmpeg.AV_DICT_MATCH_CASE);
ffmpeg.av_dict_set_int(&deoptions, "rtbufsize", 3041280 * 100 * 5, 0);
//Without a large enough real-time buffer, a live input source can show corrupted frames. Unit: bytes.
//av_dict_set(&deoptions, "buffer_size", "10485760", 0);
//av_dict_set(&deoptions, "reuse", "1", 0);
int ret = ffmpeg.avformat_open_input(&m_fmt_ctx, deviceName, m_input_fmt, &deoptions);
if (ret != 0)
{
return;
}
ffmpeg.av_dict_free(&deoptions);
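//Tip (illustrative addition, not in the original project): an FFmpeg error code can be
//turned into readable text with av_strerror, e.g.:
//  byte* errbuf = stackalloc byte[1024];
//  ffmpeg.av_strerror(ret, errbuf, 1024);
//  Console.WriteLine(System.Runtime.InteropServices.Marshal.PtrToStringAnsi((IntPtr)errbuf));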
ret = ffmpeg.avformat_find_stream_info(m_fmt_ctx, null);
if (ret < 0)
{
return;
}
ffmpeg.av_dump_format(m_fmt_ctx, 0, deviceName, 0);
video_stream = ffmpeg.av_find_best_stream(m_fmt_ctx, AVMediaType.AVMEDIA_TYPE_VIDEO, -1, -1, null, 0);
if (video_stream < 0)
{
return;
}
AVCodecContext* _codec_ctx = m_fmt_ctx->streams[video_stream]->codec;
AVCodec* _codec = ffmpeg.avcodec_find_decoder(_codec_ctx->codec_id);
if (_codec == null)
{
return;
}
ret = ffmpeg.avcodec_open2(_codec_ctx, _codec, null);
if (ret != 0)
{
return;
}
int width = m_fmt_ctx->streams[video_stream]->codec->width;
int height = m_fmt_ctx->streams[video_stream]->codec->height;
int fps = m_fmt_ctx->streams[video_stream]->codec->framerate.num > 0 ? m_fmt_ctx->streams[video_stream]->codec->framerate.num : 25;
AVPixelFormat videoType = m_fmt_ctx->streams[video_stream]->codec->pix_fmt;
Console.WriteLine("avstream timebase : " + m_fmt_ctx->streams[video_stream]->time_base.num + " / " + m_fmt_ctx->streams[video_stream]->time_base.den);
AVDictionary* enoptions = null;
//av_dict_set(&enoptions, "preset", "superfast", 0);
//av_dict_set(&enoptions, "tune", "zerolatency", 0);
ffmpeg.av_dict_set(&enoptions, "preset", "ultrafast", 0);
ffmpeg.av_dict_set(&enoptions, "tune", "zerolatency", 0);
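//"ultrafast" trades compression efficiency for minimal CPU cost and latency, and
//"zerolatency" disables x264's frame lookahead and buffering, keeping end-to-end
//delay low for live streaming.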
//TODO: the options below are UDP protocol / muxer options (pkt_size, fifo_size,
//buffer_size, reuse), not x264 encoder options; if needed they belong on the output
//side (for example as URL parameters on the udp:// address), not in the encoder dictionary.
//ffmpeg.av_dict_set(&enoptions, "pkt_size", "1316", 0); //maximum UDP packet size
//ffmpeg.av_dict_set(&enoptions, "fifo_size", "18800", 0);
//ffmpeg.av_dict_set(&enoptions, "buffer_size", "1000000", 0);
//ffmpeg.av_dict_set(&enoptions, "bitrate", "11000000", 0); //the encoder bitrate is set via vc->bit_rate below instead
//ffmpeg.av_dict_set(&enoptions, "reuse", "1", 0);
AVCodec* codec = ffmpeg.avcodec_find_encoder(AVCodecID.AV_CODEC_ID_H264);
if (codec == null)
{
Console.WriteLine( "avcodec_find_encoder failed!" );
return;
}
AVCodecContext* vc = ffmpeg.avcodec_alloc_context3(codec);
if (vc == null)
{
Console.WriteLine("avcodec_alloc_context3 failed!" );
return;
}
Console.WriteLine("avcodec_alloc_context3 success!" );// FFmpeg.AutoGen.
vc->flags |= ffmpeg.AV_CODEC_FLAG_GLOBAL_HEADER;
vc->codec_id = AVCodecID.AV_CODEC_ID_H264;
vc->codec_type = FFmpeg.AutoGen.AVMediaType.AVMEDIA_TYPE_VIDEO;
vc->pix_fmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
vc->width = width;
vc->height = height;
vc->time_base.num = 1;
vc->time_base.den = FPS;
vc->framerate.num = FPS; //framerate = FPS/1 frames per second
vc->framerate.den = 1;
vc->bit_rate = 10241000;
vc->gop_size = 120;
vc->qmin = 10;
vc->qmax = 51;
vc->max_b_frames = 0;
vc->profile = ffmpeg.FF_PROFILE_H264_MAIN;
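//gop_size = 120 gives a keyframe roughly every 5 seconds at 23 fps; qmin/qmax bound
//the rate-control quantizer range; max_b_frames = 0 avoids B-frame reordering delay,
//consistent with the zerolatency tuning above.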
ret = ffmpeg.avcodec_open2(vc, codec, &enoptions);
if (ret != 0)
{
return;
}
Console.WriteLine( "avcodec_open2 success!" );
ffmpeg.av_dict_free(&enoptions);
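//avcodec_open2 consumes the options it recognizes from enoptions; anything left in the
//dictionary was not recognized by the encoder and could have been inspected with
//ffmpeg.av_dict_count(enoptions) before the free above.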
SwsContext* vsc = null;
vsc = ffmpeg.sws_getCachedContext(vsc,
width, height, videoType, //source width, height, pixel format
width, height, AVPixelFormat.AV_PIX_FMT_YUV420P, //destination width, height, pixel format
ffmpeg.SWS_BICUBIC, //scaling algorithm
null, null, null
);
if (vsc==null)
{
Console.WriteLine("sws_getCachedContext failed!");
return;
}
AVFrame* yuv = ffmpeg.av_frame_alloc();
yuv->format = (int)AVPixelFormat.AV_PIX_FMT_YUV420P;
yuv->width = width;
yuv->height = height;
yuv->pts = 0;
ret = ffmpeg.av_frame_get_buffer(yuv, 32);
if (ret != 0)
{
return;
}
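//av_frame_get_buffer allocated 32-byte-aligned planes; sws_scale below writes directly
//into yuv->data. If the frame were ever reference-counted/shared, av_frame_make_writable(yuv)
//should be called before each scale; with this single-owner frame it is not strictly required.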
//string outUrl = "rtmp://192.168.0.105:1935/live/desktop";
string outUrl = "udp://224.1.1.1:5001";
AVFormatContext* ic = null;
//ret = ffmpeg.avformat_alloc_output_context2(&ic, null, "flv", outUrl); //RTMP
ret = ffmpeg.avformat_alloc_output_context2(&ic, null, "mpegts", outUrl); //MPEG-TS over UDP
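//UDP protocol options such as pkt_size can also be appended to the URL, e.g.
//"udp://224.1.1.1:5001?pkt_size=1316"; 1316 bytes = 7 x 188-byte MPEG-TS packets,
//a common payload size for TS over UDP.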
if (ret < 0)
{
return;
}
AVStream* st = ffmpeg.avformat_new_stream(ic, null);
if (st == null)
{
return;
}
st->codecpar->codec_tag = 0;
ffmpeg.avcodec_parameters_from_context(st->codecpar, vc);
ffmpeg.av_dump_format(ic, 0, outUrl, 1);
ret = ffmpeg.avio_open(&ic->pb, outUrl, ffmpeg.AVIO_FLAG_WRITE);
if (ret != 0)
{
return;
}
ret = ffmpeg.avformat_write_header(ic, null);
if (ret != 0)
{
return;
}
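//avformat_write_header may adjust st->time_base (the mpegts muxer uses 1/90000),
//which is why the packet timestamps below are rescaled into st->time_base.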
AVPacket* packet = ffmpeg.av_packet_alloc();
AVPacket* Encodepacket = ffmpeg.av_packet_alloc();
int frameIndex = 0;
int EncodeIndex = 0;
AVFrame* rgb = ffmpeg.av_frame_alloc();
AVBitStreamFilterContext* h264bsfc = ffmpeg.av_bitstream_filter_init("h264_mp4toannexb"); //allocated but never applied in this example; closed below
long startpts = m_fmt_ctx->start_time;
long lastpts = 0;
AVRational bq = new AVRational(); bq.num = 1; bq.den = FPS;
AVRational cq = new AVRational(); cq.num = 1; cq.den = ffmpeg.AV_TIME_BASE;
long duration = ffmpeg.av_rescale_q(1, bq, cq);
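//duration is one frame's length rescaled from the 1/FPS time base to the
//1/AV_TIME_BASE (microsecond) time base: for FPS = 23,
//duration = 1000000/23 ≈ 43478 microseconds per frame.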
int got_picture = 0;
while (frameIndex < 2000000)
{
ret = ffmpeg.av_read_frame(m_fmt_ctx, packet);
if (ret < 0)
{
break;
}
if (packet->stream_index == video_stream)
{
ret = ffmpeg.avcodec_decode_video2(_codec_ctx, rgb, &got_picture, packet);
if (ret < 0)
{
Console.WriteLine("Decode Error.\n");
return;
}
if (got_picture != 0)
{
int h = ffmpeg.sws_scale(vsc, rgb->data, rgb->linesize, 0, height, //source planes
yuv->data, yuv->linesize); //destination planes
long guesspts = frameIndex * duration;
yuv->pts = guesspts;
frameIndex++;
ret = ffmpeg.avcodec_encode_video2(vc, Encodepacket, yuv, &got_picture);
if (ret < 0)
{
Console.WriteLine("Failed to encode!\n");
break;
}
if (got_picture == 1)
{
Encodepacket->pts = ffmpeg.av_rescale_q(EncodeIndex, vc->time_base, st->time_base);
Encodepacket->dts = Encodepacket->pts;
Console.WriteLine("frameindex : " + EncodeIndex.ToString() + " pts : " + Encodepacket->pts.ToString() + " dts: " + Encodepacket->dts.ToString() + " encodeSize:" + Encodepacket->size.ToString() + " curtime - lasttime " + (Encodepacket->pts - lastpts).ToString());
lastpts = Encodepacket->pts;
ret = ffmpeg.av_interleaved_write_frame(ic, Encodepacket);
EncodeIndex++;
ffmpeg.av_packet_unref(Encodepacket);
}
}
}
ffmpeg.av_packet_unref(packet);
}
ret = ffmpeg.avcodec_send_frame(vc, null);
while (ret >= 0)
{
ret = ffmpeg.avcodec_receive_packet(vc, Encodepacket);
if (ret == ffmpeg.AVERROR(ffmpeg.EAGAIN) || ret == ffmpeg.AVERROR_EOF)
{
break;
}
if (ret < 0)
{
break;
}
Encodepacket->pts = ffmpeg.av_rescale_q(EncodeIndex, vc->time_base, st->time_base);
Encodepacket->dts = Encodepacket->pts;
ret = ffmpeg.av_interleaved_write_frame(ic, Encodepacket);
EncodeIndex++;
ffmpeg.av_packet_unref(Encodepacket);
}
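//Draining: sending a null frame switches the encoder into flush mode, and
//avcodec_receive_packet then returns any buffered packets until AVERROR_EOF.
//With max_b_frames = 0 and zerolatency there is usually little left to drain,
//but this is still the correct shutdown sequence.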
ffmpeg.av_write_trailer(ic);
ffmpeg.av_packet_free(&packet);
ffmpeg.av_packet_free(&Encodepacket);
ffmpeg.av_frame_free(&rgb);
ffmpeg.av_frame_free(&yuv);
ffmpeg.av_bitstream_filter_close(h264bsfc);
h264bsfc = null;
if (vsc != null)
{
ffmpeg.sws_freeContext(vsc);
vsc = null;
}
if (_codec_ctx != null)
ffmpeg.avcodec_close(_codec_ctx);
_codec_ctx = null;
_codec = null;
if (vc != null)
ffmpeg.avcodec_free_context(&vc);
if (m_fmt_ctx != null)
ffmpeg.avformat_close_input(&m_fmt_ctx);
if (ic!=null && (ic->flags & ffmpeg.AVFMT_NOFILE)==0)
ffmpeg.avio_closep(&ic->pb);
if (ic != null)
{
ffmpeg.avformat_free_context(ic);
ic = null;
}
m_input_fmt = null;
return;
}
}
}
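To test: build and run the program, then play the multicast stream with ffplay, using the same address and port as the push URL in the code:

ffplay udp://224.1.1.1:5001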