1. The streamer imposes no restrictions on its input and output files
(1) The input may be a network stream URL, which turns the program into a stream relay (restreamer).
(2) The input may be replaced with a callback function that reads from memory, so that video data already held in memory can be pushed; a sketch of this approach is given right after this list.
(3) The input may be replaced with a system capture device (through libavdevice), with an encoding stage added, which yields a real-time streamer for live broadcasting.
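For option (2), the usual way to feed in-memory data to libavformat is a custom AVIOContext whose read callback copies bytes out of the caller's buffer. Below is a minimal sketch of that idea; buffer_data, read_packet and open_memory_input are illustrative names rather than FFmpeg APIs, and error handling is kept to a minimum.

#include <string.h>
#include <libavformat/avformat.h>

struct buffer_data {
    const uint8_t *ptr;   /* current read position         */
    size_t         left;  /* bytes remaining in the buffer */
};

/* Called by libavformat whenever the demuxer needs more input data. */
static int read_packet(void *opaque, uint8_t *buf, int buf_size)
{
    struct buffer_data *bd = (struct buffer_data *)opaque;
    if (bd->left == 0)
        return AVERROR_EOF;
    if (buf_size > (int)bd->left)
        buf_size = (int)bd->left;
    memcpy(buf, bd->ptr, buf_size);
    bd->ptr  += buf_size;
    bd->left -= buf_size;
    return buf_size;
}

/* Open a demuxer on an in-memory buffer; bd must stay valid until the
 * input context is closed. */
static int open_memory_input(AVFormatContext **ifmt_ctx, struct buffer_data *bd)
{
    unsigned char *io_buf = (unsigned char *)av_malloc(4096);
    AVIOContext *avio = avio_alloc_context(io_buf, 4096, 0 /* read-only */,
                                           bd, read_packet, NULL, NULL);
    if (!io_buf || !avio)
        return AVERROR(ENOMEM);
    *ifmt_ctx = avformat_alloc_context();
    (*ifmt_ctx)->pb = avio;   /* hand the custom I/O context to the demuxer */
    return avformat_open_input(ifmt_ctx, NULL, NULL, NULL);
}

A context opened this way can be used exactly like ifmt_ctx in the program below; everything from avformat_find_stream_info() onward stays the same.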
2. Implementation
#include <stdio.h>
#define __STDC_CONSTANT_MACROS
#ifdef _WIN32
extern "C"
{
#include "libavformat/avformat.h"
#include "libavutil/mathematics.h"
#include "libavutil/time.h"
#include "libavutil/opt.h"
};
#else
#ifdef __cplusplus
extern "C"
{
#endif
#include <libavformat/avformat.h>
#include <libavutil/mathematics.h>
#include <libavutil/time.h>
#include "libavutil/opt.h"
#ifdef __cplusplus
};
#endif
#endif
#pragma comment(lib,"avformat.lib")
#pragma comment(lib,"avcodec.lib")
#pragma comment(lib,"avutil.lib")
int main(int argc, char* argv[])
{
    AVOutputFormat *ofmt = NULL;
    AVFormatContext *ifmt_ctx = NULL, *ofmt_ctx = NULL;
    AVPacket pkt;
    const char *in_filename, *out_filename;
    int ret, i;
    int videoindex = -1;
    int frame_index = 0;
    int64_t start_time = 0;
    int64_t deltpts = 0;   /* timestamp of the first video packet, used as the pacing origin */

    /* Input: a multicast MPEG-TS stream. Output: another multicast address. */
    in_filename = "udp://238.1.1.11:1234";
    out_filename = "udp://238.1.1.10:6016";

    av_register_all();          /* required before FFmpeg 4.0; unnecessary on newer releases */
    avformat_network_init();

    /* Enlarge the UDP receive buffer so bursts on the input are not dropped. */
    AVDictionary *inputdic = NULL;
    av_dict_set(&inputdic, "buffer_size", "10485760", 0);
    av_dict_set(&inputdic, "reuse", "1", 0);

    if ((ret = avformat_open_input(&ifmt_ctx, in_filename, 0, &inputdic)) < 0) {
        printf("Could not open input file.\n");
        goto end;
    }
    if ((ret = avformat_find_stream_info(ifmt_ctx, 0)) < 0) {
        printf("Failed to retrieve input stream information.\n");
        goto end;
    }

    /* Locate the video stream: its timestamps drive the send pacing below. */
    for (i = 0; i < (int)ifmt_ctx->nb_streams; i++) {
        if (ifmt_ctx->streams[i]->codecpar->codec_type == AVMEDIA_TYPE_VIDEO) {
            videoindex = i;
            break;
        }
    }
    av_dump_format(ifmt_ctx, 0, in_filename, 0);

    /* The output is remuxed as MPEG-TS over UDP. */
    avformat_alloc_output_context2(&ofmt_ctx, NULL, "mpegts", out_filename);
    if (!ofmt_ctx) {
        printf("Could not create output context\n");
        ret = AVERROR_UNKNOWN;
        goto end;
    }
    ofmt = ofmt_ctx->oformat;

    /* Create one output stream per input stream and copy the codec parameters:
     * this is pure remuxing, nothing is decoded or re-encoded. */
    for (i = 0; i < (int)ifmt_ctx->nb_streams; i++) {
        AVStream *in_stream = ifmt_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(ofmt_ctx, NULL);
        if (!out_stream) {
            printf("Failed allocating output stream\n");
            ret = AVERROR_UNKNOWN;
            goto end;
        }
        ret = avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
        if (ret < 0) {
            printf("Failed to copy codec parameters from input to output stream\n");
            goto end;
        }
        /* Let the MPEG-TS muxer pick its own codec tag. */
        out_stream->codecpar->codec_tag = 0;
    }
    av_dump_format(ofmt_ctx, 0, out_filename, 1);

    if (!(ofmt->flags & AVFMT_NOFILE)) {
        /* 1316 bytes = 7 TS packets, so each UDP datagram stays below a typical MTU. */
        AVDictionary *dic = NULL;
        av_dict_set(&dic, "pkt_size", "1316", 0);
        av_dict_set(&dic, "reuse", "1", 0);
        ret = avio_open2(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE, NULL, &dic);
        if (ret < 0) {
            printf("Could not open output URL '%s'\n", out_filename);
            goto end;
        }
    }

    /* Keep PES packets small so the muxer emits output more smoothly. */
    av_opt_set(ofmt_ctx->priv_data, "pes_payload_size", "300", 0);

    ret = avformat_write_header(ofmt_ctx, NULL);
    if (ret < 0) {
        printf("Error occurred when opening output URL\n");
        goto end;
    }

    start_time = av_gettime();
    while (1) {
        AVStream *in_stream, *out_stream;
        ret = av_read_frame(ifmt_ctx, &pkt);
        if (ret < 0)
            break;

        /* Some inputs carry no timestamps; synthesize them from the video frame rate. */
        if (pkt.pts == AV_NOPTS_VALUE) {
            AVRational time_base1 = ifmt_ctx->streams[videoindex]->time_base;
            /* Duration of one frame in AV_TIME_BASE (microsecond) units. */
            int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(ifmt_ctx->streams[videoindex]->r_frame_rate);
            pkt.pts = (double)(frame_index * calc_duration) / (double)(av_q2d(time_base1) * AV_TIME_BASE);
            pkt.dts = pkt.pts;
            pkt.duration = (double)calc_duration / (double)(av_q2d(time_base1) * AV_TIME_BASE);
        }

        /* Pace the output: wait until the packet's timestamp, measured from the
         * first video packet, has elapsed in wall-clock time, so the stream is
         * pushed at its native rate rather than as fast as possible. */
        if (pkt.stream_index == videoindex) {
            AVRational time_base = ifmt_ctx->streams[videoindex]->time_base;
            AVRational time_base_q = { 1, AV_TIME_BASE };
            int64_t pts_time = av_rescale_q(pkt.dts, time_base, time_base_q);
            int64_t now_time = av_gettime() - start_time;
            static int first = 1;
            if (first) {
                deltpts = pts_time;   /* remember the first timestamp as the origin */
                first = 0;
            }
            if (pts_time - deltpts > now_time)
                av_usleep(pts_time - deltpts - now_time);
        }

        in_stream = ifmt_ctx->streams[pkt.stream_index];
        out_stream = ofmt_ctx->streams[pkt.stream_index];

        /* Rescale timestamps from the input stream's time base to the output's. */
        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;

        if (pkt.stream_index == videoindex) {
            frame_index++;
        }

        ret = av_interleaved_write_frame(ofmt_ctx, &pkt);
        if (ret < 0) {
            printf("Error muxing packet\n");
            break;
        }
        av_packet_unref(&pkt);   /* av_free_packet() is deprecated */
    }

    av_write_trailer(ofmt_ctx);

end:
    avformat_close_input(&ifmt_ctx);
    /* Close the output. */
    if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
        avio_close(ofmt_ctx->pb);
    avformat_free_context(ofmt_ctx);
    if (ret < 0 && ret != AVERROR_EOF) {
        printf("Error occurred.\n");
        return -1;
    }
    return 0;
}
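A simple way to try the relay, assuming the multicast addresses above are reachable on your network, is to publish a test clip to the input address with something like ffmpeg -re -i input.mp4 -c copy -f mpegts udp://238.1.1.11:1234 (input.mp4 is a placeholder for any local file) and then watch the relayed output with ffplay udp://238.1.1.10:6016.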
References
Lei Xiaohua's blog (雷神博客): https://blog.csdn.net/leixiaohua1020/article/details/39803457