RTSP再学习 -- 利用FFmpeg 将 rtsp 获取H264裸流并保存到文件中

如需转载请注明出处:https://blog.csdn.net/qq_29350001/article/details/78214267

既然已经能够经过 RTSP 获取 h264 裸流了。那么经过 FFmpeg 将其保存到文件中怎么做呢?

1、首先RTSP获取 h264 裸流

咱们上面两篇文章主要讲的是经过 rtsp://Your ip:554/stream_chn0.h265 播放 H.265 视频流。

PS:我刚试了一下,个人 FFmpeg 程序暂时不支持 h265 ...   以前编译的时候,只提供了 x264 没有 x265

若是感兴趣参看下面两篇文章添加。

参看:使用VS2015添加对ffmpeg添加h265 支持。

参看:ffmpeg 编码H265和H264对比

再结合以前讲的 FFmpeg 再学习系列,应该是没问题的。不过很久没有弄了,早忘了。

 

那如今没有能够播放的 H.264 视频流了啊,怎么办?

有办法,以前讲过一篇文章,参看:LIVE555再学习 -- VLC搭建RTSP服务器(转) 用VLC搭建一个不就完了。

 

固然还能够直接用 live555,参看:LIVE555再学习 -- live555实现RTSP直播服务器  (推荐)

2、FFmpeg 将H.264 裸流保存到文件

这个也好说,以前有讲到,参看:FFmpeg再学习 -- SDL 环境搭建和视频显示

将其改改就能够了。

具体代码以下:

参看:利用ffmpeg将RTSP传输的h264原始码流保存到文件中

 

#include "stdafx.h"

#include <stdio.h>  

#define __STDC_CONSTANT_MACROS  

#ifdef _WIN32  
//Windows  
extern "C"
{
#include "libavcodec/avcodec.h"  
#include "libavformat/avformat.h"  
#include "libswscale/swscale.h"  
#include "SDL2/SDL.h"  
};
#else  
//Linux...  
#ifdef __cplusplus  
extern "C"
{
#endif  
#include <libavcodec/avcodec.h>  
#include <libavformat/avformat.h>  
#include <libswscale/swscale.h>  
#include <SDL2/SDL.h>  
#ifdef __cplusplus  
};
#endif  
#endif  

int main(int argc, char* argv[])
{

	AVFormatContext *pFormatCtx;
	int             i, videoindex;
	AVCodecContext  *pCodecCtx;
	AVCodec         *pCodec;
	AVFrame *pFrame, *pFrameYUV;
	uint8_t *out_buffer;
	AVPacket *packet;
	int ret, got_picture;


	struct SwsContext *img_convert_ctx;
	// 改为你本身的 URL
	char filepath[] = "rtsp://192.168.2.xx:8554/1"; 
	av_register_all();
	avformat_network_init();
	pFormatCtx = avformat_alloc_context();

	if (avformat_open_input(&pFormatCtx, filepath, NULL, NULL) != 0)////打开网络流或文件流  
	{
		printf("Couldn't open input stream.\n");
		return -1;
	}
	if (avformat_find_stream_info(pFormatCtx, NULL)<0)
	{
		printf("Couldn't find stream information.\n");
		return -1;
	}
	videoindex = -1;
	for (i = 0; i<pFormatCtx->nb_streams; i++)
		if (pFormatCtx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			videoindex = i;
			break;
		}
	if (videoindex == -1)
	{
		printf("Didn't find a video stream.\n");
		return -1;
	}
	pCodecCtx = pFormatCtx->streams[videoindex]->codec;
	pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
	if (pCodec == NULL)
	{
		printf("Codec not found.\n");
		return -1;
	}
	if (avcodec_open2(pCodecCtx, pCodec, NULL)<0)
	{
		printf("Could not open codec.\n");
		return -1;
	}
	pFrame = av_frame_alloc();
	pFrameYUV = av_frame_alloc();
	out_buffer = (uint8_t *)av_malloc(avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height));
	avpicture_fill((AVPicture *)pFrameYUV, out_buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

	//Output Info---输出一些文件(RTSP)信息  
	printf("---------------- File Information ---------------\n");
	av_dump_format(pFormatCtx, 0, filepath, 0);
	printf("-------------------------------------------------\n");

	img_convert_ctx = sws_getContext(pCodecCtx->width, pCodecCtx->height, pCodecCtx->pix_fmt,
		pCodecCtx->width, pCodecCtx->height, PIX_FMT_YUV420P, SWS_BICUBIC, NULL, NULL, NULL);


	packet = (AVPacket *)av_malloc(sizeof(AVPacket));

	FILE *fpSave;
	if ((fpSave = fopen("geth264.h264", "ab")) == NULL) //h264保存的文件名  
		return 0;
	for (;;)
	{
		//------------------------------  
		if (av_read_frame(pFormatCtx, packet) >= 0)
		{
			if (packet->stream_index == videoindex)
			{
				fwrite(packet->data, 1, packet->size, fpSave);//写数据到文件中  
			}
			av_free_packet(packet);
		}
	}


	//--------------  
	av_frame_free(&pFrameYUV);
	av_frame_free(&pFrame);
	avcodec_close(pCodecCtx);
	avformat_close_input(&pFormatCtx);

	return 0;
}

 

 

调试结果显示以下:


 

生成 geth264.h264 文件,可播放。

3、工程下载

下载:利用FFmpeg 将 rtsp 获取H264裸流并保存到文件中 工程

思考,这里就有两个问题未完成,一个就是怎么将 H265的裸流保存到文件,再有怎么保存成其余格式好比MP4。

保存到MP4文件代码以下:

 

#include "stdafx.h"
#ifdef __cplusplus
extern "C" {
#endif
#include <libavcodec/avcodec.h>
#include <libavdevice/avdevice.h>
#include <libavformat/avformat.h>
#include <libavfilter/avfilter.h>
#include <libavutil/avutil.h>
#include <libswscale/swscale.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <math.h>
#ifdef __cplusplus
}
#endif

AVFormatContext *i_fmt_ctx;
AVStream *i_video_stream;
AVFormatContext *o_fmt_ctx;
AVStream *o_video_stream;

int _tmain(int argc, char **argv)
{
	avcodec_register_all();
	av_register_all();
	avformat_network_init();

	/* should set to NULL so that avformat_open_input() allocate a new one */
	i_fmt_ctx = NULL;
	char rtspUrl[] = "rtsp://192.168.2.xx:8554/H264unicast";
	const char *filename = "1.mp4";
	if (avformat_open_input(&i_fmt_ctx, rtspUrl, NULL, NULL) != 0)
	{
		fprintf(stderr, "could not open input file\n");
		return -1;
	}

	if (avformat_find_stream_info(i_fmt_ctx, NULL)<0)
	{
		fprintf(stderr, "could not find stream info\n");
		return -1;
	}

	//av_dump_format(i_fmt_ctx, 0, argv[1], 0);

	/* find first video stream */
	for (unsigned i = 0; i<i_fmt_ctx->nb_streams; i++)
	{
		if (i_fmt_ctx->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO)
		{
			i_video_stream = i_fmt_ctx->streams[i];
			break;
		}
	}
	if (i_video_stream == NULL)
	{
		fprintf(stderr, "didn't find any video stream\n");
		return -1;
	}

	avformat_alloc_output_context2(&o_fmt_ctx, NULL, NULL, filename);

	/*
	* since all input files are supposed to be identical (framerate, dimension, color format, ...)
	* we can safely set output codec values from first input file
	*/
	o_video_stream = avformat_new_stream(o_fmt_ctx, NULL);
	{
		AVCodecContext *c;
		c = o_video_stream->codec;
		c->bit_rate = 400000;
		c->codec_id = i_video_stream->codec->codec_id;
		c->codec_type = i_video_stream->codec->codec_type;
		c->time_base.num = i_video_stream->time_base.num;
		c->time_base.den = i_video_stream->time_base.den;
		fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
		c->width = i_video_stream->codec->width;
		c->height = i_video_stream->codec->height;
		c->pix_fmt = i_video_stream->codec->pix_fmt;
		printf("%d %d %d", c->width, c->height, c->pix_fmt);
		c->flags = i_video_stream->codec->flags;
		c->flags |= CODEC_FLAG_GLOBAL_HEADER;
		c->me_range = i_video_stream->codec->me_range;
		c->max_qdiff = i_video_stream->codec->max_qdiff;
		c->qmin = i_video_stream->codec->qmin;
		c->qmax = i_video_stream->codec->qmax;
		c->qcompress = i_video_stream->codec->qcompress;
	}

	avio_open(&o_fmt_ctx->pb, filename, AVIO_FLAG_WRITE);
	avformat_write_header(o_fmt_ctx, NULL);

	int last_pts = 0;
	int last_dts = 0;
	int64_t pts, dts;
	while (1)
	{
		AVPacket i_pkt;
		av_init_packet(&i_pkt);
		i_pkt.size = 0;
		i_pkt.data = NULL;
		if (av_read_frame(i_fmt_ctx, &i_pkt) <0)
			break;
		/*
		* pts and dts should increase monotonically
		* pts should be >= dts
		*/
		i_pkt.flags |= AV_PKT_FLAG_KEY;
		pts = i_pkt.pts;
		i_pkt.pts += last_pts;
		dts = i_pkt.dts;
		i_pkt.dts += last_dts;
		i_pkt.stream_index = 0;

		//printf("%lld %lld\n", i_pkt.pts, i_pkt.dts);
		static int num = 1;
		printf("frame %d\n", num++);
		av_interleaved_write_frame(o_fmt_ctx, &i_pkt);
		//av_free_packet(&i_pkt);
		//av_init_packet(&i_pkt);
	}
	last_dts += dts;
	last_pts += pts;
	avformat_close_input(&i_fmt_ctx);
	av_write_trailer(o_fmt_ctx);
	avcodec_close(o_fmt_ctx->streams[0]->codec);
	av_freep(&o_fmt_ctx->streams[0]->codec);
	av_freep(&o_fmt_ctx->streams[0]);
	avio_close(o_fmt_ctx->pb);
	av_free(o_fmt_ctx);
	return 0;
}

 

如需转载请注明出处:https://blog.csdn.net/qq_29350001/article/details/78214267