[MFC] RTSP client using FFMPEG
This is an RTSP client class for connecting to IP cameras using FFmpeg.
The essential part is establishing the RTSP connection and acquiring frame packets through the FFmpeg API, so the decoder used by the code is not included here.
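Before the code itself, here is a minimal usage sketch. This is hypothetical caller code: only `CRtpStreamer`'s own API (`AddCallback`, `SetRequestDecodedVideoFrame`, `Start`, `Stop`, `GetNotifyString`) comes from the class below, while the surrounding panel class and the camera address are made up for illustration.

```cpp
// Hypothetical caller (illustrative). SCallback is registered by pointer,
// so it must outlive the streaming session - keep it as a member.
class CCameraPanel
{
    CRtpStreamer m_Streamer;
    CRtpStreamer::SCallback m_Callback;

    static void OnStreamNotify(CRtpStreamer* pStreamer,
        CRtpStreamer::ENotify notify, LONGLONG param, void* pOwner)
    {
        TRACE(_T("Stream notify: %s\n"),
            (LPCTSTR)CRtpStreamer::GetNotifyString(notify));
    }

public:
    void StartStream()
    {
        m_Callback.OnStreamNotify = OnStreamNotify;
        m_Callback.pOwner = this;
        m_Streamer.AddCallback(&m_Callback);          // must be re-registered after every Stop()
        m_Streamer.SetRequestDecodedVideoFrame(TRUE); // also deliver decoded frames
        m_Streamer.Start((LPTSTR)_T("192.168.0.64"), (LPTSTR)_T("/stream1"),
                         554, (LPTSTR)_T("admin"), (LPTSTR)_T("pass"));
    }
    void StopStream() { m_Streamer.Stop(); }
};
```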
# header file
#pragma once
#include "TimeoutHandler.h"
#include "VideoDecoder.h"
#define RTP_CONNECT_TIMEOUT 10000
#define RTP_TIMEOUT_MS 10000
#define RTP_PACKET_TIMEOUT_MS 30000
#define RTP_BASE_FRAME_DURATION 33
#define RTP_MAX_FRAME_DURATION 50 // Maximum frame duration (in ms; 50 ms corresponds to a 20 FPS floor)
#define PROTOCOL_RTSP _T("rtsp://")
#define PROTOCOL_RTSP_LEN ((int)_tcslen(PROTOCOL_RTSP))
class CRtpStreamer
{
public:
CRtpStreamer();
~CRtpStreamer();
// Rtp Notify Events
enum ENotify {
NOTIFY_RTP_PREPARING,
NOTIFY_RTP_LISTENING,
NOTIFY_CONNECTING,
NOTIFY_CONNECTED,
NOTIFY_RTP_STOPPED,
NOTIFY_DISCONNECTED,
NOTIFY_RTP_RECEIVE_FAIL,
NOTIFY_NOT_SUPPORT_CODEC,
NOTIFY_DECODER_INIT_FAIL,
NOTIFY_DECODING_FAIL,
NOTIFY_NO_VIDEO_STREAM,
NOTIFY_UNKNOWN_FORMAT,
NOTIFY_CONNECT_FAIL,
NOTIFY_AUTH_FAIL,
NOTIFY_STREAM_TIMEOUT, // Stream Receive Timeout
__NOTIFY_MAX__
};
struct SVideoInfo {
SIZE sVideoSize;
UINT nAvgFps;
UINT nCodecId; // Same value as FFmpeg's AVCodecID
UINT nTimebase; // PTS ticks per ms; (nTimebase * 1000) is the codec clock rate (for reference when writing video files)
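// e.g. with the standard 90 kHz H.264 RTP clock, nTimebase = 90000 / (1 * 1000) = 90,
// so a PTS delta of 2970 ticks / 90 = 33 ms of frame duration.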
SVideoInfo() { ZeroMemory(this, sizeof(SVideoInfo)); }
};
// RTSP streaming event callbacks; multiple callbacks may be registered. (The pointer to the callback struct is stored, so the caller must keep the registered struct alive in memory.)
struct SCallback {
void(*OnStreamNotify)(CRtpStreamer *pStreamer, CRtpStreamer::ENotify notify, LONGLONG param, void *pOwner);
void(*OnVideoPacketData)(CRtpStreamer *pStreamer, BYTE* pData, UINT nDataLen, BOOL bIframe, UINT nFrameDuration, void *pOwner);
void(*OnVideoPacket)(CRtpStreamer *pStreamer, AVPacket* pAvpacket, BOOL bIframe, UINT nFrameDuration, void *pOwner);
void(*OnDecodedVideoFrame)(CVideoDecoder::SDecodeFrame *pDecodeFrame, BOOL bIframe, UINT nFrameDuration, void *pOwner);
void* pOwner;
SCallback() { ZeroMemory(this, sizeof(SCallback)); }
};
void AddCallback(SCallback *pCallback);
void RemoveCallback(SCallback *pCallback);
void RemoveCallback(void* pOwner);
void SetRequestDecodedVideoFrame(BOOL b, UINT nPreferWidth = 0, UINT nPreferHeight = 0); // When TRUE, decoded frames are delivered via the OnDecodedVideoFrame() callback
BOOL Start(TCHAR* pszUrl, TCHAR* pszContextUrl, int nPort = 554, TCHAR* pszId = NULL, TCHAR* pszPwd = NULL);
BOOL Start(TCHAR *pszSdpFilePath);
void Stop();
AVStream* GetVideoStream();
inline SVideoInfo* GetVideoInfo() { return &m_SVideoInfo; }
inline BOOL IsStreaming() { return (m_hStreamerThread != NULL); }
inline void SetStreamerId(UINT nId) { m_nStreamerId = nId; }
inline UINT GetStreamerId() { return m_nStreamerId; }
inline CVideoDecoder* GetVideoDecoder() { return &m_VideoDecoder; }
inline BOOL IsRequestStop() { return m_bIsStopRequest; }
inline void SetStreamTimeoutMs(UINT nMs) { m_nStreamTimeoutMs = nMs; }
inline UINT GetStreamTimeoutMs() { return max(m_nStreamTimeoutMs, RTP_TIMEOUT_MS); }
inline BOOL IsUseSdpFile() { return m_bUseSdpFile; }
inline LONGLONG GetRealStartTimestamp() { return m_nRealStartTimestamp; } // Time at which the camera actually started sending video (does not change)
inline UINT GetLastFramePtsMs() { return m_nLastFramePtsMs; } // PTS of the last received frame (used as an elapsed-time reference)
inline LONGLONG GetRealTimestampOfLastFrame() { return m_nRealStartTimestamp + m_nLastFramePtsMs; }
inline UINT GetFps() { return m_nFps; }
inline UINT GetBitrate() { return m_nStreamBitrate; }
inline SIZE* GetResolution() { return &m_SVideoInfo.sVideoSize; }
static CString GetNotifyString(ENotify notify);
private:
BOOL m_bIsStopRequest = TRUE; // Whether a stop of the current streaming has been requested (set TRUE by Stop())
char m_szConnectUrl[MAX_PATH] = { NULL, }; // Streaming target address (full URL)
HANDLE m_hStreamerThread = NULL; // Streaming thread handle
LONGLONG m_nRealStartTimestamp = 0; // Unix timestamp in ms
UINT m_nLastFramePtsMs = 0;
UINT m_nStreamerId = 0; // ID for identifying the streamer (assigned arbitrarily by the caller)
SVideoInfo m_SVideoInfo; // Info about the video currently being streamed
CPtrArray m_CallbackList; // List of registered callbacks
CTimeoutHandler m_TimeoutHandler; // Handler used by the streaming thread to check FFmpeg request timeouts
CCriticalSection m_LockCallback; // Lock synchronizing callback registration
CVideoDecoder m_VideoDecoder;
UINT m_nStreamTimeoutMs = RTP_TIMEOUT_MS;
BOOL m_bUseSdpFile = FALSE;
BOOL m_bReqDecodedVideoFrame = FALSE;
UINT m_nPreferDecodeFrameWidth = 0;
UINT m_nPreferDecodeFrameHeight = 0;
AVStream* m_pAvStream = NULL;
// FPS calculation
LONGLONG m_nFrameStartTs = 0;
LONGLONG m_nFrameRateTs = 0;
UINT m_nFrameCntForFps = 0;
UINT m_nStreamBitrateInc = 0;
UINT m_nStreamBitrate = 0;
UINT m_nFps = 0;
static UINT AFXAPI RtpThreadProc(void *param);
void RtpProcBySdpFile();
void RtpProcByUrl();
void SendStreamNotify(ENotify notify, LONGLONG param = 0);
void SendVideoPacket(BYTE* pData, UINT nDataLen, BOOL bIframe, UINT nFrameDuration);
void SendVideoPacket(AVPacket* pAvPacket, BOOL bIframe, UINT nFrameDuration);
void SendDecodedVideoFrame(CVideoDecoder::SDecodeFrame *pDecodeFrame, BOOL bIframe, UINT nFrameDuration);
void RemoveAllCallback();
CString GetAvErrorString(int errNo);
};
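`TimeoutHandler.h` and `VideoDecoder.h` are not included in this post. For orientation only, here is a guessed minimal shape of `CTimeoutHandler`, reconstructed purely from the calls the streamer makes on it (`SetTimeoutMs`, `GetTimeoutMs`, `ResetTimeout`, `IsTimeout`, `SetTag`, `GetTag`); the author's actual class may differ.

```cpp
// Hypothetical minimal CTimeoutHandler, inferred from its call sites.
// IsTimeout() is polled by the FFmpeg interrupt callback, so the
// members are kept to simple tick reads and writes.
class CTimeoutHandler
{
    ULONGLONG m_nResetTs = 0;   // tick of the last ResetTimeout()
    UINT      m_nTimeoutMs = 0; // 0 = no timeout armed
    CStringA  m_strTag;         // label for trace output

public:
    void SetTimeoutMs(UINT nMs) { m_nTimeoutMs = nMs; ResetTimeout(); }
    UINT GetTimeoutMs() const { return m_nTimeoutMs; }
    void ResetTimeout() { m_nResetTs = GetTickCount64(); }
    BOOL IsTimeout() const
    {
        if (m_nTimeoutMs == 0) return FALSE;
        return (GetTickCount64() - m_nResetTs) >= m_nTimeoutMs;
    }
    void SetTag(const char* pszTag) { m_strTag = pszTag; }
    const char* GetTag() const { return (const char*)m_strTag; }
};
```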
# cpp file
// Returning 1 aborts the current streaming operation.
int RtpStreamerInterruptCallBack(void *opaque)
{
if (opaque != NULL) {
CTimeoutHandler* pTimeoutHandler = (CTimeoutHandler*)opaque;
if (pTimeoutHandler->IsTimeout() == TRUE) {
if (pTimeoutHandler->GetTimeoutMs() > 1) {
TRACE("Stream timeout (%s)\n", pTimeoutHandler->GetTag());
}
return 1;
}
}
return 0;
}
CRtpStreamer::CRtpStreamer()
{
}
CRtpStreamer::~CRtpStreamer()
{
if (m_bIsStopRequest == FALSE) {
Stop();
}
if (m_hStreamerThread != NULL) {
if (WaitForSingleObject(m_hStreamerThread, 500) == WAIT_TIMEOUT) {
DWORD nExitCode = NULL;
GetExitCodeThread(m_hStreamerThread, &nExitCode);
TerminateThread(m_hStreamerThread, nExitCode);
CloseHandle(m_hStreamerThread);
m_hStreamerThread = NULL;
}
}
}
void CRtpStreamer::AddCallback(SCallback *pCallback)
{
if (pCallback == NULL) {
return;
}
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetCount(); i++) {
void* ptr = m_CallbackList.GetAt(i);
if (ptr == pCallback) {
m_LockCallback.Unlock();
return;
}
}
m_CallbackList.Add(pCallback);
m_LockCallback.Unlock();
}
void CRtpStreamer::RemoveCallback(SCallback *pCallback)
{
if (pCallback == NULL) {
ASSERT(0);
return;
}
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetSize(); i++) {
void* ptr = m_CallbackList.GetAt(i);
if (ptr == pCallback) {
m_CallbackList.RemoveAt(i);
break;
}
}
m_LockCallback.Unlock();
}
void CRtpStreamer::RemoveCallback(void* pOwner)
{
if (pOwner == NULL) {
ASSERT(0);
return;
}
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetSize(); i++) {
SCallback* pCallback = (SCallback*)m_CallbackList.GetAt(i);
if (pCallback->pOwner == pOwner) {
m_CallbackList.RemoveAt(i);
break;
}
}
m_LockCallback.Unlock();
}
BOOL CRtpStreamer::Start(TCHAR* pszUrl, TCHAR* pszContextUrl, int nPort, TCHAR* pszId, TCHAR* pszPwd)
{
if (m_hStreamerThread != NULL) {
TRACE("CRtpStreamer::Start() - RTP thread already running..\n");
return FALSE;
}
if (pszUrl == NULL || nPort == 0) {
ASSERT(0);
TRACE("Invalud connect url\n");
return FALSE;
}
CString strMakeUrl;
if (_tcsnicmp(pszUrl, PROTOCOL_RTSP, PROTOCOL_RTSP_LEN) == 0) {
pszUrl += PROTOCOL_RTSP_LEN;
}
if (pszId != NULL && pszPwd != NULL) {
strMakeUrl.Format(_T("%s%s:%s@%s:%d"), PROTOCOL_RTSP, pszId, pszPwd, pszUrl, nPort);
}
else {
strMakeUrl.Format(_T("%s%s:%d"), PROTOCOL_RTSP, pszUrl, nPort);
}
if (pszContextUrl != NULL) {
if (pszContextUrl[0] != _T('/')) {
strMakeUrl.Append(_T("/"));
}
strMakeUrl.Append(pszContextUrl);
}
strcpy(m_szConnectUrl, (CStringA)strMakeUrl);
m_bUseSdpFile = FALSE;
m_nRealStartTimestamp = 0;
m_hStreamerThread = (HANDLE)_beginthreadex(NULL, 0, RtpThreadProc, this, CREATE_SUSPENDED, NULL);
if (m_hStreamerThread == NULL) {
return FALSE;
}
ResumeThread(m_hStreamerThread);
return TRUE;
}
BOOL CRtpStreamer::Start(TCHAR *pszSdpFilePath)
{
if (m_hStreamerThread != NULL) {
TRACE("CRtpStreamer::Start() - RTP thread already running..\n");
return FALSE;
}
if (PathFileExists(pszSdpFilePath) == FALSE) {
TRACE("CRtpStreamer::Start() - SDP file not found.\n");
return FALSE;
}
CStringA strSdpFilePath;
strSdpFilePath = pszSdpFilePath;
strcpy(m_szConnectUrl, strSdpFilePath.GetBuffer(0));
m_bUseSdpFile = TRUE;
m_hStreamerThread = (HANDLE)_beginthreadex(NULL, 0, RtpThreadProc, this, CREATE_SUSPENDED, NULL);
if (m_hStreamerThread == NULL) {
return FALSE;
}
ResumeThread(m_hStreamerThread);
return TRUE;
}
UINT CRtpStreamer::RtpThreadProc(void *param)
{
CRtpStreamer *pThis = (CRtpStreamer*)param;
pThis->m_TimeoutHandler.SetTag(pThis->m_szConnectUrl);
if (pThis->m_bUseSdpFile == TRUE) {
pThis->RtpProcBySdpFile();
}
else {
pThis->RtpProcByUrl();
}
if (pThis->m_hStreamerThread != NULL) {
CloseHandle(pThis->m_hStreamerThread);
pThis->m_hStreamerThread = NULL;
}
return 0;
}
/* SDP base
c=IN IP4 238.240.0.1
t=0 0
m=video 61646 RTP/AVP 96
a=rtpmap:96 H264/90000
*/
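The fragment above shows only the connection-relevant lines; a complete SDP file also needs the session-level header (`v=`, `o=`, `s=`). An illustrative minimal file for the same multicast stream (values made up apart from those shown above):

```
v=0
o=- 0 0 IN IP4 238.240.0.1
s=RTP Multicast
c=IN IP4 238.240.0.1
t=0 0
m=video 61646 RTP/AVP 96
a=rtpmap:96 H264/90000
```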
void CRtpStreamer::RtpProcBySdpFile()
{
AVCodec *pAvCodec = NULL;
AVPacket *pAvPacket = NULL;
AVFrame *pAvFrame = NULL;
AVCodecParserContext *pAvParser = NULL;
AVCodecContext *pAvCtx = NULL;
AVFormatContext *pAvFormatCtx = NULL;
AVDictionary *pAvOptions = NULL;
LONGLONG nLastFramePts = 0;
AVInputFormat *file_iformat = NULL;
AVDictionary *format_opts = NULL;
int scan_all_pmts_set = 0;
m_bIsStopRequest = FALSE;
// Declared up front so the error-path gotos below do not skip any initialization.
int avResult = 0;
BOOL bReceivedIFrame = FALSE;
int videoStreamIdx = -1;
AVStream* pAvStream = NULL;
UINT nBaseFrameDuration = RTP_MAX_FRAME_DURATION;
file_iformat = av_find_input_format("sdp");
if (!file_iformat) {
SendStreamNotify(NOTIFY_UNKNOWN_FORMAT);
goto FINALIZE;
}
m_TimeoutHandler.SetTimeoutMs(RTP_CONNECT_TIMEOUT);
// Stream option setup (av_dict_set returns 0 on success)
if (!av_dict_get(format_opts, "scan_all_pmts", NULL, AV_DICT_MATCH_CASE)) {
av_dict_set(&format_opts, "scan_all_pmts", "1", AV_DICT_DONT_OVERWRITE);
scan_all_pmts_set = 1;
}
// input format whitelist - for sdp
av_dict_set(&format_opts, "protocol_whitelist", "file,udp,rtp", 0);
// probe setup
av_dict_set(&format_opts, "probesize", "500000", 0);
av_dict_set(&format_opts, "max_probe_packets", "64", 0);
av_dict_set(&format_opts, "flags", "low_delay", 0);
// buffer
av_dict_set(&format_opts, "analyzeduration", "200000", 0);
av_dict_set(&format_opts, "timeout", "60000000", 0); // 최대 sock io timeout 은 60초
av_dict_set(&format_opts, "buffer_size", "4000000", 0); // 버퍼크기 2MB (재생 버퍼링 사이즈가 아님 raw 패킷의 버퍼 사이즈이므로 비트레이트가 높을 경우 그에 맞게 크게 잡아줘야 함.)
// 인터럽트 콜백 등록 (request timeout 핸들링용)
pAvFormatCtx = avformat_alloc_context();
pAvFormatCtx->interrupt_callback.callback = RtpStreamerInterruptCallBack;
pAvFormatCtx->interrupt_callback.opaque = &m_TimeoutHandler;
avResult = avformat_open_input(&pAvFormatCtx, m_szConnectUrl, file_iformat, &format_opts);
//avResult = avformat_open_input(&pAvFormatCtx, "e:\\EOTS_MULTICAST.sdp", file_iformat, &format_opts);
if (avResult != 0) {
TRACE(_T("Av open failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_RTP_RECEIVE_FAIL, 1);
goto FINALIZE;
}
SendStreamNotify(NOTIFY_RTP_PREPARING);
m_TimeoutHandler.ResetTimeout();
// Probe stream information
avResult = avformat_find_stream_info(pAvFormatCtx, NULL);
if (avResult != 0) {
TRACE(_T("Av find stream failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_RTP_RECEIVE_FAIL, 2);
goto FINALIZE;
}
videoStreamIdx = av_find_best_stream(pAvFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (videoStreamIdx == AVERROR_STREAM_NOT_FOUND || videoStreamIdx == AVERROR_DECODER_NOT_FOUND) {
TRACE(_T("Av find best stream failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_NO_VIDEO_STREAM);
goto FINALIZE;
}
// Extract video stream information
pAvStream = pAvFormatCtx->streams[videoStreamIdx];
m_SVideoInfo.sVideoSize.cx = pAvStream->codecpar->width;
m_SVideoInfo.sVideoSize.cy = pAvStream->codecpar->height;
m_SVideoInfo.nCodecId = pAvStream->codecpar->codec_id;
if (pAvStream->avg_frame_rate.num != 0 && pAvStream->avg_frame_rate.den != 0) {
m_SVideoInfo.nAvgFps = pAvStream->avg_frame_rate.num / pAvStream->avg_frame_rate.den;
nBaseFrameDuration = 1000 / m_SVideoInfo.nAvgFps;
}
if (pAvStream->time_base.den != 0 && pAvStream->time_base.num != 0) {
m_SVideoInfo.nTimebase = pAvStream->time_base.den / (pAvStream->time_base.num * 1000);
}
// Look up the codec decoder
pAvCodec = avcodec_find_decoder(pAvStream->codecpar->codec_id);
if (pAvCodec == NULL) {
TRACE(_T("Av find decoder failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 1);
goto FINALIZE;
}
pAvParser = av_parser_init(pAvCodec->id);
if (pAvParser == NULL) {
TRACE(_T("Av parser init failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 2);
goto FINALIZE;
}
pAvCtx = avcodec_alloc_context3(pAvCodec);
if (pAvCtx == NULL) {
TRACE(_T("Av context alloc failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 3);
goto FINALIZE;
}
avResult = avcodec_open2(pAvCtx, pAvCodec, NULL);
if (avResult < 0) {
TRACE(_T("Av codec open failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 3);
goto FINALIZE;
}
pAvPacket = av_packet_alloc();
pAvFrame = av_frame_alloc();
// Video Decoder Init
if (m_VideoDecoder.Initialize(pAvCodec->id) == FALSE) {
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 4);
goto FINALIZE;
}
m_TimeoutHandler.ResetTimeout();
m_TimeoutHandler.SetTimeoutMs(GetStreamTimeoutMs());
SendStreamNotify(NOTIFY_RTP_LISTENING);
// Streamer main loop
while (m_bIsStopRequest == FALSE) {
if (av_read_frame(pAvFormatCtx, pAvPacket) != 0) {
// If the failure was not caused by Stop(), treat it as a stream timeout.
if (m_bIsStopRequest == FALSE) {
SendStreamNotify(NOTIFY_STREAM_TIMEOUT);
}
break;
}
m_TimeoutHandler.ResetTimeout();
if (m_bIsStopRequest == TRUE) {
break;
}
if (pAvPacket->stream_index == videoStreamIdx) {
// If no I-frame has been received yet, skip packets until one arrives
if (bReceivedIFrame == FALSE) {
if (pAvPacket->flags & AV_PKT_FLAG_KEY) {
bReceivedIFrame = TRUE;
}
else {
continue;
}
}
// Convert the PTS into a frame duration (the caller can simply delay each frame by this duration before display, without doing its own PTS math)
LONGLONG frameDurationMs = (pAvPacket->pts - nLastFramePts);
if (m_SVideoInfo.nTimebase > 0) {
frameDurationMs = frameDurationMs / m_SVideoInfo.nTimebase;
}
else {
// If the timebase was not available at connect time, try to compute it again;
// if it still is not, fall back to the H.264 default (90 kHz clock).
if (pAvStream->time_base.den != 0 && pAvStream->time_base.num != 0) {
m_SVideoInfo.nTimebase = pAvStream->time_base.den / (pAvStream->time_base.num * 1000);
}
if (m_SVideoInfo.nTimebase == 0) {
m_SVideoInfo.nTimebase = 90000 / 1000;
}
frameDurationMs = frameDurationMs / m_SVideoInfo.nTimebase;
}
if (frameDurationMs <= 0 || frameDurationMs > RTP_MAX_FRAME_DURATION) {
frameDurationMs = nBaseFrameDuration;
}
nLastFramePts = pAvPacket->pts;
SendVideoPacket(pAvPacket->data, pAvPacket->size, (pAvPacket->flags == AV_PKT_FLAG_KEY), (UINT)frameDurationMs);
//SendVideoPacket(pAvPacket, (pAvPacket->flags == AV_PKT_FLAG_KEY), (UINT)frameDurationMs);
}
av_packet_unref(pAvPacket);
}
SendStreamNotify(NOTIFY_RTP_STOPPED);
FINALIZE:
RemoveAllCallback();
if (pAvParser != NULL) {
av_parser_close(pAvParser);
}
if (pAvCtx != NULL) {
avcodec_free_context(&pAvCtx);
}
if (pAvFrame != NULL) {
av_frame_free(&pAvFrame);
}
if (pAvPacket != NULL) {
av_packet_unref(pAvPacket);
av_packet_free(&pAvPacket);
}
if (pAvFormatCtx != NULL) {
avformat_close_input(&pAvFormatCtx);
}
m_VideoDecoder.UnInitialize();
}
void CRtpStreamer::RtpProcByUrl()
{
if (GetCurrentThreadId() != GetThreadId(m_hStreamerThread)) {
// Must be called only from the worker thread.
ASSERT(0);
return;
}
m_bIsStopRequest = FALSE;
AVCodec *pAvCodec = NULL;
AVPacket *pAvPacket = NULL;
AVFrame *pAvFrame = NULL;
AVCodecParserContext *pAvParser = NULL;
AVCodecContext *pAvCtx = NULL;
AVFormatContext *pAvFormatCtx = NULL;
AVDictionary *pAvOptions = NULL;
LONGLONG nLastFramePts = 0;
LONGLONG nFirstPtsOffset = -1;
BOOL bStreamTimeout = FALSE;
// Declared up front so the error-path gotos below do not skip any initialization.
int videoStreamIdx = -1;
UINT nBaseFrameDuration = RTP_BASE_FRAME_DURATION;
SendStreamNotify(NOTIFY_CONNECTING);
m_TimeoutHandler.SetTimeoutMs(RTP_CONNECT_TIMEOUT);
int avResult = 0;
BOOL bReceivedIFrame = FALSE;
// Stream option setup (av_dict_set returns 0 on success)
// probe setup
av_dict_set(&pAvOptions, "probesize", "500000", 0);
av_dict_set(&pAvOptions, "max_probe_packets", "64", 0);
av_dict_set(&pAvOptions, "rtsp_transport", "udp", 0);
av_dict_set(&pAvOptions, "flush_packets", "1", 0);
av_dict_set(&pAvOptions, "fflags", "nobuffer", 0);
av_dict_set(&pAvOptions, "analyzeduration", "250000", 0);
av_dict_set(&pAvOptions, "flags", "low_delay", 0);
av_dict_set(&pAvOptions, "stimeout", "30000000", 0); // 최대 sock io timeout 은 30초
av_dict_set(&pAvOptions, "buffer_size", "4000000", 0); // 최대 4MB
// 인터럽트 콜백 등록 (request timeout 핸들링용)
pAvFormatCtx = avformat_alloc_context();
pAvFormatCtx->interrupt_callback.callback = RtpStreamerInterruptCallBack;
pAvFormatCtx->interrupt_callback.opaque = &m_TimeoutHandler;
avResult = avformat_open_input(&pAvFormatCtx, m_szConnectUrl, NULL, &pAvOptions);
if (avResult == AVERROR_HTTP_UNAUTHORIZED) {
SendStreamNotify(NOTIFY_AUTH_FAIL);
goto FINALIZE;
}
else if (avResult != 0) {
TRACE(_T("Av open failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_CONNECT_FAIL, 1);
goto FINALIZE;
}
m_TimeoutHandler.ResetTimeout();
// Probe stream information
avResult = avformat_find_stream_info(pAvFormatCtx, NULL);
if (avResult != 0) {
TRACE(_T("Av find stream failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_CONNECT_FAIL, 2);
goto FINALIZE;
}
videoStreamIdx = av_find_best_stream(pAvFormatCtx, AVMEDIA_TYPE_VIDEO, -1, -1, NULL, 0);
if (videoStreamIdx == AVERROR_STREAM_NOT_FOUND || videoStreamIdx == AVERROR_DECODER_NOT_FOUND) {
TRACE(_T("Av find best stream failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_NO_VIDEO_STREAM);
goto FINALIZE;
}
#if 0
// Verify the codec is H264 - only H264 is supported.
if (pAvFormatCtx->streams[videoStreamIdx]->codecpar->codec_id != AV_CODEC_ID_H264) {
SendStreamNotify(NOTIFY_NOT_SUPPORT_CODEC);
goto FINALIZE;
}
#endif
// Extract video stream information
m_pAvStream = pAvFormatCtx->streams[videoStreamIdx];
m_SVideoInfo.sVideoSize.cx = m_pAvStream->codecpar->width;
m_SVideoInfo.sVideoSize.cy = m_pAvStream->codecpar->height;
m_SVideoInfo.nCodecId = m_pAvStream->codecpar->codec_id;
if (m_pAvStream->avg_frame_rate.num != 0 && m_pAvStream->avg_frame_rate.den != 0) {
m_SVideoInfo.nAvgFps = m_pAvStream->avg_frame_rate.num / m_pAvStream->avg_frame_rate.den;
nBaseFrameDuration = 1000 / m_SVideoInfo.nAvgFps;
}
if (m_pAvStream->time_base.den != 0 && m_pAvStream->time_base.num != 0) {
m_SVideoInfo.nTimebase = m_pAvStream->time_base.den / (m_pAvStream->time_base.num * 1000);
}
// Look up the codec decoder
pAvCodec = avcodec_find_decoder(m_pAvStream->codecpar->codec_id);
if (pAvCodec == NULL) {
TRACE(_T("Av find decoder failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 1);
goto FINALIZE;
}
pAvParser = av_parser_init(pAvCodec->id);
if (pAvParser == NULL) {
TRACE(_T("Av parser init failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 2);
goto FINALIZE;
}
pAvCtx = avcodec_alloc_context3(pAvCodec);
if (pAvCtx == NULL) {
TRACE(_T("Av context alloc failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 3);
goto FINALIZE;
}
avResult = avcodec_open2(pAvCtx, pAvCodec, NULL);
if (avResult < 0) {
TRACE(_T("Av codec open failed. (%s)"), GetAvErrorString(avResult));
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 3);
goto FINALIZE;
}
pAvPacket = av_packet_alloc();
pAvFrame = av_frame_alloc();
#if 0
avResult = av_read_play(pAvFormatCtx);
if (avResult != 0) {
//SendEventNotify(NOTIFY_CONNECT_FAIL, 3);
goto FINALIZE;
}
#endif
// Video Decoder Init
if (m_VideoDecoder.Initialize(pAvCodec->id, m_pAvStream) == FALSE) {
SendStreamNotify(NOTIFY_DECODER_INIT_FAIL, 4);
goto FINALIZE;
}
m_TimeoutHandler.ResetTimeout();
m_TimeoutHandler.SetTimeoutMs(GetStreamTimeoutMs());
SendStreamNotify(NOTIFY_CONNECTED);
// Streamer main loop
while (m_bIsStopRequest == FALSE) {
if (av_read_frame(pAvFormatCtx, pAvPacket) != 0) {
// If the failure was not caused by Stop(), treat it as a stream timeout.
if (m_bIsStopRequest == FALSE) {
SendStreamNotify(NOTIFY_STREAM_TIMEOUT);
bStreamTimeout = TRUE;
}
break;
}
m_TimeoutHandler.ResetTimeout();
if (m_bIsStopRequest == TRUE) {
break;
}
if (pAvPacket->stream_index == videoStreamIdx) {
// If no I-frame has been received yet, skip packets until one arrives
#if 1
if (bReceivedIFrame == FALSE) {
if (pAvPacket->flags == AV_PKT_FLAG_KEY) {
bReceivedIFrame = TRUE;
}
else {
continue;
}
}
#endif
// Convert the PTS into a frame duration (the caller can simply delay each frame by this duration before display, without doing its own PTS math)
LONGLONG frameDurationMs = (pAvPacket->pts - nLastFramePts);
if (m_SVideoInfo.nTimebase > 0) {
frameDurationMs = frameDurationMs / m_SVideoInfo.nTimebase;
}
else {
// If the timebase was not available at connect time, try to compute it again;
// if it still is not, fall back to the H.264 default (90 kHz clock).
if (m_pAvStream->time_base.den != 0 && m_pAvStream->time_base.num != 0) {
m_SVideoInfo.nTimebase = m_pAvStream->time_base.den / (m_pAvStream->time_base.num * 1000);
}
if (m_SVideoInfo.nTimebase == 0) {
m_SVideoInfo.nTimebase = 90000 / 1000;
}
frameDurationMs = frameDurationMs / m_SVideoInfo.nTimebase;
}
if (frameDurationMs <= 0 || frameDurationMs > RTP_MAX_FRAME_DURATION) {
frameDurationMs = nBaseFrameDuration;
}
nLastFramePts = pAvPacket->pts;
m_nLastFramePtsMs = (UINT)(pAvPacket->pts / m_SVideoInfo.nTimebase);
// Record the timestamp of the first frame
if (nFirstPtsOffset == -1) {
nFirstPtsOffset = m_nLastFramePtsMs;
}
//TRACE("Last Pts Ms = %d \n", m_nLastFramePtsMs);
if (m_nRealStartTimestamp == 0) {
if (AV_NOPTS_VALUE != pAvFormatCtx->start_time_realtime) {
// Unix timestamp ms
m_nRealStartTimestamp = (pAvFormatCtx->start_time_realtime / 1000) - nFirstPtsOffset;
TRACE("CRtpStreamer::RtpProcByUrl() - RealStartTimeStamp=%I64d\n", m_nRealStartTimestamp);
}
}
SendVideoPacket(pAvPacket->data, pAvPacket->size, (pAvPacket->flags == AV_PKT_FLAG_KEY), (UINT)frameDurationMs);
}
av_packet_unref(pAvPacket);
}
SendStreamNotify(NOTIFY_DISCONNECTED, bStreamTimeout);
FINALIZE:
RemoveAllCallback();
if (pAvParser != NULL) {
av_parser_close(pAvParser);
}
if (pAvCtx != NULL) {
avcodec_free_context(&pAvCtx);
}
if (pAvFrame != NULL) {
av_frame_free(&pAvFrame);
}
if (pAvPacket != NULL) {
av_packet_unref(pAvPacket);
av_packet_free(&pAvPacket);
}
if (pAvFormatCtx != NULL) {
avformat_close_input(&pAvFormatCtx);
}
m_pAvStream = NULL;
m_VideoDecoder.UnInitialize();
if (m_hStreamerThread != NULL) {
CloseHandle(m_hStreamerThread);
m_hStreamerThread = NULL;
}
}
AVStream* CRtpStreamer::GetVideoStream()
{
return m_pAvStream;
}
// When the thread exits on Stop(), the callback list is cleared with it, so callbacks must be re-registered on Start(). (This avoids a deadlock between notifies and the UI thread.)
void CRtpStreamer::Stop()
{
if (m_bIsStopRequest == TRUE) {
return;
}
m_bIsStopRequest = TRUE;
m_nFrameStartTs = 0;
m_nFrameRateTs = 0;
m_nFrameCntForFps = 0;
m_nStreamBitrateInc = 0;
m_nStreamBitrate = 0;
m_nFps = 0;
// Set the timeout to 1 ms so any currently blocking FFmpeg call returns immediately.
m_TimeoutHandler.SetTimeoutMs(1);
if (m_hStreamerThread != NULL) {
WaitForSingleObject(m_hStreamerThread, 100);
}
}
void CRtpStreamer::SendStreamNotify(ENotify notify, LONGLONG param)
{
SCallback *pCallBack = NULL;
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetSize(); i++) {
pCallBack = (SCallback*)m_CallbackList.GetAt(i);
if (pCallBack != NULL) {
if (pCallBack->OnStreamNotify != NULL) {
pCallBack->OnStreamNotify(this, notify, param, pCallBack->pOwner);
}
}
}
m_LockCallback.Unlock();
}
void CRtpStreamer::SendVideoPacket(BYTE* pData, UINT nDataLen, BOOL bIframe, UINT nFrameDuration)
{
if (m_bIsStopRequest == FALSE) {
SCallback *pCallBack = NULL;
// If decoded-frame delivery was requested, decode here and hand the frame info to the callback.
CVideoDecoder::SDecodeFrame decodedFrame;
BOOL bDecodeOk = FALSE;
if (m_bReqDecodedVideoFrame == TRUE) {
CVideoDecoder* pDecoder = GetVideoDecoder();
decodedFrame.nWidth = m_nPreferDecodeFrameWidth;
decodedFrame.nHeight = m_nPreferDecodeFrameHeight;
bDecodeOk = pDecoder->Decode(pData, nDataLen, &decodedFrame);
if (bDecodeOk == TRUE) {
m_SVideoInfo.sVideoSize.cx = decodedFrame.nSrcDecodeWidth;
m_SVideoInfo.sVideoSize.cy = decodedFrame.nSrcDecodeHeight;
}
}
// Calc FPS and bitrate ------------------------
if (m_nFrameRateTs == 0) {
m_nFrameRateTs = GetTickCount64();
m_nFrameCntForFps = 1;
m_nStreamBitrateInc = nDataLen;
}
else {
LONGLONG elapsedMs = GetTickCount64() - m_nFrameRateTs;
const UINT TICK_RES = 16; // GetTickCount64 resolution is about 15.6 ms, so subtract it when measuring one second.
if (elapsedMs >= (1000 - TICK_RES)) {
m_nFps = m_nFrameCntForFps;
m_nStreamBitrate = m_nStreamBitrateInc;
m_nFrameRateTs = 0;
}
else {
if (m_bReqDecodedVideoFrame == TRUE) {
m_nFrameCntForFps += (bDecodeOk == TRUE) ? 1 : 0;
}
else {
m_nFrameCntForFps++;
}
m_nStreamBitrateInc += nDataLen;
}
}
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetSize(); i++) {
pCallBack = (SCallback*)m_CallbackList.GetAt(i);
if (pCallBack != NULL) {
if (m_bReqDecodedVideoFrame == TRUE) {
// If a frame was decoded, deliver it via the frame callback.
if (bDecodeOk == TRUE) {
if (pCallBack->OnDecodedVideoFrame != NULL) {
pCallBack->OnDecodedVideoFrame(&decodedFrame, bIframe, nFrameDuration, pCallBack->pOwner);
}
}
else {
if (pCallBack->OnStreamNotify != NULL) {
pCallBack->OnStreamNotify(this, NOTIFY_DECODING_FAIL, 0, pCallBack->pOwner);
}
}
}
if (pCallBack->OnVideoPacketData != NULL) {
pCallBack->OnVideoPacketData(this, pData, nDataLen, bIframe, nFrameDuration, pCallBack->pOwner);
}
}
}
m_LockCallback.Unlock();
}
}
void CRtpStreamer::SendVideoPacket(AVPacket* pAvPacket, BOOL bIframe, UINT nFrameDuration)
{
if (m_bIsStopRequest == FALSE) {
SCallback *pCallBack = NULL;
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetSize(); i++) {
pCallBack = (SCallback*)m_CallbackList.GetAt(i);
if (pCallBack != NULL) {
if (pCallBack->OnVideoPacket != NULL) {
pCallBack->OnVideoPacket(this, pAvPacket, bIframe, nFrameDuration, pCallBack->pOwner);
}
}
}
m_LockCallback.Unlock();
}
}
void CRtpStreamer::SendDecodedVideoFrame(CVideoDecoder::SDecodeFrame *pDecodeFrame, BOOL bIframe, UINT nFrameDuration)
{
if (m_bIsStopRequest == FALSE) {
SCallback *pCallBack = NULL;
m_LockCallback.Lock();
for (int i = 0; i < m_CallbackList.GetSize(); i++) {
pCallBack = (SCallback*)m_CallbackList.GetAt(i);
if (pCallBack != NULL) {
if (pCallBack->OnDecodedVideoFrame != NULL) {
pCallBack->OnDecodedVideoFrame(pDecodeFrame, bIframe, nFrameDuration, pCallBack->pOwner);
}
}
}
m_LockCallback.Unlock();
}
}
CString CRtpStreamer::GetNotifyString(ENotify notify)
{
CString resultStr = _T("Unknown");
switch (notify) {
case NOTIFY_RTP_STOPPED:
resultStr = _T("NOTIFY_RTP_STOPPED");
break;
case NOTIFY_RTP_PREPARING:
resultStr = _T("NOTIFY_RTP_PREPARING");
break;
case NOTIFY_RTP_RECEIVE_FAIL:
resultStr = _T("NOTIFY_RTP_RECEIVE_FAIL");
break;
case NOTIFY_NOT_SUPPORT_CODEC:
resultStr = _T("NOTIFY_NOT_SUPPORT_CODEC");
break;
case NOTIFY_DECODER_INIT_FAIL:
resultStr = _T("NOTIFY_DECODER_INIT_FAIL");
break;
case NOTIFY_RTP_LISTENING:
resultStr = _T("NOTIFY_RTP_LISTENING");
break;
case NOTIFY_NO_VIDEO_STREAM:
resultStr = _T("NOTIFY_NO_VIDEO_STREAM");
break;
case NOTIFY_STREAM_TIMEOUT:
resultStr = _T("NOTIFY_STREAM_TIMEOUT");
break;
case NOTIFY_CONNECTING:
resultStr = _T("NOTIFY_CONNECTING");
break;
case NOTIFY_CONNECT_FAIL:
resultStr = _T("NOTIFY_CONNECT_FAIL");
break;
case NOTIFY_AUTH_FAIL:
resultStr = _T("NOTIFY_AUTH_FAIL");
break;
case NOTIFY_CONNECTED:
resultStr = _T("NOTIFY_CONNECTED");
break;
case NOTIFY_DISCONNECTED:
resultStr = _T("NOTIFY_DISCONNECTED");
break;
case NOTIFY_UNKNOWN_FORMAT:
resultStr = _T("NOTIFY_UNKNOWN_FORMAT");
break;
}
return resultStr;
}
CString CRtpStreamer::GetAvErrorString(int errNo) {
char szBuf[128] = { NULL, };
CString strResult;
if (av_strerror(errNo, szBuf, sizeof(szBuf) - 1) == 0) {
strResult = szBuf;
}
return strResult;
}
void CRtpStreamer::RemoveAllCallback()
{
m_LockCallback.Lock();
m_CallbackList.RemoveAll();
m_LockCallback.Unlock();
}
void CRtpStreamer::SetRequestDecodedVideoFrame(BOOL b, UINT nPreferWidth, UINT nPreferHeight)
{
m_bReqDecodedVideoFrame = b;
m_nPreferDecodeFrameWidth = nPreferWidth;
m_nPreferDecodeFrameHeight = nPreferHeight;
}
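As a concrete consumer of `OnVideoPacketData`, here is a small sketch that dumps every received packet to a raw elementary-stream file. It assumes an H.264 stream carrying Annex-B start codes (in which case the dump plays back with ffplay); the `CFile` owner and the function name are hypothetical.

```cpp
// Hypothetical dump callback: pOwner is assumed to be an open CFile*.
// Packets arrive in decode order on the streaming thread.
static void OnVideoPacketDump(CRtpStreamer* pStreamer, BYTE* pData,
    UINT nDataLen, BOOL bIframe, UINT nFrameDuration, void* pOwner)
{
    CFile* pFile = (CFile*)pOwner;
    if (pFile != NULL && pData != NULL && nDataLen > 0) {
        pFile->Write(pData, nDataLen); // raw Annex-B packets, appended in order
    }
}
```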