#if defined(CONF_VIDEORECORDER)

#include <engine/console.h>
#include <engine/shared/config.h>
#include <engine/storage.h>

#include <engine/client/graphics_threaded.h>

#include <engine/sound.h>

#include <memory>
#include <mutex>

#include "video.h"

#include <chrono>
#include <thread>

using namespace std::chrono_literals;

// This code is mostly stolen from https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/muxing.c

#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */

const size_t FORMAT_GL_NCHANNELS = 4;

LOCK g_WriteLock = 0;

CVideo::CVideo(CGraphics_Threaded *pGraphics, ISound *pSound, IStorage *pStorage, IConsole *pConsole, int Width, int Height, const char *pName) :
	m_pGraphics(pGraphics),
	m_pStorage(pStorage),
	m_pSound(pSound)
{
	m_pFormatContext = 0;
	m_pFormat = 0;
	m_pOptDict = 0;

	m_pVideoCodec = 0;
	m_pAudioCodec = 0;

	m_Width = Width;
	m_Height = Height;
	str_copy(m_Name, pName, sizeof(m_Name));

	m_FPS = g_Config.m_ClVideoRecorderFPS;

	m_Recording = false;
	m_Started = false;
	m_ProcessingVideoFrame = 0;
	m_ProcessingAudioFrame = 0;

	m_HasAudio = g_Config.m_ClVideoSndEnable;

	dbg_assert(ms_pCurrentVideo == 0, "ms_pCurrentVideo is NOT set to NULL while creating a new Video.");

	ms_TickTime = time_freq() / m_FPS;
	ms_pCurrentVideo = this;
	g_WriteLock = lock_create();
}

CVideo::~CVideo()
{
	ms_pCurrentVideo = 0;
	lock_destroy(g_WriteLock);
}

void CVideo::Start()
{
	// wait for the graphics thread to idle
	m_pGraphics->WaitForIdle();

	m_AudioStream = {};
	m_VideoStream = {};

	char aDate[20];
	str_timestamp(aDate, sizeof(aDate));
	char aBuf[256];
	if(str_length(m_Name) != 0)
		str_format(aBuf, sizeof(aBuf), "videos/%s", m_Name);
	else
		str_format(aBuf, sizeof(aBuf), "videos/%s.mp4", aDate);

	char aWholePath[1024];
	IOHANDLE File = m_pStorage->OpenFile(aBuf, IOFLAG_WRITE, IStorage::TYPE_SAVE, aWholePath, sizeof(aWholePath));

	if(File)
	{
		io_close(File);
	}
	else
	{
		dbg_msg("video_recorder", "Failed to open file for recording video.");
		return;
	}
	avformat_alloc_output_context2(&m_pFormatContext, 0, "mp4", aWholePath);

	if(!m_pFormatContext)
	{
		dbg_msg("video_recorder", "Failed to create format context for recording video.");
		return;
	}

	m_pFormat = m_pFormatContext->oformat;

#if defined(CONF_ARCH_IA32) || defined(CONF_ARCH_ARM)
	// use only the minimum of 2 threads on 32-bit to save memory
	m_VideoThreads = 2;
	m_AudioThreads = 2;
#else
	m_VideoThreads = std::thread::hardware_concurrency() + 2;
	// audio gets a bit less
	m_AudioThreads = (std::thread::hardware_concurrency() / 2) + 2;
#endif

	m_CurVideoThreadIndex = 0;
	m_CurAudioThreadIndex = 0;

	size_t GLNVals = FORMAT_GL_NCHANNELS * m_Width * m_Height;
	m_vPixelHelper.resize(m_VideoThreads);
	for(size_t i = 0; i < m_VideoThreads; ++i)
	{
		m_vPixelHelper[i].resize(GLNVals * sizeof(uint8_t));
	}

	m_vBuffer.resize(m_AudioThreads);

	/* Add the audio and video streams using the default format codecs
	 * and initialize the codecs. */
	if(m_pFormat->video_codec != AV_CODEC_ID_NONE)
	{
		if(!AddStream(&m_VideoStream, m_pFormatContext, &m_pVideoCodec, m_pFormat->video_codec))
			return;
	}
	else
	{
		dbg_msg("video_recorder", "Failed to add VideoStream for recording video.");
	}

	if(m_HasAudio && m_pFormat->audio_codec != AV_CODEC_ID_NONE)
	{
		if(!AddStream(&m_AudioStream, m_pFormatContext, &m_pAudioCodec, m_pFormat->audio_codec))
			return;
	}
	else
	{
		dbg_msg("video_recorder", "No audio.");
	}
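
	// The worker threads form a ring: thread i is given thread i - 1 (wrapping
	// around at 0) as its parent and waits for that parent's pending frame to be
	// muxed before writing its own, so packets reach the muxer in sequence order
	// even though encoding runs in parallel (see RunVideoThread/RunAudioThread).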
	m_vVideoThreads.resize(m_VideoThreads);
	for(size_t i = 0; i < m_VideoThreads; ++i)
	{
		m_vVideoThreads[i] = std::make_unique<SVideoRecorderThread>();
	}
	for(size_t i = 0; i < m_VideoThreads; ++i)
	{
		std::unique_lock<std::mutex> Lock(m_vVideoThreads[i]->m_Mutex);
		m_vVideoThreads[i]->m_Thread = std::thread([this, i]() { RunVideoThread(i == 0 ? (m_VideoThreads - 1) : (i - 1), i); });
		m_vVideoThreads[i]->m_Cond.wait(Lock, [this, i]() -> bool { return m_vVideoThreads[i]->m_Started; });
	}

	m_vAudioThreads.resize(m_AudioThreads);
	for(size_t i = 0; i < m_AudioThreads; ++i)
	{
		m_vAudioThreads[i] = std::make_unique<SAudioRecorderThread>();
	}
	for(size_t i = 0; i < m_AudioThreads; ++i)
	{
		std::unique_lock<std::mutex> Lock(m_vAudioThreads[i]->m_Mutex);
		m_vAudioThreads[i]->m_Thread = std::thread([this, i]() { RunAudioThread(i == 0 ? (m_AudioThreads - 1) : (i - 1), i); });
		m_vAudioThreads[i]->m_Cond.wait(Lock, [this, i]() -> bool { return m_vAudioThreads[i]->m_Started; });
	}

	/* Now that all the parameters are set, we can open the audio and
	 * video codecs and allocate the necessary encode buffers. */
	if(!OpenVideo())
		return;

	if(m_HasAudio)
		if(!OpenAudio())
			return;

	// TODO: remove/comment:
	av_dump_format(m_pFormatContext, 0, aWholePath, 1);

	/* open the output file, if needed */
	if(!(m_pFormat->flags & AVFMT_NOFILE))
	{
		int Ret = avio_open(&m_pFormatContext->pb, aWholePath, AVIO_FLAG_WRITE);
		if(Ret < 0)
		{
			char aError[AV_ERROR_MAX_STRING_SIZE];
			av_strerror(Ret, aError, sizeof(aError));
			dbg_msg("video_recorder", "Could not open '%s': %s", aWholePath, aError);
			return;
		}
	}

	m_VideoStream.m_vpSwsCtxs.reserve(m_VideoThreads);

	for(size_t i = 0; i < m_VideoThreads; ++i)
	{
		if(m_VideoStream.m_vpSwsCtxs.size() <= i)
			m_VideoStream.m_vpSwsCtxs.emplace_back(nullptr);

		if(!m_VideoStream.m_vpSwsCtxs[i])
		{
			m_VideoStream.m_vpSwsCtxs[i] = sws_getCachedContext(
				m_VideoStream.m_vpSwsCtxs[i],
				m_VideoStream.pEnc->width, m_VideoStream.pEnc->height, AV_PIX_FMT_RGBA,
				m_VideoStream.pEnc->width, m_VideoStream.pEnc->height, AV_PIX_FMT_YUV420P,
				0, 0, 0, 0);
		}
	}

	/* Write the stream header, if any. */
	int Ret = avformat_write_header(m_pFormatContext, &m_pOptDict);
	if(Ret < 0)
	{
		char aError[AV_ERROR_MAX_STRING_SIZE];
		av_strerror(Ret, aError, sizeof(aError));
		dbg_msg("video_recorder", "Error occurred when opening output file: %s", aError);
		return;
	}
	m_Recording = true;
	m_Started = true;
	ms_Time = time_get();
	m_Vframe = 0;
}

void CVideo::Pause(bool Pause)
{
	if(ms_pCurrentVideo)
		m_Recording = !Pause;
}

void CVideo::Stop()
{
	m_pGraphics->WaitForIdle();

	for(size_t i = 0; i < m_VideoThreads; ++i)
	{
		{
			std::unique_lock<std::mutex> Lock(m_vVideoThreads[i]->m_Mutex);
			m_vVideoThreads[i]->m_Finished = true;
			m_vVideoThreads[i]->m_Cond.notify_all();
		}

		m_vVideoThreads[i]->m_Thread.join();
	}
	m_vVideoThreads.clear();

	for(size_t i = 0; i < m_AudioThreads; ++i)
	{
		{
			std::unique_lock<std::mutex> Lock(m_vAudioThreads[i]->m_Mutex);
			m_vAudioThreads[i]->m_Finished = true;
			m_vAudioThreads[i]->m_Cond.notify_all();
		}

		m_vAudioThreads[i]->m_Thread.join();
	}
	m_vAudioThreads.clear();

	while(m_ProcessingVideoFrame > 0 || m_ProcessingAudioFrame > 0)
		std::this_thread::sleep_for(10us);

	m_Recording = false;

	FinishFrames(&m_VideoStream);

	if(m_HasAudio)
		FinishFrames(&m_AudioStream);

	av_write_trailer(m_pFormatContext);

	CloseStream(&m_VideoStream);

	if(m_HasAudio)
		CloseStream(&m_AudioStream);
	//fclose(m_dbgfile);

	if(!(m_pFormat->flags & AVFMT_NOFILE))
		avio_closep(&m_pFormatContext->pb);

	if(m_pFormatContext)
		avformat_free_context(m_pFormatContext);

	ISound *volatile pSound = m_pSound;

	pSound->PauseAudioDevice();
	delete ms_pCurrentVideo;
	pSound->UnpauseAudioDevice();
}
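
// Captures the currently presented frame from the graphics backend and hands it
// to the current video worker thread (round-robin) for conversion and encoding.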
void CVideo::NextVideoFrameThread()
{
	if(m_Recording)
	{
		// #ifdef CONF_PLATFORM_MACOS
		// CAutoreleasePool AutoreleasePool;
		// #endif
		m_VSeq += 1;
		if(m_VSeq >= 2)
		{
			m_ProcessingVideoFrame.fetch_add(1);

			size_t NextVideoThreadIndex = m_CurVideoThreadIndex + 1;
			if(NextVideoThreadIndex == m_VideoThreads)
				NextVideoThreadIndex = 0;

			// always wait for the next video thread too, to prevent a deadlock
			{
				auto *pVideoThread = m_vVideoThreads[NextVideoThreadIndex].get();
				std::unique_lock<std::mutex> Lock(pVideoThread->m_Mutex);

				if(pVideoThread->m_HasVideoFrame)
				{
					pVideoThread->m_Cond.wait(Lock, [&pVideoThread]() -> bool { return !pVideoThread->m_HasVideoFrame; });
				}
			}

			//dbg_msg("video_recorder", "vframe: %d", m_VideoStream.pEnc->frame_number);

			// after reading the graphics library's frame buffer, go threaded
			{
				auto *pVideoThread = m_vVideoThreads[m_CurVideoThreadIndex].get();
				std::unique_lock<std::mutex> Lock(pVideoThread->m_Mutex);

				if(pVideoThread->m_HasVideoFrame)
				{
					pVideoThread->m_Cond.wait(Lock, [&pVideoThread]() -> bool { return !pVideoThread->m_HasVideoFrame; });
				}

				ReadRGBFromGL(m_CurVideoThreadIndex);

				pVideoThread->m_HasVideoFrame = true;
				{
					std::unique_lock<std::mutex> LockParent(pVideoThread->m_VideoFillMutex);
					pVideoThread->m_VideoFrameToFill = m_VSeq;
				}
				pVideoThread->m_Cond.notify_all();
			}

			++m_CurVideoThreadIndex;
			if(m_CurVideoThreadIndex == m_VideoThreads)
				m_CurVideoThreadIndex = 0;
		}

		// sync_barrier();
		// m_Semaphore.signal();
	}
}
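
// Advances the recorder clock by one video tick; the actual frame capture and
// encoding happens separately in NextVideoFrameThread().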
void CVideo::NextVideoFrame()
{
	if(m_Recording)
	{
		ms_Time += ms_TickTime;
		ms_LocalTime = (ms_Time - ms_LocalStartTime) / (float)time_freq();
		m_Vframe += 1;
	}
}
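
// Advances the audio timeline by one video frame's worth of samples
// (sample_rate / FPS) and mixes new audio frames whenever the timeline has
// caught up with the samples produced so far, keeping audio and video in sync.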
void CVideo::NextAudioFrameTimeline(ISoundMixFunc Mix)
{
	if(m_Recording && m_HasAudio)
	{
		//if(m_VideoStream.pEnc->frame_number * (double)m_AudioStream.pEnc->sample_rate / m_FPS >= (double)m_AudioStream.pEnc->frame_number * m_AudioStream.pEnc->frame_size)
		double SamplesPerFrame = (double)m_AudioStream.pEnc->sample_rate / m_FPS;
		while(m_AudioStream.m_SamplesFrameCount >= m_AudioStream.m_SamplesCount)
		{
			NextAudioFrame(Mix);
		}
		m_AudioStream.m_SamplesFrameCount += SamplesPerFrame;
	}
}
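
// Mixes one encoder-sized chunk of audio into the per-thread buffer and hands it
// to the current audio worker thread (round-robin), mirroring NextVideoFrameThread().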
void CVideo::NextAudioFrame(ISoundMixFunc Mix)
{
	if(m_Recording && m_HasAudio)
	{
		m_ASeq += 1;

		m_ProcessingAudioFrame.fetch_add(1);

		size_t NextAudioThreadIndex = m_CurAudioThreadIndex + 1;
		if(NextAudioThreadIndex == m_AudioThreads)
			NextAudioThreadIndex = 0;

		// always wait for the next audio thread too, to prevent a deadlock
		{
			auto *pAudioThread = m_vAudioThreads[NextAudioThreadIndex].get();
			std::unique_lock<std::mutex> Lock(pAudioThread->m_Mutex);

			if(pAudioThread->m_HasAudioFrame)
			{
				pAudioThread->m_Cond.wait(Lock, [&pAudioThread]() -> bool { return !pAudioThread->m_HasAudioFrame; });
			}
		}

		// mix the audio into the current thread's buffer, then hand it off to the worker thread
		{
			auto *pAudioThread = m_vAudioThreads[m_CurAudioThreadIndex].get();

			std::unique_lock<std::mutex> Lock(pAudioThread->m_Mutex);

			if(pAudioThread->m_HasAudioFrame)
			{
				pAudioThread->m_Cond.wait(Lock, [&pAudioThread]() -> bool { return !pAudioThread->m_HasAudioFrame; });
			}

			Mix(m_vBuffer[m_CurAudioThreadIndex].m_aBuffer, ALEN / 2); // two channels

			int64_t DstNbSamples = av_rescale_rnd(
				swr_get_delay(m_AudioStream.m_vpSwrCtxs[m_CurAudioThreadIndex], m_AudioStream.pEnc->sample_rate) +
					m_AudioStream.m_vpFrames[m_CurAudioThreadIndex]->nb_samples,
				m_AudioStream.pEnc->sample_rate,
				m_AudioStream.pEnc->sample_rate, AV_ROUND_UP);

			pAudioThread->m_SampleCountStart = m_AudioStream.m_SamplesCount;
			m_AudioStream.m_SamplesCount += DstNbSamples;

			pAudioThread->m_HasAudioFrame = true;
			{
				std::unique_lock<std::mutex> LockParent(pAudioThread->m_AudioFillMutex);
				pAudioThread->m_AudioFrameToFill = m_ASeq;
			}
			pAudioThread->m_Cond.notify_all();
		}

		++m_CurAudioThreadIndex;
		if(m_CurAudioThreadIndex == m_AudioThreads)
			m_CurAudioThreadIndex = 0;
	}
}
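
// Audio worker loop: waits for a mixed buffer, converts and resamples it, then
// waits for its parent thread's frame to be muxed before writing its own, so
// packets stay in order.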
void CVideo::RunAudioThread(size_t ParentThreadIndex, size_t ThreadIndex)
{
	auto *pThreadData = m_vAudioThreads[ThreadIndex].get();
	auto *pParentThreadData = m_vAudioThreads[ParentThreadIndex].get();
	std::unique_lock<std::mutex> Lock(pThreadData->m_Mutex);
	pThreadData->m_Started = true;
	pThreadData->m_Cond.notify_all();

	while(!pThreadData->m_Finished)
	{
		pThreadData->m_Cond.wait(Lock, [&pThreadData]() -> bool { return pThreadData->m_HasAudioFrame || pThreadData->m_Finished; });
		pThreadData->m_Cond.notify_all();

		if(pThreadData->m_HasAudioFrame)
		{
			FillAudioFrame(ThreadIndex);
			// check if we need to wait for the parent to finish
			{
				std::unique_lock<std::mutex> LockParent(pParentThreadData->m_AudioFillMutex);
				if(pParentThreadData->m_AudioFrameToFill != 0 && pThreadData->m_AudioFrameToFill >= pParentThreadData->m_AudioFrameToFill)
				{
					// wait for the parent to finish its frame
					pParentThreadData->m_AudioFillCond.wait(LockParent, [&pParentThreadData]() -> bool { return pParentThreadData->m_AudioFrameToFill == 0; });
				}
			}
			{
				std::unique_lock<std::mutex> LockAudio(pThreadData->m_AudioFillMutex);

				lock_wait(g_WriteLock);
				m_AudioStream.m_vpFrames[ThreadIndex]->pts = av_rescale_q(pThreadData->m_SampleCountStart, AVRational{1, m_AudioStream.pEnc->sample_rate}, m_AudioStream.pEnc->time_base);
				WriteFrame(&m_AudioStream, ThreadIndex);
				lock_unlock(g_WriteLock);

				pThreadData->m_AudioFrameToFill = 0;
				pThreadData->m_AudioFillCond.notify_all();
				pThreadData->m_Cond.notify_all();
			}
			m_ProcessingAudioFrame.fetch_sub(1);

			pThreadData->m_HasAudioFrame = false;
		}
	}
}
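
// Wraps the mixed interleaved S16 stereo buffer in the temporary frame and
// converts/resamples it into the encoder's sample format via swr_convert.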
void CVideo::FillAudioFrame(size_t ThreadIndex)
{
	av_samples_fill_arrays(
		(uint8_t **)m_AudioStream.m_vpTmpFrames[ThreadIndex]->data,
		0, // pointer to linesize (int*)
		(const uint8_t *)m_vBuffer[ThreadIndex].m_aBuffer,
		2, // channels
		m_AudioStream.m_vpTmpFrames[ThreadIndex]->nb_samples,
		AV_SAMPLE_FMT_S16,
		0 // align
	);

	// dbg_msg("video_recorder", "DstNbSamples: %d", DstNbSamples);
	// fwrite(m_aBuffer, sizeof(short), 2048, m_dbgfile);

	int Ret = av_frame_make_writable(m_AudioStream.m_vpFrames[ThreadIndex]);
	if(Ret < 0)
	{
		dbg_msg("video_recorder", "Error making frame writable");
		return;
	}

	/* convert to destination format */
	Ret = swr_convert(
		m_AudioStream.m_vpSwrCtxs[ThreadIndex],
		m_AudioStream.m_vpFrames[ThreadIndex]->data,
		m_AudioStream.m_vpFrames[ThreadIndex]->nb_samples,
		(const uint8_t **)m_AudioStream.m_vpTmpFrames[ThreadIndex]->data,
		m_AudioStream.m_vpTmpFrames[ThreadIndex]->nb_samples);

	if(Ret < 0)
	{
		dbg_msg("video_recorder", "Error while converting");
		return;
	}
}
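
// Video worker loop: converts its RGBA capture to the encoder's pixel format,
// then waits for its parent thread's frame to be muxed before writing its own.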
void CVideo::RunVideoThread(size_t ParentThreadIndex, size_t ThreadIndex)
{
	auto *pThreadData = m_vVideoThreads[ThreadIndex].get();
	auto *pParentThreadData = m_vVideoThreads[ParentThreadIndex].get();
	std::unique_lock<std::mutex> Lock(pThreadData->m_Mutex);
	pThreadData->m_Started = true;
	pThreadData->m_Cond.notify_all();

	while(!pThreadData->m_Finished)
	{
		pThreadData->m_Cond.wait(Lock, [&pThreadData]() -> bool { return pThreadData->m_HasVideoFrame || pThreadData->m_Finished; });
		pThreadData->m_Cond.notify_all();

		if(pThreadData->m_HasVideoFrame)
		{
			FillVideoFrame(ThreadIndex);
			// check if we need to wait for the parent to finish
			{
				std::unique_lock<std::mutex> LockParent(pParentThreadData->m_VideoFillMutex);
				if(pParentThreadData->m_VideoFrameToFill != 0 && pThreadData->m_VideoFrameToFill >= pParentThreadData->m_VideoFrameToFill)
				{
					// wait for the parent to finish its frame
					pParentThreadData->m_VideoFillCond.wait(LockParent, [&pParentThreadData]() -> bool { return pParentThreadData->m_VideoFrameToFill == 0; });
				}
			}
			{
				std::unique_lock<std::mutex> LockVideo(pThreadData->m_VideoFillMutex);
				lock_wait(g_WriteLock);
				m_VideoStream.m_vpFrames[ThreadIndex]->pts = (int64_t)m_VideoStream.pEnc->frame_number;
				WriteFrame(&m_VideoStream, ThreadIndex);
				lock_unlock(g_WriteLock);

				pThreadData->m_VideoFrameToFill = 0;
				pThreadData->m_VideoFillCond.notify_all();
				pThreadData->m_Cond.notify_all();
			}
			m_ProcessingVideoFrame.fetch_sub(1);

			pThreadData->m_HasVideoFrame = false;
		}
	}
}
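
// Converts the RGBA pixels captured for this thread into the encoder frame's
// YUV420P planes via sws_scale; the input stride is 4 bytes per pixel.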
void CVideo::FillVideoFrame(size_t ThreadIndex)
{
	const int InLinesize[1] = {4 * m_VideoStream.pEnc->width};
	auto *pRGBAData = m_vPixelHelper[ThreadIndex].data();
	sws_scale(m_VideoStream.m_vpSwsCtxs[ThreadIndex], (const uint8_t *const *)&pRGBAData, InLinesize, 0,
		m_VideoStream.pEnc->height, m_VideoStream.m_vpFrames[ThreadIndex]->data, m_VideoStream.m_vpFrames[ThreadIndex]->linesize);
}
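
// Copies the currently presented image from the graphics backend into this
// thread's pixel buffer.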
void CVideo::ReadRGBFromGL(size_t ThreadIndex)
{
	uint32_t Width;
	uint32_t Height;
	uint32_t Format;
	m_pGraphics->GetReadPresentedImageDataFuncUnsafe()(Width, Height, Format, m_vPixelHelper[ThreadIndex]);
}

AVFrame *CVideo::AllocPicture(enum AVPixelFormat PixFmt, int Width, int Height)
{
	AVFrame *pPicture;
	int Ret;

	pPicture = av_frame_alloc();
	if(!pPicture)
		return NULL;

	pPicture->format = PixFmt;
	pPicture->width = Width;
	pPicture->height = Height;

	/* allocate the buffers for the frame data */
	Ret = av_frame_get_buffer(pPicture, 32);
	if(Ret < 0)
	{
		dbg_msg("video_recorder", "Could not allocate frame data.");
		return nullptr;
	}

	return pPicture;
}

AVFrame *CVideo::AllocAudioFrame(enum AVSampleFormat SampleFmt, uint64_t ChannelLayout, int SampleRate, int NbSamples)
{
	AVFrame *Frame = av_frame_alloc();
	int Ret;

	if(!Frame)
	{
		dbg_msg("video_recorder", "Error allocating an audio frame");
		return nullptr;
	}

	Frame->format = SampleFmt;
	Frame->channel_layout = ChannelLayout;
	Frame->sample_rate = SampleRate;
	Frame->nb_samples = NbSamples;

	if(NbSamples)
	{
		Ret = av_frame_get_buffer(Frame, 0);
		if(Ret < 0)
		{
			dbg_msg("video_recorder", "Error allocating an audio buffer");
			return nullptr;
		}
	}

	return Frame;
}

bool CVideo::OpenVideo()
{
	int Ret;
	AVCodecContext *c = m_VideoStream.pEnc;
	AVDictionary *opt = 0;

	av_dict_copy(&opt, m_pOptDict, 0);

	/* open the codec */
	Ret = avcodec_open2(c, m_pVideoCodec, &opt);
	av_dict_free(&opt);
	if(Ret < 0)
	{
		char aBuf[AV_ERROR_MAX_STRING_SIZE];
		av_strerror(Ret, aBuf, sizeof(aBuf));
		dbg_msg("video_recorder", "Could not open video codec: %s", aBuf);
		return false;
	}

	m_VideoStream.m_vpFrames.clear();
	m_VideoStream.m_vpFrames.reserve(m_VideoThreads);

	/* allocate and init a re-usable frame */
	for(size_t i = 0; i < m_VideoThreads; ++i)
	{
		m_VideoStream.m_vpFrames.emplace_back(nullptr);
		m_VideoStream.m_vpFrames[i] = AllocPicture(c->pix_fmt, c->width, c->height);
		if(!m_VideoStream.m_vpFrames[i])
		{
			dbg_msg("video_recorder", "Could not allocate video frame");
			return false;
		}
	}

	/* If the output format is not YUV420P, then a temporary YUV420P
	 * picture is needed too. It is then converted to the required
	 * output format. */
	m_VideoStream.m_vpTmpFrames.clear();
	m_VideoStream.m_vpTmpFrames.reserve(m_VideoThreads);

	if(c->pix_fmt != AV_PIX_FMT_YUV420P)
	{
		/* allocate and init a re-usable frame */
		for(size_t i = 0; i < m_VideoThreads; ++i)
		{
			m_VideoStream.m_vpTmpFrames.emplace_back(nullptr);
			m_VideoStream.m_vpTmpFrames[i] = AllocPicture(AV_PIX_FMT_YUV420P, c->width, c->height);
			if(!m_VideoStream.m_vpTmpFrames[i])
			{
				dbg_msg("video_recorder", "Could not allocate temporary video frame");
				return false;
			}
		}
	}

	/* copy the stream parameters to the muxer */
	Ret = avcodec_parameters_from_context(m_VideoStream.pSt->codecpar, c);
	if(Ret < 0)
	{
		dbg_msg("video_recorder", "Could not copy the stream parameters");
		return false;
	}
	m_VSeq = 0;
	return true;
}

bool CVideo::OpenAudio()
{
	AVCodecContext *c;
	int NbSamples;
	int Ret;
	AVDictionary *opt = NULL;

	c = m_AudioStream.pEnc;

	/* open it */
	//m_dbgfile = fopen("/tmp/pcm_dbg", "wb");
	av_dict_copy(&opt, m_pOptDict, 0);
	Ret = avcodec_open2(c, m_pAudioCodec, &opt);
	av_dict_free(&opt);
	if(Ret < 0)
	{
		char aBuf[AV_ERROR_MAX_STRING_SIZE];
		av_strerror(Ret, aBuf, sizeof(aBuf));
		dbg_msg("video_recorder", "Could not open audio codec: %s", aBuf);
		return false;
	}

	if(c->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
		NbSamples = 10000;
	else
		NbSamples = c->frame_size;

	m_AudioStream.m_vpFrames.clear();
	m_AudioStream.m_vpFrames.reserve(m_AudioThreads);

	m_AudioStream.m_vpTmpFrames.clear();
	m_AudioStream.m_vpTmpFrames.reserve(m_AudioThreads);

	/* allocate and init a re-usable frame */
	for(size_t i = 0; i < m_AudioThreads; ++i)
	{
		m_AudioStream.m_vpFrames.emplace_back(nullptr);
		m_AudioStream.m_vpFrames[i] = AllocAudioFrame(c->sample_fmt, c->channel_layout, c->sample_rate, NbSamples);
		if(!m_AudioStream.m_vpFrames[i])
		{
			dbg_msg("video_recorder", "Could not allocate audio frame");
			return false;
		}

		m_AudioStream.m_vpTmpFrames.emplace_back(nullptr);
		m_AudioStream.m_vpTmpFrames[i] = AllocAudioFrame(AV_SAMPLE_FMT_S16, AV_CH_LAYOUT_STEREO, g_Config.m_SndRate, NbSamples);
		if(!m_AudioStream.m_vpTmpFrames[i])
		{
			dbg_msg("video_recorder", "Could not allocate temporary audio frame");
			return false;
		}
	}

	/* copy the stream parameters to the muxer */
	Ret = avcodec_parameters_from_context(m_AudioStream.pSt->codecpar, c);
	if(Ret < 0)
	{
		dbg_msg("video_recorder", "Could not copy the stream parameters");
		return false;
	}

	/* create resampler context */
	m_AudioStream.m_vpSwrCtxs.clear();
	m_AudioStream.m_vpSwrCtxs.resize(m_AudioThreads);
	for(size_t i = 0; i < m_AudioThreads; ++i)
	{
		m_AudioStream.m_vpSwrCtxs[i] = swr_alloc();
		if(!m_AudioStream.m_vpSwrCtxs[i])
		{
			dbg_msg("video_recorder", "Could not allocate resampler context");
			return false;
		}

		/* set options */
		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "in_channel_count", 2, 0);
		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "in_sample_rate", g_Config.m_SndRate, 0);
		av_opt_set_sample_fmt(m_AudioStream.m_vpSwrCtxs[i], "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_channel_count", c->channels, 0);
		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_sample_rate", c->sample_rate, 0);
		av_opt_set_sample_fmt(m_AudioStream.m_vpSwrCtxs[i], "out_sample_fmt", c->sample_fmt, 0);

		/* initialize the resampling context */
		if(swr_init(m_AudioStream.m_vpSwrCtxs[i]) < 0)
		{
			dbg_msg("video_recorder", "Failed to initialize the resampling context");
			return false;
		}
	}

	m_ASeq = 0;
	return true;
}

/* Add an output stream. */
bool CVideo::AddStream(OutputStream *pStream, AVFormatContext *pOC, const AVCodec **ppCodec, enum AVCodecID CodecId)
{
	AVCodecContext *c;

	/* find the encoder */
	*ppCodec = avcodec_find_encoder(CodecId);
	if(!(*ppCodec))
	{
		dbg_msg("video_recorder", "Could not find encoder for '%s'",
			avcodec_get_name(CodecId));
		return false;
	}

	pStream->pSt = avformat_new_stream(pOC, NULL);
	if(!pStream->pSt)
	{
		dbg_msg("video_recorder", "Could not allocate stream");
		return false;
	}
	pStream->pSt->id = pOC->nb_streams - 1;
	c = avcodec_alloc_context3(*ppCodec);
	if(!c)
	{
		dbg_msg("video_recorder", "Could not alloc an encoding context");
		return false;
	}
	pStream->pEnc = c;

#if defined(CONF_ARCH_IA32) || defined(CONF_ARCH_ARM)
	// use only 1 ffmpeg thread on 32-bit to save memory
	c->thread_count = 1;
#endif

	switch((*ppCodec)->type)
	{
	case AVMEDIA_TYPE_AUDIO:
		c->sample_fmt = (*ppCodec)->sample_fmts ? (*ppCodec)->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
		c->bit_rate = g_Config.m_SndRate * 2 * 16;
		c->sample_rate = g_Config.m_SndRate;
		if((*ppCodec)->supported_samplerates)
		{
			c->sample_rate = (*ppCodec)->supported_samplerates[0];
			for(int i = 0; (*ppCodec)->supported_samplerates[i]; i++)
			{
				if((*ppCodec)->supported_samplerates[i] == g_Config.m_SndRate)
				{
					c->sample_rate = g_Config.m_SndRate;
					break;
				}
			}
		}
		c->channels = 2;
		c->channel_layout = AV_CH_LAYOUT_STEREO;

		pStream->pSt->time_base.num = 1;
		pStream->pSt->time_base.den = c->sample_rate;
		break;

	case AVMEDIA_TYPE_VIDEO:
		c->codec_id = CodecId;

		c->bit_rate = 400000;
		/* Resolution must be a multiple of two. */
		c->width = m_Width;
		c->height = m_Height % 2 == 0 ? m_Height : m_Height - 1;
		/* timebase: This is the fundamental unit of time (in seconds) in terms
		 * of which frame timestamps are represented. For fixed-fps content,
		 * timebase should be 1/framerate and timestamp increments should be
		 * identical to 1. */
		pStream->pSt->time_base.num = 1;
		pStream->pSt->time_base.den = m_FPS;
		c->time_base = pStream->pSt->time_base;

		c->gop_size = 12; /* emit one intra frame every twelve frames at most */
		c->pix_fmt = STREAM_PIX_FMT;
		if(c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
		{
			/* just for testing, we also add B-frames */
			c->max_b_frames = 2;
		}
		if(c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
		{
			/* Needed to avoid using macroblocks in which some coeffs overflow.
			 * This does not happen with normal video, it just happens here as
			 * the motion of the chroma plane does not match the luma plane. */
			c->mb_decision = 2;
		}
		if(CodecId == AV_CODEC_ID_H264)
		{
			const char *presets[10] = {"ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo"};
			av_opt_set(c->priv_data, "preset", presets[g_Config.m_ClVideoX264Preset], 0);
			av_opt_set_int(c->priv_data, "crf", g_Config.m_ClVideoX264Crf, 0);
		}
		break;

	default:
		break;
	}

	/* Some formats want stream headers to be separate. */
	if(pOC->oformat->flags & AVFMT_GLOBALHEADER)
		c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;

	return true;
}
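
// Sends one frame to the encoder and drains every packet it can produce into the
// muxer; the receive loop stops once the encoder needs more input (EAGAIN).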
void CVideo::WriteFrame(OutputStream *pStream, size_t ThreadIndex)
{
	int RetRecv = 0;

	AVPacket *pPacket = av_packet_alloc();
	if(pPacket == nullptr)
	{
		dbg_msg("video_recorder", "Failed allocating packet");
		return;
	}

	pPacket->data = 0;
	pPacket->size = 0;

	avcodec_send_frame(pStream->pEnc, pStream->m_vpFrames[ThreadIndex]);
	do
	{
		RetRecv = avcodec_receive_packet(pStream->pEnc, pPacket);
		if(!RetRecv)
		{
			/* rescale output packet timestamp values from codec to stream timebase */
			av_packet_rescale_ts(pPacket, pStream->pEnc->time_base, pStream->pSt->time_base);
			pPacket->stream_index = pStream->pSt->index;

			if(int Ret = av_interleaved_write_frame(m_pFormatContext, pPacket))
			{
				char aBuf[AV_ERROR_MAX_STRING_SIZE];
				av_strerror(Ret, aBuf, sizeof(aBuf));
				dbg_msg("video_recorder", "Error while writing video frame: %s", aBuf);
			}
		}
		else
			break;
	} while(true);

	if(RetRecv && RetRecv != AVERROR(EAGAIN))
	{
		dbg_msg("video_recorder", "Error encoding frame, error: %d", RetRecv);
	}

	av_packet_free(&pPacket);
}
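
// Flushes an encoder at the end of recording: sends a NULL frame to signal EOF
// and drains the remaining packets until the encoder reports AVERROR_EOF.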
void CVideo::FinishFrames(OutputStream *pStream)
{
	dbg_msg("video_recorder", "------------");
	int RetRecv = 0;

	AVPacket *pPacket = av_packet_alloc();
	if(pPacket == nullptr)
	{
		dbg_msg("video_recorder", "Failed allocating packet");
		return;
	}

	pPacket->data = 0;
	pPacket->size = 0;

	avcodec_send_frame(pStream->pEnc, 0);
	do
	{
		RetRecv = avcodec_receive_packet(pStream->pEnc, pPacket);
		if(!RetRecv)
		{
			/* rescale output packet timestamp values from codec to stream timebase */
			av_packet_rescale_ts(pPacket, pStream->pEnc->time_base, pStream->pSt->time_base);
			pPacket->stream_index = pStream->pSt->index;

			if(int Ret = av_interleaved_write_frame(m_pFormatContext, pPacket))
			{
				char aBuf[AV_ERROR_MAX_STRING_SIZE];
				av_strerror(Ret, aBuf, sizeof(aBuf));
				dbg_msg("video_recorder", "Error while writing video frame: %s", aBuf);
			}
		}
		else
			break;
	} while(true);

	if(RetRecv && RetRecv != AVERROR_EOF)
	{
		dbg_msg("video_recorder", "Failed to finish recording, error: %d", RetRecv);
	}

	av_packet_free(&pPacket);
}
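
// Releases everything owned by a stream: the encoder context plus the per-thread
// frames, scaler contexts and resampler contexts.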
void CVideo::CloseStream(OutputStream *pStream)
{
	avcodec_free_context(&pStream->pEnc);
	for(auto *pFrame : pStream->m_vpFrames)
		av_frame_free(&pFrame);
	pStream->m_vpFrames.clear();

	for(auto *pFrame : pStream->m_vpTmpFrames)
		av_frame_free(&pFrame);
	pStream->m_vpTmpFrames.clear();

	for(auto *pSwsContext : pStream->m_vpSwsCtxs)
		sws_freeContext(pSwsContext);
	pStream->m_vpSwsCtxs.clear();

	for(auto *pSwrContext : pStream->m_vpSwrCtxs)
		swr_free(&pSwrContext);
	pStream->m_vpSwrCtxs.clear();
}

#endif