Improve error handling and logging of video recorder

Use `log_error` for all errors and consistently format all error messages.

Handle all ffmpeg errors and output the formatted ffmpeg error message when possible.
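
The pattern, condensed from the `CVideo::Start` changes below (a sketch of the recurring shape, not new behavior): libav functions return a negative `AVERROR` code, which `av_strerror` turns into a readable message for `log_error`.

```cpp
// Check the negative AVERROR return code and format it before logging.
const int WriteHeaderResult = avformat_write_header(m_pFormatContext, &m_pOptDict);
if(WriteHeaderResult < 0)
{
	char aError[AV_ERROR_MAX_STRING_SIZE];
	av_strerror(WriteHeaderResult, aError, sizeof(aError));
	log_error("videorecorder", "Could not write header: %s", aError);
	return false;
}
```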

Register a log callback that delegates ffmpeg log messages to our logging system. This fixes ffmpeg messages being interleaved with our own log messages and not using the correct line breaks on Windows.
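
Condensed from the video.cpp changes below: ffmpeg levels are mapped to our `LEVEL` enum by a small `AvLevelToLogLevel` helper, and debug/trace output is dropped.

```cpp
// Route ffmpeg log messages through our logger instead of stderr, so they
// are no longer interleaved and use the platform's line breaks.
void AvLogCallback(void *pUser, int Level, const char *pFormat, va_list VarArgs)
{
	const LEVEL LogLevel = AvLevelToLogLevel(Level); // AV_LOG_ERROR -> LEVEL_ERROR, ...
	if(LogLevel <= LEVEL_INFO) // skip verbose/debug/trace output
	{
		log_log_v(LogLevel, "videorecorder/libav", pFormat, VarArgs);
	}
}

void CVideo::Init()
{
	av_log_set_callback(AvLogCallback);
}
```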

Stop the video and the demo immediately and show an error message popup if video recording could not be started successfully.
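
In `CClient::StartVideo` (first hunk below) this becomes an early-out; the message passed to `m_DemoPlayer.Stop` is what the demo browser shows as the popup:

```cpp
// Start() now reports failure, so the client can abort recording cleanly.
if(!IVideo::Current()->Start())
{
	log_error("videorecorder", "Failed to start recording to '%s'", aFilename);
	m_DemoPlayer.Stop("Failed to start video recording. See local console for details.");
	return;
}
```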

Remove unnecessary debug output from ffmpeg.
Robert Müller 2024-04-12 12:28:51 +02:00
parent e595545cd6
commit 9d2c11d7ee
7 changed files with 210 additions and 136 deletions


@@ -3303,7 +3303,12 @@ void CClient::StartVideo(const char *pFilename, bool WithTimestamp)
 	Sound()->PauseAudioDevice();
 	new CVideo(Graphics(), Sound(), Storage(), Graphics()->ScreenWidth(), Graphics()->ScreenHeight(), aFilename);
 	Sound()->UnpauseAudioDevice();
-	IVideo::Current()->Start();
+	if(!IVideo::Current()->Start())
+	{
+		log_error("videorecorder", "Failed to start recording to '%s'", aFilename);
+		m_DemoPlayer.Stop("Failed to start video recording. See local console for details.");
+		return;
+	}
 	if(m_DemoPlayer.Info()->m_Info.m_Paused)
 	{
 		IVideo::Current()->Pause(true);


@@ -2,6 +2,8 @@
 #include "video.h"
 
+#include <base/log.h>
+
 #include <engine/graphics.h>
 #include <engine/shared/config.h>
 #include <engine/sound.h>
@@ -34,6 +36,46 @@ using namespace std::chrono_literals;
 const size_t FORMAT_GL_NCHANNELS = 4;
 
 CLock g_WriteLock;
 
+static LEVEL AvLevelToLogLevel(int Level)
+{
+	switch(Level)
+	{
+	case AV_LOG_PANIC:
+	case AV_LOG_FATAL:
+	case AV_LOG_ERROR:
+		return LEVEL_ERROR;
+	case AV_LOG_WARNING:
+		return LEVEL_WARN;
+	case AV_LOG_INFO:
+		return LEVEL_INFO;
+	case AV_LOG_VERBOSE:
+	case AV_LOG_DEBUG:
+		return LEVEL_DEBUG;
+	case AV_LOG_TRACE:
+		return LEVEL_TRACE;
+	default:
+		dbg_assert(false, "invalid log level");
+		dbg_break();
+	}
+}
+
+void AvLogCallback(void *pUser, int Level, const char *pFormat, va_list VarArgs)
+	GNUC_ATTRIBUTE((format(printf, 3, 0)));
+
+void AvLogCallback(void *pUser, int Level, const char *pFormat, va_list VarArgs)
+{
+	const LEVEL LogLevel = AvLevelToLogLevel(Level);
+	if(LogLevel <= LEVEL_INFO)
+	{
+		log_log_v(LogLevel, "videorecorder/libav", pFormat, VarArgs);
+	}
+}
+
+void CVideo::Init()
+{
+	av_log_set_callback(AvLogCallback);
+}
+
 CVideo::CVideo(IGraphics *pGraphics, ISound *pSound, IStorage *pStorage, int Width, int Height, const char *pName) :
 	m_pGraphics(pGraphics),
 	m_pStorage(pStorage),
@@ -70,7 +112,7 @@ CVideo::~CVideo()
 	ms_pCurrentVideo = nullptr;
 }
 
-void CVideo::Start()
+bool CVideo::Start()
 {
 	// wait for the graphic thread to idle
 	m_pGraphics->WaitForIdle();
@@ -78,24 +120,25 @@ void CVideo::Start()
 	m_AudioStream = {};
 	m_VideoStream = {};
 
-	char aWholePath[1024];
+	char aWholePath[IO_MAX_PATH_LENGTH];
 	IOHANDLE File = m_pStorage->OpenFile(m_aName, IOFLAG_WRITE, IStorage::TYPE_SAVE, aWholePath, sizeof(aWholePath));
 	if(File)
 	{
 		io_close(File);
 	}
 	else
 	{
-		dbg_msg("video_recorder", "Failed to open file for recoding video.");
-		return;
+		log_error("videorecorder", "Could not open file '%s'", aWholePath);
+		return false;
 	}
 
-	avformat_alloc_output_context2(&m_pFormatContext, 0, "mp4", aWholePath);
-	if(!m_pFormatContext)
+	const int FormatAllocResult = avformat_alloc_output_context2(&m_pFormatContext, nullptr, "mp4", aWholePath);
+	if(FormatAllocResult < 0 || !m_pFormatContext)
 	{
-		dbg_msg("video_recorder", "Failed to create formatcontext for recoding video.");
-		return;
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(FormatAllocResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not create format context: %s", aError);
+		return false;
 	}
 
 	m_pFormat = m_pFormatContext->oformat;
@@ -127,21 +170,26 @@ void CVideo::Start()
 	if(m_pFormat->video_codec != AV_CODEC_ID_NONE)
 	{
 		if(!AddStream(&m_VideoStream, m_pFormatContext, &m_pVideoCodec, m_pFormat->video_codec))
-			return;
+			return false;
 	}
 	else
 	{
-		dbg_msg("video_recorder", "Failed to add VideoStream for recoding video.");
+		log_error("videorecorder", "Could not determine default video stream codec");
+		return false;
 	}
 
-	if(m_HasAudio && m_pFormat->audio_codec != AV_CODEC_ID_NONE)
+	if(m_HasAudio)
+	{
+		if(m_pFormat->audio_codec != AV_CODEC_ID_NONE)
 		{
 			if(!AddStream(&m_AudioStream, m_pFormatContext, &m_pAudioCodec, m_pFormat->audio_codec))
-				return;
+				return false;
 		}
 		else
 		{
-			dbg_msg("video_recorder", "No audio.");
+			log_error("videorecorder", "Could not determine default audio stream codec");
+			return false;
+		}
 	}
 
 	m_vVideoThreads.resize(m_VideoThreads);
@@ -171,25 +219,21 @@ void CVideo::Start()
 	/* Now that all the parameters are set, we can open the audio and
 	 * video codecs and allocate the necessary encode buffers. */
 	if(!OpenVideo())
-		return;
+		return false;
 
-	if(m_HasAudio)
-		if(!OpenAudio())
-			return;
-
-	// TODO: remove/comment:
-	av_dump_format(m_pFormatContext, 0, aWholePath, 1);
+	if(m_HasAudio && !OpenAudio())
+		return false;
 
 	/* open the output file, if needed */
 	if(!(m_pFormat->flags & AVFMT_NOFILE))
 	{
-		int Ret = avio_open(&m_pFormatContext->pb, aWholePath, AVIO_FLAG_WRITE);
-		if(Ret < 0)
+		const int OpenResult = avio_open(&m_pFormatContext->pb, aWholePath, AVIO_FLAG_WRITE);
+		if(OpenResult < 0)
 		{
 			char aError[AV_ERROR_MAX_STRING_SIZE];
-			av_strerror(Ret, aError, sizeof(aError));
-			dbg_msg("video_recorder", "Could not open '%s': %s", aWholePath, aError);
-			return;
+			av_strerror(OpenResult, aError, sizeof(aError));
+			log_error("videorecorder", "Could not open file '%s': %s", aWholePath, aError);
+			return false;
 		}
 	}
@@ -211,18 +255,20 @@ void CVideo::Start()
 	}
 
 	/* Write the stream header, if any. */
-	int Ret = avformat_write_header(m_pFormatContext, &m_pOptDict);
-	if(Ret < 0)
+	const int WriteHeaderResult = avformat_write_header(m_pFormatContext, &m_pOptDict);
+	if(WriteHeaderResult < 0)
 	{
 		char aError[AV_ERROR_MAX_STRING_SIZE];
-		av_strerror(Ret, aError, sizeof(aError));
-		dbg_msg("video_recorder", "Error occurred when opening output file: %s", aError);
-		return;
+		av_strerror(WriteHeaderResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not write header: %s", aError);
+		return false;
 	}
 
 	m_Recording = true;
 	m_Started = true;
 	ms_Time = time_get();
 	m_Vframe = 0;
+
+	return true;
 }
 
 void CVideo::Pause(bool Pause)
@@ -303,7 +349,6 @@ void CVideo::NextVideoFrameThread()
 			NextVideoThreadIndex = 0;
 
 		// always wait for the next video thread too, to prevent a dead lock
-
 		{
 			auto *pVideoThread = m_vVideoThreads[NextVideoThreadIndex].get();
 			std::unique_lock<std::mutex> Lock(pVideoThread->m_Mutex);
@@ -471,7 +516,7 @@ void CVideo::RunAudioThread(size_t ParentThreadIndex, size_t ThreadIndex)
 
 void CVideo::FillAudioFrame(size_t ThreadIndex)
 {
-	av_samples_fill_arrays(
+	const int FillArrayResult = av_samples_fill_arrays(
 		(uint8_t **)m_AudioStream.m_vpTmpFrames[ThreadIndex]->data,
 		nullptr, // pointer to linesize (int*)
 		(const uint8_t *)m_vBuffer[ThreadIndex].m_aBuffer,
@@ -480,25 +525,35 @@ void CVideo::FillAudioFrame(size_t ThreadIndex)
 		AV_SAMPLE_FMT_S16,
 		0 // align
 	);
+	if(FillArrayResult < 0)
+	{
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(FillArrayResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not fill audio frame: %s", aError);
+		return;
+	}
 
-	int Ret = av_frame_make_writable(m_AudioStream.m_vpFrames[ThreadIndex]);
-	if(Ret < 0)
+	const int MakeWriteableResult = av_frame_make_writable(m_AudioStream.m_vpFrames[ThreadIndex]);
+	if(MakeWriteableResult < 0)
 	{
-		dbg_msg("video_recorder", "Error making frame writable");
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(MakeWriteableResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not make audio frame writeable: %s", aError);
 		return;
 	}
 
 	/* convert to destination format */
-	Ret = swr_convert(
+	const int ConvertResult = swr_convert(
 		m_AudioStream.m_vpSwrCtxs[ThreadIndex],
 		m_AudioStream.m_vpFrames[ThreadIndex]->data,
 		m_AudioStream.m_vpFrames[ThreadIndex]->nb_samples,
 		(const uint8_t **)m_AudioStream.m_vpTmpFrames[ThreadIndex]->data,
 		m_AudioStream.m_vpTmpFrames[ThreadIndex]->nb_samples);
-
-	if(Ret < 0)
+	if(ConvertResult < 0)
 	{
-		dbg_msg("video_recorder", "Error while converting");
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(ConvertResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not convert audio frame: %s", aError);
 		return;
 	}
 }
@@ -565,22 +620,24 @@ void CVideo::ReadRGBFromGL(size_t ThreadIndex)
 AVFrame *CVideo::AllocPicture(enum AVPixelFormat PixFmt, int Width, int Height)
 {
-	AVFrame *pPicture;
-	int Ret;
-
-	pPicture = av_frame_alloc();
+	AVFrame *pPicture = av_frame_alloc();
 	if(!pPicture)
+	{
+		log_error("videorecorder", "Could not allocate video frame");
 		return nullptr;
+	}
 
 	pPicture->format = PixFmt;
 	pPicture->width = Width;
 	pPicture->height = Height;
 
 	/* allocate the buffers for the frame data */
-	Ret = av_frame_get_buffer(pPicture, 32);
-	if(Ret < 0)
+	const int FrameBufferAllocResult = av_frame_get_buffer(pPicture, 32);
+	if(FrameBufferAllocResult < 0)
 	{
-		dbg_msg("video_recorder", "Could not allocate frame data.");
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(FrameBufferAllocResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not allocate video frame buffer: %s", aError);
 		return nullptr;
 	}
@@ -590,11 +647,9 @@ AVFrame *CVideo::AllocPicture(enum AVPixelFormat PixFmt, int Width, int Height)
 AVFrame *CVideo::AllocAudioFrame(enum AVSampleFormat SampleFmt, uint64_t ChannelLayout, int SampleRate, int NbSamples)
 {
 	AVFrame *pFrame = av_frame_alloc();
-	int Ret;
-
 	if(!pFrame)
 	{
-		dbg_msg("video_recorder", "Error allocating an audio frame");
+		log_error("videorecorder", "Could not allocate audio frame");
 		return nullptr;
 	}
@@ -609,10 +664,12 @@ AVFrame *CVideo::AllocAudioFrame(enum AVSampleFormat SampleFmt, uint64_t ChannelLayout, int SampleRate, int NbSamples)
 	if(NbSamples)
 	{
-		Ret = av_frame_get_buffer(pFrame, 0);
-		if(Ret < 0)
+		const int FrameBufferAllocResult = av_frame_get_buffer(pFrame, 0);
+		if(FrameBufferAllocResult < 0)
 		{
-			dbg_msg("video_recorder", "Error allocating an audio buffer");
+			char aError[AV_ERROR_MAX_STRING_SIZE];
+			av_strerror(FrameBufferAllocResult, aError, sizeof(aError));
+			log_error("videorecorder", "Could not allocate audio frame buffer: %s", aError);
 			return nullptr;
 		}
 	}
@@ -622,19 +679,18 @@ AVFrame *CVideo::AllocAudioFrame(enum AVSampleFormat SampleFmt, uint64_t ChannelLayout, int SampleRate, int NbSamples)
 bool CVideo::OpenVideo()
 {
-	int Ret;
 	AVCodecContext *pContext = m_VideoStream.pEnc;
 	AVDictionary *pOptions = nullptr;
 	av_dict_copy(&pOptions, m_pOptDict, 0);
 
 	/* open the codec */
-	Ret = avcodec_open2(pContext, m_pVideoCodec, &pOptions);
+	const int VideoOpenResult = avcodec_open2(pContext, m_pVideoCodec, &pOptions);
 	av_dict_free(&pOptions);
-	if(Ret < 0)
+	if(VideoOpenResult < 0)
 	{
-		char aBuf[AV_ERROR_MAX_STRING_SIZE];
-		av_strerror(Ret, aBuf, sizeof(aBuf));
-		dbg_msg("video_recorder", "Could not open video codec: %s", aBuf);
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(VideoOpenResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not open video codec: %s", aError);
 		return false;
 	}
@@ -648,7 +704,6 @@ bool CVideo::OpenVideo()
 		m_VideoStream.m_vpFrames[i] = AllocPicture(pContext->pix_fmt, pContext->width, pContext->height);
 		if(!m_VideoStream.m_vpFrames[i])
 		{
-			dbg_msg("video_recorder", "Could not allocate video frame");
 			return false;
 		}
 	}
@@ -668,17 +723,18 @@ bool CVideo::OpenVideo()
 			m_VideoStream.m_vpTmpFrames[i] = AllocPicture(AV_PIX_FMT_YUV420P, pContext->width, pContext->height);
 			if(!m_VideoStream.m_vpTmpFrames[i])
 			{
-				dbg_msg("video_recorder", "Could not allocate temporary video frame");
 				return false;
 			}
 		}
 	}
 
 	/* copy the stream parameters to the muxer */
-	Ret = avcodec_parameters_from_context(m_VideoStream.pSt->codecpar, pContext);
-	if(Ret < 0)
+	const int AudioStreamCopyResult = avcodec_parameters_from_context(m_VideoStream.pSt->codecpar, pContext);
+	if(AudioStreamCopyResult < 0)
 	{
-		dbg_msg("video_recorder", "Could not copy the stream parameters");
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(AudioStreamCopyResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not copy video stream parameters: %s", aError);
 		return false;
 	}
 
 	m_VSeq = 0;
@@ -687,25 +743,22 @@ bool CVideo::OpenVideo()
 bool CVideo::OpenAudio()
 {
-	AVCodecContext *pContext;
-	int NbSamples;
-	int Ret;
+	AVCodecContext *pContext = m_AudioStream.pEnc;
 	AVDictionary *pOptions = nullptr;
-
-	pContext = m_AudioStream.pEnc;
+	av_dict_copy(&pOptions, m_pOptDict, 0);
 
 	/* open it */
-	av_dict_copy(&pOptions, m_pOptDict, 0);
-	Ret = avcodec_open2(pContext, m_pAudioCodec, &pOptions);
+	const int AudioOpenResult = avcodec_open2(pContext, m_pAudioCodec, &pOptions);
 	av_dict_free(&pOptions);
-	if(Ret < 0)
+	if(AudioOpenResult < 0)
 	{
-		char aBuf[AV_ERROR_MAX_STRING_SIZE];
-		av_strerror(Ret, aBuf, sizeof(aBuf));
-		dbg_msg("video_recorder", "Could not open audio codec: %s", aBuf);
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(AudioOpenResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not open audio codec: %s", aError);
 		return false;
 	}
 
+	int NbSamples;
 	if(pContext->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
 		NbSamples = 10000;
 	else
@@ -728,7 +781,6 @@ bool CVideo::OpenAudio()
 #endif
 		if(!m_AudioStream.m_vpFrames[i])
 		{
-			dbg_msg("video_recorder", "Could not allocate audio frame");
 			return false;
 		}
@@ -736,20 +788,21 @@ bool CVideo::OpenAudio()
 		m_AudioStream.m_vpTmpFrames[i] = AllocAudioFrame(AV_SAMPLE_FMT_S16, AV_CH_LAYOUT_STEREO, g_Config.m_SndRate, NbSamples);
 		if(!m_AudioStream.m_vpTmpFrames[i])
 		{
-			dbg_msg("video_recorder", "Could not allocate audio frame");
 			return false;
 		}
 	}
 
 	/* copy the stream parameters to the muxer */
-	Ret = avcodec_parameters_from_context(m_AudioStream.pSt->codecpar, pContext);
-	if(Ret < 0)
+	const int AudioStreamCopyResult = avcodec_parameters_from_context(m_AudioStream.pSt->codecpar, pContext);
+	if(AudioStreamCopyResult < 0)
 	{
-		dbg_msg("video_recorder", "Could not copy the stream parameters");
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(AudioStreamCopyResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not copy audio stream parameters: %s", aError);
 		return false;
 	}
 
-	/* create resampler context */
+	/* create resampling context */
 	m_AudioStream.m_vpSwrCtxs.clear();
 	m_AudioStream.m_vpSwrCtxs.resize(m_AudioThreads);
 	for(size_t i = 0; i < m_AudioThreads; ++i)
@@ -757,26 +810,33 @@ bool CVideo::OpenAudio()
 		m_AudioStream.m_vpSwrCtxs[i] = swr_alloc();
 		if(!m_AudioStream.m_vpSwrCtxs[i])
 		{
-			dbg_msg("video_recorder", "Could not allocate resampler context");
+			log_error("videorecorder", "Could not allocate resampling context");
 			return false;
 		}
 
 		/* set options */
-		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "in_channel_count", 2, 0);
-		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "in_sample_rate", g_Config.m_SndRate, 0);
-		av_opt_set_sample_fmt(m_AudioStream.m_vpSwrCtxs[i], "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
+		dbg_assert(av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "in_channel_count", 2, 0) == 0, "invalid option");
+		if(av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "in_sample_rate", g_Config.m_SndRate, 0) != 0)
+		{
+			log_error("videorecorder", "Could not set audio sample rate to %d", g_Config.m_SndRate);
+			return false;
+		}
+		dbg_assert(av_opt_set_sample_fmt(m_AudioStream.m_vpSwrCtxs[i], "in_sample_fmt", AV_SAMPLE_FMT_S16, 0) == 0, "invalid option");
 #if LIBAVCODEC_VERSION_INT >= AV_VERSION_INT(59, 24, 100)
-		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_channel_count", pContext->ch_layout.nb_channels, 0);
+		dbg_assert(av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_channel_count", pContext->ch_layout.nb_channels, 0) == 0, "invalid option");
 #else
-		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_channel_count", pContext->channels, 0);
+		dbg_assert(av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_channel_count", pContext->channels, 0) == 0, "invalid option");
 #endif
-		av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_sample_rate", pContext->sample_rate, 0);
-		av_opt_set_sample_fmt(m_AudioStream.m_vpSwrCtxs[i], "out_sample_fmt", pContext->sample_fmt, 0);
+		dbg_assert(av_opt_set_int(m_AudioStream.m_vpSwrCtxs[i], "out_sample_rate", pContext->sample_rate, 0) == 0, "invalid option");
+		dbg_assert(av_opt_set_sample_fmt(m_AudioStream.m_vpSwrCtxs[i], "out_sample_fmt", pContext->sample_fmt, 0) == 0, "invalid option");
 
 		/* initialize the resampling context */
-		if(swr_init(m_AudioStream.m_vpSwrCtxs[i]) < 0)
+		const int ResamplingContextInitResult = swr_init(m_AudioStream.m_vpSwrCtxs[i]);
+		if(ResamplingContextInitResult < 0)
 		{
-			dbg_msg("video_recorder", "Failed to initialize the resampling context");
+			char aError[AV_ERROR_MAX_STRING_SIZE];
+			av_strerror(ResamplingContextInitResult, aError, sizeof(aError));
+			log_error("videorecorder", "Could not initialize resampling context: %s", aError);
 			return false;
 		}
 	}
@@ -788,28 +848,25 @@ bool CVideo::OpenAudio()
 /* Add an output stream. */
 bool CVideo::AddStream(OutputStream *pStream, AVFormatContext *pOC, const AVCodec **ppCodec, enum AVCodecID CodecId) const
 {
-	AVCodecContext *pContext;
-
 	/* find the encoder */
 	*ppCodec = avcodec_find_encoder(CodecId);
 	if(!(*ppCodec))
 	{
-		dbg_msg("video_recorder", "Could not find encoder for '%s'",
-			avcodec_get_name(CodecId));
+		log_error("videorecorder", "Could not find encoder for codec '%s'", avcodec_get_name(CodecId));
 		return false;
 	}
 
-	pStream->pSt = avformat_new_stream(pOC, NULL);
+	pStream->pSt = avformat_new_stream(pOC, nullptr);
 	if(!pStream->pSt)
 	{
-		dbg_msg("video_recorder", "Could not allocate stream");
+		log_error("videorecorder", "Could not allocate stream");
 		return false;
 	}
 
 	pStream->pSt->id = pOC->nb_streams - 1;
-	pContext = avcodec_alloc_context3(*ppCodec);
+	AVCodecContext *pContext = avcodec_alloc_context3(*ppCodec);
 	if(!pContext)
 	{
-		dbg_msg("video_recorder", "Could not alloc an encoding context");
+		log_error("videorecorder", "Could not allocate encoding context");
 		return false;
 	}
 
 	pStream->pEnc = pContext;
@@ -880,8 +937,9 @@ bool CVideo::AddStream(OutputStream *pStream, AVFormatContext *pOC, const AVCodec **ppCodec, enum AVCodecID CodecId) const
 		if(CodecId == AV_CODEC_ID_H264)
 		{
 			static const char *s_apPresets[10] = {"ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo"};
-			av_opt_set(pContext->priv_data, "preset", s_apPresets[g_Config.m_ClVideoX264Preset], 0);
-			av_opt_set_int(pContext->priv_data, "crf", g_Config.m_ClVideoX264Crf, 0);
+			dbg_assert(g_Config.m_ClVideoX264Preset < (int)std::size(s_apPresets), "preset index invalid");
+			dbg_assert(av_opt_set(pContext->priv_data, "preset", s_apPresets[g_Config.m_ClVideoX264Preset], 0) == 0, "invalid option");
+			dbg_assert(av_opt_set_int(pContext->priv_data, "crf", g_Config.m_ClVideoX264Crf, 0) == 0, "invalid option");
 		}
 		break;
@@ -898,12 +956,10 @@ bool CVideo::AddStream(OutputStream *pStream, AVFormatContext *pOC, const AVCodec **ppCodec, enum AVCodecID CodecId) const
 void CVideo::WriteFrame(OutputStream *pStream, size_t ThreadIndex)
 {
-	int RetRecv = 0;
-
 	AVPacket *pPacket = av_packet_alloc();
 	if(pPacket == nullptr)
 	{
-		dbg_msg("video_recorder", "Failed allocating packet");
+		log_error("videorecorder", "Could not allocate packet");
 		return;
 	}
@@ -911,29 +967,33 @@ void CVideo::WriteFrame(OutputStream *pStream, size_t ThreadIndex)
 	pPacket->size = 0;
 
 	avcodec_send_frame(pStream->pEnc, pStream->m_vpFrames[ThreadIndex]);
 
+	int RecvResult = 0;
 	do
 	{
-		RetRecv = avcodec_receive_packet(pStream->pEnc, pPacket);
-		if(!RetRecv)
+		RecvResult = avcodec_receive_packet(pStream->pEnc, pPacket);
+		if(!RecvResult)
 		{
 			/* rescale output packet timestamp values from codec to stream timebase */
 			av_packet_rescale_ts(pPacket, pStream->pEnc->time_base, pStream->pSt->time_base);
 			pPacket->stream_index = pStream->pSt->index;
 
-			if(int Ret = av_interleaved_write_frame(m_pFormatContext, pPacket))
+			const int WriteFrameResult = av_interleaved_write_frame(m_pFormatContext, pPacket);
+			if(WriteFrameResult < 0)
 			{
-				char aBuf[AV_ERROR_MAX_STRING_SIZE];
-				av_strerror(Ret, aBuf, sizeof(aBuf));
-				dbg_msg("video_recorder", "Error while writing video frame: %s", aBuf);
+				char aError[AV_ERROR_MAX_STRING_SIZE];
+				av_strerror(WriteFrameResult, aError, sizeof(aError));
+				log_error("videorecorder", "Could not write video frame: %s", aError);
 			}
 		}
 		else
 			break;
 	} while(true);
 
-	if(RetRecv && RetRecv != AVERROR(EAGAIN))
+	if(RecvResult && RecvResult != AVERROR(EAGAIN))
 	{
-		dbg_msg("video_recorder", "Error encoding frame, error: %d", RetRecv);
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(RecvResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not encode video frame: %s", aError);
 	}
 
 	av_packet_free(&pPacket);
@@ -941,13 +1001,10 @@ void CVideo::WriteFrame(OutputStream *pStream, size_t ThreadIndex)
 void CVideo::FinishFrames(OutputStream *pStream)
 {
-	dbg_msg("video_recorder", "------------");
-	int RetRecv = 0;
-
 	AVPacket *pPacket = av_packet_alloc();
 	if(pPacket == nullptr)
 	{
-		dbg_msg("video_recorder", "Failed allocating packet");
+		log_error("videorecorder", "Could not allocate packet");
 		return;
 	}
@@ -955,29 +1012,33 @@ void CVideo::FinishFrames(OutputStream *pStream)
 	pPacket->size = 0;
 
 	avcodec_send_frame(pStream->pEnc, 0);
 
+	int RecvResult = 0;
 	do
 	{
-		RetRecv = avcodec_receive_packet(pStream->pEnc, pPacket);
-		if(!RetRecv)
+		RecvResult = avcodec_receive_packet(pStream->pEnc, pPacket);
+		if(!RecvResult)
 		{
 			/* rescale output packet timestamp values from codec to stream timebase */
 			av_packet_rescale_ts(pPacket, pStream->pEnc->time_base, pStream->pSt->time_base);
 			pPacket->stream_index = pStream->pSt->index;
 
-			if(int Ret = av_interleaved_write_frame(m_pFormatContext, pPacket))
+			const int WriteFrameResult = av_interleaved_write_frame(m_pFormatContext, pPacket);
+			if(WriteFrameResult < 0)
 			{
-				char aBuf[AV_ERROR_MAX_STRING_SIZE];
-				av_strerror(Ret, aBuf, sizeof(aBuf));
-				dbg_msg("video_recorder", "Error while writing video frame: %s", aBuf);
+				char aError[AV_ERROR_MAX_STRING_SIZE];
+				av_strerror(WriteFrameResult, aError, sizeof(aError));
+				log_error("videorecorder", "Could not write video frame: %s", aError);
 			}
 		}
 		else
 			break;
 	} while(true);
 
-	if(RetRecv && RetRecv != AVERROR_EOF)
+	if(RecvResult && RecvResult != AVERROR_EOF)
 	{
-		dbg_msg("video_recorder", "failed to finish recoding, error: %d", RetRecv);
+		char aError[AV_ERROR_MAX_STRING_SIZE];
+		av_strerror(RecvResult, aError, sizeof(aError));
+		log_error("videorecorder", "Could not finish recording: %s", aError);
 	}
 
 	av_packet_free(&pPacket);


@@ -47,7 +47,7 @@ public:
 	CVideo(IGraphics *pGraphics, ISound *pSound, IStorage *pStorage, int Width, int Height, const char *pName);
 	~CVideo();
 
-	void Start() override REQUIRES(!g_WriteLock);
+	bool Start() override REQUIRES(!g_WriteLock);
 	void Stop() override;
 	void Pause(bool Pause) override;
 	bool IsRecording() override { return m_Recording; }
@@ -60,7 +60,7 @@ public:
 
 	static IVideo *Current() { return IVideo::ms_pCurrentVideo; }
 
-	static void Init() { av_log_set_level(AV_LOG_DEBUG); }
+	static void Init();
 
 private:
 	void RunVideoThread(size_t ParentThreadIndex, size_t ThreadIndex) REQUIRES(!g_WriteLock);


@@ -91,6 +91,7 @@ public:
 	virtual int SetPos(int WantedTick) = 0;
 	virtual void Pause() = 0;
 	virtual void Unpause() = 0;
+	virtual const char *ErrorMessage() const = 0;
 	virtual bool IsPlaying() const = 0;
 	virtual const CInfo *BaseInfo() const = 0;
 	virtual void GetDemoName(char *pBuffer, size_t BufferSize) const = 0;


@@ -167,8 +167,8 @@ public:
 	const CInfo *BaseInfo() const override { return &m_Info.m_Info; }
 	void GetDemoName(char *pBuffer, size_t BufferSize) const override;
 	bool GetDemoInfo(class IStorage *pStorage, class IConsole *pConsole, const char *pFilename, int StorageType, CDemoHeader *pDemoHeader, CTimelineMarkers *pTimelineMarkers, CMapInfo *pMapInfo, IOHANDLE *pFile = nullptr, char *pErrorMessage = nullptr, size_t ErrorMessageSize = 0) const override;
-	const char *Filename() { return m_aFilename; }
-	const char *ErrorMessage() { return m_aErrorMessage; }
+	const char *Filename() const { return m_aFilename; }
+	const char *ErrorMessage() const override { return m_aErrorMessage; }
 
 	int Update(bool RealTime = true);


@@ -12,7 +12,7 @@ class IVideo
 public:
 	virtual ~IVideo(){};
 
-	virtual void Start() = 0;
+	virtual bool Start() = 0;
 	virtual void Stop() = 0;
 	virtual void Pause(bool Pause) = 0;
 	virtual bool IsRecording() = 0;


@@ -1084,9 +1084,16 @@ void CMenus::RenderDemoBrowserList(CUIRect ListView, bool &WasListboxItemActivated)
 #if defined(CONF_VIDEORECORDER)
 	if(!m_DemoRenderInput.IsEmpty())
+	{
+		if(DemoPlayer()->ErrorMessage()[0] == '\0')
 		{
 			m_Popup = POPUP_RENDER_DONE;
 		}
+		else
+		{
+			m_DemoRenderInput.Clear();
+		}
+	}
 #endif
 
 	struct SColumn