Merge pull request #1928 from sirius1242/demo_render2

Demo to video converter function.
Dennis Felsing, 2020-01-25 21:01:08 +01:00 (committed by GitHub)
commit 50d848ec43
37 changed files with 1601 additions and 58 deletions


@ -79,6 +79,7 @@ endif()
option(WEBSOCKETS "Enable websockets support" OFF)
option(MYSQL "Enable mysql support" OFF)
option(AUTOUPDATE "Enable the autoupdater" ${AUTOUPDATE_DEFAULT})
option(VIDEORECORDER "Enable video recording support via FFmpeg" OFF)
option(CLIENT "Compile client" ON)
option(DOWNLOAD_GTEST "Download and compile GTest" ${AUTO_DEPENDENCIES_DEFAULT})
option(PREFER_BUNDLED_LIBS "Prefer bundled libraries over system libraries" ${AUTO_DEPENDENCIES_DEFAULT})
@ -322,6 +323,9 @@ find_package(Opusfile)
find_package(Pnglite)
find_package(PythonInterp)
find_package(SDL2)
if(VIDEORECORDER)
find_package(FFmpeg)
endif()
find_package(Threads)
find_package(Wavpack)
if(WEBSOCKETS)
@ -391,6 +395,9 @@ show_dependency_status("Opusfile" OPUSFILE)
show_dependency_status("Pnglite" PNGLITE)
show_dependency_status("PythonInterp" PYTHONINTERP)
show_dependency_status("SDL2" SDL2)
if(VIDEORECORDER)
show_dependency_status("FFmpeg" FFMPEG)
endif()
show_dependency_status("Wavpack" WAVPACK)
show_dependency_status("Zlib" ZLIB)
if(WEBSOCKETS)
@ -549,6 +556,7 @@ set(COPY_FILES
${FREETYPE_COPY_FILES}
${OPUSFILE_COPY_FILES}
${SDL2_COPY_FILES}
${FFMPEG_COPY_FILES}
)
file(COPY ${COPY_FILES} DESTINATION .)
@ -716,6 +724,8 @@ set_src(ENGINE_SHARED GLOB src/engine/shared
teehistorian_ex_chunks.h
uuid_manager.cpp
uuid_manager.h
video.cpp
video.h
websockets.cpp
websockets.h
)
@ -818,6 +828,8 @@ if(CLIENT)
text.cpp
updater.cpp
updater.h
video.cpp
video.h
)
set_src(GAME_CLIENT GLOB_RECURSE src/game/client
animstate.cpp
@ -945,6 +957,7 @@ if(CLIENT)
${PNGLITE_LIBRARIES}
${SDL2_LIBRARIES}
${WAVPACK_LIBRARIES}
${FFMPEG_LIBRARIES}
# Order of these three is important.
${OPUSFILE_LIBRARIES}
@ -992,6 +1005,7 @@ if(CLIENT)
${PNGLITE_INCLUDE_DIRS}
${SDL2_INCLUDE_DIRS}
${WAVPACK_INCLUDE_DIRS}
${FFMPEG_INCLUDE_DIRS}
)
set(PARAMS "${WAVPACK_INCLUDE_DIRS};${WAVPACK_INCLUDE_DIRS}")
@ -1670,6 +1684,9 @@ foreach(target ${TARGETS_OWN})
target_compile_definitions(${target} PRIVATE CONF_WEBSOCKETS)
target_include_directories(${target} PRIVATE ${WEBSOCKETS_INCLUDE_DIRS})
endif()
if(VIDEORECORDER)
target_compile_definitions(${target} PRIVATE CONF_VIDEORECORDER)
endif()
if(MYSQL)
target_compile_definitions(${target} PRIVATE CONF_SQL)
target_include_directories(${target} PRIVATE ${MYSQL_INCLUDE_DIRS})


@ -69,6 +69,9 @@ Whether to enable the autoupdater. Packagers may want to disable this for their
* **-DCLIENT=[ON|OFF]** <br>
Whether to enable client compilation. If set to OFF, DDNet will not depend on Curl, Freetype, Ogg, Opus, Opusfile, and SDL2. Default value is ON.
* **-DVIDEORECORDER=[ON|OFF]** <br>
Whether to add video recording support to the client, using FFmpeg. The `start_video` and `stop_video` console commands can then be used to start and stop the conversion of a demo into an mp4 video (see the example workflow below). This feature is currently experimental and is not enabled by default.
* **-DDOWNLOAD_GTEST=[ON|OFF]** <br>
Whether to download and compile GTest. Useful if GTest is not installed and, for Linux users, there is no suitable package providing it. Default value is OFF.
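For illustration, a possible workflow could look like the sketch below. It assumes a typical out-of-source CMake build, that the FFmpeg development libraries (libavcodec, libavformat, libavutil, libswscale, libswresample) are available, and uses a placeholder demo name; by default the recorder writes a timestamped .mp4 into the `videos` directory of the save path.

```bash
# Configure and build the client with the experimental video recorder enabled
mkdir build && cd build
cmake -DVIDEORECORDER=ON ..
make

# In the client, open the local console (F1 by default) during demo playback:
#   play demos/mydemo.demo      # mydemo.demo is a placeholder name
#   start_video                 # begins rendering the demo to an .mp4
#   stop_video                  # finishes and closes the video file
# cl_video_recorder_fps controls the capture framerate (default 60).
```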

cmake/FindFFmpeg.cmake (new file, 138 lines)

@ -0,0 +1,138 @@
if(NOT CMAKE_CROSSCOMPILING)
find_package(PkgConfig QUIET)
pkg_check_modules(PC_AVCODEC libavcodec)
pkg_check_modules(PC_AVFORMAT libavformat)
pkg_check_modules(PC_AVUTIL libavutil)
pkg_check_modules(PC_SWSCALE libswscale)
pkg_check_modules(PC_SWRESAMPLE libswresample)
endif()
set_extra_dirs_lib(FFMPEG ffmpeg)
find_library(AVCODEC_LIBRARY
NAMES avcodec libavcodec
HINTS ${HINTS_FFMPEG_LIBDIR} ${PC_AVCODEC_LIBRARY_DIRS}
PATHS ${PATHS_AVCODEC_LIBDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
find_library(AVFORMAT_LIBRARY
NAMES avformat libavformat
HINTS ${HINTS_FFMPEG_LIBDIR} ${PC_AVFORMAT_LIBRARY_DIRS}
PATHS ${PATHS_AVFORMAT_LIBDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
find_library(AVUTIL_LIBRARY
NAMES avutil libavutil
HINTS ${HINTS_FFMPEG_LIBDIR} ${PC_AVUTIL_LIBRARY_DIRS}
PATHS ${PATHS_AVUTIL_LIBDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
find_library(SWSCALE_LIBRARY
NAMES swscale libswscale
HINTS ${HINTS_FFMPEG_LIBDIR} ${PC_SWSCALE_LIBRARY_DIRS}
PATHS ${PATHS_SWSCALE_LIBDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
find_library(SWRESAMPLE_LIBRARY
NAMES swresample libswresample
HINTS ${HINTS_FFMPEG_LIBDIR} ${PC_SWRESAMPLE_LIBRARY_DIRS}
PATHS ${PATHS_SWRESAMPLE_LIBDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
set_extra_dirs_include(AVCODEC ffmpeg "${AVCODEC_LIBRARY}")
find_path(AVCODEC_INCLUDEDIR libavcodec
HINTS ${HINTS_AVCODEC_INCLUDEDIR} ${PC_AVCODEC_INCLUDE_DIRS}
PATHS ${PATHS_AVCODEC_INCLUDEDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
set_extra_dirs_include(AVFORMAT ffmpeg "${AVFORMAT_LIBRARY}")
find_path(AVFORMAT_INCLUDEDIR libavformat
HINTS ${HINTS_AVFORMAT_INCLUDEDIR} ${PC_AVFORMAT_INCLUDE_DIRS}
PATHS ${PATHS_AVFORMAT_INCLUDEDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
set_extra_dirs_include(AVUTIL ffmpeg "${AVUTIL_LIBRARY}")
find_path(AVUTIL_INCLUDEDIR libavutil
HINTS ${HINTS_AVUTIL_INCLUDEDIR} ${PC_AVUTIL_INCLUDE_DIRS}
PATHS ${PATHS_AVUTIL_INCLUDEDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
set_extra_dirs_include(SWSCALE ffmpeg "${SWSCALE_LIBRARY}")
find_path(SWSCALE_INCLUDEDIR libswscale
HINTS ${HINTS_SWSCALE_INCLUDEDIR} ${PC_SWSCALE_INCLUDE_DIRS}
PATHS ${PATHS_SWSCALE_INCLUDEDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
set_extra_dirs_include(SWRESAMPLE ffmpeg "${SWRESAMPLE_LIBRARY}")
find_path(SWRESAMPLE_INCLUDEDIR libswresample
HINTS ${HINTS_SWRESAMPLE_INCLUDEDIR} ${PC_SWRESAMPLE_INCLUDE_DIRS}
PATHS ${PATHS_SWRESAMPLE_INCLUDEDIR}
${CROSSCOMPILING_NO_CMAKE_SYSTEM_PATH}
)
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(FFMPEG DEFAULT_MSG
AVCODEC_LIBRARY
AVFORMAT_LIBRARY
AVUTIL_LIBRARY
SWSCALE_LIBRARY
SWRESAMPLE_LIBRARY
AVCODEC_INCLUDEDIR
AVFORMAT_INCLUDEDIR
AVUTIL_INCLUDEDIR
SWSCALE_INCLUDEDIR
SWRESAMPLE_INCLUDEDIR
)
mark_as_advanced(
AVCODEC_LIBRARY
AVFORMAT_LIBRARY
AVUTIL_LIBRARY
SWSCALE_LIBRARY
SWRESAMPLE_LIBRARY
AVCODEC_INCLUDEDIR
AVFORMAT_INCLUDEDIR
AVUTIL_INCLUDEDIR
SWSCALE_INCLUDEDIR
SWRESAMPLE_INCLUDEDIR
)
set(FFMPEG_LIBRARIES
${AVCODEC_LIBRARY}
${AVFORMAT_LIBRARY}
${AVUTIL_LIBRARY}
${SWSCALE_LIBRARY}
${SWRESAMPLE_LIBRARY}
)
set(FFMPEG_INCLUDE_DIRS
${AVCODEC_INCLUDEDIR}
${AVFORMAT_INCLUDEDIR}
${AVUTIL_INCLUDEDIR}
${SWSCALE_INCLUDEDIR}
${SWRESAMPLE_INCLUDEDIR}
)
is_bundled(IS_BUNDLED "${AVCODEC_LIBRARY}")
if(IS_BUNDLED AND TARGET_OS STREQUAL "windows")
set(FFMPEG_COPY_FILES
"${EXTRA_FFMPEG_LIBDIR}/avcodec-57.dll"
"${EXTRA_FFMPEG_LIBDIR}/avdevice-57.dll"
"${EXTRA_FFMPEG_LIBDIR}/avfilter-6.dll"
"${EXTRA_FFMPEG_LIBDIR}/avformat-57.dll"
"${EXTRA_FFMPEG_LIBDIR}/avutil-55.dll"
"${EXTRA_FFMPEG_LIBDIR}/postproc-54.dll"
"${EXTRA_FFMPEG_LIBDIR}/swresample-2.dll"
"${EXTRA_FFMPEG_LIBDIR}/swscale-4.dll"
)
else()
set(FFMPEG_COPY_FILES)
endif()


@ -103,6 +103,9 @@ public:
virtual void Restart() = 0;
virtual void Quit() = 0;
virtual const char *DemoPlayer_Play(const char *pFilename, int StorageType) = 0;
#if defined(CONF_VIDEORECORDER)
virtual const char *DemoPlayer_Render(const char *pFilename, int StorageType, const char *pVideoName, int SpeedIndex) = 0;
#endif
virtual void DemoRecorder_Start(const char *pFilename, bool WithTimestamp, int Recorder) = 0;
virtual void DemoRecorder_HandleAutoStart() = 0;
virtual void DemoRecorder_Stop(int Recorder, bool RemoveFile = false) = 0;


@ -28,8 +28,13 @@
#endif
#include <engine/shared/config.h>
#include <base/tl/threading.h>
#if defined(CONF_VIDEORECORDER)
#include "video.h"
#endif
#include "graphics_threaded.h"
#include "backend_sdl.h"
@ -58,10 +63,15 @@ void CGraphicsBackend_Threaded::ThreadFunc(void *pUser)
CAutoreleasePool AutoreleasePool;
#endif
pThis->m_pProcessor->RunBuffer(pThis->m_pBuffer);
sync_barrier();
pThis->m_pBuffer = 0x0;
pThis->m_BufferDone.signal();
}
#if defined(CONF_VIDEORECORDER)
if (IVideo::Current())
IVideo::Current()->nextVideoFrame_thread();
#endif
}
}


@ -5,8 +5,6 @@
#include "graphics_threaded.h"
# if defined(CONF_PLATFORM_MACOSX)
#include <objc/objc-runtime.h>


@ -68,6 +68,10 @@
#include "updater.h"
#include "client.h"
#if defined(CONF_VIDEORECORDER)
#include "video.h"
#endif
#include <zlib.h>
#include "SDL.h"
@ -756,6 +760,7 @@ void CClient::DisconnectWithReason(const char *pReason)
void CClient::Disconnect()
{
m_ButtonRender = false;
if(m_DummyConnected)
DummyDisconnect(0);
if(m_State != IClient::STATE_OFFLINE)
@ -1944,6 +1949,9 @@ void CClient::ProcessServerPacket(CNetChunk *pPacket)
m_aSnapshots[g_Config.m_ClDummy][SNAP_PREV] = m_SnapshotStorage[g_Config.m_ClDummy].m_pFirst;
m_aSnapshots[g_Config.m_ClDummy][SNAP_CURRENT] = m_SnapshotStorage[g_Config.m_ClDummy].m_pLast;
m_LocalStartTime = time_get();
#if defined(CONF_VIDEORECORDER)
IVideo::SetLocalStartTime(m_LocalStartTime);
#endif
SetState(IClient::STATE_ONLINE);
DemoRecorder_HandleAutoStart();
}
@ -2194,6 +2202,9 @@ void CClient::ProcessServerPacketDummy(CNetChunk *pPacket)
m_aSnapshots[!g_Config.m_ClDummy][SNAP_PREV] = m_SnapshotStorage[!g_Config.m_ClDummy].m_pFirst;
m_aSnapshots[!g_Config.m_ClDummy][SNAP_CURRENT] = m_SnapshotStorage[!g_Config.m_ClDummy].m_pLast;
m_LocalStartTime = time_get();
#if defined(CONF_VIDEORECORDER)
IVideo::SetLocalStartTime(m_LocalStartTime);
#endif
SetState(IClient::STATE_ONLINE);
}
@ -2478,7 +2489,22 @@ void CClient::Update()
{
if(State() == IClient::STATE_DEMOPLAYBACK)
{
#if defined(CONF_VIDEORECORDER)
if (m_DemoPlayer.IsPlaying() && IVideo::Current())
{
if (IVideo::Current()->frameRendered())
IVideo::Current()->nextVideoFrame();
if (IVideo::Current()->aframeRendered())
IVideo::Current()->nextAudioFrame_timeline();
}
else if(m_ButtonRender)
Disconnect();
#endif
m_DemoPlayer.Update();
if(m_DemoPlayer.IsPlaying())
{
// update timers
@ -2760,6 +2786,11 @@ void CClient::InitInterfaces()
void CClient::Run()
{
m_LocalStartTime = time_get();
#if defined(CONF_VIDEORECORDER)
IVideo::SetLocalStartTime(m_LocalStartTime);
#endif
m_SnapshotParts[0] = 0;
m_SnapshotParts[1] = 0;
if(m_GenerateTimeoutSeed)
{
@ -2804,6 +2835,11 @@ void CClient::Run()
// init sound, allowed to fail
m_SoundInitFailed = Sound()->Init() != 0;
#if defined(CONF_VIDEORECORDER)
// init video recorder aka ffmpeg
CVideo::Init();
#endif
// open socket
{
NETADDR BindAddr;
@ -3256,6 +3292,49 @@ void CClient::Con_Screenshot(IConsole::IResult *pResult, void *pUserData)
pSelf->Graphics()->TakeScreenshot(0);
}
#if defined(CONF_VIDEORECORDER)
void CClient::Con_StartVideo(IConsole::IResult *pResult, void *pUserData)
{
CClient *pSelf = (CClient *)pUserData;
if (pSelf->State() != IClient::STATE_DEMOPLAYBACK)
pSelf->m_pConsole->Print(IConsole::OUTPUT_LEVEL_STANDARD, "videorecorder", "Can not start videorecorder outside of demoplayer.");
if (!IVideo::Current())
{
new CVideo((CGraphics_Threaded*)pSelf->m_pGraphics, pSelf->Storage(), pSelf->m_pConsole, pSelf->Graphics()->ScreenWidth(), pSelf->Graphics()->ScreenHeight(), "");
IVideo::Current()->start();
}
else
pSelf->m_pConsole->Print(IConsole::OUTPUT_LEVEL_STANDARD, "videorecorder", "Videorecorder already running.");
}
void CClient::StartVideo(IConsole::IResult *pResult, void *pUserData, const char *pVideoName)
{
CClient *pSelf = (CClient *)pUserData;
if (pSelf->State() != IClient::STATE_DEMOPLAYBACK)
pSelf->m_pConsole->Print(IConsole::OUTPUT_LEVEL_STANDARD, "videorecorder", "Can not start videorecorder outside of demoplayer.");
pSelf->m_pConsole->Print(IConsole::OUTPUT_LEVEL_DEBUG, "demo_render", pVideoName);
if (!IVideo::Current())
{
new CVideo((CGraphics_Threaded*)pSelf->m_pGraphics, pSelf->Storage(), pSelf->m_pConsole, pSelf->Graphics()->ScreenWidth(), pSelf->Graphics()->ScreenHeight(), pVideoName);
IVideo::Current()->start();
}
else
pSelf->m_pConsole->Print(IConsole::OUTPUT_LEVEL_STANDARD, "videorecorder", "Videorecorder already running.");
}
void CClient::Con_StopVideo(IConsole::IResult *pResult, void *pUserData)
{
if (IVideo::Current())
IVideo::Current()->stop();
}
#endif
void CClient::Con_Rcon(IConsole::IResult *pResult, void *pUserData)
{
CClient *pSelf = (CClient *)pUserData;
@ -3438,6 +3517,23 @@ const char *CClient::DemoPlayer_Play(const char *pFilename, int StorageType)
return 0;
}
#if defined(CONF_VIDEORECORDER)
const char *CClient::DemoPlayer_Render(const char *pFilename, int StorageType, const char *pVideoName, int SpeedIndex)
{
const char *pError;
pError = DemoPlayer_Play(pFilename, StorageType);
if(pError)
return pError;
m_ButtonRender = true;
this->CClient::StartVideo(NULL, this, pVideoName);
m_DemoPlayer.Play();
m_DemoPlayer.SetSpeed(g_aSpeeds[SpeedIndex]);
//m_pConsole->Print(IConsole::OUTPUT_LEVEL_DEBUG, "demo_recorder", "demo eof");
return 0;
}
#endif
void CClient::Con_Play(IConsole::IResult *pResult, void *pUserData)
{
CClient *pSelf = (CClient *)pUserData;
@ -3740,6 +3836,12 @@ void CClient::RegisterCommands()
m_pConsole->Register("disconnect", "", CFGFLAG_CLIENT, Con_Disconnect, this, "Disconnect from the server");
m_pConsole->Register("ping", "", CFGFLAG_CLIENT, Con_Ping, this, "Ping the current server");
m_pConsole->Register("screenshot", "", CFGFLAG_CLIENT, Con_Screenshot, this, "Take a screenshot");
#if defined(CONF_VIDEORECORDER)
m_pConsole->Register("start_video", "", CFGFLAG_CLIENT, Con_StartVideo, this, "Start recording a video");
m_pConsole->Register("stop_video", "", CFGFLAG_CLIENT, Con_StopVideo, this, "Stop recording a video");
#endif
m_pConsole->Register("rcon", "r[rcon-command]", CFGFLAG_CLIENT, Con_Rcon, this, "Send specified command to rcon");
m_pConsole->Register("rcon_auth", "s[password]", CFGFLAG_CLIENT, Con_RconAuth, this, "Authenticate to rcon");
m_pConsole->Register("rcon_login", "s[username] r[password]", CFGFLAG_CLIENT, Con_RconLogin, this, "Authenticate to rcon with a username");


@ -130,6 +130,7 @@ class CClient : public IClient, public CDemoPlayer::IListener
int m_UseTempRconCommands;
char m_Password[32];
bool m_SendPassword;
bool m_ButtonRender=false;
// version-checking
char m_aVersionStr[10];
@ -366,6 +367,14 @@ public:
static void Con_Minimize(IConsole::IResult *pResult, void *pUserData);
static void Con_Ping(IConsole::IResult *pResult, void *pUserData);
static void Con_Screenshot(IConsole::IResult *pResult, void *pUserData);
#if defined(CONF_VIDEORECORDER)
static void StartVideo(IConsole::IResult *pResult, void *pUserData, const char *pVideoName);
static void Con_StartVideo(IConsole::IResult *pResult, void *pUserData);
static void Con_StopVideo(IConsole::IResult *pResult, void *pUserData);
const char *DemoPlayer_Render(const char *pFilename, int StorageType, const char *pVideoName, int SpeedIndex);
#endif
static void Con_Rcon(IConsole::IResult *pResult, void *pUserData);
static void Con_RconAuth(IConsole::IResult *pResult, void *pUserData);
static void Con_RconLogin(IConsole::IResult *pResult, void *pUserData);


@ -2049,7 +2049,9 @@ int CGraphics_Threaded::IssueInit()
if(g_Config.m_GfxBorderless) Flags |= IGraphicsBackend::INITFLAG_BORDERLESS;
if(g_Config.m_GfxFullscreen) Flags |= IGraphicsBackend::INITFLAG_FULLSCREEN;
if(g_Config.m_GfxVsync) Flags |= IGraphicsBackend::INITFLAG_VSYNC;
#ifndef CONF_VIDEORECORDER
if(g_Config.m_GfxResizable) Flags |= IGraphicsBackend::INITFLAG_RESIZABLE;
#endif
int r = m_pBackend->Init("DDNet Client", &g_Config.m_GfxScreen, &g_Config.m_GfxScreenWidth, &g_Config.m_GfxScreenHeight, g_Config.m_GfxFsaaSamples, Flags, &m_DesktopScreenWidth, &m_DesktopScreenHeight, &m_ScreenWidth, &m_ScreenHeight, m_pStorage);
m_UseOpenGL3_3 = m_pBackend->IsOpenGL3_3();


@ -281,7 +281,7 @@ int CInput::Update()
switch (Event.window.event)
{
case SDL_WINDOWEVENT_RESIZED:
#if defined(SDL_VIDEO_DRIVER_X11)
#if defined(SDL_VIDEO_DRIVER_X11) && !defined(CONF_VIDEORECORDER)
Graphics()->Resize(Event.window.data1, Event.window.data2);
#endif
break;


@ -14,6 +14,9 @@
extern "C"
{
#if defined(CONF_VIDEORECORDER)
#include <engine/shared/video.h>
#endif
#include <opusfile.h>
#include <wavpack.h>
}
@ -83,6 +86,7 @@ static int s_WVBufferPosition = 0;
static int s_WVBufferSize = 0;
const int DefaultDistance = 1500;
int m_LastBreak = 0;
// TODO: there should be a faster way todo this
static short Int2Short(int i)
@ -266,18 +270,28 @@ static void Mix(short *pFinalOut, unsigned Frames)
pFinalOut[j] = Int2Short(vl);
pFinalOut[j+1] = Int2Short(vr);
// dbg_msg("sound", "the real shit: %d %d", pFinalOut[j], pFinalOut[j+1]);
}
}
#if defined(CONF_ARCH_ENDIAN_BIG)
swap_endian(pFinalOut, sizeof(short), Frames * 2);
#endif
}
static void SdlCallback(void *pUnused, Uint8 *pStream, int Len)
{
(void)pUnused;
#if defined(CONF_VIDEORECORDER)
if (!(IVideo::Current() && g_Config.m_ClVideoSndEnable))
Mix((short *)pStream, Len/2/2);
else
IVideo::Current()->nextAudioFrame(Mix);
#else
Mix((short *)pStream, Len/2/2);
#endif
}
@ -345,7 +359,10 @@ int CSound::Update()
m_SoundVolume = WantedVolume;
lock_unlock(m_SoundLock);
}
//#if defined(CONF_VIDEORECORDER)
// if(IVideo::Current() && g_Config.m_ClVideoSndEnable)
// IVideo::Current()->nextAudioFrame(Mix);
//#endif
return 0;
}

src/engine/client/video.cpp (new file, 747 lines)

@ -0,0 +1,747 @@
#if defined(CONF_VIDEORECORDER)
#include <engine/storage.h>
#include <engine/console.h>
#include <engine/shared/config.h>
#include "video.h"
// This code is mostly stolen from https://github.com/FFmpeg/FFmpeg/blob/master/doc/examples/muxing.c
#define STREAM_PIX_FMT AV_PIX_FMT_YUV420P /* default pix_fmt */
const size_t format_nchannels = 3;
static LOCK m_WriteLock = 0;
CVideo::CVideo(CGraphics_Threaded* pGraphics, IStorage* pStorage, IConsole *pConsole, int width, int height, const char *name) :
m_pGraphics(pGraphics),
m_pStorage(pStorage),
m_pConsole(pConsole),
m_VideoStream(),
m_AudioStream()
{
m_pPixels = 0;
m_pFormatContext = 0;
m_pFormat = 0;
m_pRGB = 0;
m_pOptDict = 0;
m_VideoCodec = 0;
m_AudioCodec = 0;
m_Width = width;
m_Height = height;
str_copy(m_Name, name, sizeof(m_Name));
m_FPS = g_Config.m_ClVideoRecorderFPS;
m_Recording = false;
m_Started = false;
m_ProcessingVideoFrame = false;
m_ProcessingAudioFrame = false;
m_NextFrame = false;
m_NextaFrame = false;
// TODO:
m_HasAudio = g_Config.m_ClVideoSndEnable;
m_SndBufferSize = g_Config.m_SndBufferSize;
dbg_assert(ms_pCurrentVideo == 0, "ms_pCurrentVideo is NOT set to NULL while creating a new Video.");
ms_TickTime = time_freq() / m_FPS;
ms_pCurrentVideo = this;
m_WriteLock = lock_create();
}
CVideo::~CVideo()
{
ms_pCurrentVideo = 0;
lock_destroy(m_WriteLock);
}
void CVideo::start()
{
char aDate[20];
str_timestamp(aDate, sizeof(aDate));
char aBuf[256];
if (strlen(m_Name) != 0)
str_format(aBuf, sizeof(aBuf), "videos/%s", m_Name);
else
str_format(aBuf, sizeof(aBuf), "videos/%s.mp4", aDate);
char aWholePath[1024];
IOHANDLE File = m_pStorage->OpenFile(aBuf, IOFLAG_WRITE, IStorage::TYPE_SAVE, aWholePath, sizeof(aWholePath));
if(File)
{
io_close(File);
}
else
{
dbg_msg("video_recorder", "Failed to open file for recoding video.");
return;
}
avformat_alloc_output_context2(&m_pFormatContext, 0, "mp4", aWholePath);
if (!m_pFormatContext)
{
dbg_msg("video_recorder", "Failed to create formatcontext for recoding video.");
return;
}
m_pFormat = m_pFormatContext->oformat;
size_t nvals = format_nchannels * m_Width * m_Height;
m_pPixels = (uint8_t *)malloc(nvals * sizeof(GLubyte));
m_pRGB = (uint8_t *)malloc(nvals * sizeof(uint8_t));
/* Add the audio and video streams using the default format codecs
* and initialize the codecs. */
if (m_pFormat->video_codec != AV_CODEC_ID_NONE)
{
add_stream(&m_VideoStream, m_pFormatContext, &m_VideoCodec, m_pFormat->video_codec);
}
else
{
dbg_msg("video_recorder", "Failed to add VideoStream for recoding video.");
}
if (m_HasAudio && m_pFormat->audio_codec != AV_CODEC_ID_NONE)
{
add_stream(&m_AudioStream, m_pFormatContext, &m_AudioCodec, m_pFormat->audio_codec);
}
else
{
dbg_msg("video_recorder", "No audio.");
}
/* Now that all the parameters are set, we can open the audio and
* video codecs and allocate the necessary encode buffers. */
open_video();
if (m_HasAudio)
open_audio();
// TODO: remove/comment:
av_dump_format(m_pFormatContext, 0, aWholePath, 1);
/* open the output file, if needed */
if (!(m_pFormat->flags & AVFMT_NOFILE))
{
int ret = avio_open(&m_pFormatContext->pb, aWholePath, AVIO_FLAG_WRITE);
if (ret < 0)
{
char aBuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, aBuf, sizeof(aBuf));
dbg_msg("video_recorder", "Could not open '%s': %s", aWholePath, aBuf);
return;
}
}
if (!m_VideoStream.sws_ctx)
{
m_VideoStream.sws_ctx = sws_getCachedContext(
m_VideoStream.sws_ctx,
m_VideoStream.enc->width, m_VideoStream.enc->height, AV_PIX_FMT_RGB24,
m_VideoStream.enc->width, m_VideoStream.enc->height, AV_PIX_FMT_YUV420P,
0, 0, 0, 0
);
}
/* Write the stream header, if any. */
int ret = avformat_write_header(m_pFormatContext, &m_pOptDict);
if (ret < 0)
{
char aBuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, aBuf, sizeof(aBuf));
dbg_msg("video_recorder", "Error occurred when opening output file: %s", aBuf);
return;
}
m_Recording = true;
m_Started = true;
ms_Time = time_get();
m_vframe = 0;
}
void CVideo::stop()
{
m_Recording = false;
while (m_ProcessingVideoFrame || m_ProcessingAudioFrame)
thread_sleep(10);
finish_frames(&m_VideoStream);
if (m_HasAudio)
finish_frames(&m_AudioStream);
av_write_trailer(m_pFormatContext);
close_stream(&m_VideoStream);
if (m_HasAudio)
close_stream(&m_AudioStream);
//fclose(m_dbgfile);
if (!(m_pFormat->flags & AVFMT_NOFILE))
avio_closep(&m_pFormatContext->pb);
if (m_pFormatContext)
avformat_free_context(m_pFormatContext);
if (m_pRGB)
free(m_pRGB);
if (m_pPixels)
free(m_pPixels);
if (ms_pCurrentVideo)
delete ms_pCurrentVideo;
}
void CVideo::nextVideoFrame_thread()
{
if (m_NextFrame && m_Recording)
{
// #ifdef CONF_PLATFORM_MACOSX
// CAutoreleasePool AutoreleasePool;
// #endif
m_vseq += 1;
if(m_vseq >= 2)
{
m_ProcessingVideoFrame = true;
m_VideoStream.frame->pts = (int64_t)m_VideoStream.enc->frame_number;
dbg_msg("video_recorder", "vframe: %d", m_VideoStream.enc->frame_number);
read_rgb_from_gl();
fill_video_frame();
lock_wait(m_WriteLock);
write_frame(&m_VideoStream);
lock_unlock(m_WriteLock);
m_ProcessingVideoFrame = false;
}
m_NextFrame = false;
// sync_barrier();
// m_Semaphore.signal();
}
}
void CVideo::nextVideoFrame()
{
if (m_Recording)
{
// #ifdef CONF_PLATFORM_MACOSX
// CAutoreleasePool AutoreleasePool;
// #endif
dbg_msg("video_recorder", "called");
ms_Time += ms_TickTime;
ms_LocalTime = (ms_Time-ms_LocalStartTime)/(float)time_freq();
m_NextFrame = true;
m_vframe += 1;
// m_pGraphics->KickCommandBuffer();
//thread_sleep(500);
// m_Semaphore.wait();
}
}
void CVideo::nextAudioFrame_timeline()
{
if (m_Recording && m_HasAudio)
{
//if (m_vframe * m_AudioStream.enc->sample_rate / m_FPS >= m_AudioStream.enc->frame_number*m_AudioStream.enc->frame_size)
if (m_VideoStream.enc->frame_number * (double)m_AudioStream.enc->sample_rate / m_FPS >= (double)m_AudioStream.enc->frame_number*m_AudioStream.enc->frame_size)
{
m_NextaFrame = true;
}
}
}
void CVideo::nextAudioFrame(void (*Mix)(short *pFinalOut, unsigned Frames))
{
if (m_NextaFrame && m_Recording && m_HasAudio)
{
m_ProcessingAudioFrame = true;
//dbg_msg("video recorder", "video_frame: %lf", (double)(m_vframe/m_FPS));
//if((double)(m_vframe/m_FPS) < m_AudioStream.enc->frame_number*m_AudioStream.enc->frame_size/m_AudioStream.enc->sample_rate)
//return;
Mix(m_aBuffer, ALEN);
//m_AudioStream.frame->pts = m_AudioStream.enc->frame_number;
dbg_msg("video_recorder", "aframe: %d", m_AudioStream.enc->frame_number);
// memcpy(m_AudioStream.tmp_frame->data[0], pData, sizeof(int16_t) * m_SndBufferSize * 2);
//
// for (int i = 0; i < m_SndBufferSize; i++)
// {
// dbg_msg("video_recorder", "test: %d %d", ((int16_t*)pData)[i*2], ((int16_t*)pData)[i*2 + 1]);
// }
int dst_nb_samples;
av_samples_fill_arrays(
(uint8_t**)m_AudioStream.tmp_frame->data,
0, // pointer to linesize (int*)
(const uint8_t*)m_aBuffer,
2, // channels
m_AudioStream.tmp_frame->nb_samples,
AV_SAMPLE_FMT_S16,
0 // align
);
dst_nb_samples = av_rescale_rnd(
swr_get_delay(
m_AudioStream.swr_ctx,
m_AudioStream.enc->sample_rate
) + m_AudioStream.tmp_frame->nb_samples,
m_AudioStream.enc->sample_rate,
m_AudioStream.enc->sample_rate, AV_ROUND_UP
);
// dbg_msg("video_recorder", "dst_nb_samples: %d", dst_nb_samples);
// fwrite(m_aBuffer, sizeof(short), 2048, m_dbgfile);
int ret = av_frame_make_writable(m_AudioStream.frame);
if (ret < 0)
exit(1);
/* convert to destination format */
ret = swr_convert(
m_AudioStream.swr_ctx,
m_AudioStream.frame->data,
m_AudioStream.frame->nb_samples,
(const uint8_t **)m_AudioStream.tmp_frame->data,
m_AudioStream.tmp_frame->nb_samples
);
if (ret < 0)
{
dbg_msg("video_recorder", "Error while converting");
exit(1);
}
// frame = ost->frame;
//
m_AudioStream.frame->pts = av_rescale_q(m_AudioStream.samples_count, AVRational{1, m_AudioStream.enc->sample_rate}, m_AudioStream.enc->time_base);
m_AudioStream.samples_count += dst_nb_samples;
// dbg_msg("video_recorder", "prewrite----");
lock_wait(m_WriteLock);
write_frame(&m_AudioStream);
lock_unlock(m_WriteLock);
m_ProcessingAudioFrame = false;
m_NextaFrame = false;
}
}
void CVideo::fill_audio_frame()
{
;
}
void CVideo::fill_video_frame()
{
const int in_linesize[1] = { 3 * m_VideoStream.enc->width };
sws_scale(m_VideoStream.sws_ctx, (const uint8_t * const *)&m_pRGB, in_linesize, 0,
m_VideoStream.enc->height, m_VideoStream.frame->data, m_VideoStream.frame->linesize);
}
void CVideo::read_rgb_from_gl()
{
int i, j, k;
size_t cur_gl, cur_rgb;
/* Get RGBA to align to 32 bits instead of just 24 for RGB. May be faster for FFmpeg. */
glReadBuffer(GL_FRONT);
GLint Alignment;
glGetIntegerv(GL_PACK_ALIGNMENT, &Alignment);
glPixelStorei(GL_PACK_ALIGNMENT, 1);
glReadPixels(0, 0, m_Width, m_Height, GL_RGB, GL_UNSIGNED_BYTE, m_pPixels);
glPixelStorei(GL_PACK_ALIGNMENT, Alignment);
for (i = 0; i < m_Height; i++)
{
for (j = 0; j < m_Width; j++)
{
cur_gl = format_nchannels * (m_Width * (m_Height - i - 1) + j);
cur_rgb = format_nchannels * (m_Width * i + j);
for (k = 0; k < (int)format_nchannels; k++)
m_pRGB[cur_rgb + k] = m_pPixels[cur_gl + k];
}
}
}
AVFrame* CVideo::alloc_picture(enum AVPixelFormat pix_fmt, int width, int height)
{
AVFrame* picture;
int ret;
picture = av_frame_alloc();
if (!picture)
return NULL;
picture->format = pix_fmt;
picture->width = width;
picture->height = height;
/* allocate the buffers for the frame data */
ret = av_frame_get_buffer(picture, 32);
if (ret < 0) {
dbg_msg("video_recorder", "Could not allocate frame data.");
exit(1);
}
return picture;
}
AVFrame* CVideo::alloc_audio_frame(enum AVSampleFormat sample_fmt, uint64_t channel_layout, int sample_rate, int nb_samples)
{
AVFrame *frame = av_frame_alloc();
int ret;
if (!frame) {
dbg_msg("video_recorder", "Error allocating an audio frame");
exit(1);
}
frame->format = sample_fmt;
frame->channel_layout = channel_layout;
frame->sample_rate = sample_rate;
frame->nb_samples = nb_samples;
if (nb_samples) {
ret = av_frame_get_buffer(frame, 0);
if (ret < 0) {
dbg_msg("video_recorder", "Error allocating an audio buffer");
exit(1);
}
}
return frame;
}
void CVideo::open_video()
{
int ret;
AVCodecContext* c = m_VideoStream.enc;
AVDictionary* opt = 0;
av_dict_copy(&opt, m_pOptDict, 0);
/* open the codec */
ret = avcodec_open2(c, m_VideoCodec, &opt);
av_dict_free(&opt);
if (ret < 0)
{
char aBuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, aBuf, sizeof(aBuf));
dbg_msg("video_recorder", "Could not open video codec: %s", aBuf);
exit(1);
}
/* allocate and init a re-usable frame */
m_VideoStream.frame = alloc_picture(c->pix_fmt, c->width, c->height);
if (!m_VideoStream.frame)
{
dbg_msg("video_recorder", "Could not allocate video frame");
exit(1);
}
/* If the output format is not YUV420P, then a temporary YUV420P
* picture is needed too. It is then converted to the required
* output format. */
m_VideoStream.tmp_frame = NULL;
if (c->pix_fmt != AV_PIX_FMT_YUV420P)
{
m_VideoStream.tmp_frame = alloc_picture(AV_PIX_FMT_YUV420P, c->width, c->height);
if (!m_VideoStream.tmp_frame)
{
dbg_msg("video_recorder", "Could not allocate temporary picture");
exit(1);
}
}
/* copy the stream parameters to the muxer */
ret = avcodec_parameters_from_context(m_VideoStream.st->codecpar, c);
if (ret < 0)
{
dbg_msg("video_recorder", "Could not copy the stream parameters");
exit(1);
}
m_vseq = 0;
}
void CVideo::open_audio()
{
AVCodecContext *c;
int nb_samples;
int ret;
AVDictionary *opt = NULL;
c = m_AudioStream.enc;
/* open it */
//m_dbgfile = fopen("/tmp/pcm_dbg", "wb");
av_dict_copy(&opt, m_pOptDict, 0);
ret = avcodec_open2(c, m_AudioCodec, &opt);
av_dict_free(&opt);
if (ret < 0)
{
char aBuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, aBuf, sizeof(aBuf));
dbg_msg("video_recorder", "Could not open audio codec: %s", aBuf);
exit(1);
}
if (c->codec->capabilities & AV_CODEC_CAP_VARIABLE_FRAME_SIZE)
nb_samples = 10000;
else
nb_samples = c->frame_size;
m_AudioStream.frame = alloc_audio_frame(c->sample_fmt, c->channel_layout, c->sample_rate, nb_samples);
m_AudioStream.tmp_frame = alloc_audio_frame(AV_SAMPLE_FMT_S16, AV_CH_LAYOUT_STEREO, g_Config.m_SndRate, m_SndBufferSize * 2);
/* copy the stream parameters to the muxer */
ret = avcodec_parameters_from_context(m_AudioStream.st->codecpar, c);
if (ret < 0) {
dbg_msg("video_recorder", "Could not copy the stream parameters");
exit(1);
}
/* create resampler context */
m_AudioStream.swr_ctx = swr_alloc();
if (!m_AudioStream.swr_ctx) {
dbg_msg("video_recorder", "Could not allocate resampler context");
exit(1);
}
/* set options */
av_opt_set_int (m_AudioStream.swr_ctx, "in_channel_count", 2, 0);
av_opt_set_int (m_AudioStream.swr_ctx, "in_sample_rate", g_Config.m_SndRate, 0);
av_opt_set_sample_fmt(m_AudioStream.swr_ctx, "in_sample_fmt", AV_SAMPLE_FMT_S16, 0);
av_opt_set_int (m_AudioStream.swr_ctx, "out_channel_count", c->channels, 0);
av_opt_set_int (m_AudioStream.swr_ctx, "out_sample_rate", c->sample_rate, 0);
av_opt_set_sample_fmt(m_AudioStream.swr_ctx, "out_sample_fmt", c->sample_fmt, 0);
/* initialize the resampling context */
if ((ret = swr_init(m_AudioStream.swr_ctx)) < 0) {
dbg_msg("video_recorder", "Failed to initialize the resampling context");
exit(1);
}
}
/* Add an output stream. */
void CVideo::add_stream(OutputStream *ost, AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id)
{
AVCodecContext *c;
/* find the encoder */
*codec = avcodec_find_encoder(codec_id);
if (!(*codec))
{
dbg_msg("video_recorder", "Could not find encoder for '%s'",
avcodec_get_name(codec_id));
exit(1);
}
ost->st = avformat_new_stream(oc, NULL);
if (!ost->st)
{
dbg_msg("video_recorder", "Could not allocate stream");
exit(1);
}
ost->st->id = oc->nb_streams-1;
c = avcodec_alloc_context3(*codec);
if (!c)
{
dbg_msg("video_recorder", "Could not alloc an encoding context");
exit(1);
}
ost->enc = c;
switch ((*codec)->type)
{
case AVMEDIA_TYPE_AUDIO:
// m_MixingRate = g_Config.m_SndRate;
//
// // Set 16-bit stereo audio at 22Khz
// Format.freq = g_Config.m_SndRate; // ignore_convention
// Format.format = AUDIO_S16; // ignore_convention
// Format.channels = 2; // ignore_convention
// Format.samples = g_Config.m_SndBufferSize; // ignore_convention
c->sample_fmt = (*codec)->sample_fmts ? (*codec)->sample_fmts[0] : AV_SAMPLE_FMT_FLTP;
c->bit_rate = g_Config.m_SndRate * 2 * 16;
c->frame_size = m_SndBufferSize;
c->sample_rate = g_Config.m_SndRate;
if ((*codec)->supported_samplerates)
{
c->sample_rate = (*codec)->supported_samplerates[0];
for (int i = 0; (*codec)->supported_samplerates[i]; i++)
{
if ((*codec)->supported_samplerates[i] == g_Config.m_SndRate)
c->sample_rate = g_Config.m_SndRate;
}
}
c->channels = 2;
c->channel_layout = AV_CH_LAYOUT_STEREO;
ost->st->time_base.num = 1;
ost->st->time_base.den = c->sample_rate;
break;
case AVMEDIA_TYPE_VIDEO:
c->codec_id = codec_id;
c->bit_rate = 400000;
/* Resolution must be a multiple of two. */
c->width = m_Width;
c->height = m_Height%2==0?m_Height:m_Height-1;
/* timebase: This is the fundamental unit of time (in seconds) in terms
* of which frame timestamps are represented. For fixed-fps content,
* timebase should be 1/framerate and timestamp increments should be
* identical to 1. */
ost->st->time_base.num = 1;
ost->st->time_base.den = m_FPS;
c->time_base = ost->st->time_base;
c->gop_size = 12; /* emit one intra frame every twelve frames at most */
c->pix_fmt = STREAM_PIX_FMT;
if (c->codec_id == AV_CODEC_ID_MPEG2VIDEO)
{
/* just for testing, we also add B-frames */
c->max_b_frames = 2;
}
if (c->codec_id == AV_CODEC_ID_MPEG1VIDEO)
{
/* Needed to avoid using macroblocks in which some coeffs overflow.
* This does not happen with normal video, it just happens here as
* the motion of the chroma plane does not match the luma plane. */
c->mb_decision = 2;
}
if (codec_id == AV_CODEC_ID_H264)
{
const char *presets[10] = {"ultrafast", "superfast", "veryfast", "faster", "fast", "medium", "slow", "slower", "veryslow", "placebo"};
//av_opt_set(c->priv_data, "preset", "slow", 0);
//av_opt_set_int(c->priv_data, "crf", 22, 0);
av_opt_set(c->priv_data, "preset", presets[g_Config.m_ClVideoX264Preset], 0);
av_opt_set_int(c->priv_data, "crf", g_Config.m_ClVideoX264Crf, 0);
}
break;
default:
break;
}
/* Some formats want stream headers to be separate. */
if (oc->oformat->flags & AVFMT_GLOBALHEADER)
c->flags |= AV_CODEC_FLAG_GLOBAL_HEADER;
}
void CVideo::write_frame(OutputStream* pStream)
{
//lock_wait(m_WriteLock);
int ret_recv = 0;
AVPacket Packet = { 0 };
av_init_packet(&Packet);
Packet.data = 0;
Packet.size = 0;
avcodec_send_frame(pStream->enc, pStream->frame);
do
{
ret_recv = avcodec_receive_packet(pStream->enc, &Packet);
if (!ret_recv)
{
/* rescale output packet timestamp values from codec to stream timebase */
av_packet_rescale_ts(&Packet, pStream->enc->time_base, pStream->st->time_base);
Packet.stream_index = pStream->st->index;
if (int ret = av_interleaved_write_frame(m_pFormatContext, &Packet))
{
char aBuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, aBuf, sizeof(aBuf));
dbg_msg("video_recorder", "Error while writing video frame: %s", aBuf);
}
}
else
break;
} while (true);
if (ret_recv && ret_recv != AVERROR(EAGAIN))
{
dbg_msg("video_recorder", "Error encoding frame, error: %d", ret_recv);
}
//lock_unlock(m_WriteLock);
}
void CVideo::finish_frames(OutputStream* pStream)
{
dbg_msg("video_recorder", "------------");
int ret_recv = 0;
AVPacket Packet = { 0 };
av_init_packet(&Packet);
Packet.data = 0;
Packet.size = 0;
avcodec_send_frame(pStream->enc, 0);
do
{
ret_recv = avcodec_receive_packet(pStream->enc, &Packet);
if (!ret_recv)
{
/* rescale output packet timestamp values from codec to stream timebase */
//if(pStream->st->codec->codec_type == AVMEDIA_TYPE_AUDIO)
av_packet_rescale_ts(&Packet, pStream->enc->time_base, pStream->st->time_base);
Packet.stream_index = pStream->st->index;
if (int ret = av_interleaved_write_frame(m_pFormatContext, &Packet))
{
char aBuf[AV_ERROR_MAX_STRING_SIZE];
av_strerror(ret, aBuf, sizeof(aBuf));
dbg_msg("video_recorder", "Error while writing video frame: %s", aBuf);
}
}
else
break;
} while (true);
if (ret_recv && ret_recv != AVERROR_EOF)
{
dbg_msg("video_recorder", "failed to finish recoding, error: %d", ret_recv);
}
}
void CVideo::close_stream(OutputStream* ost)
{
avcodec_free_context(&ost->enc);
av_frame_free(&ost->frame);
av_frame_free(&ost->tmp_frame);
sws_freeContext(ost->sws_ctx);
swr_free(&ost->swr_ctx);
}
#endif

src/engine/client/video.h (new file, 136 lines)

@ -0,0 +1,136 @@
#ifndef ENGINE_CLIENT_VIDEO_H
#define ENGINE_CLIENT_VIDEO_H
#if defined(__ANDROID__)
#define GL_GLEXT_PROTOTYPES
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <GL/glu.h>
#define glOrtho glOrthof
#else
#include "SDL_opengl.h"
#if defined(CONF_PLATFORM_MACOSX)
#include "OpenGL/glu.h"
#else
#include "GL/glu.h"
#endif
#endif
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
};
#include <base/system.h>
#include <engine/shared/video.h>
#include <engine/shared/demo.h>
#define ALEN 2048
// a wrapper around a single output AVStream
typedef struct OutputStream {
AVStream *st;
AVCodecContext *enc;
/* pts of the next frame that will be generated */
int64_t next_pts;
int samples_count;
AVFrame *frame;
AVFrame *tmp_frame;
struct SwsContext *sws_ctx;
struct SwrContext *swr_ctx;
} OutputStream;
class CVideo : public IVideo
{
public:
CVideo(class CGraphics_Threaded* pGraphics, class IStorage* pStorage, class IConsole *pConsole, int width, int height, const char *name);
~CVideo();
virtual void start();
virtual void stop();
virtual void nextVideoFrame();
virtual void nextVideoFrame_thread();
virtual bool frameRendered() { return !m_NextFrame; }
virtual void nextAudioFrame(void (*Mix)(short *pFinalOut, unsigned Frames));
virtual void nextAudioFrame_timeline();
virtual bool aframeRendered() { return !m_NextaFrame; }
static IVideo* Current() { return IVideo::ms_pCurrentVideo; }
static void Init() { av_log_set_level(AV_LOG_DEBUG); }
private:
void fill_video_frame();
void read_rgb_from_gl();
void fill_audio_frame();
void open_video();
void open_audio();
AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height);
AVFrame* alloc_audio_frame(enum AVSampleFormat sample_fmt, uint64_t channel_layout, int sample_rate, int nb_samples);
void write_frame(OutputStream* pStream);
void finish_frames(OutputStream* pStream);
void close_stream(OutputStream *ost);
void add_stream(OutputStream *ost, AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id);
class CGraphics_Threaded* m_pGraphics;
class IStorage* m_pStorage;
class IConsole* m_pConsole;
int m_Width;
int m_Height;
char m_Name[256];
//FILE *m_dbgfile;
int m_vseq;
short m_aBuffer[ALEN*2];
int m_vframe;
int m_FPS;
bool m_Started;
bool m_Recording;
bool m_ProcessingVideoFrame;
bool m_ProcessingAudioFrame;
bool m_NextFrame;
bool m_NextaFrame;
bool m_HasAudio;
GLubyte* m_pPixels;
OutputStream m_VideoStream;
OutputStream m_AudioStream;
AVCodec* m_VideoCodec;
AVCodec* m_AudioCodec;
AVDictionary* m_pOptDict;
AVFormatContext* m_pFormatContext;
AVOutputFormat* m_pFormat;
uint8_t* m_pRGB;
int m_SndBufferSize;
};
#endif


@ -368,3 +368,8 @@ MACRO_CONFIG_INT(GfxEnableTextureUnitOptimization, gfx_enable_texture_unit_optim
MACRO_CONFIG_INT(GfxEnableTextureUnitOptimization, gfx_enable_texture_unit_optimization, 0, 0, 1, CFGFLAG_SAVE|CFGFLAG_CLIENT, "Use multiple texture units, instead of only one.")
#endif
MACRO_CONFIG_INT(GfxUsePreinitBuffer, gfx_use_preinitialized_buffer, 0, 0, 1, CFGFLAG_SAVE|CFGFLAG_CLIENT, "Use only one buffer for data, that is uploaded to the GPU(might help when using an iGPUs).")
#if defined(CONF_VIDEORECORDER)
MACRO_CONFIG_INT(ClVideoRecorderFPS, cl_video_recorder_fps, 60, 1, 1000, CFGFLAG_SAVE|CFGFLAG_CLIENT, "At which FPS the videorecorder should record demos.")
#endif


@ -8,6 +8,12 @@
#include <engine/shared/config.h>
#if defined(CONF_VIDEORECORDER)
#include <engine/shared/video.h>
#endif
#include <game/generated/protocol.h>
#include "compression.h"
#include "demo.h"
#include "memheap.h"
@ -404,6 +410,9 @@ CDemoPlayer::CDemoPlayer(class CSnapshotDelta *pSnapshotDelta)
m_pKeyFrames = 0;
m_SpeedIndex = 4;
m_TickTime = 0;
m_Time = 0;
m_pSnapshotDelta = pSnapshotDelta;
m_LastSnapshotDataSize = -1;
}
@ -556,6 +565,10 @@ void CDemoPlayer::DoTick()
// stop on error or eof
if(m_pConsole)
m_pConsole->Print(IConsole::OUTPUT_LEVEL_ADDINFO, "demo_player", "end of file");
#if defined(CONF_VIDEORECORDER)
if (IVideo::Current())
Stop();
#endif
if(m_Info.m_PreviousTick == -1)
{
if(m_pConsole)
@ -852,6 +865,34 @@ int CDemoPlayer::NextFrame()
return IsPlaying();
}
int64 CDemoPlayer::time()
{
#if defined(CONF_VIDEORECORDER)
static bool s_Recording = false;
if (IVideo::Current())
{
if (!s_Recording)
{
s_Recording = true;
m_Info.m_LastUpdate = IVideo::time();
}
return IVideo::time();
}
else
{
int64 Now = time_get();
if (s_Recording)
{
s_Recording = false;
m_Info.m_LastUpdate = Now;
}
return Now;
}
#else
return time_get();
#endif
}
int CDemoPlayer::Play()
{
// fill in previous and next tick
@ -862,7 +903,7 @@ int CDemoPlayer::Play()
/*m_Info.start_tick = m_Info.previous_tick;
m_Info.start_time = time_get();*/
m_Info.m_CurrentTime = m_Info.m_PreviousTick*time_freq()/SERVER_TICK_SPEED;
m_Info.m_LastUpdate = time_get();
m_Info.m_LastUpdate = time();
return 0;
}
@ -926,7 +967,7 @@ void CDemoPlayer::SetSpeedIndex(int Offset)
int CDemoPlayer::Update(bool RealTime)
{
int64 Now = time_get();
int64 Now = time();
int64 Deltatime = Now-m_Info.m_LastUpdate;
m_Info.m_LastUpdate = Now;
@ -976,6 +1017,8 @@ int CDemoPlayer::Update(bool RealTime)
m_pConsole->Print(IConsole::OUTPUT_LEVEL_ADDINFO, "demo_player", aBuf);
}
}
m_Time += m_TickTime;
}
return 0;
@ -983,6 +1026,11 @@ int CDemoPlayer::Update(bool RealTime)
int CDemoPlayer::Stop()
{
#if defined(CONF_VIDEORECORDER)
if (IVideo::Current())
IVideo::Current()->stop();
#endif
if(!m_File)
return -1;


@ -114,6 +114,11 @@ private:
void ScanFile();
int NextFrame();
int64 time();
int64 m_TickTime;
int64 m_Time;
public:
CDemoPlayer(class CSnapshotDelta *m_pSnapshotDelta);


@ -55,6 +55,9 @@ public:
fs_makedir(GetPath(TYPE_SAVE, "screenshots/auto/stats", aPath, sizeof(aPath)));
fs_makedir(GetPath(TYPE_SAVE, "maps", aPath, sizeof(aPath)));
fs_makedir(GetPath(TYPE_SAVE, "downloadedmaps", aPath, sizeof(aPath)));
#if defined(CONF_VIDEORECORDER)
fs_makedir(GetPath(TYPE_SAVE, "videos", aPath, sizeof(aPath)));
#endif
}
fs_makedir(GetPath(TYPE_SAVE, "dumps", aPath, sizeof(aPath)));
fs_makedir(GetPath(TYPE_SAVE, "demos", aPath, sizeof(aPath)));


src/engine/shared/video.cpp (new file, 14 lines)

@ -0,0 +1,14 @@
#if defined(CONF_VIDEORECORDER)
#include <engine/shared/config.h>
#include "video.h"
IVideo* IVideo::ms_pCurrentVideo = 0;
int64 IVideo::ms_Time = 0;
float IVideo::ms_LocalTime = 0;
int64 IVideo::ms_LocalStartTime = 0;
int64 IVideo::ms_TickTime = 0;
#endif

src/engine/shared/video.h (new file, 39 lines)

@ -0,0 +1,39 @@
#ifndef ENGINE_SHARED_VIDEO_H
#define ENGINE_SHARED_VIDEO_H
#include <base/system.h>
class IVideo
{
public:
virtual ~IVideo() {};
virtual void start() = 0;
virtual void stop() = 0;
virtual void nextVideoFrame() = 0;
virtual bool frameRendered() = 0;
virtual void nextVideoFrame_thread() = 0;
virtual void nextAudioFrame(void (*Mix)(short *pFinalOut, unsigned Frames)) = 0;
virtual bool aframeRendered() = 0;
virtual void nextAudioFrame_timeline() = 0;
static IVideo* Current() { return ms_pCurrentVideo; }
static int64 time() { return ms_Time; }
static float LocalTime() { return ms_LocalTime; }
static void SetLocalStartTime(int64 LocalStartTime) { ms_LocalStartTime = LocalStartTime; }
static void SetFPS(int fps) { ms_TickTime = time_freq() / fps; }
protected:
static IVideo* ms_pCurrentVideo;
static int64 ms_Time;
static int64 ms_LocalStartTime;
static float ms_LocalTime;
static int64 ms_TickTime;
};
#endif


@ -3,6 +3,10 @@
#ifndef GAME_CLIENT_COMPONENT_H
#define GAME_CLIENT_COMPONENT_H
#if defined(CONF_VIDEORECORDER)
#include <engine/shared/video.h>
#endif
#include <engine/input.h>
#include "gameclient.h"
@ -29,6 +33,16 @@ protected:
class CLayers *Layers() const { return m_pClient->Layers(); }
class CCollision *Collision() const { return m_pClient->Collision(); }
class IUpdater *Updater() const { return m_pClient->Updater(); }
#if defined(CONF_VIDEORECORDER)
int64 time() const { return IVideo::Current() ? IVideo::time() : time_get(); }
float LocalTime() const { return IVideo::Current() ? IVideo::LocalTime() : Client()->LocalTime(); }
#else
int64 time() const { return time_get(); }
float LocalTime() const { return Client()->LocalTime(); }
#endif
public:
virtual ~CComponent() {}
class CGameClient *GameClient() const { return m_pClient; }


@ -269,7 +269,7 @@ bool CChat::OnInput(IInput::CEvent Event)
{
bool AddEntry = false;
if(m_LastChatSend+time_freq() < time_get())
if(m_LastChatSend+time_freq() < time())
{
Say(m_Mode == MODE_ALL ? 0 : 1, m_Input.GetString());
AddEntry = true;
@ -600,7 +600,7 @@ void CChat::AddLine(int ClientID, int Team, const char *pLine)
}
m_CurrentLine = (m_CurrentLine+1)%MAX_LINES;
m_aLines[m_CurrentLine].m_Time = time_get();
m_aLines[m_CurrentLine].m_Time = time();
m_aLines[m_CurrentLine].m_YOffset[0] = -1.0f;
m_aLines[m_CurrentLine].m_YOffset[1] = -1.0f;
m_aLines[m_CurrentLine].m_ClientID = ClientID;
@ -684,8 +684,8 @@ void CChat::AddLine(int ClientID, int Team, const char *pLine)
}
// play sound
int64 Now = time_get();
if(ClientID == -1) // Server message
int64 Now = time();
if(ClientID == -1)
{
if(Now-m_aLastSoundPlayed[CHAT_SERVER] >= time_freq()*3/10)
{
@ -748,7 +748,7 @@ void CChat::OnPrepareLines()
m_PrevScoreBoardShowed = m_pClient->m_pScoreboard->Active();
m_PrevShowChat = m_Show;
int64 Now = time_get();
int64 Now = time();
float LineWidth = m_pClient->m_pScoreboard->Active() ? 90.0f : 200.0f;
float HeightLimit = m_pClient->m_pScoreboard->Active() ? 230.0f : m_Show ? 50.0f : 200.0f;
float Begin = x;
@ -888,7 +888,7 @@ void CChat::OnPrepareLines()
void CChat::OnRender()
{
// send pending chat messages
if(m_PendingChatCounter > 0 && m_LastChatSend+time_freq() < time_get())
if(m_PendingChatCounter > 0 && m_LastChatSend+time_freq() < time())
{
CHistoryEntry *pEntry = m_History.Last();
for(int i = m_PendingChatCounter-1; pEntry; --i, pEntry = m_History.Prev(pEntry))
@ -969,14 +969,18 @@ void CChat::OnRender()
TextRender()->TextEx(&Cursor, m_Input.GetString(Editing)+m_Input.GetCursorOffset(Editing), -1);
}
#if defined(CONF_VIDEORECORDER)
if(!((g_Config.m_ClShowChat && !IVideo::Current()) || (g_Config.m_ClVideoShowChat && IVideo::Current())))
#else
if(!g_Config.m_ClShowChat)
#endif
return;
y -= 8.0f;
OnPrepareLines();
int64 Now = time_get();
int64 Now = time();
float HeightLimit = m_pClient->m_pScoreboard->Active() ? 230.0f : m_Show ? 50.0f : 200.0f;
int OffsetType = m_pClient->m_pScoreboard->Active() ? 1 : 0;
for(int i = 0; i < MAX_LINES; i++)
@ -1004,7 +1008,7 @@ void CChat::OnRender()
void CChat::Say(int Team, const char *pLine)
{
m_LastChatSend = time_get();
m_LastChatSend = time();
// send chat message
CNetMsg_Cl_Say Msg;
@ -1020,7 +1024,7 @@ void CChat::SayChat(const char *pLine)
bool AddEntry = false;
if(m_LastChatSend+time_freq() < time_get())
if(m_LastChatSend+time_freq() < time())
{
Say(m_Mode == MODE_ALL ? 0 : 1, pLine);
AddEntry = true;


@ -39,7 +39,7 @@ void CDamageInd::Create(vec2 Pos, vec2 Dir)
if(i)
{
i->m_Pos = Pos;
i->m_StartTime = Client()->LocalTime();
i->m_StartTime = LocalTime();
i->m_Dir = Dir*-1;
i->m_StartAngle = (( (float)rand()/(float)RAND_MAX) - 1.0f) * 2.0f * pi;
}
@ -48,24 +48,24 @@ void CDamageInd::Create(vec2 Pos, vec2 Dir)
void CDamageInd::OnRender()
{
Graphics()->TextureSet(g_pData->m_aImages[IMAGE_GAME].m_Id);
static float s_LastLocalTime = Client()->LocalTime();
static float s_LastLocalTime = LocalTime();
for(int i = 0; i < m_NumItems;)
{
if(Client()->State() == IClient::STATE_DEMOPLAYBACK)
{
const IDemoPlayer::CInfo *pInfo = DemoPlayer()->BaseInfo();
if(pInfo->m_Paused)
m_aItems[i].m_StartTime += Client()->LocalTime()-s_LastLocalTime;
m_aItems[i].m_StartTime += LocalTime()-s_LastLocalTime;
else
m_aItems[i].m_StartTime += (Client()->LocalTime()-s_LastLocalTime)*(1.0f-pInfo->m_Speed);
m_aItems[i].m_StartTime += (LocalTime()-s_LastLocalTime)*(1.0f-pInfo->m_Speed);
}
else
{
if(m_pClient->m_Snap.m_pGameInfoObj && m_pClient->m_Snap.m_pGameInfoObj->m_GameStateFlags&GAMESTATEFLAG_PAUSED)
m_aItems[i].m_StartTime += Client()->LocalTime()-s_LastLocalTime;
m_aItems[i].m_StartTime += LocalTime()-s_LastLocalTime;
}
float Life = 0.75f - (Client()->LocalTime() - m_aItems[i].m_StartTime);
float Life = 0.75f - (LocalTime() - m_aItems[i].m_StartTime);
if(Life < 0.0f)
DestroyI(&m_aItems[i]);
else
@ -77,10 +77,7 @@ void CDamageInd::OnRender()
i++;
}
}
s_LastLocalTime = Client()->LocalTime();
Graphics()->QuadsSetRotation(0);
Graphics()->SetColor(1.f, 1.f, 1.f, 1.f);
s_LastLocalTime = LocalTime();
}
void CDamageInd::OnInit()


@ -267,18 +267,18 @@ void CEffects::OnRender()
{
const IDemoPlayer::CInfo *pInfo = DemoPlayer()->BaseInfo();
if(time_get()-LastUpdate100hz > time_freq()/(100*pInfo->m_Speed))
if(time()-LastUpdate100hz > time_freq()/(100*pInfo->m_Speed))
{
m_Add100hz = true;
LastUpdate100hz = time_get();
LastUpdate100hz = time();
}
else
m_Add100hz = false;
if(time_get()-LastUpdate50hz > time_freq()/(100*pInfo->m_Speed))
if(time()-LastUpdate50hz > time_freq()/(100*pInfo->m_Speed))
{
m_Add50hz = true;
LastUpdate50hz = time_get();
LastUpdate50hz = time();
}
else
m_Add50hz = false;
@ -289,18 +289,18 @@ void CEffects::OnRender()
return;
}
if(time_get()-LastUpdate100hz > time_freq()/100)
if(time()-LastUpdate100hz > time_freq()/100)
{
m_Add100hz = true;
LastUpdate100hz = time_get();
LastUpdate100hz = time();
}
else
m_Add100hz = false;
if(time_get()-LastUpdate50hz > time_freq()/100)
if(time()-LastUpdate50hz > time_freq()/100)
{
m_Add50hz = true;
LastUpdate50hz = time_get();
LastUpdate50hz = time();
}
else
m_Add50hz = false;


@ -139,7 +139,7 @@ void CHud::RenderGameTimer()
// last 60 sec red, last 10 sec blink
if(m_pClient->m_Snap.m_pGameInfoObj->m_TimeLimit && Time <= 60 && (m_pClient->m_Snap.m_pGameInfoObj->m_WarmupTimer <= 0))
{
float Alpha = Time <= 10 && (2*time_get()/time_freq()) % 2 ? 0.5f : 1.0f;
float Alpha = Time <= 10 && (2*time()/time_freq()) % 2 ? 0.5f : 1.0f;
TextRender()->TextColor(1.0f, 0.25f, 0.25f, Alpha);
}
TextRender()->Text(0, Half-w/2, 2, FontSize, aBuf, -1);
@ -580,7 +580,7 @@ void CHud::RenderConnectionWarning()
void CHud::RenderTeambalanceWarning()
{
// render prompt about team-balance
bool Flash = time_get()/(time_freq()/2)%2 == 0;
bool Flash = time()/(time_freq()/2)%2 == 0;
if(m_pClient->m_Snap.m_pGameInfoObj->m_GameFlags&GAMEFLAG_TEAMS)
{
int TeamDiff = m_pClient->m_Snap.m_aTeamSize[TEAM_RED]-m_pClient->m_Snap.m_aTeamSize[TEAM_BLUE];
@ -770,7 +770,11 @@ void CHud::OnRender()
m_Height = 300.0f;
Graphics()->MapScreen(0.0f, 0.0f, m_Width, m_Height);
#if defined(CONF_VIDEORECORDER)
if((IVideo::Current()&&g_Config.m_ClVideoShowhud)||(!IVideo::Current()&&g_Config.m_ClShowhud))
#else
if(g_Config.m_ClShowhud)
#endif
{
if(m_pClient->m_Snap.m_pLocalCharacter && !(m_pClient->m_Snap.m_pGameInfoObj->m_GameStateFlags&GAMESTATEFLAG_GAMEOVER))
{


@ -89,22 +89,22 @@ void CItems::RenderProjectile(const CNetObj_Projectile *pCurrent, int ItemID)
{
m_pClient->m_pEffects->SmokeTrail(Pos, Vel*-1, Alpha);
static float s_Time = 0.0f;
static float s_LastLocalTime = Client()->LocalTime();
static float s_LastLocalTime = LocalTime();
if(Client()->State() == IClient::STATE_DEMOPLAYBACK)
{
const IDemoPlayer::CInfo *pInfo = DemoPlayer()->BaseInfo();
if(!pInfo->m_Paused)
s_Time += (Client()->LocalTime()-s_LastLocalTime)*pInfo->m_Speed;
s_Time += (LocalTime()-s_LastLocalTime)*pInfo->m_Speed;
}
else
{
if(m_pClient->m_Snap.m_pGameInfoObj && !(m_pClient->m_Snap.m_pGameInfoObj->m_GameStateFlags&GAMESTATEFLAG_PAUSED))
s_Time += Client()->LocalTime()-s_LastLocalTime;
s_Time += LocalTime()-s_LastLocalTime;
}
Graphics()->QuadsSetRotation(s_Time*pi*2*2 + ItemID);
s_LastLocalTime = Client()->LocalTime();
s_LastLocalTime = LocalTime();
}
else
{
@ -156,22 +156,22 @@ void CItems::RenderPickup(const CNetObj_Pickup *pPrev, const CNetObj_Pickup *pCu
Graphics()->QuadsSetRotation(Angle);
static float s_Time = 0.0f;
static float s_LastLocalTime = Client()->LocalTime();
static float s_LastLocalTime = LocalTime();
float Offset = Pos.y/32.0f + Pos.x/32.0f;
if(Client()->State() == IClient::STATE_DEMOPLAYBACK)
{
const IDemoPlayer::CInfo *pInfo = DemoPlayer()->BaseInfo();
if(!pInfo->m_Paused)
s_Time += (Client()->LocalTime()-s_LastLocalTime)*pInfo->m_Speed;
s_Time += (LocalTime()-s_LastLocalTime)*pInfo->m_Speed;
}
else
{
if(m_pClient->m_Snap.m_pGameInfoObj && !(m_pClient->m_Snap.m_pGameInfoObj->m_GameStateFlags&GAMESTATEFLAG_PAUSED))
s_Time += Client()->LocalTime()-s_LastLocalTime;
s_Time += LocalTime()-s_LastLocalTime;
}
Pos.x += cosf(s_Time*2.0f+Offset)*2.5f;
Pos.y += sinf(s_Time*2.0f+Offset)*2.5f;
s_LastLocalTime = Client()->LocalTime();
s_LastLocalTime = LocalTime();
Graphics()->RenderQuadContainerAsSprite(m_ItemsQuadContainerIndex, QuadOffset, Pos.x, Pos.y);
}


@ -80,7 +80,7 @@ void CMapLayers::EnvelopeEval(float TimeOffset, int Env, float *pChannels, void
CMapItemEnvelope *pItem = (CMapItemEnvelope *)pThis->m_pLayers->Map()->GetItem(Start+Env, 0, 0);
static float s_Time = 0.0f;
static float s_LastLocalTime = pThis->Client()->LocalTime();
static float s_LastLocalTime = pThis->LocalTime();
if(pThis->Client()->State() == IClient::STATE_DEMOPLAYBACK)
{
const IDemoPlayer::CInfo *pInfo = pThis->DemoPlayer()->BaseInfo();
@ -101,7 +101,7 @@ void CMapLayers::EnvelopeEval(float TimeOffset, int Env, float *pChannels, void
else
{
s_Time = mix(pThis->m_LastLocalTick / (float)pThis->Client()->GameTickSpeed(),
pThis->m_CurrentLocalTick / (float)pThis->Client()->GameTickSpeed(),
pThis->m_CurrentLocalTick / (float)pThis->Client()->GameTickSpeed(),
pThis->Client()->IntraGameTick());
}
}
@ -118,10 +118,10 @@ void CMapLayers::EnvelopeEval(float TimeOffset, int Env, float *pChannels, void
pThis->Client()->IntraGameTick());
}
else
s_Time += pThis->Client()->LocalTime()-s_LastLocalTime;
s_Time += pThis->LocalTime()-s_LastLocalTime;
}
pThis->RenderTools()->RenderEvalEnvelope(pPoints+pItem->m_StartPoint, pItem->m_NumPoints, 4, s_Time+TimeOffset, pChannels);
s_LastLocalTime = pThis->Client()->LocalTime();
s_LastLocalTime = pThis->LocalTime();
}
}


@ -1116,6 +1116,20 @@ int CMenus::Render()
pExtraText = "";
ExtraAlign = -1;
}
#if defined(CONF_VIDEORECORDER)
else if(m_Popup == POPUP_RENDER_DEMO)
{
pTitle = Localize("Render demo");
pExtraText = "";
ExtraAlign = -1;
}
else if(m_Popup == POPUP_REPLACE_VIDEO)
{
pTitle = Localize("Replace video");
pExtraText = Localize("File already exists, do you want to overwrite it?");
ExtraAlign = -1;
}
#endif
else if(m_Popup == POPUP_REMOVE_FRIEND)
{
pTitle = Localize("Remove friend");
@ -1527,6 +1541,176 @@ int CMenus::Render()
static float Offset = 0.0f;
DoEditBox(&Offset, &TextBox, m_aCurrentDemoFile, sizeof(m_aCurrentDemoFile), 12.0f, &Offset);
}
#if defined(CONF_VIDEORECORDER)
else if(m_Popup == POPUP_RENDER_DEMO)
{
CUIRect Label, TextBox, Ok, Abort, IncSpeed, DecSpeed, Button;
Box.HSplitBottom(20.f, &Box, &Part);
#if defined(__ANDROID__)
Box.HSplitBottom(60.f, &Box, &Part);
#else
Box.HSplitBottom(24.f, &Box, &Part);
#endif
Part.VMargin(80.0f, &Part);
Part.VSplitMid(&Abort, &Ok);
Ok.VMargin(20.0f, &Ok);
Abort.VMargin(20.0f, &Abort);
static int s_ButtonAbort = 0;
if(DoButton_Menu(&s_ButtonAbort, Localize("Abort"), 0, &Abort) || m_EscapePressed)
m_Popup = POPUP_NONE;
static int s_ButtonOk = 0;
if(DoButton_Menu(&s_ButtonOk, Localize("Ok"), 0, &Ok) || m_EnterPressed)
{
m_Popup = POPUP_NONE;
// name video
if(m_DemolistSelectedIndex >= 0 && !m_DemolistSelectedIsDir)
{
char aBufOld[512];
str_format(aBufOld, sizeof(aBufOld), "%s/%s", m_aCurrentDemoFolder, m_lDemos[m_DemolistSelectedIndex].m_aFilename);
int Length = str_length(m_aCurrentDemoFile);
char aBufNew[512];
if(Length <= 3 || m_aCurrentDemoFile[Length-4] != '.' || str_comp_nocase(m_aCurrentDemoFile+Length-3, "mp4"))
str_format(aBufNew, sizeof(aBufNew), "%s.mp4", m_aCurrentDemoFile);
else
str_format(aBufNew, sizeof(aBufNew), "%s", m_aCurrentDemoFile);
char aWholePath[1024];
// store the new video filename back into the original buffer
str_copy(m_aCurrentDemoFile, aBufNew, sizeof(m_aCurrentDemoFile));
if(Storage()->FindFile(m_aCurrentDemoFile, "videos", IStorage::TYPE_ALL, aWholePath, sizeof(aWholePath)))
{
PopupMessage(Localize("Error"), Localize("Destination file already exist"), Localize("Ok"));
m_Popup = POPUP_REPLACE_VIDEO;
}
else
{
const char *pError = Client()->DemoPlayer_Render(aBufOld, m_lDemos[m_DemolistSelectedIndex].m_StorageType, m_aCurrentDemoFile, m_Speed);
m_Speed = 4;
//Console()->Print(IConsole::OUTPUT_LEVEL_DEBUG, "demo_render_path", aWholePath);
if(pError)
PopupMessage(Localize("Error"), str_comp(pError, "error loading demo") ? pError : Localize("Error loading demo"), Localize("Ok"));
}
}
}
Box.HSplitBottom(60.f, &Box, &Part);
Box.HSplitBottom(20.f, &Box, &Part);
Part.VSplitLeft(60.0f, 0, &Part);
Part.VSplitLeft(60.0f, 0, &Label);
Part.VSplitMid(&IncSpeed, &DecSpeed);
IncSpeed.VMargin(20.0f, &IncSpeed);
DecSpeed.VMargin(20.0f, &DecSpeed);
Part.VSplitLeft(20.0f, &Button, &Part);
bool IncDemoSpeed = false, DecDemoSpeed = false;
// slowdown
Part.VSplitLeft(5.0f, 0, &Part);
Part.VSplitLeft(Button.h, &Button, &Part);
static int s_SlowDownButton = 0;
if(DoButton_Sprite(&s_SlowDownButton, IMAGE_DEMOBUTTONS, SPRITE_DEMOBUTTON_SLOWER, 0, &Button, CUI::CORNER_ALL))
DecDemoSpeed = true;
// fastforward
Part.VSplitLeft(5.0f, 0, &Part);
Part.VSplitLeft(Button.h, &Button, &Part);
static int s_FastForwardButton = 0;
if(DoButton_Sprite(&s_FastForwardButton, IMAGE_DEMOBUTTONS, SPRITE_DEMOBUTTON_FASTER, 0, &Button, CUI::CORNER_ALL))
IncDemoSpeed = true;
// speed meter
Part.VSplitLeft(15.0f, 0, &Part);
char aBuffer[64];
str_format(aBuffer, sizeof(aBuffer), "Speed: ×%g", g_aSpeeds[m_Speed]);
//str_format(aBuffer, sizeof(aBuffer), "Speed: ×%g", Speed);
UI()->DoLabel(&Part, aBuffer, Button.h*0.7f, -1);
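// step the playback speed index through g_aSpeeds, clamped to the bounds of the table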
if(IncDemoSpeed)
m_Speed = clamp(m_Speed + 1, 0, (int)(sizeof(g_aSpeeds)/sizeof(g_aSpeeds[0])-1));
else if(DecDemoSpeed)
m_Speed = clamp(m_Speed - 1, 0, (int)(sizeof(g_aSpeeds)/sizeof(g_aSpeeds[0])-1));
Part.VSplitLeft(100.0f, 0, &Part);
Part.VSplitLeft(Button.h, &Button, &Part);
if(DoButton_CheckBox(&g_Config.m_ClVideoShowhud, Localize("Show ingame HUD"), g_Config.m_ClVideoShowhud, &Button))
g_Config.m_ClVideoShowhud ^= 1;
Part.VSplitLeft(150.0f, 0, &Part);
Part.VSplitLeft(Button.h, &Button, &Part);
if(DoButton_CheckBox(&g_Config.m_ClVideoShowChat, Localize("Show chat"), g_Config.m_ClVideoShowChat, &Button))
g_Config.m_ClVideoShowChat ^= 1;
Part.VSplitLeft(150.0f, 0, &Part);
Part.VSplitLeft(Button.h, &Button, &Part);
if(DoButton_CheckBox(&g_Config.m_ClVideoSndEnable, Localize("Use sounds"), g_Config.m_ClVideoSndEnable, &Button))
g_Config.m_ClVideoSndEnable ^= 1;
Box.HSplitBottom(20.f, &Box, &Part);
#if defined(__ANDROID__)
Box.HSplitBottom(60.f, &Box, &Part);
#else
Box.HSplitBottom(24.f, &Box, &Part);
#endif
Part.VSplitLeft(60.0f, 0, &Label);
Label.VSplitLeft(120.0f, 0, &TextBox);
TextBox.VSplitLeft(20.0f, 0, &TextBox);
TextBox.VSplitRight(60.0f, &TextBox, 0);
UI()->DoLabel(&Label, Localize("Video name:"), 18.0f, -1);
static float Offset = 0.0f;
DoEditBox(&Offset, &TextBox, m_aCurrentDemoFile, sizeof(m_aCurrentDemoFile), 12.0f, &Offset);
}
else if(m_Popup == POPUP_REPLACE_VIDEO)
{
CUIRect Yes, No;
Box.HSplitBottom(20.f, &Box, &Part);
#if defined(__ANDROID__)
Box.HSplitBottom(60.f, &Box, &Part);
#else
Box.HSplitBottom(24.f, &Box, &Part);
#endif
Part.VMargin(80.0f, &Part);
Part.VSplitMid(&No, &Yes);
Yes.VMargin(20.0f, &Yes);
No.VMargin(20.0f, &No);
static int s_ButtonAbort = 0;
if(DoButton_Menu(&s_ButtonAbort, Localize("No"), 0, &No) || m_EscapePressed)
m_Popup = POPUP_RENDER_DEMO;
static int s_ButtonTryAgain = 0;
if(DoButton_Menu(&s_ButtonTryAgain, Localize("Yes"), 0, &Yes) || m_EnterPressed)
{
m_Popup = POPUP_NONE;
// render video
char aBuf[512];
str_format(aBuf, sizeof(aBuf), "%s/%s", m_aCurrentDemoFolder, m_lDemos[m_DemolistSelectedIndex].m_aFilename);
const char *pError = Client()->DemoPlayer_Render(aBuf, m_lDemos[m_DemolistSelectedIndex].m_StorageType, m_aCurrentDemoFile, m_Speed);
m_Speed = 4;
if(pError)
PopupMessage(Localize("Error"), str_comp(pError, "error loading demo") ? pError : Localize("Error loading demo"), Localize("Ok"));
}
}
#endif
else if(m_Popup == POPUP_REMOVE_FRIEND)
{
CUIRect Yes, No;
@ -1862,7 +2046,7 @@ void CMenus::RenderBackground()
Graphics()->TextureClear();
Graphics()->QuadsBegin();
float Size = 15.0f;
float OffsetTime = fmod(Client()->LocalTime()*0.15f, 2.0f);
float OffsetTime = fmod(LocalTime()*0.15f, 2.0f);
for(int y = -2; y < (int)(sw/Size); y++)
for(int x = -2; x < (int)(sh/Size); x++)
{

View file

@ -228,6 +228,7 @@ class CMenus : public CComponent
int m_DemolistSelectedIndex;
bool m_DemolistSelectedIsDir;
int m_DemolistStorageType;
int m_Speed = 4;
void DemolistOnUpdate(bool Reset);
//void DemolistPopulate();
@ -410,6 +411,8 @@ public:
POPUP_COUNTRY,
POPUP_DELETE_DEMO,
POPUP_RENAME_DEMO,
POPUP_RENDER_DEMO,
POPUP_REPLACE_VIDEO,
POPUP_REMOVE_FRIEND,
POPUP_SOUNDERROR,
POPUP_PASSWORD,

View file

@ -849,7 +849,11 @@ void CMenus::RenderDemoList(CUIRect MainView)
RenderTools()->DrawUIRect(&MainView, ms_ColorTabbarActive, CUI::CORNER_B, 10.0f);
MainView.Margin(10.0f, &MainView);
#if defined(CONF_VIDEORECORDER)
CUIRect ButtonBar, RefreshRect, FetchRect, PlayRect, DeleteRect, RenameRect, RenderRect, LabelRect, ListBox;
#else
CUIRect ButtonBar, RefreshRect, FetchRect, PlayRect, DeleteRect, RenameRect, LabelRect, ListBox;
#endif
MainView.HSplitBottom(ms_ButtonHeight+5.0f, &MainView, &ButtonBar);
ButtonBar.HSplitTop(5.0f, 0, &ButtonBar);
ButtonBar.VSplitRight(110.0f, &ButtonBar, &PlayRect);
@ -861,6 +865,10 @@ void CMenus::RenderDemoList(CUIRect MainView)
ButtonBar.VSplitLeft(10.0f, 0, &ButtonBar);
ButtonBar.VSplitLeft(110.0f, &RenameRect, &ButtonBar);
ButtonBar.VSplitLeft(10.0f, 0, &ButtonBar);
#if defined(CONF_VIDEORECORDER)
ButtonBar.VSplitLeft(110.0f, &RenderRect, &ButtonBar);
ButtonBar.VSplitLeft(10.0f, 0, &ButtonBar);
#endif
ButtonBar.VSplitLeft(110.0f, &LabelRect, &ButtonBar);
MainView.HSplitBottom(140.0f, &ListBox, &MainView);
@ -1299,6 +1307,20 @@ void CMenus::RenderDemoList(CUIRect MainView)
return;
}
}
#if defined(CONF_VIDEORECORDER)
static int s_RenderButton = 0;
if(DoButton_Menu(&s_RenderButton, Localize("Render"), 0, &RenderRect))
{
if(m_DemolistSelectedIndex >= 0)
{
UI()->SetActiveItem(0);
m_Popup = POPUP_RENDER_DEMO;
str_copy(m_aCurrentDemoFile, m_lDemos[m_DemolistSelectedIndex].m_aFilename, sizeof(m_aCurrentDemoFile));
return;
}
}
#endif
}
UI()->DoLabelScaled(&LabelRect, aFooterLabel, 14.0f, -1);

View file

@ -18,7 +18,7 @@ void CMotd::Clear()
bool CMotd::IsActive()
{
return time_get() < m_ServerMotdTime;
return time() < m_ServerMotdTime;
}
void CMotd::OnStateChange(int NewState, int OldState)
@ -90,7 +90,7 @@ void CMotd::OnMessage(int MsgType, void *pRawMsg)
m_pClient->Console()->Print(IConsole::OUTPUT_LEVEL_STANDARD, "motd", pLast, true);
if(m_aServerMotd[0] && g_Config.m_ClMotdTime)
m_ServerMotdTime = time_get()+time_freq()*g_Config.m_ClMotdTime;
m_ServerMotdTime = time()+time_freq()*g_Config.m_ClMotdTime;
else
m_ServerMotdTime = 0;
}

View file

@ -142,7 +142,7 @@ void CParticles::OnRender()
set_new_tick();
static int64 LastTime = 0;
int64 t = time_get();
int64 t = time();
if(Client()->State() == IClient::STATE_DEMOPLAYBACK)
{

View file

@ -264,11 +264,11 @@ void CPlayers::RenderPlayer(
if(!InAir && WantOtherDir && length(Vel*50) > 500.0f)
{
static int64 SkidSoundTime = 0;
if(time_get()-SkidSoundTime > time_freq()/10)
if(time()-SkidSoundTime > time_freq()/10)
{
if(g_Config.m_SndGame)
m_pClient->m_pSounds->PlayAt(CSounds::CHN_WORLD, SOUND_PLAYER_SKID, 0.25f, Position);
SkidSoundTime = time_get();
SkidSoundTime = time();
}
m_pClient->m_pEffects->SkidTrail(
@ -279,7 +279,11 @@ void CPlayers::RenderPlayer(
// draw gun
{
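// while a video is being rendered, cl_video_showhookcollother replaces cl_showhookcollother for other players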
#if defined(CONF_VIDEORECORDER)
if(ClientID >= 0 && ((GameClient()->m_GameInfo.m_AllowHookColl && g_Config.m_ClShowHookCollAlways) || (Player.m_PlayerFlags&PLAYERFLAG_AIM && ((!Local && ((!IVideo::Current()&&g_Config.m_ClShowHookCollOther)||(IVideo::Current()&&g_Config.m_ClVideoShowHookCollOther))) || (Local && g_Config.m_ClShowHookCollOwn)))))
#else
if(ClientID >= 0 && ((GameClient()->m_GameInfo.m_AllowHookColl && g_Config.m_ClShowHookCollAlways) || (Player.m_PlayerFlags&PLAYERFLAG_AIM && ((!Local && g_Config.m_ClShowHookCollOther) || (Local && g_Config.m_ClShowHookCollOwn)))))
#endif
{
vec2 ExDirection = Direction;
@ -528,7 +532,11 @@ void CPlayers::RenderPlayer(
Graphics()->SetColor(1.0f, 1.0f, 1.0f, Alpha);
Graphics()->QuadsSetRotation(0);
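// while a video is being rendered, cl_video_showdirection replaces cl_showdirection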
#if defined(CONF_VIDEORECORDER)
if(((!IVideo::Current()&&g_Config.m_ClShowDirection)||(IVideo::Current()&&g_Config.m_ClVideoShowDirection)) && ClientID >= 0 && (!Local || DemoPlayer()->IsPlaying()))
#else
if(g_Config.m_ClShowDirection && ClientID >= 0 && (!Local || DemoPlayer()->IsPlaying()))
#endif
{
if(Player.m_Direction == -1)
{

View file

@ -4,6 +4,7 @@
#include <base/system.h>
#include <base/math.h>
#include <ctime>
#include <engine/graphics.h>
#include <engine/storage.h>
@ -156,7 +157,7 @@ void CSkins::OnInit()
{
time_t rawtime;
struct tm* timeinfo;
time(&rawtime);
std::time(&rawtime);
timeinfo = localtime(&rawtime);
if(timeinfo->tm_mon == 11 && timeinfo->tm_mday >= 24 && timeinfo->tm_mday <= 26)
{ // Christmas

View file

@ -126,7 +126,7 @@ void CSounds::OnRender()
// play sound from queue
if(m_QueuePos > 0)
{
int64 Now = time_get();
int64 Now = time();
if(m_QueueWaitTime <= Now)
{
Play(m_aQueue[0].m_Channel, m_aQueue[0].m_SetId, 1.0f);
@ -141,7 +141,7 @@ void CSounds::ClearQueue()
{
mem_zero(m_aQueue, sizeof(m_aQueue));
m_QueuePos = 0;
m_QueueWaitTime = time_get();
m_QueueWaitTime = time();
}
void CSounds::Enqueue(int Channel, int SetId)

View file

@ -193,7 +193,7 @@ void CVoting::OnMessage(int MsgType, void *pRawMsg)
OnReset();
str_copy(m_aDescription, pMsg->m_pDescription, sizeof(m_aDescription));
str_copy(m_aReason, pMsg->m_pReason, sizeof(m_aReason));
m_Closetime = time_get() + time_freq() * pMsg->m_Timeout;
m_Closetime = time() + time_freq() * pMsg->m_Timeout;
}
else
OnReset();

View file

@ -50,7 +50,7 @@ public:
void Vote(int v); // -1 = no, 1 = yes
int SecondsLeft() { return (m_Closetime - time_get())/time_freq(); }
int SecondsLeft() { return (m_Closetime - time())/time_freq(); }
bool IsVoting() { return m_Closetime != 0; }
int TakenChoice() const { return m_Voted; }
const char *VoteDescription() const { return m_aDescription; }

View file

@ -164,6 +164,16 @@ MACRO_CONFIG_INT(SvSendVotesPerTick, sv_send_votes_per_tick, 5, 1, 15, CFGFLAG_S
MACRO_CONFIG_INT(SvRescue, sv_rescue, 0, 0, 1, CFGFLAG_SERVER, "Allow /rescue command so players can teleport themselves out of freeze")
MACRO_CONFIG_INT(SvRescueDelay, sv_rescue_delay, 5, 0, 1000, CFGFLAG_SERVER, "Number of seconds between two rescues")
#if defined(CONF_VIDEORECORDER)
MACRO_CONFIG_INT(ClVideoShowhud, cl_video_showhud, 0, 0, 1, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Show ingame HUD when rendering video")
MACRO_CONFIG_INT(ClVideoShowChat, cl_video_showchat, 1, 0, 1, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Show chat when rendering video")
MACRO_CONFIG_INT(ClVideoSndEnable, cl_video_sound_enable, 1, 0, 1, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Use sound when rendering video")
MACRO_CONFIG_INT(ClVideoShowHookCollOther, cl_video_showhookcollother, 0, 0, 1, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Show other players' hook collision lines when rendering video")
MACRO_CONFIG_INT(ClVideoShowDirection, cl_video_showdirection, 0, 0, 1, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Show other players' key presses when rendering video")
MACRO_CONFIG_INT(ClVideoX264Crf, cl_video_crf, 22, 0, 51, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Set the CRF value used when encoding video with libx264")
MACRO_CONFIG_INT(ClVideoX264Preset, cl_video_preset, 6, 0, 9, CFGFLAG_CLIENT|CFGFLAG_SAVE, "Set the preset used when encoding video with libx264; default is 6 (slow), 0 is ultrafast, 9 is placebo (the slowest, not recommended)")
#endif
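// illustrative (assumed) console/config usage for tuning a render; values are examples only:
//   cl_video_showhud 1
//   cl_video_sound_enable 1
//   cl_video_crf 18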
// debug
#ifdef CONF_DEBUG // this one can crash the server if not used correctly
MACRO_CONFIG_INT(DbgDummies, dbg_dummies, 0, 0, 15, CFGFLAG_SERVER, "")