ddnet/src/engine/client/video.h

#ifndef ENGINE_CLIENT_VIDEO_H
#define ENGINE_CLIENT_VIDEO_H
#if defined(__ANDROID__)
#define GL_GLEXT_PROTOTYPES
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <GL/glu.h>
#define glOrtho glOrthof
#else
#include "SDL_opengl.h"
#if defined(CONF_PLATFORM_MACOSX)
#include "OpenGL/glu.h"
#else
#include "GL/glu.h"
#endif
#endif
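// the FFmpeg headers are plain C, so they need C linkage when included from C++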
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
}
#include <base/system.h>
#include <engine/shared/video.h>
#include <engine/shared/demo.h>
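// length of the intermediate audio buffer in sample frames (m_aBuffer below holds ALEN interleaved stereo frames)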
#define ALEN 2048
// a wrapper around a single output AVStream
typedef struct OutputStream {
AVStream *pSt;
AVCodecContext *pEnc;
/* pts of the next frame that will be generated */
int64 NextPts;
int SamplesCount;
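/* frame handed to the encoder and a temporary frame holding the not-yet-converted input */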
AVFrame *pFrame;
AVFrame *pTmpFrame;
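/* software scaling (video) and resampling (audio) contexts */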
struct SwsContext *pSwsCtx;
struct SwrContext *pSwrCtx;
} OutputStream;
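// IVideo implementation (interface in engine/shared/video.h) that encodes the client's
// rendered frames and mixed audio into a video file through the FFmpeg libav* libraries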
class CVideo : public IVideo
{
public:
CVideo(class CGraphics_Threaded *pGraphics, class IStorage *pStorage, class IConsole *pConsole, int Width, int Height, const char *pName);
~CVideo();
virtual void Start();
virtual void Stop();
virtual void Pause(bool Pause);
virtual bool IsRecording() { return m_Recording; }
virtual void NextVideoFrame();
virtual void NextVideoFrameThread();
virtual bool FrameRendered() { return !m_NextFrame; }
virtual void NextAudioFrame(void (*Mix)(short *pFinalOut, unsigned Frames));
virtual void NextAudioFrameTimeline();
virtual bool AudioFrameRendered() { return !m_NextAudioFrame; }
static IVideo* Current() { return IVideo::ms_pCurrentVideo; }
static void Init() { av_log_set_level(AV_LOG_DEBUG); }
private:
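// frame capture and libav encoding helpers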
void FillVideoFrame();
void ReadRGBFromGL();
void FillAudioFrame();
void OpenVideo();
void OpenAudio();
AVFrame *AllocPicture(enum AVPixelFormat PixFmt, int Width, int Height);
AVFrame* AllocAudioFrame(enum AVSampleFormat SampleFmt, uint64 ChannelLayout, int SampleRate, int NbSamples);
void WriteFrame(OutputStream* pStream);
void FinishFrames(OutputStream* pStream);
void CloseStream(OutputStream *pStream);
void AddStream(OutputStream *pStream, AVFormatContext *pOC, AVCodec **ppCodec, enum AVCodecID CodecId);
class CGraphics_Threaded* m_pGraphics;
class IStorage* m_pStorage;
class IConsole* m_pConsole;
int m_Width;
int m_Height;
char m_Name[256];
//FILE *m_dbgfile;
int m_Vseq;
short m_aBuffer[ALEN*2];
int m_Vframe;
int m_FPS;
bool m_Started;
bool m_Recording;
bool m_ProcessingVideoFrame;
bool m_ProcessingAudioFrame;
bool m_NextFrame;
bool m_NextAudioFrame;
bool m_HasAudio;
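// pixel readback buffer for ReadRGBFromGL()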
GLubyte* m_pPixels;
OutputStream m_VideoStream;
OutputStream m_AudioStream;
AVCodec* m_VideoCodec;
AVCodec* m_AudioCodec;
AVDictionary* m_pOptDict;
AVFormatContext* m_pFormatContext;
AVOutputFormat* m_pFormat;
uint8_t* m_pRGB;
int m_SndBufferSize;
};
#endif