#ifndef ENGINE_CLIENT_VIDEO_H
#define ENGINE_CLIENT_VIDEO_H
#if defined(__ANDROID__)
#define GL_GLEXT_PROTOTYPES
#include <GLES/gl.h>
#include <GLES/glext.h>
#include <GL/glu.h>
#define glOrtho glOrthof
#else
#include "SDL_opengl.h"
#if defined(CONF_PLATFORM_MACOSX)
#include "OpenGL/glu.h"
#else
#include "GL/glu.h"
#endif
#endif
extern "C"
{
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
#include <libavutil/imgutils.h>
#include <libavutil/opt.h>
#include <libswscale/swscale.h>
#include <libswresample/swresample.h>
};
#include <base/system.h>
#include <engine/shared/video.h>
// a wrapper around a single output AVStream
// (follows the OutputStream helper pattern from the FFmpeg muxing example;
// one instance per output stream — video and audio each get their own)
typedef struct OutputStream {
	AVStream *st;        // muxer stream this encoder feeds
	AVCodecContext *enc; // encoder context for this stream

	/* pts of the next frame that will be generated */
	int64_t next_pts;
	int samples_count; // running audio-sample counter — presumably used to derive audio pts; verify in video.cpp

	AVFrame *frame;     // frame in the encoder's native format
	AVFrame *tmp_frame; // staging frame — presumably holds the raw captured data before conversion

	struct SwsContext *sws_ctx; // libswscale pixel-format/scaling converter (video path)
	struct SwrContext *swr_ctx; // libswresample sample-format/rate converter (audio path)
} OutputStream;
2016-08-27 15:51:23 +00:00
// Video recorder for the client: reads rendered frames back from OpenGL and
// encodes them (plus the game's audio samples) into a video file via FFmpeg
// (libavcodec/libavformat/libswscale/libswresample).
class CVideo : public IVideo
{
public:
	// width/height: capture resolution in pixels; name: output file name
	// (copied into a fixed 256-byte buffer, so it is truncated beyond that).
	CVideo(class CGraphics_Threaded* pGraphics, class IStorage* pStorage, class IConsole *pConsole, int width, int height, const char *name);
	~CVideo();

	// Begin/end a recording session.
	virtual void start();
	virtual void stop();

	// Submit the next rendered frame; the *_thread variant is presumably the
	// encoder-thread side of the handoff — confirm against video.cpp.
	virtual void nextVideoFrame();
	virtual void nextVideoFrame_thread();
	// True once no frame is pending (m_NextFrame cleared), i.e. the previously
	// submitted frame has been picked up.
	virtual bool frameRendered() { return !m_NextFrame; };

	// Submit a chunk of interleaved 16-bit audio samples for encoding.
	virtual void nextAudioFrame(short* pData);

	// The currently active recorder, if any (shared via IVideo::ms_pCurrentVideo).
	static IVideo* Current() { return IVideo::ms_pCurrentVideo; }

	// One-time global FFmpeg initialization; must run before creating a CVideo.
	// NOTE(review): AV_LOG_DEBUG makes FFmpeg extremely verbose — confirm this
	// log level is intended outside of debugging.
	static void Init() { av_log_set_level(AV_LOG_DEBUG); avcodec_register_all(); av_register_all(); }

private:
	// --- encoding pipeline helpers (implemented in video.cpp) ---
	void fill_video_frame();  // convert captured pixels into the encoder's video frame
	void read_rgb_from_gl();  // read the current framebuffer back from OpenGL
	void fill_audio_frame();  // convert buffered samples into the encoder's audio frame

	void open_video();
	void open_audio();
	AVFrame *alloc_picture(enum AVPixelFormat pix_fmt, int width, int height);
	AVFrame* alloc_audio_frame(enum AVSampleFormat sample_fmt, uint64_t channel_layout, int sample_rate, int nb_samples);
	void write_frame(OutputStream* pStream);
	void finish_frames(OutputStream* pStream); // flush delayed frames out of the encoder
	void close_stream(OutputStream *ost);
	void add_stream(OutputStream *ost, AVFormatContext *oc, AVCodec **codec, enum AVCodecID codec_id);

	// Engine subsystems (not owned).
	class CGraphics_Threaded* m_pGraphics;
	class IStorage* m_pStorage;
	class IConsole* m_pConsole;

	int m_Width;       // capture width in pixels
	int m_Height;      // capture height in pixels
	char m_Name[256];  // output file name

	int m_FPS;
	bool m_Started;
	bool m_Recording;
	bool m_ProcessingVideoFrame;
	bool m_ProcessingAudioFrame;
	bool m_NextFrame;  // set while a submitted frame is pending; frameRendered() reads it

	bool m_HasAudio;

	GLubyte* m_pPixels; // raw OpenGL readback buffer

	OutputStream m_VideoStream;
	OutputStream m_AudioStream;
	AVCodec* m_VideoCodec;
	AVCodec* m_AudioCodec;
	AVDictionary* m_pOptDict;
	AVFormatContext* m_pFormatContext;
	AVOutputFormat* m_pFormat;

	uint8_t* m_pRGB;      // RGB frame data handed to the scaler/encoder
	int m_SndBufferSize;  // size of the audio staging buffer
};
#endif