Some refactoring of the ffcpp code (replaced some stack-allocated objects with smart pointers). Added a project for the player example.

This commit is contained in:
selim mustafaev 2016-11-19 07:04:37 +03:00
parent 9cf1dbd545
commit 94d70b6ce6
18 changed files with 348 additions and 77 deletions
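Editor's note (not part of the commit): the diffs below replace object handles that were returned by reference, and stored by value inside std::vector, with std::shared_ptr typedefs (CodecPtr, StreamPtr), so callers switch from '.' to '->' and the returned handles remain valid even if the owning container reallocates. A minimal self-contained sketch of that pattern, using stand-in classes that only mirror the ffcpp names appearing in the diffs:

#include <memory>
#include <vector>

// Stand-ins for ffcpp::Codec / ffcpp::Stream / ffcpp::MediaFile; the real classes
// wrap FFmpeg state, this sketch only shows the ownership pattern.
class Codec { /* ... */ };
typedef std::shared_ptr<Codec> CodecPtr;

class Stream {
    CodecPtr _codec = std::make_shared<Codec>();  // was: Codec _codec; (member object)
public:
    CodecPtr codec() { return _codec; }           // was: Codec& codec();
};
typedef std::shared_ptr<Stream> StreamPtr;

class MediaFile {
    std::vector<StreamPtr> _streams;              // was: std::vector<Stream>
public:
    StreamPtr addStream() {                       // hypothetical stand-in; the diff's addVideoStream/addAudioStream used to return _streams.back() by reference
        auto s = std::make_shared<Stream>();
        _streams.emplace_back(s);
        return s;                                 // stays valid even if _streams reallocates
    }
};

int main() {
    MediaFile file;
    auto stream = file.addStream();               // StreamPtr
    auto codec = stream->codec();                 // CodecPtr; note '->' instead of '.'
}

The changes to MediaFile, Stream, Codec, Resampler, and Scaler in the diffs below are the actual version of this refactoring.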

View File

@@ -0,0 +1,163 @@
# Locate SDL2 library
# This module defines
# SDL2_LIBRARY, the name of the library to link against
# SDL2_FOUND, if false, do not try to link to SDL2
# SDL2_INCLUDE_DIR, where to find SDL.h
#
# This module responds to the flag:
# SDL2_BUILDING_LIBRARY
# If this is defined, then no SDL2main will be linked in because
# only applications need main().
# Otherwise, it is assumed you are building an application and this
# module will attempt to locate and set the proper link flags
# as part of the returned SDL2_LIBRARY variable.
#
# Don't forget to include SDLmain.h and SDLmain.m in your project for the
# OS X framework based version. (Other versions link to -lSDL2main which
# this module will try to find on your behalf.) Also for OS X, this
# module will automatically add the -framework Cocoa on your behalf.
#
#
# Additional Note: If you see an empty SDL2_LIBRARY_TEMP in your configuration
# and no SDL2_LIBRARY, it means CMake did not find your SDL2 library
# (SDL2.dll, libsdl2.so, SDL2.framework, etc).
# Set SDL2_LIBRARY_TEMP to point to your SDL2 library, and configure again.
# Similarly, if you see an empty SDL2MAIN_LIBRARY, you should set this value
# as appropriate. These values are used to generate the final SDL2_LIBRARY
# variable, but when these values are unset, SDL2_LIBRARY does not get created.
#
#
# $SDL2DIR is an environment variable that would
# correspond to the ./configure --prefix=$SDL2DIR
# used in building SDL2.
# l.e.galup 9-20-02
#
# Modified by Eric Wing.
# Added code to assist with automated building by using environmental variables
# and providing a more controlled/consistent search behavior.
# Added new modifications to recognize OS X frameworks and
# additional Unix paths (FreeBSD, etc).
# Also corrected the header search path to follow "proper" SDL guidelines.
# Added a search for SDL2main which is needed by some platforms.
# Added a search for threads which is needed by some platforms.
# Added needed compile switches for MinGW.
#
# On OSX, this will prefer the Framework version (if found) over others.
# People will have to manually change the cache values of
# SDL2_LIBRARY to override this selection or set the CMake environment
# CMAKE_INCLUDE_PATH to modify the search paths.
#
# Note that the header path has changed from SDL2/SDL.h to just SDL.h
# This needed to change because "proper" SDL convention
# is #include "SDL.h", not <SDL2/SDL.h>. This is done for portability
# reasons because not all systems place things in SDL2/ (see FreeBSD).
#=============================================================================
# Copyright 2003-2009 Kitware, Inc.
#
# Distributed under the OSI-approved BSD License (the "License");
# see accompanying file Copyright.txt for details.
#
# This software is distributed WITHOUT ANY WARRANTY; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the License for more information.
#=============================================================================
# (To distribute this file outside of CMake, substitute the full
# License text for the above reference.)
SET(SDL2_SEARCH_PATHS
~/Library/Frameworks
/Library/Frameworks
/usr/local
/usr
/sw # Fink
/opt/local # DarwinPorts
/opt/csw # Blastwave
/opt
)
FIND_PATH(SDL2_INCLUDE_DIR SDL.h
HINTS
$ENV{SDL2DIR}
PATH_SUFFIXES include/SDL2 include
PATHS ${SDL2_SEARCH_PATHS}
)
FIND_LIBRARY(SDL2_LIBRARY_TEMP
NAMES SDL2
HINTS
$ENV{SDL2DIR}
PATH_SUFFIXES lib64 lib
PATHS ${SDL2_SEARCH_PATHS}
)
IF(NOT SDL2_BUILDING_LIBRARY)
IF(NOT ${SDL2_INCLUDE_DIR} MATCHES ".framework")
# Non-OS X framework versions expect you to also dynamically link to
# SDL2main. This is mainly for Windows and OS X. Other (Unix) platforms
# seem to provide SDL2main for compatibility even though they don't
# necessarily need it.
FIND_LIBRARY(SDL2MAIN_LIBRARY
NAMES SDL2main
HINTS
$ENV{SDL2DIR}
PATH_SUFFIXES lib64 lib
PATHS ${SDL2_SEARCH_PATHS}
)
ENDIF(NOT ${SDL2_INCLUDE_DIR} MATCHES ".framework")
ENDIF(NOT SDL2_BUILDING_LIBRARY)
# SDL2 may require threads on your system.
# The Apple build may not need an explicit flag because one of the
# frameworks may already provide it.
# But for non-OSX systems, I will use the CMake Threads package.
IF(NOT APPLE)
FIND_PACKAGE(Threads)
ENDIF(NOT APPLE)
# MinGW needs an additional library, mwindows
# Its total link flags should look like -lmingw32 -lSDL2main -lSDL2 -lmwindows
# (Actually on second look, I think it only needs one of the m* libraries.)
IF(MINGW)
SET(MINGW32_LIBRARY mingw32 CACHE STRING "mwindows for MinGW")
ENDIF(MINGW)
IF(SDL2_LIBRARY_TEMP)
# For SDL2main
IF(NOT SDL2_BUILDING_LIBRARY)
IF(SDL2MAIN_LIBRARY)
SET(SDL2_LIBRARY_TEMP ${SDL2MAIN_LIBRARY} ${SDL2_LIBRARY_TEMP})
ENDIF(SDL2MAIN_LIBRARY)
ENDIF(NOT SDL2_BUILDING_LIBRARY)
# For OS X, SDL2 uses Cocoa as a backend so it must link to Cocoa.
# CMake doesn't display the -framework Cocoa string in the UI even
# though it actually is there if I modify a pre-used variable.
# I think it has something to do with the CACHE STRING.
# So I use a temporary variable until the end so I can set the
# "real" variable in one-shot.
IF(APPLE)
SET(SDL2_LIBRARY_TEMP ${SDL2_LIBRARY_TEMP} "-framework Cocoa")
ENDIF(APPLE)
# For threads, as mentioned Apple doesn't need this.
# In fact, there seems to be a problem if I used the Threads package
# and try using this line, so I'm just skipping it entirely for OS X.
IF(NOT APPLE)
SET(SDL2_LIBRARY_TEMP ${SDL2_LIBRARY_TEMP} ${CMAKE_THREAD_LIBS_INIT})
ENDIF(NOT APPLE)
# For MinGW library
IF(MINGW)
SET(SDL2_LIBRARY_TEMP ${MINGW32_LIBRARY} ${SDL2_LIBRARY_TEMP})
ENDIF(MINGW)
# Set the final string here so the GUI reflects the final state.
SET(SDL2_LIBRARY ${SDL2_LIBRARY_TEMP} CACHE STRING "Where the SDL2 Library can be found")
# Set the temp variable to INTERNAL so it is not seen in the CMake GUI
SET(SDL2_LIBRARY_TEMP "${SDL2_LIBRARY_TEMP}" CACHE INTERNAL "")
ENDIF(SDL2_LIBRARY_TEMP)
INCLUDE(FindPackageHandleStandardArgs)
FIND_PACKAGE_HANDLE_STANDARD_ARGS(SDL2 REQUIRED_VARS SDL2_LIBRARY SDL2_INCLUDE_DIR)

View File

@@ -7,3 +7,13 @@ project(ffPreview)
 add_executable(ffPreview ffPreview.cpp)
 add_dependencies(ffPreview ffcpp)
 target_link_libraries(ffPreview ffcpp)
+project(ffPlayer)
+find_package(SDL2 REQUIRED)
+if(NOT SDL2_FOUND)
+message(FATAL_ERROR "SDL2 not found")
+endif()
+include_directories(${SDL2_INCLUDE_DIR})
+add_executable(ffPlayer ffPlayer.cpp)
+add_dependencies(ffPlayer ffcpp)
+target_link_libraries(ffPlayer ffcpp ${SDL2_LIBRARY})

View File

@@ -13,11 +13,11 @@ constexpr int VIDEO_HEIGHT = 480;
 namespace ff = ffcpp;
-void flushEncoder(ff::MediaFile& file, ff::Codec& encoder, const ff::Stream& inStream, const ff::Stream& outStream,
+void flushEncoder(ff::MediaFile& file, ff::CodecPtr encoder, ff::StreamPtr inStream, const ff::StreamPtr outStream,
 int streamIndex, AVRational srcTimeBase, AVRational dstTimeBase) {
-if(encoder.capabilities() & AV_CODEC_CAP_DELAY) {
+if(encoder->capabilities() & AV_CODEC_CAP_DELAY) {
 while (1) {
-auto packet = encoder.encode(nullptr);
+auto packet = encoder->encode(nullptr);
 if(!packet) break;
 packet.setStreamIndex(streamIndex);
@@ -32,24 +32,24 @@ int main(int argc, char** argv) {
 ff::MediaFile input(argv[1], ff::Mode::Read);
 ff::MediaFile output(argv[2], ff::Mode::Write);
-ff::Stream& vStream = input.videoStream();
-ff::Stream& aStream = input.audioStream();
-ff::Codec& vDecoder = vStream.codec();
-ff::Codec& aDecoder = aStream.codec();
-double aspect = 1.0*vDecoder.width()/vDecoder.height();
+auto vStream = input.videoStream();
+auto aStream = input.audioStream();
+auto vDecoder = vStream->codec();
+auto aDecoder = aStream->codec();
+double aspect = 1.0*vDecoder->width()/vDecoder->height();
 int outHeight = (int)(VIDEO_WIDTH/aspect) & ~1;
-ff::Stream& outVStream = output.addVideoStream(AV_CODEC_ID_H264, VIDEO_WIDTH, outHeight, vDecoder.timeBase(), AV_PIX_FMT_YUV420P);
-ff::Codec& vEncoder = outVStream.codec();
-ff::Stream& outAStream = output.addAudioStream(AV_CODEC_ID_VORBIS, 2, 44100, AV_SAMPLE_FMT_FLTP);
-ff::Codec& aEncoder = outAStream.codec();
+auto outVStream = output.addVideoStream(AV_CODEC_ID_H264, VIDEO_WIDTH, outHeight, vDecoder->timeBase(), AV_PIX_FMT_YUV420P);
+auto vEncoder = outVStream->codec();
+auto outAStream = output.addAudioStream(AV_CODEC_ID_VORBIS, 2, 44100, AV_SAMPLE_FMT_FLTP);
+auto aEncoder = outAStream->codec();
 output.writeHeader();
 int64_t aPts = 0;
-ff::FifoQueue fifo(aEncoder.sampleFormat(), aEncoder.channels(), aEncoder.frameSize());
+ff::FifoQueue fifo(aEncoder->sampleFormat(), aEncoder->channels(), aEncoder->frameSize());
 ff::Scaler scaler(vDecoder, vEncoder);
 ff::Resampler resampler(aDecoder, aEncoder);
 bool needScaling = ff::Scaler::needScaling(vDecoder, vEncoder);
@@ -57,38 +57,38 @@ int main(int argc, char** argv) {
 while(auto packet = input.readPacket()) {
 AVMediaType packetType = input.packetType(packet);
 if(packetType == AVMEDIA_TYPE_AUDIO) {
-auto frame = aDecoder.decode(packet);
+auto frame = aDecoder->decode(packet);
 if(needResampling)
 frame = resampler.resample(frame);
 fifo.addSamples(frame);
 // FIXME: we're losing last samples in case when fifo queue isn't full enough for encoder
 while(fifo.enoughSamples()) {
-auto frame = aEncoder.createAudioFrame();
+auto frame = aEncoder->createAudioFrame();
 fifo.readFrame(frame);
 frame.setPts(aPts);
 aPts += frame.samplesCount();
-auto encPacket = aEncoder.encode(frame);
+auto encPacket = aEncoder->encode(frame);
 if(!encPacket) continue;
 encPacket.setStreamIndex(AUDIO_STREAM_INDEX);
-encPacket.rescaleTimestamps(aEncoder.timeBase(), outAStream.timeBase());
+encPacket.rescaleTimestamps(aEncoder->timeBase(), outAStream->timeBase());
 output.writePacket(encPacket);
 }
 } else if(packetType == AVMEDIA_TYPE_VIDEO) {
-auto frame = vDecoder.decode(packet);
+auto frame = vDecoder->decode(packet);
 if(needScaling)
 frame = scaler.scale(frame);
 frame.setPictureType(AV_PICTURE_TYPE_NONE);
-auto encPacket = vEncoder.encode(frame);
+auto encPacket = vEncoder->encode(frame);
 if(!encPacket) continue;
 encPacket.setStreamIndex(VIDEO_STREAM_INDEX);
-encPacket.rescaleTimestamps(vStream.timeBase(), outVStream.timeBase());
+encPacket.rescaleTimestamps(vStream->timeBase(), outVStream->timeBase());
 output.writePacket(encPacket);
 }
 }
-flushEncoder(output, vEncoder, vStream, outVStream, VIDEO_STREAM_INDEX, vStream.timeBase(), outVStream.timeBase());
-flushEncoder(output, aEncoder, aStream, outAStream, AUDIO_STREAM_INDEX, aEncoder.timeBase(), outAStream.timeBase());
+flushEncoder(output, vEncoder, vStream, outVStream, VIDEO_STREAM_INDEX, vStream->timeBase(), outVStream->timeBase());
+flushEncoder(output, aEncoder, aStream, outAStream, AUDIO_STREAM_INDEX, aEncoder->timeBase(), outAStream->timeBase());
 output.writeTrailer();
 return 0;

examples/ffPlayer.cpp (new file)
View File

@@ -0,0 +1,16 @@
#include "ffcpp/Player.h"
#include <memory>
namespace ff = ffcpp;
class SDLWindow: public ff::IVideoSink {
};
int main(int argc, char** argv) {
auto wnd = std::make_shared<SDLWindow>();
ff::Player player(wnd);
player.setMedia(argv[1]);
return 0;
}

View File

@@ -11,11 +11,11 @@ int main(int argc, char** argv) {
 ff::MediaFile input(argv[1], ff::Mode::Read);
 ff::MediaFile output(argv[2], ff::Mode::Write);
-ff::Stream& vStream = input.videoStream();
-ff::Codec& vDecoder = vStream.codec();
-ff::Stream& outVStream = output.addVideoStream(AV_CODEC_ID_PNG, vDecoder.width(), vDecoder.height(), vStream.timeBase(), AV_PIX_FMT_RGB24);
-ff::Codec& vEncoder = outVStream.codec();
+auto vStream = input.videoStream();
+auto vDecoder = vStream->codec();
+auto outVStream = output.addVideoStream(AV_CODEC_ID_PNG, vDecoder->width(), vDecoder->height(), vStream->timeBase(), AV_PIX_FMT_RGB24);
+auto vEncoder = outVStream->codec();
 output.writeHeader();
@@ -24,15 +24,15 @@ int main(int argc, char** argv) {
 while(auto packet = input.readPacket()) {
 AVMediaType packetType = input.packetType(packet);
 if(packetType == AVMEDIA_TYPE_VIDEO) {
-auto frame = vDecoder.decode(packet);
+auto frame = vDecoder->decode(packet);
 if(frame.isKeyFrame() && (frame.pts() > 0 || KEY_FRAME_TO_SAVE == 0)) {
 if(curKeyFrame == KEY_FRAME_TO_SAVE) {
 frame = scaler.scale(frame);
 frame.setPictureType(AV_PICTURE_TYPE_NONE);
-auto encPacket = vEncoder.encode(frame);
+auto encPacket = vEncoder->encode(frame);
 if(!encPacket) continue;
 encPacket.setStreamIndex(0);
-encPacket.rescaleTimestamps(vStream.timeBase(), outVStream.timeBase());
+encPacket.rescaleTimestamps(vStream->timeBase(), outVStream->timeBase());
 output.writePacket(encPacket);
 break;
 } else {

View File

@@ -9,6 +9,8 @@ extern "C" {
 #include <libavformat/avformat.h>
 }
+#include <memory>
 namespace ffcpp {
 enum class CodecType {
@@ -16,7 +18,9 @@ namespace ffcpp {
 Decoder
 };
-class Codec: public non_copyable {
+typedef std::shared_ptr<class Codec> CodecPtr;
+class Codec {
 private:
 AVCodec* _codec;
 AVCodecContext* _codecCtx;
@@ -38,6 +42,7 @@ namespace ffcpp {
 int frameSize() const;
 int channels() const;
 int sampleRate() const;
+int channelLayout() const;
 void setWidth(int width);
 void setHeight(int height);

View File

@@ -3,6 +3,7 @@
 #include "Stream.h"
 #include "Packet.h"
+#include <memory>
 extern "C" {
 #include <libavformat/avformat.h>
@@ -10,6 +11,7 @@ extern "C" {
 #include <string>
 #include <vector>
+#include <bits/shared_ptr.h>
 namespace ffcpp {
@@ -22,7 +24,7 @@ namespace ffcpp {
 private:
 AVFormatContext* _formatCtx;
 Mode _mode;
-std::vector<Stream> _streams;
+std::vector<StreamPtr> _streams;
 public:
 MediaFile(const std::string& src, Mode mode);
@@ -31,10 +33,10 @@ namespace ffcpp {
 bool hasVideo() const;
 bool hasAudio() const;
-Stream& videoStream(size_t index = 0);
-Stream& audioStream(size_t index = 0);
-Stream& addVideoStream(AVCodecID codecID, int width, int height, AVRational timeBase, AVPixelFormat pixelFormat = AV_PIX_FMT_NONE);
-Stream& addAudioStream(AVCodecID codecID, int channels, int sampleRate, AVSampleFormat sampleFormat = AV_SAMPLE_FMT_NONE);
+StreamPtr videoStream(size_t index = 0);
+StreamPtr audioStream(size_t index = 0);
+StreamPtr addVideoStream(AVCodecID codecID, int width, int height, AVRational timeBase, AVPixelFormat pixelFormat = AV_PIX_FMT_NONE);
+StreamPtr addAudioStream(AVCodecID codecID, int channels, int sampleRate, AVSampleFormat sampleFormat = AV_SAMPLE_FMT_NONE);
 Packet readPacket();
 AVMediaType packetType(const Packet& packet);
@@ -46,7 +48,7 @@ namespace ffcpp {
 private:
 bool hasStream(AVMediaType type) const;
-Stream& getStream(AVMediaType type, size_t index);
+StreamPtr getStream(AVMediaType type, size_t index);
 };
 }

include/ffcpp/Player.h (new file)
View File

@@ -0,0 +1,30 @@
#ifndef PROJECT_PLAYER_H
#define PROJECT_PLAYER_H
#include "ffcpp/MediaFile.h"
#include <memory>
namespace ffcpp {
struct IVideoSink {
};
class Player {
private:
std::shared_ptr<IVideoSink> _vSink;
std::unique_ptr<MediaFile> _curMedia;
StreamPtr _aStream;
StreamPtr _vStream;
public:
Player(std::shared_ptr<IVideoSink> vSink);
~Player();
void setMedia(std::string path);
void play();
};
}
#endif //PROJECT_PLAYER_H

View File

@@ -2,6 +2,7 @@
 #define FFCONV_RESAMPLER_H
 #include "Frame.h"
+#include "Codec.h"
 extern "C" {
 #include <libswresample/swresample.h>
@@ -19,11 +20,11 @@ namespace ffcpp {
 public:
 Resampler(int inChannelLayout, int inSampleRate, AVSampleFormat inSampleFormat,
 int outChannelLayout, int outSampleRate, AVSampleFormat outSampleFormat);
-Resampler(AVCodecContext* decoderCtx, AVCodecContext* encoderCtx);
+Resampler(CodecPtr decoder, CodecPtr encoder);
 ~Resampler();
 Frame resample(Frame& inFrame);
-static bool needResampling(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx);
+static bool needResampling(CodecPtr decoder, CodecPtr encoder);
 };
 }

View File

@@ -1,12 +1,13 @@
 #ifndef FFCONV_SCALER_H
 #define FFCONV_SCALER_H
+#include "Frame.h"
+#include "Codec.h"
 extern "C" {
 #include <libswscale/swscale.h>
 }
-#include "Frame.h"
 namespace ffcpp {
 class Scaler {
@@ -18,9 +19,9 @@ namespace ffcpp {
 public:
 Scaler(int srcWidth, int srcHeight, AVPixelFormat srcPixFmt, int dstWidth, int dstHeight, AVPixelFormat dstPixFmt);
-Scaler(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx);
+Scaler(CodecPtr decoder, CodecPtr encoder);
 Frame scale(Frame& inFrame);
-static bool needScaling(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx);
+static bool needScaling(CodecPtr decoder, CodecPtr encoder);
 };
 }

View File

@@ -7,19 +7,23 @@ extern "C" {
 #include <libavformat/avformat.h>
 }
+#include <memory>
 namespace ffcpp {
+typedef std::shared_ptr<class Stream> StreamPtr;
 class Stream {
 private:
 AVStream* _stream;
-Codec _codec;
+CodecPtr _codec;
 public:
 Stream();
 Stream(AVStream* stream);
 Stream(AVStream* stream, AVCodec* encoder);
 operator AVStream*() const;
-Codec& codec();
+CodecPtr codec();
 AVRational timeBase() const;
 void setTimeBase(AVRational timeBase);

View File

@@ -25,7 +25,9 @@ set(SOURCE_FILES MediaFile.cpp
 Scaler.cpp
 ../include/ffcpp/Scaler.h
 Resampler.cpp
-../include/ffcpp/Resampler.h)
+../include/ffcpp/Resampler.h
+Player.cpp
+../include/ffcpp/Player.h )
 add_library(ffcpp ${SOURCE_FILES})
 target_link_libraries(ffcpp ${FFMPEG_LIBRARIES})

View File

@@ -75,6 +75,10 @@ namespace ffcpp {
 return _codecCtx->sample_rate;
 }
+int Codec::channelLayout() const {
+return (int)_codecCtx->channel_layout;
+}
 void Codec::setWidth(int width) {
 _codecCtx->width = width;
 }

View File

@@ -16,7 +16,8 @@ namespace ffcpp {
 _streams.reserve(_formatCtx->nb_streams);
 for(size_t i = 0; i < _formatCtx->nb_streams; ++i) {
-_streams.emplace_back(_formatCtx->streams[i]);
+auto stream = std::make_shared<Stream>(_formatCtx->streams[i]);
+_streams.emplace_back(stream);
 }
 } else if(mode == Mode::Write) {
 int res = avformat_alloc_output_context2(&_formatCtx, nullptr, nullptr, src.c_str());
@@ -56,7 +57,7 @@ namespace ffcpp {
 return hasStream(AVMEDIA_TYPE_AUDIO);
 }
-Stream& MediaFile::getStream(AVMediaType type, size_t index) {
+StreamPtr MediaFile::getStream(AVMediaType type, size_t index) {
 for(size_t i = 0, curIndex = 0; i < _formatCtx->nb_streams; ++i) {
 if(_formatCtx->streams[i]->codec->codec_type == type) {
 if(curIndex == index) {
@@ -70,11 +71,11 @@ namespace ffcpp {
 throw std::runtime_error("cannot find stream");
 }
-Stream& MediaFile::videoStream(size_t index /* = 0 */) {
+StreamPtr MediaFile::videoStream(size_t index /* = 0 */) {
 return getStream(AVMEDIA_TYPE_VIDEO, index);
 }
-Stream& MediaFile::audioStream(size_t index /* = 0 */) {
+StreamPtr MediaFile::audioStream(size_t index /* = 0 */) {
 return getStream(AVMEDIA_TYPE_AUDIO, index);
 }
@@ -92,7 +93,7 @@ namespace ffcpp {
 }
 }
-Stream& MediaFile::addVideoStream(AVCodecID codecID, int width, int height, AVRational timeBase, AVPixelFormat pixelFormat) {
+StreamPtr MediaFile::addVideoStream(AVCodecID codecID, int width, int height, AVRational timeBase, AVPixelFormat pixelFormat) {
 AVCodec* codec = avcodec_find_encoder(codecID);
 if(!codec) throw std::runtime_error("cannot find codec");
@@ -110,18 +111,18 @@ namespace ffcpp {
 ctx->pix_fmt = pixelFormat;
 }
-_streams.emplace_back(stream, codec);
-return _streams.back();
+auto sPtr = std::make_shared<Stream>(stream, codec);
+_streams.emplace_back(sPtr);
+return sPtr;
 }
-Stream& MediaFile::addAudioStream(AVCodecID codecID, int channels, int sampleRate, AVSampleFormat sampleFormat) {
+StreamPtr MediaFile::addAudioStream(AVCodecID codecID, int channels, int sampleRate, AVSampleFormat sampleFormat) {
 AVCodec* codec = avcodec_find_encoder(codecID);
 if(!codec) throw std::runtime_error("cannot find codec");
 AVStream* stream = avformat_new_stream(_formatCtx, codec);
 if(!stream) throw std::runtime_error("cannot create stream");
+// TODO: Here we need adjust encoder parameters
 AVCodecContext* ctx = stream->codec;
 if(sampleFormat == AV_SAMPLE_FMT_NONE) {
 ctx->sample_fmt = codec->sample_fmts[0];
@@ -135,8 +136,9 @@ namespace ffcpp {
 ctx->time_base = AVRational {1, sampleRate};
 ctx->strict_std_compliance = FF_COMPLIANCE_EXPERIMENTAL;
-_streams.emplace_back(stream, codec);
-return _streams.back();
+auto sPtr = std::make_shared<Stream>(stream, codec);
+_streams.emplace_back(sPtr);
+return sPtr;
 }
 Packet MediaFile::readPacket() {

src/Player.cpp (new file)
View File

@@ -0,0 +1,31 @@
#include "ffcpp/Player.h"
#include "ffcpp/Stream.h"
namespace ffcpp {
Player::Player(std::shared_ptr<IVideoSink> vSink): _vSink(vSink), _curMedia(nullptr), _aStream(nullptr), _vStream(
nullptr) {
init();
}
Player::~Player() {
}
void Player::setMedia(std::string path) {
_curMedia = std::make_unique<MediaFile>(path, Mode::Read);
_vStream = _curMedia->videoStream();
_aStream = _curMedia->audioStream();
auto vDecoder = _vStream->codec();
auto aDecoder = _aStream->codec();
}
void Player::play() {
if(!_curMedia)
return;
}
}

View File

@@ -32,9 +32,9 @@ namespace ffcpp {
 throwIfError(res, "cannot init resampler");
 }
-Resampler::Resampler(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx)
-: Resampler((int)decoderCtx->channel_layout, decoderCtx->sample_rate, decoderCtx->sample_fmt,
-(int)encoderCtx->channel_layout, encoderCtx->sample_rate, encoderCtx->sample_fmt) {
+Resampler::Resampler(CodecPtr decoder, CodecPtr encoder)
+: Resampler(decoder->channelLayout(), decoder->sampleRate(), decoder->sampleFormat(),
+encoder->channelLayout(), encoder->sampleRate(), encoder->sampleFormat()) {
 }
 Resampler::~Resampler() {
@@ -55,11 +55,11 @@ namespace ffcpp {
 return outFrame;
 }
-bool Resampler::needResampling(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx) {
-return (decoderCtx->channels != encoderCtx->channels ||
-decoderCtx->channel_layout != encoderCtx->channel_layout ||
-decoderCtx->sample_fmt != encoderCtx->sample_fmt ||
-decoderCtx->sample_rate != encoderCtx->sample_rate);
+bool Resampler::needResampling(CodecPtr decoder, CodecPtr encoder) {
+return (decoder->channels() != encoder->channels() ||
+decoder->channelLayout() != encoder->channelLayout() ||
+decoder->sampleFormat() != encoder->sampleFormat() ||
+decoder->sampleRate() != encoder->sampleRate());
 }
 }

View File

@@ -15,9 +15,9 @@ namespace ffcpp {
 }
 }
-Scaler::Scaler(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx)
-: Scaler(decoderCtx->width, decoderCtx->height, decoderCtx->pix_fmt,
-encoderCtx->width, encoderCtx->height, encoderCtx->pix_fmt) {
+Scaler::Scaler(CodecPtr decoder, CodecPtr encoder)
+: Scaler(decoder->width(), decoder->height(), decoder->pixelFormat(),
+encoder->width(), encoder->height(), encoder->pixelFormat()) {
 }
@@ -36,10 +36,10 @@ namespace ffcpp {
 return outFrame;
 }
-bool Scaler::needScaling(AVCodecContext *decoderCtx, AVCodecContext *encoderCtx) {
-return (decoderCtx->width != encoderCtx->width ||
-decoderCtx->height != encoderCtx->height ||
-decoderCtx->pix_fmt != encoderCtx->pix_fmt);
+bool Scaler::needScaling(CodecPtr decoder, CodecPtr encoder) {
+return (decoder->width() != encoder->width() ||
+decoder->height() != encoder->height() ||
+decoder->pixelFormat() != encoder->pixelFormat());
 }
 }

View File

@@ -6,19 +6,19 @@ namespace ffcpp {
 Stream::Stream(): _stream(nullptr) {
 }
-Stream::Stream(AVStream *stream): _stream(stream), _codec(_stream->codec, CodecType::Decoder) {
+Stream::Stream(AVStream *stream): _stream(stream) {
+_codec = std::make_shared<Codec>(_stream->codec, CodecType::Decoder);
 }
-Stream::Stream(AVStream *stream, AVCodec* encoder): _stream(stream), _codec(stream->codec, encoder) {
+Stream::Stream(AVStream *stream, AVCodec* encoder): _stream(stream) {
+_codec = std::make_shared<Codec>(_stream->codec, encoder);
 }
 Stream::operator AVStream*() const {
 return _stream;
 }
-Codec& Stream::codec() {
+CodecPtr Stream::codec() {
 return _codec;
 }