Enable video player (VideoTexture) based on the new MediaEngine for macOS and tvOS (#1154)

* Ready for PR macOS

* Update videoTexture.frag

* Fix compile

* Fix h265 for mac [skip ci]

* Fix compile error

* Update readme [skip ci]
Deal(一线灵) 2023-04-02 01:51:36 +08:00 committed by GitHub
parent b860217be0
commit 621c94c9e7
23 changed files with 1238 additions and 436 deletions

View File

@ -34,8 +34,9 @@
|-----------------------|-------------------|-----------------------|----------------------|--------------------|
| Windows Desktop | complete | H264, HEVC, VP90 | YUY2, NV12, RGB32 | IMFMediaSession |
| Windows UWP | complete | H264, HEVC, VP90 | BGR32 | IMFMediaEngine |
| Apple macOS | in progress | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple iOS/tvOS | in progress | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple macOS | complete | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple tvOS | complete | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple iOS | in progress | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Android | planned | H264 | RGB32 | |
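
A minimal usage sketch of the cross-platform `MediaEngine` API (interface and factory names as declared in `media/MediaEngine.h` in this commit; the file path is a placeholder):

```cpp
#include "media/MediaEngine.h"

void playDemo()
{
    // Picks WMF/IMFMediaEngine on Windows, AVFoundation on Apple platforms.
    auto factory = ax::CreatePlatformMediaEngineFactory();
    auto* engine  = factory->CreateMediaEngine();

    engine->SetAutoPlay(true);
    engine->SetLoop(true);
    engine->Open("file:///path/to/video.mp4"); // placeholder path

    // Per rendered frame: hand the decoded pixels (NV12 or BGR32) to the renderer.
    engine->TransferVideoFrame([](const ax::MEVideoFrame& frame) {
        // upload frame._dataPointer (and frame._cbcrDataPointer for NV12) to textures
    });

    factory->DestroyMediaEngine(engine);
}
```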
### View code with vscode online

View File

@ -29,8 +29,9 @@
|-----------------------|-------------------|-----------------------|----------------------|--------------------|
| Windows Desktop | complete | H264, HEVC, VP90 | YUY2, NV12, RGB32 | IMFMediaSession |
| Windows UWP | complete | H264, HEVC, VP90 | BGR32 | IMFMediaEngine |
| Apple macOS | in progress | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple iOS/tvOS | in progress | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple macOS | complete | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple tvOS | complete | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Apple iOS | in progress | H264, HEVC(hvc1) | NV12, BGR32 | AVFoundation |
| Android | planned | H264 | RGB32 | |
### About the prebuilt libraries

View File

@ -124,7 +124,8 @@ file(GLOB_RECURSE _AX_MEDIA_HEADER media/*.h)
file(GLOB_RECURSE _AX_MEDIA_SRC media/*.cpp)
if (APPLE)
set_source_files_properties(media/MediaEngine.cpp;media/AvfMediaEngine.cpp PROPERTIES LANGUAGE OBJCXX)
list(APPEND _AX_MEDIA_SRC media/AvfMediaEngine.mm)
set_source_files_properties(media/MediaEngine.cpp PROPERTIES LANGUAGE OBJCXX)
endif()
message(STATUS "_AX_MEDIA_SRC=${_AX_MEDIA_SRC}")

View File

@ -0,0 +1,72 @@
#include "MediaEngine.h"
#if defined(__APPLE__)
# import <AVFoundation/AVFoundation.h>
@class AVMediaSessionHandler;
NS_AX_BEGIN
class AvfMediaEngine : public MediaEngine
{
public:
void SetMediaEventCallback(MEMediaEventCallback cb) override;
void SetAutoPlay(bool bAutoPlay) override;
bool Open(std::string_view sourceUri) override;
bool Close() override;
bool SetLoop(bool bLooping) override;
bool SetRate(double fRate) override;
bool SetCurrentTime(double fSeekTimeInSec) override;
bool Play() override;
bool Pause() override;
bool Stop() override;
MEMediaState GetState() const override;
bool TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback) override;
void onStatusNotification(void* context);
void onPlayerEnd();
void FireEvent(MEMediaEventType event)
{
if (_eventCallback)
_eventCallback(event);
}
bool isPlaying() const {
return _state == MEMediaState::Playing;
}
void internalPlay(bool replay = false);
void internalPause();
private:
MEMediaEventCallback _eventCallback;
MEVideoPixelFormat _videoPF = MEVideoPixelFormat::INVALID;
MEMediaState _state = MEMediaState::Closed;
MEIntPoint _videoExtent;
AVPlayer* _player = nil;
AVPlayerItem* _playerItem = nil;
AVPlayerItemOutput* _playerOutput = nil;
AVMediaSessionHandler* _sessionHandler = nil;
bool _bAutoPlay = false;
bool _repeatEnabled = false;
/*
true: luma=[0,255] chroma=[1,255]
false: luma=[16,235] chroma=[16,240]
*/
bool _bFullColorRange = false;
};
struct AvfMediaEngineFactory : public MediaEngineFactory
{
MediaEngine* CreateMediaEngine() override
{
return new AvfMediaEngine();
}
void DestroyMediaEngine(MediaEngine* me) override { delete static_cast<AvfMediaEngine*>(me); }
};
NS_AX_END
#endif

View File

@ -0,0 +1,506 @@
#include "AvfMediaEngine.h"
#if defined(__APPLE__)
# include <TargetConditionals.h>
# include <assert.h>
# include "yasio/detail/endian_portable.hpp"
# include "yasio/stl/string_view.hpp"
# include "yasio/detail/sz.hpp"
#if TARGET_OS_IPHONE
# import <UIKit/UIKit.h>
#endif
USING_NS_AX;
@interface AVMediaSessionHandler : NSObject
- (AVMediaSessionHandler*)initWithMediaEngine:(AvfMediaEngine*)me;
- (void)dealloc;
- (void)playerItemDidPlayToEndTime:(NSNotification*)notification;
@property AvfMediaEngine* _me;
@end
@implementation AVMediaSessionHandler
@synthesize _me;
- (AVMediaSessionHandler*)initWithMediaEngine:(AvfMediaEngine*)me
{
self = [super init];
if (self)
_me = me;
return self;
}
- (void)registerUINotifications
{
#if TARGET_OS_IPHONE
auto nc = [NSNotificationCenter defaultCenter];
[nc addObserver:self
selector:@selector(handleAudioRouteChange:)
name:AVAudioSessionRouteChangeNotification
object:[AVAudioSession sharedInstance]];
[nc addObserver:self
selector:@selector(handleActive:)
name:UIApplicationDidBecomeActiveNotification
object:nil];
[nc addObserver:self
selector:@selector(handleDeactive:)
name:UIApplicationWillResignActiveNotification
object:nil];
[nc addObserver:self
selector:@selector(handleEnterBackground:)
name:UIApplicationDidEnterBackgroundNotification
object:nil];
[nc addObserver:self
selector:@selector(handleEnterForeground:)
name:UIApplicationWillEnterForegroundNotification
object:nil];
#endif
}
#if TARGET_OS_IPHONE
- (void)handleAudioRouteChange:(NSNotification*)notification
{
if (_me->isPlaying())
_me->internalPlay(true);
}
- (void)handleActive:(NSNotification*)notification
{
if (_me->isPlaying())
_me->internalPlay();
}
- (void)handleDeactive:(NSNotification*)notification
{
if (_me->isPlaying())
_me->internalPause();
}
- (void)handleEnterForeground:(NSNotification*)notification
{
if (_me->isPlaying())
_me->internalPlay();
}
- (void)handleEnterBackground:(NSNotification*)notification
{
if (_me->isPlaying())
_me->internalPause();
}
#endif
- (void)deregisterUINotifications
{
#if TARGET_OS_IPHONE
auto nc = [NSNotificationCenter defaultCenter];
[nc removeObserver:self
name:AVAudioSessionRouteChangeNotification
object:nil];
[nc removeObserver:self
name:UIApplicationDidBecomeActiveNotification
object:nil];
[nc removeObserver:self
name:UIApplicationWillResignActiveNotification
object:nil];
[nc removeObserver:self
name:UIApplicationDidEnterBackgroundNotification
object:nil];
[nc removeObserver:self
name:UIApplicationWillEnterForegroundNotification
object:nil];
#endif
}
- (void)dealloc
{
[super dealloc];
}
- (void)playerItemDidPlayToEndTime:(NSNotification*)notification
{
_me->onPlayerEnd();
}
- (void)observeValueForKeyPath:(NSString*)keyPath
ofObject:(id)object
change:(NSDictionary<NSKeyValueChangeKey, id>*)change
context:(void*)context
{
if ((id)context == object && [keyPath isEqualToString:@"status"])
_me->onStatusNotification(context);
}
@end
NS_AX_BEGIN
void AvfMediaEngine::onPlayerEnd()
{
_state = MEMediaState::Completed;
FireEvent(MEMediaEventType::Completed);
if (_repeatEnabled) {
this->SetCurrentTime(0);
this->Play();
}
}
void AvfMediaEngine::SetMediaEventCallback(MEMediaEventCallback cb)
{
_eventCallback = cb;
}
void AvfMediaEngine::SetAutoPlay(bool bAutoPlay)
{
_bAutoPlay = bAutoPlay;
}
bool AvfMediaEngine::Open(std::string_view sourceUri)
{
Close();
NSURL* nsMediaUrl = nil;
std::string_view Path;
if (cxx20::starts_with(sourceUri, "file://"sv))
{
// Media Framework doesn't percent encode the URL, so the path portion is just a native file path.
// Extract it and then use it create a proper URL.
Path = sourceUri.substr(7);
NSString* nsPath = [NSString stringWithUTF8String:Path.data()];
nsMediaUrl = [NSURL fileURLWithPath:nsPath isDirectory:NO];
}
else
{
// Assume that this has been percent encoded for now - when we support HTTP Live Streaming we will need to check
// for that.
NSString* nsUri = [NSString stringWithUTF8String:sourceUri.data()];
nsMediaUrl = [NSURL URLWithString:nsUri];
}
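// e.g. (illustrative) "file:///Users/me/video.mp4" -> fileURLWithPath:@"/Users/me/video.mp4",
// while "https://example.com/a.mp4" is handed to URLWithString as-is (assumed percent-encoded).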
// open media file
if (nsMediaUrl == nil)
{
AXME_TRACE("Failed to open Media file: %s", sourceUri.data());
return false;
}
// create player instance
_player = [[AVPlayer alloc] init];
if (!_player)
{
AXME_TRACE("Failed to create instance of an AVPlayer: %s", sourceUri.data());
return false;
}
_player.actionAtItemEnd = AVPlayerActionAtItemEndPause;
// create player item
_sessionHandler = [[AVMediaSessionHandler alloc] initWithMediaEngine:this];
assert(_sessionHandler != nil);
// Use URL asset which gives us resource loading ability if system can't handle the scheme
AVURLAsset* urlAsset = [[AVURLAsset alloc] initWithURL:nsMediaUrl options:nil];
_playerItem = [[AVPlayerItem playerItemWithAsset:urlAsset] retain];
[urlAsset release];
if (_playerItem == nil)
{
AXME_TRACE("Failed to open player item with Url: %s", sourceUri.data());
return false;
}
_state = MEMediaState::Preparing;
// load tracks
[[_playerItem asset] loadValuesAsynchronouslyForKeys:@[ @"tracks" ]
completionHandler:^{
NSError* nsError = nil;
if ([[_playerItem asset] statusOfValueForKey:@"tracks" error:&nsError] ==
AVKeyValueStatusLoaded)
{
// File movies will be ready now
if (_playerItem.status == AVPlayerItemStatusReadyToPlay)
{
onStatusNotification(_playerItem);
}
}
else if (nsError != nullptr)
{
NSDictionary* errDetail = [nsError userInfo];
NSString* errStr =
[[errDetail objectForKey:NSUnderlyingErrorKey] localizedDescription];
AXME_TRACE("Load media asset failed, %s", errStr.UTF8String);
}
}];
[[NSNotificationCenter defaultCenter] addObserver:_sessionHandler
selector:@selector(playerItemDidPlayToEndTime:)
name:AVPlayerItemDidPlayToEndTimeNotification
object:_playerItem];
[_playerItem addObserver:_sessionHandler forKeyPath:@"status" options:0 context:_playerItem];
_player.rate = 0.0;
[_player replaceCurrentItemWithPlayerItem:_playerItem];
// TODO: handle EnterForground, EnterBackground, Active, Deactive, AudioRouteChanged
[_sessionHandler registerUINotifications];
return true;
}
void AvfMediaEngine::onStatusNotification(void* context)
{
if (!_playerItem || context != _playerItem)
return;
if (_playerItem.status == AVPlayerItemStatusFailed)
{
FireEvent(MEMediaEventType::Error);
return;
}
if (_playerItem.status != AVPlayerItemStatusReadyToPlay)
return;
for (AVPlayerItemTrack* playerTrack in _playerItem.tracks)
{
AVAssetTrack* assetTrack = playerTrack.assetTrack;
NSString* mediaType = assetTrack.mediaType;
if ([mediaType isEqualToString:AVMediaTypeVideo])
{ // we only care about video
auto naturalSize = [assetTrack naturalSize];
_videoExtent.x = naturalSize.width;
_videoExtent.y = naturalSize.height;
NSMutableDictionary* outputAttrs = [NSMutableDictionary dictionary];
CMFormatDescriptionRef DescRef = (CMFormatDescriptionRef)[assetTrack.formatDescriptions objectAtIndex:0];
CMVideoCodecType codecType = CMFormatDescriptionGetMediaSubType(DescRef);
int videoOutputPF = kCVPixelFormatType_32BGRA;
if (kCMVideoCodecType_H264 == codecType || kCMVideoCodecType_HEVC == codecType)
{
videoOutputPF = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
CFDictionaryRef formatExtensions = CMFormatDescriptionGetExtensions(DescRef);
if (formatExtensions)
{
CFBooleanRef bFullRange = (CFBooleanRef)CFDictionaryGetValue(
formatExtensions, kCMFormatDescriptionExtension_FullRangeVideo);
if (bFullRange && (bool)CFBooleanGetValue(bFullRange))
{
videoOutputPF = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
}
}
}
_bFullColorRange = false;
switch (videoOutputPF)
{
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange:
_bFullColorRange = true;
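// fall through: full-range input still decodes as NV12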
case kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
_videoPF = MEVideoPixelFormat::NV12;
break;
default: // kCVPixelFormatType_32BGRA
_videoPF = MEVideoPixelFormat::BGR32;
}
[outputAttrs setObject:[NSNumber numberWithInt:videoOutputPF]
forKey:(NSString*)kCVPixelBufferPixelFormatTypeKey];
[outputAttrs setObject:[NSNumber numberWithInteger:1]
forKey:(NSString*)kCVPixelBufferBytesPerRowAlignmentKey];
[outputAttrs setObject:[NSNumber numberWithBool:YES] forKey:(NSString*)kCVPixelBufferMetalCompatibilityKey];
AVPlayerItemVideoOutput* videoOutput =
[[AVPlayerItemVideoOutput alloc] initWithPixelBufferAttributes:outputAttrs];
// Only decode for us
videoOutput.suppressesPlayerRendering = YES;
[_playerItem addOutput:videoOutput];
_playerOutput = videoOutput;
break;
}
}
if (_bAutoPlay)
this->Play();
}
bool AvfMediaEngine::TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback)
{
auto videoOutput = static_cast<AVPlayerItemVideoOutput*>(this->_playerOutput);
if (!videoOutput)
return false;
CMTime currentTime = [videoOutput itemTimeForHostTime:CACurrentMediaTime()];
if (![videoOutput hasNewPixelBufferForItemTime:currentTime])
return false;
CVPixelBufferRef videoFrame = [videoOutput copyPixelBufferForItemTime:currentTime itemTimeForDisplay:nullptr];
if (!videoFrame)
return false;
auto& videoDim = _videoExtent;
MEIntPoint bufferDim;
CVPixelBufferLockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
if (CVPixelBufferIsPlanar(videoFrame))
{ // NV12 ('420v' or '420f') expected
assert(CVPixelBufferGetPlaneCount(videoFrame) == 2);
auto YWidth = static_cast<int>(CVPixelBufferGetWidthOfPlane(videoFrame, 0)); // 1920
auto YHeight = static_cast<int>(CVPixelBufferGetHeightOfPlane(videoFrame, 0)); // 1080
auto UVWidth = static_cast<int>(CVPixelBufferGetWidthOfPlane(videoFrame, 1)); // 960
auto UVHeight = static_cast<int>(CVPixelBufferGetHeightOfPlane(videoFrame, 1)); // 540
auto YPitch = static_cast<int>(CVPixelBufferGetBytesPerRowOfPlane(videoFrame, 0));
auto UVPitch = static_cast<int>(CVPixelBufferGetBytesPerRowOfPlane(videoFrame, 1));
auto YDataLen = YPitch * YHeight; // 1920x1080: YDataLen=2073600
auto UVDataLen = UVPitch * UVHeight; // 1920x1080: UVDataLen=1036800
auto frameYData = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, 0);
auto frameCbCrData = (uint8_t*)CVPixelBufferGetBaseAddressOfPlane(videoFrame, 1);
assert(YASIO_SZ_ALIGN(videoDim.x, 32) * videoDim.y * 3 / 2 == YDataLen + UVDataLen);
// Apple: both H264, HEVC(H265) bufferDimX=ALIGN(videoDim.x, 32), bufferDimY=videoDim.y
// Windows:
// - H264: BufferDimX align videoDim.x with 16, BufferDimY as-is
// - HEVC(H265): BufferDim(X,Y) align videoDim(X,Y) with 32
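// Worked example of the Apple rule above (assumed 1912x1080 HEVC input): YPitch = ALIGN(1912, 32) = 1920,
// so YDataLen = 1920*1080 = 2073600 and UVDataLen = 1920*540 = 1036800, i.e. exactly ALIGN(w,32)*h*3/2.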
MEVideoFrame frame{frameYData, frameCbCrData, static_cast<size_t>(YDataLen + UVDataLen), MEVideoPixelDesc{_videoPF, MEIntPoint{YPitch, YHeight}}, videoDim};
#if defined(_DEBUG) || !defined(NDEBUG)
auto& ycbcrDesc = frame._ycbcrDesc;
ycbcrDesc.YDim.x = YWidth;
ycbcrDesc.YDim.y = YHeight;
ycbcrDesc.CbCrDim.x = UVWidth;
ycbcrDesc.CbCrDim.y = UVHeight;
ycbcrDesc.YPitch = YPitch;
ycbcrDesc.CbCrPitch = UVPitch;
#endif
callback(frame);
}
else
{ // BGRA
auto frameData = (uint8_t*)CVPixelBufferGetBaseAddress(videoFrame);
size_t frameDataSize = CVPixelBufferGetDataSize(videoFrame);
callback(MEVideoFrame{frameData, nullptr, frameDataSize, MEVideoPixelDesc{_videoPF, videoDim}, videoDim});
}
CVPixelBufferUnlockBaseAddress(videoFrame, kCVPixelBufferLock_ReadOnly);
CVPixelBufferRelease(videoFrame);
return true;
}
bool AvfMediaEngine::Close()
{
if (_state == MEMediaState::Closed)
return true;
if (_playerItem != nil)
{
if (_player != nil)
{
[_sessionHandler deregisterUINotifications];
[[NSNotificationCenter defaultCenter] removeObserver:_sessionHandler
name:AVPlayerItemDidPlayToEndTimeNotification
object:_playerItem];
[_playerItem removeObserver:_sessionHandler forKeyPath:@"status"];
}
[_playerItem release];
_playerItem = nil;
}
if (_player != nil)
{
[_player release];
_player = nil;
}
_state = MEMediaState::Closed;
return true;
}
bool AvfMediaEngine::SetLoop(bool bLooping)
{
_repeatEnabled = bLooping;
if (bLooping)
_player.actionAtItemEnd = AVPlayerActionAtItemEndNone;
else
_player.actionAtItemEnd = AVPlayerActionAtItemEndPause;
return true;
}
bool AvfMediaEngine::SetRate(double fRate)
{
if (_player)
{
[_player setRate:fRate];
// TODO:
_player.muted = fRate < 0 ? YES : NO;
}
return true;
}
bool AvfMediaEngine::SetCurrentTime(double fSeekTimeInSec)
{
if (_player != nil)
[_player seekToTime:CMTimeMakeWithSeconds(fSeekTimeInSec, NSEC_PER_SEC)]; // keep sub-second precision; CMTimeMake(double, 1) would truncate
return true;
}
bool AvfMediaEngine::Play()
{
if (_state != MEMediaState::Playing)
{
[_player play];
_state = MEMediaState::Playing;
FireEvent(MEMediaEventType::Playing);
}
return true;
}
void AvfMediaEngine::internalPlay(bool replay)
{
if (_player != nil) {
if (replay)
[_player pause];
[_player play];
}
}
void AvfMediaEngine::internalPause()
{
if (_player != nil)
[_player pause];
}
bool AvfMediaEngine::Pause()
{
if (_state == MEMediaState::Playing)
{
[_player pause];
_state = MEMediaState::Paused;
FireEvent(MEMediaEventType::Paused);
}
return true;
}
bool AvfMediaEngine::Stop()
{
if (_state != MEMediaState::Stopped)
{
SetCurrentTime(0);
[_player pause];
_state = MEMediaState::Stopped;
// stop() will be invoked in dealloc, which is invoked by _videoPlayer's destructor,
// so don't send the message when _videoPlayer is being deleted.
}
return true;
}
MEMediaState AvfMediaEngine::GetState() const
{
return _state;
}
NS_AX_END
#endif

View File

@ -2,8 +2,10 @@
#if defined(_WIN32)
# include <stdio.h>
# include <Unknwn.h>
# include <wrl/client.h>
# include <exception>
namespace MFUtils
{
@ -38,6 +40,14 @@ inline TComPtr<_Ty> ReferencedPtrToComPtr(_Ty* ptr)
return obj;
}
template <typename T>
inline HRESULT CreateInstance(REFCLSID clsid, Microsoft::WRL::ComPtr<T>& ptr)
{
// ASSERT(!ptr);
return CoCreateInstance(clsid, nullptr, CLSCTX_INPROC_SERVER, __uuidof(T),
reinterpret_cast<void**>(ptr.GetAddressOf()));
}
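// Usage sketch (mirrors MfMediaEngine::Initialize in this commit):
//   Microsoft::WRL::ComPtr<IWICImagingFactory> wicFactory;
//   MFUtils::CreateInstance(CLSID_WICImagingFactory, wicFactory);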
HRESULT InitializeMFOnce();
} // namespace MFUtils

View File

@ -1,11 +1,13 @@
#include "MediaEngine.h"
#if AX_TARGET_PLATFORM == AX_PLATFORM_WIN32
# include "media/WmfMediaEngine.h"
#elif AX_TARGET_PLATFORM == AX_PLATFORM_WINRT
# include "media/MfMediaEngine.h"
// #elif defined(__APPLE__)
// # include "media/AvfMediaEngine.h"
#if defined(WINAPI_FAMILY)
# if WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP && !defined(AXME_USE_IMFME)
# include "media/WmfMediaEngine.h"
# else
# include "media/MfMediaEngine.h"
# endif
#elif defined(__APPLE__)
# include "media/AvfMediaEngine.h"
#endif
namespace axstd
@ -30,12 +32,14 @@ NS_AX_BEGIN
std::unique_ptr<MediaEngineFactory> CreatePlatformMediaEngineFactory()
{
#if AX_TARGET_PLATFORM == AX_PLATFORM_WIN32
#if defined(WINAPI_FAMILY)
# if WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP && !defined(AXME_USE_IMFME)
return axstd::static_pointer_cast<MediaEngineFactory>(std::make_unique<WmfMediaEngineFactory>());
#elif AX_TARGET_PLATFORM == AX_PLATFORM_WINRT
# else
return axstd::static_pointer_cast<MediaEngineFactory>(std::make_unique<MfMediaEngineFactory>());
// #elif defined(__APPLE__)
// return axstd::static_pointer_cast<MediaEngineFactory>(std::make_unique<AvfMediaEngineFactory>());
# endif
#elif defined(__APPLE__)
return axstd::static_pointer_cast<MediaEngineFactory>(std::make_unique<AvfMediaEngineFactory>());
#else
return nullptr;
#endif

View File

@ -1,11 +1,35 @@
#pragma once
#include "platform/CCPlatformMacros.h"
#if !defined(AXME_NO_AXMOL)
# include "base/CCConsole.h"
# include "platform/CCPlatformMacros.h"
# define AXME_TRACE AXLOG
#else
# define AXME_TRACE printf
# define NS_AX_BEGIN \
namespace ax \
{
# define NS_AX_END }
# define AX_BREAK_IF(cond) \
if (cond) \
break
#endif
// #define AXME_USE_IMFME 1
#if __has_include(<winapifamily.h>)
# include <winapifamily.h>
#endif
#include <functional>
#include <memory>
#include <chrono>
#include <string_view>
#include "yasio/detail/byte_buffer.hpp"
using namespace std::string_view_literals;
NS_AX_BEGIN
static constexpr std::string_view FILE_URL_SCHEME = "file://"sv;
@ -48,11 +72,18 @@ enum class MEMediaState
Completed,
};
enum class MEVideoSampleFormat
/**
* SampleVideo: (1928x1080)
* - YUY2,RGB32,BGR32: works well
* - NV12: has green border
* - Y420V/F: on Apple, needs testing
*/
enum class MEVideoPixelFormat
{
NONE,
INVALID,
YUY2,
NV12,
NV12, // '420v' '420f'
RGB32,
BGR32,
};
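// Rough mapping to the decoders in this commit: YUY2 comes from the WMF H264 path, NV12 from
// Windows HEVC and the Apple '420v'/'420f' outputs, and BGR32/RGB32 from the BGRA/RGB fallbacks.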
@ -69,32 +100,84 @@ struct MEIntPoint
bool equals(const MEIntPoint& rhs) const { return this->x == rhs.x && this->y == rhs.y; }
};
struct MEVideoTextueSample
#if defined(_DEBUG) || !defined(NDEBUG)
struct YCbCrBiPlanarPixelInfo
{
yasio::byte_buffer _buffer;
MEIntPoint _bufferDim;
MEIntPoint _videoDim;
MEVideoSampleFormat _format = MEVideoSampleFormat::NONE;
int _stride = 0; // bytesPerRow
int _mods = 0; // whether format, videoDim changed
unsigned int YPitch = 0;
MEIntPoint YDim;
unsigned int CbCrPitch = 0;
MEIntPoint CbCrDim;
};
#endif
/*
*
* RGB32/BGR32: _dim==_videoDim
* H264(YUY2):
* LumaTexture(LA8, RG8):
* - _dim.x = ALIGN(_videoDim.x, 16),
* - _dim.y = _videoDim.y
* ChromaTexture(RGBA8)
* - chromaDim.x = _dim.x / 2
* - chromaDim.y = _dim.y
* NV12/HEVC:
* LumaTexture:
* - _dim.x = ALIGN(_videoDim.x, 32)
* - _dim.y = ALIGN(_videoDim.y, 32)
* ChromaTexture(RG8)
* - chromaDim.x = _dim.x / 2
* - chromaDim.y = _dim.y / 2
*/
struct MEVideoPixelDesc
{
MEVideoPixelDesc() : _PF(MEVideoPixelFormat::INVALID), _dim() {}
MEVideoPixelDesc(MEVideoPixelFormat pixelFormat, const MEIntPoint& dim) : _PF(pixelFormat), _dim(dim) {}
MEVideoPixelFormat _PF; // the pixel format
MEIntPoint _dim; // the aligned frame size
bool _fullRange = true;
bool equals(const MEVideoPixelDesc& rhs) const
{
return _dim.equals(rhs._dim) && _PF == rhs._PF && _fullRange == rhs._fullRange;
}
};
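// Example per the notes above (assumed 1912x1080 NV12/HEVC frame): _videoDim = (1912, 1080),
// _dim = (ALIGN(1912, 32), ALIGN(1080, 32)) = (1920, 1088), chroma planes (960, 544).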
struct MEVideoFrame
{
MEVideoFrame(const uint8_t* data,
const uint8_t* cbcrData,
size_t len,
const MEVideoPixelDesc& vpd,
const MEIntPoint& videoDim)
: _vpd(vpd), _dataPointer(data), _cbcrDataPointer(cbcrData), _dataLen(len), _videoDim(videoDim){};
const uint8_t* _dataPointer; // the video data
const size_t _dataLen; // the video data len
const uint8_t* _cbcrDataPointer;
MEVideoPixelDesc _vpd; // the video pixel desc
MEIntPoint _videoDim; // the aligned frame size
#if defined(_DEBUG) || !defined(NDEBUG)
YCbCrBiPlanarPixelInfo _ycbcrDesc{};
#endif
};
//
// redesigned cross-platform MediaEngine, inspired by Microsoft Media Foundation's IMFMediaEngine
//
class MediaEngine
{
public:
virtual ~MediaEngine() {}
virtual void SetMediaEventCallback(MEMediaEventCallback cb) = 0;
virtual void SetAutoPlay(bool bAutoPlay) = 0;
virtual bool Open(std::string_view sourceUri) = 0;
virtual bool Close() = 0;
virtual bool SetLoop(bool bLooping) = 0;
virtual bool SetRate(double fRate) = 0;
virtual bool SetCurrentTime(double fSeekTimeInSec) = 0;
virtual bool Play() = 0;
virtual bool Pause() = 0;
virtual bool Stop() = 0;
virtual MEMediaState GetState() const = 0;
virtual bool GetLastVideoSample(MEVideoTextueSample& sample) const = 0;
virtual void SetMediaEventCallback(MEMediaEventCallback cb) = 0;
virtual void SetAutoPlay(bool bAutoPlay) = 0;
virtual bool Open(std::string_view sourceUri) = 0;
virtual bool Close() = 0;
virtual bool SetLoop(bool bLooping) = 0;
virtual bool SetRate(double fRate) = 0;
virtual bool SetCurrentTime(double fSeekTimeInSec) = 0;
virtual bool Play() = 0;
virtual bool Pause() = 0;
virtual bool Stop() = 0;
virtual MEMediaState GetState() const = 0;
virtual bool TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback) = 0;
};
class MediaEngineFactory

View File

@ -5,14 +5,16 @@
// Licensed under the MIT License.
//-------------------------------------------------------------------------------------
# include "media/MfMediaEngine.h"
#include "media/MfMediaEngine.h"
#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY != WINAPI_FAMILY_DESKTOP_APP)
#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY != WINAPI_FAMILY_DESKTOP_APP || defined(AXME_USE_IMFME))
# include "ntcvt/ntcvt.hpp"
# include "MFUtils.h"
# include "yasio/stl/string_view.hpp"
NS_AX_BEGIN
using Microsoft::WRL::ComPtr;
@ -106,7 +108,9 @@ bool MfMediaEngine::Initialize()
// reinterpret_cast<IUnknown*>(dxgiManager.Get())));
DX::ThrowIfFailed(attributes->SetUnknown(MF_MEDIA_ENGINE_CALLBACK, reinterpret_cast<IUnknown*>(spNotify.Get())));
DX::ThrowIfFailed(
attributes->SetUINT32(MF_MEDIA_ENGINE_VIDEO_OUTPUT_FORMAT, DXGI_FORMAT::DXGI_FORMAT_B8G8R8X8_UNORM));
attributes->SetUINT32(MF_MEDIA_ENGINE_VIDEO_OUTPUT_FORMAT, DXGI_FORMAT::DXGI_FORMAT_B8G8R8A8_UNORM));
DX::ThrowIfFailed(attributes->SetUINT32(MF_MEDIA_ENGINE_STREAM_CONTAINS_ALPHA_CHANNEL, 1));
// Create MediaEngine.
ComPtr<IMFMediaEngineClassFactory> mfFactory;
@ -115,7 +119,7 @@ bool MfMediaEngine::Initialize()
DX::ThrowIfFailed(mfFactory->CreateInstance(0, attributes.Get(), m_mediaEngine.ReleaseAndGetAddressOf()));
CreateInstance(CLSID_WICImagingFactory, m_wicFactory);
MFUtils::CreateInstance(CLSID_WICImagingFactory, m_wicFactory);
return m_mediaEngine != nullptr;
}
@ -168,6 +172,8 @@ void MfMediaEngine::SetMuted(bool muted)
bool MfMediaEngine::Open(std::string_view sourceUri)
{
if (cxx20::starts_with(sourceUri, FILE_URL_SCHEME))
sourceUri.remove_prefix(FILE_URL_SCHEME.length());
auto bstrUrl = ntcvt::from_chars(sourceUri);
m_readyToPlay = false;
@ -221,56 +227,38 @@ bool MfMediaEngine::SetCurrentTime(double fPosInSeconds)
return false;
}
bool MfMediaEngine::GetLastVideoSample(MEVideoTextueSample& sample) const
bool MfMediaEngine::TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback)
{
if (m_mediaEngine != nullptr && m_state == MEMediaState::Playing)
{
LONGLONG pts;
if (m_mediaEngine->OnVideoStreamTick(&pts) == S_OK)
do
{
LONGLONG pts;
AX_BREAK_IF(FAILED(m_mediaEngine->OnVideoStreamTick(&pts)));
const MFVideoNormalizedRect rect{0, 0, 1.0, 1.0};
const RECT rcTarget{0, 0, m_videoExtent.x, m_videoExtent.y};
HRESULT hr = m_mediaEngine->TransferVideoFrame(m_wicBitmap.Get(), &rect, &rcTarget, &m_bkgColor);
if (hr == S_OK)
{
ComPtr<IWICBitmapLock> lockedData;
DWORD flags = WICBitmapLockRead;
WICRect srcRect{0, 0, m_videoExtent.x, m_videoExtent.y};
AX_BREAK_IF(FAILED(hr));
if (SUCCEEDED(m_wicBitmap->Lock(&srcRect, flags, lockedData.GetAddressOf())))
{
UINT stride{0};
ComPtr<IWICBitmapLock> lockedData;
DWORD flags = WICBitmapLockRead;
WICRect srcRect{0, 0, m_videoExtent.x, m_videoExtent.y};
if (SUCCEEDED(lockedData->GetStride(&stride)))
{
UINT bufferSize{0};
BYTE* data{nullptr};
AX_BREAK_IF(FAILED(m_wicBitmap->Lock(&srcRect, flags, lockedData.GetAddressOf())));
if (SUCCEEDED(lockedData->GetDataPointer(&bufferSize, &data)))
{
sample._buffer.assign(data, data + bufferSize, std::true_type{});
sample._bufferDim = m_videoExtent;
sample._stride = sample._bufferDim.x * 4;
sample._mods = 0;
if (!sample._videoDim.equals(m_videoExtent))
{
sample._videoDim = m_videoExtent;
++sample._mods;
}
if (sample._format != MEVideoSampleFormat::BGR32)
{
sample._format = MEVideoSampleFormat::BGR32;
++sample._mods;
}
}
}
}
UINT stride{0};
AX_BREAK_IF(FAILED(lockedData->GetStride(&stride)));
return true;
}
}
UINT bufferSize{0};
BYTE* data{nullptr};
AX_BREAK_IF(FAILED(lockedData->GetDataPointer(&bufferSize, &data)));
callback(MEVideoFrame{data, nullptr, bufferSize, MEVideoPixelDesc{MEVideoPixelFormat::BGR32, m_videoExtent},
m_videoExtent});
return true;
} while (false);
}
return false;
}
@ -351,13 +339,13 @@ void MfMediaEngine::UpdateVideoExtent()
{
if (m_mediaEngine && m_readyToPlay)
{
DWORD x, y;
DWORD x = 0, y = 0;
DX::ThrowIfFailed(m_mediaEngine->GetNativeVideoSize(&x, &y));
int mods = 0;
if (m_videoExtent.x != x)
{
m_videoExtent.y = x;
m_videoExtent.x = x;
++mods;
}
if (m_videoExtent.y != y)

View File

@ -1,5 +1,7 @@
//--------------------------------------------------------------------------------------
// File: MediaEnginePlayer.h
// File: MfMediaEngine.h
//
// Modified from https://github.com/microsoft/Xbox-ATG-Samples/tree/main/UWPSamples/Graphics/VideoTextureUWP
//
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
@ -9,7 +11,9 @@
#if defined(_WIN32)
#include <winapifamily.h>
#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY != WINAPI_FAMILY_DESKTOP_APP)
#include "media/MediaEngine.h"
# if defined(WINAPI_FAMILY) && (WINAPI_FAMILY != WINAPI_FAMILY_DESKTOP_APP || defined(AXME_USE_IMFME))
#include <stdint.h>
#include <mfapi.h>
@ -18,8 +22,6 @@
#include <wincodec.h>
#include <wrl/client.h>
#include "media/MediaEngine.h"
#include "media/MFUtils.h"
NS_AX_BEGIN
@ -43,13 +45,7 @@ protected:
};
//-------------------------------------------------------------------------------------
template <typename T>
inline HRESULT CreateInstance(REFCLSID clsid, Microsoft::WRL::ComPtr<T>& ptr)
{
// ASSERT(!ptr);
return CoCreateInstance(clsid, nullptr, CLSCTX_INPROC_SERVER, __uuidof(T),
reinterpret_cast<void**>(ptr.GetAddressOf()));
}
class MfMediaEngine : public IMFNotify, public MediaEngine
{
public:
@ -80,7 +76,7 @@ public:
bool SetLoop(bool bLoop) override;
bool SetRate(double fRate) override;
bool GetLastVideoSample(MEVideoTextueSample& sample) const override;
bool TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback) override;
bool Play() override;
bool Pause() override;

View File

@ -21,8 +21,6 @@
# include "ntcvt/ntcvt.hpp"
# include "yasio/detail/sz.hpp"
# include "base/CCConsole.h"
NS_AX_BEGIN
// IF_FAILED_GOTO macro.
@ -37,10 +35,18 @@ NS_AX_BEGIN
# define CHECK_HR(hr) IF_FAILED_GOTO(hr, done)
# define TRACE(format, ...) ax::print(format, ##__VA_ARGS__)
// const UINT WM_APP_PLAYER_EVENT = ::RegisterWindowMessageW(L"mfmedia-event");
// static MFOffset MakeOffset(float v)
//{
// // v = offset.value + (offset.fract / denominator), where denominator = 65536.0f.
// const int denominator = std::numeric_limits<WORD>::max() + 1;
// MFOffset offset;
// offset.value = short(v);
// offset.fract = WORD(denominator * (v - offset.value));
// return offset;
// }
//-------------------------------------------------------------------
// Name: CreateSourceStreamNode
// Description: Creates a source-stream node for a stream.
@ -176,7 +182,7 @@ private:
HRESULT WmfMediaEngine::CreateInstance(WmfMediaEngine** ppPlayer)
{
TRACE((L"WmfMediaEngine::Create\n"));
AXME_TRACE("WmfMediaEngine::Create\n");
if (ppPlayer == NULL)
{
@ -207,7 +213,8 @@ HRESULT WmfMediaEngine::CreateInstance(WmfMediaEngine** ppPlayer)
// WmfMediaEngine constructor
/////////////////////////////////////////////////////////////////////////
WmfMediaEngine::WmfMediaEngine() : m_pSession(), m_pSource(), m_hwndEvent(nullptr), m_hCloseEvent(NULL), m_nRefCount(1)
WmfMediaEngine::WmfMediaEngine()
: m_nRefCount(1)
{}
///////////////////////////////////////////////////////////////////////
@ -231,6 +238,8 @@ WmfMediaEngine::~WmfMediaEngine()
// CreateInstance has failed. Also, calling Shutdown() twice is
// harmless.
ClearPendingFrames();
Shutdown();
}
@ -252,6 +261,12 @@ HRESULT WmfMediaEngine::Initialize()
// Start up Media Foundation platform.
CHECK_HR(hr = MFUtils::InitializeMFOnce());
m_hOpenEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
if (m_hOpenEvent == NULL)
{
CHECK_HR(hr = HRESULT_FROM_WIN32(GetLastError()));
}
m_hCloseEvent = CreateEvent(NULL, FALSE, FALSE, NULL);
if (m_hCloseEvent == NULL)
{
@ -320,11 +335,13 @@ HRESULT WmfMediaEngine::QueryInterface(REFIID iid, void** ppv)
bool WmfMediaEngine::Open(std::string_view sourceUri)
{
auto wsourceUri = ntcvt::from_chars(sourceUri);
Close();
auto sURL = wsourceUri.c_str();
TRACE("WmfMediaEngine::OpenURL\n");
TRACE("URL = %s\n", sURL);
if (sourceUri.empty())
return false;
AXME_TRACE("WmfMediaEngine::OpenURL\n");
AXME_TRACE("URL = %s\n", sourceUri.data());
// 1. Create a new media session.
// 2. Create the media source.
@ -332,63 +349,82 @@ bool WmfMediaEngine::Open(std::string_view sourceUri)
// 4. Queue the topology [asynchronous]
// 5. Start playback [asynchronous - does not happen in this method.]
HRESULT hr = S_OK;
TComPtr<IMFTopology> pTopology;
TComPtr<IMFClock> pClock;
// Create the media session.
CHECK_HR(hr = CreateSession());
// Create the media source.
CHECK_HR(hr = CreateMediaSource(sURL));
// Create a partial topology.
CHECK_HR(hr = CreateTopologyFromSource(&pTopology));
// Set the topology on the media session.
CHECK_HR(hr = m_pSession->SetTopology(0, pTopology.Get()));
// If SetTopology succeeded, the media session will queue an
// MESessionTopologySet event.
// ======> Read media properties
// Get the session capabilities.
CHECK_HR(hr = m_pSession->GetSessionCapabilities(&m_caps));
// Get the duration from the presentation descriptor (optional)
(void)m_PresentDescriptor->GetUINT64(MF_PD_DURATION, (UINT64*)&m_hnsDuration);
// Get the presentation clock (optional)
hr = m_pSession->GetClock(&pClock);
if (SUCCEEDED(hr))
CHECK_HR(hr = pClock->QueryInterface(IID_PPV_ARGS(&m_pClock)));
// Get the rate control interface (optional)
CHECK_HR(hr = MFGetService(m_pSession.Get(), MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_RateControl)));
CHECK_HR(hr = MFGetService(m_pSession.Get(), MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_RateSupport)));
// Check if rate 0 (scrubbing) is supported.
if (SUCCEEDED(m_RateSupport->IsRateSupported(TRUE, 0, NULL)))
m_bCanScrub = TRUE;
// if m_pRate is NULL, m_bCanScrub must be FALSE.
assert(m_RateControl || !m_bCanScrub);
if (FAILED(CreateSession()))
return false;
// Set our state to "open pending"
m_state = MEMediaState::Preparing;
done:
if (FAILED(hr))
m_state = MEMediaState::Closed;
TComPtr<IUnknown> sharedFromThis;
this->QueryInterface(IID_IUnknown, &sharedFromThis);
// SAFE_RELEASE(pTopology);
m_bOpenPending = true;
std::thread t([this, sharedFromThis, wsourceUri = ntcvt::from_chars(sourceUri)] {
TComPtr<IMFTopology> pTopology;
TComPtr<IMFClock> pClock;
return SUCCEEDED(hr);
try
{
// Create the media source.
DX::ThrowIfFailed(CreateMediaSource(wsourceUri.c_str()));
if (!m_pSession)
DX::ThrowIfFailed(E_POINTER);
// Create a partial topology.
DX::ThrowIfFailed(CreateTopologyFromSource(&pTopology));
// Set the topology on the media session.
DX::ThrowIfFailed(m_pSession->SetTopology(0, pTopology.Get()));
// If SetTopology succeeded, the media session will queue an
// MESessionTopologySet event.
// ======> Read media properties
// Get the session capabilities.
DX::ThrowIfFailed(m_pSession->GetSessionCapabilities(&m_caps));
// Get the duration from the presentation descriptor (optional)
(void)m_PresentDescriptor->GetUINT64(MF_PD_DURATION, (UINT64*)&m_hnsDuration);
// Get the presentation clock (optional)
auto hr = m_pSession->GetClock(&pClock);
if (SUCCEEDED(hr))
DX::ThrowIfFailed(hr = pClock->QueryInterface(IID_PPV_ARGS(&m_pClock)));
// Get the rate control interface (optional)
DX::ThrowIfFailed(MFGetService(m_pSession.Get(), MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_RateControl)));
DX::ThrowIfFailed(MFGetService(m_pSession.Get(), MF_RATE_CONTROL_SERVICE, IID_PPV_ARGS(&m_RateSupport)));
// Check if rate 0 (scrubbing) is supported.
if (SUCCEEDED(m_RateSupport->IsRateSupported(TRUE, 0, NULL)))
m_bCanScrub = TRUE;
// if m_pRate is NULL, m_bCanScrub must be FALSE.
assert(m_RateControl || !m_bCanScrub);
}
catch (const std::exception& ex)
{
AXME_TRACE("Exception occurred when Open Media: %s", ex.what());
m_state = MEMediaState::Error;
}
m_bOpenPending = false;
SetEvent(m_hOpenEvent);
});
t.detach();
return false;
}
bool WmfMediaEngine::Close()
{
if (m_bOpenPending)
WaitForSingleObject(m_hOpenEvent, INFINITE);
ClearPendingFrames();
HRESULT hr = S_OK;
auto state = GetState();
if (state != MEMediaState::Closing && state != MEMediaState::Closed)
@ -462,50 +498,63 @@ done:
void WmfMediaEngine::HandleVideoSample(const uint8_t* buf, size_t len)
{
this->m_videoSampleDirty = true;
this->m_lastVideoFrame.assign(buf, buf + len, std::true_type{});
std::unique_lock<std::mutex> lck(m_framesQueueMtx);
m_framesQueue.emplace_back(buf, buf + len);
}
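// Note: HandleVideoSample is invoked from Media Foundation's sample-grabber (worker) thread,
// while TransferVideoFrame drains the queue on the render thread; m_framesQueueMtx guards the handoff.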
bool WmfMediaEngine::GetLastVideoSample(MEVideoTextueSample& sample) const
void WmfMediaEngine::ClearPendingFrames()
{
if (this->m_videoSampleDirty)
std::unique_lock<std::mutex> lck(m_framesQueueMtx);
m_framesQueue.clear();
}
bool WmfMediaEngine::TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback)
{
if (m_state != MEMediaState::Playing || m_framesQueue.empty())
return false;
std::unique_lock<std::mutex> lck(m_framesQueueMtx);
if (!m_framesQueue.empty())
{
this->m_videoSampleDirty = false;
sample._buffer.assign(this->m_lastVideoFrame);
auto buffer = std::move(m_framesQueue.front());
m_framesQueue.pop_front();
lck.unlock(); // unlock immediately before invoking the user callback (which may upload the buffer to the GPU)
switch (m_videoSampleFormat)
auto cbcrData =
(m_videoPF == MEVideoPixelFormat::NV12) ? buffer.data() + m_frameExtent.x * m_frameExtent.y : nullptr;
MEVideoFrame frame{buffer.data(), cbcrData, buffer.size(),
MEVideoPixelDesc{m_videoPF, MEIntPoint{m_frameExtent.x, m_frameExtent.y}}, m_videoExtent};
# if defined(_DEBUG)
switch (m_videoPF)
{
case MEVideoSampleFormat::YUY2:
sample._bufferDim.x = m_bIsH264 ? YASIO_SZ_ALIGN(m_videoExtent.x, 16) : m_videoExtent.x;
sample._bufferDim.y = m_videoExtent.y;
sample._stride = sample._bufferDim.x * 2;
case MEVideoPixelFormat::YUY2:
assert(m_frameExtent.x == (m_bIsH264 ? YASIO_SZ_ALIGN(m_frameExtent.x, 16) : m_frameExtent.x));
break;
case MEVideoSampleFormat::NV12:
sample._bufferDim.x = YASIO_SZ_ALIGN(m_videoExtent.x, 16);
sample._bufferDim.y = m_bIsH264 ? YASIO_SZ_ALIGN(m_videoExtent.y, 16) * 3 / 2 : m_videoExtent.y * 3 / 2;
sample._stride = sample._bufferDim.x;
case MEVideoPixelFormat::NV12:
{
// HEVC(H265) on Windows, both height width align 32
// refer to: https://community.intel.com/t5/Media-Intel-oneAPI-Video/32-byte-alignment-for-HEVC/m-p/1048275
auto& desc = frame._ycbcrDesc;
desc.YDim.x = YASIO_SZ_ALIGN(m_videoExtent.x, 32);
desc.YDim.y = m_bIsHEVC ? YASIO_SZ_ALIGN(m_videoExtent.y, 32) : m_videoExtent.y;
desc.CbCrDim.x = desc.YDim.x / 2;
desc.CbCrDim.y = desc.YDim.y / 2;
desc.YPitch = desc.YDim.x;
desc.CbCrPitch = desc.YPitch;
assert(frame._vpd._dim.x * frame._vpd._dim.y * 3 / 2 == static_cast<int>(frame._dataLen));
assert((desc.YPitch * desc.YDim.y + desc.CbCrPitch * desc.CbCrDim.y) == static_cast<int>(frame._dataLen));
break;
}
default:
assert(m_videoSampleFormat == MEVideoSampleFormat::RGB32 ||
m_videoSampleFormat == MEVideoSampleFormat::BGR32);
sample._bufferDim = m_videoExtent;
sample._stride = m_videoExtent.x * 4;
assert(m_videoPF == MEVideoPixelFormat::RGB32 || m_videoPF == MEVideoPixelFormat::BGR32);
}
# endif
// check data
callback(frame);
sample._mods = 0;
if (!sample._videoDim.equals(m_videoExtent))
{
sample._videoDim = m_videoExtent;
++sample._mods;
}
if (sample._format != m_videoSampleFormat)
{
sample._format = m_videoSampleFormat;
++sample._mods;
}
return true;
}
return false;
}
@ -548,7 +597,7 @@ HRESULT WmfMediaEngine::HandleEvent(IMFMediaEvent* pEvent)
// not succeed, the status is a failure code.
CHECK_HR(hr = pEvent->GetStatus(&hrStatus));
// TRACE("Media event: %s\n", EventName(meType));
// AXME_TRACE("Media event: %s\n", EventName(meType));
// Check if the async operation succeeded.
if (SUCCEEDED(hrStatus))
@ -630,7 +679,7 @@ done:
HRESULT WmfMediaEngine::Shutdown()
{
TRACE("WmfMediaEngine::ShutDown\n");
AXME_TRACE("WmfMediaEngine::ShutDown\n");
HRESULT hr = S_OK;
@ -640,7 +689,13 @@ HRESULT WmfMediaEngine::Shutdown()
if (m_hCloseEvent)
{
CloseHandle(m_hCloseEvent);
m_hCloseEvent = NULL;
m_hCloseEvent = nullptr;
}
if (m_hOpenEvent)
{
CloseHandle(m_hOpenEvent);
m_hOpenEvent = nullptr;
}
return hr;
@ -670,7 +725,17 @@ HRESULT WmfMediaEngine::Shutdown()
HRESULT WmfMediaEngine::OnTopologyReady(IMFMediaEvent* pEvent)
{
TRACE("WmfMediaEngine::OnTopologyReady\n");
AXME_TRACE("WmfMediaEngine::OnTopologyReady\n");
UINT32 w = 0, h = 0;
MFGetAttributeSize(m_videoInputType.Get(), MF_MT_FRAME_SIZE, &w, &h);
m_frameExtent.x = w;
m_frameExtent.y = h;
DWORD cx = 0, cy = 0;
GetNativeVideoSize(&cx, &cy);
m_videoExtent.x = cx;
m_videoExtent.y = cy;
if (m_bAutoPlay)
StartPlayback(nullptr);
@ -684,7 +749,7 @@ bool WmfMediaEngine::Play()
{
HRESULT hr = S_OK;
TRACE("WmfMediaEngine::Play\n");
AXME_TRACE("WmfMediaEngine::Play\n");
if (m_state != MEMediaState::Paused && m_state != MEMediaState::Stopped)
return false; // MF_E_INVALIDREQUEST
@ -1215,7 +1280,7 @@ done:
HRESULT WmfMediaEngine::OnPlayEnded(IMFMediaEvent* pEvent)
{
TRACE("WmfMediaEngine::OnPlayEnded\n");
AXME_TRACE("WmfMediaEngine::OnPlayEnded\n");
// The session puts itself into the stopped state automatically.
@ -1299,7 +1364,7 @@ HRESULT WmfMediaEngine::OnSessionEnded(HRESULT hrStatus)
HRESULT WmfMediaEngine::CreateSession()
{
TRACE("WmfMediaEngine::CreateSession\n");
AXME_TRACE("WmfMediaEngine::CreateSession\n");
HRESULT hr = S_OK;
@ -1367,7 +1432,7 @@ HRESULT WmfMediaEngine::CloseSession()
if (dwWaitResult == WAIT_TIMEOUT)
{
TRACE("CloseSession timed out!\n");
AXME_TRACE("CloseSession timed out!\n");
}
// Now there will be no more events from this session.
@ -1411,7 +1476,7 @@ done:
HRESULT WmfMediaEngine::CreateMediaSource(const WCHAR* sURL)
{
TRACE("WmfMediaEngine::CreateMediaSource\n");
AXME_TRACE("WmfMediaEngine::CreateMediaSource\n");
HRESULT hr = S_OK;
MF_OBJECT_TYPE ObjectType = MF_OBJECT_INVALID;
@ -1458,7 +1523,7 @@ done:
HRESULT WmfMediaEngine::CreateTopologyFromSource(IMFTopology** ppTopology)
{
TRACE("WmfMediaEngine::CreateTopologyFromSource\n");
AXME_TRACE("WmfMediaEngine::CreateTopologyFromSource\n");
assert(m_pSession != NULL);
assert(m_pSource != NULL);
@ -1477,7 +1542,7 @@ HRESULT WmfMediaEngine::CreateTopologyFromSource(IMFTopology** ppTopology)
// Get the number of streams in the media source.
CHECK_HR(hr = m_PresentDescriptor->GetStreamDescriptorCount(&cSourceStreams));
TRACE("Stream count: %d\n", cSourceStreams);
AXME_TRACE("Stream count: %d\n", cSourceStreams);
// For each stream, create the topology nodes and add them to the topology.
for (DWORD i = 0; i < cSourceStreams; ++i)
@ -1518,7 +1583,7 @@ HRESULT WmfMediaEngine::AddBranchToPartialTopology(IMFTopology* pTopology,
IMFPresentationDescriptor* pSourcePD,
DWORD iStream)
{
TRACE("WmfMediaEngine::AddBranchToPartialTopology\n");
AXME_TRACE("WmfMediaEngine::AddBranchToPartialTopology\n");
assert(pTopology != NULL);
@ -1598,21 +1663,18 @@ HRESULT WmfMediaEngine::CreateOutputNode(IMFStreamDescriptor* pSourceSD, IMFTopo
if (MFMediaType_Video == guidMajorType)
{
// Create the video renderer.
TRACE("Stream %d: video stream\n", streamID);
AXME_TRACE("Stream %d: video stream\n", streamID);
// CHECK_HR(hr = MFCreateVideoRendererActivate(hwndVideo, &pRendererActivate));
auto Sampler = MFUtils::MakeComPtr<MFVideoSampler>(this);
TComPtr<IMFMediaType> InputType;
CHECK_HR(hr = pHandler->GetCurrentMediaType(&InputType));
// Get video dim
CHECK_HR(hr = MFGetAttributeSize(InputType.Get(), MF_MT_FRAME_SIZE, (UINT32*)&m_videoExtent.x,
(UINT32*)&m_videoExtent.y));
auto Sampler = MFUtils::MakeComPtr<MFVideoSampler>(this);
TComPtr<IMFMediaType>& InputType = m_videoInputType;
CHECK_HR(hr = pHandler->GetCurrentMediaType(InputType.ReleaseAndGetAddressOf()));
// Create output type
GUID SubType;
CHECK_HR(hr = InputType->GetGUID(MF_MT_SUBTYPE, &SubType));
m_bIsH264 = SubType == MFVideoFormat_H264 || SubType == MFVideoFormat_H264_ES;
m_bIsHEVC = SubType == MFVideoFormat_HEVC || SubType == MFVideoFormat_HEVC_ES;
GUID VideoOutputFormat;
if ((SubType == MFVideoFormat_HEVC) || (SubType == MFVideoFormat_HEVC_ES) || (SubType == MFVideoFormat_NV12) ||
@ -1640,23 +1702,23 @@ HRESULT WmfMediaEngine::CreateOutputNode(IMFStreamDescriptor* pSourceSD, IMFTopo
m_VideoOutputFormat = VideoOutputFormat;
if (m_VideoOutputFormat == MFVideoFormat_YUY2)
m_videoSampleFormat = MEVideoSampleFormat::YUY2;
m_videoPF = MEVideoPixelFormat::YUY2;
else if (m_VideoOutputFormat == MFVideoFormat_NV12)
m_videoSampleFormat = MEVideoSampleFormat::NV12;
m_videoPF = MEVideoPixelFormat::NV12;
else if (m_VideoOutputFormat == MFVideoFormat_RGB32)
m_videoSampleFormat = MEVideoSampleFormat::RGB32;
m_videoPF = MEVideoPixelFormat::RGB32;
// To run as fast as possible, set this attribute (requires Windows 7):
// CHECK_HR(hr = pRendererActivate->SetUINT32(MF_SAMPLEGRABBERSINK_IGNORE_CLOCK, TRUE));
}
else if (MFMediaType_Audio == guidMajorType)
{
// Create the audio renderer.
TRACE("Stream %d: audio stream\n", streamID);
AXME_TRACE("Stream %d: audio stream\n", streamID);
CHECK_HR(hr = MFCreateAudioRendererActivate(&pRendererActivate));
}
else
{
TRACE("Stream %d: Unknown format\n", streamID);
AXME_TRACE("Stream %d: Unknown format\n", streamID);
CHECK_HR(hr = E_FAIL);
}
@ -1672,6 +1734,45 @@ done:
return hr;
}
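// Display-aperture fallback: pan-scan aperture (when enabled), then minimum display aperture,
// then geometric aperture, else the raw frame extent. E.g. (assumed) a 1912x1080 HEVC stream
// decoded into 1920x1088 frames reports 1912x1080 here while m_frameExtent keeps the padded size.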
HRESULT WmfMediaEngine::GetNativeVideoSize(DWORD* cx, DWORD* cy)
{
if (!m_videoInputType || !cx || !cy)
return E_POINTER;
HRESULT hr = S_OK;
UINT32 width = 0, height = 0;
MFVideoArea mfArea = {0};
do
{
BOOL bPanScan = MFGetAttributeUINT32(m_videoInputType.Get(), MF_MT_PAN_SCAN_ENABLED, FALSE);
if (bPanScan)
{
hr = m_videoInputType->GetBlob(MF_MT_PAN_SCAN_APERTURE, (UINT8*)&mfArea, sizeof(MFVideoArea), nullptr);
AX_BREAK_IF(SUCCEEDED(hr));
}
hr = m_videoInputType->GetBlob(MF_MT_MINIMUM_DISPLAY_APERTURE, (UINT8*)&mfArea, sizeof(MFVideoArea), nullptr);
AX_BREAK_IF(SUCCEEDED(hr));
hr = m_videoInputType->GetBlob(MF_MT_GEOMETRIC_APERTURE, (UINT8*)&mfArea, sizeof(MFVideoArea), nullptr);
} while (false);
if (SUCCEEDED(hr))
{
*cx = mfArea.Area.cx;
*cy = mfArea.Area.cy;
}
else // fallback to frame extent
{
*cx = m_frameExtent.x;
*cy = m_frameExtent.y;
}
return hr;
}
NS_AX_END
#endif

View File

@ -18,7 +18,7 @@
//////////////////////////////////////////////////////////////////////////
#if defined(_WIN32)
#include <winapifamily.h>
# include <winapifamily.h>
#endif
#if defined(WINAPI_FAMILY) && (WINAPI_FAMILY == WINAPI_FAMILY_DESKTOP_APP)
@ -49,6 +49,10 @@
# include "MediaEngine.h"
# include <atomic>
# include <mutex>
# include <deque>
# include "yasio/detail/byte_buffer.hpp"
NS_AX_BEGIN
@ -174,7 +178,9 @@ public:
bool Stop() override;
void HandleVideoSample(const uint8_t* buf, size_t len);
bool GetLastVideoSample(MEVideoTextueSample& sample) const override;
//bool GetLastVideoSample(MEVideoTextueSample& sample) const override;
bool TransferVideoFrame(std::function<void(const MEVideoFrame&)> callback) override;
void FireMediaEvent(MEMediaEventType event)
{
@ -190,6 +196,10 @@ protected:
HRESULT UpdatePendingCommands(Command cmd);
HRESULT GetNativeVideoSize(DWORD* cx, DWORD* cy);
void ClearPendingFrames();
protected:
// Destructor is private. Caller should call Release.
virtual ~WmfMediaEngine();
@ -223,6 +233,8 @@ protected:
TComPtr<IMFRateSupport> m_RateSupport;
TComPtr<IMFPresentationClock> m_pClock;
TComPtr<IMFMediaType> m_videoInputType;
DWORD m_caps = 0; // Session caps.
MFTIME m_hnsDuration = 0; // Duration of the current presentation.
BOOL m_bCanScrub = FALSE; // Does the current session support rate = 0.
@ -241,26 +253,31 @@ protected:
SeekState m_request{CmdNone, 1.0, FALSE, 0}; // Pending request state.
BOOL m_bPending = FALSE; // Is a request pending?
std::atomic<bool> m_bOpenPending = false;
mutable CritSec m_critsec; // Protects the seeking and rate-change states.
HWND m_hwndEvent; // App window to receive events.
MEMediaState m_state = MEMediaState::Closed; // Current state of the media session.
HANDLE m_hCloseEvent; // Event to wait on while closing
std::atomic<MEMediaState> m_state = MEMediaState::Closed; // Current state of the media session.
HANDLE m_hOpenEvent = nullptr; // Event to wait on while an async Open is pending
HANDLE m_hCloseEvent = nullptr; // Event to wait on while closing
MEIntPoint m_videoExtent;
MEIntPoint m_frameExtent; // may be the same as m_videoExtent
BOOL m_bLooping = FALSE;
BOOL m_bAutoPlay = TRUE;
BOOL m_bIsH264 = FALSE;
BOOL m_bIsHEVC = FALSE; // hvc1,hev1
GUID m_VideoOutputFormat{};
MEMediaEventCallback m_eventCallback;
MEVideoSampleFormat m_videoSampleFormat = MEVideoSampleFormat::NONE;
MEVideoPixelFormat m_videoPF = MEVideoPixelFormat::INVALID;
yasio::byte_buffer m_lastVideoFrame;
mutable bool m_videoSampleDirty = false;
mutable std::deque<yasio::byte_buffer> m_framesQueue;
mutable std::mutex m_framesQueueMtx;
};
struct WmfMediaEngineFactory : public MediaEngineFactory

View File

@ -1,76 +1,18 @@
#include <string_view>
/*
* refer to: https://github.com/doyoulikerock/D3D11NV12Rendering/blob/master/D3D11NV12Rendering/PixelShader.hlsl
// Converting 8-bit YUV to RGB888
static const float3x3 YUVtoRGBCoeffMatrix =
{
1.164383, 1.164383, 1.164383,
0.000000, -0.391762, 2.017232,
1.596027, -0.812968, 0.000000
};
*/
const std::string_view videoTextureNV12_frag = R"(
#ifdef GL_ES
varying lowp vec4 v_fragmentColor;
varying mediump vec2 v_texCoord;
#else
varying vec4 v_fragmentColor;
varying vec2 v_texCoord;
#endif
uniform sampler2D u_tex0; // Y sample
uniform sampler2D u_tex1; // UV sample
uniform vec2 uv_scale;
uniform float out_w;
const mat3 YUVtoRGBCoeff = mat3(
1.16438356, 1.16438356, 1.16438356,
0.00000000, -0.213237017, 2.11241937,
1.79265225, -0.533004045, 0.00000000
/* mat4 to mat3:
mat3 coeff = mat3(
colorTransform[0].x, colorTransform[0].y, colorTransform[0].z,
colorTransform[1].x, colorTransform[1].y, colorTransform[1].z,
colorTransform[2].x, colorTransform[2].y, colorTransform[2].z
);
const vec3 YUVOffset8bits = vec3(0.0627451017, 0.501960814, 0.501960814);
vec3 YuvToRgb(vec3 YUV)
{
YUV -= YUVOffset8bits;
return YUVtoRGBCoeff * YUV;
}
void main()
{
vec3 YUV;
/* For dual sampler */
//vec2 tXY = v_texCoord;
//YUV.x = texture2D(u_tex0, tXY).x;
//tXY.y += 0.015625; // why needs adjust 1.0/64 ?
//YUV.yz = texture2D(u_tex1, tXY).xw;
/* For single sampler */
vec2 tXY = v_texCoord * uv_scale;
YUV.x = texture2D(u_tex0, tXY).w;
tXY.y *= 0.5;
tXY.y += 2.0 / 3.0;
float UVOffs = floor(v_texCoord.x * out_w / 2.0) * 2.0;
float UPos = ((UVOffs * uv_scale.x) + 0.5) / out_w;
float VPos = ((UVOffs * uv_scale.x) + 1.5) / out_w;
YUV.y = texture2D(u_tex0, vec2(UPos, tXY.y)).w;
YUV.z = texture2D(u_tex0, vec2(VPos, tXY.y)).w;
/* Convert YUV to RGB */
vec4 OutColor;
OutColor.xyz = YuvToRgb(YUV);
OutColor.w = 1.0;
gl_FragColor = v_fragmentColor * OutColor;
}
)"sv;
mat3 coeff = mat3(
colorTransform[0].xyz,
colorTransform[1].xyz,
colorTransform[2].xyz
);
mat3 coeff = mat3(colorTransform); // requires GLES3
*/
// refer to:
// https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#yuy2
@ -86,49 +28,92 @@ varying vec2 v_texCoord;
uniform sampler2D u_tex0; // Y sample
uniform sampler2D u_tex1; // UV sample
uniform vec2 uv_scale;
uniform float out_w; // texture width
uniform mat4 colorTransform;
const mat3 YUVtoRGBCoeff = mat3(
1.16438356, 1.16438356, 1.16438356,
0.00000000, -0.213237017, 2.11241937,
1.79265225, -0.533004045, 0.00000000
);
const vec3 YUVOffset8bits = vec3(0.0627451017, 0.501960814, 0.501960814);
vec3 YuvToRgb(vec3 YUV)
vec3 transformYUV(vec3 YUV)
{
YUV -= YUVOffset8bits;
return YUVtoRGBCoeff * YUV;
YUV -= vec3(colorTransform[0].w, colorTransform[1].w, colorTransform[2].w);
return mat3(
colorTransform[0].xyz,
colorTransform[1].xyz,
colorTransform[2].xyz
) * YUV;
}
void main()
{
vec2 tXY = v_texCoord * uv_scale;
vec3 YUV;
/* For dual sampler */
YUV.yz = texture2D(u_tex1, tXY).yw;
YUV.x = texture2D(u_tex0, tXY).x;
YUV.yz = texture2D(u_tex1, v_texCoord).yw;
YUV.x = texture2D(u_tex0, v_texCoord).x;
/* For single sampler */
//YUV.yz = texture2D(u_tex0, tXY).xw;
//
//vec4 YUY2P = texture2D(u_tex0, tXY);
//float Pos = v_texCoord.x * out_w;
//YUV.x = floor(mod(Pos, 2.0)) == 0.0 ? YUY2P.z : YUY2P.x;
/* Convert YUV to RGB */
vec4 OutColor;
OutColor.xyz = YuvToRgb(YUV);
OutColor.xyz = transformYUV(YUV);
OutColor.w = 1.0;
gl_FragColor = v_fragmentColor * OutColor;
}
)"sv;
/*
The NV12 pixel format render shader:
- Windows:
- video format: HEVC(H265)
- support codec id: 'hev1', 'hvc1'
- Apple(macOS,iOS,tvOS):
- video format: H264, HEVC(H265)
- support codec id: 'hvc1'
- sub pixel formats:
- '420v'(kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange)
- '420f'(kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
Consider test videos:
- HEVC(H265): 1912x1080.mp4, 1920x1080.mp4, 1912x1080_hvc1.mp4
- H264: 1912x1080.mp4, 1920x1080.mp4, 1280x720.mp4, 432x240.mp4
refer to:
- https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12
- https://github.com/doyoulikerock/D3D11NV12Rendering/blob/master/D3D11NV12Rendering/PixelShader.hlsl
*/
const std::string_view videoTextureNV12_frag = R"(
#ifdef GL_ES
varying lowp vec4 v_fragmentColor;
varying mediump vec2 v_texCoord;
#else
varying vec4 v_fragmentColor;
varying vec2 v_texCoord;
#endif
uniform sampler2D u_tex0; // Y sample: LumaTexture
uniform sampler2D u_tex1; // UV sample: ChromaTexture
uniform mat4 colorTransform;
vec3 transformYUV(vec3 YUV)
{
YUV -= vec3(colorTransform[0].w, colorTransform[1].w, colorTransform[2].w);
return mat3(
colorTransform[0].xyz,
colorTransform[1].xyz,
colorTransform[2].xyz
) * YUV;
}
void main()
{
vec3 YUV;
YUV.x = texture2D(u_tex0, v_texCoord).w; // Y
YUV.yz = texture2D(u_tex1, v_texCoord).xy; // CbCr
/* Convert YUV to RGB */
vec4 OutColor;
OutColor.xyz = transformYUV(YUV);
OutColor.w = 1.0;
gl_FragColor = v_fragmentColor * OutColor;
}
)"sv;
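// Binding sketch (names as used by ui/UIVideoPlayer.cpp in this commit): pick the matching
// program once the pixel description changes, then upload the YUV->RGB matrix:
//   sprite->setProgramState(backend::ProgramType::VIDEO_TEXTURE_NV12);
//   PS_SET_UNIFORM_R(ps, "colorTransform", colorTransform); // mat4 consumed by transformYUV()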
const std::string_view videoTextureBGRA_frag = R"(
#ifdef GL_ES

View File

@ -22,16 +22,15 @@ if(WINDOWS)
if (AX_ENABLE_MFMEDIA)
set(_AX_UI_SPECIFIC_HEADER ${_AX_UI_SPECIFIC_HEADER}
ui/UIVideoPlayer/UIVideoPlayer.h
ui/UIVideoPlayer.h
)
set(_AX_UI_SPECIFIC_SRC ${_AX_UI_SPECIFIC_SRC}
ui/UIVideoPlayer/UIVideoPlayer.cpp
ui/UIVideoPlayer.cpp
)
endif()
elseif(APPLE)
if(MACOSX)
set(_AX_UI_SPECIFIC_HEADER
#ui/UIVideoPlayer/UIVideoPlayer.h
ui/UIEditBox/UIEditBoxImpl-mac.h
ui/UIEditBox/Mac/CCUIPasswordTextField.h
ui/UIEditBox/Mac/CCUIMultilineTextField.h
@ -41,7 +40,6 @@ elseif(APPLE)
ui/UIEditBox/Mac/CCUITextFieldFormatter.h
)
set(_AX_UI_SPECIFIC_SRC
#ui/UIVideoPlayer/UIVideoPlayer.cpp
ui/UIEditBox/UIEditBoxImpl-mac.mm
ui/UIEditBox/Mac/CCUIEditBoxMac.mm
ui/UIEditBox/Mac/CCUIMultilineTextField.m
@ -52,7 +50,6 @@ elseif(APPLE)
elseif(IOS)
if (TVOS)
set(_AX_UI_SPECIFIC_HEADER
ui/UIVideoPlayer/UIVideoPlayer.h
ui/UIEditBox/UIEditBoxImpl-ios.h
ui/UIEditBox/iOS/CCUIEditBoxIOS.h
ui/UIEditBox/iOS/CCUIMultilineTextField.h
@ -62,7 +59,6 @@ elseif(APPLE)
ui/UIEditBox/iOS/CCUISingleLineTextField.h
)
set(_AX_UI_SPECIFIC_SRC
ui/UIVideoPlayer/UIVideoPlayer-ios.mm
ui/UIEditBox/UIEditBoxImpl-ios.mm
ui/UIEditBox/iOS/CCUIEditBoxIOS.mm
ui/UIEditBox/iOS/CCUIMultilineTextField.mm
@ -73,7 +69,6 @@ elseif(APPLE)
else()
set(_AX_UI_SPECIFIC_HEADER
ui/UIWebView/UIWebView.h
ui/UIVideoPlayer/UIVideoPlayer.h
ui/UIWebView/UIWebViewImpl-ios.h
ui/UIEditBox/UIEditBoxImpl-ios.h
ui/UIEditBox/iOS/CCUIEditBoxIOS.h
@ -85,7 +80,7 @@ elseif(APPLE)
)
set(_AX_UI_SPECIFIC_SRC
ui/UIWebView/UIWebView.mm
ui/UIVideoPlayer/UIVideoPlayer-ios.mm
ui/UIVideoPlayer-ios.mm
ui/UIWebView/UIWebViewImpl-ios.mm
ui/UIEditBox/UIEditBoxImpl-ios.mm
ui/UIEditBox/iOS/CCUIEditBoxIOS.mm
@ -96,6 +91,8 @@ elseif(APPLE)
)
endif()
endif()
set(_AX_UI_SPECIFIC_HEADER ui/UIVideoPlayer.h ${_AX_UI_SPECIFIC_HEADER})
set(_AX_UI_SPECIFIC_SRC ui/UIVideoPlayer.cpp ${_AX_UI_SPECIFIC_SRC})
elseif(LINUX)
set(_AX_UI_SPECIFIC_HEADER
ui/UIEditBox/UIEditBoxImpl-linux.h
@ -106,13 +103,13 @@ elseif(LINUX)
elseif(ANDROID)
set(_AX_UI_SPECIFIC_HEADER
ui/UIWebView/UIWebView.h
ui/UIVideoPlayer/UIVideoPlayer.h
ui/UIVideoPlayer.h
ui/UIWebView/UIWebViewImpl-android.h
ui/UIEditBox/UIEditBoxImpl-android.h
)
set(_AX_UI_SPECIFIC_SRC
ui/UIEditBox/UIEditBoxImpl-android.cpp
ui/UIVideoPlayer/UIVideoPlayer-android.cpp
ui/UIVideoPlayer-android.cpp
ui/UIWebView/UIWebViewImpl-android.cpp
# it's special for android, not a common file
ui/UIWebView/UIWebView.cpp

View File

@ -48,7 +48,7 @@ THE SOFTWARE.
#include "ui/UIVBox.h"
#include "ui/UIRelativeBox.h"
#if !defined(_WIN32) || defined(AX_ENABLE_MFMEDIA)
# include "ui/UIVideoPlayer/UIVideoPlayer.h"
# include "ui/UIVideoPlayer.h"
#endif
#if !defined(_WIN32) || defined(AX_ENABLE_MSEDGE_WEBVIEW2)
# include "ui/UIWebView/UIWebView.h"

View File

@ -24,7 +24,7 @@
THE SOFTWARE.
****************************************************************************/
#include "ui/UIVideoPlayer/UIVideoPlayer.h"
#include "ui/UIVideoPlayer.h"
#if (AX_TARGET_PLATFORM == AX_PLATFORM_ANDROID)
# include <unordered_map>

View File

@ -24,7 +24,7 @@
THE SOFTWARE.
****************************************************************************/
#include "ui/UIVideoPlayer/UIVideoPlayer.h"
#include "ui/UIVideoPlayer.h"
// No Available on tvOS
#if AX_TARGET_PLATFORM == AX_PLATFORM_IOS && !defined(AX_TARGET_OS_TVOS)

View File

@ -24,9 +24,10 @@
THE SOFTWARE.
****************************************************************************/
#include "ui/UIVideoPlayer/UIVideoPlayer.h"
#include "ui/UIVideoPlayer.h"
#if defined(_WIN32) || defined(__APPLE__)
// Now the common implementation based on the redesigned MediaEngine is enabled for Windows, macOS and tvOS
#if defined(_WIN32) || AX_TARGET_PLATFORM == AX_PLATFORM_MAC || AX_TARGET_PLATFORM == AX_TARGET_OS_TVOS
# include <unordered_map>
# include <stdlib.h>
# include <string>
@ -52,17 +53,25 @@ USING_NS_AX;
(ps)->setUniform(__loc, &__v, sizeof(__v)); \
} while (false)
# define PS_SET_UNIFORM_R(ps, name, value) \
do \
{ \
auto __loc = (ps)->getUniformLocation(name); \
(ps)->setUniform(__loc, &value, sizeof(value)); \
} while (false)
using namespace ax::ui;
namespace
{
struct PrivateVideoDescriptor
{
MediaEngine* _vplayer = nullptr;
Texture2D* _vtexture = nullptr;
Sprite* _vrender = nullptr;
MediaEngine* _vplayer = nullptr;
Texture2D* _vtexture = nullptr;
Texture2D* _vchromaTexture = nullptr;
Sprite* _vrender = nullptr;
MEVideoTextueSample _vsample;
MEVideoPixelDesc _vpixelDesc;
Vec2 _originalViewSize;
@ -88,13 +97,9 @@ struct PrivateVideoDescriptor
}
else
{
const Vec2 originalScale{static_cast<float>(_vsample._videoDim.x) / _vtexture->getPixelsWide(),
static_cast<float>(_vsample._videoDim.y) / _vtexture->getPixelsHigh()};
const auto aspectRatio = (std::min)(viewSize.x / videoSize.x, viewSize.y / (videoSize.y));
const auto aspectRatio =
(std::min)(viewSize.x / videoSize.x, viewSize.y / (videoSize.y * originalScale.y));
_vrender->setScale(originalScale.x * aspectRatio, originalScale.y * aspectRatio);
_vrender->setScale(aspectRatio);
}
LayoutHelper::centerNode(_vrender);
@ -107,6 +112,26 @@ struct PrivateVideoDescriptor
_scaleDirty = false;
}
static void updateColorTransform(backend::ProgramState* ps, bool bFullColorRange)
{
// clang-format off
// 1.16438356 ~= 255/219.0
const Mat4 colorTransform = bFullColorRange ? Mat4{ // 709Scaled
1.16438356f, 0.00000000f, 1.79265225f, 0.0f,
1.16438356f, -0.213237017f, -0.533004045f, 0.0f,
1.16438356f, 2.11241937f, 0.00000000f, 0.0f,
0.0627451017f, 0.501960814f, 0.501960814f, 0.0f // YUVOffset8Bits: 16/255.0f, 128/255.0f, 128/255.0f
} : Mat4 { // 709Unscaled
1.000000f, 0.0000000f, 1.57472198f, 0.0f,
1.000000f, -0.187314089f, -0.46820747f, 0.0f,
1.000000f, 1.85561536f, 0.0000000f, 0.0f,
0.0627451f, 0.5019608f, 0.50196081f, 0.0f
};
// clang-format on
PS_SET_UNIFORM_R(ps, "colorTransform", colorTransform);
}
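    // A sketch of how the companion fragment shader (videoTexture.frag) is
    // assumed to consume this uniform: the Y'CbCr offsets packed into the 4th
    // row are subtracted from the sampled value before the 3x3 part applies,
    // i.e. roughly
    //   vec3 yuv = vec3(y, cb, cr) - colorTransform[3].xyz;
    //   vec3 rgb = mat3(colorTransform) * yuv;
    // 1.16438356 ~= 255/219 expands limited-range luma (16..235) to full range.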
};
} // namespace
@ -189,10 +214,9 @@ VideoPlayer::~VideoPlayer()
if (pvd->_vplayer)
_meFactory->DestroyMediaEngine(pvd->_vplayer);
if (pvd->_vrender)
pvd->_vrender->release();
if (pvd->_vtexture)
pvd->_vtexture->release();
AX_SAFE_RELEASE(pvd->_vrender);
AX_SAFE_RELEASE(pvd->_vtexture);
AX_SAFE_RELEASE(pvd->_vchromaTexture);
delete pvd;
}
@ -248,87 +272,92 @@ void VideoPlayer::draw(Renderer* renderer, const Mat4& transform, uint32_t flags
if (!vrender || !vplayer)
return;
if (vrender->isVisible() && isPlaying() && vplayer->GetLastVideoSample(pvd->_vsample))
{
auto& vsample = pvd->_vsample;
auto sampleFormat = vsample._format;
if (vrender->isVisible() && isPlaying())
{ // render one video sample if available
uint8_t* sampleData = vsample._buffer.data();
size_t sampleDataLen = vsample._buffer.size();
if (vsample._mods)
{
if (pvd->_vtexture)
pvd->_vtexture->release();
pvd->_vtexture = new Texture2D();
auto programManager = ProgramManager::getInstance();
switch (sampleFormat)
vplayer->TransferVideoFrame([this, pvd](const ax::MEVideoFrame& frame){
auto pixelFormat = frame._vpd._PF;
auto bPixelDescChanged = !frame._vpd.equals(pvd->_vpixelDesc);
if (bPixelDescChanged)
{
case MEVideoSampleFormat::YUY2:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_YUY2);
break;
case MEVideoSampleFormat::NV12:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_NV12);
break;
case MEVideoSampleFormat::BGR32:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_BGR32);
break;
default:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_RGB32);
}
}
pvd->_vpixelDesc = frame._vpd;
Vec2 uvScale{1.0f, 1.0f};
auto& videoDim = vsample._videoDim;
auto& bufferDim = vsample._bufferDim;
switch (sampleFormat)
{
case MEVideoSampleFormat::NV12:
{
/* For single sampler */
// int texelWidth = YASIO_SZ_ALIGN(rWidth, 16);
// int texelHeight = pvd->_vplayer->IsH264() ? YASIO_SZ_ALIGN(rHeight, 16) * 3 / 2 : rHeight * 3 / 2;
uvScale.x = videoDim.x / (float)bufferDim.x;
uvScale.y = videoDim.y / (float)bufferDim.y;
pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::A8, PixelFormat::A8, bufferDim.x,
bufferDim.y, false);
break;
}
case MEVideoSampleFormat::YUY2:
{
// int texelWidth = pvd->_vplayer->IsH264() ? (YASIO_SZ_ALIGN(rWidth, 16)) : (rWidth);
uvScale.x = (float)videoDim.x / bufferDim.x;
AX_SAFE_RELEASE(pvd->_vtexture);
pvd->_vtexture = new Texture2D(); // default sampler filter is LINEAR
/* For dual sampler */
pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::LA8, PixelFormat::LA8, bufferDim.x,
bufferDim.y, false, 0);
pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8,
bufferDim.x >> 1, bufferDim.y, false, 1);
break;
}
case MEVideoSampleFormat::RGB32:
case MEVideoSampleFormat::BGR32:
pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8,
bufferDim.x, bufferDim.y, false, 0);
break;
default:;
}
if (vsample._mods)
{
pvd->_vrender->setTexture(pvd->_vtexture);
pvd->_vrender->setTextureRect(ax::Rect{Vec2::ZERO, pvd->_vtexture->getContentSize()});
AX_SAFE_RELEASE_NULL(pvd->_vchromaTexture);
if (pixelFormat >= MEVideoPixelFormat::YUY2)
{ // a separate chroma texture lets us set a different sampler filter
pvd->_vchromaTexture = new Texture2D(); // Sampler Filter: NEAREST
pvd->_vchromaTexture->setAliasTexParameters();
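// NEAREST filtering keeps packed chroma texels intact; for the YUY2 view
// in particular, LINEAR would blend neighbouring macropixels and bleed color.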
}
if (sampleFormat == MEVideoSampleFormat::NV12 || sampleFormat == MEVideoSampleFormat::YUY2)
{
auto ps = pvd->_vrender->getProgramState();
PS_SET_UNIFORM(ps, "out_w", (float)videoDim.x);
PS_SET_UNIFORM(ps, "uv_scale", uvScale);
auto programManager = ProgramManager::getInstance();
switch (pixelFormat)
{
case MEVideoPixelFormat::YUY2:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_YUY2);
break;
case MEVideoPixelFormat::NV12:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_NV12);
break;
case MEVideoPixelFormat::BGR32:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_BGR32);
break;
default:
pvd->_vrender->setProgramState(backend::ProgramType::VIDEO_TEXTURE_RGB32);
}
}
pvd->_scaleDirty = true;
}
auto& bufferDim = frame._vpd._dim;
switch (pixelFormat)
{
case MEVideoPixelFormat::YUY2:
{
pvd->_vtexture->updateWithData(frame._dataPointer, frame._dataLen, PixelFormat::LA8,
PixelFormat::LA8,
bufferDim.x, bufferDim.y, false, 0);
pvd->_vchromaTexture->updateWithData(frame._dataPointer, frame._dataLen, PixelFormat::RGBA8,
PixelFormat::RGBA8,
bufferDim.x >> 1, bufferDim.y, false, 0);
break;
}
case MEVideoPixelFormat::NV12:
{
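// NV12 layout: a full-resolution luma (Y) plane of bufferDim.x * bufferDim.y
// bytes, followed by an interleaved CbCr plane at half resolution on both
// axes, i.e. (bufferDim.x / 2) * (bufferDim.y / 2) * 2 =
// (bufferDim.x * bufferDim.y) / 2 bytes; hence the sizes and dimensions
// passed below (A8 for Y, RG8 for CbCr).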
pvd->_vtexture->updateWithData(frame._dataPointer, bufferDim.x * bufferDim.y, PixelFormat::A8,
PixelFormat::A8, bufferDim.x, bufferDim.y, false, 0);
pvd->_vchromaTexture->updateWithData(frame._cbcrDataPointer, (bufferDim.x * bufferDim.y) >> 1,
PixelFormat::RG8,
PixelFormat::RG8, bufferDim.x >> 1, bufferDim.y >> 1, false, 0);
break;
}
case MEVideoPixelFormat::RGB32:
case MEVideoPixelFormat::BGR32:
pvd->_vtexture->updateWithData(frame._dataPointer, frame._dataLen, PixelFormat::RGBA8,
PixelFormat::RGBA8, bufferDim.x, bufferDim.y, false, 0);
break;
default:;
}
if (bPixelDescChanged)
{
pvd->_vrender->setTexture(pvd->_vtexture);
pvd->_vrender->setTextureRect(ax::Rect{Vec2::ZERO, Vec2{
frame._videoDim.x / AX_CONTENT_SCALE_FACTOR(),
frame._videoDim.y / AX_CONTENT_SCALE_FACTOR(),
}});
if (pixelFormat >= MEVideoPixelFormat::YUY2)
{
auto ps = pvd->_vrender->getProgramState();
PrivateVideoDescriptor::updateColorTransform(ps, frame._vpd._fullRange);
ps->setTexture(ps->getUniformLocation("u_tex1"), 1, pvd->_vchromaTexture->getBackendTexture());
}
pvd->_scaleDirty = true;
}
});
}
if (pvd->_scaleDirty || (flags & FLAGS_TRANSFORM_DIRTY))
pvd->rescaleTo(this);
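// As a usage reference, a minimal sketch of pulling frames through the new
// callback-based API from application code. pollVideoFrame is a hypothetical
// helper; MediaEngine and MEVideoFrame are the types declared in
// media/MediaEngine.h by this commit (MEIntPoint is assumed to carry int x/y).
#include <cstdio>
#include "media/MediaEngine.h"

static void pollVideoFrame(ax::MediaEngine* engine)
{
    engine->TransferVideoFrame([](const ax::MEVideoFrame& frame) {
        // _vpd._dim is the decoded buffer (texel) size; _videoDim is the
        // displayable size; they differ when the decoder pads for alignment.
        std::printf("frame %dx%d (buffer %dx%d)\n", frame._videoDim.x,
                    frame._videoDim.y, frame._vpd._dim.x, frame._vpd._dim.y);
    });
}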

View File

@ -367,7 +367,7 @@ list(APPEND GAME_SOURCE
Source/ZipTest/ZipTests.cpp
)
if(ANDROID OR IOS OR (WINDOWS AND AX_ENABLE_MFMEDIA))
if(ANDROID OR MACOSX OR IOS OR (WINDOWS AND AX_ENABLE_MFMEDIA))
list(APPEND GAME_HEADER
Source/UITest/CocoStudioGUITest/UIVideoPlayerTest/UIVideoPlayerTest.h)
list(APPEND GAME_SOURCE

View File

@ -44,8 +44,7 @@
#include "UIFocusTest/UIFocusTest.h"
#include "UITabControlTest/UITabControlTest.h"
#if (AX_TARGET_PLATFORM == AX_PLATFORM_ANDROID || AX_TARGET_PLATFORM == AX_PLATFORM_IOS || \
defined(_WIN32)) && !defined(AX_TARGET_OS_TVOS)
#if AX_TARGET_PLATFORM != AX_PLATFORM_LINUX
# include "UIVideoPlayerTest/UIVideoPlayerTest.h"
#endif
@ -63,8 +62,7 @@
GUIDynamicCreateTests::GUIDynamicCreateTests()
{
#if (AX_TARGET_PLATFORM == AX_PLATFORM_ANDROID || AX_TARGET_PLATFORM == AX_PLATFORM_IOS || \
defined(_WIN32)) && !defined(AX_TARGET_OS_TVOS)
#if AX_TARGET_PLATFORM != AX_PLATFORM_LINUX
addTest("VideoPlayer Test", []() { return new VideoPlayerTests; });
#endif
#if (AX_TARGET_PLATFORM == AX_PLATFORM_ANDROID || AX_TARGET_PLATFORM == AX_PLATFORM_IOS) && \

View File

@ -28,6 +28,15 @@
USING_NS_AX;
using namespace ax::ui;
static MenuItemFont* createMenuFontWithColor(std::string_view title,
ax::ccMenuCallback&& cb,
const Color3B& color = Color3B::RED)
{
auto menuFont = ax::MenuItemFont::create(title, cb);
menuFont->setColor(color);
return menuFont;
}
VideoPlayerTests::VideoPlayerTests()
{
ADD_TEST_CASE(VideoPlayerTest);
@ -46,49 +55,49 @@ bool VideoPlayerTest::init()
MenuItemFont::setFontSize(16);
auto fullSwitch =
MenuItemFont::create("FullScreenSwitch", AX_CALLBACK_1(VideoPlayerTest::menuFullScreenCallback, this));
createMenuFontWithColor("FullScreenSwitch", AX_CALLBACK_1(VideoPlayerTest::menuFullScreenCallback, this));
fullSwitch->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
fullSwitch->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 50));
auto pauseItem = MenuItemFont::create("Pause", AX_CALLBACK_1(VideoPlayerTest::menuPauseCallback, this));
auto pauseItem = createMenuFontWithColor("Pause", AX_CALLBACK_1(VideoPlayerTest::menuPauseCallback, this));
pauseItem->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
pauseItem->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 100));
auto resumeItem = MenuItemFont::create("Resume", AX_CALLBACK_1(VideoPlayerTest::menuResumeCallback, this));
auto resumeItem = createMenuFontWithColor("Resume", AX_CALLBACK_1(VideoPlayerTest::menuResumeCallback, this));
resumeItem->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
resumeItem->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 150));
auto stopItem = MenuItemFont::create("Stop", AX_CALLBACK_1(VideoPlayerTest::menuStopCallback, this));
auto stopItem = createMenuFontWithColor("Stop", AX_CALLBACK_1(VideoPlayerTest::menuStopCallback, this));
stopItem->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
stopItem->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 200));
auto hintItem = MenuItemFont::create("Hint", AX_CALLBACK_1(VideoPlayerTest::menuHintCallback, this));
auto hintItem = createMenuFontWithColor("Hint", AX_CALLBACK_1(VideoPlayerTest::menuHintCallback, this));
hintItem->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
hintItem->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 250));
//-------------------------------------------------------------------------------------------------------------------
auto resourceVideo =
MenuItemFont::create("Play resource video", AX_CALLBACK_1(VideoPlayerTest::menuResourceVideoCallback, this));
createMenuFontWithColor("Play resource video", AX_CALLBACK_1(VideoPlayerTest::menuResourceVideoCallback, this));
resourceVideo->setAnchorPoint(Vec2::ANCHOR_MIDDLE_RIGHT);
resourceVideo->setPosition(Vec2(_visibleRect.origin.x + _visibleRect.size.width - 10, _visibleRect.origin.y + 50));
auto onlineVideo =
MenuItemFont::create("Play online video", AX_CALLBACK_1(VideoPlayerTest::menuOnlineVideoCallback, this));
createMenuFontWithColor("Play online video", AX_CALLBACK_1(VideoPlayerTest::menuOnlineVideoCallback, this));
onlineVideo->setAnchorPoint(Vec2::ANCHOR_MIDDLE_RIGHT);
onlineVideo->setPosition(Vec2(_visibleRect.origin.x + _visibleRect.size.width - 10, _visibleRect.origin.y + 100));
auto ratioSwitch = MenuItemFont::create("KeepRatioSwitch", AX_CALLBACK_1(VideoPlayerTest::menuRatioCallback, this));
auto ratioSwitch = createMenuFontWithColor("KeepRatioSwitch", AX_CALLBACK_1(VideoPlayerTest::menuRatioCallback, this));
ratioSwitch->setAnchorPoint(Vec2::ANCHOR_MIDDLE_RIGHT);
ratioSwitch->setPosition(Vec2(_visibleRect.origin.x + _visibleRect.size.width - 10, _visibleRect.origin.y + 150));
auto loopToggle = MenuItemFont::create("LoopToogle", AX_CALLBACK_1(VideoPlayerTest::menuLoopCallback, this));
auto loopToggle = createMenuFontWithColor("LoopToggle", AX_CALLBACK_1(VideoPlayerTest::menuLoopCallback, this));
loopToggle->setAnchorPoint(Vec2::ANCHOR_MIDDLE_RIGHT);
loopToggle->setPosition(Vec2(_visibleRect.origin.x + _visibleRect.size.width - 10, _visibleRect.origin.y + 170));
auto menu = Menu::create(resourceVideo, onlineVideo, ratioSwitch, loopToggle, fullSwitch, pauseItem, resumeItem,
stopItem, hintItem, nullptr);
menu->setPosition(Vec2::ZERO);
_uiLayer->addChild(menu);
_uiLayer->addChild(menu, 1);
_videoStateLabel = Label::createWithSystemFont("IDLE", "Arial", 16);
_videoStateLabel->setAnchorPoint(Vec2::ANCHOR_MIDDLE_RIGHT);
@ -154,7 +163,11 @@ void VideoPlayerTest::menuResourceVideoCallback(Ref* sender)
{
if (_videoPlayer)
{
_videoPlayer->setFileName("SampleVideo.mp4");
#if defined(__APPLE__)
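        // AVFoundation only accepts HEVC tagged 'hvc1' (parameter sets in the
        // sample description); 'hev1'-tagged streams won't play, hence the
        // per-platform asset choice here.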
_videoPlayer->setFileName("video/h265/1912x1080_hvc1.mp4");
#else
_videoPlayer->setFileName("video/h265/1912x1080_hev1.mp4");
#endif
_videoPlayer->play();
}
}
@ -163,7 +176,8 @@ void VideoPlayerTest::menuOnlineVideoCallback(Ref* sender)
{
if (_videoPlayer)
{
_videoPlayer->setURL("http://distribution.bbb3d.renderfarming.net/video/mp4/bbb_sunflower_1080p_30fps_normal.mp4");
_videoPlayer->setURL(
"http://distribution.bbb3d.renderfarming.net/video/mp4/bbb_sunflower_1080p_30fps_normal.mp4");
_videoPlayer->play();
}
}
@ -335,12 +349,12 @@ bool SimpleVideoPlayerTest::init()
MenuItemFont::setFontSize(16);
_switchStyle =
MenuItemFont::create("Switch Style", AX_CALLBACK_1(SimpleVideoPlayerTest::switchStyleCallback, this));
createMenuFontWithColor("Switch Style", AX_CALLBACK_1(SimpleVideoPlayerTest::switchStyleCallback, this));
_switchStyle->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
_switchStyle->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 50));
_switchUserInputEnabled =
MenuItemFont::create("Enable User Input", AX_CALLBACK_1(SimpleVideoPlayerTest::switchUserInputCallback, this));
createMenuFontWithColor("Enable User Input", AX_CALLBACK_1(SimpleVideoPlayerTest::switchUserInputCallback, this));
_switchUserInputEnabled->setAnchorPoint(Vec2::ANCHOR_BOTTOM_LEFT);
_switchUserInputEnabled->setPosition(Vec2(_visibleRect.origin.x + 10, _visibleRect.origin.y + 100));
@ -430,11 +444,10 @@ void SimpleVideoPlayerTest::createVideo()
_videoPlayer->setLooping(true);
_videoPlayer->setStyle(_style);
_videoPlayer->setUserInputEnabled(_userInputEnabled);
_videoPlayer->setKeepAspectRatioEnabled(true);
_uiLayer->addChild(_videoPlayer);
// _videoPlayer->addEventListener(AX_CALLBACK_2(SimpleVideoPlayerTest::videoEventCallback, this));
_videoPlayer->setFileName("SampleVideo.mp4");
_videoPlayer->setFileName("video/h264/1920x1080.mp4");
_videoPlayer->play();
}

View File

@ -94,7 +94,7 @@ local function VideoPlayerTest()
local function menuResourceVideoCallback(tag, sender)
if nil ~= videoPlayer then
print('start play video')
local videoFullPath = cc.FileUtils:getInstance():fullPathForFilename("SampleVideo2.mp4")
local videoFullPath = cc.FileUtils:getInstance():fullPathForFilename("video/h264/1280x720.mp4")
videoPlayer:setFileName(videoFullPath)
videoPlayer:play()
print('start play video succeed')