mirror of https://github.com/axmolengine/axmol.git
Fix #627
This commit is contained in:
parent 77bd96afb0
commit eb5c7758cc
@@ -455,7 +455,7 @@ HRESULT MFMediaPlayer::Invoke(IMFAsyncResult* pResult)
 
     // if (!m_hwndEvent)
     // HandleEvent((WPARAM)pEvent.Get());
-    //else
+    // else
     // PostMessage(m_hwndEvent, WM_APP_PLAYER_EVENT, (WPARAM)pEvent.Get(), (LPARAM)0);
 }
 
@@ -648,7 +648,7 @@ HRESULT MFMediaPlayer::Play()
 
     if (m_pSession == NULL || m_pSource == NULL)
        return E_UNEXPECTED;
 
     AutoLock lock(m_critsec);
 
     // If another operation is pending, cache the request.
@@ -1562,12 +1562,9 @@ HRESULT MFMediaPlayer::CreateOutputNode(IMFStreamDescriptor* pSourceSD, IMFTopol
     GUID SubType;
     CHECK_HR(hr = InputType->GetGUID(MF_MT_SUBTYPE, &SubType));
 
-    TComPtr<IMFMediaType> OutputType;
-    CHECK_HR(hr = ::MFCreateMediaType(&OutputType));
-    CHECK_HR(hr = OutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
-    CHECK_HR(hr = OutputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
+    m_bIsH264 = SubType == MFVideoFormat_H264 || SubType == MFVideoFormat_H264_ES;
 
-    GUID VideoOutputFormat = {};
+    GUID VideoOutputFormat;
     if ((SubType == MFVideoFormat_HEVC) || (SubType == MFVideoFormat_HEVC_ES) || (SubType == MFVideoFormat_NV12) ||
         (SubType == MFVideoFormat_IYUV))
     {
@@ -1582,6 +1579,10 @@ HRESULT MFMediaPlayer::CreateOutputNode(IMFStreamDescriptor* pSourceSD, IMFTopol
         VideoOutputFormat = Uncompressed ? MFVideoFormat_RGB32 : MFVideoFormat_YUY2;
     }
 
+    TComPtr<IMFMediaType> OutputType;
+    CHECK_HR(hr = ::MFCreateMediaType(&OutputType));
+    CHECK_HR(hr = OutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
+    CHECK_HR(hr = OutputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
     CHECK_HR(hr = OutputType->SetGUID(MF_MT_SUBTYPE, VideoOutputFormat));
 
     CHECK_HR(hr = ::MFCreateSampleGrabberSinkActivate(OutputType.Get(), Sampler.Get(), &pRendererActivate));
 
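Taken together, the two CreateOutputNode hunks defer building the sample-grabber output media type until after the output subtype has been chosen, and they additionally record whether the source stream is H.264. A minimal sketch of how the function reads after the patch, reconstructed only from the hunks above (surrounding code and error handling elided):

GUID SubType;
CHECK_HR(hr = InputType->GetGUID(MF_MT_SUBTYPE, &SubType));

// New: remember whether the source is H.264; the VideoPlayer::draw hunks further
// down use this flag to account for sample widths padded to 16-pixel macroblocks.
m_bIsH264 = SubType == MFVideoFormat_H264 || SubType == MFVideoFormat_H264_ES;

GUID VideoOutputFormat;
// ... choose NV12 / YUY2 / RGB32 for VideoOutputFormat based on SubType (elided) ...

// Moved: the grabber's output type is now created only after the subtype is known.
TComPtr<IMFMediaType> OutputType;
CHECK_HR(hr = ::MFCreateMediaType(&OutputType));
CHECK_HR(hr = OutputType->SetUINT32(MF_MT_ALL_SAMPLES_INDEPENDENT, TRUE));
CHECK_HR(hr = OutputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
CHECK_HR(hr = OutputType->SetGUID(MF_MT_SUBTYPE, VideoOutputFormat));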
@@ -156,6 +156,8 @@ public:
     HRESULT Pause();
     HRESULT Stop();
 
+    BOOL IsH264() const { return m_bIsH264; }
 
 protected:
     HRESULT SetPositionInternal(const MFTIME& hnsPosition);
     HRESULT StartPlayback(const MFTIME* hnsPosition);
 
@@ -231,5 +233,6 @@ protected:
     BOOL m_bLooping = FALSE;
     BOOL m_bPlayOnOpen = TRUE;
 
+    BOOL m_bIsH264 = FALSE;
     GUID m_VideoOutputFormat{};
 };
 
@@ -39,7 +39,7 @@
 # include "yasio/detail/byte_buffer.hpp"
 # include "ntcvt/ntcvt.hpp"
 # include "ui/LayoutHelper.h"
+# include "yasio/detail/sz.hpp"
 //-----------------------------------------------------------------------------------------------------------
 
 USING_NS_CC;
@@ -58,6 +58,19 @@ using namespace cocos2d::ui;
 
 namespace
 {
+/*
+* refer to: https://github.com/doyoulikerock/D3D11NV12Rendering/blob/master/D3D11NV12Rendering/PixelShader.hlsl
+// Converting 8-bit YUV to RGB888
+static const float3x3 YUVtoRGBCoeffMatrix =
+{
+    1.164383, 1.164383, 1.164383,
+    0.000000, -0.391762, 2.017232,
+    1.596027, -0.812968, 0.000000
+};
+*/
+
+// refer to:
+// https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#nv12
 std::string_view NV12_FRAG = R"(
 #ifdef GL_ES
 varying lowp vec4 v_fragmentColor;
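For reference, the commented-out HLSL matrix above is the limited-range BT.601 coefficient set, while the shaders added in the hunks below use the BT.709 equivalent together with YUVOffsets = (16, 128, 128) / 255. A rough CPU-side equivalent of the shaders' YuvToRgb, written out per channel just to make the arithmetic concrete (illustrative only, not part of the patch):

#include <algorithm>
#include <array>
#include <cstdint>

// Limited-range BT.709 YUV -> normalized RGB, matching the mat3 used by the new
// NV12_FRAG / YUY2_FRAG shaders (GLSL mat3 is column-major, so each channel below
// reads one row of that column layout).
std::array<float, 3> yuvToRgb709(uint8_t y, uint8_t u, uint8_t v)
{
    const float Y = y / 255.0f - 0.0627451017f;  // 16 / 255
    const float U = u / 255.0f - 0.501960814f;   // 128 / 255
    const float V = v / 255.0f - 0.501960814f;   // 128 / 255

    float r = 1.16438356f * Y + 1.79265225f * V;
    float g = 1.16438356f * Y - 0.213237017f * U - 0.533004045f * V;
    float b = 1.16438356f * Y + 2.11241937f * U;

    auto clamp01 = [](float c) { return std::clamp(c, 0.0f, 1.0f); };
    return {clamp01(r), clamp01(g), clamp01(b)};
}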
@@ -69,28 +82,59 @@ varying vec2 v_texCoord;
 
 uniform sampler2D u_texture; // Y sample
 uniform sampler2D u_texture1; // UV sample
+uniform vec2 uv_scale;
+uniform float out_w;
+
+const mat3 YUVtoRGBCoeff = mat3(
+    1.16438356, 1.16438356, 1.16438356,
+    0.00000000, -0.213237017, 2.11241937,
+    1.79265225, -0.533004045, 0.00000000
+);
+
+const vec3 YUVOffsets = vec3(0.0627451017, 0.501960814, 0.501960814);
+
+vec3 YuvToRgb(vec3 YUV)
+{
+    YUV -= YUVOffsets;
+    return YUVtoRGBCoeff * YUV;
+}
+
 void main()
 {
-    // refer to:
-    // a. https://gist.github.com/crearo/0d50442145b63c6c288d1c1675909990
-    // b. https://github.com/tqk2811/TqkLibrary.Media.VideoPlayer/blob/38a2dce908215045cc27cffb741a6e4b8492c9cd/TqkLibrary.Media.VideoPlayer.OpenGl/Renders/NV12Render.cs#L14
-    // c. https://www.cnblogs.com/nanqiang/p/10224867.html
-
-    float cy = v_texCoord.y + 0.01625; // why needs adjust?
-    vec4 uvColor = texture2D(u_texture1, vec2(v_texCoord.x, cy));
-    vec3 yuv = vec3(texture2D(u_texture, v_texCoord).r, uvColor.r - 0.5, uvColor.a - 0.5);
-
-    vec3 rgb = mat3( 1.0, 1.0, 1.0,
-                     0, -0.39465, 2.03211,
-                     1.13983, -0.58060, 0 ) * yuv;
-
-    gl_FragColor = v_fragmentColor * vec4(rgb, 1.0);
+    vec3 YUV;
+
+    /* For dual sampler */
+    vec2 tXY = v_texCoord;
+    YUV.x = texture2D(u_texture, tXY).x;
+    tXY.y += 0.015625; // why needs adjust 1.0/64 ?
+    YUV.yz = texture2D(u_texture1, tXY).xw;
+
+    /* For single sampler */
+    //vec2 tXY = v_texCoord * uv_scale;
+    //YUV.x = texture2D(u_texture, tXY).x;
+    //
+    //tXY.y *= 0.5;
+    //tXY.y += 2.0 / 3.0;
+    //
+    //float UVOffs = floor(v_texCoord.x * out_w / 2.0) * 2;
+    //float UPos = ((UVOffs * uv_scale.x) + 0.5) / out_w;
+    //float VPos = ((UVOffs * uv_scale.x) + 1.5) / out_w;
+    //
+    //YUV.y = texture2D(u_texture, vec2(UPos, tXY.y)).x;
+    //YUV.z = texture2D(u_texture, vec2(VPos, tXY.y)).x;
+
+    /* Convert YUV to RGB */
+    vec4 OutColor;
+    OutColor.xyz = YuvToRgb(YUV);
+    OutColor.w = 1.0;
+
+    gl_FragColor = v_fragmentColor * OutColor;
 }
 )"sv;
 
+// refer to:
+// https://docs.microsoft.com/en-us/windows/win32/medfound/recommended-8-bit-yuv-formats-for-video-rendering#yuy2
 std::string_view YUY2_FRAG = R"(
-// refer to: https://github.com/TheRealNox/glsl-shaders/blob/master/glsl/colourConverter.f.glsl
 
 #ifdef GL_ES
 varying lowp vec4 v_fragmentColor;
@@ -101,97 +145,47 @@ varying vec2 v_texCoord;
 #endif
 
 uniform sampler2D u_texture; // Y sample
-uniform float tex_w; // texture width
-uniform float tex_h; // texture height
-
-vec4 inYUY2(vec4 tempyuv, float isOdd)
-{
-    if (isOdd > 0.0)
-        return vec4(tempyuv.b, tempyuv.g, tempyuv.a, 255.0);
-    else
-        return vec4(tempyuv.r, tempyuv.g, tempyuv.a, 255.0);
-}
-
-vec4 limitedYCbCrToComputerRGBNormalized(vec4 yuv)
-{
-    vec4 rgb = vec4(0.0);
-    float scale = 1.0 / 256.0;
-
-    yuv = yuv * 255.0;
-
-    yuv.r -= 16.0;
-    yuv.g -= 128.0;
-    yuv.b -= 128.0;
-
-    rgb.r = scale * ((298.082 * yuv.r) + (458.942 * yuv.b));
-    rgb.g = scale * ((298.082 * yuv.r) + (-54.592 * yuv.g) + (-136.425 * yuv.b));
-    rgb.b = scale * ((298.082 * yuv.r) + (540.775 * yuv.g));
-
-    rgb.a = 255.0;
-
-    rgb = rgb / 255.0;
-
-    return rgb;
-}
-
-vec4 convertLimitedYUY2toComputerRGB()
-{
-    vec4 tempyuv = vec4(0.0);
-    vec2 textureRealSize = vec2(tex_w, tex_h);
-
-    vec2 pixelPos = vec2(textureRealSize.x * v_texCoord.x, textureRealSize.y * v_texCoord.y);
-
-    float isOdd = floor(mod(pixelPos.x, 2.0));
-
-    vec2 packedCoor = vec2(v_texCoord.x/2.0, v_texCoord.y);
-
-    tempyuv = inYUY2(texture2D(u_texture, packedCoor), isOdd);
-
-    return limitedYCbCrToComputerRGBNormalized(tempyuv);
-}
-
-vec4 fullYCbCrToComputerRGBNormalized(vec4 yuv)
-{
-    vec4 rgb = vec4(0.0);
-    float scale = 1.0 / 256.0;
-
-    yuv = yuv * 255.0;
-
-    yuv.g -= 128.0;
-    yuv.b -= 128.0;
-
-    rgb.r = scale * ((256.0 * yuv.r) + (403.1488 * yuv.b));
-    rgb.g = scale * ((256.0 * yuv.r) + (-47.954944 * yuv.g) + (-119.839744 * yuv.b));
-    rgb.b = scale * ((256.0 * yuv.r) + (475.0336 * yuv.g));
-
-    rgb.a = 255.0;
-
-    rgb = rgb / 255.0;
-
-    return rgb;
-}
-
-vec4 convertFullYUY2toComputerRGB()
-{
-    vec4 tempyuv = vec4(0.0);
-    // vec2 textureRealSize = textureSize(u_texture, 0);
-    vec2 textureRealSize = vec2(tex_w, tex_h);
-
-    vec2 pixelPos = vec2(textureRealSize.x * v_texCoord.x, textureRealSize.y * v_texCoord.y);
-
-    float isOdd = floor(mod(pixelPos.x, 2.0));
-
-    vec2 packedCoor = vec2(v_texCoord.x/2.0, v_texCoord.y);
-
-    tempyuv = inYUY2(texture2D(u_texture, packedCoor), isOdd);
-
-    return fullYCbCrToComputerRGBNormalized(tempyuv);
-}
+uniform sampler2D u_texture1; // UV sample
+uniform vec2 uv_scale;
+uniform float out_w; // texture width
+
+const mat3 YUVtoRGBCoeff = mat3(
+    1.16438356, 1.16438356, 1.16438356,
+    0.00000000, -0.213237017, 2.11241937,
+    1.79265225, -0.533004045, 0.00000000
+);
+
+const vec3 YUVOffsets = vec3(0.0627451017, 0.501960814, 0.501960814);
+
+vec3 YuvToRgb(vec3 YUV)
+{
+    YUV -= YUVOffsets;
+    return YUVtoRGBCoeff * YUV;
+}
 
 void main()
 {
-    vec4 color = convertFullYUY2toComputerRGB();
-    gl_FragColor = v_fragmentColor * vec4(color.rgb, 1.0);
+    vec2 tXY = v_texCoord * uv_scale;
+    vec3 YUV;
+
+    /* For dual sampler */
+    YUV.yz = texture2D(u_texture1, tXY).yw;
+    YUV.x = texture2D(u_texture, v_texCoord).x;
+
+    /* For single sampler */
+    //YUV.yz = texture2D(u_texture, tXY).yw;
+    //
+    //vec4 YUY2P = texture2D(u_texture, tXY);
+    //float Pos = v_texCoord.x * out_w;
+    //YUV.x = floor(mod(Pos, 2.0)) == 0.0 ? YUY2P.z : YUY2P.x;
+
+    /* Convert YUV to RGB */
+    vec4 OutColor;
+    OutColor.xyz = YuvToRgb(YUV);
+    OutColor.w = 1.0;
+
+    gl_FragColor = v_fragmentColor * OutColor;
 }
 )"sv;
 
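YUY2 stores each pair of horizontally adjacent pixels as four bytes, Y0 U Y1 V, with the chroma shared by both pixels. The dual-sampler branch above works because the draw() hunks further down upload the same byte stream twice: once as an LA8 texture at full texel width, so u_texture's .x is the per-pixel Y, and once as an RGBA8 texture at half width, so u_texture1's .y and .w are the pair's U and V. A small CPU-side sketch of unpacking one such macropixel, assuming that byte order (illustrative, not part of the patch):

#include <cstdint>

struct Yuv { uint8_t y, u, v; };

// One YUY2 macropixel: [Y0][U][Y1][V] covers two adjacent pixels sharing U/V.
void unpackYuy2Pair(const uint8_t* p, Yuv& left, Yuv& right)
{
    left  = {p[0], p[1], p[3]};
    right = {p[2], p[1], p[3]};
}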
@@ -244,7 +238,8 @@ struct PrivateVideoDescriptor
 }
 else
 {
-    const Vec2 originalScale{1.0f, _sampleFormat == VideoSampleFormat::YUY2 ? 2.0f : 1.0f};
+    const Vec2 originalScale{static_cast<float>(_videoWidth) / _vtexture->getPixelsWide(),
+                             static_cast<float>(_videoHeight) / _vtexture->getPixelsHigh()};
 
     const auto aspectRatio =
         (std::min)(viewSize.x / videoSize.x, viewSize.y / (videoSize.y * originalScale.y));
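Instead of hard-coding a vertical factor of 2 for YUY2, originalScale is now the ratio of the reported video size to the allocated texture size, which also absorbs the padded-width case handled below. With hypothetical numbers, a 1366x768 video uploaded into a 1376-texel-wide texture gives:

// Hypothetical example only: 1366x768 video, texture padded to 1376x768.
float scaleX = 1366.0f / 1376.0f;  // ~0.9927
float scaleY = 768.0f / 768.0f;    // 1.0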
@@ -409,13 +404,16 @@ void VideoPlayer::draw(Renderer* renderer, const Mat4& transform, uint32_t flags
 std::lock_guard<std::recursive_mutex> lck(pvd->_sampleBufferMtx);
 uint8_t* sampleData = pvd->_sampleBuffer.data();
 size_t sampleDataLen = pvd->_sampleBuffer.size();
-auto w = pvd->_videoWidth = pvd->_vplayer->GetVideoWidth();
-auto h = pvd->_videoHeight = pvd->_vplayer->GetVideoHeight();
+auto rWidth = pvd->_videoWidth = pvd->_vplayer->GetVideoWidth();
+auto rHeight = pvd->_videoHeight = pvd->_vplayer->GetVideoHeight();
+
+Vec2 uvScale{1.0f, 1.0f};
 
 bool needsInit = !pvd->_vtexture;
 if (!pvd->_vtexture)
 {
     pvd->_vtexture = new Texture2D();
 
     auto programCache = backend::ProgramCache::getInstance();
 
     auto& sampleOutFormat = pvd->_vplayer->GetVideoOutputFormat();
@@ -434,7 +432,7 @@ void VideoPlayer::draw(Renderer* renderer, const Mat4& transform, uint32_t flags
 {
     programCache->registerCustomProgramFactory(
         VIDEO_PROGRAM_ID, positionTextureColor_vert,
-        std::string{pvd->_sampleFormat == VideoSampleFormat::YUY2 ? YUY2_FRAG : NV12_FRAG});
+        std::string{pvd->_sampleFormat == VideoSampleFormat::NV12 ? NV12_FRAG : YUY2_FRAG});
     auto program = programCache->getCustomProgram(VIDEO_PROGRAM_ID);
     pvd->_vrender->setProgramState(new backend::ProgramState(program), false);
     break;
@@ -445,22 +443,43 @@ void VideoPlayer::draw(Renderer* renderer, const Mat4& transform, uint32_t flags
 
 switch (pvd->_sampleFormat)
 {
-case VideoSampleFormat::YUY2:
-    pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8, w,
-                                   h / 2, false, 0);
-    break;
 case VideoSampleFormat::NV12:
 {
-    const size_t ySampleSize = w * h;
-    pvd->_vtexture->updateWithData(sampleData, ySampleSize, PixelFormat::L8, PixelFormat::L8, w, h, false,
-                                   0);
+    /* For single sampler */
+    // int texelWidth = YASIO_SZ_ALIGN(rWidth, 16);
+    // int texelHeight = pvd->_vplayer->IsH264() ? YASIO_SZ_ALIGN(rHeight, 16) * 3 / 2 : rHeight * 3 / 2;
+    // uvScale.x = rWidth / (float)texelWidth;
+    // uvScale.y = rHeight / (float)texelHeight;
+    // pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::L8, PixelFormat::L8, texelWidth,
+    //                                texelHeight, false);
+
+    /* For dual sampler */
+    const int ySampleSize = rWidth * rHeight;
+    pvd->_vtexture->updateWithData(sampleData, ySampleSize, PixelFormat::L8, PixelFormat::L8, rWidth,
+                                   rHeight, false, 0);
     pvd->_vtexture->updateWithData(sampleData + ySampleSize, sampleDataLen - ySampleSize, PixelFormat::LA8,
-                                   PixelFormat::LA8, w >> 1, h >> 1, false, 1);
+                                   PixelFormat::LA8, rWidth >> 1, rHeight >> 1, false, 1);
+    break;
+}
+case VideoSampleFormat::YUY2:
+{
+    int texelWidth = pvd->_vplayer->IsH264() ? (YASIO_SZ_ALIGN(rWidth, 16)) : (rWidth);
+    uvScale.x = (float)rWidth / texelWidth;
+
+    /* For single sampler */
+    // pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8,
+    //                                texelWidth >> 1, rHeight, false, 0);
+
+    /* For dual sampler */
+    pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::LA8, PixelFormat::LA8,
+                                   texelWidth, rHeight, false, 0);
+    pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8,
+                                   texelWidth >> 1, rHeight, false, 1);
     break;
 }
 case VideoSampleFormat::RGB32:
-    pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8, w, h,
-                                   false, 0);
+    pvd->_vtexture->updateWithData(sampleData, sampleDataLen, PixelFormat::RGBA8, PixelFormat::RGBA8,
+                                   rWidth, rHeight, false, 0);
     break;
 default:;
 }
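Two things happen in the rewritten switch: an NV12 sample is split into its full-resolution Y plane (the first rWidth * rHeight bytes, uploaded as L8) and its half-resolution interleaved UV plane (the remainder, uploaded as LA8), and for H.264 YUY2 sources the row width is first rounded up to the 16-pixel macroblock size, with uv_scale telling the shader how much of each padded row is real video. A sketch of that arithmetic; alignUp is my own stand-in for YASIO_SZ_ALIGN from the newly included yasio/detail/sz.hpp, shown only to make the numbers concrete:

#include <cstddef>
#include <cstdint>

// Round n up to the next multiple of a power-of-two alignment.
int alignUp(int n, int align) { return (n + align - 1) & ~(align - 1); }

// NV12 layout for the dual-sampler path: Y plane followed by interleaved UV plane.
struct Nv12Planes { const uint8_t* y; const uint8_t* uv; size_t ySize, uvSize; };

Nv12Planes splitNv12(const uint8_t* sample, size_t len, int w, int h)
{
    const size_t ySize = static_cast<size_t>(w) * h;      // L8 texture, w x h
    return {sample, sample + ySize, ySize, len - ySize};  // LA8 texture, (w/2) x (h/2)
}

// YUY2 width handling for a hypothetical 1366-pixel-wide H.264 video:
//   int texelWidth = alignUp(1366, 16);     // 1376 texels per uploaded row
//   float uvScaleX = 1366.0f / texelWidth;  // ~0.9927, fed to the shader as uv_scale.x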
@@ -468,11 +487,11 @@ void VideoPlayer::draw(Renderer* renderer, const Mat4& transform, uint32_t flags
 {
     pvd->_vrender->initWithTexture(pvd->_vtexture);
 
-    if (pvd->_sampleFormat == VideoSampleFormat::YUY2)
+    if (pvd->_sampleFormat == VideoSampleFormat::NV12 || pvd->_sampleFormat == VideoSampleFormat::YUY2)
     {
         auto ps = pvd->_vrender->getProgramState();
-        PS_SET_UNIFORM(ps, "tex_w", (float)w);
-        PS_SET_UNIFORM(ps, "tex_h", (float)h);
+        PS_SET_UNIFORM(ps, "out_w", (float)rWidth);
+        PS_SET_UNIFORM(ps, "uv_scale", uvScale);
     }
 
     pvd->_scaleDirty = true;