diff --git a/core/media/AndroidMediaEngine.cpp b/core/media/AndroidMediaEngine.cpp
index 28a8fae9cc7b..494655b1bc33 100644
--- a/core/media/AndroidMediaEngine.cpp
+++ b/core/media/AndroidMediaEngine.cpp
@@ -27,25 +27,38 @@
 #    include "platform/android/jni/JniHelper.h"
 
 extern "C" {
-JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeHandleEvent(JNIEnv* env, jclass, jlong pME, int arg1)
+JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeFireEvent(JNIEnv* env, jclass, jlong pME, int arg1)
 {
     auto mediaEngine = (ax::AndroidMediaEngine*)((uintptr_t)pME);
     if (!mediaEngine)
         return;
-    mediaEngine->fireMediaEvent((ax::MEMediaEventType)arg1);
+    mediaEngine->_fireMediaEvent((ax::MEMediaEventType)arg1);
 }
 
-JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeHandleVideoSample(JNIEnv* env,
-                                                                                   jclass,
-                                                                                   jlong pME,
-                                                                                   jobject sampleBuffer,
-                                                                                   int sampleLen,
-                                                                                   int outputX,
-                                                                                   int outputY,
-                                                                                   int videoX,
-                                                                                   int videoY,
-                                                                                   int rotation,
-                                                                                   int videoPF)
+
+JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeStoreVideoMeta(JNIEnv* env,
+                                                                                jclass,
+                                                                                jlong pME,
+                                                                                int outputX,
+                                                                                int outputY,
+                                                                                int videoX,
+                                                                                int videoY,
+                                                                                int cbcrOffset,
+                                                                                int rotation,
+                                                                                int videoPF)
+{
+    auto mediaEngine = (ax::AndroidMediaEngine*)((uintptr_t)pME);
+    if (!mediaEngine)
+        return;
+
+    mediaEngine->_storeVideoMeta(outputX, outputY, videoX, videoY, cbcrOffset, rotation, videoPF);
+}
+
+JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeStoreLastVideoSample(JNIEnv* env,
+                                                                                      jclass,
+                                                                                      jlong pME,
+                                                                                      jobject sampleBuffer,
+                                                                                      int sampleLen)
 {
     auto mediaEngine = (ax::AndroidMediaEngine*)((uintptr_t)pME);
     if (!mediaEngine)
@@ -53,38 +66,38 @@ JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeHandleVideoSamp
 
     auto sampleData = static_cast<const uint8_t*>(env->GetDirectBufferAddress(sampleBuffer));
 
-    mediaEngine->handleVideoSample(sampleData, sampleLen, outputX, outputY, videoX, videoY, rotation, videoPF);
+    mediaEngine->_storeLastVideoSample(sampleData, sampleLen);
 }
 
-JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeSetDuration(JNIEnv* env,
-                                                                             jclass,
-                                                                             jlong pME,
-                                                                             double duration)
+JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeStoreDuration(JNIEnv* env,
+                                                                               jclass,
+                                                                               jlong pME,
+                                                                               double duration)
 {
     auto mediaEngine = (ax::AndroidMediaEngine*)((uintptr_t)pME);
     if (!mediaEngine)
         return;
-    mediaEngine->updateDuration(duration);
+    mediaEngine->_storeDuration(duration);
 }
 
-JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeSetCurrentTime(JNIEnv* env,
-                                                                                jclass,
-                                                                                jlong pME,
-                                                                                double currentTime)
+JNIEXPORT void JNICALL Java_org_axmol_lib_AxmolMediaEngine_nativeStoreCurrentTime(JNIEnv* env,
+                                                                                  jclass,
+                                                                                  jlong pME,
+                                                                                  double currentTime)
 {
     auto mediaEngine = (ax::AndroidMediaEngine*)((uintptr_t)pME);
     if (!mediaEngine)
         return;
-    mediaEngine->updateCurrentTime(currentTime);
+    mediaEngine->_storeCurrentTime(currentTime);
 }
 }
 
 namespace ax {
 
-static const char* className = "org.axmol.lib.AxmolMediaEngine";
+static const char* className = "org.axmol.lib.AxmolMediaEngine";
 
 AndroidMediaEngine::AndroidMediaEngine()
 {
@@ -162,8 +175,9 @@ bool AndroidMediaEngine::transferVideoFrame()
 
         auto& buffer = _frameBuffer2;
 
-        ax::MEVideoFrame frame{buffer.data(), buffer.data() + _outputDim.x * _outputDim.y, buffer.size(),
-                               ax::MEVideoPixelDesc{static_cast<MEVideoPixelFormat>(_videoPF), _outputDim}, _videoDim};
+        ax::MEVideoFrame frame{buffer.data(), buffer.data() + _cbcrOffset, buffer.size(),
+                               ax::MEVideoPixelDesc{static_cast<MEVideoPixelFormat>(_videoPF), _outputDim},
+                               _videoDim};
         frame._vpd._rotation = _videoRotation;
         assert(static_cast<int>(frame._dataLen) >= frame._vpd._dim.x * frame._vpd._dim.y * 3 / 2);
         _onVideoFrame(frame);
@@ -174,23 +188,27 @@ bool AndroidMediaEngine::transferVideoFrame()
 
     return false;
 }
 
-void AndroidMediaEngine::handleVideoSample(const uint8_t* buf,
-                                           size_t len,
-                                           int outputX,
-                                           int outputY,
-                                           int videoX,
-                                           int videoY,
-                                           int rotation,
-                                           int videoPF)
+void AndroidMediaEngine::_storeVideoMeta(int outputX,
+                                         int outputY,
+                                         int videoX,
+                                         int videoY,
+                                         int cbcrOffset,
+                                         int rotation,
+                                         int videoPF)
 {
-    std::unique_lock<std::mutex> lck(_frameBuffer1Mtx);
-    _frameBuffer1.assign(buf, buf + len);
     _outputDim.set(outputX, outputY);
     _videoDim.set(videoX, videoY);
+    _cbcrOffset = cbcrOffset;
     _videoRotation = rotation;
     _videoPF = videoPF;
 }
 
+void AndroidMediaEngine::_storeLastVideoSample(const uint8_t* buf, size_t len)
+{
+    std::unique_lock<std::mutex> lck(_frameBuffer1Mtx);
+    _frameBuffer1.assign(buf, buf + len);
 }
+} // namespace ax
+
 #endif
diff --git a/core/media/AndroidMediaEngine.h b/core/media/AndroidMediaEngine.h
index 960be293dbbc..3e58e40af892 100644
--- a/core/media/AndroidMediaEngine.h
+++ b/core/media/AndroidMediaEngine.h
@@ -34,11 +34,6 @@ class AndroidMediaEngine : public MediaEngine
 public:
     AndroidMediaEngine();
     ~AndroidMediaEngine();
-    void fireMediaEvent(MEMediaEventType event)
-    {
-        if (_onMediaEvent)
-            _onMediaEvent(event);
-    }
 
     void setCallbacks(std::function<void(MEMediaEventType)> onMediaEvent, std::function<void(const MEVideoFrame&)> onVideoFrame) override
     {
@@ -60,9 +55,16 @@ class AndroidMediaEngine : public MediaEngine
     MEMediaState getState() const override;
 
     bool transferVideoFrame() override;
-    void handleVideoSample(const uint8_t* buf, size_t len, int outputX, int outputY, int videoX, int videoY, int rotation, int videoPF);
-    void updateCurrentTime(double currentTime) { _currentTime = currentTime; }
-    void updateDuration(double duration) { _duration = duration; }
+    // internal stubs
+    void _fireMediaEvent(MEMediaEventType event)
+    {
+        if (_onMediaEvent)
+            _onMediaEvent(event);
+    }
+    void _storeDuration(double duration) { _duration = duration; }
+    void _storeCurrentTime(double currentTime) { _currentTime = currentTime; }
+    void _storeLastVideoSample(const uint8_t* buf, size_t len);
+    void _storeVideoMeta(int outputX, int outputY, int videoX, int videoY, int cbcrOffset, int rotation, int videoPF);
 
 private:
     void* context{};  // java object strong-refs
@@ -71,6 +73,7 @@ class AndroidMediaEngine : public MediaEngine
 
     MEIntPoint _outputDim;
     MEIntPoint _videoDim;
+    int _cbcrOffset{0};
     int _videoRotation{0};
     int _videoPF{-1};
 
diff --git a/core/platform/android/java/src/org/axmol/lib/AxmolMediaEngine.java b/core/platform/android/java/src/org/axmol/lib/AxmolMediaEngine.java
index a6bdf95ce3d5..46c962795475 100644
--- a/core/platform/android/java/src/org/axmol/lib/AxmolMediaEngine.java
+++ b/core/platform/android/java/src/org/axmol/lib/AxmolMediaEngine.java
@@ -25,7 +25,6 @@ of this software and associated documentation files (the "Software"), to deal
 
 import android.app.Activity;
 import android.content.Context;
-import android.graphics.Point;
 import android.media.MediaCodecInfo;
 import android.media.MediaFormat;
 import android.net.Uri;
@@ -56,7 +55,7 @@ of this software and associated documentation files (the "Software"), to deal
 import java.util.concurrent.atomic.AtomicInteger;
 
 @UnstableApi @SuppressWarnings("unused")
-public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.Listener, MediaCodecVideoRenderer.VideoFrameProcessor, VideoFrameMetadataListener {
+public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.Listener, MediaCodecVideoRenderer.VideoFrameProcessor, VideoFrameMetadataListener {
     // The native media events, match with MEMediaEventType
     public static final int EVENT_PLAYING = 0;
     public static final int EVENT_PAUSED = 1;
@@ -67,22 +66,34 @@ public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.
     public static final int VIDEO_PF_NV12 = 3;
     public static final int VIDEO_PF_I420 = 4;
 
-    /** Media has been closed and cannot be played again. */
+    /**
+     * Media has been closed and cannot be played again.
+     */
     public static final int STATE_CLOSED = 0;
-    /** Media is being prepared for playback. */
+    /**
+     * Media is being prepared for playback.
+     */
     public static final int STATE_PREPARING = 1;
-    /** Media is currently playing. */
+    /**
+     * Media is currently playing.
+     */
     public static final int STATE_PLAYING = 2;
-    /** Playback has been paused, but can be resumed. */
+    /**
+     * Playback has been paused, but can be resumed.
+     */
    public static final int STATE_PAUSED = 3;
-    /** Playback has been stopped, but can be restarted. */
+    /**
+     * Playback has been stopped, but can be restarted.
+     */
     public static final int STATE_STOPPED = 4;
-    /** Unrecoverable error occurred during playback. */
+    /**
+     * Unrecoverable error occurred during playback.
+     */
     public static final int STATE_ERROR = 5;
 
     // The desired video pixel format
@@ -91,7 +102,6 @@ public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.
      * COLOR_FormatYUV420Planar (yyyyyyyy uu vv) (YUV420p)
      * COLOR_FormatYUV422SemiPlanar (Y0 U0 Y1 V0) (YUY2)
      */
-
     public static final String TAG = "AxmolMediaEngine";
     public static Context sContext = null;
     private ExoPlayer mPlayer;
@@ -103,16 +113,19 @@ public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.
     private boolean mPlaybackEnded = false;
     private AtomicInteger mState = new AtomicInteger(STATE_CLOSED);
-    Point mOutputDim = new Point(); // The output dim match with buffer
-    Point mVideoDim = new Point(); // The video dim (validate image dim)
-    private int mVideoPF = -1;
-    private int mVideoRotation = 0;
 
-    /** ------ native methods ------- */
-    public static native void nativeHandleEvent(long nativeObj, int arg1);
-    public static native void nativeHandleVideoSample(long nativeObj, ByteBuffer sampleData, int sampleLen, int outputX, int outputY, int videoX, int videoY, int rotation, int videoPF);
-    public static native void nativeSetDuration(long nativeObj, double duration);
-    public static native void nativeSetCurrentTime(long nativeObj, double currentTime);
+    /**
+     * ------ native methods -------
+     */
+    public static native void nativeFireEvent(long nativeObj, int arg1);
+
+    public static native void nativeStoreVideoMeta(long mNativeObj, int outputX, int outputY, int videoX, int videoY, int cbcrOffset, int rotation, int videoPF);
+
+    public static native void nativeStoreLastVideoSample(long nativeObj, ByteBuffer sampleData, int sampleLen);
+
+    public static native void nativeStoreDuration(long nativeObj, double duration);
+
+    public static native void nativeStoreCurrentTime(long nativeObj, double currentTime);
 
     public static void setContext(Activity activity) {
         sContext = activity.getApplicationContext();
@@ -134,7 +147,7 @@ public AxmolMediaEngine(Context context) {
     @SuppressWarnings("unused")
     public void bindNativeObject(long nativeObj) {
         mNativeObj = nativeObj;
-        if(nativeObj == 0) { // when unbind nativeObj, we should ensure close player
+        if (nativeObj == 0) { // when unbind nativeObj, we should ensure close player
             close();
         }
     }
@@ -161,21 +174,17 @@ protected void buildVideoRenderers(
                 MAX_DROPPED_VIDEO_FRAME_COUNT_TO_NOTIFY));
     }
 
-    public void setAutoPlay(boolean bAutoPlay)
-    {
+    public void setAutoPlay(boolean bAutoPlay) {
         mAutoPlay = bAutoPlay;
     }
 
     /**
-     *
-     * @param sourceUri:
-     *                   url: http(s)://
-     *                   disk: file://path/to/file
-     *                   assets: file:///android_asset
+     * @param sourceUri: url: http(s)://
+     *                   disk: file://path/to/file
+     *                   assets: file:///android_asset
      * @return
      */
-    public boolean open(String sourceUri)
-    {
+    public boolean open(String sourceUri) {
         if (mState.get() == STATE_PREPARING)
             return false;
         mState.set(STATE_PREPARING);
@@ -206,7 +215,7 @@ public boolean open(String sourceUri)
     }
 
     public boolean close() {
-        if(mPlayer != null) {
+        if (mPlayer != null) {
             final ExoPlayer player = mPlayer;
             mPlayer = null;
             final AxmolMediaEngine mediaEngine = this;
@@ -220,6 +229,7 @@ public boolean close() {
         }
         return true;
     }
+
     public boolean setLoop(boolean bLooping) {
         if (mLooping != bLooping) {
             mLooping = bLooping;
@@ -234,23 +244,24 @@ public boolean setLoop(boolean bLooping) {
 
     @SuppressWarnings("unused")
     public boolean setRate(double fRate) {
-        if(mPlayer == null) return false;
+        if (mPlayer == null) return false;
         AxmolEngine.getActivity().runOnUiThread(() -> {
             if (mPlayer != null)
-                mPlayer.setPlaybackSpeed((float)fRate);
+                mPlayer.setPlaybackSpeed((float) fRate);
         });
         return true;
     }
-    public boolean setCurrentTime(double fSeekTimeInSec)
-    {
-        if(mPlayer == null) return false;
+
+    public boolean setCurrentTime(double fSeekTimeInSec) {
+        if (mPlayer == null) return false;
         AxmolEngine.getActivity().runOnUiThread(() -> {
             if (mPlayer != null)
-                mPlayer.seekTo((long)(fSeekTimeInSec * 1000));
+                mPlayer.seekTo((long) (fSeekTimeInSec * 1000));
         });
         return true;
     }
+
     public boolean play() {
         if (mPlayer == null) return false;
         AxmolEngine.getActivity().runOnUiThread(() -> {
@@ -271,20 +282,22 @@ public boolean play() {
         });
         return true;
     }
+
     public boolean pause() {
-        if(mPlayer == null) return false;
+        if (mPlayer == null) return false;
         AxmolEngine.getActivity().runOnUiThread(() -> {
             if (mPlayer != null)
                 mPlayer.pause();
         });
         return true;
     }
+
     public boolean stop() {
-        if(mPlayer == null) return false;
+        if (mPlayer == null) return false;
         AxmolEngine.getActivity().runOnUiThread(() -> {
             if (mPlayer != null) {
                 mPlayer.stop();
-                nativeSetDuration(mNativeObj,0.0);
+                nativeStoreDuration(mNativeObj, 0.0);
             }
         });
         return true;
@@ -304,55 +317,96 @@ public void onVideoFrameAboutToBeRendered(
         Format format,
         @Nullable MediaFormat mediaFormat) {
         if (mOutputFormat != mediaFormat) {
+            // format.sampleMimeType will be video/hevc or video/avc
             mOutputFormat = mediaFormat;
-            updateVideoMeta();
+            handleVideoMetaChanged();
         }
     }
 
-    /** update video informations */
-    private void updateVideoMeta() {
+    /* handle video informations changed
+     * Notes
+     * 1. About desired frame size bytes
+     *    a. stride > mOutputDim.x: means all frame bytes should pass to GPU(shader), and
+     *       desired frame size bytes is: stride * sliceHeight * 3 / 2
+     *    b. stride == mOutputDim.x: means we need discard Y plane aligned extra data, and
+     *       desired frame size bytes is: stride * sliceHeight + (mOutputDim.x / 2) * (mOutputDim.y / 2) * 2
+     * 2. About video frame size alignment
+     *    a. many devices may align 2, the sliceHeight == mOutputDim.y and stride == mOutputDim.x
+     *    b. H264: align 16 for both width and height
+     *       HEVC/H265: align 32 for both width and height
+     * 3. The cbcrOffset should be always stride * sliceHeight
+     * refer: https://github.com/axmolengine/axmol/issues/2101
+     */
+    private void handleVideoMetaChanged() {
         MediaFormat format = mOutputFormat;
-        if(format != null) {
-            // String mimeType = format.getString(MediaFormat.KEY_MIME); // "video/raw"
+        if (format != null) {
+            // String mimeType = format.getString(MediaFormat.KEY_MIME); // =="video/raw"
             // Note: some android 11 and older devices not response desired color format(NV12), instead will be YUV420P aka I420
             // refer: https://github.com/axmolengine/axmol/issues/2049
+            int videoPF;
             Integer colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
-            switch(colorFormat) {
+            switch (colorFormat) {
                 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
-                    mVideoPF = VIDEO_PF_NV12;
+                    videoPF = VIDEO_PF_NV12;
                     break;
                 case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
-                    mVideoPF = VIDEO_PF_I420;
+                    videoPF = VIDEO_PF_I420;
                     break;
                 default:
-                    mVideoPF = VIDEO_PF_NV12;
+                    videoPF = VIDEO_PF_NV12;
                     Log.w(TAG, String.format("Unsupported color format: %d, video render may incorrect!", colorFormat));
             }
 
-            mOutputDim.x = format.getInteger(MediaFormat.KEY_WIDTH);
-            if (format.containsKey(MediaFormat.KEY_CROP_LEFT)
-                && format.containsKey(MediaFormat.KEY_CROP_RIGHT)) {
-                mVideoDim.x = format.getInteger(MediaFormat.KEY_CROP_RIGHT) + 1
-                    - format.getInteger(MediaFormat.KEY_CROP_LEFT);
-            } else
-                mVideoDim.x = mOutputDim.x;
-
-            mOutputDim.y = format.getInteger(MediaFormat.KEY_HEIGHT);
-            if (format.containsKey(MediaFormat.KEY_CROP_TOP)
-                && format.containsKey(MediaFormat.KEY_CROP_BOTTOM)) {
-                mVideoDim.y = format.getInteger(MediaFormat.KEY_CROP_BOTTOM) + 1
-                    - format.getInteger(MediaFormat.KEY_CROP_TOP);
-            } else
-                mVideoDim.y = mOutputDim.y;
-
-            if (format.containsKey(MediaFormat.KEY_ROTATION)) {
-                mVideoRotation = format.getInteger(MediaFormat.KEY_ROTATION);
+            String codec = format.getString(MediaFormat.KEY_CODECS_STRING);
+
+            // output dim
+            int outputX = format.getInteger(MediaFormat.KEY_WIDTH);
+            int outputY = format.getInteger(MediaFormat.KEY_HEIGHT);
+
+            int stride = 0;
+            int sliceHeight = 0;
+            if (format.containsKey(MediaFormat.KEY_STRIDE)) {
+                stride = format.getInteger(MediaFormat.KEY_STRIDE);
             }
+            if (format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)) {
+                sliceHeight = format.getInteger(MediaFormat.KEY_SLICE_HEIGHT);
+            }
+            Log.d(TAG, String.format("Frame stride and slice height: %dx%d", stride, sliceHeight));
+            stride = Math.max(outputX, stride);
+            sliceHeight = Math.max(outputY, sliceHeight);
+
+            int cbcrOffset = stride * sliceHeight;
+            int frameSizeBytes = 0;
+            if (stride > outputX) {
+                outputX = stride;
+                outputY = sliceHeight;
+                frameSizeBytes = cbcrOffset * 3 / 2;
+            } else frameSizeBytes = cbcrOffset + outputX / 2 * outputY;
+
+            // video dim
+            int videoX = format.containsKey(MediaFormat.KEY_CROP_LEFT)
+                && format.containsKey(MediaFormat.KEY_CROP_RIGHT) ? format.getInteger(MediaFormat.KEY_CROP_RIGHT) + 1
+                - format.getInteger(MediaFormat.KEY_CROP_LEFT) : outputX;
+            int videoY = format.containsKey(MediaFormat.KEY_CROP_TOP)
+                && format.containsKey(MediaFormat.KEY_CROP_BOTTOM) ? format.getInteger(MediaFormat.KEY_CROP_BOTTOM) + 1
+                - format.getInteger(MediaFormat.KEY_CROP_TOP) : outputY;
+
+            // video rotation
+            int rotation = format.containsKey(MediaFormat.KEY_ROTATION) ? format.getInteger(MediaFormat.KEY_ROTATION) : 0;
+
+            nativeStoreVideoMeta(mNativeObj, outputX, outputY, videoX, videoY, cbcrOffset, rotation, videoPF);
+
+            Log.d(TAG, String.format("Input format:%s, outputDim:%dx%d, videoDim:%dx%d, cbcrOffset:%d, frameSizeBytes:%d",
+                mVideoRenderer.getCodecName(),
+                outputX, outputY,
+                videoX, videoY,
+                cbcrOffset, frameSizeBytes));
         }
     }
 
-    /** handler or listener methods */
-
+    /**
+     * handler or listener methods
+     */
     @Override
    public void processVideoFrame(MediaCodecAdapter codec, int index, long presentationTimeUs) {
         if (mState.get() != STATE_PLAYING) {
@@ -362,27 +416,20 @@ public void processVideoFrame(MediaCodecAdapter codec, int index, long presentat
         }
 
         ByteBuffer tmpBuffer = codec.getOutputBuffer(index);
-        nativeHandleVideoSample(mNativeObj, tmpBuffer, tmpBuffer.remaining(), mOutputDim.x, mOutputDim.y, mVideoDim.x, mVideoDim.y, mVideoRotation, mVideoPF);
-
-        AxmolEngine.getActivity().runOnUiThread(() -> {
-            if (mPlayer != null) {
-                long currentPos = mPlayer.getCurrentPosition();
-                nativeSetCurrentTime(mNativeObj,currentPos / 1000.0);
-            }
-        });
+        nativeStoreLastVideoSample(mNativeObj, tmpBuffer, tmpBuffer.remaining());
+        nativeStoreCurrentTime(mNativeObj, mPlayer.getCurrentPosition() / 1000.0);
     }
 
     @Override
     public void onIsPlayingChanged(boolean isPlaying) {
         Log.d(TAG, "[Individual]onIsPlayingChanged: " + isPlaying);
-        if(mPlayer == null) return;
+        if (mPlayer == null) return;
         if (!isPlaying) {
             int playbackState = mPlayer.getPlaybackState();
             if (playbackState == Player.STATE_READY || playbackState == Player.STATE_BUFFERING) {
                 mState.set(STATE_PAUSED);
                 nativeEvent(EVENT_PAUSED);
-            }
-            else if(playbackState == Player.STATE_IDLE && mState.get() != STATE_STOPPED) {
+            } else if (playbackState == Player.STATE_IDLE && mState.get() != STATE_STOPPED) {
                 mState.set(STATE_STOPPED);
                 nativeEvent(EVENT_STOPPED);
             }
@@ -390,25 +437,18 @@ else if(playbackState == Player.STATE_IDLE && mState.get() != STATE_STOPPED) {
     }
 
     /**
-     *
-     * @param playbackState
-     *                      int STATE_IDLE = 1;
-     *                      int STATE_BUFFERING = 2;
-     *                      int STATE_READY = 3;
-     *                      int STATE_ENDED = 4;
+     * @param playbackState int STATE_IDLE = 1;
+     *                      int STATE_BUFFERING = 2;
+     *                      int STATE_READY = 3;
+     *                      int STATE_ENDED = 4;
      */
     @Override
     public void onPlaybackStateChanged(int playbackState) {
         Log.d(TAG, "[Individual]onPlaybackStateChanged: " + playbackState);
-        if(mPlayer == null) return;
+        if (mPlayer == null) return;
         switch (playbackState) {
             case Player.STATE_READY:
-                Log.d(TAG, "[Individual]onPlaybackStateChanged: decoder: " + mVideoRenderer.getCodecName());
-                AxmolEngine.getActivity().runOnUiThread(() -> {
-                    if (mPlayer != null) {
-                        nativeSetDuration(mNativeObj,mPlayer.getContentDuration() / 1000.0);
-                    }
-                });
+                nativeStoreDuration(mNativeObj, mPlayer.getContentDuration() / 1000.0);
                 break;
             case Player.STATE_ENDED:
                 mPlaybackEnded = true;
@@ -416,15 +456,16 @@ public void onPlaybackStateChanged(int playbackState) {
                 mState.set(STATE_STOPPED);
                 nativeEvent(EVENT_STOPPED);
                 break;
-            default: ;
+            default:
+                ;
         }
     }
 
     @Override
     public void onPlayerError(PlaybackException error) {
         Log.e(TAG, "onPlayerError: " + error.getMessage());
-        if(mPlayer == null) return;
-        mState .set(STATE_ERROR);
+        if (mPlayer == null) return;
+        mState.set(STATE_ERROR);
         nativeEvent(EVENT_ERROR);
     }
 
@@ -436,8 +477,8 @@ public boolean isPlaybackEnded() {
     public void onVideoSizeChanged(VideoSize videoSize) {
         Log.d(TAG, String.format("[Individual]onVideoSizeChanged: (%d,%d)", videoSize.width, videoSize.height));
 
-        if(mPlayer != null)
-            updateVideoMeta();
+        if (mPlayer != null)
+            handleVideoMetaChanged();
     }
 
     @Override
@@ -446,10 +487,8 @@ public void onIsLoadingChanged(boolean isLoading) {
     }
 
     public void nativeEvent(int event) {
-        if(mNativeObj != 0 && mPlayer != null) {
-            nativeHandleEvent(mNativeObj, event);
+        if (mNativeObj != 0 && mPlayer != null) {
+            nativeFireEvent(mNativeObj, event);
         }
     }
 }
-
-
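
Note (illustrative appendix, not part of the patch): the frame-layout rules spelled out in the handleVideoMetaChanged() comment block reduce to a small amount of arithmetic. The sketch below restates that arithmetic as a standalone helper, assuming a 4:2:0 output buffer (NV12 or I420) and using only the MediaFormat keys referenced above; FrameLayoutSketch and its Layout holder are hypothetical names introduced purely for illustration.

// Illustrative sketch only: derive the CbCr offset and the expected frame size
// from a decoder's output MediaFormat, following the notes in handleVideoMetaChanged().
import android.media.MediaFormat;

final class FrameLayoutSketch {
    static final class Layout {
        int outputX, outputY;   // dimensions of the buffer handed to the renderer
        int cbcrOffset;         // byte offset where the chroma plane(s) begin
        int frameSizeBytes;     // expected size of one decoded frame in bytes
    }

    static Layout compute(MediaFormat format) {
        Layout l = new Layout();
        int width = format.getInteger(MediaFormat.KEY_WIDTH);
        int height = format.getInteger(MediaFormat.KEY_HEIGHT);

        // Row stride and slice height may exceed width/height because of codec
        // alignment (e.g. 16 for H.264, 32 for HEVC); never let them be smaller.
        int stride = format.containsKey(MediaFormat.KEY_STRIDE)
            ? Math.max(width, format.getInteger(MediaFormat.KEY_STRIDE)) : width;
        int sliceHeight = format.containsKey(MediaFormat.KEY_SLICE_HEIGHT)
            ? Math.max(height, format.getInteger(MediaFormat.KEY_SLICE_HEIGHT)) : height;

        // The chroma data always starts right after the (possibly padded) Y plane.
        l.cbcrOffset = stride * sliceHeight;

        if (stride > width) {
            // Padded rows must be passed through to the shader, so the output
            // dimensions become the padded ones and the frame is 1.5x the Y plane.
            l.outputX = stride;
            l.outputY = sliceHeight;
            l.frameSizeBytes = l.cbcrOffset * 3 / 2;
        } else {
            // Only the Y plane may carry slice-height padding; the chroma planes
            // stay tight: (width / 2) * (height / 2) * 2 bytes for Cb plus Cr.
            l.outputX = width;
            l.outputY = height;
            l.frameSizeBytes = l.cbcrOffset + (width / 2) * (height / 2) * 2;
        }
        return l;
    }
}

In the patch itself this logic remains inline in handleVideoMetaChanged(); the resulting cbcrOffset is forwarded to the native side through nativeStoreVideoMeta(), and AndroidMediaEngine::transferVideoFrame() uses it to locate the chroma plane instead of the previous _outputDim.x * _outputDim.y assumption.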