@@ -29,10 +29,12 @@ of this software and associated documentation files (the "Software"), to deal
29
29
import android .media .MediaCodecInfo ;
30
30
import android .media .MediaFormat ;
31
31
import android .net .Uri ;
32
+ import android .os .Build ;
32
33
import android .os .Handler ;
33
34
import android .util .Log ;
34
35
35
36
import androidx .annotation .Nullable ;
37
+ import androidx .annotation .RequiresApi ;
36
38
import androidx .media3 .common .Format ;
37
39
import androidx .media3 .common .MediaItem ;
38
40
import androidx .media3 .common .PlaybackException ;
@@ -105,12 +107,14 @@ public class AxmolMediaEngine extends DefaultRenderersFactory implements Player.
105
107
private AtomicInteger mState = new AtomicInteger (STATE_CLOSED );
106
108
Point mOutputDim = new Point (); // The output dim match with buffer
107
109
Point mVideoDim = new Point (); // The video dim (validate image dim)
110
+ String mSampleMimeType = null ;
108
111
private int mVideoPF = -1 ;
109
112
private int mVideoRotation = 0 ;
113
+ private int mCbcrOffset = 0 ;
110
114
111
115
/** ------ native methods ------- */
112
116
public static native void nativeHandleEvent (long nativeObj , int arg1 );
113
- public static native void nativeHandleVideoSample (long nativeObj , ByteBuffer sampleData , int sampleLen , int outputX , int outputY , int videoX , int videoY , int rotation , int videoPF );
117
+ public static native void nativeHandleVideoSample (long nativeObj , ByteBuffer sampleData , int sampleLen , int outputX , int outputY , int videoX , int videoY , int cbcrOffset , int rotation , int videoPF );
114
118
public static native void nativeSetDuration (long nativeObj , double duration );
115
119
public static native void nativeSetCurrentTime (long nativeObj , double currentTime );
116
120
@@ -304,16 +308,18 @@ public void onVideoFrameAboutToBeRendered(
304
308
Format format ,
305
309
@ Nullable MediaFormat mediaFormat ) {
306
310
if (mOutputFormat != mediaFormat ) {
311
+ mSampleMimeType = format .sampleMimeType ; // video/hevc, video/avc
307
312
mOutputFormat = mediaFormat ;
308
- updateVideoMeta ();
313
+ handleVideoMetaChanged ();
309
314
}
310
315
}
311
316
312
- /** update video informations */
313
- private void updateVideoMeta () {
317
+ /** handle video information changes */
318
+ private void handleVideoMetaChanged () {
314
319
MediaFormat format = mOutputFormat ;
315
320
if (format != null ) {
316
- // String mimeType = format.getString(MediaFormat.KEY_MIME); // "video/raw"
321
+ // String mimeType = format.getString(MediaFormat.KEY_MIME); // =="video/raw"
322
+
317
323
// Note: some android 11 and older devices not response desired color format(NV12), instead will be YUV420P aka I420
318
324
// refer: https://github.com/axmolengine/axmol/issues/2049
319
325
Integer colorFormat = format .getInteger (MediaFormat .KEY_COLOR_FORMAT );
@@ -329,30 +335,70 @@ private void updateVideoMeta() {
329
335
Log .w (TAG , String .format ("Unsupported color format: %d, video render may incorrect!" , colorFormat ));
330
336
}
331
337
338
+ // output dim
332
339
mOutputDim .x = format .getInteger (MediaFormat .KEY_WIDTH );
340
+ mOutputDim .y = format .getInteger (MediaFormat .KEY_HEIGHT );
341
+
342
+ int stride = 0 , sliceHeight = 0 ;
343
+ if (format .containsKey (MediaFormat .KEY_STRIDE )) {
344
+ stride = format .getInteger (MediaFormat .KEY_STRIDE );
345
+ }
346
+ if (format .containsKey (MediaFormat .KEY_SLICE_HEIGHT )) {
347
+ sliceHeight = format .getInteger (MediaFormat .KEY_SLICE_HEIGHT );
348
+ }
349
+ Log.d(TAG, String.format("Frame stride and slice height: %dx%d", stride, sliceHeight));
350
+ stride = Math .max (mOutputDim .x , stride );
351
+ sliceHeight = Math .max (mOutputDim .y , sliceHeight );
352
+
353
+ /* Notes
354
+ * 1. About desired frame size bytes
355
+ * a. stride > mOutputDim.x: means all frame bytes should pass to GPU(shader), and
356
+ * desired frame size bytes is: stride * sliceHeight * 3 / 2
357
+ * b. stride == mOutputDim.x: means we need discard Y plane aligned extra data, and
358
+ * desired frame size bytes is: stride * sliceHeight + (mOutputDim.x / 2) * (mOutputDim.y / 2) * 2
359
+ * 2. About video frame size alignment
360
+ * a. many devices may align 2, the sliceHeight == mOutputDim.y and stride == mOutputDim.x
361
+ * b. H264: align 16 for both width and height
362
+ * HEVC/H265: align 32 for both width and height
363
+ * 3. The cbcrOffset should be always stride * sliceHeight
364
+ * refer: https://github.com/axmolengine/axmol/issues/2101
365
+ */
366
+ mCbcrOffset = stride * sliceHeight ;
367
+ int frameSizeBytes = 0 ;
368
+ if (stride > mOutputDim .x ) {
369
+ mOutputDim .x = stride ;
370
+ mOutputDim .y = sliceHeight ;
371
+ frameSizeBytes = mCbcrOffset * 3 / 2 ;
372
+ } else frameSizeBytes = mCbcrOffset + mOutputDim .x / 2 * mOutputDim .y ;
373
+
374
+ // video dim
333
375
if (format .containsKey (MediaFormat .KEY_CROP_LEFT )
334
376
&& format .containsKey (MediaFormat .KEY_CROP_RIGHT )) {
335
377
mVideoDim .x = format .getInteger (MediaFormat .KEY_CROP_RIGHT ) + 1
336
378
- format .getInteger (MediaFormat .KEY_CROP_LEFT );
337
379
} else
338
380
mVideoDim .x = mOutputDim .x ;
339
381
340
- mOutputDim .y = format .getInteger (MediaFormat .KEY_HEIGHT );
341
382
if (format .containsKey (MediaFormat .KEY_CROP_TOP )
342
383
&& format .containsKey (MediaFormat .KEY_CROP_BOTTOM )) {
343
384
mVideoDim .y = format .getInteger (MediaFormat .KEY_CROP_BOTTOM ) + 1
344
385
- format .getInteger (MediaFormat .KEY_CROP_TOP );
345
386
} else
346
387
mVideoDim .y = mOutputDim .y ;
347
388
389
+ // video rotation
348
390
if (format .containsKey (MediaFormat .KEY_ROTATION )) {
349
391
mVideoRotation = format .getInteger (MediaFormat .KEY_ROTATION );
350
392
}
393
+
394
+ Log .d (TAG , String .format ("Input format:%s, outputDim:%dx%d, videoDim:%dx%d, cbcrOffset:%d, frameSizeBytes:%d" , mSampleMimeType ,
395
+ mOutputDim .x , mOutputDim .y ,
396
+ mVideoDim .x , mVideoDim .y ,
397
+ mCbcrOffset , frameSizeBytes ));
351
398
}
352
399
}
353
400
354
401
/** handler or listener methods */
355
-
356
402
@ Override
357
403
public void processVideoFrame (MediaCodecAdapter codec , int index , long presentationTimeUs ) {
358
404
if (mState .get () != STATE_PLAYING ) {
@@ -362,7 +408,7 @@ public void processVideoFrame(MediaCodecAdapter codec, int index, long presentat
362
408
}
363
409
364
410
ByteBuffer tmpBuffer = codec .getOutputBuffer (index );
365
- nativeHandleVideoSample (mNativeObj , tmpBuffer , tmpBuffer .remaining (), mOutputDim .x , mOutputDim .y , mVideoDim .x , mVideoDim .y , mVideoRotation , mVideoPF );
411
+ nativeHandleVideoSample (mNativeObj , tmpBuffer , tmpBuffer .remaining (), mOutputDim .x , mOutputDim .y , mVideoDim .x , mVideoDim .y , mCbcrOffset , mVideoRotation , mVideoPF );
366
412
367
413
AxmolEngine .getActivity ().runOnUiThread (() -> {
368
414
if (mPlayer != null ) {
@@ -437,7 +483,7 @@ public void onVideoSizeChanged(VideoSize videoSize) {
437
483
Log .d (TAG , String .format ("[Individual]onVideoSizeChanged: (%d,%d)" , videoSize .width , videoSize .height ));
438
484
439
485
if (mPlayer != null )
440
- updateVideoMeta ();
486
+ handleVideoMetaChanged ();
441
487
}
442
488
443
489
@ Override
0 commit comments