Przeglądaj źródła

Merge pull request #1 from Tencent/master

从原仓库同步代码
dirge 3 lat temu
rodzic
commit
f4fe3cdee8
100 zmienionych plików z 4561 dodań i 849 usunięć
  1. 22 0
      .github/ISSUE_TEMPLATE/bug_report.md
  2. 20 0
      .github/ISSUE_TEMPLATE/feature_request.md
  3. 1 0
      Android/PlayerProj/.gitignore
  4. 8 1
      Android/PlayerProj/animplayer/build.gradle
  5. 1 1
      Android/PlayerProj/animplayer/publish.gradle
  6. 45 13
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AnimConfigManager.kt
  7. 19 8
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AnimPlayer.kt
  8. 115 74
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AnimView.kt
  9. 14 6
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AudioPlayer.kt
  10. 8 2
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/Constant.kt
  11. 26 6
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/Decoder.kt
  12. 5 1
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/EGLUtil.kt
  13. 0 98
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/FileContainer.kt
  14. 164 31
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/HardDecoder.kt
  15. 66 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/IAnimView.kt
  16. 54 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/IRenderListener.kt
  17. 36 43
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/Render.kt
  18. 207 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/YUVRender.kt
  19. 60 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/YUVShader.kt
  20. 65 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/AssetsFileContainer.kt
  21. 60 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/FileContainer.kt
  22. 34 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/IFileContainer.kt
  23. 50 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/StreamContainer.kt
  24. 49 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/StreamMediaDataSource.kt
  25. 3 2
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/MixAnimPlugin.kt
  26. 4 4
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/MixRender.kt
  27. 6 5
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/MixTouch.kt
  28. 4 2
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/Resource.kt
  29. 30 1
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/Src.kt
  30. 11 3
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/plugin/AnimPluginManager.kt
  31. 21 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/textureview/InnerTextureView.kt
  32. 53 4
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/util/MediaUtil.kt
  33. 252 0
      Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/util/ScaleTypeUtil.kt
  34. 197 92
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/AnimTool.java
  35. 38 12
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/CommonArg.java
  36. 118 28
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/CommonArgTool.java
  37. 53 45
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/GetAlphaFrame.java
  38. 82 8
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/Main.java
  39. 2 1
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/Md5Util.java
  40. 19 0
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/TLog.java
  41. 24 0
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/data/PointRect.java
  42. 283 39
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/ui/ToolUI.java
  43. 295 0
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/ui/VapxUI.java
  44. 79 0
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/vapx/FrameSet.java
  45. 208 0
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/vapx/GetMaskFrame.java
  46. 90 0
      Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/vapx/SrcSet.java
  47. 0 6
      Android/PlayerProj/app/build.gradle
  48. 1 1
      Android/PlayerProj/app/src/main/AndroidManifest.xml
  49. BIN
      Android/PlayerProj/app/src/main/assets/special_size_750.mp4
  50. 4 0
      Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/MainActivity.kt
  51. 7 15
      Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimActiveDemoActivity.kt
  52. 3 8
      Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimSimpleDemoActivity.kt
  53. 228 0
      Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimSpecialSizeDemoActivity.kt
  54. 12 14
      Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimVapxDemoActivity.kt
  55. 2 2
      Android/PlayerProj/app/src/main/res/layout/activity_anim_simple_demo.xml
  56. 25 20
      Android/PlayerProj/app/src/main/res/layout/activity_main.xml
  57. 6 4
      Android/PlayerProj/build.gradle
  58. 0 15
      Android/PlayerProj/gradle.properties
  59. 99 0
      Android/PlayerProj/publish-mavencentral.gradle
  60. 9 2
      Android/README.md
  61. BIN
      Android/aar/vap_2.0.6.aar
  62. 0 11
      Introduction.md
  63. 1 0
      MavenCentral.md
  64. 7 6
      QGVAPlayer.podspec
  65. 17 2
      README.md
  66. 64 0
      README_en.md
  67. 68 0
      iOS/.gitignore
  68. 88 0
      iOS/CHANGELOG.md
  69. 8 0
      iOS/QGVAPlayer/QGVAPlayer.xcodeproj/project.pbxproj
  70. 3 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGBaseDecoder.h
  71. 3 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGBaseDecoder.m
  72. 144 53
      iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGMP4FrameHWDecoder.m
  73. 5 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/QGAnimatedImageDecodeManager.h
  74. 59 5
      iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/QGAnimatedImageDecodeManager.m
  75. 3 1
      iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/QGVAPConfigManager.m
  76. 8 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Box.h
  77. 21 1
      iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Box.m
  78. 1 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Parser.h
  79. 116 88
      iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Parser.m
  80. 1 1
      iOS/QGVAPlayer/QGVAPlayer/Classes/Models/QGVAPConfigModel.h
  81. 1 1
      iOS/QGVAPlayer/QGVAPlayer/Classes/Models/QGVAPTextureLoader.m
  82. 82 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/QGVAPWrapView.h
  83. 229 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/QGVAPWrapView.m
  84. 14 0
      iOS/QGVAPlayer/QGVAPlayer/Classes/UIView+VAP.h
  85. 79 18
      iOS/QGVAPlayer/QGVAPlayer/Classes/UIView+VAP.m
  86. 7 7
      iOS/QGVAPlayer/QGVAPlayer/Classes/Views/Metal/QGHWDMetalRenderer.h
  87. 23 23
      iOS/QGVAPlayer/QGVAPlayer/Classes/Views/Metal/QGHWDMetalRenderer.m
  88. 5 4
      iOS/QGVAPlayer/QGVAPlayer/Classes/Views/Metal/Vapx/QGVAPMetalRenderer.m
  89. 4 4
      iOS/QGVAPlayer/QGVAPlayer/Classes/Views/OpenGL/QGHWDMP4OpenGLView.m
  90. 12 0
      iOS/QGVAPlayerDemo/QGVAPlayerDemo.xcodeproj/project.pbxproj
  91. 91 7
      iOS/QGVAPlayerDemo/QGVAPlayerDemo/ViewController.m
  92. BIN
      iOS/QGVAPlayerDemo/Resource/b_frame.mp4
  93. BIN
      iOS/QGVAPlayerDemo/Resource/demo.mp4
  94. BIN
      iOS/QGVAPlayerDemo/Resource/destroy.mp4
  95. BIN
      iOS/QGVAPlayerDemo/Resource/test.mp4
  96. BIN
      iOS/QGVAPlayerDemo/Resource/vap.mp4
  97. BIN
      iOS/QGVAPlayerDemo/Resource/vap1.mp4
  98. BIN
      iOS/QGVAPlayerDemo/Resource/vap_264_classical.mp4
  99. BIN
      iOS/QGVAPlayerDemo/Resource/vap_265.mp4
  100. BIN
      iOS/QGVAPlayerDemo/Resource/vap_265_classical.mp4

+ 22 - 0
.github/ISSUE_TEMPLATE/bug_report.md

@@ -0,0 +1,22 @@
+---
+name: Bug report
+about: Create a report to help us improve
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+# issue 模版
+
+1. 问题描述(重现路径)
+2. 运行环境(手机型号,Android/iOS版本等)
+3. 相关日志
+4. 播放错误的文件(可选)
+
+# issue template
+
+1. Problem description (including steps to reproduce)
+2. Running environment (mobile phone model, Android/iOS version, etc.)
+3. Logs
+4. Error file (optional)

+ 20 - 0
.github/ISSUE_TEMPLATE/feature_request.md

@@ -0,0 +1,20 @@
+---
+name: Feature request
+about: Suggest an idea for this project
+title: ''
+labels: ''
+assignees: ''
+
+---
+
+**Is your feature request related to a problem? Please describe.**
+A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
+
+**Describe the solution you'd like**
+A clear and concise description of what you want to happen.
+
+**Describe alternatives you've considered**
+A clear and concise description of any alternative solutions or features you've considered.
+
+**Additional context**
+Add any other context or screenshots about the feature request here.

+ 1 - 0
Android/PlayerProj/.gitignore

@@ -10,3 +10,4 @@
 /build
 /captures
 .externalNativeBuild
+gradle.properties

+ 8 - 1
Android/PlayerProj/animplayer/build.gradle

@@ -29,4 +29,11 @@ dependencies {
 
 // jcenter 上传(这个要在底部)
 // 上传需要执行此任务 IDE -> gradle-> Tasks/publishing/bintrayUpload
-apply from: file("publish.gradle")
+// apply from: file("publish.gradle")
+
+
+// maven central
+// 上传指令./gradlew uploadArchives
+// https://s01.oss.sonatype.org/
+// Staging Repositories -> close -> release
+// apply from: "../publish-mavencentral.gradle"

+ 1 - 1
Android/PlayerProj/animplayer/publish.gradle

@@ -23,7 +23,7 @@ ext {
     // library artifact(单个module一般就填写library name)
     artifact = 'animplayer'
     libraryName = 'animplayer'
-    libraryVersion = '2.0.9'
+    libraryVersion = '2.0.15'
     libraryDescription = ''
     // bintrayName 是你在网页Repository页面能看到的名称
     bintrayName = 'vap'

+ 45 - 13
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AnimConfigManager.kt

@@ -16,6 +16,7 @@
 package com.tencent.qgame.animplayer
 
 import android.os.SystemClock
+import com.tencent.qgame.animplayer.file.IFileContainer
 import com.tencent.qgame.animplayer.util.ALog
 import org.json.JSONObject
 import java.nio.charset.Charset
@@ -36,17 +37,21 @@ class AnimConfigManager(val player: AnimPlayer) {
      * 解析配置
      * @return true 解析成功 false 解析失败
      */
-    fun parseConfig(fileContainer: FileContainer, defaultVideoMode: Int, defaultFps: Int): Int {
+    fun parseConfig(fileContainer: IFileContainer, enableVersion1: Boolean, defaultVideoMode: Int, defaultFps: Int): Int {
         try {
             isParsingConfig = true
             // 解析vapc
             val time = SystemClock.elapsedRealtime()
             val result = parse(fileContainer, defaultVideoMode, defaultFps)
-            ALog.i(TAG, "parseConfig cost=${SystemClock.elapsedRealtime() - time}ms")
+            ALog.i(TAG, "parseConfig cost=${SystemClock.elapsedRealtime() - time}ms enableVersion1=$enableVersion1 result=$result")
             if (!result) {
                 isParsingConfig = false
                 return Constant.REPORT_ERROR_TYPE_PARSE_CONFIG
             }
+            if (config?.isDefaultConfig == true && !enableVersion1) {
+                isParsingConfig = false
+                return Constant.REPORT_ERROR_TYPE_PARSE_CONFIG
+            }
             // 插件解析配置
             val resultCode = config?.let {
                 player.pluginManager.onConfigCreate(it)
@@ -68,22 +73,48 @@ class AnimConfigManager(val player: AnimPlayer) {
         config?.apply {
             videoWidth = _videoWidth
             videoHeight = _videoHeight
-            if (defaultVideoMode == Constant.VIDEO_MODE_SPLIT_VERTICAL) { // 上下对齐
-                width = _videoWidth
-                height = _videoHeight / 2
-                alphaPointRect = PointRect(0, 0, width, height)
-                rgbPointRect = PointRect(0, height, width, height)
-            } else { // 默认左右对齐
-                width = _videoWidth / 2
-                height = _videoHeight
-                alphaPointRect = PointRect(0, 0, width, height)
-                rgbPointRect = PointRect(width, 0, width, height)
+            when (defaultVideoMode) {
+                Constant.VIDEO_MODE_SPLIT_HORIZONTAL -> {
+                    // 视频左右对齐(alpha左\rgb右)
+                    width = _videoWidth / 2
+                    height = _videoHeight
+                    alphaPointRect = PointRect(0, 0, width, height)
+                    rgbPointRect = PointRect(width, 0, width, height)
+                }
+                Constant.VIDEO_MODE_SPLIT_VERTICAL -> {
+                    // 视频上下对齐(alpha上\rgb下)
+                    width = _videoWidth
+                    height = _videoHeight / 2
+                    alphaPointRect = PointRect(0, 0, width, height)
+                    rgbPointRect = PointRect(0, height, width, height)
+                }
+                Constant.VIDEO_MODE_SPLIT_HORIZONTAL_REVERSE -> {
+                    // 视频左右对齐(rgb左\alpha右)
+                    width = _videoWidth / 2
+                    height = _videoHeight
+                    rgbPointRect = PointRect(0, 0, width, height)
+                    alphaPointRect = PointRect(width, 0, width, height)
+                }
+                Constant.VIDEO_MODE_SPLIT_VERTICAL_REVERSE -> {
+                    // 视频上下对齐(rgb上\alpha下)
+                    width = _videoWidth
+                    height = _videoHeight / 2
+                    rgbPointRect = PointRect(0, 0, width, height)
+                    alphaPointRect = PointRect(0, height, width, height)
+                }
+                else -> {
+                    // 默认视频左右对齐(alpha左\rgb右)
+                    width = _videoWidth / 2
+                    height = _videoHeight
+                    alphaPointRect = PointRect(0, 0, width, height)
+                    rgbPointRect = PointRect(width, 0, width, height)
+                }
             }
         }
     }
 
 
-    private fun parse(fileContainer: FileContainer, defaultVideoMode: Int, defaultFps: Int): Boolean {
+    private fun parse(fileContainer: IFileContainer, defaultVideoMode: Int, defaultFps: Int): Boolean {
 
         val config = AnimConfig()
         this.config = config
@@ -113,6 +144,7 @@ class AnimConfigManager(val player: AnimPlayer) {
                 this.defaultVideoMode = defaultVideoMode
                 fps = defaultFps
             }
+            player.fps = config.fps
             return true
         }
 

+ 19 - 8
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AnimPlayer.kt

@@ -15,12 +15,13 @@
  */
 package com.tencent.qgame.animplayer
 
+import com.tencent.qgame.animplayer.file.IFileContainer
 import com.tencent.qgame.animplayer.inter.IAnimListener
 import com.tencent.qgame.animplayer.mask.MaskConfig
 import com.tencent.qgame.animplayer.plugin.AnimPluginManager
 import com.tencent.qgame.animplayer.util.ALog
 
-class AnimPlayer(val animView: AnimView) {
+class AnimPlayer(val animView: IAnimView) {
 
     companion object {
         private const val TAG = "${Constant.TAG}.AnimPlayer"
@@ -34,6 +35,8 @@ class AnimPlayer(val animView: AnimView) {
             decoder?.fps = value
             field = value
         }
+    // 设置默认的fps <= 0 表示以vapc配置为准 > 0  表示以此设置为准
+    var defaultFps: Int = 0
     var playLoop: Int = 0
         set(value) {
             decoder?.playLoop = value
@@ -42,18 +45,22 @@ class AnimPlayer(val animView: AnimView) {
         }
     var supportMaskBoolean : Boolean = false
     var maskEdgeBlurBoolean : Boolean = false
+    // 是否兼容老版本 默认不兼容
+    var enableVersion1 : Boolean = false
     // 视频模式
     var videoMode: Int = Constant.VIDEO_MODE_SPLIT_HORIZONTAL
     var isDetachedFromWindow = false
     var isSurfaceAvailable = false
     var startRunnable: Runnable? = null
     var isStartRunning = false // 启动时运行状态
+    var isMute = false // 是否静音
 
     val configManager = AnimConfigManager(this)
     val pluginManager = AnimPluginManager(this)
 
     fun onSurfaceTextureDestroyed() {
         isSurfaceAvailable = false
+        isStartRunning = false
         decoder?.destroy()
         audioPlayer?.destroy()
     }
@@ -69,20 +76,22 @@ class AnimPlayer(val animView: AnimView) {
         decoder?.onSurfaceSizeChanged(width, height)
     }
 
-    fun startPlay(fileContainer: FileContainer) {
+    fun startPlay(fileContainer: IFileContainer) {
         isStartRunning = true
         prepareDecoder()
         if (decoder?.prepareThread() == false) {
-            decoder?.onFailed(Constant.REPORT_ERROR_TYPE_CREATE_THREAD, Constant.ERROR_MSG_CREATE_THREAD)
             isStartRunning = false
+            decoder?.onFailed(Constant.REPORT_ERROR_TYPE_CREATE_THREAD, Constant.ERROR_MSG_CREATE_THREAD)
+            decoder?.onVideoComplete()
             return
         }
         // 在线程中解析配置
         decoder?.renderThread?.handler?.post {
-            val result = configManager.parseConfig(fileContainer, videoMode, fps)
+            val result = configManager.parseConfig(fileContainer, enableVersion1, videoMode, defaultFps)
             if (result != Constant.OK) {
-                decoder?.onFailed(result, Constant.getErrorMsg(result))
                 isStartRunning = false
+                decoder?.onFailed(result, Constant.getErrorMsg(result))
+                decoder?.onVideoComplete()
                 return@post
             }
             ALog.i(TAG, "parse ${configManager.config}")
@@ -96,16 +105,18 @@ class AnimPlayer(val animView: AnimView) {
         }
     }
 
-    private fun innerStartPlay(fileContainer: FileContainer) {
+    private fun innerStartPlay(fileContainer: IFileContainer) {
         synchronized(AnimPlayer::class.java) {
             if (isSurfaceAvailable) {
                 isStartRunning = false
                 decoder?.start(fileContainer)
-                audioPlayer?.start(fileContainer)
+                if (!isMute) {
+                    audioPlayer?.start(fileContainer)
+                }
             } else {
                  startRunnable = Runnable {
                     innerStartPlay(fileContainer)
-                }
+                 }
                 animView.prepareTextureView()
             }
         }

+ 115 - 74
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AnimView.kt

@@ -22,36 +22,46 @@ import android.os.Build
 import android.os.Handler
 import android.os.Looper
 import android.util.AttributeSet
-import android.view.MotionEvent
 import android.view.TextureView
 import android.view.View
-import android.view.ViewGroup
 import android.widget.FrameLayout
+import com.tencent.qgame.animplayer.file.AssetsFileContainer
+import com.tencent.qgame.animplayer.file.FileContainer
+import com.tencent.qgame.animplayer.file.IFileContainer
 import com.tencent.qgame.animplayer.inter.IAnimListener
 import com.tencent.qgame.animplayer.inter.IFetchResource
 import com.tencent.qgame.animplayer.inter.OnResourceClickListener
 import com.tencent.qgame.animplayer.mask.MaskConfig
+import com.tencent.qgame.animplayer.textureview.InnerTextureView
 import com.tencent.qgame.animplayer.util.ALog
+import com.tencent.qgame.animplayer.util.IScaleType
+import com.tencent.qgame.animplayer.util.ScaleType
+import com.tencent.qgame.animplayer.util.ScaleTypeUtil
 import java.io.File
 
 open class AnimView @JvmOverloads constructor(context: Context, attrs: AttributeSet? = null, defStyleAttr: Int = 0):
+    IAnimView,
     FrameLayout(context, attrs, defStyleAttr),
     TextureView.SurfaceTextureListener {
 
     companion object {
         private const val TAG = "${Constant.TAG}.AnimView"
     }
+    private lateinit var player: AnimPlayer
+
     private val uiHandler by lazy { Handler(Looper.getMainLooper()) }
     private var surface: SurfaceTexture? = null
-    private var player: AnimPlayer? = null
     private var animListener: IAnimListener? = null
-    private var innerTextureView: TextureView? = null
-    private var lastFile: FileContainer? = null
+    private var innerTextureView: InnerTextureView? = null
+    private var lastFile: IFileContainer? = null
+    private val scaleTypeUtil = ScaleTypeUtil()
+
     // 代理监听
     private val animProxyListener by lazy {
         object : IAnimListener {
 
             override fun onVideoConfigReady(config: AnimConfig): Boolean {
+                scaleTypeUtil.setVideoSize(config.width, config.height)
                 return animListener?.onVideoConfigReady(config) ?: super.onVideoConfigReady(config)
             }
 
@@ -80,33 +90,44 @@ open class AnimView @JvmOverloads constructor(context: Context, attrs: Attribute
         }
     }
 
+    // 保证AnimView已经布局完成才加入TextureView
+    private var onSizeChangedCalled = false
+    private var needPrepareTextureView = false
+    private val prepareTextureViewRunnable = Runnable {
+        removeAllViews()
+        innerTextureView = InnerTextureView(context).apply {
+            player = this@AnimView.player
+            isOpaque = false
+            surfaceTextureListener = this@AnimView
+            layoutParams = scaleTypeUtil.getLayoutParam(this)
+        }
+        addView(innerTextureView)
+    }
+
 
     init {
         hide()
         player = AnimPlayer(this)
-        player?.animListener = animProxyListener
+        player.animListener = animProxyListener
     }
 
 
-    fun prepareTextureView() {
-        uiHandler.post {
-            removeAllViews()
-            innerTextureView = TextureView(context).apply {
-                isOpaque = false
-                surfaceTextureListener = this@AnimView
-                layoutParams = FrameLayout.LayoutParams(ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT)
-            }
-            addView(innerTextureView)
+    override fun prepareTextureView() {
+        if (onSizeChangedCalled) {
+            uiHandler.post(prepareTextureViewRunnable)
+        } else {
+            ALog.e(TAG, "onSizeChanged not called")
+            needPrepareTextureView = true
         }
     }
 
-    fun getSurfaceTexture(): SurfaceTexture? {
+    override fun getSurfaceTexture(): SurfaceTexture? {
         return innerTextureView?.surfaceTexture ?: surface
     }
 
     override fun onSurfaceTextureSizeChanged(surface: SurfaceTexture, width: Int, height: Int) {
         ALog.i(TAG, "onSurfaceTextureSizeChanged $width x $height")
-        player?.onSurfaceTextureSizeChanged(width, height)
+        player.onSurfaceTextureSizeChanged(width, height)
     }
 
     override fun onSurfaceTextureUpdated(surface: SurfaceTexture) {
@@ -114,27 +135,40 @@ open class AnimView @JvmOverloads constructor(context: Context, attrs: Attribute
 
     override fun onSurfaceTextureDestroyed(surface: SurfaceTexture): Boolean {
         ALog.i(TAG, "onSurfaceTextureDestroyed")
-        player?.onSurfaceTextureDestroyed()
+        this.surface = null
+        player.onSurfaceTextureDestroyed()
         uiHandler.post {
             innerTextureView?.surfaceTextureListener = null
             innerTextureView = null
             removeAllViews()
         }
-        return !belowKitKat()
+        return true
     }
 
     override fun onSurfaceTextureAvailable(surface: SurfaceTexture, width: Int, height: Int) {
-        ALog.i(TAG, "onSurfaceTextureAvailable")
+        ALog.i(TAG, "onSurfaceTextureAvailable width=$width height=$height")
         this.surface = surface
-        player?.onSurfaceTextureAvailable(width, height)
+        player.onSurfaceTextureAvailable(width, height)
+    }
+
+    override fun onSizeChanged(w: Int, h: Int, oldw: Int, oldh: Int) {
+        super.onSizeChanged(w, h, oldw, oldh)
+        ALog.i(TAG, "onSizeChanged w=$w, h=$h")
+        scaleTypeUtil.setLayoutSize(w, h)
+        onSizeChangedCalled = true
+        // 需要保证onSizeChanged被调用
+        if (needPrepareTextureView) {
+            needPrepareTextureView = false
+            prepareTextureView()
+        }
     }
 
     override fun onAttachedToWindow() {
         ALog.i(TAG, "onAttachedToWindow")
         super.onAttachedToWindow()
-        player?.isDetachedFromWindow = false
+        player.isDetachedFromWindow = false
         // 自动恢复播放
-        if ((player?.playLoop ?: 0) > 0) {
+        if (player.playLoop > 0) {
             lastFile?.apply {
                 startPlay(this)
             }
@@ -144,101 +178,123 @@ open class AnimView @JvmOverloads constructor(context: Context, attrs: Attribute
     override fun onDetachedFromWindow() {
         ALog.i(TAG, "onDetachedFromWindow")
         super.onDetachedFromWindow()
-        if (belowKitKat()) {
-            release()
-        }
-        player?.isDetachedFromWindow = true
-        player?.onSurfaceTextureDestroyed()
+        player.isDetachedFromWindow = true
+        player.onSurfaceTextureDestroyed()
     }
 
-    override fun dispatchTouchEvent(ev: MotionEvent?): Boolean {
-        val res = isRunning() && ev != null && player?.pluginManager?.onDispatchTouchEvent(ev) == true
-        return if (!res) super.dispatchTouchEvent(ev) else true
-    }
 
-    open fun setAnimListener(animListener: IAnimListener?) {
+    override fun setAnimListener(animListener: IAnimListener?) {
         this.animListener = animListener
     }
 
-    open fun setFetchResource(fetchResource: IFetchResource?) {
-        player?.pluginManager?.getMixAnimPlugin()?.resourceRequest = fetchResource
+    override fun setFetchResource(fetchResource: IFetchResource?) {
+        player.pluginManager.getMixAnimPlugin()?.resourceRequest = fetchResource
     }
 
-    open fun setOnResourceClickListener(resourceClickListener: OnResourceClickListener?) {
-        player?.pluginManager?.getMixAnimPlugin()?.resourceClickListener = resourceClickListener
+    override fun setOnResourceClickListener(resourceClickListener: OnResourceClickListener?) {
+        player.pluginManager.getMixAnimPlugin()?.resourceClickListener = resourceClickListener
     }
 
     /**
      * 兼容方案,优先保证表情显示
      */
     open fun enableAutoTxtColorFill(enable: Boolean) {
-        player?.pluginManager?.getMixAnimPlugin()?.autoTxtColorFill = enable
+        player.pluginManager.getMixAnimPlugin()?.autoTxtColorFill = enable
     }
 
-    fun setLoop(playLoop: Int) {
-        player?.playLoop = playLoop
+    override fun setLoop(playLoop: Int) {
+        player.playLoop = playLoop
     }
 
-    fun supportMask(isSupport : Boolean, isEdgeBlur : Boolean) {
-        player?.supportMaskBoolean = isSupport
-        player?.maskEdgeBlurBoolean = isEdgeBlur
+    override fun supportMask(isSupport : Boolean, isEdgeBlur : Boolean) {
+        player.supportMaskBoolean = isSupport
+        player.maskEdgeBlurBoolean = isEdgeBlur
     }
 
-    fun updateMaskConfig(maskConfig: MaskConfig?) {
-        player?.updateMaskConfig(maskConfig)
+    override fun updateMaskConfig(maskConfig: MaskConfig?) {
+        player.updateMaskConfig(maskConfig)
+    }
+
+
+    @Deprecated("Compatible older version mp4, default false")
+    fun enableVersion1(enable: Boolean) {
+        player.enableVersion1 = enable
     }
 
     // 兼容老版本视频模式
     @Deprecated("Compatible older version mp4")
     fun setVideoMode(mode: Int) {
-        player?.videoMode = mode
+        player.videoMode = mode
     }
 
-    fun setFps(fps: Int) {
-        player?.fps = fps
+    override fun setFps(fps: Int) {
+        ALog.i(TAG, "setFps=$fps")
+        player.defaultFps = fps
     }
 
-    fun startPlay(file: File) {
+    override fun setScaleType(type : ScaleType) {
+        scaleTypeUtil.currentScaleType = type
+    }
+
+    override fun setScaleType(scaleType: IScaleType) {
+        scaleTypeUtil.scaleTypeImpl = scaleType
+    }
+
+    /**
+     * @param isMute true 静音
+     */
+    override fun setMute(isMute: Boolean) {
+        ALog.e(TAG, "set mute=$isMute")
+        player.isMute = isMute
+    }
+
+    override fun startPlay(file: File) {
         try {
             val fileContainer = FileContainer(file)
             startPlay(fileContainer)
         } catch (e: Throwable) {
             animProxyListener.onFailed(Constant.REPORT_ERROR_TYPE_FILE_ERROR, Constant.ERROR_MSG_FILE_ERROR)
+            animProxyListener.onVideoComplete()
         }
     }
 
-    fun startPlay(assetManager: AssetManager, assetsPath: String) {
+    override fun startPlay(assetManager: AssetManager, assetsPath: String) {
         try {
-            val fileContainer = FileContainer(assetManager, assetsPath)
+            val fileContainer = AssetsFileContainer(assetManager, assetsPath)
             startPlay(fileContainer)
         } catch (e: Throwable) {
             animProxyListener.onFailed(Constant.REPORT_ERROR_TYPE_FILE_ERROR, Constant.ERROR_MSG_FILE_ERROR)
+            animProxyListener.onVideoComplete()
         }
     }
 
 
-    fun startPlay(fileContainer: FileContainer) {
+    override fun startPlay(fileContainer: IFileContainer) {
         ui {
             if (visibility != View.VISIBLE) {
                 ALog.e(TAG, "AnimView is GONE, can't play")
                 return@ui
             }
-            if (player?.isRunning() == false) {
+            if (!player.isRunning()) {
                 lastFile = fileContainer
-                player?.startPlay(fileContainer)
+                player.startPlay(fileContainer)
             } else {
-                ALog.i(TAG, "is running can not start")
+                ALog.e(TAG, "is running can not start")
             }
         }
     }
 
 
-    fun stopPlay() {
-        player?.stopPlay()
+    override fun stopPlay() {
+        player.stopPlay()
+    }
+
+    override fun isRunning(): Boolean {
+        return player.isRunning()
     }
 
-    fun isRunning(): Boolean {
-        return player?.isRunning() ?: false
+    override fun getRealSize(): Pair<Int, Int> {
+        return scaleTypeUtil.getRealSize()
     }
 
     private fun hide() {
@@ -252,19 +308,4 @@ open class AnimView @JvmOverloads constructor(context: Context, attrs: Attribute
         if (Looper.myLooper() == Looper.getMainLooper()) f() else uiHandler.post { f() }
     }
 
-    /**
-     * fix Error detachFromGLContext crash
-     */
-    private fun belowKitKat(): Boolean {
-        return Build.VERSION.SDK_INT <= 19
-    }
-
-    private fun release() {
-        try {
-            surface?.release()
-        } catch (error: Throwable) {
-            ALog.e(TAG, "failed to release mSurfaceTexture= $surface: ${error.message}", error)
-        }
-        surface = null
-    }
 }

+ 14 - 6
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/AudioPlayer.kt

@@ -16,6 +16,7 @@
 package com.tencent.qgame.animplayer
 
 import android.media.*
+import com.tencent.qgame.animplayer.file.IFileContainer
 import com.tencent.qgame.animplayer.util.ALog
 import com.tencent.qgame.animplayer.util.MediaUtil
 import java.lang.RuntimeException
@@ -41,7 +42,7 @@ class AudioPlayer(val player: AnimPlayer) {
         return Decoder.createThread(decodeThread, "anim_audio_thread")
     }
 
-    fun start(fileContainer: FileContainer) {
+    fun start(fileContainer: IFileContainer) {
         isStopReq = false
         needDestroy = false
         if (!prepareThread()) return
@@ -63,7 +64,7 @@ class AudioPlayer(val player: AnimPlayer) {
         isStopReq = true
     }
 
-    private fun startPlay(fileContainer: FileContainer) {
+    private fun startPlay(fileContainer: IFileContainer) {
         val extractor = MediaUtil.getExtractor(fileContainer)
         this.extractor = extractor
         val audioIndex = MediaUtil.selectAudioTrack(extractor)
@@ -74,7 +75,14 @@ class AudioPlayer(val player: AnimPlayer) {
         }
         extractor.selectTrack(audioIndex)
         val format = extractor.getTrackFormat(audioIndex)
-        val mime =format.getString(MediaFormat.KEY_MIME) ?: ""
+        val mime = format.getString(MediaFormat.KEY_MIME) ?: ""
+        ALog.i(TAG, "audio mime=$mime")
+        if (!MediaUtil.checkSupportCodec(mime)) {
+            ALog.e(TAG, "mime=$mime not support")
+            release()
+            return
+        }
+
         val decoder = MediaCodec.createDecoderByType(mime).apply {
             configure(format, null, null, 0)
             start()
@@ -91,7 +99,7 @@ class AudioPlayer(val player: AnimPlayer) {
         val bufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
         val audioTrack = AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM)
         this.audioTrack = audioTrack
-        val state = audioTrack.getState()
+        val state = audioTrack.state
         if (state != AudioTrack.STATE_INITIALIZED) {
             release()
             ALog.e(TAG, "init audio track failure")
@@ -120,7 +128,7 @@ class AudioPlayer(val player: AnimPlayer) {
             if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                 decodeOutputBuffers = decoder.outputBuffers
             }
-            if (outputIndex > 0) {
+            if (outputIndex >= 0) {
                 val outputBuffer = decodeOutputBuffers[outputIndex]
                 val chunkPCM = ByteArray(bufferInfo.size)
                 outputBuffer.get(chunkPCM)
@@ -172,8 +180,8 @@ class AudioPlayer(val player: AnimPlayer) {
     }
 
     fun destroy() {
-        needDestroy = true
         if (isRunning) {
+            needDestroy = true
             stop()
         } else {
             destroyInner()

+ 8 - 2
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/Constant.kt

@@ -25,9 +25,13 @@ object Constant {
 
     // 视频对齐方式 (兼容老版本视频模式)
     @Deprecated("Compatible older version mp4")
-    const val VIDEO_MODE_SPLIT_HORIZONTAL = 1 // 视频左右对齐
+    const val VIDEO_MODE_SPLIT_HORIZONTAL = 1 // 视频左右对齐(alpha左\rgb右)
     @Deprecated("Compatible older version mp4")
-    const val VIDEO_MODE_SPLIT_VERTICAL = 2 // 视频上下对齐
+    const val VIDEO_MODE_SPLIT_VERTICAL = 2 // 视频上下对齐(alpha上\rgb下)
+    @Deprecated("Compatible older version mp4")
+    const val VIDEO_MODE_SPLIT_HORIZONTAL_REVERSE = 3 // 视频左右对齐(rgb左\alpha右)
+    @Deprecated("Compatible older version mp4")
+    const val VIDEO_MODE_SPLIT_VERTICAL_REVERSE = 4 // 视频上下对齐(rgb上\alpha下)
 
 
     const val OK = 0 // 成功
@@ -39,6 +43,7 @@ object Constant {
     const val REPORT_ERROR_TYPE_PARSE_CONFIG = 10005 // 配置解析失败
     const val REPORT_ERROR_TYPE_CONFIG_PLUGIN_MIX = 10006 // vapx融合动画资源获取失败
     const val REPORT_ERROR_TYPE_FILE_ERROR = 10007 // 文件无法读取
+    const val REPORT_ERROR_TYPE_HEVC_NOT_SUPPORT = 10008 // 不支持h265
 
     const val ERROR_MSG_EXTRACTOR_EXC = "0x1 MediaExtractor exception" // MediaExtractor exception
     const val ERROR_MSG_DECODE_EXC = "0x2 MediaCodec exception" // MediaCodec exception
@@ -47,6 +52,7 @@ object Constant {
     const val ERROR_MSG_PARSE_CONFIG = "0x5 parse config fail" // 配置解析失败
     const val ERROR_MSG_CONFIG_PLUGIN_MIX = "0x6 vapx fail" // vapx融合动画资源获取失败
     const val ERROR_MSG_FILE_ERROR = "0x7 file can't read" // 文件无法读取
+    const val ERROR_MSG_HEVC_NOT_SUPPORT = "0x8 hevc not support" // 不支持h265
 
 
     fun getErrorMsg(errorType: Int, errorMsg: String? = null): String {

+ 26 - 6
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/Decoder.kt

@@ -18,6 +18,7 @@ package com.tencent.qgame.animplayer
 import android.os.Build
 import android.os.HandlerThread
 import android.os.Handler
+import com.tencent.qgame.animplayer.file.IFileContainer
 import com.tencent.qgame.animplayer.inter.IAnimListener
 import com.tencent.qgame.animplayer.util.ALog
 import com.tencent.qgame.animplayer.util.SpeedControlUtil
@@ -55,7 +56,7 @@ abstract class Decoder(val player: AnimPlayer) : IAnimListener {
         }
     }
 
-    var render: Render? = null
+    var render: IRenderListener? = null
     val renderThread = HandlerHolder(null, null)
     val decodeThread = HandlerHolder(null, null)
     private var surfaceWidth = 0
@@ -70,7 +71,7 @@ abstract class Decoder(val player: AnimPlayer) : IAnimListener {
     var isStopReq = false // 是否需要停止
     val speedControlUtil by lazy { SpeedControlUtil() }
 
-    abstract fun start(fileContainer: FileContainer)
+    abstract fun start(fileContainer: IFileContainer)
 
     fun stop() {
         isStopReq = true
@@ -82,16 +83,20 @@ abstract class Decoder(val player: AnimPlayer) : IAnimListener {
         return createThread(renderThread, "anim_render_thread") && createThread(decodeThread, "anim_decode_thread")
     }
 
-    fun prepareRender(): Boolean {
+    fun prepareRender(needYUV: Boolean): Boolean {
         if (render == null) {
             ALog.i(TAG, "prepareRender")
             player.animView.getSurfaceTexture()?.apply {
-                render = Render(this).apply {
-                    updateViewPort(surfaceWidth, surfaceHeight)
+                if (needYUV) {
+                    ALog.i(TAG, "use yuv render")
+                    render = YUVRender(this)
+                } else {
+                    render = Render(this).apply {
+                        updateViewPort(surfaceWidth, surfaceHeight)
+                    }
                 }
             }
         }
-        render?.createTexture()
         return render != null
     }
 
@@ -103,6 +108,21 @@ abstract class Decoder(val player: AnimPlayer) : IAnimListener {
         player.pluginManager.onRenderCreate()
     }
 
+    /**
+     * decode过程中视频尺寸变化
+     * 主要是没有16进制对齐的老视频
+     */
+    fun videoSizeChange(newWidth: Int, newHeight: Int) {
+        if (newWidth <= 0 || newHeight <= 0) return
+        val config = player.configManager.config ?: return
+        if (config.videoWidth != newWidth || config.videoHeight != newHeight) {
+            ALog.i(TAG, "videoSizeChange old=(${config.videoWidth},${config.videoHeight}), new=($newWidth,$newHeight)")
+            config.videoWidth = newWidth
+            config.videoHeight = newHeight
+            render?.setAnimConfig(config)
+        }
+    }
+
 
     fun destroyThread() {
         if (player.isDetachedFromWindow) {

+ 5 - 1
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/EGLUtil.kt

@@ -32,6 +32,7 @@ class EGLUtil {
     private var eglSurface: EGLSurface? = null
     private var eglContext: EGLContext? = null
     private var eglConfig: EGLConfig? = null
+    private var surface: Surface? = null
 
     init {
         eglDisplay = EGL10.EGL_NO_DISPLAY
@@ -46,7 +47,8 @@ class EGLUtil {
             val version = IntArray(2)
             egl?.eglInitialize(eglDisplay, version)
             eglConfig = chooseConfig()
-            eglSurface = egl?.eglCreateWindowSurface(eglDisplay, eglConfig, Surface(surfaceTexture), null)
+            surface = Surface(surfaceTexture)
+            eglSurface = egl?.eglCreateWindowSurface(eglDisplay, eglConfig, surface, null)
             eglContext = createContext(egl, eglDisplay, eglConfig)
             if (eglSurface == null || eglSurface == EGL10.EGL_NO_SURFACE) {
                 ALog.e(TAG, "error:${Integer.toHexString(egl?.eglGetError() ?: 0)}")
@@ -105,6 +107,8 @@ class EGLUtil {
             eglDestroySurface(eglDisplay, eglSurface)
             eglDestroyContext(eglDisplay, eglContext)
             eglTerminate(eglDisplay)
+            surface?.release()
+            surface = null
         }
     }
 

+ 0 - 98
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/FileContainer.kt

@@ -1,98 +0,0 @@
-/*
- * Tencent is pleased to support the open source community by making vap available.
- *
- * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
- *
- * Licensed under the MIT License (the "License"); you may not use this file except in
- * compliance with the License. You may obtain a copy of the License at
- *
- * http://opensource.org/licenses/MIT
- *
- * Unless required by applicable law or agreed to in writing, software distributed under the License is
- * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
- * either express or implied. See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package com.tencent.qgame.animplayer
-
-import android.content.res.AssetFileDescriptor
-import android.content.res.AssetManager
-import android.media.MediaExtractor
-import java.io.File
-import java.io.FileNotFoundException
-import java.io.RandomAccessFile
-
-class FileContainer {
-
-    private var isAssets = false
-
-    private var file: File? = null
-    private var randomAccessFile: RandomAccessFile? = null
-    private var assetFd: AssetFileDescriptor? = null
-    private var assetsInputStream: AssetManager.AssetInputStream? = null
-
-    constructor(file: File) {
-        isAssets = false
-        this.file = file
-        if (!(file.exists() && file.isFile && file.canRead())) throw FileNotFoundException("Unable to read $file")
-    }
-
-    constructor(assetManager: AssetManager, assetsPath: String) {
-        isAssets = true
-        assetFd = assetManager.openFd(assetsPath)
-        assetsInputStream = assetManager.open(assetsPath, AssetManager.ACCESS_STREAMING) as AssetManager.AssetInputStream
-    }
-
-
-    fun setDataSource(extractor: MediaExtractor) {
-        if (isAssets) {
-            val assetFd = this.assetFd ?: return
-            if (assetFd.declaredLength < 0) {
-                extractor.setDataSource(assetFd.fileDescriptor)
-            } else {
-                extractor.setDataSource(assetFd.fileDescriptor, assetFd.startOffset, assetFd.declaredLength)
-            }
-        } else {
-            val file = this.file ?: return
-            extractor.setDataSource(file.toString())
-        }
-    }
-
-    fun startRandomRead() {
-        if (isAssets) return
-        val file = this.file ?: return
-        randomAccessFile = RandomAccessFile(file, "r")
-    }
-
-    fun read(b: ByteArray, off: Int, len: Int):Int {
-        return if (isAssets) {
-            assetsInputStream?.read(b, off, len) ?: -1
-        } else {
-            randomAccessFile?.read(b, off, len) ?: -1
-        }
-    }
-
-    fun skip(pos: Long) {
-        if (isAssets) {
-            assetsInputStream?.skip(pos)
-        } else {
-            randomAccessFile?.skipBytes(pos.toInt())
-        }
-    }
-
-    fun closeRandomRead() {
-        if (isAssets) {
-            assetsInputStream?.close()
-        } else {
-            randomAccessFile?.close()
-        }
-    }
-
-    fun close() {
-        if (isAssets) {
-            assetFd?.close()
-            assetsInputStream?.close()
-        }
-    }
-
-}

+ 164 - 31
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/HardDecoder.kt

@@ -17,12 +17,14 @@ package com.tencent.qgame.animplayer
 
 import android.graphics.SurfaceTexture
 import android.media.MediaCodec
+import android.media.MediaCodecInfo
 import android.media.MediaExtractor
 import android.media.MediaFormat
+import android.os.Build
 import android.view.Surface
+import com.tencent.qgame.animplayer.file.IFileContainer
 import com.tencent.qgame.animplayer.util.ALog
 import com.tencent.qgame.animplayer.util.MediaUtil
-import java.lang.RuntimeException
 
 class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameAvailableListener {
 
@@ -31,11 +33,24 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
         private const val TAG = "${Constant.TAG}.HardDecoder"
     }
 
+    private var surface: Surface? = null
     private var glTexture: SurfaceTexture? = null
     private val bufferInfo by lazy { MediaCodec.BufferInfo() }
     private var needDestroy = false
 
-    override fun start(fileContainer: FileContainer) {
+    // 动画的原始尺寸
+    private var videoWidth = 0
+    private var videoHeight = 0
+
+    // 动画对齐后的尺寸
+    private var alignWidth = 0
+    private var alignHeight = 0
+
+    // 动画是否需要走YUV渲染逻辑的标志位
+    private var needYUV = false
+    private var outputFormat: MediaFormat? = null
+
+    override fun start(fileContainer: IFileContainer) {
         isStopReq = false
         needDestroy = false
         isRunning = true
@@ -44,15 +59,18 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
         }
     }
 
-
     override fun onFrameAvailable(surfaceTexture: SurfaceTexture?) {
         if (isStopReq) return
         ALog.d(TAG, "onFrameAvailable")
+        renderData()
+    }
+
+    private fun renderData() {
         renderThread.handler?.post {
             try {
                 glTexture?.apply {
                     updateTexImage()
-                    render?.renderFrame(player.configManager.config)
+                    render?.renderFrame()
                     player.pluginManager.onRendering()
                     render?.swapBuffers()
                 }
@@ -62,16 +80,7 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
         }
     }
 
-    private fun startPlay(fileContainer: FileContainer) {
-        try {
-            if (!prepareRender()) {
-                throw RuntimeException("render create fail")
-            }
-        } catch (t: Throwable) {
-            onFailed(Constant.REPORT_ERROR_TYPE_CREATE_RENDER, "${Constant.ERROR_MSG_CREATE_RENDER} e=$t")
-            release(null, null)
-            return
-        }
+    private fun startPlay(fileContainer: IFileContainer) {
 
         var extractor: MediaExtractor? = null
         var decoder: MediaCodec? = null
@@ -87,9 +96,43 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
             extractor.selectTrack(trackIndex)
             format = extractor.getTrackFormat(trackIndex)
             if (format == null) throw RuntimeException("format is null")
-            val videoWidth = format.getInteger(MediaFormat.KEY_WIDTH)
-            val videoHeight = format.getInteger(MediaFormat.KEY_HEIGHT)
+
+            // 是否支持h265
+            if (MediaUtil.checkIsHevc(format)) {
+                if (Build.VERSION.SDK_INT  < Build.VERSION_CODES.LOLLIPOP
+                    || !MediaUtil.checkSupportCodec(MediaUtil.MIME_HEVC)) {
+
+                    onFailed(Constant.REPORT_ERROR_TYPE_HEVC_NOT_SUPPORT,
+                        "${Constant.ERROR_MSG_HEVC_NOT_SUPPORT} " +
+                                "sdk:${Build.VERSION.SDK_INT}" +
+                                ",support hevc:" + MediaUtil.checkSupportCodec(MediaUtil.MIME_HEVC))
+                    release(null, null)
+                    return
+                }
+            }
+
+            videoWidth = format.getInteger(MediaFormat.KEY_WIDTH)
+            videoHeight = format.getInteger(MediaFormat.KEY_HEIGHT)
+            // 防止没有INFO_OUTPUT_FORMAT_CHANGED时导致alignWidth和alignHeight不会被赋值一直是0
+            alignWidth = videoWidth
+            alignHeight = videoHeight
             ALog.i(TAG, "Video size is $videoWidth x $videoHeight")
+
+            // 由于使用mediacodec解码老版本素材时对宽度1500尺寸的视频进行数据对齐,解码后的宽度变成1504,导致采样点出现偏差播放异常
+            // 所以当开启兼容老版本视频模式并且老版本视频的宽度不能被16整除时要走YUV渲染逻辑
+            // 但是这样直接判断有风险,后期想办法改
+            needYUV = videoWidth % 16 != 0 && player.enableVersion1
+
+            try {
+                if (!prepareRender(needYUV)) {
+                    throw RuntimeException("render create fail")
+                }
+            } catch (t: Throwable) {
+                onFailed(Constant.REPORT_ERROR_TYPE_CREATE_RENDER, "${Constant.ERROR_MSG_CREATE_RENDER} e=$t")
+                release(null, null)
+                return
+            }
+
             preparePlay(videoWidth, videoHeight)
 
             render?.apply {
@@ -107,38 +150,47 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
             return
         }
 
-
         try {
             val mime = format.getString(MediaFormat.KEY_MIME) ?: ""
             ALog.i(TAG, "Video MIME is $mime")
             decoder = MediaCodec.createDecoderByType(mime).apply {
-                configure(format, Surface(glTexture), null, 0)
+                if (needYUV) {
+                    format.setInteger(
+                            MediaFormat.KEY_COLOR_FORMAT,
+                            MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar
+                    )
+                    configure(format, null, null, 0)
+                } else {
+                    surface = Surface(glTexture)
+                    configure(format, surface, null, 0)
+                }
+
                 start()
                 decodeThread.handler?.post {
                     try {
-                        startDecode(extractor, this, trackIndex)
+                        startDecode(extractor, this)
                     } catch (e: Throwable) {
+                        ALog.e(TAG, "MediaCodec exception e=$e", e)
                         onFailed(Constant.REPORT_ERROR_TYPE_DECODE_EXC, "${Constant.ERROR_MSG_DECODE_EXC} e=$e")
                         release(decoder, extractor)
                     }
                 }
             }
         } catch (e: Throwable) {
-            ALog.e(TAG, "MediaCodec exception e=$e", e)
+            ALog.e(TAG, "MediaCodec configure exception e=$e", e)
             onFailed(Constant.REPORT_ERROR_TYPE_DECODE_EXC, "${Constant.ERROR_MSG_DECODE_EXC} e=$e")
             release(decoder, extractor)
             return
         }
     }
 
-
-
-    private fun startDecode(extractor: MediaExtractor ,decoder: MediaCodec, trackIndex: Int) {
+    private fun startDecode(extractor: MediaExtractor ,decoder: MediaCodec) {
         val TIMEOUT_USEC = 10000L
         var inputChunk = 0
         var outputDone = false
         var inputDone = false
-        var frameIndex = 1
+        var frameIndex = 0
+        var isLoop = false
 
         val decoderInputBuffers = decoder.inputBuffers
 
@@ -175,7 +227,23 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
                 when {
                     decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER -> ALog.d(TAG, "no output from decoder available")
                     decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED -> ALog.d(TAG, "decoder output buffers changed")
-                    decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> ALog.d(TAG, "decoder output format changed: ${decoder.outputFormat}")
+                    decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED -> {
+                        outputFormat = decoder.outputFormat
+                        outputFormat?.apply {
+                            try {
+                                // 有可能取到空值,做一层保护
+                                val stride = getInteger("stride")
+                                val sliceHeight = getInteger("slice-height")
+                                if (stride > 0 && sliceHeight > 0) {
+                                    alignWidth = stride
+                                    alignHeight = sliceHeight
+                                }
+                            } catch (t: Throwable) {
+                                ALog.e(TAG, "$t", t)
+                            }
+                        }
+                        ALog.i(TAG, "decoder output format changed: $outputFormat")
+                    }
                     decoderStatus < 0 -> {
                         throw RuntimeException("unexpected result from decoder.dequeueOutputBuffer: $decoderStatus")
                     }
@@ -190,21 +258,31 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
                         if (doRender) {
                             speedControlUtil.preRender(bufferInfo.presentationTimeUs)
                         }
-                        decoder.releaseOutputBuffer(decoderStatus, doRender)
-                        if (frameIndex == 1) {
+
+                        if (needYUV && doRender) {
+                            yuvProcess(decoder, decoderStatus)
+                        }
+
+                        // release & render
+                        decoder.releaseOutputBuffer(decoderStatus, doRender && !needYUV)
+
+                        if (frameIndex == 0 && !isLoop) {
                             onVideoStart()
                         }
                         player.pluginManager.onDecoding(frameIndex)
                         onVideoRender(frameIndex, player.configManager.config)
+
                         frameIndex++
                         ALog.d(TAG, "decode frameIndex=$frameIndex")
                         if (loop > 0) {
                             ALog.d(TAG, "Reached EOD, looping")
+                            player.pluginManager.onLoopStart()
                             extractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC)
                             inputDone = false
                             decoder.flush()
                             speedControlUtil.reset()
-                            frameIndex = 1
+                            frameIndex = 0
+                            isLoop = true
                         }
                         if (outputDone) {
                             release(decoder, extractor)
@@ -216,6 +294,59 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
 
     }
 
+    /**
+     * 获取到解码后每一帧的YUV数据,裁剪出正确的尺寸
+     */
+    private fun yuvProcess(decoder: MediaCodec, outputIndex: Int) {
+        val outputBuffer = decoder.outputBuffers[outputIndex]
+        outputBuffer?.let {
+            it.position(0)
+            it.limit(bufferInfo.offset + bufferInfo.size)
+            var yuvData = ByteArray(outputBuffer.remaining())
+            outputBuffer.get(yuvData)
+
+            if (yuvData.isNotEmpty()) {
+                var yData = ByteArray(videoWidth * videoHeight)
+                var uData = ByteArray(videoWidth * videoHeight / 4)
+                var vData = ByteArray(videoWidth * videoHeight / 4)
+
+                if (outputFormat?.getInteger(MediaFormat.KEY_COLOR_FORMAT) == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar) {
+                    yuvData = yuv420spTop(yuvData)
+                }
+
+                yuvCopy(yuvData, 0, alignWidth, alignHeight, yData, videoWidth, videoHeight)
+                yuvCopy(yuvData, alignWidth * alignHeight, alignWidth / 2, alignHeight / 2, uData, videoWidth / 2, videoHeight / 2)
+                yuvCopy(yuvData, alignWidth * alignHeight * 5 / 4, alignWidth / 2, alignHeight / 2, vData, videoWidth / 2, videoHeight / 2)
+
+                render?.setYUVData(videoWidth, videoHeight, yData, uData, vData)
+                renderData()
+            }
+        }
+    }
+
+    private fun yuv420spTop(yuv420sp: ByteArray): ByteArray {
+        val yuv420p = ByteArray(yuv420sp.size)
+        val ySize = alignWidth * alignHeight
+        System.arraycopy(yuv420sp, 0, yuv420p, 0, alignWidth * alignHeight)
+        var i = ySize
+        var j = ySize
+        while (i < ySize * 3 / 2) {
+            yuv420p[j] = yuv420sp[i]
+            yuv420p[j + ySize / 4] = yuv420sp[i + 1]
+            i += 2
+            j++
+        }
+        return yuv420p
+    }
+
+    private fun yuvCopy(src: ByteArray, srcOffset: Int, inWidth: Int, inHeight: Int, dest: ByteArray, outWidth: Int, outHeight: Int) {
+        for (h in 0 until inHeight) {
+            if (h < outHeight) {
+                System.arraycopy(src, srcOffset + h * inWidth, dest, h * outWidth, outWidth)
+            }
+        }
+    }
+
     private fun release(decoder: MediaCodec?, extractor: MediaExtractor?) {
         renderThread.handler?.post {
             render?.clearFrame()
@@ -231,6 +362,8 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
                 speedControlUtil.reset()
                 player.pluginManager.onRelease()
                 render?.releaseTexture()
+                surface?.release()
+                surface = null
             } catch (e: Throwable) {
                 ALog.e(TAG, "release e=$e", e)
             }
@@ -242,10 +375,9 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
         }
     }
 
-
     override fun destroy() {
-        needDestroy = true
         if (isRunning) {
+            needDestroy = true
             stop()
         } else {
             destroyInner()
@@ -253,9 +385,10 @@ class HardDecoder(player: AnimPlayer) : Decoder(player), SurfaceTexture.OnFrameA
     }
 
     private fun destroyInner() {
+        ALog.i(TAG, "destroyInner")
         renderThread.handler?.post {
             player.pluginManager.onDestroy()
-            render?.destroy()
+            render?.destroyRender()
             render = null
             onVideoDestroy()
             destroyThread()

+ 66 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/IAnimView.kt

@@ -0,0 +1,66 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer
+
+import android.content.res.AssetManager
+import android.graphics.SurfaceTexture
+import com.tencent.qgame.animplayer.file.IFileContainer
+import com.tencent.qgame.animplayer.inter.IAnimListener
+import com.tencent.qgame.animplayer.inter.IFetchResource
+import com.tencent.qgame.animplayer.inter.OnResourceClickListener
+import com.tencent.qgame.animplayer.mask.MaskConfig
+import com.tencent.qgame.animplayer.util.IScaleType
+import com.tencent.qgame.animplayer.util.ScaleType
+import java.io.File
+
+interface IAnimView {
+
+    fun prepareTextureView()
+
+    fun getSurfaceTexture(): SurfaceTexture?
+
+    fun setAnimListener(animListener: IAnimListener?)
+
+    fun setFetchResource(fetchResource: IFetchResource?)
+
+    fun setOnResourceClickListener(resourceClickListener: OnResourceClickListener?)
+
+    fun setLoop(playLoop: Int)
+
+    fun supportMask(isSupport: Boolean, isEdgeBlur: Boolean)
+
+    fun updateMaskConfig(maskConfig: MaskConfig?)
+
+    fun setFps(fps: Int)
+
+    fun setScaleType(type: ScaleType)
+
+    fun setScaleType(scaleType: IScaleType)
+
+    fun setMute(isMute: Boolean)
+
+    fun startPlay(file: File)
+
+    fun startPlay(assetManager: AssetManager, assetsPath: String)
+
+    fun startPlay(fileContainer: IFileContainer)
+
+    fun stopPlay()
+
+    fun isRunning(): Boolean
+
+    fun getRealSize(): Pair<Int, Int>
+}

+ 54 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/IRenderListener.kt

@@ -0,0 +1,54 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer
+
+interface IRenderListener {
+
+    /**
+     * 初始化渲染环境,获取shader字段,创建绑定纹理
+     */
+    fun initRender()
+
+    /**
+     * 渲染上屏
+     */
+    fun renderFrame()
+
+    fun clearFrame()
+
+    /**
+     * 释放纹理
+     */
+    fun destroyRender()
+
+    /**
+     * 设置视频配置
+     */
+    fun setAnimConfig(config: AnimConfig)
+
+    /**
+     * 显示区域大小变化
+     */
+    fun updateViewPort(width: Int, height: Int) {}
+
+    fun getExternalTexture(): Int
+
+    fun releaseTexture()
+
+    fun swapBuffers()
+
+    fun setYUVData(width: Int, height: Int, y: ByteArray?, u: ByteArray?, v: ByteArray?) {}
+}

+ 36 - 43
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/Render.kt

@@ -26,7 +26,7 @@ import java.nio.ByteBuffer
 import java.nio.ByteOrder
 import java.nio.ShortBuffer
 
-class Render(surfaceTexture: SurfaceTexture) {
+class Render(surfaceTexture: SurfaceTexture): IRenderListener {
 
     companion object {
         private const val TAG = "${Constant.TAG}.Render"
@@ -48,11 +48,7 @@ class Render(surfaceTexture: SurfaceTexture) {
 
     init {
         eglUtil.start(surfaceTexture)
-        initGL()
-    }
-
-    private fun initGL() {
-        compileShader()
+        initRender()
     }
 
     private fun setVertexBuf(config: AnimConfig) {
@@ -66,7 +62,13 @@ class Render(surfaceTexture: SurfaceTexture) {
         rgbArray.setArray(rgb)
     }
 
-    fun createTexture() {
+    override fun initRender() {
+        shaderProgram = ShaderUtil.createProgram(RenderConstant.VERTEX_SHADER, RenderConstant.FRAGMENT_SHADER)
+        uTextureLocation = GLES20.glGetUniformLocation(shaderProgram, "texture")
+        aPositionLocation = GLES20.glGetAttribLocation(shaderProgram, "vPosition")
+        aTextureAlphaLocation = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinateAlpha")
+        aTextureRgbLocation = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinateRgb")
+
         GLES20.glGenTextures(genTexture.size, genTexture, 0)
         GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, genTexture[0])
         GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST.toFloat())
@@ -75,18 +77,35 @@ class Render(surfaceTexture: SurfaceTexture) {
         GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE)
     }
 
-    private fun compileShader() {
-        shaderProgram = ShaderUtil.createProgram(RenderConstant.VERTEX_SHADER, RenderConstant.FRAGMENT_SHADER)
-        uTextureLocation = GLES20.glGetUniformLocation(shaderProgram, "texture")
-        aPositionLocation = GLES20.glGetAttribLocation(shaderProgram, "vPosition")
-        aTextureAlphaLocation = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinateAlpha")
-        aTextureRgbLocation = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinateRgb")
+    override fun renderFrame() {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
+        if (surfaceSizeChanged && surfaceWidth>0 && surfaceHeight>0) {
+            surfaceSizeChanged = false
+            GLES20.glViewport(0,0, surfaceWidth, surfaceHeight)
+        }
+        draw()
+    }
+
+    override fun clearFrame() {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
+        eglUtil.swapBuffers()
+    }
+
+    override fun destroyRender() {
+        releaseTexture()
+        eglUtil.release()
+    }
+
+    override fun releaseTexture() {
+        GLES20.glDeleteTextures(genTexture.size, genTexture, 0)
     }
 
     /**
      * 设置视频配置
      */
-    fun setAnimConfig(config: AnimConfig) {
+    override fun setAnimConfig(config: AnimConfig) {
         setVertexBuf(config)
         setTexCoords(config)
     }
@@ -94,47 +113,21 @@ class Render(surfaceTexture: SurfaceTexture) {
     /**
      * 显示区域大小变化
      */
-    fun updateViewPort(width: Int, height: Int) {
+    override fun updateViewPort(width: Int, height: Int) {
         if (width <=0 || height <=0) return
         surfaceSizeChanged = true
         surfaceWidth = width
         surfaceHeight = height
     }
 
-    fun clearFrame() {
-        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
-        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
+    override fun swapBuffers() {
         eglUtil.swapBuffers()
     }
 
-    fun renderFrame(config: AnimConfig?) {
-        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
-        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
-        if (surfaceSizeChanged && surfaceWidth>0 && surfaceHeight>0) {
-            surfaceSizeChanged = false
-            GLES20.glViewport(0,0, surfaceWidth, surfaceHeight)
-        }
-        draw()
-    }
-
-    fun swapBuffers() {
-        eglUtil.swapBuffers()
-    }
-
-
-    fun destroy() {
-        releaseTexture()
-        eglUtil.release()
-    }
-
-    fun releaseTexture() {
-        GLES20.glDeleteTextures(genTexture.size, genTexture, 0)
-    }
-
     /**
      * mediaCodec渲染使用的
      */
-    fun getExternalTexture(): Int {
+    override fun getExternalTexture(): Int {
         return genTexture[0]
     }
 

+ 207 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/YUVRender.kt

@@ -0,0 +1,207 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer
+
+import android.graphics.SurfaceTexture
+import android.opengl.GLES20
+import com.tencent.qgame.animplayer.util.GlFloatArray
+import com.tencent.qgame.animplayer.util.ShaderUtil.createProgram
+import com.tencent.qgame.animplayer.util.TexCoordsUtil
+import com.tencent.qgame.animplayer.util.VertexUtil
+import java.nio.ByteBuffer
+import java.nio.FloatBuffer
+
+class YUVRender (surfaceTexture: SurfaceTexture): IRenderListener {
+
+    companion object {
+        private const val TAG = "${Constant.TAG}.YUVRender"
+    }
+
+    private val vertexArray = GlFloatArray()
+    private val alphaArray = GlFloatArray()
+    private val rgbArray = GlFloatArray()
+
+    private var shaderProgram = 0
+
+    //顶点位置
+    private var avPosition = 0
+
+    //rgb纹理位置
+    private var rgbPosition = 0
+
+    //alpha纹理位置
+    private var alphaPosition = 0
+
+    //shader  yuv变量
+    private var samplerY = 0
+    private var samplerU = 0
+    private var samplerV = 0
+    private var textureId = IntArray(3)
+    private var convertMatrixUniform = 0
+    private var convertOffsetUniform = 0
+
+    //YUV数据
+    private var widthYUV = 0
+    private var heightYUV = 0
+    private var y: ByteBuffer? = null
+    private var u: ByteBuffer? = null
+    private var v: ByteBuffer? = null
+
+    private val eglUtil: EGLUtil = EGLUtil()
+
+    // 像素数据向GPU传输时默认以4字节对齐
+    private var unpackAlign = 4
+
+    // YUV offset
+    private val YUV_OFFSET = floatArrayOf(
+            0f, -0.501960814f, -0.501960814f
+    )
+
+    // RGB coefficients
+    private val YUV_MATRIX = floatArrayOf(
+            1f, 1f, 1f,
+            0f, -0.3441f, 1.772f,
+            1.402f, -0.7141f, 0f
+    )
+
+    init {
+        eglUtil.start(surfaceTexture)
+        initRender()
+    }
+
+    override fun initRender() {
+        shaderProgram = createProgram(YUVShader.VERTEX_SHADER, YUVShader.FRAGMENT_SHADER)
+        //获取顶点坐标字段
+        avPosition = GLES20.glGetAttribLocation(shaderProgram, "v_Position")
+        //获取纹理坐标字段
+        rgbPosition = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinateRgb")
+        alphaPosition = GLES20.glGetAttribLocation(shaderProgram, "vTexCoordinateAlpha")
+
+        //获取yuv字段
+        samplerY = GLES20.glGetUniformLocation(shaderProgram, "sampler_y")
+        samplerU = GLES20.glGetUniformLocation(shaderProgram, "sampler_u")
+        samplerV = GLES20.glGetUniformLocation(shaderProgram, "sampler_v")
+        convertMatrixUniform = GLES20.glGetUniformLocation(shaderProgram, "convertMatrix")
+        convertOffsetUniform = GLES20.glGetUniformLocation(shaderProgram, "offset")
+        //创建3个纹理
+        GLES20.glGenTextures(textureId.size, textureId, 0)
+
+        //绑定纹理
+        for (id in textureId) {
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, id)
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_REPEAT)
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_REPEAT)
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR)
+            GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR)
+        }
+    }
+
+    override fun renderFrame() {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
+        draw()
+    }
+
+    override fun clearFrame() {
+        GLES20.glClearColor(0.0f, 0.0f, 0.0f, 0.0f)
+        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT)
+        eglUtil.swapBuffers()
+    }
+
+    override fun destroyRender() {
+        releaseTexture()
+        eglUtil.release()
+    }
+
+    override fun setAnimConfig(config: AnimConfig) {
+        vertexArray.setArray(VertexUtil.create(config.width, config.height, PointRect(0, 0, config.width, config.height), vertexArray.array))
+        val alpha = TexCoordsUtil.create(config.videoWidth, config.videoHeight, config.alphaPointRect, alphaArray.array)
+        val rgb = TexCoordsUtil.create(config.videoWidth, config.videoHeight, config.rgbPointRect, rgbArray.array)
+        alphaArray.setArray(alpha)
+        rgbArray.setArray(rgb)
+    }
+
+    override fun getExternalTexture(): Int {
+        return textureId[0]
+    }
+
+    override fun releaseTexture() {
+        GLES20.glDeleteTextures(textureId.size, textureId, 0)
+    }
+
+    override fun swapBuffers() {
+        eglUtil.swapBuffers()
+    }
+
+    override fun setYUVData(width: Int, height: Int, y: ByteArray?, u: ByteArray?, v: ByteArray?) {
+        widthYUV = width
+        heightYUV = height
+        this.y = ByteBuffer.wrap(y)
+        this.u = ByteBuffer.wrap(u)
+        this.v = ByteBuffer.wrap(v)
+
+        // 当视频帧的u或者v分量的宽度不能被4整除时,用默认的4字节对齐会导致存取最后一行时越界,所以在向GPU传输数据前指定对齐方式
+        if ((widthYUV / 2) % 4 != 0) {
+            this.unpackAlign = if ((widthYUV / 2) % 2 == 0) 2 else 1
+        }
+    }
+
+    private fun draw() {
+        if (widthYUV > 0 && heightYUV > 0 && y != null && u != null && v != null) {
+            GLES20.glUseProgram(shaderProgram)
+            vertexArray.setVertexAttribPointer(avPosition)
+            alphaArray.setVertexAttribPointer(alphaPosition)
+            rgbArray.setVertexAttribPointer(rgbPosition)
+
+            GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, unpackAlign)
+
+            //激活纹理0来绑定y数据
+            GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[0])
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, widthYUV, heightYUV, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, y)
+
+            //激活纹理1来绑定u数据
+            GLES20.glActiveTexture(GLES20.GL_TEXTURE1)
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[1])
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, widthYUV / 2, heightYUV / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, u)
+
+            //激活纹理2来绑定v数据
+            GLES20.glActiveTexture(GLES20.GL_TEXTURE2)
+            GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureId[2])
+            GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_LUMINANCE, widthYUV / 2, heightYUV / 2, 0, GLES20.GL_LUMINANCE, GLES20.GL_UNSIGNED_BYTE, v)
+
+            //给fragment_shader里面yuv变量设置值   0 1 标识纹理x
+            GLES20.glUniform1i(samplerY, 0)
+            GLES20.glUniform1i(samplerU, 1)
+            GLES20.glUniform1i(samplerV, 2)
+
+            GLES20.glUniform3fv(convertOffsetUniform, 1, FloatBuffer.wrap(YUV_OFFSET))
+            GLES20.glUniformMatrix3fv(convertMatrixUniform, 1, false, YUV_MATRIX, 0)
+
+            //绘制
+            GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4)
+            y?.clear()
+            u?.clear()
+            v?.clear()
+            y = null
+            u = null
+            v = null
+            GLES20.glDisableVertexAttribArray(avPosition)
+            GLES20.glDisableVertexAttribArray(rgbPosition)
+            GLES20.glDisableVertexAttribArray(alphaPosition)
+        }
+    }
+}

+ 60 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/YUVShader.kt

@@ -0,0 +1,60 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer
+
+object YUVShader {
+
+    const val VERTEX_SHADER = "attribute vec4 v_Position;\n" +
+            "attribute vec2 vTexCoordinateAlpha;\n" +
+            "attribute vec2 vTexCoordinateRgb;\n" +
+            "varying vec2 v_TexCoordinateAlpha;\n" +
+            "varying vec2 v_TexCoordinateRgb;\n" +
+            "\n" +
+            "void main() {\n" +
+            "    v_TexCoordinateAlpha = vTexCoordinateAlpha;\n" +
+            "    v_TexCoordinateRgb = vTexCoordinateRgb;\n" +
+            "    gl_Position = v_Position;\n" +
+            "}"
+
+    const val FRAGMENT_SHADER = "precision mediump float;\n" +
+            "uniform sampler2D sampler_y;\n" +
+            "uniform sampler2D sampler_u;\n" +
+            "uniform sampler2D sampler_v;\n" +
+            "varying vec2 v_TexCoordinateAlpha;\n" +
+            "varying vec2 v_TexCoordinateRgb;\n" +
+            "uniform mat3 convertMatrix;\n" +
+            "uniform vec3 offset;\n" +
+            "\n" +
+            "void main() {\n" +
+            "   highp vec3 yuvColorAlpha;\n" +
+            "   highp vec3 yuvColorRGB;\n" +
+            "   highp vec3 rgbColorAlpha;\n" +
+            "   highp vec3 rgbColorRGB;\n" +
+            "   yuvColorAlpha.x = texture2D(sampler_y,v_TexCoordinateAlpha).r;\n" +
+            "   yuvColorRGB.x = texture2D(sampler_y,v_TexCoordinateRgb).r;\n" +
+            "   yuvColorAlpha.y = texture2D(sampler_u,v_TexCoordinateAlpha).r;\n" +
+            "   yuvColorAlpha.z = texture2D(sampler_v,v_TexCoordinateAlpha).r;\n" +
+            "   yuvColorRGB.y = texture2D(sampler_u,v_TexCoordinateRgb).r;\n" +
+            "   yuvColorRGB.z = texture2D(sampler_v,v_TexCoordinateRgb).r;\n" +
+            "   yuvColorAlpha += offset;\n" +
+            "   yuvColorRGB += offset;\n" +
+            "   rgbColorAlpha = convertMatrix * yuvColorAlpha; \n" +
+            "   rgbColorRGB = convertMatrix * yuvColorRGB; \n" +
+            "   gl_FragColor=vec4(rgbColorRGB, rgbColorAlpha.r);\n" +
+            "}"
+
+    // RGB2*Alpha+RGB1*(1-Alpha)
+}

+ 65 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/AssetsFileContainer.kt

@@ -0,0 +1,65 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer.file
+
+import android.content.res.AssetFileDescriptor
+import android.content.res.AssetManager
+import android.media.MediaExtractor
+import com.tencent.qgame.animplayer.Constant
+import com.tencent.qgame.animplayer.util.ALog
+
+class AssetsFileContainer(assetManager: AssetManager, assetsPath: String): IFileContainer {
+
+    companion object {
+        private const val TAG = "${Constant.TAG}.FileContainer"
+    }
+
+    private val assetFd: AssetFileDescriptor = assetManager.openFd(assetsPath)
+    private val assetsInputStream: AssetManager.AssetInputStream =
+        assetManager.open(assetsPath, AssetManager.ACCESS_STREAMING) as AssetManager.AssetInputStream
+
+    init {
+        ALog.i(TAG, "AssetsFileContainer init")
+    }
+
+    override fun setDataSource(extractor: MediaExtractor) {
+        if (assetFd.declaredLength < 0) {
+            extractor.setDataSource(assetFd.fileDescriptor)
+        } else {
+            extractor.setDataSource(assetFd.fileDescriptor, assetFd.startOffset, assetFd.declaredLength)
+        }
+    }
+
+    override fun startRandomRead() {
+    }
+
+    override fun read(b: ByteArray, off: Int, len: Int): Int {
+        return assetsInputStream.read(b, off, len)
+    }
+
+    override fun skip(pos: Long) {
+        assetsInputStream.skip(pos)
+    }
+
+    override fun closeRandomRead() {
+        assetsInputStream.close()
+    }
+
+    override fun close() {
+        assetFd.close()
+        assetsInputStream.close()
+    }
+}

+ 60 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/FileContainer.kt

@@ -0,0 +1,60 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer.file
+
+import android.media.MediaExtractor
+import com.tencent.qgame.animplayer.Constant
+import com.tencent.qgame.animplayer.util.ALog
+import java.io.File
+import java.io.FileNotFoundException
+import java.io.RandomAccessFile
+
+class FileContainer(private val file: File) : IFileContainer {
+
+    companion object {
+        private const val TAG = "${Constant.TAG}.FileContainer"
+    }
+
+    private var randomAccessFile: RandomAccessFile? = null
+
+    init {
+        ALog.i(TAG, "FileContainer init")
+        if (!(file.exists() && file.isFile && file.canRead())) throw FileNotFoundException("Unable to read $file")
+    }
+
+    override fun setDataSource(extractor: MediaExtractor) {
+        extractor.setDataSource(file.toString())
+    }
+
+    override fun startRandomRead() {
+        randomAccessFile = RandomAccessFile(file, "r")
+    }
+
+    override fun read(b: ByteArray, off: Int, len: Int): Int {
+        return randomAccessFile?.read(b, off, len) ?: -1
+    }
+
+    override fun skip(pos: Long) {
+        randomAccessFile?.skipBytes(pos.toInt())
+    }
+
+    override fun closeRandomRead() {
+        randomAccessFile?.close()
+    }
+
+    override fun close() {
+    }
+}

+ 34 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/IFileContainer.kt

@@ -0,0 +1,34 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer.file
+
+import android.media.MediaExtractor
+
+interface IFileContainer {
+
+    fun setDataSource(extractor: MediaExtractor)
+
+    fun startRandomRead()
+
+    fun read(b: ByteArray, off: Int, len: Int): Int
+
+    fun skip(pos: Long)
+
+    fun closeRandomRead()
+
+    fun close()
+
+}

+ 50 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/StreamContainer.kt

@@ -0,0 +1,50 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer.file
+
+import android.annotation.TargetApi
+import android.media.MediaExtractor
+import android.os.Build
+import java.io.ByteArrayInputStream
+
+@TargetApi(Build.VERSION_CODES.M)
+class StreamContainer(private val bytes: ByteArray) : IFileContainer {
+
+    private var stream: ByteArrayInputStream = ByteArrayInputStream(bytes)
+
+    override fun setDataSource(extractor: MediaExtractor) {
+        val dataSource = StreamMediaDataSource(bytes)
+        extractor.setDataSource(dataSource)
+    }
+
+    override fun startRandomRead() {
+    }
+
+    override fun read(b: ByteArray, off: Int, len: Int): Int {
+        return stream.read(b, off, len)
+    }
+
+    override fun skip(pos: Long) {
+        stream.skip(pos)
+    }
+
+    override fun closeRandomRead() {
+    }
+
+    override fun close() {
+        stream.close()
+    }
+}

+ 49 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/file/StreamMediaDataSource.kt

@@ -0,0 +1,49 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer.file
+
+import android.annotation.TargetApi
+import android.media.MediaDataSource
+import android.os.Build
+
+@TargetApi(Build.VERSION_CODES.M)
+class StreamMediaDataSource(val bytes: ByteArray) : MediaDataSource() {
+
+    override fun close() {
+    }
+
+    override fun readAt(position: Long, buffer: ByteArray, offset: Int, size: Int): Int {
+        var newSize = size
+        synchronized(StreamMediaDataSource::class) {
+            val length = bytes.size
+            if (position >= length) {
+                return -1
+            }
+            if (position + newSize > length) {
+                newSize -= (position + newSize).toInt() - length
+            }
+            System.arraycopy(bytes, position.toInt(), buffer, offset, newSize)
+            return newSize
+        }
+
+    }
+
+    override fun getSize(): Long {
+        synchronized(StreamMediaDataSource::class) {
+            return bytes.size.toLong()
+        }
+    }
+}

+ 3 - 2
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/MixAnimPlugin.kt

@@ -52,8 +52,9 @@ class MixAnimPlugin(val player: AnimPlayer): IAnimPlugin {
     override fun onConfigCreate(config: AnimConfig): Int {
         if (!config.isMix) return Constant.OK
         if (resourceRequest == null) {
-            ALog.i(TAG, "IFetchResource is empty")
-            return Constant.REPORT_ERROR_TYPE_CONFIG_PLUGIN_MIX
+            ALog.e(TAG, "IFetchResource is empty")
+            // 没有设置IFetchResource 当成普通视频播放
+            return Constant.OK
         }
         // step 1 parse src
         parseSrc(config)

+ 4 - 4
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/MixRender.kt

@@ -15,14 +15,11 @@
  */
 package com.tencent.qgame.animplayer.mix
 
-import android.graphics.Bitmap
 import android.opengl.GLES11Ext
 import android.opengl.GLES20
-import android.opengl.GLUtils
 import com.tencent.qgame.animplayer.AnimConfig
 import com.tencent.qgame.animplayer.Constant
 import com.tencent.qgame.animplayer.PointRect
-import com.tencent.qgame.animplayer.RenderConstant
 import com.tencent.qgame.animplayer.util.*
 
 /**
@@ -63,7 +60,7 @@ class MixRender(private val mixAnimPlugin: MixAnimPlugin) {
         vertexArray.setVertexAttribPointer(shader.aPositionLocation)
 
         // src 纹理坐标
-        srcArray.setArray(genSrcCoordsArray(srcArray.array, frame.frame.w, frame.frame.h, src.w, src.h, src.fitType))
+        srcArray.setArray(genSrcCoordsArray(srcArray.array, frame.frame.w, frame.frame.h, src.drawWidth, src.drawHeight, src.fitType))
         srcArray.setVertexAttribPointer(shader.aTextureSrcCoordinatesLocation)
         // 绑定 src纹理
         GLES20.glActiveTexture(GLES20.GL_TEXTURE0)
@@ -107,6 +104,9 @@ class MixRender(private val mixAnimPlugin: MixAnimPlugin) {
         }
     }
 
+    /**
+     * CENTER_FULL 并不是严格的centerCrop(centerCrop已经前置处理),此处主要是为防抖动做处理,复杂遮罩情况下需要固定src大小进行绘制防止抖动
+     */
     private fun genSrcCoordsArray(array: FloatArray, fw: Int, fh: Int, sw: Int, sh: Int, fitType: Src.FitType): FloatArray {
         return if (fitType == Src.FitType.CENTER_FULL) {
             if (fw <= sw && fh <= sh) {

+ 6 - 5
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/MixTouch.kt

@@ -24,8 +24,7 @@ import com.tencent.qgame.animplayer.PointRect
 class MixTouch(private val mixAnimPlugin: MixAnimPlugin) {
 
     fun onTouchEvent(ev: MotionEvent): Resource? {
-        val viewWith = mixAnimPlugin.player.animView.width
-        val viewHeight = mixAnimPlugin.player.animView.height
+        val (viewWith, viewHeight) = mixAnimPlugin.player.animView.getRealSize()
         val videoWith = mixAnimPlugin.player.configManager.config?.width ?: return null
         val videoHeight = mixAnimPlugin.player.configManager.config?.height ?: return null
 
@@ -33,13 +32,15 @@ class MixTouch(private val mixAnimPlugin: MixAnimPlugin) {
 
         when(ev.action) {
             MotionEvent.ACTION_UP -> {
-                val x = ev.rawX * videoWith / viewWith
-                val y = ev.rawY * videoHeight / viewHeight
+                val x = ev.x * videoWith / viewWith.toFloat()
+                val y = ev.y * videoHeight / viewHeight.toFloat()
                 val list = mixAnimPlugin.frameAll?.map?.get(mixAnimPlugin.curFrameIndex)?.list
                 list?.forEach {frame ->
                     val src = mixAnimPlugin.srcMap?.map?.get(frame.srcId) ?: return@forEach
                     if (calClick(x.toInt(), y.toInt(), frame.frame)) {
-                        return Resource(src)
+                        return Resource(src).apply {
+                            curPoint = frame.frame
+                        }
                     }
                 }
             }

+ 4 - 2
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/Resource.kt

@@ -16,18 +16,20 @@
 package com.tencent.qgame.animplayer.mix
 
 import android.graphics.Bitmap
+import com.tencent.qgame.animplayer.PointRect
 
 /**
  * 资源描述
  */
-class Resource {
+class Resource(src: Src) {
     var id = ""
     var type = Src.SrcType.UNKNOWN
     var loadType = Src.LoadType.UNKNOWN
     var tag = ""
     var bitmap: Bitmap? = null
+    var curPoint: PointRect? = null  // src在当前帧的位置信息
 
-    constructor(src: Src) {
+    init {
         id = src.srcId
         type = src.srcType
         loadType = src.loadType

+ 30 - 1
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/mix/Src.kt

@@ -51,15 +51,21 @@ class Src {
     var srcId = ""
     var w = 0
     var h = 0
+    var drawWidth = 0
+    var drawHeight = 0
     var srcType = SrcType.UNKNOWN
     var loadType = LoadType.UNKNOWN
     var srcTag = ""
-    var bitmap: Bitmap? = null
     var txt = ""
     var style = Style.DEFAULT
     var color: Int = 0
     var fitType = FitType.FIT_XY
     var srcTextureId = 0
+    var bitmap: Bitmap? = null
+        set(value) {
+            field = value
+            genDrawSize(value)
+        }
 
     constructor(json: JSONObject) {
         srcId = json.getString("srcId")
@@ -98,6 +104,29 @@ class Src {
     }
 
 
+    private fun genDrawSize(bitmap: Bitmap?) {
+        val bw = bitmap?.width?: w
+        val bh = bitmap?.height?: h
+        drawWidth = bw
+        drawHeight = bh
+        if (fitType == FitType.CENTER_FULL) {
+            if (w == 0 || h == 0) {
+                return
+            }
+            // 按src w h进行centerCrop处理
+            val srcRate = w.toFloat() / h.toFloat()
+            val bitmapRate = bw.toFloat() / bh.toFloat()
+
+            if (bitmapRate >= srcRate) {
+                drawHeight = h
+                drawWidth = (h * bitmapRate).toInt()
+            } else {
+                drawWidth = w
+                drawHeight = (w / bitmapRate).toInt()
+            }
+        }
+    }
+
 
     override fun toString(): String {
         return "Src(srcId='$srcId', srcType=$srcType, loadType=$loadType, srcTag='$srcTag', bitmap=$bitmap, txt='$txt')"

+ 11 - 3
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/plugin/AnimPluginManager.kt

@@ -40,7 +40,7 @@ class AnimPluginManager(val player: AnimPlayer) {
     // 当前渲染的帧
     private var frameIndex = 0
     // 当前解码的帧
-    private var decodeIndex = 1
+    private var decodeIndex = 0
     // 帧不相同的次数, 连续多次不同则直接使用decodeIndex
     private var frameDiffTimes = 0
 
@@ -66,6 +66,7 @@ class AnimPluginManager(val player: AnimPlayer) {
     fun onRenderCreate() {
         ALog.i(TAG, "onRenderCreate")
         frameIndex = 0
+        decodeIndex = 0
         plugins.forEach {
             it.onRenderCreate()
         }
@@ -79,8 +80,14 @@ class AnimPluginManager(val player: AnimPlayer) {
         }
     }
 
+    // 开始循环调用
+    fun onLoopStart() {
+        ALog.i(TAG, "onLoopStart")
+        frameIndex = 0
+        decodeIndex = 0
+    }
+
     fun onRendering() {
-        frameIndex++
         if (decodeIndex > frameIndex + 1 || frameDiffTimes >= DIFF_TIMES) {
             ALog.i(TAG, "jump frameIndex= $frameIndex,decodeIndex=$decodeIndex,frameDiffTimes=$frameDiffTimes")
             frameIndex = decodeIndex
@@ -92,8 +99,9 @@ class AnimPluginManager(val player: AnimPlayer) {
         }
         ALog.d(TAG, "onRendering frameIndex=$frameIndex")
         plugins.forEach {
-            it.onRendering(frameIndex - 1) // 第一帧算0
+            it.onRendering(frameIndex) // 第一帧 0
         }
+        frameIndex++
     }
 
     fun onRelease() {

+ 21 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/textureview/InnerTextureView.kt

@@ -0,0 +1,21 @@
+package com.tencent.qgame.animplayer.textureview
+
+import android.content.Context
+import android.util.AttributeSet
+import android.view.MotionEvent
+import android.view.TextureView
+import com.tencent.qgame.animplayer.AnimPlayer
+
+class InnerTextureView @JvmOverloads constructor(
+    context: Context, attrs: AttributeSet? = null, defStyleAttr: Int = 0
+) : TextureView(context, attrs, defStyleAttr) {
+
+    var player: AnimPlayer? = null
+
+    override fun dispatchTouchEvent(ev: MotionEvent?): Boolean {
+        val res = player?.isRunning() == true
+                && ev != null
+                && player?.pluginManager?.onDispatchTouchEvent(ev) == true
+        return if (!res) super.dispatchTouchEvent(ev) else true
+    }
+}

+ 53 - 4
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/util/MediaUtil.kt

@@ -15,28 +15,44 @@
  */
 package com.tencent.qgame.animplayer.util
 
+import android.media.MediaCodecList
 import android.media.MediaExtractor
 import android.media.MediaFormat
 import com.tencent.qgame.animplayer.Constant
-import com.tencent.qgame.animplayer.FileContainer
+import com.tencent.qgame.animplayer.file.IFileContainer
+import kotlin.collections.HashMap
+
 
 object MediaUtil {
 
     private const val TAG = "${Constant.TAG}.MediaUtil"
 
-    fun getExtractor(file: FileContainer): MediaExtractor {
+    private var isTypeMapInit = false
+    private val supportTypeMap = HashMap<String, Boolean>()
+
+    const val MIME_HEVC = "video/hevc"
+
+    fun getExtractor(file: IFileContainer): MediaExtractor {
         val extractor = MediaExtractor()
         file.setDataSource(extractor)
         return extractor
     }
 
+    /**
+     * 是否为h265的视频
+     */
+    fun checkIsHevc(videoFormat: MediaFormat):Boolean {
+        val mime = videoFormat.getString(MediaFormat.KEY_MIME) ?: ""
+        return mime.contains("hevc")
+    }
+
     fun selectVideoTrack(extractor: MediaExtractor): Int {
         val numTracks = extractor.trackCount
         for (i in 0 until numTracks) {
             val format = extractor.getTrackFormat(i)
             val mime = format.getString(MediaFormat.KEY_MIME) ?: ""
             if (mime.startsWith("video/")) {
-                ALog.d(TAG, "Extractor selected track $i ($mime): $format")
+                ALog.i(TAG, "Extractor selected track $i ($mime): $format")
                 return i
             }
         }
@@ -49,10 +65,43 @@ object MediaUtil {
             val format = extractor.getTrackFormat(i)
             val mime = format.getString(MediaFormat.KEY_MIME) ?: ""
             if (mime.startsWith("audio/")) {
-                ALog.d(TAG, "Extractor selected track $i ($mime): $format")
+                ALog.i(TAG, "Extractor selected track $i ($mime): $format")
                 return i
             }
         }
         return -1
     }
+
+    /**
+     * 检查设备解码支持类型
+     */
+    @Synchronized
+    fun checkSupportCodec(mimeType: String): Boolean {
+        if (!isTypeMapInit) {
+            isTypeMapInit = true
+            getSupportType()
+        }
+        return supportTypeMap.containsKey(mimeType.toLowerCase())
+    }
+
+
+    private fun getSupportType() {
+        try {
+            val numCodecs = MediaCodecList.getCodecCount()
+            for (i in 0 until numCodecs) {
+                val codecInfo = MediaCodecList.getCodecInfoAt(i)
+                if (codecInfo.isEncoder) {
+                    continue
+                }
+                val types = codecInfo.supportedTypes
+                for (j in types.indices) {
+                    supportTypeMap[types[j].toLowerCase()] = true
+                }
+            }
+            ALog.i(TAG, "supportType=${supportTypeMap.keys}")
+        } catch (t: Throwable) {
+            ALog.e(TAG, "getSupportType $t")
+        }
+    }
+
 }

+ 252 - 0
Android/PlayerProj/animplayer/src/main/java/com/tencent/qgame/animplayer/util/ScaleTypeUtil.kt

@@ -0,0 +1,252 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.animplayer.util
+
+import android.view.Gravity
+import android.view.View
+import android.view.ViewGroup
+import android.widget.FrameLayout
+import com.tencent.qgame.animplayer.Constant
+
+
+enum class ScaleType {
+    FIT_XY, // 完整填充整个布局 default
+    FIT_CENTER, // 按视频比例在布局中间完整显示
+    CENTER_CROP, // 按视频比例完整填充布局(多余部分不显示)
+}
+
+interface IScaleType {
+
+    fun getLayoutParam(
+        layoutWidth: Int,
+        layoutHeight: Int,
+        videoWidth: Int,
+        videoHeight: Int,
+        layoutParams: FrameLayout.LayoutParams
+    ): FrameLayout.LayoutParams
+
+    fun getRealSize(): Pair<Int, Int>
+}
+
+class ScaleTypeFitXY : IScaleType {
+
+    private var realWidth = 0
+    private var realHeight = 0
+
+    override fun getLayoutParam(
+        layoutWidth: Int,
+        layoutHeight: Int,
+        videoWidth: Int,
+        videoHeight: Int,
+        layoutParams: FrameLayout.LayoutParams
+    ): FrameLayout.LayoutParams {
+        layoutParams.width = ViewGroup.LayoutParams.MATCH_PARENT
+        layoutParams.height = ViewGroup.LayoutParams.MATCH_PARENT
+        realWidth = layoutWidth
+        realHeight = layoutHeight
+        return layoutParams
+    }
+
+    override fun getRealSize(): Pair<Int, Int> {
+        return Pair(realWidth, realHeight)
+    }
+}
+
+class ScaleTypeFitCenter : IScaleType {
+
+    private var realWidth = 0
+    private var realHeight = 0
+
+    override fun getLayoutParam(
+        layoutWidth: Int,
+        layoutHeight: Int,
+        videoWidth: Int,
+        videoHeight: Int,
+        layoutParams: FrameLayout.LayoutParams
+    ): FrameLayout.LayoutParams {
+        val (w, h) = getFitCenterSize(layoutWidth, layoutHeight, videoWidth, videoHeight)
+        if (w <= 0 && h <= 0) return layoutParams
+        realWidth = w
+        realHeight = h
+        layoutParams.width = w
+        layoutParams.height = h
+        layoutParams.gravity = Gravity.CENTER
+        return layoutParams
+    }
+
+    override fun getRealSize(): Pair<Int, Int> {
+        return Pair(realWidth, realHeight)
+    }
+
+    private fun getFitCenterSize(
+        layoutWidth: Int,
+        layoutHeight: Int,
+        videoWidth: Int,
+        videoHeight: Int
+    ): Pair<Int, Int> {
+
+        val layoutRatio = layoutWidth.toFloat() / layoutHeight
+        val videoRatio = videoWidth.toFloat() / videoHeight
+
+        val realWidth: Int
+        val realHeight: Int
+        if (layoutRatio > videoRatio) {
+            realHeight = layoutHeight
+            realWidth = (videoRatio * realHeight).toInt()
+        } else {
+            realWidth = layoutWidth
+            realHeight = (realWidth / videoRatio).toInt()
+        }
+
+        return Pair(realWidth, realHeight)
+    }
+}
+
+class ScaleTypeCenterCrop : IScaleType {
+
+    private var realWidth = 0
+    private var realHeight = 0
+
+    override fun getLayoutParam(
+        layoutWidth: Int,
+        layoutHeight: Int,
+        videoWidth: Int,
+        videoHeight: Int,
+        layoutParams: FrameLayout.LayoutParams
+    ): FrameLayout.LayoutParams {
+        val (w, h) = getCenterCropSize(layoutWidth, layoutHeight, videoWidth, videoHeight)
+        if (w <= 0 && h <= 0) return layoutParams
+        realWidth = w
+        realHeight = h
+        layoutParams.width = w
+        layoutParams.height = h
+        layoutParams.gravity = Gravity.CENTER
+        return layoutParams
+    }
+
+    override fun getRealSize(): Pair<Int, Int> {
+        return Pair(realWidth, realHeight)
+    }
+
+    private fun getCenterCropSize(
+        layoutWidth: Int,
+        layoutHeight: Int,
+        videoWidth: Int,
+        videoHeight: Int
+    ): Pair<Int, Int> {
+
+        val layoutRatio = layoutWidth.toFloat() / layoutHeight
+        val videoRatio = videoWidth.toFloat() / videoHeight
+
+        val realWidth: Int
+        val realHeight: Int
+        if (layoutRatio > videoRatio) {
+            realWidth = layoutWidth
+            realHeight = (realWidth / videoRatio).toInt()
+        } else {
+            realHeight = layoutHeight
+            realWidth = (videoRatio * realHeight).toInt()
+        }
+
+        return Pair(realWidth, realHeight)
+    }
+}
+
+
+class ScaleTypeUtil {
+
+    companion object {
+        private const val TAG = "${Constant.TAG}.ScaleTypeUtil"
+    }
+
+    private val scaleTypeFitXY by lazy { ScaleTypeFitXY() }
+    private val scaleTypeFitCenter by lazy { ScaleTypeFitCenter() }
+    private val scaleTypeCenterCrop by lazy { ScaleTypeCenterCrop() }
+    private var layoutWidth = 0
+    private var layoutHeight = 0
+    private var videoWidth = 0
+    private var videoHeight = 0
+
+    var currentScaleType = ScaleType.FIT_XY
+    var scaleTypeImpl: IScaleType? = null
+
+    fun setLayoutSize(w: Int, h: Int) {
+        layoutWidth = w
+        layoutHeight = h
+    }
+
+    fun setVideoSize(w: Int, h: Int) {
+        videoWidth = w
+        videoHeight = h
+    }
+
+    /**
+     * 获取实际视频容器宽高
+     * @return w h
+     */
+    fun getRealSize(): Pair<Int, Int> {
+        val size = getCurrentScaleType().getRealSize()
+        ALog.i(TAG, "get real size (${size.first}, ${size.second})")
+        return size
+    }
+
+    fun getLayoutParam(view: View?): FrameLayout.LayoutParams {
+        val layoutParams = (view?.layoutParams as? FrameLayout.LayoutParams)
+            ?: FrameLayout.LayoutParams(
+                ViewGroup.LayoutParams.MATCH_PARENT,
+                ViewGroup.LayoutParams.MATCH_PARENT
+            )
+        if (!checkParams()) {
+            ALog.e(
+                TAG,
+                "params error: layoutWidth=$layoutWidth, layoutHeight=$layoutHeight, videoWidth=$videoWidth, videoHeight=$videoHeight"
+            )
+            return layoutParams
+        }
+
+        return getCurrentScaleType().getLayoutParam(
+            layoutWidth,
+            layoutHeight,
+            videoWidth,
+            videoHeight,
+            layoutParams
+        )
+    }
+
+    private fun getCurrentScaleType(): IScaleType {
+        val tmpScaleType = scaleTypeImpl
+        return if (tmpScaleType != null) {
+            ALog.i(TAG, "custom scaleType")
+            tmpScaleType
+        } else {
+            ALog.i(TAG, "scaleType=$currentScaleType")
+            when (currentScaleType) {
+                ScaleType.FIT_XY -> scaleTypeFitXY
+                ScaleType.FIT_CENTER -> scaleTypeFitCenter
+                ScaleType.CENTER_CROP -> scaleTypeCenterCrop
+            }
+        }
+    }
+
+
+    private fun checkParams(): Boolean {
+        return layoutWidth > 0
+                && layoutHeight > 0
+                && videoWidth > 0
+                && videoHeight > 0
+    }
+
+}

+ 197 - 92
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/AnimTool.java

@@ -15,15 +15,16 @@
  */
 package com.tencent.qgame.playerproj.animtool;
 
+import com.tencent.qgame.playerproj.animtool.vapx.FrameSet;
+import com.tencent.qgame.playerproj.animtool.vapx.GetMaskFrame;
+import com.tencent.qgame.playerproj.animtool.vapx.SrcSet;
+
 import javax.imageio.ImageIO;
 import java.awt.image.BufferedImage;
-import java.io.BufferedReader;
 import java.io.BufferedWriter;
 import java.io.File;
 import java.io.FileWriter;
 import java.io.IOException;
-import java.io.InputStreamReader;
-import java.util.concurrent.TimeUnit;
 
 public class AnimTool {
 
@@ -32,7 +33,8 @@ public class AnimTool {
     public static final String OUTPUT_DIR = "output"+ File.separator;
     public static final String FRAME_IMAGE_DIR = "frames"+ File.separator;
     public static final String VIDEO_FILE = "video.mp4";
-    public static final String TEM_VIDEO_FILE = "tmp_video.mp4";
+    public static final String TEMP_VIDEO_FILE = "tmp_video.mp4";
+    public static final String TEMP_VIDEO_AUDIO_FILE = "tmp_video_audio.mp4";
     public static final String VAPC_BIN_FILE = "vapc.bin";
     public static final String VAPC_JSON_FILE = "vapc.json";
 
@@ -42,6 +44,7 @@ public class AnimTool {
     private volatile int finishThreadCount = 0;
     private long time;
     private GetAlphaFrame getAlphaFrame = new GetAlphaFrame();
+    private GetMaskFrame getMaskFrame = new GetMaskFrame();
     private IToolListener toolListener;
 
     public void setToolListener(IToolListener toolListener) {
@@ -54,13 +57,14 @@ public class AnimTool {
      */
     public void create(final CommonArg commonArg, final boolean needVideo) throws Exception{
         TLog.i(TAG, "start create");
-        createAllFrameImage(commonArg, new Runnable() {
+        createAllFrameImage(commonArg, new IRunResult() {
             @Override
-            public void run() {
-                if (needVideo) {
+            public boolean run() {
+                if (finalCheck(commonArg) && needVideo) {
                     // 最终生成视频文件
-                    createVideo(commonArg);
+                    return createVideo(commonArg);
                 }
+                return false;
             }
         });
     }
@@ -71,10 +75,26 @@ public class AnimTool {
      * @return
      */
     private boolean checkCommonArg(CommonArg commonArg) throws Exception {
-        return CommonArgTool.autoFillAndCheck(commonArg);
+        return CommonArgTool.autoFillAndCheck(commonArg, toolListener);
+    }
+
+    private boolean finalCheck(CommonArg commonArg) {
+        if (commonArg.isVapx) {
+            if (commonArg.srcSet.srcs.isEmpty()) {
+                TLog.i(TAG, "vapx error: src is empty");
+                return false;
+            }
+            for (SrcSet.Src src : commonArg.srcSet.srcs) {
+                if (src.w <=0 || src.h <= 0) {
+                    TLog.i(TAG, "vapx error: src.id=" + src.srcId + ",src.w=" + src.w + ",src.h=" + src.h);
+                    return false;
+                }
+            }
+        }
+        return true;
     }
 
-    private void createAllFrameImage(final CommonArg commonArg, final Runnable finishRunnable) throws Exception{
+    private void createAllFrameImage(final CommonArg commonArg, final IRunResult finishRunnable) throws Exception{
         if (!checkCommonArg(commonArg)) {
             if (toolListener != null) toolListener.onError();
             return;
@@ -101,12 +121,20 @@ public class AnimTool {
         threadIndexSet[threadNum-1][0] = (threadNum-1) * block;
         threadIndexSet[threadNum-1][1] = totalFrame;
 
+        if (toolListener != null) {
+            toolListener.onProgress(0f);
+        }
         for (int i=0; i<threadNum; i++) {
             final int k = i;
             new Thread(new Runnable() {
                 @Override
                 public void run() {
                     for(int i = threadIndexSet[k][0]; i<threadIndexSet[k][1]; i++) {
+                        try {
+                            createFrame(commonArg, i);
+                        } catch (Exception e) {
+                            TLog.e(TAG, "createFrame error:" + e.getMessage());
+                        }
                         synchronized (AnimTool.class) {
                             totalP++;
                             float progress = totalP * 1.0f / commonArg.totalFrame;
@@ -116,22 +144,22 @@ public class AnimTool {
                                 TLog.i(TAG, "progress " + progress);
                             }
                         }
-                        try {
-                            createFrame(commonArg, i);
-                        } catch (Exception e) {
-                            e.printStackTrace();
-                        }
                     }
                     synchronized (AnimTool.class) {
                         finishThreadCount++;
                         if (finishThreadCount == threadNum) {
+                            boolean result = false;
                             if (finishRunnable != null) {
-                                finishRunnable.run();
+                                result = finishRunnable.run();
                             }
                             long cost = System.currentTimeMillis() - time;
                             TLog.i(TAG,"Finish cost=" + cost);
                             if (toolListener != null) {
-                                toolListener.onComplete();
+                                if (result) {
+                                    toolListener.onComplete();
+                                } else {
+                                    toolListener.onError();
+                                }
                             }
                         }
                     }
@@ -141,18 +169,21 @@ public class AnimTool {
     }
 
     private void createFrame(CommonArg commonArg, int frameIndex) throws Exception {
-        int w = commonArg.videoW;
-        int h = commonArg.videoH;
         File inputFile = new File(commonArg.inputPath + String.format("%03d", frameIndex)+".png");
-        GetAlphaFrame.AlphaFrameOut videoFrame = getAlphaFrame.createFrame(commonArg.orin, w, h,
-                commonArg.gap, commonArg.wFill, commonArg.hFill, inputFile);
+        GetAlphaFrame.AlphaFrameOut videoFrame = getAlphaFrame.createFrame(commonArg, inputFile);
+        if (commonArg.isVapx) {
+            FrameSet.FrameObj frameObj = getMaskFrame.getFrameObj(frameIndex, commonArg, videoFrame.argb);
+            if (frameObj != null) {
+                commonArg.frameSet.frameObjs.add(frameObj);
+            }
+        }
         if (videoFrame == null) {
             TLog.i(TAG, "frameIndex="+frameIndex +" is empty");
             return;
         }
         // 最后保存图片
-        BufferedImage outBuf = new BufferedImage(videoFrame.outW, videoFrame.outH, BufferedImage.TYPE_INT_ARGB);
-        outBuf.setRGB(0,0, videoFrame.outW, videoFrame.outH, videoFrame.argb, 0, videoFrame.outW);
+        BufferedImage outBuf = new BufferedImage(commonArg.outputW, commonArg.outputH, BufferedImage.TYPE_INT_ARGB);
+        outBuf.setRGB(0,0, commonArg.outputW, commonArg.outputH, videoFrame.argb, 0, commonArg.outputW);
 
         File outputFile = new File(commonArg.frameOutputPath + String.format("%03d", frameIndex) +".png");
         ImageIO.write(outBuf, "PNG", outputFile);
@@ -170,7 +201,7 @@ public class AnimTool {
      * 创建最终的视频
      * @param commonArg
      */
-    private void createVideo(CommonArg commonArg){
+    private boolean createVideo(CommonArg commonArg) {
         try {
             // 创建配置json文件
             createVapcJson(commonArg);
@@ -178,26 +209,52 @@ public class AnimTool {
             boolean result = createMp4(commonArg, commonArg.outputPath, commonArg.frameOutputPath);
             if (!result) {
                 TLog.i(TAG, "createMp4 fail");
-                return;
+                deleteFile(commonArg);
+                return false;
             }
+            String tempVideoName = TEMP_VIDEO_FILE;
+            if (commonArg.needAudio) {
+                result = mergeAudio2Mp4(commonArg, tempVideoName);
+                if (!result) {
+                    TLog.i(TAG, "mergeAudio2Mp4 fail");
+                    deleteFile(commonArg);
+                    return false;
+                }
+                tempVideoName = TEMP_VIDEO_AUDIO_FILE;
+            }
+
             String input = commonArg.outputPath + VAPC_JSON_FILE;
             // 由json变为bin文件
             String vapcBinPath = mp4BoxTool(input, commonArg.outputPath);
             // 将bin文件合并到mp4里
-            result = mergeBin2Mp4(commonArg, vapcBinPath, commonArg.outputPath);
+            result = mergeBin2Mp4(commonArg, vapcBinPath, tempVideoName, commonArg.outputPath);
             if (!result) {
                 TLog.i(TAG, "mergeBin2Mp4 fail");
-                return;
+                deleteFile(commonArg);
+                return false;
             }
-            // 删除临时视频文件
-            new File(commonArg.outputPath + TEM_VIDEO_FILE).delete();
-            new File(commonArg.outputPath + VAPC_BIN_FILE).delete();
+            deleteFile(commonArg);
             // 计算文件md5
             String md5 = new Md5Util().getFileMD5(new File(commonArg.outputPath + VIDEO_FILE), commonArg.outputPath);
             TLog.i(TAG, "md5="+md5);
         } catch (Exception e) {
-            e.printStackTrace();
+            TLog.e(TAG, "createVideo error:" + e.getMessage());
+            return false;
+        }
+        return true;
+    }
+
+    private void deleteFile(CommonArg commonArg) {
+        // 删除临时视频文件
+        File file;
+        file = new File(commonArg.outputPath + TEMP_VIDEO_FILE);
+        if (file.exists()) file.delete();
+        if (commonArg.needAudio) {
+            file = new File(commonArg.outputPath + TEMP_VIDEO_AUDIO_FILE);
+            if (file.exists()) file.delete();
         }
+        file = new File(commonArg.outputPath + VAPC_BIN_FILE);
+        if (file.exists()) file.delete();
     }
 
     /**
@@ -205,47 +262,43 @@ public class AnimTool {
      * @param commonArg
      */
     private void createVapcJson(CommonArg commonArg) {
-        String json = "{\"info\":{\"v\":$(v),\"f\":$(f),\"w\":$(w),\"h\":$(h),\"videoW\":$(videoW),\"videoH\":$(videoH),\"orien\":0,\"fps\":$(fps),\"isVapx\":0,\"aFrame\":$(aFrame),\"rgbFrame\":$(rgbFrame)}}";
-        json = json.replace("$(v)", String.valueOf(commonArg.version));
-        json = json.replace("$(f)", String.valueOf(commonArg.totalFrame));
-        json = json.replace("$(w)", String.valueOf(commonArg.videoW));
-        json = json.replace("$(h)", String.valueOf(commonArg.videoH));
-        json = json.replace("$(fps)", String.valueOf(commonArg.fps));
-        int realW = 0;
-        int realH = 0;
-        int cx, cy;
-        String aFrame = "[0,0,"+commonArg.videoW+","+commonArg.videoH+"]";
-        String rgbFrame = "[0,0,0,0]";
-        if (commonArg.orin == CommonArg.ORIN_H) { // 水平对齐
-            realW = 2 * commonArg.videoW + commonArg.gap;
-            realH = commonArg.videoH;
-            cx = commonArg.videoW + commonArg.gap;
-            cy = 0;
-        } else { // 上下对齐
-            realW = commonArg.videoW;
-            realH = 2 * commonArg.videoH + commonArg.gap;
-            cx = 0;
-            cy = commonArg.videoH + commonArg.gap;
+
+        String json = "\"info\":{" +
+                "\"v\":" + commonArg.version + "," +
+                "\"f\":" + commonArg.totalFrame + "," +
+                "\"w\":" + commonArg.rgbPoint.w + "," +
+                "\"h\":" + commonArg.rgbPoint.h + "," +
+                "\"fps\":" + commonArg.fps + "," +
+                "\"videoW\":" + commonArg.outputW + "," +
+                "\"videoH\":" + commonArg.outputH + "," +
+                "\"aFrame\":" + commonArg.alphaPoint.toString() + "," +
+                "\"rgbFrame\":" + commonArg.rgbPoint.toString() + "," +
+                "\"isVapx\":" + (commonArg.isVapx ? 1 : 0) + "," +
+                "\"orien\":" + 0 +
+                "}";
+        TLog.i(TAG, "{" + json + "}");
+
+        StringBuilder sb = new StringBuilder();
+        sb.append("{");
+        sb.append(json);
+        if (commonArg.isVapx) {
+            sb.append(",");
+            sb.append(commonArg.srcSet.toString());
+            sb.append(",");
+            sb.append(commonArg.frameSet.toString());
         }
-        rgbFrame = "["+cx+","+cy+","+commonArg.videoW+","+commonArg.videoH+"]";
-
-        realW += commonArg.wFill;
-        realH += commonArg.hFill;
-        json = json.replace("$(videoW)", String.valueOf(realW));
-        json = json.replace("$(videoH)", String.valueOf(realH));
-        json = json.replace("$(aFrame)", aFrame);
-        json = json.replace("$(rgbFrame)", rgbFrame);
+        sb.append("}");
+        json = sb.toString();
+
         try {
             BufferedWriter writer = new BufferedWriter(new FileWriter(commonArg.outputPath + VAPC_JSON_FILE));
             writer.write(json);
             writer.flush();
             writer.close();
         } catch (IOException e) {
-            e.printStackTrace();
+            TLog.e(TAG, "createVapcJson error:" + e.getMessage());
             throw new RuntimeException();
         }
-        TLog.i(TAG,json);
-
     }
 
 
@@ -254,50 +307,97 @@ public class AnimTool {
 
     /**
      * 创建mp4
-     * @param commonArg
-     * @throws Exception
      */
     private boolean createMp4(CommonArg commonArg, String videoPath, String frameImagePath) throws Exception {
-        String[] cmd = null;
+
+        TLog.i(TAG, "run createMp4");
+        int result = ProcessUtil.run(getFFmpegCmd(commonArg, videoPath, frameImagePath));
+        TLog.i(TAG, "createMp4 result=" + (result == 0? "success" : "fail"));
+        return result == 0;
+    }
+
+    private String[] getFFmpegCmd(CommonArg commonArg, String videoPath, String frameImagePath) {
+        String[] cmd;
         if (commonArg.enableH265) {
-            cmd = new String[] {commonArg.ffmpegCmd, "-r", String.valueOf(commonArg.fps),
-                    "-i", frameImagePath + "%03d.png",
-                    "-pix_fmt", "yuv420p",
-                    "-vcodec", "libx265",
-                    "-b:v", "2000k",
-                    "-profile:v", "main",
-                    "-level", "4.0",
-                    "-tag:v", "hvc1",
-                    "-bufsize", "2000k",
-                    "-y", videoPath + TEM_VIDEO_FILE};
+            if (commonArg.enableCrf) {
+                cmd = new String[] {commonArg.ffmpegCmd, "-framerate", String.valueOf(commonArg.fps),
+                        "-i", frameImagePath + "%03d.png",
+                        "-pix_fmt", "yuv420p",
+                        "-vcodec", "libx265",
+                        "-crf", Integer.toString(commonArg.crf),
+                        "-profile:v", "main",
+                        "-level", "4.0",
+                        "-tag:v", "hvc1",
+                        "-bufsize", "2000k",
+                        "-y", videoPath + TEMP_VIDEO_FILE};
+            } else {
+                cmd = new String[] {commonArg.ffmpegCmd, "-framerate", String.valueOf(commonArg.fps),
+                        "-i", frameImagePath + "%03d.png",
+                        "-pix_fmt", "yuv420p",
+                        "-vcodec", "libx265",
+                        "-b:v", commonArg.bitrate + "k",
+                        "-profile:v", "main",
+                        "-level", "4.0",
+                        "-tag:v", "hvc1",
+                        "-bufsize", "2000k",
+                        "-y", videoPath + TEMP_VIDEO_FILE};
+            }
+
         } else {
-            cmd = new String[]{commonArg.ffmpegCmd, "-r", String.valueOf(commonArg.fps),
-                    "-i", frameImagePath + "%03d.png",
-                    "-pix_fmt", "yuv420p",
-                    "-vcodec", "libx264",
-                    "-b:v", "3000k",
-                    "-profile:v", "baseline",
-                    "-level", "3.0",
-                    "-bf", "0",
-                    "-y", videoPath + TEM_VIDEO_FILE};
+            if (commonArg.enableCrf) {
+                cmd = new String[]{commonArg.ffmpegCmd, "-framerate", String.valueOf(commonArg.fps),
+                        "-i", frameImagePath + "%03d.png",
+                        "-pix_fmt", "yuv420p",
+                        "-vcodec", "libx264",
+                        "-crf", Integer.toString(commonArg.crf),
+                        "-profile:v", "main",
+                        "-level", "4.0",
+                        "-bf", "0",
+                        "-bufsize", "2000k",
+                        "-y", videoPath + TEMP_VIDEO_FILE};
+            } else {
+                cmd = new String[]{commonArg.ffmpegCmd, "-framerate", String.valueOf(commonArg.fps),
+                        "-i", frameImagePath + "%03d.png",
+                        "-pix_fmt", "yuv420p",
+                        "-vcodec", "libx264",
+                        "-b:v", commonArg.bitrate + "k",
+                        "-profile:v", "main",
+                        "-level", "4.0",
+                        "-bf", "0",
+                        "-bufsize", "2000k",
+                        "-y", videoPath + TEMP_VIDEO_FILE};
+            }
+
         }
 
-        TLog.i(TAG, "run createMp4");
+        return cmd;
+    }
+
+    /**
+     * 合并音频文件
+     */
+    private boolean mergeAudio2Mp4(CommonArg commonArg, String tempVideoFile) throws Exception {
+        String[] cmd = new String[] {commonArg.ffmpegCmd,
+                "-i", commonArg.audioPath,
+                "-i", commonArg.outputPath + tempVideoFile,
+                "-c:v", "copy",
+                "-c:a", "aac",
+                "-y", commonArg.outputPath + TEMP_VIDEO_AUDIO_FILE};
+        TLog.i(TAG, "run mergeAudio2Mp4");
         int result = ProcessUtil.run(cmd);
-        TLog.i(TAG, "createMp4 result=" + (result == 0? "success" : "fail"));
+        TLog.i(TAG, "mergeAudio2Mp4 result=" + (result == 0? "success" : "fail"));
         return result == 0;
     }
 
+
     /**
      * 合并vapc.bin到mp4里
-     * @param inputFile
-     * @throws Exception
      */
-    private boolean mergeBin2Mp4(CommonArg commonArg, String inputFile, String videoPath) throws Exception{
-        String[] cmd = new String[] {commonArg.mp4editCmd, "--insert", ":"+inputFile+":1", videoPath + TEM_VIDEO_FILE, videoPath + VIDEO_FILE};
+    private boolean mergeBin2Mp4(CommonArg commonArg, String inputFile, String tempVideoFile, String videoPath) throws Exception {
+        String[] cmd = new String[]{commonArg.mp4editCmd, "--insert", ":" + inputFile + ":3", videoPath + tempVideoFile, videoPath + VIDEO_FILE};
         TLog.i(TAG, "run mergeBin2Mp4");
         int result = ProcessUtil.run(cmd);
-        TLog.i(TAG, "mergeBin2Mp4 result=" + (result == 0? "success" : "fail"));
+        TLog.i(TAG, "mergeBin2Mp4 result=" + (result == 0 ? "success" : "fail"));
         return result == 0;
     }
 
@@ -313,8 +413,13 @@ public class AnimTool {
 
     public interface IToolListener {
         void onProgress(float progress);
+        void onWarning(String msg);
         void onError();
         void onComplete();
     }
 
+    private interface IRunResult {
+        boolean run();
+    }
+
 }

+ 38 - 12
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/CommonArg.java

@@ -15,10 +15,11 @@
  */
 package com.tencent.qgame.playerproj.animtool;
 
-public class CommonArg {
+import com.tencent.qgame.playerproj.animtool.data.PointRect;
+import com.tencent.qgame.playerproj.animtool.vapx.FrameSet;
+import com.tencent.qgame.playerproj.animtool.vapx.SrcSet;
 
-    public static final int ORIN_H = 1; // 左右对齐
-    public static final int ORIN_V = 2; // 上下对齐
+public class CommonArg {
 
     public String ffmpegCmd = "ffmpeg"; // ffmpeg 命令地址
 
@@ -26,14 +27,20 @@ public class CommonArg {
 
     public boolean enableH265 = false; // 是否开启h265
 
-    public int fps = 0;
+    public int fps = 25;
 
     public String inputPath; // 输入帧文件地址
 
+    public float scale = 0.5f; // alpha 区域缩放大小
+
+    public boolean enableCrf = false; // 是否开启可变码率
+
+    public int bitrate = 2000; // 码率
 
+    public int crf = 29; // 0(无损) - 50(最大压缩)
 
     /**
-     * 无需手动配置
+     * 自动填充参数配置
      */
     public String outputPath; // 输出地址
 
@@ -41,19 +48,32 @@ public class CommonArg {
 
     public int version = 2;
 
-    public int orin = ORIN_H;
+    public int gap; // rgb 与 alpha 之间间隔距离
 
-    public int videoW;
+    public int totalFrame;
 
-    public int videoH;
+    public PointRect rgbPoint = new PointRect(); // rgb 区域 原始图像区域
 
-    public int gap; // rgb 与 alpha 之间间隔距离
+    public PointRect alphaPoint = new PointRect();  // alpha 区域
 
-    public int wFill; // 宽度填充
+    public boolean isVLayout = false; // 是否为垂直布局
 
-    public int hFill; // 高度填充
+    public int outputW = 0; // 输出最终视频的宽高
 
-    public int totalFrame;
+    public int outputH = 0;
+
+    public boolean needAudio = false;
+
+    public String audioPath; // 音频地址
+
+    /**
+     * 融合动画相关参数
+     */
+
+    public boolean isVapx = false;
+
+    public SrcSet srcSet = new SrcSet();
+    public FrameSet frameSet = new FrameSet();
 
     @Override
     public String toString() {
@@ -62,7 +82,13 @@ public class CommonArg {
                 ", mp4editCmd='" + mp4editCmd + '\'' +
                 ", enableH265=" + enableH265 +
                 ", fps=" + fps +
+                ", enableCrf=" + enableCrf +
+                ", bitrate=" + bitrate +
+                ", crf=" + crf +
+                ", scale=" + scale +
                 ", inputPath='" + inputPath + '\'' +
+                ", needAudio=" + needAudio + '\'' +
+                ", audioPath='" + audioPath + '\'' +
                 '}';
     }
 }

+ 118 - 28
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/CommonArgTool.java

@@ -1,5 +1,7 @@
 package com.tencent.qgame.playerproj.animtool;
 
+import com.tencent.qgame.playerproj.animtool.vapx.SrcSet;
+
 import java.awt.image.BufferedImage;
 import java.io.File;
 
@@ -17,59 +19,138 @@ class CommonArgTool {
      * 参数自动填充
      * @param commonArg
      */
-    static boolean autoFillAndCheck(CommonArg commonArg) throws Exception {
+    static boolean autoFillAndCheck(CommonArg commonArg, AnimTool.IToolListener toolListener) throws Exception {
 
         String os = System.getProperty("os.name");
         TLog.i(TAG, os);
 
-        if (commonArg.inputPath == null && commonArg.inputPath == "") {
-            TLog.i(TAG, "error: input path invalid");
+        if (commonArg.inputPath == null || "".equals(commonArg.inputPath)) {
+            TLog.e(TAG, "input path invalid");
             return false;
         }
 
         //  路径检查
         File input = new File(commonArg.inputPath);
         if (!input.exists()) {
-            TLog.i(TAG, "error: input path invalid " + commonArg.inputPath);
+            TLog.e(TAG, "input path invalid " + commonArg.inputPath);
             return false;
         }
 
         if (!File.separator.equals(commonArg.inputPath.substring(commonArg.inputPath.length() - 1))) {
             commonArg.inputPath = commonArg.inputPath + File.separator;
         }
+
+        // 检查音频文件是否存在
+        if (commonArg.needAudio) {
+            File audio = commonArg.audioPath == null ? null : new File(commonArg.audioPath);
+            if (audio == null || commonArg.audioPath.length() < 3 || !audio.exists()) {
+                TLog.e(TAG , "audio file not exists " + commonArg.audioPath);
+                return false;
+            }
+            String type = commonArg.audioPath.substring(commonArg.audioPath.length() - 3).toLowerCase();
+            if (!"mp3".equals(type)) {
+                TLog.e(TAG , "audio file must be mp3 file " + commonArg.audioPath);
+                return false;
+            }
+        }
+
         // output path
         commonArg.outputPath = commonArg.inputPath + AnimTool.OUTPUT_DIR;
 
         // 帧图片生成路径
         commonArg.frameOutputPath = commonArg.outputPath + AnimTool.FRAME_IMAGE_DIR;
 
+        // srcId自动生成 & 融合动画路径检查 & z序
+        if (commonArg.isVapx) {
+            // vapx 强制缩小
+            commonArg.scale = 0.5f;
+            int size = commonArg.srcSet.srcs.size();
+            SrcSet.Src src;
+            for (int i=0; i<size; i++) {
+                src = commonArg.srcSet.srcs.get(i);
+                src.srcId = String.valueOf(i);
+                src.z = i;
+                File srcPath = new File(src.srcPath);
+                if (!srcPath.exists()) {
+                    TLog.e(TAG, "src="+ src.srcId+",path invalid " + src.srcPath);
+                    continue;
+                }
+                if (!File.separator.equals(src.srcPath.substring(src.srcPath.length() - 1))) {
+                    src.srcPath = src.srcPath + File.separator;
+                }
+            }
+        }
+
+        // 限定scale的值
+        if (commonArg.scale < 0.5f) {
+            commonArg.scale = 0.5f;
+        }
+
+        if (commonArg.scale > 1f) {
+            commonArg.scale = 1f;
+        }
+
         // 检查第一帧
         File firstFrame = new File(commonArg.inputPath + "000.png");
         if (!firstFrame.exists()) {
-            TLog.i(TAG, "error: first frame 000.png does not exist");
+            TLog.e(TAG, "first frame 000.png does not exist");
             return false;
         }
         // 获取视频高度
         BufferedImage inputBuf = ImageIO.read(firstFrame);
-        commonArg.videoW = inputBuf.getWidth();
-        commonArg.videoH = inputBuf.getHeight();
-        if (commonArg.videoW <= 0 || commonArg.videoH <= 0) {
-            TLog.i(TAG, "error: video size " + commonArg.videoW + "x" + commonArg.videoH);
+        commonArg.rgbPoint.w = inputBuf.getWidth();
+        commonArg.rgbPoint.h = inputBuf.getHeight();
+        if (commonArg.rgbPoint.w <= 0 || commonArg.rgbPoint.h <= 0) {
+            TLog.e(TAG, "video size " + commonArg.rgbPoint.w + "x" + commonArg.rgbPoint.h);
             return false;
         }
 
-
-        // 计算视频最佳方向
-        commonArg.orin = commonArg.videoW >= commonArg.videoH ? CommonArg.ORIN_V : CommonArg.ORIN_H;
-
         // 设置元素之间宽度
         commonArg.gap = MIN_GAP;
 
-        // 计算出 16倍数的视频
-        int[] size = calSizeFill(commonArg.orin, commonArg.gap, commonArg.videoW, commonArg.videoH, 0, 0);
-        commonArg.wFill = size[0];
-        commonArg.hFill = size[1];
+        // 计算alpha区域大小
+        commonArg.alphaPoint.w = (int) (commonArg.rgbPoint.w * commonArg.scale);
+        commonArg.alphaPoint.h = (int) (commonArg.rgbPoint.h * commonArg.scale);
+
+        // 计算视频最佳方向 (最长边最小原则)
+        int hW = commonArg.rgbPoint.w + commonArg.gap + commonArg.alphaPoint.w;
+        int hH = commonArg.rgbPoint.h;
+        int hMaxLen = Math.max(hW, hH);
+
+        int vW = commonArg.rgbPoint.w;
+        int vH = commonArg.rgbPoint.h + commonArg.gap + commonArg.alphaPoint.h;
+        int vMaxLen = Math.max(vW, vH);
+
+        if (hMaxLen > vMaxLen) { // 竖直布局
+            commonArg.isVLayout = true;
+            commonArg.alphaPoint.x = 0;
+            commonArg.alphaPoint.y = commonArg.rgbPoint.h + commonArg.gap;
+
+            commonArg.outputW = commonArg.rgbPoint.w;
+            commonArg.outputH = commonArg.rgbPoint.h + commonArg.gap + commonArg.alphaPoint.h;
+        } else { // 水平布局
+            commonArg.isVLayout = false;
+            commonArg.alphaPoint.x = commonArg.rgbPoint.w + commonArg.gap;
+            commonArg.alphaPoint.y = 0;
+
+            commonArg.outputW = commonArg.rgbPoint.w + commonArg.gap + commonArg.alphaPoint.w;
+            commonArg.outputH = commonArg.rgbPoint.h;
+        }
 
+        // 计算出 16倍数的视频
+        int[] size = calSizeFill(commonArg.outputW, commonArg.outputH);
+        // 得到最终视频宽高
+        commonArg.outputW += size[0];
+        commonArg.outputH += size[1];
+
+        if (commonArg.outputW > 1504 || commonArg.outputH > 1504) {
+            String msg = "[Warning] Output video width:" + commonArg.outputW + " or height:" + commonArg.outputH
+                    + " is over 1504. Some devices will display exception. For example green screen!";
+            TLog.w(TAG, msg);
+            if (toolListener != null) {
+                toolListener.onWarning(msg);
+            }
+        }
 
         // 获取总帧数
         commonArg.totalFrame = 0;
@@ -84,11 +165,21 @@ class CommonArgTool {
 
 
         if (commonArg.totalFrame <= 0) {
-            TLog.i(TAG, "error: totalFrame=" + commonArg.totalFrame);
+            TLog.e(TAG, "totalFrame=" + commonArg.totalFrame);
             return false;
         }
 
+        // 码率检查
+        if (!commonArg.enableCrf && commonArg.bitrate <= 0) {
+            TLog.e(TAG, "bitrate=" + commonArg.bitrate);
+            return false;
+        }
 
+        // crf检查
+        if (commonArg.enableCrf && (commonArg.crf < 0 || commonArg.crf > 51)) {
+            TLog.e(TAG, "crf=" + commonArg.crf + ", no in [0, 51]");
+            return false;
+        }
 
         return true;
     }
@@ -96,18 +187,17 @@ class CommonArgTool {
     /**
      * 寻找最小wFill & hFill情况下 整个视频宽高能被16整除
      */
-    private static int[] calSizeFill(int orin, int gap, int w, int h, int wFill, int hFill) {
-        int outW = (orin == CommonArg.ORIN_H ? (w * 2 + gap) : w) + wFill;
-        int outH = (orin == CommonArg.ORIN_H ? h : (h * 2 + gap)) + hFill;
-
-        boolean wCheck = outW % 16 == 0;
-        boolean hCheck = outH % 16 == 0;
-        if (wCheck && hCheck) {
-            return new int[]{wFill, hFill};
+    private static int[] calSizeFill(int outW, int outH) {
+        int wFill = 0;
+        if (outW % 16 != 0) {
+            wFill = ((outW / 16) + 1) * 16 - outW;
         }
 
-        // 递归计算
-        return calSizeFill(orin, gap, w, h, wCheck? wFill : wFill + 1, hCheck? hFill : hFill + 1);
+        int hFill = 0;
+        if (outH % 16 != 0) {
+            hFill = ((outH / 16) + 1) * 16 - outH;
+        }
+        return new int[]{wFill, hFill};
     }
 
 

+ 53 - 45
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/GetAlphaFrame.java

@@ -15,7 +15,12 @@
  */
 package com.tencent.qgame.playerproj.animtool;
 
+import com.tencent.qgame.playerproj.animtool.data.PointRect;
+
 import javax.imageio.ImageIO;
+
+import java.awt.geom.AffineTransform;
+import java.awt.image.AffineTransformOp;
 import java.awt.image.BufferedImage;
 import java.io.File;
 import java.io.IOException;
@@ -23,52 +28,26 @@ import java.util.Arrays;
 
 public class GetAlphaFrame {
 
-    public static final int ORIN_H = 1; // 左右对齐
-    public static final int ORIN_V = 2; // 上下对齐
-
-
     public static class AlphaFrameOut {
 
-
-        public int orin;
         public int[] argb;
-        public int w;
-        public int h;
-        public int outW;
-        public int outH;
-        public int gap;
 
-
-        public AlphaFrameOut(int orin, int[] argb, int w, int h, int outW, int outH, int gap) {
-            this.orin = orin;
+        public AlphaFrameOut(int[] argb) {
             this.argb = argb;
-            this.w = w;
-            this.h = h;
-            this.outW = outW;
-            this.outH = outH;
-            this.gap = gap;
         }
 
     }
 
-    /**
-     *
-     * @param orin
-     * @param w 原图像宽
-     * @param h 原图像高
-     * @param gap rgb 与 alpha 之间间隔距离
-     * @param inputFile
-     * @return
-     * @throws IOException
-     */
-    public AlphaFrameOut createFrame(int orin, int w, int h, int gap, int wFill, int hFill, File inputFile) throws IOException {
+    public AlphaFrameOut createFrame(CommonArg commonArg, File inputFile) throws IOException {
 
         if (!inputFile.exists()) {
             return null;
         }
 
-        int outW = (orin == ORIN_H ? (w * 2 + gap) : w) + wFill;
-        int outH = (orin == ORIN_H ? h : (h * 2 + gap)) + hFill;
+        int w = commonArg.rgbPoint.w;
+        int h = commonArg.rgbPoint.h;
+        int outW = commonArg.outputW;
+        int outH = commonArg.outputH;
 
         BufferedImage inputBuf = ImageIO.read(inputFile);
         int[] inputArgb = inputBuf.getRGB(0, 0, w, h, null, 0, w);
@@ -76,23 +55,52 @@ public class GetAlphaFrame {
         int[] outputArgb = new int[outW * outH];
         Arrays.fill(outputArgb, 0xff000000);
 
-        for (int k=0; k<2; k++) {
-            for (int x = 0; x < w; x++) {
-                for (int y = 0; y < h; y++) {
-                    int outPoint = orin == ORIN_H ? k * (w + gap) + x + y * outW : k * outW * (h + gap) + x + y * outW;
-                    if (k == 0) {
-                        int alpha = inputArgb[x + y * w] >>> 24;
-                        // r = g = b
-                        outputArgb[outPoint] = 0xff000000 + (alpha << 16) + (alpha << 8) + alpha;
-                    } else {
-                        outputArgb[outPoint] = blendBg(inputArgb[x + y * w], 0xff000000);
-                    }
-                }
+        BufferedImage alphaBuf = inputBuf;
+        int[] alphaArgb = inputArgb;
+
+        if (commonArg.scale < 1f) {
+            AffineTransform at = new AffineTransform();
+            at.scale(commonArg.scale, commonArg.scale);
+
+            alphaBuf = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
+            AffineTransformOp scaleOp = new AffineTransformOp(at, AffineTransformOp.TYPE_BILINEAR);
+            alphaBuf = scaleOp.filter(inputBuf, alphaBuf);
+
+            alphaArgb = alphaBuf.getRGB(0, 0, w, h, null, 0, w);
+        }
+
+        // rgb 区域
+        fillColor(outputArgb, outW, commonArg.rgbPoint, false, inputArgb, w);
+
+        // alpha 区域
+        fillColor(outputArgb, outW, commonArg.alphaPoint, true, alphaArgb, w);
+
+        return new AlphaFrameOut(outputArgb);
+
+    }
+
+
+    private void fillColor(int[] outputArgb, int outputW, PointRect point, boolean isAlpha, int[] inputArgb, int inputW) {
+        int outX = 0;
+        int outY = 0;
+        for (int y = 0; y < point.h ; y++) {
+            outY = point.y + y;
+            for (int x = 0; x < point.w ; x++) {
+                outX = point.x + x;
+                int color = inputArgb[x + y * inputW];
+                outputArgb[outX + outY * outputW] = isAlpha ? getAlpha(color) : getColor(color);
             }
         }
+    }
 
-        return new AlphaFrameOut(orin, outputArgb, w, h, outW, outH, gap);
+    private int getColor(int color) {
+        return blendBg(color, 0xff000000);
+    }
 
+    private int getAlpha(int color) {
+        int alpha = color >>> 24;
+        // r = g = b
+        return 0xff000000 + (alpha << 16) + (alpha << 8) + alpha;
     }
 
     private int blendBg(int color, int colorBg) {

+ 82 - 8
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/Main.java

@@ -17,6 +17,7 @@ package com.tencent.qgame.playerproj.animtool;
 
 
 import com.tencent.qgame.playerproj.animtool.ui.ToolUI;
+import com.tencent.qgame.playerproj.animtool.vapx.SrcSet;
 
 public class Main {
 
@@ -24,8 +25,12 @@ public class Main {
     public static void main(String[] args) throws Exception {
         // 启动UI界面
         new ToolUI().run();
-        // java工具
+
+        // java工具普通动画
         // animTool();
+
+        // java工具融合动画
+        // animVapxTool();
     }
 
 
@@ -36,13 +41,12 @@ public class Main {
      * 生成图片的工具
      * step 1 填写如下参数,运行后生成中间图片
      * step 2 进入outputPath目录,运行如下ffmpeg命令(需要预先安装ffmpeng)
-     * ffmpeg -r 24 -i "%03d.png" -pix_fmt yuv420p -vcodec libx264 -b:v 3000k -profile:v baseline -level 3.0 -bf 0 -y demo.mp4
      *
+     * h264
+     * ffmpeg -r 24 -i "%03d.png" -pix_fmt yuv420p -vcodec libx264 -b:v 3000k -profile:v main -level 4.0 -bf 0 -bufsize 3000k -y demo.mp4
      *
-     * -vcodec libx264 h264编码
-     * -b:v 3000K 表示码率为3000K,可以改变码率调节文件大小和视频清晰度
-     * -bf 0 没有B帧
-     * -profile:v baseline baseline模式
+     * h265
+     * ffmpeg -r 24 -i "%03d.png" -pix_fmt yuv420p -vcodec libx265 -b:v 2000k -profile:v main -level 4.0 -bf 0 -bufsize 2000k -tag:v hvc1 -y demo.mp4
      *
      * 使用固定码率能使文件更小,但会损失清晰度
      * 使用-crf 参数可以提高清晰度但文件大小不可控(会变大),推荐值 29(0 最好 51 最差)
@@ -58,21 +62,91 @@ public class Main {
         commonArg.mp4editCmd = "mp4edit";
 
         /*
-         * 是否开启h265(默认关闭)
+         * 是否开启h265
          * 优点:压缩率更高,视频更清晰
          * 缺点:Android 4.x系统 & 极少部分低端机 无法播放265视频
          */
-        commonArg.enableH265 = true;
+        commonArg.enableH265 = false;
         // fps
         commonArg.fps = 24;
         // 素材文件路径
         commonArg.inputPath = "/path/to/your/demo";
+        // alpha 区域缩放大小  (0.5 - 1)
+        commonArg.scale = 0.5f;
+
+        // 开始运行
+        AnimTool animTool = new AnimTool();
+        // needVideo true 直接生成video false 生成帧图片,由用户手动生成最终视频文件
+        animTool.create(commonArg, true);
+    }
+
+
+    /**
+     * 融合动画 demo
+     */
+    public static void animVapxTool() throws Exception {
+        final CommonArg commonArg = new CommonArg();
+        // ffmpeg 命令路径
+        commonArg.ffmpegCmd = "ffmpeg";
+        // bento4 mp4edit 命令路径
+        commonArg.mp4editCmd = "mp4edit";
+
+        String path = "/path/to/your/demo";
+
+        commonArg.enableH265 = false;
+        // fps
+        commonArg.fps = 24;
+        // 素材文件路径
+        commonArg.inputPath = path + "video";
+        // 启动融合动画
+        commonArg.isVapx = true;
+        if (commonArg.isVapx) {
+            // 融合动画默认需要缩放0.5f 空出区域
+            commonArg.scale = 0.5f;
+        }
+        // src 设置
+        commonArg.srcSet = getSrcSet(path);
+
 
         // 开始运行
         AnimTool animTool = new AnimTool();
         // needVideo true 直接生成video false 生成帧图片,由用户手动生成最终视频文件
         animTool.create(commonArg, true);
     }
+
+
+    private static SrcSet getSrcSet(String path) {
+        SrcSet srcSet = new SrcSet();
+
+        {
+            SrcSet.Src src = new SrcSet.Src();
+            src.srcPath = path + "mask1";
+            src.srcId = "1";
+            src.srcType = SrcSet.Src.SRC_TYPE_IMG;
+            src.srcTag = "head1";
+            src.fitType = SrcSet.Src.FIT_TYPE_CF;
+            srcSet.srcs.add(src);
+        }
+
+
+        {
+            SrcSet.Src src = new SrcSet.Src();
+            src.srcPath = path + "mask2";
+            src.srcId = "2";
+            src.srcType = SrcSet.Src.SRC_TYPE_TXT;
+            src.srcTag = "text1";
+            src.fitType = SrcSet.Src.FIT_TYPE_FITXY;
+            src.color = "#0000ff";
+            src.style = SrcSet.Src.TEXT_STYLE_BOLD;
+            srcSet.srcs.add(src);
+        }
+
+
+
+
+        return srcSet;
+    }
+
     /**
      * 生成对应的box bin
      * 执行 mp4edit --insert :vapc.bin:1 demo_origin.mp4 demo_output.mp4 插入对应box

+ 2 - 1
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/Md5Util.java

@@ -24,6 +24,7 @@ import java.io.InputStream;
 import java.security.MessageDigest;
 
 public class Md5Util {
+    public static final String MD5_FILE = "md5.txt";
     private char[] hexDigits = new char[] {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f'};
 
 
@@ -48,7 +49,7 @@ public class Md5Util {
                 md5txt = bufferToHex(digest);
             }
             try {
-                BufferedWriter writer = new BufferedWriter(new FileWriter(outputPath + "/md5.txt"));
+                BufferedWriter writer = new BufferedWriter(new FileWriter(outputPath + "/" + MD5_FILE));
                 writer.write(md5txt);
                 writer.flush();
                 writer.close();

+ 19 - 0
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/TLog.java

@@ -27,7 +27,26 @@ public class TLog {
         }
     }
 
+    public static void e(String tag, String msg) {
+        if (logger != null) {
+            logger.e(tag, msg);
+        } else {
+            System.out.println(tag + "\tError:" + msg);
+        }
+    }
+
+    public static void w(String tag, String msg) {
+        if (logger != null) {
+            logger.w(tag, msg);
+        } else {
+            System.out.println(tag + "\tWarning:" + msg);
+        }
+    }
+
+
     public interface ITLog {
         void i(String tag, String  msg);
+        void e(String tag, String  msg);
+        void w(String tag, String  msg);
     }
 }

+ 24 - 0
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/data/PointRect.java

@@ -0,0 +1,24 @@
+package com.tencent.qgame.playerproj.animtool.data;
+
+public class PointRect {
+
+    public int x = 0;
+    public int y = 0;
+    public int w = 0;
+    public int h = 0;
+
+    public PointRect() {
+    }
+
+    public PointRect(int x, int y, int w, int h) {
+        this.x = x;
+        this.y = y;
+        this.w = w;
+        this.h = h;
+    }
+
+    @Override
+    public String toString() {
+        return "["+ x +","+ y +","+ w +","+ h +"]";
+    }
+}

+ 283 - 39
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/ui/ToolUI.java

@@ -11,20 +11,27 @@ import java.awt.GridLayout;
 import java.awt.Toolkit;
 import java.awt.event.ActionEvent;
 import java.awt.event.ActionListener;
+import java.awt.event.ItemEvent;
+import java.awt.event.ItemListener;
 import java.awt.event.MouseAdapter;
 import java.awt.event.MouseEvent;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStreamWriter;
+import java.nio.charset.StandardCharsets;
 import java.util.Properties;
 
 import javax.swing.BoxLayout;
 import javax.swing.ButtonGroup;
 import javax.swing.JButton;
+import javax.swing.JComboBox;
 import javax.swing.JFileChooser;
 import javax.swing.JFrame;
 import javax.swing.JLabel;
+import javax.swing.JOptionPane;
 import javax.swing.JPanel;
 import javax.swing.JRadioButton;
 import javax.swing.JScrollPane;
@@ -37,57 +44,124 @@ import javax.swing.SpinnerNumberModel;
 public class ToolUI {
 
     private static final String TAG = "ToolUI";
+    private static final String TOOL_VERSION = "VAP tool 2.0.6";
     private static final String PROPERTIES_FILE = "setting.properties";
-    private final int WIDTH = 550;
-    private final int HEIGHT = 350;
+    public static final int WIDTH = 900;
+    public static final int HEIGHT = 750;
 
+    private final JFrame frame = new JFrame(TOOL_VERSION);
     private final ButtonGroup group = new ButtonGroup();
-    private final JRadioButton btnH265 = new JRadioButton("h265");
     private final JRadioButton btnH264 = new JRadioButton("h264");
+    private final JRadioButton btnH265 = new JRadioButton("h265");
     private final SpinnerModel modelFps = new SpinnerNumberModel(24, 1, 60, 1);
+    private final Float[] scaleArray = new Float[]{0.5f, 1f};
+    private final JComboBox<Float> boxScale = new JComboBox<>(scaleArray);
     private final JTextField textInputPath = new JTextField();
     private final JButton btnCreate = new JButton("create VAP");
     private final JTextArea txtAreaLog = new JTextArea();
+    private final JTextField textAudioPath = new JTextField();
+    private final JPanel panelAudioPath = new JPanel();
+
+    private final JPanel panelBitrate = new JPanel();
+    private final JTextField textBitrate = new JTextField();
+    private final JPanel panelCrf = new JPanel();
+    private final JTextField textCrf = new JTextField();
+
+    private final ButtonGroup groupQuality = new ButtonGroup();
+    private final JRadioButton btnBitrate = new JRadioButton("bitrate");
+    private final JRadioButton btnCrf = new JRadioButton("crf");
+
     private final JLabel labelOutInfo = new JLabel();
     private final Dimension labelSize = new Dimension(100, 20);
     private final Properties props = new Properties();
+    private final VapxUI vapxUI = new VapxUI(this);
+
+    private boolean needAudio = false;
+
+    private final ItemListener qualityGroupListener = new ItemListener() {
+        @Override
+        public void itemStateChanged(ItemEvent itemEvent) {
+            if (itemEvent.getSource() == btnBitrate) {
+                panelBitrate.setVisible(true);
+                panelCrf.setVisible(false);
+            } else if (itemEvent.getSource() == btnCrf) {
+                panelBitrate.setVisible(false);
+                panelCrf.setVisible(true);
+            }
+        }
+    };
 
-
-
-    public void run() {
+    public ToolUI() {
         TLog.logger = new TLog.ITLog() {
             @Override
             public void i(String tag, String msg) {
                 log(tag, msg);
             }
+
+            @Override
+            public void e(String tag, String msg) {
+                log(tag, "Error:" + msg);
+            }
+
+            @Override
+            public void w(String tag, String msg) {
+                log(tag, "Warning:" + msg);
+            }
         };
+    }
+
+
+    public void run() {
         createUI();
+        loadProperties();
+    }
+
+    private void loadProperties() {
         try {
             File file = new File(PROPERTIES_FILE);
             if (!file.exists()) {
                 file.createNewFile();
             }
-            props.load(new FileInputStream(PROPERTIES_FILE));
+            props.load(new InputStreamReader(new FileInputStream(PROPERTIES_FILE), StandardCharsets.UTF_8));
             CommonArg commonArg = getProperties();
             group.setSelected(commonArg.enableH265 ? btnH265.getModel() : btnH264.getModel(), true);
             modelFps.setValue(commonArg.fps);
             textInputPath.setText(commonArg.inputPath);
+            textAudioPath.setText(commonArg.audioPath);
+            textBitrate.setText(String.valueOf(commonArg.bitrate));
+            textCrf.setText(String.valueOf(commonArg.crf));
+            groupQuality.setSelected(commonArg.enableCrf ? btnCrf.getModel() : btnBitrate.getModel(), true);
+            if (commonArg.enableCrf) {
+                panelBitrate.setVisible(false);
+                panelCrf.setVisible(true);
+            } else {
+                panelBitrate.setVisible(true);
+                panelCrf.setVisible(false);
+            }
+
+            float scale = commonArg.scale;
+            for (int i = 0; i < scaleArray.length ; i++) {
+                if (scaleArray[i] == scale) {
+                    boxScale.setSelectedIndex(i);
+                    break;
+                }
+            }
         } catch (Exception e) {
-            TLog.i(TAG, "ERROR -> " + e.getMessage());
+            TLog.e(TAG, e.getMessage());
         }
     }
 
-
     private void runTool() {
         txtAreaLog.setText("");
+        TLog.i(TAG, TOOL_VERSION);
         new Thread(new Runnable() {
             @Override
             public void run() {
                 try {
                     runAnimTool();
                 } catch (Exception e) {
-                    TLog.i(TAG, "ERROR -> " + e.getMessage());
-                    btnCreate.setEnabled(true);
+                    TLog.e(TAG, e.getMessage());
+                    setOutput(false, "");
                 }
             }
         }).start();
@@ -112,6 +186,27 @@ public class ToolUI {
         commonArg.enableH265 = group.isSelected(btnH265.getModel());
         commonArg.fps = (Integer)modelFps.getValue();
         commonArg.inputPath = textInputPath.getText();
+        commonArg.scale = scaleArray[boxScale.getSelectedIndex()];
+        if (needAudio) {
+            commonArg.needAudio = true;
+            commonArg.audioPath = textAudioPath.getText();
+        }
+
+        if (vapxUI.isVapxEnable()) {
+            commonArg.isVapx = true;
+            commonArg.srcSet = vapxUI.getSrcSet();
+            if (commonArg.srcSet == null) {
+                return;
+            }
+        }
+        try {
+            commonArg.enableCrf = groupQuality.isSelected(btnCrf.getModel());
+            commonArg.bitrate = Integer.parseInt(textBitrate.getText());
+            commonArg.crf = Integer.parseInt(textCrf.getText());
+        } catch (NumberFormatException e) {
+            TLog.e(TAG, "bitrate/crf format error: bitrate=" + textBitrate.getText() + ", crf=" + textCrf.getText() + " " + e.getMessage());
+        }
+
         TLog.i(TAG, commonArg.toString());
 
         AnimTool animTool = new AnimTool();
@@ -122,20 +217,24 @@ public class ToolUI {
                 labelOutInfo.setText((Math.min(p, 99)) + "%");
             }
 
+            @Override
+            public void onWarning(String msg) {
+                JOptionPane.showMessageDialog(frame, msg, "Warning", JOptionPane.WARNING_MESSAGE);
+            }
+
             @Override
             public void onError() {
-                btnCreate.setEnabled(true);
+                setOutput(false, "");
             }
 
             @Override
             public void onComplete() {
-                btnCreate.setEnabled(true);
-                setOutput(commonArg.outputPath);
+                setOutput(true, commonArg.outputPath);
                 try {
                     setProperties(commonArg);
                     Desktop.getDesktop().open(new File(commonArg.outputPath));
                 } catch (IOException e) {
-                    TLog.i(TAG, "ERROR -> " + e.getMessage());
+                    TLog.e(TAG, e.getMessage());
                 }
             }
         });
@@ -145,7 +244,6 @@ public class ToolUI {
     }
 
     private void createUI() {
-        JFrame frame = new JFrame("VAP tool");
         frame.setSize(WIDTH, HEIGHT);
         frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
 
@@ -159,6 +257,10 @@ public class ToolUI {
         frame.setVisible(true);
     }
 
+    public String getInputPath() {
+        return textInputPath.getText();
+    }
+
     private void layout(JPanel panel) {
         BoxLayout layout = new BoxLayout(panel, BoxLayout.PAGE_AXIS);
         panel.setLayout(layout);
@@ -166,8 +268,20 @@ public class ToolUI {
         panel.add(getCodecLayout());
         // fps
         panel.add(getFpsLayout());
+        // bitrate/crf switch
+        panel.add(getQualityLayout());
+        // bitrate
+        panel.add(getBitrateLayout());
+        // crf
+        panel.add(getCrfLayout());
+        // scale
+        panel.add(getScaleLayout());
         // path
         panel.add(getPathLayout());
+        // audio path
+        panel.add(getAudioPathLayout());
+        // vapx
+        panel.add(vapxUI.createUI());
         // create
         panel.add(getCreateLayout());
         // log
@@ -187,11 +301,11 @@ public class ToolUI {
 
         JPanel panelRadio = new JPanel();
         panelRadio.setLayout(new GridLayout(1, 2));
-        panelRadio.add(btnH265);
         panelRadio.add(btnH264);
-        group.add(btnH265);
+        panelRadio.add(btnH265);
         group.add(btnH264);
-        group.setSelected(btnH265.getModel(), true);
+        group.add(btnH265);
+        group.setSelected(btnH264.getModel(), true);
         panel.add(panelRadio);
 
         return panel;
@@ -209,11 +323,66 @@ public class ToolUI {
         return panel;
     }
 
+    private JPanel getQualityLayout() {
+        JPanel panel = new JPanel();
+        panel.setLayout(new FlowLayout(FlowLayout.LEFT));
+
+        JLabel label = new JLabel("quality");
+        label.setPreferredSize(labelSize);
+        panel.add(label);
+
+        JPanel panelRadio = new JPanel();
+        panelRadio.setLayout(new GridLayout(1, 2));
+        panelRadio.add(btnBitrate);
+        panelRadio.add(btnCrf);
+        groupQuality.add(btnBitrate);
+        groupQuality.add(btnCrf);
+        groupQuality.setSelected(btnBitrate.getModel(), true);
+        btnBitrate.addItemListener(qualityGroupListener);
+        btnCrf.addItemListener(qualityGroupListener);
+        panel.add(panelRadio);
+
+        return panel;
+    }
+
+    private JPanel getBitrateLayout() {
+        panelBitrate.setLayout(new FlowLayout(FlowLayout.LEFT));
+        JLabel label = new JLabel("bitrate");
+        label.setPreferredSize(labelSize);
+        panelBitrate.add(label);
+        textBitrate.setPreferredSize(new Dimension(60, 20));
+        panelBitrate.add(textBitrate);
+        panelBitrate.add(new JLabel("k (default 2000k)"));
+        return panelBitrate;
+    }
+
+    private JPanel getCrfLayout() {
+        panelCrf.setLayout(new FlowLayout(FlowLayout.LEFT));
+        JLabel label = new JLabel("crf");
+        label.setPreferredSize(labelSize);
+        panelCrf.add(label);
+        textCrf.setPreferredSize(new Dimension(60, 20));
+        panelCrf.add(textCrf);
+        panelCrf.add(new JLabel("[0, 51] (default 29)"));
+        return panelCrf;
+    }
+
+    private JPanel getScaleLayout() {
+        JPanel panel = new JPanel();
+        panel.setLayout(new FlowLayout(FlowLayout.LEFT));
+        JLabel label = new JLabel("alpha scale");
+        label.setPreferredSize(labelSize);
+        panel.add(label);
+        panel.add(boxScale);
+        panel.add(new JLabel("(default 0.5)"));
+        return panel;
+    }
+
     private JPanel getPathLayout() {
         JPanel panel = new JPanel();
 
         panel.setLayout(new FlowLayout(FlowLayout.LEFT));
-        JLabel label = new JLabel("input path");
+        JLabel label = new JLabel("frames path");
         label.setPreferredSize(labelSize);
         panel.add(label);
         JPanel gPanel = new JPanel();
@@ -222,7 +391,7 @@ public class ToolUI {
         BoxLayout layout = new BoxLayout(gPanel, BoxLayout.LINE_AXIS);
         gPanel.setLayout(layout);
 
-        textInputPath.setPreferredSize(new Dimension(300,20));
+        textInputPath.setPreferredSize(new Dimension(400,20));
         gPanel.add(textInputPath);
 
         JButton btnInputPath = new JButton("choose");
@@ -230,7 +399,7 @@ public class ToolUI {
         btnInputPath.addActionListener(new ActionListener() {
             @Override
             public void actionPerformed(ActionEvent actionEvent) {
-                JFileChooser fileChooser = new JFileChooser();
+                JFileChooser fileChooser = new JFileChooser(new File(getInputPath()));
                 fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
                 int returnVal = fileChooser.showOpenDialog(fileChooser);
                 if(returnVal == JFileChooser.APPROVE_OPTION) {
@@ -244,18 +413,72 @@ public class ToolUI {
         return panel;
     }
 
-    private void setOutput(final String path) {
-        labelOutInfo.setText("<html><font color='blue'>open output</font></html>");
-        labelOutInfo.addMouseListener(new MouseAdapter() {
+
    /**
     * Builds the "audio(mp3)" row: a "+"/"x" toggle that shows or hides the
     * audio file picker used to mux an mp3 track into the generated video.
     * The picker itself lives in the shared {@code panelAudioPath} field.
     */
    private JPanel getAudioPathLayout() {
        JPanel panel = new JPanel();

        panel.setLayout(new FlowLayout(FlowLayout.LEFT));
        JLabel label = new JLabel("audio(mp3)");
        label.setPreferredSize(labelSize);
        panel.add(label);
        panel.add(panelAudioPath);
        final JLabel labelAudioAction = new JLabel("+");
        panel.add(labelAudioAction);
        labelAudioAction.addMouseListener(new MouseAdapter() {
            @Override
            public void mouseClicked(MouseEvent mouseEvent) {
                // Toggle audio on/off: "+" means collapsed, "x" means expanded.
                needAudio = !needAudio;
                panelAudioPath.setVisible(needAudio);
                labelAudioAction.setText(needAudio ? "x" : "+");
            }
        });

        BoxLayout layout = new BoxLayout(panelAudioPath, BoxLayout.LINE_AXIS);
        panelAudioPath.setLayout(layout);

        textAudioPath.setPreferredSize(new Dimension(400,20));
        panelAudioPath.add(textAudioPath);

        JButton btnInputPath = new JButton("choose");
        panelAudioPath.add(btnInputPath);
        btnInputPath.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent actionEvent) {
                // Start browsing from the frames input path for convenience.
                JFileChooser fileChooser = new JFileChooser(new File(getInputPath()));
                fileChooser.setFileSelectionMode(JFileChooser.FILES_ONLY);
                int returnVal = fileChooser.showOpenDialog(fileChooser);
                if(returnVal == JFileChooser.APPROVE_OPTION) {
                    // Absolute path of the selected audio file (FILES_ONLY chooser).
                    String filePath = fileChooser.getSelectedFile().getAbsolutePath();
                    textAudioPath.setText(filePath);
                }
            }
        });

        // Picker starts hidden until the user enables audio via the "+" toggle.
        if (!needAudio) {
            panelAudioPath.setVisible(false);
        }

        return panel;
    }
+
+    private void setOutput(boolean success, final String path) {
+        btnCreate.setEnabled(true);
+        if (success) {
+            labelOutInfo.setText("<html><font color='blue'>open output</font></html>");
+            labelOutInfo.addMouseListener(new MouseAdapter() {
+                @Override
+                public void mouseClicked(MouseEvent mouseEvent) {
+                    try {
+                        Desktop.getDesktop().open(new File(path));
+                    } catch (IOException e) {
+                        TLog.e(TAG, e.getMessage());
+                    }
+                }
+            });
+        } else {
+            labelOutInfo.setText("<html><font color='red'>create error!</font></html>");
+        }
     }
 
 
@@ -286,7 +509,8 @@ public class ToolUI {
         JScrollPane areaScrollPane = new JScrollPane(txtAreaLog);
         areaScrollPane.setVerticalScrollBarPolicy(
                 JScrollPane.VERTICAL_SCROLLBAR_ALWAYS);
-        areaScrollPane.setPreferredSize(new Dimension(WIDTH, 200));
+        areaScrollPane.setPreferredSize(new Dimension(WIDTH, 100));
+        areaScrollPane.setMinimumSize(new Dimension(WIDTH, 100));
 
         panel.add(areaScrollPane);
         panel.setPreferredSize(new Dimension(WIDTH, HEIGHT));
@@ -317,26 +541,46 @@ public class ToolUI {
 
     private CommonArg getProperties() {
         CommonArg commonArg = new CommonArg();
-        String enableH265 = props.getProperty("enableH265", Boolean.TRUE.toString());
-        String fps = props.getProperty("fps", "24");
-        String inputPath = props.getProperty("inputPath", "");
-
-        commonArg.enableH265 = Boolean.TRUE.toString().equals(enableH265);
         try {
+            String version = props.getProperty("version", "0");
+            String enableH265 = props.getProperty("enableH265", Boolean.toString(commonArg.enableH265));
+            String fps = props.getProperty("fps", String.valueOf(commonArg.fps));
+            String inputPath = props.getProperty("inputPath", "");
+            String scale = props.getProperty("scale", String.valueOf(scaleArray[0]));
+            String audioPath = props.getProperty("audioPath", "");
+            String bitrate = props.getProperty("bitrate", String.valueOf(commonArg.bitrate));
+            String enableCrf = props.getProperty("enableCrf", String.valueOf(commonArg.enableCrf));
+            String crf = props.getProperty("crf", String.valueOf(commonArg.crf));
+
+            int v = Integer.parseInt(version);
+            // 版本不符直接返回默认值
+            if (v != commonArg.version) return commonArg;
             commonArg.fps = Integer.parseInt(fps);
+            commonArg.scale = Float.parseFloat(scale);
+            commonArg.enableH265 = Boolean.TRUE.toString().equals(enableH265);
+            commonArg.inputPath = inputPath;
+            commonArg.audioPath = audioPath;
+            commonArg.bitrate = Integer.parseInt(bitrate);
+            commonArg.enableCrf = Boolean.TRUE.toString().equals(enableCrf);
+            commonArg.crf = Integer.parseInt(crf);
         } catch (Exception e) {
-            commonArg.fps = 24;
+            TLog.e(TAG, "getProperties error:" + e.getMessage());
         }
-        commonArg.inputPath = inputPath;
         return commonArg;
     }
 
 
     private void setProperties(CommonArg commonArg) throws IOException {
+        props.setProperty("version", String.valueOf(commonArg.version));
         props.setProperty("enableH265", commonArg.enableH265? Boolean.TRUE.toString() : Boolean.FALSE.toString());
-        props.setProperty("fps", commonArg.fps + "");
+        props.setProperty("fps", String.valueOf(commonArg.fps));
         props.setProperty("inputPath", commonArg.inputPath == null ? "" : commonArg.inputPath);
-        props.store(new FileOutputStream(PROPERTIES_FILE), "");
+        props.setProperty("audioPath", commonArg.audioPath == null ? "" : commonArg.audioPath);
+        props.setProperty("scale", String.valueOf(commonArg.scale));
+        props.setProperty("bitrate", String.valueOf(commonArg.bitrate));
+        props.setProperty("crf", String.valueOf(commonArg.crf));
+        props.setProperty("enableCrf", String.valueOf(commonArg.enableCrf));
+        props.store(new OutputStreamWriter(new FileOutputStream(PROPERTIES_FILE), StandardCharsets.UTF_8), "");
     }
 
 

+ 295 - 0
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/ui/VapxUI.java

@@ -0,0 +1,295 @@
+package com.tencent.qgame.playerproj.animtool.ui;
+
+import com.tencent.qgame.playerproj.animtool.TLog;
+import com.tencent.qgame.playerproj.animtool.vapx.SrcSet;
+
+import java.awt.Dimension;
+import java.awt.FlowLayout;
+import java.awt.event.ActionEvent;
+import java.awt.event.ActionListener;
+import java.awt.event.ItemEvent;
+import java.awt.event.ItemListener;
+import java.awt.event.MouseAdapter;
+import java.awt.event.MouseEvent;
+import java.io.File;
+import java.util.ArrayList;
+import java.util.List;
+
+import javax.swing.BoxLayout;
+import javax.swing.JButton;
+import javax.swing.JCheckBox;
+import javax.swing.JComboBox;
+import javax.swing.JFileChooser;
+import javax.swing.JLabel;
+import javax.swing.JOptionPane;
+import javax.swing.JPanel;
+import javax.swing.JScrollPane;
+import javax.swing.JSeparator;
+import javax.swing.JTextField;
+
/**
 * Swing panel for configuring VAPX "fusion" sources: dynamic images or text
 * that get composited onto the animation via mask frames. Each added source
 * is rendered as a {@link MaskUI} row; {@link #getSrcSet()} collects the rows
 * into a {@link SrcSet} for the generator.
 */
public class VapxUI {

    private static final String TAG = "VapxUI";

    private final Dimension labelSize = new Dimension(100, 20);
    // Scrollable container that stacks the "add" row plus one panel per source.
    private final JPanel controlPanel = new JPanel();
    private final List<MaskUI> maskUiList = new ArrayList<>();
    // Monotonically increasing id generator for sources (never reused on delete).
    private int index = 0;
    private ToolUI toolUI;
    // Removes a deleted source's row from the panel and the backing list.
    private final IMaskUIListener listener = new IMaskUIListener() {
        @Override
        public void onDelete(MaskUI maskUI) {
            controlPanel.remove(maskUI.getPanel());
            maskUiList.remove(maskUI);
            controlPanel.revalidate();
        }
    };

    public VapxUI(ToolUI toolUI) {
        this.toolUI = toolUI;
    }

    /** Builds and returns the root panel hosting the scrollable source list. */
    public JPanel createUI() {
        JPanel panel = new JPanel();
        panel.setLayout(new BoxLayout(panel, BoxLayout.PAGE_AXIS));
        panel.setPreferredSize(new Dimension(ToolUI.WIDTH, 300));
        panel.setMinimumSize(new Dimension(ToolUI.WIDTH, 300));
        controlPanel.setLayout(new BoxLayout(controlPanel, BoxLayout.PAGE_AXIS));
        controlPanel.add(getAddLayout());
        JScrollPane areaScrollPane = new JScrollPane(controlPanel);
        panel.add(areaScrollPane);
        return panel;
    }

    /** True when at least one fusion source has been added. */
    public boolean isVapxEnable() {
        return !maskUiList.isEmpty();
    }

    /**
     * Collects all configured sources.
     *
     * @return the populated SrcSet, or null when there are no sources or any
     *         row is invalid (the row itself pops an error dialog).
     */
    public SrcSet getSrcSet() {
        if (maskUiList.isEmpty()) return null;
        SrcSet srcSet = new SrcSet();

        SrcSet.Src src;
        for (MaskUI maskUI : maskUiList) {
            src = maskUI.getSrc();
            if (src == null) return null;
            srcSet.srcs.add(src);
        }

        return srcSet;
    }


    /** Builds the "add source" row with its add button and hint text. */
    private JPanel getAddLayout() {
        JPanel panel = new JPanel();
        panel.setLayout(new FlowLayout(FlowLayout.LEFT));
        JLabel label = new JLabel("add source");
        label.setPreferredSize(labelSize);

        JButton btnAdd = new JButton("add");
        btnAdd.addActionListener(new ActionListener() {
            @Override
            public void actionPerformed(ActionEvent actionEvent) {
                createMaskUI();
            }
        });

        panel.add(label);
        panel.add(btnAdd);
        panel.add(new JLabel("(simple video don't need add source)"));
        return panel;
    }

    /** Appends a new source row with the next id and refreshes the layout. */
    private void createMaskUI() {
        MaskUI maskUI = new MaskUI(toolUI, ++index, listener);
        controlPanel.add(maskUI.getPanel());
        maskUiList.add(maskUI);
        controlPanel.revalidate();
    }

    /**
     * One editable source row: tag, source type (image/text), fit type,
     * optional text styling, and the mask frame directory.
     */
    private static class MaskUI {
        private ToolUI toolUI;
        public IMaskUIListener listener;
        public int index;
        // Directory holding this source's per-frame mask PNGs; null until chosen.
        public String maskPath;
        public JPanel panel = new JPanel();

        private final JLabel labelIndex = new JLabel();
        private final JTextField textSrcTag = new JTextField();
        // image -> SrcSet.Src.SRC_TYPE_IMG text -> SrcSet.Src.SRC_TYPE_TXT
        private final String[] srcTypeArray = new String[]{"image", "text"};
        private final JComboBox<String> boxSrcType = new JComboBox<>(srcTypeArray);

        // centerCrop -> SrcSet.Src.FIT_TYPE_CF
        private final String[] fitTypeArray = new String[]{"fitXY", "centerCrop"};
        private final JComboBox<String> boxFitType = new JComboBox<>(fitTypeArray);

        // Text-only options; hidden unless source type is "text".
        private final JPanel txtPanel = new JPanel();
        private final JTextField textTxtColor = new JTextField();
        private final JCheckBox checkTxtBold = new JCheckBox("text Bold");

        final JLabel labelMaskPathState = new JLabel();


        public MaskUI(ToolUI toolUI, int index, IMaskUIListener listener) {
            this.toolUI = toolUI;
            this.index = index;
            this.listener = listener;
            createUI();
        }

        public JPanel getPanel() {
            return panel;
        }

        /**
         * Builds the Src described by this row.
         *
         * @return the Src, or null (after an error dialog) when the source
         *         tag or mask path is missing.
         */
        public SrcSet.Src getSrc() {
            SrcSet.Src src = new SrcSet.Src();
            src.srcId = String.valueOf(index);
            src.srcTag = textSrcTag.getText().trim();

            src.srcType = SrcSet.Src.SRC_TYPE_IMG;
            if (boxSrcType.getSelectedIndex() == 1) {
                src.srcType = SrcSet.Src.SRC_TYPE_TXT;
            }

            src.fitType = SrcSet.Src.FIT_TYPE_FITXY;
            if (boxFitType.getSelectedIndex() == 1) {
                src.fitType = SrcSet.Src.FIT_TYPE_CF;
            }

            src.srcPath = maskPath;

            if (SrcSet.Src.SRC_TYPE_TXT.equals(src.srcType)) {
                src.color = textTxtColor.getText().trim();
                if (checkTxtBold.isSelected()) {
                    src.style = SrcSet.Src.TEXT_STYLE_BOLD;
                }
            }

            if (src.srcTag == null || "".equals(src.srcTag)) {
                String msg = "id:" + index + " source tag is empty";
                TLog.e(TAG, msg);
                JOptionPane.showMessageDialog(panel, msg, "Error", JOptionPane.ERROR_MESSAGE);
                return null;
            }

            if (src.srcPath == null || "".equals(src.srcPath)) {
                String msg = "id:" + index + " mask path is empty";
                TLog.e(TAG, msg);
                JOptionPane.showMessageDialog(panel, msg, "Error", JOptionPane.ERROR_MESSAGE);
                return null;
            }
            return src;
        }

        /** Assembles the row: separator + three sub-rows (main, text opts, path). */
        private void createUI() {
            panel.setLayout(new BoxLayout(panel, BoxLayout.PAGE_AXIS));
            setMaskPath();
            panel.add(new JSeparator());
            panel.add(part1Layout());
            panel.add(part2Layout());
            panel.add(part3Layout());

        }

        /** Reflects the current mask path (or a red "empty") in the status label. */
        private void setMaskPath() {
            String text = maskPath == null? "<html><font color='red'>empty</font></html>" : maskPath;
            labelMaskPathState.setText(text);
        }

        /** Row 1: id, source tag, source type, fit type, and delete link. */
        public JPanel part1Layout() {
            JPanel panel = new JPanel();
            panel.setLayout(new FlowLayout(FlowLayout.LEFT));

            // index
            labelIndex.setText("id:" + index);
            panel.add(labelIndex);

            // srcTag
            panel.add(new JLabel(" source tag:"));
            textSrcTag.setPreferredSize(new Dimension(50, 20));
            textSrcTag.setText("tag" + index);
            panel.add(textSrcTag);

            // srcType
            panel.add(new JLabel(" source type:"));
            boxSrcType.setSelectedIndex(0);
            panel.add(boxSrcType);
            boxSrcType.addItemListener(new ItemListener() {
                @Override
                public void itemStateChanged(ItemEvent itemEvent) {
                    // Show text options only when "text" is selected.
                    txtPanel.setVisible(srcTypeArray[1].equals(itemEvent.getItem()));
                }
            });
            // fitType
            panel.add(new JLabel(" fit type:"));
            boxFitType.setSelectedIndex(0);
            panel.add(boxFitType);




            // delete
            JLabel labelDelete = new JLabel("<html><font color='red'>delete</font></html>");
            panel.add(labelDelete);
            labelDelete.addMouseListener(new MouseAdapter() {
                @Override
                public void mouseClicked(MouseEvent mouseEvent) {
                    if (listener != null) {
                        listener.onDelete(MaskUI.this);
                    }
                }
            });

            return panel;
        }


        /** Row 2: text color and bold checkbox; hidden for image sources. */
        private JPanel part2Layout() {
            JPanel panel = txtPanel;
            panel.setLayout(new FlowLayout(FlowLayout.LEFT));

            panel.add(new JLabel(" text color:"));
            textTxtColor.setPreferredSize(new Dimension(100, 20));
            textTxtColor.setText("#000000");
            panel.add(textTxtColor);

            panel.add(checkTxtBold);
            panel.setVisible(false);
            return panel;
        }


        /** Row 3: mask directory chooser plus the current-path status label. */
        private JPanel part3Layout() {
            JPanel panel = new JPanel();
            panel.setLayout(new FlowLayout(FlowLayout.LEFT));
            // mask path
            panel.add(new JLabel(" mask path:"));
            JButton btnMaskPath = new JButton("choose");
            panel.add(btnMaskPath);
            btnMaskPath.addActionListener(new ActionListener() {
                @Override
                public void actionPerformed(ActionEvent actionEvent) {
                    JFileChooser fileChooser = new JFileChooser(new File(toolUI.getInputPath()));
                    fileChooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY);
                    int returnVal = fileChooser.showOpenDialog(fileChooser);
                    if(returnVal == JFileChooser.APPROVE_OPTION) {
                        // Selected mask directory path (DIRECTORIES_ONLY chooser).
                        maskPath = fileChooser.getSelectedFile().getAbsolutePath();
                        setMaskPath();
                    }
                }
            });


            panel.add(labelMaskPathState);

            return panel;
        }
    }

    /** Callback from a row's delete link back to the owning VapxUI. */
    private interface IMaskUIListener {
        void onDelete(MaskUI maskUI);
    }

}

+ 79 - 0
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/vapx/FrameSet.java

@@ -0,0 +1,79 @@
+package com.tencent.qgame.playerproj.animtool.vapx;
+
+import com.tencent.qgame.playerproj.animtool.data.PointRect;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Vector;
+
+public class FrameSet {
+
+    // for sync
+    public Vector<FrameObj> frameObjs = new Vector<>();
+
+    @Override
+    public String toString() {
+        StringBuilder json = new StringBuilder();
+
+        json.append("\"frame\":[");
+        FrameSet.FrameObj frameObj;
+        for (int i=0; i<frameObjs.size(); i++) {
+            frameObj = frameObjs.get(i);
+            json.append(frameObj.toString());
+            if (i != frameObjs.size() - 1) {
+                json.append(",");
+            }
+        }
+
+        json.append("]");
+
+        return json.toString();
+    }
+
+    public static class FrameObj {
+        public List<Frame> frames = new ArrayList<>();
+        public int frameIndex = 0;
+
+        @Override
+        public String toString() {
+            StringBuilder json = new StringBuilder();
+
+            json.append("{");
+            json.append("\"i\":").append(frameIndex).append(",");
+            json.append("\"obj\":[");
+            FrameSet.Frame frame;
+            for (int i=0; i<frames.size(); i++) {
+                frame = frames.get(i);
+                json.append(frame.toString());
+                if (i != frames.size() - 1) {
+                    json.append(",");
+                }
+            }
+            json.append("]");
+            json.append("}");
+
+            return json.toString();
+        }
+    }
+
+    public static class Frame {
+        public String srcId = "";
+        public int z = 0;
+        public int mt = 0; // 旋转角度 目前只支持0
+        public PointRect frame = new PointRect(); // src位置
+        public PointRect mFrame = new PointRect(); // 遮罩区域
+
+
+        @Override
+        public String toString() {
+
+            return "{" +
+                    "\"srcId\":" + "\"" + srcId + "\"," +
+                    "\"z\":" + z + "," +
+                    "\"frame\":" + frame.toString() + "," +
+                    "\"mFrame\":" + mFrame.toString() + "," +
+                    "\"mt\":" + mt +
+                    "}";
+        }
+    }
+}

+ 208 - 0
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/vapx/GetMaskFrame.java

@@ -0,0 +1,208 @@
+package com.tencent.qgame.playerproj.animtool.vapx;
+
+import com.tencent.qgame.playerproj.animtool.CommonArg;
+import com.tencent.qgame.playerproj.animtool.TLog;
+import com.tencent.qgame.playerproj.animtool.data.PointRect;
+
+import java.awt.geom.AffineTransform;
+import java.awt.image.AffineTransformOp;
+import java.awt.image.BufferedImage;
+import java.io.File;
+
+import javax.imageio.ImageIO;
+
/**
 * Builds the fusion-animation mask metadata for one video frame: reads each
 * source's per-frame mask PNG, packs the masks into the spare region of the
 * output frame (next to the alpha area), writes the grayscale mask pixels
 * into {@code outputArgb}, and returns the placement records.
 */
public class GetMaskFrame {

    private static final String TAG = "GetMaskFrame";

    /**
     * Processes all sources for the given frame.
     *
     * @param frameIndex index of the video frame being generated
     * @param commonArg  tool configuration (layout, output size, source set)
     * @param outputArgb pixel buffer of the output frame; mask pixels are
     *                   written into it in place
     * @return placements for this frame, or null when no source had content
     */
    public FrameSet.FrameObj getFrameObj(int frameIndex, CommonArg commonArg, int[] outputArgb) throws Exception {

        FrameSet.FrameObj frameObj = new FrameSet.FrameObj();
        frameObj.frameIndex = frameIndex;

        FrameSet.Frame frame;
        // Starting position for mask packing: the free strip next to the
        // alpha area (to its right for vertical layout, below otherwise).
        int x;
        int y;
        int gap = commonArg.gap;
        if (commonArg.isVLayout) {
            x = commonArg.alphaPoint.w + gap;
            y = commonArg.alphaPoint.y;
        } else {
            x = commonArg.alphaPoint.x;
            y = commonArg.alphaPoint.h + gap;
        }
        int startX = x;
        int lastMaxY = y;
        for (int i=0; i<commonArg.srcSet.srcs.size(); i++) {
            frame = getFrame(frameIndex, commonArg.srcSet.srcs.get(i), outputArgb, commonArg.outputW, commonArg.outputH, x, y, startX, lastMaxY);
            if (frame == null) continue;
            // Advance the packing cursor past the mask just placed and keep
            // the row-wrap baseline (lastMaxY) at the tallest mask so far.
            x = frame.mFrame.x + frame.mFrame.w + gap;
            y = frame.mFrame.y;
            int newY = frame.mFrame.y + frame.mFrame.h + gap;
            if (newY > lastMaxY) {
                lastMaxY = newY;
            }

            frameObj.frames.add(frame);
        }

        if (frameObj.frames.isEmpty()) {
            return null;
        }
        return frameObj;
    }


    /**
     * Loads one source's mask PNG for this frame, finds its opaque bounding
     * box, packs it at (x,y) (wrapping/scaling when out of space), and blits
     * the grayscale mask into the output buffer.
     *
     * @return the placement, or null when the PNG is missing, fully
     *         transparent, or there is no vertical space left
     */
    private FrameSet.Frame getFrame(int frameIndex, SrcSet.Src src, int[] outputArgb, int outW, int outH, int x, int y, int startX, int lastMaxY) throws Exception {
        // Mask files are named <srcPath>NNN.png with a zero-padded frame index.
        File inputFile = new File(src.srcPath  + String.format("%03d", frameIndex)+".png");
        if (!inputFile.exists()) {
            return null;
        }

        BufferedImage inputBuf = ImageIO.read(inputFile);
        int maskW = inputBuf.getWidth();
        int maskH = inputBuf.getHeight();
        int[] maskArgb = inputBuf.getRGB(0, 0, maskW, maskH, null, 0, maskW);

        FrameSet.Frame frame = new FrameSet.Frame();
        frame.srcId = src.srcId;
        frame.z = src.z;

        frame.frame = getSrcFramePoint(maskArgb, maskW, maskH);
        if (frame.frame == null) {
            // File exists but every pixel is transparent -- nothing to place.
            return null;
        }

        PointRect maskPoint = new PointRect(
            frame.frame.x,
            frame.frame.y,
            frame.frame.w,
            frame.frame.h
        );

        PointRect mFrame = new PointRect(x, y, frame.frame.w, frame.frame.h);
        // Check whether the mask fits at the cursor position.
        if (mFrame.x + mFrame.w > outW) { // too wide: wrap to the next row
            mFrame.x = startX;
            mFrame.y = lastMaxY;
            if (mFrame.x + mFrame.w > outW) {
                // Still too wide even on a fresh row: shrink the mask to fit.
                float scale = (outW - mFrame.x) * 1f / mFrame.w;

                mFrame.w = outW - mFrame.x;
                mFrame.h = (int) (mFrame.h * scale);

                // Scale the source crop rectangle to match the shrunken mask.
                maskPoint.x = (int) (maskPoint.x * scale);
                maskPoint.y = (int) (maskPoint.y * scale);
                maskPoint.h = mFrame.h;
                maskPoint.w = mFrame.w;

                maskArgb = scaleMask(scale, inputBuf);

                TLog.w(TAG, "frameIndex=" + frameIndex + ",src=" + src.srcId + ", no more space for(w)" + mFrame + ",scale=" + scale);
            }
        }
        if (mFrame.y + mFrame.h > outH) { // not enough height: hard failure
            TLog.e(TAG, "frameIndex=" + frameIndex + ",src=" + src.srcId + ", no more space(h)" + mFrame);
            return null;
        }
        frame.mFrame = mFrame;

        fillMaskToOutput(outputArgb, outW, maskArgb, maskW, maskPoint, frame.mFrame);

        // Record the source's w/h as the maximum over all frames' masks.
        synchronized (GetMaskFrame.class) {
            // Compare by width only, to avoid the size flip-flopping between
            // frames whose width/height maxima disagree.
            if (frame.frame.w > src.w) {
                src.w = frame.frame.w;
                src.h = frame.frame.h;
            }
        }
        return frame;
    }

    /**
     * Scales the mask image by {@code scale} (bilinear). The result keeps the
     * original canvas size with the scaled content in the top-left corner, so
     * callers index it with scaled coordinates against the original width.
     */
    private int[] scaleMask(float scale, BufferedImage inputBuf) {
        AffineTransform at = new AffineTransform();
        at.scale(scale, scale);

        int w = inputBuf.getWidth();
        int h = inputBuf.getHeight();
        BufferedImage alphaBuf = new BufferedImage(w, h, BufferedImage.TYPE_INT_ARGB);
        AffineTransformOp scaleOp = new AffineTransformOp(at, AffineTransformOp.TYPE_BILINEAR);
        alphaBuf = scaleOp.filter(inputBuf, alphaBuf);

        return alphaBuf.getRGB(0, 0, w, h, null, 0, w);
    }

    /**
     * Finds the bounding box of all pixels with non-zero alpha.
     *
     * @return the opaque bounding box, or null when the image is fully
     *         transparent
     */
    private PointRect getSrcFramePoint(int[] maskArgb, int w, int h) {

        PointRect point = new PointRect();

        int minX = Integer.MAX_VALUE;
        int minY = Integer.MAX_VALUE;
        int maxX = 0;
        int maxY = 0;
        for (int y=0; y<h; y++) {
            for (int x = 0; x < w; x++) {
                int alpha = maskArgb[x + y*w] >>> 24;
                if (alpha > 0) {
                    if (x < minX) minX = x;
                    if (y < minY) minY = y;
                    if (x > maxX) maxX = x;
                    if (y > maxY) maxY = y;
                }
            }
        }

        point.x = minX;
        point.y = minY;
        point.w = maxX - minX + 1;
        point.h = maxY - minY + 1;
        if (point.w <=0 || point.h <= 0) return null;

        return point;

    }


    /**
     * Converts the mask crop to grayscale and copies it into the output
     * frame at the packed position.
     *
     * @param outputArgb output frame pixels (width {@code outW})
     * @param maskArgb   mask pixels (row stride {@code maskW})
     * @param frame      crop rectangle inside the mask image
     * @param mFrame     destination rectangle inside the output frame
     */
    private void fillMaskToOutput(int[] outputArgb, int outW,
                                  int[] maskArgb, int maskW,
                                  PointRect frame,
                                  PointRect mFrame) {
        for (int y=0; y < frame.h; y++) {
            for (int x=0; x < frame.w; x++) {
                int maskXOffset = frame.x;
                int maskYOffset = frame.y;
                // Sample the mask pixel first.
                int maskColor = maskArgb[x + maskXOffset + (y + maskYOffset) * maskW];
                // Black areas do not occlude; red areas do.
                int alpha = maskColor >>> 24;
                int maskRed = (maskColor & 0x00ff0000) >>> 16;
                int redAlpha = 255 - maskRed; // red component counts as occlusion
                alpha = (int) ((redAlpha / 255f) * (alpha / 255f) * 255f);
                // Final color: opaque gray where gray level encodes the mask alpha.
                int color = 0xff000000 + (alpha << 16) + (alpha << 8) + alpha;

                // Write the mask pixel into its packed slot in the video frame.
                int outputXOffset = mFrame.x;
                int outputYOffset = mFrame.y;
                outputArgb[x + outputXOffset + (y + outputYOffset) * outW] = color;

            }
        }
    }

}

+ 90 - 0
Android/PlayerProj/animtool/src/main/java/com/tencent/qgame/playerproj/animtool/vapx/SrcSet.java

@@ -0,0 +1,90 @@
+package com.tencent.qgame.playerproj.animtool.vapx;
+
+import java.util.ArrayList;
+import java.util.List;
+
/**
 * JSON model for the "src" section of a VAP config: the list of dynamic
 * resources (images or text) that get composited onto the animation.
 */
public class SrcSet {


    /** All declared sources, in render order. */
    public List<Src> srcs = new ArrayList<>();


    /** One dynamic resource: its type, load mode, fit mode, and text styling. */
    public static class Src {

        public static final String SRC_TYPE_IMG = "img";
        public static final String SRC_TYPE_TXT = "txt";

        public static final String LOAD_TYPE_NET = "net";
        public static final String LOAD_TYPE_LOC = "local";

        public static final String TEXT_STYLE_DEFAULT = "";
        public static final String TEXT_STYLE_BOLD = "b";

        public static final String FIT_TYPE_FITXY = "fitXY";
        public static final String FIT_TYPE_CF = "centerFull"; // same as centerCrop

        /**
         * src configuration
         */
        public String srcId = "";
        public String srcType = SRC_TYPE_IMG;
        public String loadType = LOAD_TYPE_NET;
        public String srcTag = "";
        public String color = "#000000"; // text color, only meaningful for txt sources
        public String style = TEXT_STYLE_DEFAULT;
        public int w = 0; // max mask width over all frames (set by the generator)
        public int h = 0; // height paired with the max-width frame
        public String fitType = FIT_TYPE_FITXY;

        /**
         * src auxiliary info (not serialized)
         */
        public String srcPath = "";
        public int z = 0; // render layer, follows input order

        /** Serializes as one JSON object; txt sources force local load type. */
        @Override
        public String toString() {
            StringBuilder json = new StringBuilder();
            json.append("{");
            json.append("\"srcId\":").append("\"").append(srcId).append("\",");
            json.append("\"srcType\":").append("\"").append(srcType).append("\",");
            json.append("\"srcTag\":").append("\"").append(srcTag.trim()).append("\",");
            if (SRC_TYPE_TXT.equals(srcType)) {
                // BUGFIX: was "color != null && color != null" (duplicated null
                // check); the second clause was clearly meant to skip blank
                // colors, otherwise an empty "color":"" entry was emitted.
                if (color != null && !color.trim().isEmpty()) {
                    json.append("\"color\":").append("\"").append(color.trim()).append("\",");
                }
                json.append("\"style\":").append("\"").append(style).append("\",");
                json.append("\"loadType\":").append("\"").append(LOAD_TYPE_LOC).append("\",");
            } else {
                json.append("\"loadType\":").append("\"").append(loadType).append("\",");
            }


            json.append("\"fitType\":").append("\"").append(fitType).append("\",");
            json.append("\"w\":").append(w).append(",");
            json.append("\"h\":").append(h);
            json.append("}");

            return json.toString();
        }
    }

    /** Serializes as {@code "src":[src,src,...]}. */
    @Override
    public String toString() {
        StringBuilder json = new StringBuilder();

        json.append("\"src\":[");
        Src src;
        for (int i=0; i<srcs.size(); i++) {
            src = srcs.get(i);
            json.append(src.toString());
            if (i != srcs.size() - 1) {
                json.append(",");
            }
        }

        json.append("]");

        return json.toString();
    }
}

+ 0 - 6
Android/PlayerProj/app/build.gradle

@@ -28,9 +28,3 @@ dependencies {
     implementation"org.jetbrains.kotlin:kotlin-stdlib-jdk7:$kotlin_version"
     implementation project(":animplayer")
 }
-
-project.afterEvaluate {
-    def assembleDebugTask = project.tasks.find { it.name == 'assembleDebug' }
-    def lintTask = project.tasks.find { it.name == 'lintDebug' }
-    assembleDebugTask.dependsOn(lintTask)
-}

+ 1 - 1
Android/PlayerProj/app/src/main/AndroidManifest.xml

@@ -22,7 +22,7 @@
         <activity android:name=".player.AnimSimpleDemoActivity" android:screenOrientation="portrait"/>
         <activity android:name=".player.AnimVapxDemoActivity" android:screenOrientation="portrait"/>
         <activity android:name=".player.AnimActiveDemoActivity" android:screenOrientation="portrait"/>
-
+        <activity android:name=".player.AnimSpecialSizeDemoActivity" android:screenOrientation="portrait"/>
 
     </application>
 

BIN
Android/PlayerProj/app/src/main/assets/special_size_750.mp4


+ 4 - 0
Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/MainActivity.kt

@@ -20,6 +20,7 @@ import android.content.Intent
 import android.os.Bundle
 import com.tencent.qgame.playerproj.player.AnimActiveDemoActivity
 import com.tencent.qgame.playerproj.player.AnimSimpleDemoActivity
+import com.tencent.qgame.playerproj.player.AnimSpecialSizeDemoActivity
 import com.tencent.qgame.playerproj.player.AnimVapxDemoActivity
 import kotlinx.android.synthetic.main.activity_main.*
 
@@ -38,6 +39,9 @@ class MainActivity : Activity(){
         btn3.setOnClickListener {
             startActivity(Intent(this, AnimActiveDemoActivity::class.java))
         }
+        btn4.setOnClickListener {
+            startActivity(Intent(this, AnimSpecialSizeDemoActivity::class.java))
+        }
     }
 
 

+ 7 - 15
Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimActiveDemoActivity.kt

@@ -18,7 +18,6 @@ package com.tencent.qgame.playerproj.player
 import android.app.Activity
 import android.content.Context
 import android.graphics.Bitmap
-import android.graphics.BitmapFactory
 import android.os.Bundle
 import android.os.Environment
 import android.os.Handler
@@ -26,23 +25,19 @@ import android.os.Looper
 import android.util.Base64
 import android.util.Log
 import android.view.View
-import android.widget.Toast
 import com.tencent.qgame.animplayer.AnimConfig
 import com.tencent.qgame.animplayer.AnimView
 import com.tencent.qgame.animplayer.PointRect
 import com.tencent.qgame.animplayer.RefVec2
 import com.tencent.qgame.animplayer.inter.IAnimListener
-import com.tencent.qgame.animplayer.inter.IFetchResource
-import com.tencent.qgame.animplayer.inter.OnResourceClickListener
 import com.tencent.qgame.animplayer.mask.MaskConfig
-import com.tencent.qgame.animplayer.mix.Resource
 import com.tencent.qgame.animplayer.util.ALog
 import com.tencent.qgame.animplayer.util.IALog
+import com.tencent.qgame.animplayer.util.ScaleType
 import com.tencent.qgame.playerproj.R
 import kotlinx.android.synthetic.main.activity_anim_simple_demo.*
 import java.io.File
 import java.nio.ByteBuffer
-import java.util.*
 import java.util.zip.Inflater
 import kotlin.experimental.and
 import kotlin.math.sqrt
@@ -107,9 +102,10 @@ class AnimActiveDemoActivity : Activity(), IAnimListener {
         initTestView()
         // 获取动画view
         animView = playerView
-
+        // 居中(根据父布局按比例居中并全部显示)
+        animView.setScaleType(ScaleType.FIT_CENTER)
+        // 启用过滤遮罩
         animView.supportMask(true, true)
-
         // 注册动画监听
         animView.setAnimListener(this)
         /**
@@ -141,13 +137,6 @@ class AnimActiveDemoActivity : Activity(), IAnimListener {
      */
     override fun onVideoConfigReady(config: AnimConfig): Boolean {
         updateTestMask()
-        uiHandler.post {
-            val w = window.decorView.width
-            val lp = animView.layoutParams
-            lp.width = if (w == 0) dp2px(this, 400f).toInt() else w
-            lp.height = (w * config.height * 1f / config.width).toInt()
-            animView.layoutParams = lp
-        }
         return true
     }
 
@@ -245,6 +234,9 @@ class AnimActiveDemoActivity : Activity(), IAnimListener {
         }
     }
 
+    /**
+     * 将Base64的bitmap转换为真正的bitmap
+     */
     private fun handleDepthMaskData(compressedBase64Data: String) : Bitmap? {
         var zipInflater : Inflater?= null
         try {

+ 3 - 8
Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimSimpleDemoActivity.kt

@@ -28,6 +28,7 @@ import com.tencent.qgame.animplayer.AnimView
 import com.tencent.qgame.animplayer.inter.IAnimListener
 import com.tencent.qgame.animplayer.util.ALog
 import com.tencent.qgame.animplayer.util.IALog
+import com.tencent.qgame.animplayer.util.ScaleType
 import com.tencent.qgame.playerproj.R
 import kotlinx.android.synthetic.main.activity_anim_simple_demo.*
 import java.io.File
@@ -73,6 +74,8 @@ class AnimSimpleDemoActivity : Activity(), IAnimListener {
         initTestView()
         // 获取动画view
         animView = playerView
+        // 居中(根据父布局按比例居中并全部显示,默认fitXY)
+        animView.setScaleType(ScaleType.FIT_CENTER)
         // 注册动画监听
         animView.setAnimListener(this)
         /**
@@ -104,14 +107,6 @@ class AnimSimpleDemoActivity : Activity(), IAnimListener {
      * @return true 继续播放 false 停止播放
      */
     override fun onVideoConfigReady(config: AnimConfig): Boolean {
-
-        uiHandler.post {
-            val w = dp2px(this,400f).toInt()
-            val lp = animView.layoutParams
-            lp.width = w
-            lp.height = (w * config.height *1f / config.width).toInt()
-            animView.layoutParams = lp
-        }
         return true
     }
 

+ 228 - 0
Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimSpecialSizeDemoActivity.kt

@@ -0,0 +1,228 @@
+/*
+ * Tencent is pleased to support the open source community by making vap available.
+ *
+ * Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+ *
+ * Licensed under the MIT License (the "License"); you may not use this file except in
+ * compliance with the License. You may obtain a copy of the License at
+ *
+ * http://opensource.org/licenses/MIT
+ *
+ * Unless required by applicable law or agreed to in writing, software distributed under the License is
+ * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+ * either express or implied. See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package com.tencent.qgame.playerproj.player
+
+import android.app.Activity
+import android.content.Context
+import android.os.Bundle
+import android.os.Environment
+import android.os.Handler
+import android.os.Looper
+import android.util.Log
+import android.view.View
+import com.tencent.qgame.animplayer.AnimConfig
+import com.tencent.qgame.animplayer.AnimView
+import com.tencent.qgame.animplayer.Constant
+import com.tencent.qgame.animplayer.inter.IAnimListener
+import com.tencent.qgame.animplayer.util.ALog
+import com.tencent.qgame.animplayer.util.IALog
+import com.tencent.qgame.animplayer.util.ScaleType
+import com.tencent.qgame.playerproj.R
+import kotlinx.android.synthetic.main.activity_anim_simple_demo.*
+import java.io.File
+
+/**
+ * 播放宽高不是16的倍数的特殊尺寸的动画demo,这里以special_size_750.mp4为例,size = 750 x 814
+ */
+class AnimSpecialSizeDemoActivity : Activity(), IAnimListener {
+
+    companion object {
+        private const val TAG = "AnimSpecialSizeActivity"
+    }
+
+    private val dir by lazy {
+        // 存放在sdcard应用缓存文件中
+        getExternalFilesDir(null)?.absolutePath ?: Environment.getExternalStorageDirectory().path
+    }
+
+    // 视频信息
+    data class VideoInfo(val fileName: String,val md5:String)
+
+    // ps:每次修改mp4文件,但文件名不变,记得先卸载app,因为assets同名文件不会进行替换
+    private val videoInfo = VideoInfo("special_size_750.mp4", "2acde1639ad74b8bd843083246902e23")
+
+    // 动画View
+    private lateinit var animView: AnimView
+
+    private val uiHandler by lazy {
+        Handler(Looper.getMainLooper())
+    }
+
+    override fun onCreate(savedInstanceState: Bundle?) {
+        super.onCreate(savedInstanceState)
+        setContentView(R.layout.activity_anim_simple_demo)
+        // 文件加载完成后会调用init方法
+        loadFile()
+    }
+
+    private fun init() {
+        // 初始化日志
+        initLog()
+        // 初始化调试开关
+        initTestView()
+        // 获取动画view
+        animView = playerView
+        // 视频左右对齐(rgb左\alpha右)
+        animView.setVideoMode(Constant.VIDEO_MODE_SPLIT_HORIZONTAL_REVERSE)
+        // 兼容老版本视频资源
+        animView.enableVersion1(true)
+        // 居中(根据父布局按比例居中并全部显示,默认fitXY)
+        animView.setScaleType(ScaleType.FIT_CENTER)
+        // 注册动画监听
+        animView.setAnimListener(this)
+        /**
+         * 开始播放主流程
+         * ps: 主要流程都是对AnimView的操作,其它比如队列,或改变窗口大小等操作都不是必须的
+         */
+        play(videoInfo)
+    }
+
+
+    private fun play(videoInfo: VideoInfo) {
+        // 播放前强烈建议检查文件的md5是否有改变
+        // 因为下载或文件存储过程中会出现文件损坏,导致无法播放
+        Thread {
+            val file = File(dir + "/" + videoInfo.fileName)
+            val md5 = FileUtil.getFileMD5(file)
+            if (videoInfo.md5 == md5) {
+                // 开始播放动画文件
+                animView.startPlay(file)
+            } else {
+                Log.e(TAG, "md5 is not match, error md5=$md5")
+            }
+        }.start()
+    }
+
+
+    /**
+     * 视频信息准备好后的回调,用于检查视频准备好后是否继续播放
+     * @return true 继续播放 false 停止播放
+     */
+    override fun onVideoConfigReady(config: AnimConfig): Boolean {
+
+        uiHandler.post {
+            val w = dp2px(this,400f).toInt()
+            val lp = animView.layoutParams
+            lp.width = w
+            lp.height = (w * config.height *1f / config.width).toInt()
+            animView.layoutParams = lp
+        }
+        return true
+    }
+
+    /**
+     * 视频开始回调
+     */
+    override fun onVideoStart() {
+        Log.i(TAG, "onVideoStart")
+    }
+
+    /**
+     * 视频渲染每一帧时的回调
+     * @param frameIndex 帧索引
+     */
+    override fun onVideoRender(frameIndex: Int, config: AnimConfig?) {
+    }
+
+    /**
+     * 视频播放结束(失败也会回调onComplete)
+     */
+    override fun onVideoComplete() {
+        Log.i(TAG, "onVideoComplete")
+    }
+
+    /**
+     * 播放器被销毁情况下会调用onVideoDestroy
+     */
+    override fun onVideoDestroy() {
+        Log.i(TAG, "onVideoDestroy")
+    }
+
+    /**
+     * 失败回调
+     * 一次播放时可能会调用多次,建议onFailed只做错误上报
+     * @param errorType 错误类型
+     * @param errorMsg 错误消息
+     */
+    override fun onFailed(errorType: Int, errorMsg: String?) {
+        Log.i(TAG, "onFailed errorType=$errorType errorMsg=$errorMsg")
+    }
+
+
+
+    override fun onPause() {
+        super.onPause()
+        // 页面切换时停止播放
+        animView.stopPlay()
+    }
+
+
+    private fun initLog() {
+        ALog.isDebug = false
+        ALog.log = object : IALog {
+            override fun i(tag: String, msg: String) {
+                Log.i(tag, msg)
+            }
+
+            override fun d(tag: String, msg: String) {
+                Log.d(tag, msg)
+            }
+
+            override fun e(tag: String, msg: String) {
+                Log.e(tag, msg)
+            }
+
+            override fun e(tag: String, msg: String, tr: Throwable) {
+                Log.e(tag, msg, tr)
+            }
+        }
+    }
+
+
+    private fun initTestView() {
+        btnLayout.visibility = View.VISIBLE
+        /**
+         * 开始播放按钮
+         */
+        btnPlay.setOnClickListener {
+            play(videoInfo)
+        }
+        /**
+         * 结束视频按钮
+         */
+        btnStop.setOnClickListener {
+            animView.stopPlay()
+        }
+    }
+
+    private fun loadFile() {
+        val files = Array(1) {
+            videoInfo.fileName
+        }
+        FileUtil.copyAssetsToStorage(this, dir, files) {
+            uiHandler.post {
+                init()
+            }
+        }
+    }
+
+
+    private fun dp2px(context: Context, dp: Float): Float {
+        val scale = context.resources.displayMetrics.density
+        return dp * scale + 0.5f
+    }
+}
+

+ 12 - 14
Android/PlayerProj/app/src/main/java/com/tencent/qgame/playerproj/player/AnimVapxDemoActivity.kt

@@ -34,6 +34,7 @@ import com.tencent.qgame.animplayer.inter.OnResourceClickListener
 import com.tencent.qgame.animplayer.mix.Resource
 import com.tencent.qgame.animplayer.util.ALog
 import com.tencent.qgame.animplayer.util.IALog
+import com.tencent.qgame.animplayer.util.ScaleType
 import com.tencent.qgame.playerproj.R
 import kotlinx.android.synthetic.main.activity_anim_simple_demo.*
 import java.io.File
@@ -69,6 +70,8 @@ class AnimVapxDemoActivity : Activity(), IAnimListener {
         Handler(Looper.getMainLooper())
     }
 
+    private var lastToast: Toast? = null
+
     override fun onCreate(savedInstanceState: Bundle?) {
         super.onCreate(savedInstanceState)
         setContentView(R.layout.activity_anim_simple_demo)
@@ -83,6 +86,8 @@ class AnimVapxDemoActivity : Activity(), IAnimListener {
         initTestView()
         // 获取动画view
         animView = playerView
+        // 居中(根据父布局按比例居中并裁剪)
+        animView.setScaleType(ScaleType.CENTER_CROP)
         /**
          * 注册资源获取类
          */
@@ -98,8 +103,7 @@ class AnimVapxDemoActivity : Activity(), IAnimListener {
                  * 比如:一个素材里需要显示多个头像,则需要定义多个不同的tag,表示不同位置,需要显示不同的头像,文字类似
                  */
                 val srcTag = resource.tag
-
-                if (srcTag == "[sImg1]") { // 此tag是已经写入到动画配置中的tag
+                if (srcTag.isNotEmpty()) {
                     val drawableId = if (head1Img) R.drawable.head1 else R.drawable.head2
                     head1Img = !head1Img
                     val options = BitmapFactory.Options()
@@ -116,8 +120,7 @@ class AnimVapxDemoActivity : Activity(), IAnimListener {
             override fun fetchText(resource: Resource, result: (String?) -> Unit) {
                 val str = "恭喜 No.${1000 + Random().nextInt(8999)}用户 升神"
                 val srcTag = resource.tag
-
-                if (srcTag == "[sTxt1]") { // 此tag是已经写入到动画配置中的tag
+                if (srcTag.isNotEmpty()) { // 此tag是已经写入到动画配置中的tag
                     result(str)
                 } else {
                     result(null)
@@ -137,11 +140,13 @@ class AnimVapxDemoActivity : Activity(), IAnimListener {
         // 注册点击事件监听
         animView.setOnResourceClickListener(object : OnResourceClickListener {
             override fun onClick(resource: Resource) {
-                Toast.makeText(
+                lastToast?.cancel()
+                lastToast = Toast.makeText(
                     this@AnimVapxDemoActivity,
-                    "srcTag=${resource.tag} onClick",
+                    "srcTag=${resource.tag} onClick ${resource.curPoint}",
                     Toast.LENGTH_LONG
-                ).show()
+                )
+                lastToast?.show()
             }
         })
 
@@ -175,13 +180,6 @@ class AnimVapxDemoActivity : Activity(), IAnimListener {
      * @return true 继续播放 false 停止播放
      */
     override fun onVideoConfigReady(config: AnimConfig): Boolean {
-        uiHandler.post {
-            val w = window.decorView.width
-            val lp = animView.layoutParams
-            lp.width = if (w == 0) dp2px(this, 400f).toInt() else w
-            lp.height = (w * config.height * 1f / config.width).toInt()
-            animView.layoutParams = lp
-        }
         return true
     }
 

+ 2 - 2
Android/PlayerProj/app/src/main/res/layout/activity_anim_simple_demo.xml

@@ -30,8 +30,8 @@
     
     <com.tencent.qgame.animplayer.AnimView
         android:id="@+id/playerView"
-        android:layout_width="400dp"
-        android:layout_height="400dp"
+        android:layout_width="match_parent"
+        android:layout_height="match_parent"
         android:layout_gravity="center"/>
     
 </FrameLayout>

+ 25 - 20
Android/PlayerProj/app/src/main/res/layout/activity_main.xml

@@ -10,30 +10,35 @@
     <LinearLayout
         android:layout_width="wrap_content"
         android:layout_height="wrap_content"
-        android:orientation="vertical"
         android:layout_gravity="center"
-        >
+        android:orientation="vertical">
+
+        <Button
+            android:id="@+id/btn1"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="Simple Demo" />
+
+        <Button
+            android:id="@+id/btn2"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="VAPX Demo(融合动画)" />
+
+        <Button
+            android:id="@+id/btn3"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="Active Demo(可变动画)" />
+
+        <Button
+            android:id="@+id/btn4"
+            android:layout_width="wrap_content"
+            android:layout_height="wrap_content"
+            android:text="Special Size Demo" />
 
-    <Button
-        android:id="@+id/btn1"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:text="Simple Demo" />
-
-    <Button
-        android:id="@+id/btn2"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:text="VAPX Demo(融合动画)" />
-
-    <Button
-        android:id="@+id/btn3"
-        android:layout_width="wrap_content"
-        android:layout_height="wrap_content"
-        android:text="Active Demo(可变动画)" />
     </LinearLayout>
 
-
 </FrameLayout>
 
 

+ 6 - 4
Android/PlayerProj/build.gradle

@@ -4,13 +4,14 @@ buildscript {
     ext.kotlin_version = '1.3.50'
     repositories {
         google()
-        jcenter()
+        // jcenter()
+        mavenCentral()
     }
     dependencies {
         classpath 'com.android.tools.build:gradle:3.2.1'
         classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"
-        classpath 'com.jfrog.bintray.gradle:gradle-bintray-plugin:1.8.4'
-        classpath 'com.github.dcendents:android-maven-gradle-plugin:2.1'
+        // classpath 'com.jfrog.bintray.gradle:gradle-bintray-plugin:1.8.4'
+        // classpath 'com.github.dcendents:android-maven-gradle-plugin:2.1'
         // NOTE: Do not place your application dependencies here; they belong
         // in the individual module build.gradle files
     }
@@ -19,7 +20,8 @@ buildscript {
 allprojects {
     repositories {
         google()
-        jcenter()
+        // jcenter()
+        mavenCentral()
     }
     tasks.withType(Javadoc).all { enabled = false }
 }

+ 0 - 15
Android/PlayerProj/gradle.properties

@@ -1,15 +0,0 @@
-# Project-wide Gradle settings.
-# IDE (e.g. Android Studio) users:
-# Gradle settings configured through the IDE *will override*
-# any settings specified in this file.
-# For more details on how to configure your build environment visit
-# http://www.gradle.org/docs/current/userguide/build_environment.html
-# Specifies the JVM arguments used for the daemon process.
-# The setting is particularly useful for tweaking memory settings.
-org.gradle.jvmargs=-Xmx1536m
-# When configured, Gradle will run in incubating parallel mode.
-# This option should only be used with decoupled projects. More details, visit
-# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
-# org.gradle.parallel=true
-# Kotlin code style for this project: "official" or "obsolete":
-kotlin.code.style=official

+ 99 - 0
Android/PlayerProj/publish-mavencentral.gradle

@@ -0,0 +1,99 @@
+apply plugin: 'maven'
+apply plugin: 'signing'
+
+def sonatypeRepositoryUrl
+
+def isReleaseBuild() {
+    return VERSION_NAME.contains("SNAPSHOT") == false
+}
+if (isReleaseBuild()) {
+    println 'RELEASE BUILD'
+    sonatypeRepositoryUrl = "https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/"
+} else {
+    println 'DEBUG BUILD'
+    sonatypeRepositoryUrl = "https://s01.oss.sonatype.org/content/repositories/snapshots/"
+}
+
+def getRepositoryUsername() {
+    return hasProperty('nexusUsername') ? nexusUsername : ""
+}
+
+def getRepositoryPassword() {
+    return hasProperty('nexusPassword') ? nexusPassword : ""
+}
+
+afterEvaluate { project ->
+    uploadArchives {
+        repositories {
+            mavenDeployer {
+                beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) }
+                pom.groupId = GROUP_ID
+                pom.artifactId = POM_ARTIFACT_ID
+                pom.version = VERSION_NAME
+
+                repository(url: sonatypeRepositoryUrl) {
+                    authentication(userName: getRepositoryUsername(), password: getRepositoryPassword())
+                }
+
+                pom.project {
+                    name POM_NAME
+                    packaging POM_PACKAGING
+                    description POM_DESCRIPTION
+                    url POM_URL
+
+                    scm {
+                        url POM_SCM_URL
+                        connection POM_SCM_CONNECTION
+                        developerConnection POM_SCM_DEV_CONNECTION
+                    }
+
+                    licenses {
+                        license {
+                            name POM_LICENCE_NAME
+                            url POM_LICENCE_URL
+                            distribution POM_LICENCE_DIST
+                        }
+                    }
+
+                    developers {
+                        developer {
+                            id POM_DEVELOPER_ID
+                            name POM_DEVELOPER_NAME
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    signing {
+        required { isReleaseBuild() && gradle.taskGraph.hasTask("uploadArchives") }
+        println 'Signing archives...'
+        sign configurations.archives
+    }
+
+    task androidJavadocs(type: Javadoc) {
+        source = android.sourceSets.main.java.sourceFiles
+        options {
+            links "http://docs.oracle.com/javase/7/docs/api/"
+            linksOffline "http://d.android.com/reference", "${android.sdkDirectory}/docs/reference"
+        }
+        classpath += project.android.libraryVariants.toList().first().javaCompile.classpath
+        classpath += project.files(android.getBootClasspath().join(File.pathSeparator))
+    }
+
+    task androidJavadocsJar(type: Jar, dependsOn: androidJavadocs) {
+        classifier = 'javadoc'
+        from androidJavadocs.destinationDir
+    }
+
+    task androidSourcesJar(type: Jar) {
+        classifier = 'sources'
+        from android.sourceSets.main.java.sourceFiles
+    }
+
+    artifacts {
+        archives androidSourcesJar
+        archives androidJavadocsJar
+    }
+}

+ 9 - 2
Android/README.md

@@ -15,6 +15,8 @@ IAnimListener 视频播放过程中的回调方法
 private fun init() {
     // 获取视频播放 AnimView
     animView = (AnimView) findViewById(R.id.player);
+    // 可选: 设置视频对齐方式(默认FIT_XY,支持自定义)
+    animView.setScaleType(ScaleType.FIT_XY)
     // 开始播放动画文件
     animView.startPlay(file)
 }
@@ -93,6 +95,9 @@ interface OnResourceClickListener {
 }
 ```
 
+### 老版本兼容
+
+老版本动画播放可以参考: AnimSpecialSizeDemoActivity
 
 ### 引入方式
 
@@ -100,14 +105,16 @@ maven方式引入
 
 ```gradle
 repositories {
-    jcenter()
+    mavenCentral()
 }
 
 dependencies {
-    implementation "com.egame.vap:animplayer:2.0.10"
+    implementation "io.github.tencent:vap:2.0.28"
 }
 ```
 
+通知:由于jcenter将于2022年停止服务,VAP迁移到mavenCentral(从2.0.15版本开始)。如果需要低版本(低于2.0.15版本)需要自行切换到对应版本的release tag,打aar包的方式进行集成。
+
 aar引入
 
 需要自己打包animplayer项目为aar(PlayerProj/animplayer)

BIN
Android/aar/vap_2.0.6.aar


+ 0 - 11
Introduction.md

@@ -106,17 +106,6 @@ VAP还支持在动画中融入自定义属性,比如用户名称, 头像。我
 ![](./images/pic5.png)
 
 
-## 已接入APP
-
-
-![](./images/icons.png)
-
-## QQ交流群
-遇到任何问题或者有好的建议,欢迎提issues,或者加入QQ群交流
-
-![](./images/qq_group.png)
-
-
 ## License
 
 VAP is under the MIT license. See the LICENSE file for details.

+ 1 - 0
MavenCentral.md

@@ -0,0 +1 @@
+This file is used to verify. OSSRH-67862

+ 7 - 6
QGVAPlayer.podspec

@@ -16,7 +16,7 @@ Pod::Spec.new do |spec|
   #
 
   spec.name         = "QGVAPlayer"
-  spec.version      = "iOS1.0.3"
+  spec.version      = "1.0.19"
   spec.summary      = "video animation player."
   spec.platform     = :ios, "8.0"
 
@@ -60,8 +60,8 @@ Pod::Spec.new do |spec|
   #  profile URL.
   #
 
-  spec.author             = { "mlzhangyu" => "mlzhangyu@gmail.com" }
-  # Or just: spec.author    = "mlzhangyu"
+  spec.author             = { "tencent" => "tencent@gmail.com" }
+  # Or just: spec.author    = "tencent"
 
   # ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
   #
@@ -85,7 +85,7 @@ Pod::Spec.new do |spec|
   #  Supports git, hg, bzr, svn and HTTP.
   #
 
-  spec.source       = { :git => "https://github.com/Tencent/vap.git", :tag => "#{spec.version}"}
+  spec.source       = { :git => "https://github.com/Tencent/vap.git", :tag => "iOS#{spec.version}"}
 
 
   # ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
@@ -96,7 +96,7 @@ Pod::Spec.new do |spec|
   #  Not including the public_header_files will make all headers public.
   #
 
-  spec.source_files = 'iOS/QGVAPlayer/QGVAPlayer/**/*.{h,m}'
+  spec.source_files = 'iOS/QGVAPlayer/QGVAPlayer/**/*.{h,m}', 'iOS/QGVAPlayer/QGVAPlayer/Shaders/QGHWDShaders.metal'
 
   # spec.subspec 'Shaders' do |ss|
   #   ss.source_files = 'iOS/QGVAPlayer/QGVAPlayer/Shaders/**/*.{h,m}'
@@ -122,6 +122,7 @@ Pod::Spec.new do |spec|
   # end
 
 
+
   # spec.exclude_files = "Classes/Exclude"
 
   #spec.public_header_files = "iOS/QGVAPlayer/QGVAPlayer/**/*.h"
@@ -148,7 +149,7 @@ Pod::Spec.new do |spec|
   #
 
   # spec.framework  = "SomeFramework"
-  # spec.frameworks = "SomeFramework", "AnotherFramework"
+  spec.frameworks = "Metal", "MetalKit"
 
   # spec.library   = "iconv"
   # spec.libraries = "iconv", "xml2"

+ 17 - 2
README.md

@@ -2,6 +2,8 @@
 
 [![License](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](http://opensource.org/licenses/MIT)
 
+简体中文 | [English](./README_en.md)
+
 VAP(Video Animation Player)是企鹅电竞开发,用于播放酷炫动画的实现方案。
 
 * 相比Webp, Apng动图方案,具有高压缩率(素材更小)、硬件解码(解码更快)的优点
@@ -43,15 +45,28 @@ VAP|***1.5M***|***硬解***|***全支持***
 
 支持:[Android](./Android), [iOS](./iOS), [web](./web). 接入说明在对应平台目录中
 
-为大家更方便的使用组件,还有配套的素材制作工具[tool](./tool) 
+素材制作工具:[VapTool](./tool) (工具使用说明在tool目录下)
 
+播放预览工具:[Mac](https://github.com/Tencent/vap/releases/download/VapPreview1.2.0/vap-player_mac_1.2.0.zip), [Windows](https://github.com/Tencent/vap/releases/download/VapPreview1.2.0/vap-player_1.2.0.exe)
 
 
 ## QQ交流群
+
 遇到任何问题或者有好的建议,欢迎提issues,或者加入QQ群交流
 
-![](./images/qq_group.png)
+VAP交流群:719738292
+
+
+## FAQ
+
+[常见问题解答](https://github.com/Tencent/vap/wiki/FAQ)
+
+
+## 已接入APP
+
+![VAP接入APP](https://user-images.githubusercontent.com/3285051/211699577-58c70501-02c4-440a-a3a8-2ba7cbec6da7.png)
 
+需要显示自己App图标,可以加入QQ群,群公告里有App图标加入说明
 
 ## License
 

+ 64 - 0
README_en.md

@@ -0,0 +1,64 @@
+# VAP
+
+
+[![License](https://img.shields.io/badge/license-MIT-blue.svg?style=flat)](http://opensource.org/licenses/MIT)
+
+VAP(Video Animation Player) is a fantastic animation player. It can play video with alpha channel.
+
+* Compared with Webp or Apng animation, it has the advantages of high compression rate (smaller material) and hardware decoding.
+
+
+* Compared with Lottie, it can achieve more complex animation effects (such as particle effects)
+
+
+More detail: [Introduction.md](./Introduction.md)
+
+
+Demo show:
+
+[Web demo](https://egame.qq.com/vap)
+
+![](./images/anim1.gif)
+
+And VAP can also merge custom attributes (such as user name, avatar) into the animation.
+
+![](./images/anim2.gif)
+
+
+
+## Performance
+
+
+-|file size|decoder|effects support
+---|---|---|---
+Lottie|can't generate|software decoder|not support particle effects
+GIF|4.6M|software decoder|only support 8 bit color format
+Apng|10.6M|software decoder|all support
+Webp|9.2M|software decoder|all support
+mp4|1.5M|hardware decoder|not support alpha channel
+VAP|***1.5M***|***hardware decoder***|***all support***
+
+
+More detail: [Introduction.md](./Introduction.md)
+
+
+## Platform support
+
+Platform:[Android](./Android), [iOS](./iOS), [web](./web). 
+
+Generation tool:[VapTool](./tool)
+
+Preview tool:[Mac](https://github.com/Tencent/vap/releases/download/VapPreview0.1.0/vap-player-0.1.0.dmg), [Windows](https://github.com/Tencent/vap/releases/download/VapPreview0.1.0/vap-player_0.1.0.exe)
+
+
+## Issue
+
+If you have some problems with VAP, you can post issues. Developer will check often.
+
+## FAQ
+
+[FAQ](https://github.com/Tencent/vap/wiki/FAQ)
+
+## License
+
+VAP is under the MIT license. See the [LICENSE](./LICENSE.txt) file for details.

+ 68 - 0
iOS/.gitignore

@@ -0,0 +1,68 @@
+# Xcode
+#
+# gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore
+
+## User settings
+xcuserdata/
+
+## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9)
+*.xcscmblueprint
+*.xccheckout
+
+## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4)
+build/
+DerivedData/
+*.moved-aside
+*.pbxuser
+!default.pbxuser
+*.mode1v3
+!default.mode1v3
+*.mode2v3
+!default.mode2v3
+*.perspectivev3
+!default.perspectivev3
+
+## Obj-C/Swift specific
+*.hmap
+
+## App packaging
+*.ipa
+*.dSYM.zip
+*.dSYM
+
+# CocoaPods
+#
+# We recommend against adding the Pods directory to your .gitignore. However
+# you should judge for yourself, the pros and cons are mentioned at:
+# https://guides.cocoapods.org/using/using-cocoapods.html#should-i-check-the-pods-directory-into-source-control
+#
+# Pods/
+#
+# Add this line if you want to avoid checking in source code from the Xcode workspace
+# *.xcworkspace
+
+# Carthage
+#
+# Add this line if you want to avoid checking in source code from Carthage dependencies.
+# Carthage/Checkouts
+
+Carthage/Build/
+
+# fastlane
+#
+# It is recommended to not store the screenshots in the git repo.
+# Instead, use fastlane to re-generate the screenshots whenever they are needed.
+# For more information about the recommended setup visit:
+# https://docs.fastlane.tools/best-practices/source-control/#source-control
+
+fastlane/report.xml
+fastlane/Preview.html
+fastlane/screenshots/**/*.png
+fastlane/test_output
+
+# Code Injection
+#
+# After new code Injection tools there's a generated folder /iOSInjectionProject
+# https://github.com/johnno1962/injectionforxcode
+
+iOSInjectionProject/

+ 88 - 0
iOS/CHANGELOG.md

@@ -0,0 +1,88 @@
+## iOS 1.0.19
+
+**bugfix**
+
+- 修复mp4解析length为0异常。
+
+## iOS 1.0.18
+
+**bugfix**
+
+- 修复部分渲染宏与GPUImage冲突。
+
+## iOS 1.0.17
+
+**bugfix**
+
+- 修复mp4解析length异常。
+
+## iOS 1.0.16
+
+**bugfix**
+
+- MTLRenderCommandEncoder释放前需要调用endEncoding方法。
+- 修改QGMP4FrameHWDecoder在解码停止调用onInputEnd为_onInputEnd,即将停止任务立即执行,避免在低端机上解码性能太差,停止任务未及时执行导致finishFrameIndex设置有误陷入渲染死循环。
+
+## iOS 1.0.15
+
+**bugfix**
+
+- 修改SRGB格式的图像渲染后颜色变深[#issue124](https://github.com/Tencent/vap/issues/124)
+
+**feature**
+
+- UIView(VAP)及QGVAPWrapView 增加setMute接口,设置是否静音播放素材,注:在播放开始时进行设置,播放过程中设置无效,循环播放则设置后的下一次播放开始生效
+
+## iOS 1.0.14
+
+**bugfix**
+
+- 修改vap 取默认帧率的逻辑,添加从vapc box获取帧率操作,若vapc box取到帧率为0,则继续沿用旧有的逻辑,即利用帧数与时长计算帧率
+- 修复MP4Parser解析box长度逻辑不完成导致解析box异常,无法播放素材问题[#issue133](https://github.com/Tencent/vap/issues/133)
+- UIView(VAP) 增加enableOldVersion接口,若素材非vap工具制作(不包含vapc box),则必须在播放前调用此接口设置enable,才可播放
+
+
+## iOS 1.0.13
+
+**feature**
+
+- 暂停时音频播放跟随暂停
+
+**bugfix**
+
+- 修复AVAudioPlayer被释放后可能导致野指针crash的问题
+
+
+
+
+## iOS 1.0.12
+
+**bugfix**
+
+- 修复暂停时CPU上升的问题(在退后台的场景下会导致CPU上涨约1s然后下降,形成一个尖刺)
+
+
+
+## iOS 1.0.11
+
+**feature**
+
+- UIView(VAP) 新增 hwd_enterBackgroundOP,退后台时可以控制是暂停/结束行为[#issue102](https://github.com/Tencent/vap/issues/102)
+- QGVAPWrapView 补齐 stop/pause/resume功能,并修改了方法的命名
+
+
+
+## iOS 1.0.10
+
+**bugfix**
+
+- 解决退后台后回复可能出现花屏的问题
+
+
+
+## iOS 1.0.9
+
+**feature**
+
+- 添加VTSession失效时的重建逻辑(Seek关键帧,解码并丢弃,直到当前帧)
+- 将VAP默认行为由退后台时结束播放改为退后台时暂停,进入前台时恢复

+ 8 - 0
iOS/QGVAPlayer/QGVAPlayer.xcodeproj/project.pbxproj

@@ -7,6 +7,8 @@
 	objects = {
 
 /* Begin PBXBuildFile section */
+		43BAD32225F2153500D17934 /* QGVAPWrapView.h in Headers */ = {isa = PBXBuildFile; fileRef = 43BAD32025F2153500D17934 /* QGVAPWrapView.h */; settings = {ATTRIBUTES = (Public, ); }; };
+		43BAD32325F2153500D17934 /* QGVAPWrapView.m in Sources */ = {isa = PBXBuildFile; fileRef = 43BAD32125F2153500D17934 /* QGVAPWrapView.m */; };
 		630723B122F0409200B15629 /* QGVAPlayer.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 630723A722F0409200B15629 /* QGVAPlayer.framework */; };
 		630723B622F0409200B15629 /* QGVAPlayerTests.m in Sources */ = {isa = PBXBuildFile; fileRef = 630723B522F0409200B15629 /* QGVAPlayerTests.m */; };
 		630723B822F0409200B15629 /* QGVAPlayer.h in Headers */ = {isa = PBXBuildFile; fileRef = 630723AA22F0409200B15629 /* QGVAPlayer.h */; settings = {ATTRIBUTES = (Public, ); }; };
@@ -100,6 +102,8 @@
 /* End PBXContainerItemProxy section */
 
 /* Begin PBXFileReference section */
+		43BAD32025F2153500D17934 /* QGVAPWrapView.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = QGVAPWrapView.h; sourceTree = "<group>"; };
+		43BAD32125F2153500D17934 /* QGVAPWrapView.m */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.objc; path = QGVAPWrapView.m; sourceTree = "<group>"; };
 		630723A722F0409200B15629 /* QGVAPlayer.framework */ = {isa = PBXFileReference; explicitFileType = wrapper.framework; includeInIndex = 0; path = QGVAPlayer.framework; sourceTree = BUILT_PRODUCTS_DIR; };
 		630723AA22F0409200B15629 /* QGVAPlayer.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = QGVAPlayer.h; sourceTree = "<group>"; };
 		630723AB22F0409200B15629 /* Info.plist */ = {isa = PBXFileReference; lastKnownFileType = text.plist.xml; path = Info.plist; sourceTree = "<group>"; };
@@ -249,6 +253,8 @@
 				6307240222F0410600B15629 /* VAPMacros.h */,
 				630723C722F0410600B15629 /* UIView+VAP.h */,
 				630723F522F0410600B15629 /* UIView+VAP.m */,
+				43BAD32025F2153500D17934 /* QGVAPWrapView.h */,
+				43BAD32125F2153500D17934 /* QGVAPWrapView.m */,
 				630723D722F0410600B15629 /* Models */,
 				630723F622F0410600B15629 /* Views */,
 				630723E522F0410600B15629 /* Controllers */,
@@ -430,6 +436,7 @@
 			isa = PBXHeadersBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				43BAD32225F2153500D17934 /* QGVAPWrapView.h in Headers */,
 				6307240B22F0410600B15629 /* UIView+VAP.h in Headers */,
 				630723B822F0409200B15629 /* QGVAPlayer.h in Headers */,
 				63AEB3A02316AF8A0069CEBB /* NSDictionary+VAPUtil.h in Headers */,
@@ -574,6 +581,7 @@
 				63AFC6B8231E3D4A00E127F9 /* QGVAPMetalShaderFunctionLoader.m in Sources */,
 				6307243A22F0410600B15629 /* QGHWDMP4OpenGLView.m in Sources */,
 				6307243322F0410600B15629 /* QGHWDMetalView.m in Sources */,
+				43BAD32325F2153500D17934 /* QGVAPWrapView.m in Sources */,
 				63AEB3A62317CD940069CEBB /* NSArray+VAPUtil.m in Sources */,
 				6307240C22F0410600B15629 /* QGVAPWeakProxy.m in Sources */,
 				6307240922F0410600B15629 /* QGMP4Parser.m in Sources */,

+ 3 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGBaseDecoder.h

@@ -17,6 +17,9 @@
 #import "QGAnimatedImageDecodeThread.h"
 #import "QGBaseDFileInfo.h"
 
+extern NSString* kQGVAPDecoderSeekStart;
+extern NSString* kQGVAPDecoderSeekFinish;
+
 @interface QGBaseDecoder : NSObject
 
 @property (atomic, assign) NSInteger currentDecodeFrame;    //正在解码的帧索引

+ 3 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGBaseDecoder.m

@@ -16,6 +16,9 @@
 #import "QGBaseDecoder.h"
 #import "QGAnimatedImageDecodeThreadPool.h"
 
+NSString* kQGVAPDecoderSeekStart = @"kQGVAPDecoderSeekStart";
+NSString* kQGVAPDecoderSeekFinish = @"kQGVAPDecoderSeekFinish";
+
 @interface QGBaseDecoder() {
 
     QGBaseDFileInfo *_fileInfo;

+ 144 - 53
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGMP4FrameHWDecoder.m

@@ -86,6 +86,8 @@
     NSInteger _finishFrameIndex;
     NSError *_constructErr;
     QGMP4ParserProxy *_mp4Parser;
+    
+    int _invalidRetryCount;
 }
 
 @property (atomic, strong) dispatch_queue_t decodeQueue; //dispatch decode task
@@ -94,6 +96,8 @@
 /** Video Parameter Set */
 @property (nonatomic, strong) NSData *vpsData;
 
+@property (atomic, assign) NSInteger lastDecodeFrame;
+
 @end
 
 NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
@@ -134,6 +138,7 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     
     if (self = [super initWith:fileInfo error:error]) {
         _decodeQueue = dispatch_queue_create("com.qgame.vap.decode", DISPATCH_QUEUE_SERIAL);
+        _lastDecodeFrame = -1;
         _mp4Parser = fileInfo.mp4Parser;
         BOOL isOpenSuccess = [self onInputStart];
         if (!isOpenSuccess) {
@@ -148,14 +153,11 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
 }
 
 - (void)registerNotification {
-    
-    [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveEnterBackgroundNotification:) name:UIApplicationDidEnterBackgroundNotification object:nil];
-    [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveEnterBackgroundNotification:) name:UIApplicationWillResignActiveNotification object:nil];
+
 }
 
 - (void)hwd_didReceiveEnterBackgroundNotification:(NSNotification *)notification {
     
-    [self onInputEnd];
 }
 
 - (void)decodeFrame:(NSInteger)frameIndex buffers:(NSMutableArray *)buffers {
@@ -167,12 +169,15 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     self.currentDecodeFrame = frameIndex;
     _buffers = buffers;
     dispatch_async(self.decodeQueue, ^{
-        [self _decodeFrame:frameIndex];
+        if (frameIndex != self.lastDecodeFrame + 1) {
+            // 必须是依次增大,否则解出来的画面会异常
+            return;
+        }
+        [self _decodeFrame:frameIndex drop:NO];
     });
 }
 
-- (void)_decodeFrame:(NSInteger)frameIndex {
-    
+- (void)_decodeFrame:(NSInteger)frameIndex drop:(BOOL)dropFlag {
     if (_isFinish) {
         return ;
     }
@@ -190,7 +195,7 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     NSData *packetData = [_mp4Parser readPacketOfSample:frameIndex];
     if (!packetData.length) {
         _finishFrameIndex = frameIndex;
-        [self onInputEnd];
+        [self _onInputEnd];
         return;
     }
     
@@ -219,74 +224,118 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     if (blockBuffer) {
         CFRelease(blockBuffer);
     }
+    
     // 7. use VTDecompressionSessionDecodeFrame
     if (@available(iOS 9.0, *)) {
         __typeof(self) __weak weakSelf = self;
         VTDecodeFrameFlags flags = 0;
         VTDecodeInfoFlags flagOut = 0;
-        VTDecompressionSessionDecodeFrameWithOutputHandler(_mDecodeSession, sampleBuffer, flags, &flagOut, ^(OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef  _Nullable imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
-            CFRelease(sampleBuffer);
+        OSStatus status = VTDecompressionSessionDecodeFrameWithOutputHandler(_mDecodeSession, sampleBuffer, flags, &flagOut, ^(OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef  _Nullable imageBuffer, CMTime presentationTimeStamp, CMTime presentationDuration) {
             __typeof(self) strongSelf = weakSelf;
             if (strongSelf == nil) {
                 return;
             }
             
-            if(status == kVTInvalidSessionErr) {
-                VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ kVTInvalidSessionErr error:%@", @(frameIndex), @(status));
-            } else if(status == kVTVideoDecoderBadDataErr) {
-                VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ kVTVideoDecoderBadDataErr error:%@", @(frameIndex), @(status));
-            } else if(status != noErr) {
-                VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ error:%@", @(frameIndex), @(status));
-            }
-            
-            QGMP4AnimatedImageFrame *newFrame = [[QGMP4AnimatedImageFrame alloc] init];
-            // imagebuffer会在frame回收时释放
-            CVPixelBufferRetain(imageBuffer);
-            newFrame.pixelBuffer = imageBuffer;
-            newFrame.frameIndex = frameIndex; //dts顺序
-            NSTimeInterval decodeTime = [[NSDate date] timeIntervalSinceDate:startDate]*1000;
-            newFrame.decodeTime = decodeTime;
-            newFrame.defaultFps =(int) strongSelf->_mp4Parser.fps;
-            newFrame.pts = currentPts;
+            [strongSelf handleDecodePixelBuffer:imageBuffer
+                                   sampleBuffer:sampleBuffer
+                                     frameIndex:frameIndex
+                                     currentPts:currentPts
+                                      startDate:startDate
+                                         status:status
+                                       needDrop:dropFlag];
+        });
+        
+        if (status == kVTInvalidSessionErr) {
+            CFRelease(sampleBuffer);
             
-            // 8. insert into buffer
-            [strongSelf->_buffers addObject:newFrame];
+            // 防止陷入死循环
+            if (_invalidRetryCount >= 3) {
+                return;
+            }
             
-            // 9. sort
-            [strongSelf->_buffers sortUsingComparator:^NSComparisonResult(QGMP4AnimatedImageFrame * _Nonnull obj1, QGMP4AnimatedImageFrame * _Nonnull obj2) {
-                return [@(obj1.pts) compare:@(obj2.pts)];
-            }];
-        });
+            [self resetDecoder];
+            // 从最近I帧一直解码到当前帧,中间帧丢弃
+            [self findKeyFrameAndDecodeToCurrent:frameIndex];
+        } else {
+            _invalidRetryCount = 0;
+        }
+        
     } else {
         // 7. use VTDecompressionSessionDecodeFrame
         VTDecodeFrameFlags flags = 0;
         VTDecodeInfoFlags flagOut = 0;
         _status = VTDecompressionSessionDecodeFrame(_mDecodeSession, sampleBuffer, flags, &outputPixelBuffer, &flagOut);
         
-        if(_status == kVTInvalidSessionErr) {
-        } else if(_status == kVTVideoDecoderBadDataErr) {
-        } else if(_status != noErr) {
+        if (_status == kVTInvalidSessionErr) {
+            CFRelease(sampleBuffer);
+            // 防止陷入死循环
+            if (_invalidRetryCount >= 3) {
+                return;
+            }
+            
+            [self resetDecoder];
+            // 从最近I帧一直解码到当前帧,中间帧丢弃
+            [self findKeyFrameAndDecodeToCurrent:frameIndex];
+            
+            return;
+        } else {
+            _invalidRetryCount = 0;
         }
-        CFRelease(sampleBuffer);
-        
-        QGMP4AnimatedImageFrame *newFrame = [[QGMP4AnimatedImageFrame alloc] init];
-        // imagebuffer会在frame回收时释放
-        newFrame.pixelBuffer = outputPixelBuffer;
-        newFrame.frameIndex = frameIndex;
-        NSTimeInterval decodeTime = [[NSDate date] timeIntervalSinceDate:startDate]*1000;
-        newFrame.decodeTime = decodeTime;
-        newFrame.defaultFps = (int)_mp4Parser.fps;
         
-        // 8. insert into buffer
-        [_buffers addObject:newFrame];
+        [self handleDecodePixelBuffer:outputPixelBuffer
+                         sampleBuffer:sampleBuffer
+                           frameIndex:frameIndex
+                           currentPts:currentPts
+                            startDate:startDate
+                               status:_status
+                             needDrop:dropFlag];
         
-        // 9. sort
-        [_buffers sortUsingComparator:^NSComparisonResult(QGMP4AnimatedImageFrame * _Nonnull obj1, QGMP4AnimatedImageFrame * _Nonnull obj2) {
-            return [@(obj1.pts) compare:@(obj2.pts)];
-        }];
     }
 }
 
+- (void)handleDecodePixelBuffer:(CVPixelBufferRef)pixelBuffer
+                   sampleBuffer:(CMSampleBufferRef)sampleBuffer
+                     frameIndex:(NSInteger)frameIndex
+                     currentPts:(uint64_t)currentPts
+                      startDate:(NSDate *)startDate
+                         status:(OSStatus)status
+                       needDrop:(BOOL)dropFlag {
+    
+    self.lastDecodeFrame = frameIndex;
+    
+    CFRelease(sampleBuffer);
+    
+    if(status == kVTInvalidSessionErr) {
+        VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ kVTInvalidSessionErr error:%@", @(frameIndex), @(status));
+    } else if(status == kVTVideoDecoderBadDataErr) {
+        VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ kVTVideoDecoderBadDataErr error:%@", @(frameIndex), @(status));
+    } else if(status != noErr) {
+        VAP_Error(kQGVAPModuleCommon, @"decompress fail! frame:%@ error:%@", @(frameIndex), @(status));
+    }
+    
+    if (dropFlag) {
+        return;
+    }
+    
+    QGMP4AnimatedImageFrame *newFrame = [[QGMP4AnimatedImageFrame alloc] init];
+    // imagebuffer会在frame回收时释放
+    CVPixelBufferRetain(pixelBuffer);
+    newFrame.pixelBuffer = pixelBuffer;
+    newFrame.frameIndex = frameIndex; //dts顺序
+    NSTimeInterval decodeTime = [[NSDate date] timeIntervalSinceDate:startDate]*1000;
+    newFrame.decodeTime = decodeTime;
+    newFrame.defaultFps = (int)_mp4Parser.fps;
+    newFrame.pts = currentPts;
+    
+    // 8. insert into buffer
+    [_buffers addObject:newFrame];
+    
+    // 9. sort
+    [_buffers sortUsingComparator:^NSComparisonResult(QGMP4AnimatedImageFrame * _Nonnull obj1, QGMP4AnimatedImageFrame * _Nonnull obj2) {
+        return [@(obj1.pts) compare:@(obj2.pts)];
+    }];
+}
+
 #pragma mark - override
 
 - (BOOL)shouldStopDecode:(NSInteger)nextFrameIndex {
@@ -391,6 +440,10 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     }
     
     // 3. create VTDecompressionSession
+    return [self createDecompressionSession];;
+}
+
+- (BOOL)createDecompressionSession {
     CFDictionaryRef attrs = NULL;
     const void *keys[] = {kCVPixelBufferPixelFormatTypeKey};
     //      kCVPixelFormatType_420YpCbCr8Planar is YUV420
@@ -431,6 +484,44 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     return YES;
 }
 
+- (void)resetDecoder {
+    // delete
+    if (_mDecodeSession) {
+        VTDecompressionSessionWaitForAsynchronousFrames(_mDecodeSession);
+        VTDecompressionSessionInvalidate(_mDecodeSession);
+        CFRelease(_mDecodeSession);
+        _mDecodeSession = NULL;
+    }
+    
+    // recreate
+    [self createDecompressionSession];
+}
+
+- (void)findKeyFrameAndDecodeToCurrent:(NSInteger)frameIndex {
+    
+    [[NSNotificationCenter defaultCenter] postNotificationName:kQGVAPDecoderSeekStart object:self];
+    
+    NSArray<NSNumber *> *keyframeIndexes = [_mp4Parser videoSyncSampleIndexes];
+    NSInteger index = [[keyframeIndexes firstObject] integerValue];
+    for(NSNumber *number in keyframeIndexes) {
+        if(number.integerValue < frameIndex) {
+            index = number.integerValue;
+            continue;
+        } else {
+            break;
+        }
+    }
+    
+    // seek to last key frame
+    while (index < frameIndex) {
+        [self _decodeFrame:index drop:YES];
+        index++;
+    }
+    [self _decodeFrame:frameIndex drop:NO];
+    
+    [[NSNotificationCenter defaultCenter] postNotificationName:kQGVAPDecoderSeekFinish object:self];
+}
+
 - (void)_onInputEnd  {
     if (_isFinish) {
         return ;
@@ -469,7 +560,7 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
 }
 
 //decode callback
-void didDecompress(void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){
+static void didDecompress(void *decompressionOutputRefCon, void *sourceFrameRefCon, OSStatus status, VTDecodeInfoFlags infoFlags, CVImageBufferRef pixelBuffer, CMTime presentationTimeStamp, CMTime presentationDuration ){
     
     CVPixelBufferRef *outputPixelBuffer = (CVPixelBufferRef *)sourceFrameRefCon;
     *outputPixelBuffer = CVPixelBufferRetain(pixelBuffer);

+ 5 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/QGAnimatedImageDecodeManager.h

@@ -31,6 +31,7 @@
 
 @optional
 
+- (BOOL)shouldSetupAudioPlayer;
 /**
  到文件末尾时被调用
 
@@ -53,5 +54,9 @@
 - (QGBaseAnimatedImageFrame *)consumeDecodedFrame:(NSInteger)frameIndex;
 
 - (void)tryToStartAudioPlay;
+- (void)tryToStopAudioPlay;
+- (void)tryToPauseAudioPlay;
+- (void)tryToResumeAudioPlay;
+- (BOOL)containsThisDeocder:(id)decoder;
 
 @end

+ 59 - 5
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/QGAnimatedImageDecodeManager.m

@@ -46,7 +46,7 @@
         _decoderDelegate = delegate;
         [self createDecodersByConfig:config];
         _bufferManager = [[QGAnimatedImageBufferManager alloc] initWithConfig:config];
-        [self initializeBuffers];
+        [self initializeBuffersFromIndex:0];
         [self setupAudioPlayerIfNeed];
     }
     return self;
@@ -65,13 +65,27 @@
         if (frameIndex == 0 && _bufferManager.buffers.count < _config.bufferCount) {
             return nil;
         }
-        [self checkIfDecodeFinish:frameIndex];
+        BOOL decodeFinish = [self checkIfDecodeFinish:frameIndex];
         QGBaseAnimatedImageFrame *frame = [_bufferManager popVideoFrame];
         if (frame) {
             // pts顺序
             frame.frameIndex = frameIndex;
             [self decodeFrame:frameIndex+_config.bufferCount];
         }
+        else if (!decodeFinish){
+            // buffer已经空了,但还没有结束(退后台时可能出现这种情况)
+            NSInteger decoderIndex = _decoders.count==1?0:frameIndex%_decoders.count;
+            QGBaseDecoder *decoder = _decoders[decoderIndex];
+            if ([decoder shouldStopDecode:frameIndex]) {
+                // 其实已经该结束了
+                if ([self.decoderDelegate respondsToSelector:@selector(decoderDidFinishDecode:)]) {
+                    [self.decoderDelegate decoderDidFinishDecode:decoder];
+                }
+                return nil;
+            }
+            
+            [self initializeBuffersFromIndex:frameIndex];
+        }
         return frame;
     }
 }
@@ -83,9 +97,32 @@
     [_audioPlayer play];
 }
 
+- (void)tryToStopAudioPlay {
+    if (!_audioPlayer) {
+        return;
+    }
+    // CoreAudio(AVAudioPlaeyrCpp)回调audioPlayerDidFinishPlaying:successfully:时在子线程,恰巧此时释放将可能导致野指针问题
+    // 如果只是stop不能解决,可以考虑产生循环持有并延迟释放_audioPlayer
+    [_audioPlayer stop];
+}
+
+- (void)tryToPauseAudioPlay {
+    if (!_audioPlayer) {
+        return;
+    }
+    [_audioPlayer pause];
+}
+
+- (void)tryToResumeAudioPlay {
+    if (!_audioPlayer) {
+        return;
+    }
+    [_audioPlayer play];
+}
+
 #pragma mark - private methods
 
-- (void)checkIfDecodeFinish:(NSInteger)frameIndex {
+- (BOOL)checkIfDecodeFinish:(NSInteger)frameIndex {
     
     NSInteger decoderIndex = _decoders.count==1?0:frameIndex%_decoders.count;
     QGBaseDecoder *decoder = _decoders[decoderIndex];
@@ -93,7 +130,9 @@
         if ([self.decoderDelegate respondsToSelector:@selector(decoderDidFinishDecode:)]) {
             [self.decoderDelegate decoderDidFinishDecode:decoder];
         }
+        return YES;
     }
+    return NO;
 }
 
 - (void)decodeFrame:(NSInteger)frameIndex {
@@ -134,14 +173,20 @@
     }
 }
 
-- (void)initializeBuffers {
+- (void)initializeBuffersFromIndex:(NSInteger)start {
     
     for (int i = 0; i < _config.bufferCount; i++) {
-        [self decodeFrame:i];
+        [self decodeFrame:start+i];
     }
 }
 
 - (void)setupAudioPlayerIfNeed {
+    if ([_decoderDelegate respondsToSelector:@selector(shouldSetupAudioPlayer)]) {
+        BOOL should = [_decoderDelegate shouldSetupAudioPlayer];
+        if (!should) {
+            return;
+        }
+    }
     
     if ([_fileInfo isKindOfClass:[QGMP4HWDFileInfo class]]) {
         QGMP4ParserProxy *mp4Parser = [(QGMP4HWDFileInfo *)_fileInfo mp4Parser];
@@ -158,4 +203,13 @@
 
 }
 
+- (BOOL)containsThisDeocder:(id)decoder {
+    for (id d in _decoders) {
+        if (d == decoder) {
+            return YES;
+        }
+    }
+    return NO;
+}
+
 @end

+ 3 - 1
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/QGVAPConfigManager.m

@@ -45,6 +45,7 @@
     QGMP4Box *vapc = [_fileInfo.mp4Parser.rootBox subBoxOfType:QGMP4BoxType_vapc];
     if (!vapc) {
         self.hasValidConfig = NO;
+        VAP_Error(kQGVAPModuleCommon, @"config can not find vapc box");
         return ;
     }
     self.hasValidConfig = YES;
@@ -166,7 +167,8 @@
     commonInfo.alphaAreaRect = a_frame ? [a_frame hwd_rectValue] : CGRectZero;
     commonInfo.rgbAreaRect = rgb_frame ? [rgb_frame hwd_rectValue] : CGRectZero;
     configModel.info = commonInfo;
-    
+    //更新parser的fps信息
+    _fileInfo.mp4Parser.fps = fps;
     if (!sourcesArr) {
         VAP_Error(kQGVAPModuleCommon, @"has no sourcesArr:%@", configDic);
         return ;

+ 8 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Box.h

@@ -20,6 +20,8 @@
 
 extern NSInteger const kQGBoxSizeLengthInBytes;
 extern NSInteger const kQGBoxTypeLengthInBytes;
+extern NSInteger const kQGBoxLargeSizeLengthInBytes;
+extern NSInteger const kQGBoxLargeSizeFlagLengthInBytes;
 
 @class QGMP4Box;
 typedef NSData* (^QGMp4BoxDataFetcher)(QGMP4Box *box);
@@ -168,6 +170,12 @@ The table is compactly coded. Each entry gives the index of the first chunk of a
 
 @end
 
+@interface QGMP4StssBox : QGMP4Box
+
+@property(nonatomic, strong) NSMutableArray<NSNumber *> *syncSamples;
+
+@end
+
 /**
  * ctts
  */

+ 21 - 1
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Box.m

@@ -18,6 +18,8 @@
 
 NSInteger const kQGBoxSizeLengthInBytes = 4;
 NSInteger const kQGBoxTypeLengthInBytes = 4;
+NSInteger const kQGBoxLargeSizeLengthInBytes = 8;
+NSInteger const kQGBoxLargeSizeFlagLengthInBytes = 1;
 
 #pragma mark - boxes
 #pragma mark -- base box
@@ -257,6 +259,23 @@ NSInteger const kQGBoxTypeLengthInBytes = 4;
 
 @end
 
+@implementation QGMP4StssBox
+
+- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
+    if (!_syncSamples) {
+        _syncSamples = [NSMutableArray new];
+    }
+    NSData *stssData = datablock(self);
+    const char *bytes = stssData.bytes;
+    uint32_t sample_count = READ32BIT(&bytes[12]);
+    for (int i = 0; i < sample_count; i++) {
+        NSInteger index = READ32BIT(&bytes[16 + 4 * i]) - 1;
+        [_syncSamples addObject:[NSNumber numberWithInteger:index]];
+    }
+}
+
+@end
+
 /**
  Decoding Time to Sample Box
  用来计算dts
@@ -341,7 +360,6 @@ stts记录了sample的时间信息,⾥⾯有多个entry,每个entry⾥⾯的
         case QGMP4BoxType_url:
         case QGMP4BoxType_stbl:
         case QGMP4BoxType_avc1:
-        case QGMP4BoxType_stss:
         case QGMP4BoxType_udta:
         case QGMP4BoxType_meta:
         case QGMP4BoxType_ilst:
@@ -351,6 +369,8 @@ stts记录了sample的时间信息,⾥⾯有多个entry,每个entry⾥⾯的
         case QGMP4BoxType_loci:
         case QGMP4BoxType_smhd:
             return [QGMP4Box class];
+        case QGMP4BoxType_stss:
+            return [QGMP4StssBox class];
         case QGMP4BoxType_mdat:
             return [QGMP4MdatBox class];
         case QGMP4BoxType_avcC:

+ 1 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Parser.h

@@ -49,6 +49,7 @@
 @property (nonatomic, strong) NSData *spsData;          //sps
 @property (nonatomic, strong) NSData *ppsData;          //pps
 @property (nonatomic, strong) NSArray *videoSamples;    //所有帧数据,包含了位置和大小等信息
+@property (nonatomic, strong) NSArray *videoSyncSampleIndexes;  // 所有关键帧的index
 @property (nonatomic, strong) QGMP4Box *rootBox;        //mp4文件根box
 @property (nonatomic, strong) QGMP4TrackBox *videoTrackBox;     //视频track
 @property (nonatomic, strong) QGMP4TrackBox *audioTrackBox;     //音频track

+ 116 - 88
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Parser.m

@@ -78,35 +78,22 @@
         //第一个子box
         offset = calBox.superBox ? (calBox.startIndexInBytes + kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes) : 0;
         
-        //avcbox特殊处理
-        if (calBox.type == QGMP4BoxType_avc1 || calBox.type == QGMP4BoxType_hvc1 || calBox.type == QGMP4BoxType_stsd) {
-            unsigned long long avcOffset = calBox.startIndexInBytes+kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes;
-            unsigned long long avcEdge = calBox.startIndexInBytes+calBox.length-kQGBoxSizeLengthInBytes-kQGBoxTypeLengthInBytes;
-            unsigned long long avcLength = 0;
-            QGMP4BoxType avcType = QGMP4BoxType_unknown;
-            for (; avcOffset < avcEdge; avcOffset++) {
-                readBoxTypeAndLength(_fileHandle, avcOffset, &avcType, &avcLength);
-                if (avcType == QGMP4BoxType_avc1 || avcType == QGMP4BoxType_avcC || avcType == QGMP4BoxType_hvc1 || avcType == QGMP4BoxType_hvcC) {
-                    QGMP4Box *avcBox = [QGMP4BoxFactory createBoxForType:avcType startIndex:avcOffset length:avcLength];
-                    if (!calBox.subBoxes) {
-                        calBox.subBoxes = [NSMutableArray new];
-                    }
-                    [calBox.subBoxes addObject:avcBox];
-                    avcBox.superBox = calBox;
-                    [BFSQueue addObject:avcBox];
-                    offset = (avcBox.startIndexInBytes+avcBox.length);
-                    [self didParseBox:avcBox];
-                    break ;
-                }
-            }
+        //特殊处理
+        if ([self shouldResetOffset:calBox.type]) {
+            [self calibrateOffset:&offset boxType:calBox.type];
         }
+        
+        //解析子box
         do {
             //判断是否会越界
             if ((offset+kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes)>(calBox.startIndexInBytes+calBox.length)) {
                 break ;
             }
-            readBoxTypeAndLength(_fileHandle, offset, &type, &length);
             
+            if (![self readBoxTypeAndLength:offset type:&type length:&length]) {
+                break;
+            }
+          
             if ((offset+length)>(calBox.startIndexInBytes+calBox.length)) {
                 //reach to super box end or not a box
                 break ;
@@ -136,6 +123,54 @@
     [self didFinisheParseFile];
 }
 
+- (BOOL)readBoxTypeAndLength:(uint64_t)offset type:(QGMP4BoxType *)type length:(uint64_t*)length {
+    [_fileHandle seekToFileOffset:offset];
+    NSData *data = [_fileHandle readDataOfLength:(kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes)];
+    if (data.length < kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes) {
+      VAP_Error(kQGVAPModuleCommon, @"read box length and type error");
+      return NO;
+    }
+    const char *bytes = data.bytes;
+    *length = [self readValue:bytes length:kQGBoxSizeLengthInBytes];
+    *type = [self readValue:&bytes[kQGBoxSizeLengthInBytes] length:kQGBoxTypeLengthInBytes];
+    if (*length == kQGBoxLargeSizeFlagLengthInBytes) {
+      offset += kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes;
+      [_fileHandle seekToFileOffset:offset];
+      data = [_fileHandle readDataOfLength:kQGBoxLargeSizeLengthInBytes];
+      if (data.length < kQGBoxLargeSizeLengthInBytes) {
+        VAP_Error(kQGVAPModuleCommon, @"read box length and type error");
+        return NO;
+      }
+      bytes = data.bytes;
+      *length = [self readValue:bytes length:kQGBoxLargeSizeLengthInBytes];
+      if (*length == 0) {
+        VAP_Error(kQGVAPModuleCommon, @"read box length is 0");
+        return NO;
+      }
+    }
+    return YES;
+}
+
+- (BOOL)shouldResetOffset:(QGMP4BoxType)type {
+    return type == QGMP4BoxType_stsd ||
+           type == QGMP4BoxType_avc1 ||
+           type == QGMP4BoxType_hvc1;
+}
+
+- (void)calibrateOffset:(uint64_t*)offset boxType:(QGMP4BoxType)type {
+    switch (type) {
+        case QGMP4BoxType_stsd:
+            *offset += 8;
+            break;
+        case QGMP4BoxType_avc1:
+        case QGMP4BoxType_hvc1:
+            *offset += (24 + 2 + 2 + 14 + 32 + 4);
+            break;
+        default:
+            break;
+    }
+}
+
 - (NSData *)readDataForBox:(QGMP4Box *)box {
     
     if (!box) {
@@ -151,6 +186,7 @@
     for (int i = 0; i < length; i++) {
         value += (bytes[i]&0xff)<<((length-i-1)*8);
     }
+    VAP_Debug(kQGVAPModuleCommon, @"readValue length:%lld value:%lld", length, value);
     return value;
 }
 
@@ -173,15 +209,6 @@
     }
 }
 
-void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, QGMP4BoxType *type, unsigned long long *length) {
-    
-    [fileHandle seekToFileOffset:offset];
-    NSData *data = [fileHandle readDataOfLength:kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes];
-    const char *bytes = data.bytes;
-    *length = ((bytes[0]&0xff)<<24)+((bytes[1]&0xff)<<16)+((bytes[2]&0xff)<<8)+(bytes[3]&0xff);
-    *type = ((bytes[4]&0xff)<<24)+((bytes[5]&0xff)<<16)+((bytes[6]&0xff)<<8)+(bytes[7]&0xff);
-}
-
 @end
 
 #pragma mark - parser proxy
@@ -227,7 +254,7 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
         if (self.videoSamples.count == 0) {
             return 0;
         }
-        _fps = self.videoSamples.count/self.duration;
+        _fps = lround(self.videoSamples.count/self.duration);
     }
     return _fps;
 }
@@ -247,72 +274,68 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
     }
     NSMutableArray *videoSamples = [NSMutableArray new];
     
-    uint64_t start_play_time = 0;
     uint64_t tmp = 0;
-    uint32_t sampIdx = 0;
     QGMP4SttsBox *sttsBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stts];
     QGMP4StszBox *stszBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsz];
     QGMP4StscBox *stscBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsc];
     QGMP4StcoBox *stcoBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stco];
     QGMP4CttsBox *cttsBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_ctts];
-    for (int i = 0; i < sttsBox.entries.count; ++i) {
-        QGSttsEntry *entry = sttsBox.entries[i];
-        for (int j = 0; j < entry.sampleCount; ++j) {
-            QGMP4Sample *sample = [QGMP4Sample new];
-            sample.sampleDelta = entry.sampleDelta;
-            sample.codecType = QGMP4CodecTypeVideo;
-            sample.sampleIndex = sampIdx;
-            sample.pts = tmp + [cttsBox.compositionOffsets[j] unsignedLongLongValue];
-            if (sampIdx < stszBox.sampleSizes.count) {
-                sample.sampleSize = (int32_t)[stszBox.sampleSizes[sampIdx] integerValue];
+
+    uint32_t stscEntryIndex = 0;
+    uint32_t stscEntrySampleIndex = 0;
+    uint32_t stscEntrySampleOffset = 0;
+    uint32_t sttsEntryIndex = 0;
+    uint32_t sttsEntrySampleIndex = 0;
+    uint32_t stcoChunkLogicIndex = 0;
+    for (int i = 0; i < stszBox.sampleCount; ++i) {
+        if (stscEntryIndex >= stscBox.entries.count ||
+            sttsEntryIndex >= sttsBox.entries.count ||
+            stcoChunkLogicIndex >= stcoBox.chunkOffsets.count) {
+            break;
+        }
+
+        QGStscEntry *stscEntry = stscBox.entries[stscEntryIndex];
+        QGSttsEntry *sttsEntry = sttsBox.entries[sttsEntryIndex];
+        uint32_t sampleOffset = [stcoBox.chunkOffsets[stcoChunkLogicIndex] unsignedIntValue] + stscEntrySampleOffset;
+        uint32_t ctts = 0;
+        if (i < cttsBox.compositionOffsets.count) {
+            ctts = [cttsBox.compositionOffsets[i] unsignedIntValue];
+        }
+
+        QGMP4Sample *sample = [QGMP4Sample new];
+        sample.codecType = QGMP4CodecTypeVideo;
+        sample.sampleIndex = i;
+        sample.chunkIndex = stcoChunkLogicIndex;
+        sample.sampleDelta = sttsEntry.sampleDelta;
+        sample.sampleSize = [stszBox.sampleSizes[i] unsignedIntValue];
+        sample.pts = tmp + ctts;
+        sample.streamOffset = sampleOffset;
+        [videoSamples addObject:sample];
+
+        stscEntrySampleOffset += sample.sampleSize;
+        tmp += sample.sampleDelta;
+
+        stscEntrySampleIndex++;
+        if (stscEntrySampleIndex >= stscEntry.samplesPerChunk) {
+            if (stcoChunkLogicIndex + 1 < stcoBox.chunkOffsets.count) {
+                stcoChunkLogicIndex++;
             }
-            [videoSamples addObject:sample];
-            start_play_time += entry.sampleDelta;
-            sampIdx++;
-            tmp += entry.sampleDelta;
+
+            stscEntrySampleIndex = 0;
+            stscEntrySampleOffset = 0;
         }
-        
-        NSMutableArray<QGChunkOffsetEntry *> *chunkOffsets = [NSMutableArray new];
-        uint32_t chunkIndex = 0;
-        uint32_t totalSample = 0;
-        for (int j = 0; j < stscBox.entries.count; ++j) {
-            QGStscEntry *entry = stscBox.entries[j];
-            if (j < stscBox.entries.count - 1) {
-                QGStscEntry *nextEntry = stscBox.entries[j+1];
-                for (int k = 0; k < nextEntry.firstChunk - entry.firstChunk; ++k) {
-                    QGChunkOffsetEntry *offsetEntry = [QGChunkOffsetEntry new];
-                    offsetEntry.samplesPerChunk = entry.samplesPerChunk;
-                    totalSample += entry.samplesPerChunk;
-                    if (chunkIndex < stcoBox.chunkOffsets.count) {
-                        offsetEntry.offset = (uint32_t)[stcoBox.chunkOffsets[chunkIndex] integerValue];
-                    }
-                    chunkIndex++;
-                    [chunkOffsets addObject:offsetEntry];
-                }
-            } else {
-                //只有一个或最后一个
-                while (chunkIndex < stcoBox.chunkOffsets.count) {
-                    QGChunkOffsetEntry *offsetEntry = [QGChunkOffsetEntry new];
-                    offsetEntry.samplesPerChunk = entry.samplesPerChunk;
-                    offsetEntry.offset = (uint32_t)[stcoBox.chunkOffsets[chunkIndex] integerValue];
-                    totalSample += entry.samplesPerChunk;
-                    chunkIndex++;
-                    [chunkOffsets addObject:offsetEntry];
-                }
+
+        sttsEntrySampleIndex++;
+        if (sttsEntrySampleIndex >= sttsEntry.sampleCount) {
+            sttsEntrySampleIndex = 0;
+            if (sttsEntryIndex + 1 < sttsBox.entries.count) {
+                sttsEntryIndex++;
             }
         }
-        sampIdx = 0;
-        for (int i = 0; i < chunkOffsets.count; ++i) {
-            QGChunkOffsetEntry *offsetEntry = chunkOffsets[i];
-            uint32_t offsetChunk = 0;
-            for (int j = 0; j < offsetEntry.samplesPerChunk; ++j) {
-                if (sampIdx < videoSamples.count) {
-                    QGMP4Sample *videoSample = videoSamples[sampIdx];
-                    videoSample.chunkIndex = i;
-                    videoSample.streamOffset = offsetEntry.offset + offsetChunk;
-                    offsetChunk += videoSample.sampleSize;
-                    sampIdx++;
-                }
+
+        if (stscEntryIndex + 1 < stscBox.entries.count) {
+            if (stcoChunkLogicIndex >= stscBox.entries[stscEntryIndex + 1].firstChunk - 1) {
+                stscEntryIndex++;
             }
         }
     }
@@ -320,6 +343,11 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
     return _videoSamples;
 }
 
+- (NSArray *)videoSyncSampleIndexes {
+    QGMP4StssBox *stssBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stss];
+    return stssBox.syncSamples;
+}
+
 /**
  调用该方法才会解析mp4文件并得到必要信息。
  */

+ 1 - 1
iOS/QGVAPlayer/QGVAPlayer/Classes/Models/QGVAPConfigModel.h

@@ -14,7 +14,7 @@
 // limitations under the License.
 
 #import <Foundation/Foundation.h>
-#import <UIKit/UIkit.h>
+#import <UIKit/UIKit.h>
 #import <Metal/Metal.h>
 
 typedef NS_ENUM(NSInteger, QGVAPOrientation){

+ 1 - 1
iOS/QGVAPlayer/QGVAPlayer/Classes/Models/QGVAPTextureLoader.m

@@ -49,7 +49,7 @@
     if (@available(iOS 10.0, *)) {
         MTKTextureLoader *loader = [[MTKTextureLoader alloc] initWithDevice:device];
         NSError *error = nil;
-        id<MTLTexture> texture = [loader newTextureWithCGImage:image.CGImage options:@{MTKTextureLoaderOptionOrigin : MTKTextureLoaderOriginFlippedVertically} error:&error];
+        id<MTLTexture> texture = [loader newTextureWithCGImage:image.CGImage options:@{MTKTextureLoaderOptionOrigin : MTKTextureLoaderOriginFlippedVertically,MTKTextureLoaderOptionSRGB:@(NO)} error:&error];
         if (!texture || error) {
             VAP_Error(kQGVAPModuleCommon, @"loadTexture error:%@", error);
             return nil;

+ 82 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/QGVAPWrapView.h

@@ -0,0 +1,82 @@
+// UIView+VAP.h
+// Tencent is pleased to support the open source community by making vap available.
+//
+// Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+//
+// Licensed under the MIT License (the "License"); you may not use this file except in
+// compliance with the License. You may obtain a copy of the License at
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless required by applicable law or agreed to in writing, software distributed under the License is
+// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+// either express or implied. See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import <UIKit/UIKit.h>
+#import "UIView+VAP.h"
+
+NS_ASSUME_NONNULL_BEGIN
+
+
+typedef NS_ENUM(NSUInteger, QGVAPWrapViewContentMode) {
+    QGVAPWrapViewContentModeScaleToFill,
+    QGVAPWrapViewContentModeAspectFit,
+    QGVAPWrapViewContentModeAspectFill,
+};
+
+@protocol VAPWrapViewDelegate <NSObject>
+
+@optional
+//即将开始播放时询问,true马上开始播放,false放弃播放
+- (BOOL)vapWrap_viewshouldStartPlayMP4:(VAPView *)container config:(QGVAPConfigModel *)config;
+
+- (void)vapWrap_viewDidStartPlayMP4:(VAPView *)container;
+- (void)vapWrap_viewDidPlayMP4AtFrame:(QGMP4AnimatedImageFrame*)frame view:(VAPView *)container;
+- (void)vapWrap_viewDidStopPlayMP4:(NSInteger)lastFrameIndex view:(VAPView *)container;
+- (void)vapWrap_viewDidFinishPlayMP4:(NSInteger)totalFrameCount view:(VAPView *)container;
+- (void)vapWrap_viewDidFailPlayMP4:(NSError *)error;
+
+//vap APIs
+- (NSString *)vapWrapview_contentForVapTag:(NSString *)tag resource:(QGVAPSourceInfo *)info;        //替换配置中的资源占位符(不处理直接返回tag)
+- (void)vapWrapView_loadVapImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock; //由于组件内不包含网络图片加载的模块,因此需要外部支持图片加载。
+
+@end
+
+/*
+ 封装VAPView,本身不响应手势
+ 提供ContentMode功能
+ 播放完成后会自动移除内部的VAPView(可选)
+ */
+@interface QGVAPWrapView : UIView
+/// default is QGVAPWrapViewContentModeScaleToFill
+@property (nonatomic, assign) QGVAPWrapViewContentMode contentMode;
+// 是否在播放完成后自动移除内部VAPView, 如果外部用法会复用当前View,可以不移除
+@property (nonatomic, assign) BOOL autoDestoryAfterFinish;
+
+- (void)playHWDMP4:(NSString *)filePath
+       repeatCount:(NSInteger)repeatCount
+          delegate:(id<VAPWrapViewDelegate>)delegate;
+
+- (void)stopHWDMP4;
+
+- (void)pauseHWDMP4;
+- (void)resumeHWDMP4;
+
+//设置是否静音播放素材,注:在播放开始时进行设置,播放过程中设置无效
+- (void)setMute:(BOOL)isMute;
+
+//增加点击的手势识别, 如果开启了autoDestoryAfterFinish,那么手势将在播放完毕后失效
+- (void)addVapTapGesture:(VAPGestureEventBlock)handler;
+//手势识别通用接口, 如果开启了autoDestoryAfterFinish,那么手势将在播放完毕后失效
+- (void)addVapGesture:(UIGestureRecognizer *)gestureRecognizer callback:(VAPGestureEventBlock)handler;
+
+
+/*
+ QGVAPWrapView本身不响应手势,只有子视图响应手势,请使用vapWrapView_addVapTapGesture / vapWrapView_addVapGesture添加
+ */
+- (void)addGestureRecognizer:(UIGestureRecognizer *)gestureRecognizer NS_UNAVAILABLE;
+
+@end
+
+NS_ASSUME_NONNULL_END

+ 229 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/QGVAPWrapView.m

@@ -0,0 +1,229 @@
+// UIView+VAP.m
+// Tencent is pleased to support the open source community by making vap available.
+//
+// Copyright (C) 2020 THL A29 Limited, a Tencent company.  All rights reserved.
+//
+// Licensed under the MIT License (the "License"); you may not use this file except in
+// compliance with the License. You may obtain a copy of the License at
+//
+// http://opensource.org/licenses/MIT
+//
+// Unless required by applicable law or agreed to in writing, software distributed under the License is
+// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
+// either express or implied. See the License for the specific language governing permissions and
+// limitations under the License.
+
+#import "QGVAPWrapView.h"
+#import "QGVAPConfigModel.h"
+
+@interface QGVAPWrapView()<VAPWrapViewDelegate, HWDMP4PlayDelegate>
+
+@property (nonatomic, weak) id<VAPWrapViewDelegate> delegate;
+
+@property (nonatomic, strong) VAPView *vapView;
+
+@end
+
+@implementation QGVAPWrapView
+
+- (instancetype)init {
+    if (self = [super init]) {
+        [self commonInit];
+    }
+    return self;
+}
+
+- (instancetype)initWithFrame:(CGRect)frame {
+    if (self = [super initWithFrame:frame]) {
+        [self commonInit];
+    }
+    return self;
+}
+
+- (void)commonInit {
+    _autoDestoryAfterFinish = YES;
+}
+
+// 因为播放停止后可能移除VAPView,这里需要加回来
+- (void)initVAPViewIfNeed {
+    if (!_vapView) {
+        _vapView = [[VAPView alloc] initWithFrame:self.bounds];
+        [self addSubview:_vapView];
+    }
+}
+
+- (void)playHWDMP4:(NSString *)filePath
+                   repeatCount:(NSInteger)repeatCount
+                      delegate:(id<VAPWrapViewDelegate>)delegate {
+    
+    self.delegate = delegate;
+    
+    [self initVAPViewIfNeed];
+    [self.vapView playHWDMP4:filePath repeatCount:repeatCount delegate:self];
+}
+
+- (void)stopHWDMP4 {
+    [self.vapView stopHWDMP4];
+}
+
+- (void)pauseHWDMP4 {
+    [self.vapView pauseHWDMP4];
+}
+
+- (void)resumeHWDMP4 {
+    [self.vapView resumeHWDMP4];
+}
+
+- (void)setMute:(BOOL)isMute {
+    [self initVAPViewIfNeed];
+    [self.vapView setMute:isMute];
+}
+
+- (void)addVapGesture:(UIGestureRecognizer *)gestureRecognizer callback:(VAPGestureEventBlock)handler {
+    [self initVAPViewIfNeed];
+    [self.vapView addVapGesture:gestureRecognizer callback:handler];
+}
+
+- (void)addVapTapGesture:(VAPGestureEventBlock)handler {
+    [self initVAPViewIfNeed];
+    [self.vapView addVapTapGesture:handler];
+}
+
+#pragma mark - UIView
+// 自身不响应,仅子视图响应。
+- (UIView *)hitTest:(CGPoint)point withEvent:(UIEvent *)event {
+    if (!self.isUserInteractionEnabled || self.isHidden || self.alpha < 0.01) {
+        return nil;
+    }
+    if ([self pointInside:point withEvent:event]) {
+        for (UIView *subview in [self.subviews reverseObjectEnumerator]) {
+            CGPoint convertedPoint = [self convertPoint:point toView:subview];
+            UIView *hitView = [subview hitTest:convertedPoint withEvent:event];
+            if (hitView) {
+                return hitView;
+            }
+        }
+        return nil;
+    }
+    return nil;
+}
+
+#pragma mark - Private
+
+- (void)p_setupContentModeWithConfig:(QGVAPConfigModel *)config {
+    CGFloat realWidth = 0.;
+    CGFloat realHeight = 0.;
+    
+    CGFloat layoutWidth = self.bounds.size.width;
+    CGFloat layoutHeight = self.bounds.size.height;
+    
+    CGFloat layoutRatio = self.bounds.size.width / self.bounds.size.height;
+    CGFloat videoRatio = config.info.size.width / config.info.size.height;
+    
+    switch (self.contentMode) {
+        case QGVAPWrapViewContentModeScaleToFill: {
+
+        }
+            break;
+        case QGVAPWrapViewContentModeAspectFit: {
+            if (layoutRatio < videoRatio) {
+                realWidth = layoutWidth;
+                realHeight = realWidth / videoRatio;
+            } else {
+                realHeight = layoutHeight;
+                realWidth = videoRatio * realHeight;
+            }
+            
+            self.vapView.frame = CGRectMake(0, 0, realWidth, realHeight);
+            self.vapView.center = self.center;
+        }
+            break;;
+        case QGVAPWrapViewContentModeAspectFill: {
+            if (layoutRatio > videoRatio) {
+                realWidth = layoutWidth;
+                realHeight = realWidth / videoRatio;
+            } else {
+                realHeight = layoutHeight;
+                realWidth = videoRatio * realHeight;
+            }
+            
+            self.vapView.frame = CGRectMake(0, 0, realWidth, realHeight);
+            self.vapView.center = self.center;
+        }
+            break;;
+        default:
+            break;
+    }
+}
+
+#pragma mark -  mp4 hwd delegate
+
+#pragma mark -- 播放流程
+- (void)viewDidStartPlayMP4:(VAPView *)container {
+    if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidStartPlayMP4:)]) {
+        [self.delegate vapWrap_viewDidStartPlayMP4:container];
+    }
+}
+
+- (void)viewDidFinishPlayMP4:(NSInteger)totalFrameCount view:(UIView *)container {
+    //note:在子线程被调用
+    if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidFinishPlayMP4:view:)]) {
+        [self.delegate vapWrap_viewDidFinishPlayMP4:totalFrameCount view:container];
+    }
+}
+
+- (void)viewDidPlayMP4AtFrame:(QGMP4AnimatedImageFrame *)frame view:(UIView *)container {
+    //note:在子线程被调用
+    if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidPlayMP4AtFrame:view:)]) {
+        [self.delegate vapWrap_viewDidPlayMP4AtFrame:frame view:container];
+    }
+}
+
+- (void)viewDidStopPlayMP4:(NSInteger)lastFrameIndex view:(UIView *)container {
+    //note:在子线程被调用
+    if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidStopPlayMP4:view:)]) {
+        [self.delegate vapWrap_viewDidStopPlayMP4:lastFrameIndex view:container];
+    }
+    
+    dispatch_async(dispatch_get_main_queue(), ^{
+        if (self.autoDestoryAfterFinish) {
+            [self.vapView removeFromSuperview];
+            self.vapView = nil;
+        }
+    });
+}
+
+- (BOOL)shouldStartPlayMP4:(VAPView *)container config:(QGVAPConfigModel *)config {
+    [self p_setupContentModeWithConfig:config];
+    
+    if ([self.delegate respondsToSelector:@selector(vapWrap_viewshouldStartPlayMP4:config:)]) {
+        return [self.delegate vapWrap_viewshouldStartPlayMP4:container config:config];
+    }
+    return YES;
+}
+
+- (void)viewDidFailPlayMP4:(NSError *)error {
+    if ([self.delegate respondsToSelector:@selector(vapWrap_viewDidFailPlayMP4:)]) {
+        [self.delegate vapWrap_viewDidFailPlayMP4:error];
+    }
+}
+
+#pragma mark -- 融合特效的接口 vapx
+
+//provide the content for tags, maybe text or url string ...
+- (NSString *)contentForVapTag:(NSString *)tag resource:(QGVAPSourceInfo *)info {
+    if ([self.delegate respondsToSelector:@selector(vapWrapview_contentForVapTag:resource:)]) {
+        return [self.delegate vapWrapview_contentForVapTag:tag resource:info];
+    }
+    
+    return nil;
+}
+
+//provide image for url from tag content
+- (void)loadVapImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock {
+    if ([self.delegate respondsToSelector:@selector(vapWrapView_loadVapImageWithURL:context:completion:)]) {
+        [self.delegate vapWrapView_loadVapImageWithURL:urlStr context:context completion:completionBlock];
+    }
+}
+
+@end

+ 14 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/UIView+VAP.h

@@ -17,6 +17,13 @@
 #import "VAPMacros.h"
 #import "QGVAPLogger.h"
 
+// 退后台时的行为
+typedef NS_ENUM(NSUInteger, HWDMP4EBOperationType) {
+    HWDMP4EBOperationTypeStop,              // 退后台时结束VAP播放
+    HWDMP4EBOperationTypePauseAndResume,    // 退后台时暂停、回到前台时自动恢复 (需要从关键帧解码到当前帧以解决VTSession失效问题,建议低端机型不要设置此选项,暂停时间较长、CPU占用较大)
+    HWDMP4EBOperationTypeDoNothing,         // VAP自身不进行控制,当外部进行控制时可以使用这个,仅用于防止覆盖外界的pause调用的问题
+};
+
 @class QGMP4AnimatedImageFrame,QGVAPConfigModel, QGVAPSourceInfo;
 /** 注意:回调方法会在子线程被执行。*/
 @protocol HWDMP4PlayDelegate <NSObject>
@@ -44,17 +51,24 @@
 @property (nonatomic, strong) NSString                  *hwd_MP4FilePath;
 @property (nonatomic, assign) NSInteger                 hwd_fps;         //fps for dipslay, each frame's duration would be set by fps value before display.
 @property (nonatomic, assign) BOOL                      hwd_renderByOpenGL;      //是否使用opengl渲染,默认使用metal
+@property (nonatomic, assign) HWDMP4EBOperationType     hwd_enterBackgroundOP;   // 在退后台时的行为,默认为结束
 
 - (void)playHWDMp4:(NSString *)filePath;
 - (void)playHWDMP4:(NSString *)filePath delegate:(id<HWDMP4PlayDelegate>)delegate;
 - (void)playHWDMP4:(NSString *)filePath repeatCount:(NSInteger)repeatCount delegate:(id<HWDMP4PlayDelegate>)delegate;
 
 - (void)stopHWDMP4;
+
 - (void)pauseHWDMP4;
 - (void)resumeHWDMP4;
 
 + (void)registerHWDLog:(QGVAPLoggerFunc)logger;
 
+//当素材不包含vapc box时,只有在播放素材前调用此接口设置enable才可播放素材,否则素材无法播放
+- (void)enableOldVersion:(BOOL)enable;
+
+//设置是否静音播放素材,注:在播放开始时进行设置,播放过程中设置无效,循环播放则设置后的下一次播放开始生效
+- (void)setMute:(BOOL)isMute;
 @end
 
 @interface UIView (VAPGesture)

+ 79 - 18
iOS/QGVAPlayer/QGVAPlayer/Classes/UIView+VAP.m

@@ -48,14 +48,16 @@ NSInteger const VapMaxCompatibleVersion = 2;
 @property (nonatomic, strong) QGAnimatedImageDecodeConfig   *hwd_decodeConfig;          //线程数与buffer数
 @property (nonatomic, strong) NSOperationQueue              *hwd_callbackQueue;         //回调执行队列
 @property (nonatomic, assign) BOOL                          hwd_onPause;                //标记是否暂停中
+@property (nonatomic, assign) BOOL                          hwd_onSeek;                 //正在seek当中,此时继续播放会导致时序混乱
 @property (nonatomic, strong) QGHWDMP4OpenGLView            *hwd_openGLView;            //opengl绘制图层
 @property (nonatomic, strong) QGHWDMetalView                *hwd_metalView;             //metal绘制图层
 @property (nonatomic, strong) QGVAPMetalView                *vap_metalView;             //vap格式mp4渲染图层
 @property (nonatomic, assign) BOOL                          hwd_isFinish;               //标记是否结束
 @property (nonatomic, assign) NSInteger                     hwd_repeatCount;            //播放次数;-1 表示无限循环
-@property (nonatomic, strong) QGVAPConfigManager            *hwd_configManager;             //额外的配置信息
-@property (nonatomic, strong) dispatch_queue_t              vap_renderQueue;                //播放队列
-
+@property (nonatomic, strong) QGVAPConfigManager            *hwd_configManager;         //额外的配置信息
+@property (nonatomic, strong) dispatch_queue_t              vap_renderQueue;            //播放队列
+@property (nonatomic, assign) BOOL                          vap_enableOldVersion;       //标记是否兼容不含vapc box的素材播放
+@property (nonatomic, assign) BOOL                          vap_isMute;                 //标记是否禁止音频播放
 @end
 
 @implementation UIView (VAP)
@@ -65,13 +67,47 @@ NSInteger const VapMaxCompatibleVersion = 2;
 - (void)hwd_registerNotification {
     
     [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveEnterBackgroundNotification:) name:UIApplicationDidEnterBackgroundNotification object:nil];
-    [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveEnterBackgroundNotification:) name:UIApplicationWillResignActiveNotification object:nil];
     [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveWillEnterForegroundNotification:) name:UIApplicationWillEnterForegroundNotification object:nil];
-    [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveWillEnterForegroundNotification:) name:UIApplicationDidBecomeActiveNotification object:nil];
+    
+    [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveSeekStartNotification:) name:kQGVAPDecoderSeekStart object:nil];
+    [[NSNotificationCenter defaultCenter] hwd_addSafeObserver:self selector:@selector(hwd_didReceiveSeekFinishNotification:) name:kQGVAPDecoderSeekFinish object:nil];
 }
 
 - (void)hwd_didReceiveEnterBackgroundNotification:(NSNotification *)notification {
-    [self hwd_stopHWDMP4];
+    switch (self.hwd_enterBackgroundOP) {
+        case HWDMP4EBOperationTypePauseAndResume:
+            [self pauseHWDMP4];
+            break;
+        case HWDMP4EBOperationTypeDoNothing:
+            break;
+            
+        default:
+            [self stopHWDMP4];
+    }
+}
+
+- (void)hwd_didReceiveWillEnterForegroundNotification:(NSNotification *)notification {
+    switch (self.hwd_enterBackgroundOP) {
+        case HWDMP4EBOperationTypePauseAndResume:
+            [self resumeHWDMP4];
+            break;
+            
+        default:
+            break;
+    }
+    
+}
+
+- (void)hwd_didReceiveSeekStartNotification:(NSNotification *)notification {
+    if ([self.hwd_decodeManager containsThisDeocder:notification.object]) {
+        self.hwd_onSeek = YES;
+    }
+}
+
+- (void)hwd_didReceiveSeekFinishNotification:(NSNotification *)notification {
+    if ([self.hwd_decodeManager containsThisDeocder:notification.object]) {
+        self.hwd_onSeek = NO;
+    }
 }
 
 //结束播放
@@ -99,6 +135,7 @@ NSInteger const VapMaxCompatibleVersion = 2;
     if (self.vap_metalView) {
         [self.vap_metalView dispose];
     }
+    [self.hwd_decodeManager tryToStopAudioPlay];
     [self.hwd_callbackQueue addOperationWithBlock:^{
         //此处必须延迟释放,避免野指针
         if ([self.hwd_Delegate respondsToSelector:@selector(viewDidStopPlayMP4:view:)]) {
@@ -136,10 +173,6 @@ NSInteger const VapMaxCompatibleVersion = 2;
     [self hwd_stopHWDMP4];
 }
 
-- (void)hwd_didReceiveWillEnterForegroundNotification:(NSNotification *)notification {
-    [self resumeHWDMP4];
-}
-
 - (void)hwd_loadMetalViewIfNeed:(QGHWDTextureBlendMode)mode {
     
     if (self.hwd_renderByOpenGL) {
@@ -307,6 +340,11 @@ NSInteger const VapMaxCompatibleVersion = 2;
         return ;
     }
     
+    if (!configManager.hasValidConfig && !self.vap_enableOldVersion) {
+        VAP_Error(kQGVAPModuleCommon, @"playHWDMP4 error! don't has vapc box and enableOldVersion is false!");
+        [self stopHWDMP4];
+        return ;
+    }
     //reset
     self.hwd_currentFrameInstance = nil;
     self.hwd_decodeManager = nil;
@@ -349,9 +387,14 @@ NSInteger const VapMaxCompatibleVersion = 2;
         //不能将self.hwd_onPause判断加到while语句中!会导致releasepool不断上涨
         while (YES) {
             @autoreleasepool {
-                if (self.hwd_onPause || self.hwd_isFinish) {
+                if (self.hwd_isFinish) {
                     break ;
                 }
+                if (self.hwd_onPause || self.hwd_onSeek) {
+                    lastRenderingInterval = NSDate.timeIntervalSinceReferenceDate;
+                    [NSThread sleepForTimeInterval:durationForWaitingFrame];
+                    continue;
+                }
                 __block QGMP4AnimatedImageFrame *nextFrame = nil;
                 dispatch_sync(dispatch_get_main_queue(), ^{
                     nextFrame = [self hwd_displayNext];
@@ -425,12 +468,14 @@ NSInteger const VapMaxCompatibleVersion = 2;
     
     VAP_Info(kQGVAPModuleCommon, @"pauseHWDMP4");
     self.hwd_onPause = YES;
-    [self.hwd_callbackQueue addOperationWithBlock:^{
-        //此处必须延迟释放,避免野指针
-        if ([self.hwd_Delegate respondsToSelector:@selector(viewDidStopPlayMP4:view:)]) {
-            [self.hwd_Delegate viewDidStopPlayMP4:self.hwd_currentFrame.frameIndex view:self];
-        }
-    }];
+    [self.hwd_decodeManager tryToPauseAudioPlay];
+// pause回调stop会导致一般使用场景将view移除,无法resume,因此暂时去掉该回调触发
+//    [self.hwd_callbackQueue addOperationWithBlock:^{
+//        //此处必须延迟释放,避免野指针
+//        if ([self.hwd_Delegate respondsToSelector:@selector(viewDidStopPlayMP4:view:)]) {
+//            [self.hwd_Delegate viewDidStopPlayMP4:self.hwd_currentFrame.frameIndex view:self];
+//        }
+//    }];
 }
 
 - (void)resumeHWDMP4 {
@@ -438,12 +483,21 @@ NSInteger const VapMaxCompatibleVersion = 2;
     VAP_Info(kQGVAPModuleCommon, @"resumeHWDMP4");
     self.hwd_onPause = NO;
     self.hwd_openGLView.pause = NO;
+    // 目前音频和视频没有同步逻辑,多次暂停恢复会使音视频差距越来越大
+    [self.hwd_decodeManager tryToResumeAudioPlay];
 }
 
 + (void)registerHWDLog:(QGVAPLoggerFunc)logger {
     [QGVAPLogger registerExternalLog:logger];
 }
 
+- (void)enableOldVersion:(BOOL)enable {
+    self.vap_enableOldVersion = enable;
+}
+
+- (void)setMute:(BOOL)isMute {
+    self.vap_isMute = isMute;
+}
 #pragma mark - delegate
 
 #pragma clang diagnostic push
@@ -453,6 +507,10 @@ NSInteger const VapMaxCompatibleVersion = 2;
     return [QGMP4FrameHWDecoder class];
 }
 
+- (BOOL)shouldSetupAudioPlayer {
+    return !self.vap_isMute;
+}
+
 - (void)decoderDidFinishDecode:(QGBaseDecoder *)decoder {
     VAP_Info(kQGVAPModuleCommon, @"decoderDidFinishDecode.");
     [self hwd_didFinishDisplay];
@@ -540,6 +598,8 @@ NSInteger const VapMaxCompatibleVersion = 2;
 
 //category methods
 HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_onPause, setHwd_onPause, BOOL)
+HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_onSeek, setHwd_onSeek, BOOL)
+HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_enterBackgroundOP, setHwd_enterBackgroundOP, HWDMP4EBOperationType)
 HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_renderByOpenGL, setHwd_renderByOpenGL, BOOL)
 HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_isFinish, setHwd_isFinish, BOOL)
 HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(hwd_fps, setHwd_fps, NSInteger)
@@ -557,7 +617,8 @@ HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(vap_metalView, setVap_metalView, OBJC_ASSOCIATI
 HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_attachmentsModel, setHwd_attachmentsModel, OBJC_ASSOCIATION_RETAIN)
 HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(hwd_configManager, setHwd_configManager, OBJC_ASSOCIATION_RETAIN)
 HWDSYNTH_DYNAMIC_PROPERTY_OBJECT(vap_renderQueue, setVap_renderQueue, OBJC_ASSOCIATION_RETAIN)
-
+HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(vap_enableOldVersion, setVap_enableOldVersion, BOOL)
+HWDSYNTH_DYNAMIC_PROPERTY_CTYPE(vap_isMute, setVap_isMute, BOOL)
 @end
 
 

+ 7 - 7
iOS/QGVAPlayer/QGVAPlayer/Classes/Views/Metal/QGHWDMetalRenderer.h

@@ -17,13 +17,13 @@
 #import <Metal/Metal.h>
 #import "VAPMacros.h"
 
-UIKIT_EXTERN NSString *const kHWDVertexFunctionName;
-UIKIT_EXTERN NSString *const kHWDYUVFragmentFunctionName;
-extern matrix_float3x3 const kColorConversionMatrix601Default;
-extern matrix_float3x3 const kColorConversionMatrix601FullRangeDefault;
-extern matrix_float3x3 const kColorConversionMatrix709Default;
-extern matrix_float3x3 const kColorConversionMatrix709FullRangeDefault;
-extern matrix_float3x3 const kBlurWeightMatrixDefault;
+UIKIT_EXTERN NSString *const kQGHWDVertexFunctionName;
+UIKIT_EXTERN NSString *const kQGHWDYUVFragmentFunctionName;
+extern matrix_float3x3 const kQGColorConversionMatrix601Default;
+extern matrix_float3x3 const kQGColorConversionMatrix601FullRangeDefault;
+extern matrix_float3x3 const kQGColorConversionMatrix709Default;
+extern matrix_float3x3 const kQGColorConversionMatrix709FullRangeDefault;
+extern matrix_float3x3 const kQGBlurWeightMatrixDefault;
 extern id<MTLDevice> kQGHWDMetalRendererDevice;
 
 #if TARGET_OS_SIMULATOR//模拟器

+ 23 - 23
iOS/QGVAPlayer/QGVAPlayer/Classes/Views/Metal/QGHWDMetalRenderer.m

@@ -24,17 +24,17 @@
 
 #pragma mark - constants
 
-NSString *const kHWDVertexFunctionName      = @"hwd_vertexShader";
-NSString *const kHWDYUVFragmentFunctionName = @"hwd_yuvFragmentShader";
+NSString *const kQGHWDVertexFunctionName      = @"hwd_vertexShader";
+NSString *const kQGHWDYUVFragmentFunctionName = @"hwd_yuvFragmentShader";
 
-static NSInteger const kQuadVerticesConstantsRow    = 4;
-static NSInteger const kQuadVerticesConstantsColumn = 32;
-static NSInteger const kHWDVertexCount              = 4;
+static NSInteger const kQGQuadVerticesConstantsRow    = 4;
+static NSInteger const kQGQuadVerticesConstantsColumn = 32;
+static NSInteger const kQGHWDVertexCount              = 4;
 
 id<MTLDevice> kQGHWDMetalRendererDevice;
 
 // BT.601, which is the standard for SDTV.
-matrix_float3x3 const kColorConversionMatrix601Default = {{
+matrix_float3x3 const kQGColorConversionMatrix601Default = {{
     {1.164,     1.164,      1.164},
     {0.0,       -0.392,     2.017},
     {1.596,     -0.813,     0.0}
@@ -46,35 +46,35 @@ matrix_float3x3 const kColorConversionMatrix601Default = {{
  1.0 1.765 0.0
  */
 //ITU BT.601 Full Range
- matrix_float3x3 const kColorConversionMatrix601FullRangeDefault = {{
+ matrix_float3x3 const kQGColorConversionMatrix601FullRangeDefault = {{
     {1.0,       1.0,        1.0},
     {0.0,       -0.34413,   1.772},
     {1.402,     -0.71414,   0.0}
 }};
 
 // BT.709, which is the standard for HDTV.
-matrix_float3x3 const kColorConversionMatrix709Default = {{
+matrix_float3x3 const kQGColorConversionMatrix709Default = {{
     {1.164,     1.164,      1.164},
     {0.0,       -0.213,     2.112},
     {1.793,     -0.533,     0.0}
 }};
 
 // BT.709 Full Range.
-matrix_float3x3 const kColorConversionMatrix709FullRangeDefault = {{
+matrix_float3x3 const kQGColorConversionMatrix709FullRangeDefault = {{
     {1.0,       1.0,        1.0},
     {0.0,       -.18732,    1.8556},
     {1.57481,   -.46813,    0.0}
 }};
 
 // Blur weight matrix.
-matrix_float3x3 const kBlurWeightMatrixDefault = {{
+matrix_float3x3 const kQGBlurWeightMatrixDefault = {{
     {0.0625,     0.125,      0.0625},
     {0.125,      0.25,       0.125},
     {0.0625,     0.125,      0.0625}
 }};
 
 //QGHWDVertex  顶点坐标+纹理坐标(rgb+alpha)
-static const float kQuadVerticesConstants[kQuadVerticesConstantsRow][kQuadVerticesConstantsColumn] = {
+static const float kQGQuadVerticesConstants[kQGQuadVerticesConstantsRow][kQGQuadVerticesConstantsColumn] = {
     //左侧alpha
     {-1.0, -1.0, 0.0, 1.0, 0.5, 1.0, 0.0, 1.0,
     -1.0, 1.0, 0.0, 1.0, 0.5, 0.0, 0.0, 0.0,
@@ -157,10 +157,10 @@ static const float kQuadVerticesConstants[kQuadVerticesConstantsRow][kQuadVertic
 - (void)setupConstants {
     //buffers
     const void *vertices = [self suitableQuadVertices];
-    NSUInteger allocationSize = kQuadVerticesConstantsColumn * sizeof(float);
+    NSUInteger allocationSize = kQGQuadVerticesConstantsColumn * sizeof(float);
     _vertexBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:vertices length:allocationSize options:kDefaultMTLResourceOption];
-    _vertexCount = kHWDVertexCount;
-    _currentColorConversionMatrix = kColorConversionMatrix601FullRangeDefault;
+    _vertexCount = kQGHWDVertexCount;
+    _currentColorConversionMatrix = kQGColorConversionMatrix601FullRangeDefault;
     struct ColorParameters yuvMatrixs[] = {{_currentColorConversionMatrix,{0.5, 0.5}}};
     NSUInteger yuvMatrixsDataSize = sizeof(struct ColorParameters);
     _yuvMatrixBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:yuvMatrixs length:yuvMatrixsDataSize options:kDefaultMTLResourceOption];
@@ -172,9 +172,9 @@ static const float kQuadVerticesConstants[kQuadVerticesConstantsRow][kQuadVertic
         return ;
     }
     CFTypeRef yCbCrMatrixType = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
-    matrix_float3x3 matrix = kColorConversionMatrix601FullRangeDefault;
+    matrix_float3x3 matrix = kQGColorConversionMatrix601FullRangeDefault;
     if (CFStringCompare(yCbCrMatrixType, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0) == kCFCompareEqualTo) {
-        matrix = kColorConversionMatrix709FullRangeDefault;
+        matrix = kQGColorConversionMatrix709FullRangeDefault;
     }
     if (simd_equal(_currentColorConversionMatrix, matrix)) {
         return ;
@@ -188,8 +188,8 @@ static const float kQuadVerticesConstants[kQuadVerticesConstantsRow][kQuadVertic
 - (void)setupPipelineStatesWithMetalLayer:(CAMetalLayer *)metalLayer {
     
     self.shaderFuncLoader = [[QGVAPMetalShaderFunctionLoader alloc] initWithDevice:kQGHWDMetalRendererDevice];
-    id<MTLFunction> vertexProgram = [self.shaderFuncLoader loadFunctionWithName:kHWDVertexFunctionName];
-    id<MTLFunction> fragmentProgram = [self.shaderFuncLoader loadFunctionWithName:kHWDYUVFragmentFunctionName];
+    id<MTLFunction> vertexProgram = [self.shaderFuncLoader loadFunctionWithName:kQGHWDVertexFunctionName];
+    id<MTLFunction> fragmentProgram = [self.shaderFuncLoader loadFunctionWithName:kQGHWDYUVFragmentFunctionName];
     
     if (!vertexProgram || !fragmentProgram) {
         VAP_Error(kQGVAPModuleCommon, @"setupPipelineStatesWithMetalLayer fail! cuz: shader load fail");
@@ -304,17 +304,17 @@ static const float kQuadVerticesConstants[kQuadVerticesConstantsRow][kQuadVertic
     
     switch (self.blendMode) {
         case QGHWDTextureBlendMode_AlphaLeft:
-            return kQuadVerticesConstants[0];
+            return kQGQuadVerticesConstants[0];
         case QGHWDTextureBlendMode_AlphaRight:
-            return kQuadVerticesConstants[1];
+            return kQGQuadVerticesConstants[1];
         case QGHWDTextureBlendMode_AlphaTop:
-            return kQuadVerticesConstants[2];
+            return kQGQuadVerticesConstants[2];
         case QGHWDTextureBlendMode_AlphaBottom:
-            return kQuadVerticesConstants[3];
+            return kQGQuadVerticesConstants[3];
         default:
             break;
     }
-    return kQuadVerticesConstants[0];
+    return kQGQuadVerticesConstants[0];
 }
 
 @end

+ 5 - 4
iOS/QGVAPlayer/QGVAPlayer/Classes/Views/Metal/Vapx/QGVAPMetalRenderer.m

@@ -120,6 +120,7 @@
     }
     if (self.vertexBuffer == nil || self.yuvMatrixBuffer == nil) {
         VAP_Error(kQGVAPModuleCommon, @"quit rendering cuz vertexBuffer:%p or yuvMatrixBuffer:%p is nil!", self.vertexBuffer, self.yuvMatrixBuffer);
+        [renderEncoder endEncoding];
         return ;
     }
     
@@ -200,7 +201,7 @@
 - (void)setupRenderContext {
     
     //constants
-    _currentColorConversionMatrix = kColorConversionMatrix601FullRangeDefault;
+    _currentColorConversionMatrix = kQGColorConversionMatrix601FullRangeDefault;
     struct ColorParameters yuvMatrixs[] = {{_currentColorConversionMatrix,{0.5, 0.5}}};
     NSUInteger yuvMatrixsDataSize = sizeof(struct ColorParameters);
     _yuvMatrixBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:yuvMatrixs length:yuvMatrixsDataSize options:kDefaultMTLResourceOption];
@@ -261,7 +262,7 @@
 
 - (id<MTLBuffer>)maskBlurBuffer {
     if (!_maskBlurBuffer) {
-        struct MaskParameters parameters[] = {{kBlurWeightMatrixDefault, 3, 0.01}};
+        struct MaskParameters parameters[] = {{kQGBlurWeightMatrixDefault, 3, 0.01}};
         NSUInteger parametersSize = sizeof(struct MaskParameters);
         _maskBlurBuffer = [kQGHWDMetalRendererDevice newBufferWithBytes:parameters length:parametersSize options:kDefaultMTLResourceOption];
     }
@@ -369,9 +370,9 @@
         return ;
     }
     CFTypeRef yCbCrMatrixType = CVBufferGetAttachment(pixelBuffer, kCVImageBufferYCbCrMatrixKey, NULL);
-    matrix_float3x3 matrix = kColorConversionMatrix601FullRangeDefault;
+    matrix_float3x3 matrix = kQGColorConversionMatrix601FullRangeDefault;
     if (CFStringCompare(yCbCrMatrixType, kCVImageBufferYCbCrMatrix_ITU_R_709_2, 0) == kCFCompareEqualTo) {
-        matrix = kColorConversionMatrix709FullRangeDefault;
+        matrix = kQGColorConversionMatrix709FullRangeDefault;
     }
     if (simd_equal(_currentColorConversionMatrix, matrix)) {
         return ;

+ 4 - 4
iOS/QGVAPlayer/QGVAPlayer/Classes/Views/OpenGL/QGHWDMP4OpenGLView.m

@@ -38,14 +38,14 @@ enum {
 };
 
 // BT.709-HDTV.
-static const GLfloat kColorConversion709[] = {
+static const GLfloat kQGColorConversion709[] = {
     1.164,  1.164, 1.164,
     0.0, -0.213, 2.112,
     1.793, -0.533,   0.0,
 };
 
 // BT.601 full range-http://www.equasys.de/colorconversion.html
-const GLfloat kColorConversion601FullRange[] = {
+const GLfloat kQGColorConversion601FullRange[] = {
     1.0,    1.0,    1.0,
     0.0,    -0.343, 1.765,
     1.4,    -0.711, 0.0,
@@ -212,7 +212,7 @@ NSString *const kFragmentShaderSource = SHADER_STRING
     if (!_glContext || ![EAGLContext setCurrentContext:_glContext] || ![self loadShaders]) {
         return NO;
     }
-    _preferredConversion = kColorConversion709;
+    _preferredConversion = kQGColorConversion709;
     return YES;
 }
 
@@ -338,7 +338,7 @@ NSString *const kFragmentShaderSource = SHADER_STRING
         }
         [self cleanupTextures];
 
-        _preferredConversion = kColorConversion601FullRange;
+        _preferredConversion = kQGColorConversion601FullRange;
         
         //y
         glActiveTexture(GL_TEXTURE0);

+ 12 - 0
iOS/QGVAPlayerDemo/QGVAPlayerDemo.xcodeproj/project.pbxproj

@@ -19,6 +19,9 @@
 		63BAD43422F09D2800EAD4C4 /* libz.tbd in Frameworks */ = {isa = PBXBuildFile; fileRef = 63BAD43322F09D2700EAD4C4 /* libz.tbd */; };
 		63BAD44322F0A02C00EAD4C4 /* QGVAPlayer.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = 63BAD43B22F09D6800EAD4C4 /* QGVAPlayer.framework */; };
 		63BAD44422F0A02C00EAD4C4 /* QGVAPlayer.framework in Embed Frameworks */ = {isa = PBXBuildFile; fileRef = 63BAD43B22F09D6800EAD4C4 /* QGVAPlayer.framework */; settings = {ATTRIBUTES = (CodeSignOnCopy, RemoveHeadersOnCopy, ); }; };
+		BA964B5F268315E6003265F2 /* CHANGELOG.md in Resources */ = {isa = PBXBuildFile; fileRef = BA964B5D268315E6003265F2 /* CHANGELOG.md */; };
+		BA964B6326831AF1003265F2 /* README.md in Resources */ = {isa = PBXBuildFile; fileRef = BA964B6126831AF1003265F2 /* README.md */; };
+		BA964B6526831BB6003265F2 /* QGVAPlayer.podspec in Resources */ = {isa = PBXBuildFile; fileRef = BA964B6426831BB6003265F2 /* QGVAPlayer.podspec */; };
 /* End PBXBuildFile section */
 
 /* Begin PBXContainerItemProxy section */
@@ -100,6 +103,9 @@
 		63BAD3B822F09BAF00EAD4C4 /* Resource */ = {isa = PBXFileReference; lastKnownFileType = folder; path = Resource; sourceTree = "<group>"; };
 		63BAD43322F09D2700EAD4C4 /* libz.tbd */ = {isa = PBXFileReference; lastKnownFileType = "sourcecode.text-based-dylib-definition"; name = libz.tbd; path = usr/lib/libz.tbd; sourceTree = SDKROOT; };
 		63BAD43522F09D6800EAD4C4 /* QGVAPlayer.xcodeproj */ = {isa = PBXFileReference; lastKnownFileType = "wrapper.pb-project"; name = QGVAPlayer.xcodeproj; path = ../../QGVAPlayer/QGVAPlayer.xcodeproj; sourceTree = "<group>"; };
+		BA964B5D268315E6003265F2 /* CHANGELOG.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = CHANGELOG.md; path = ../CHANGELOG.md; sourceTree = "<group>"; };
+		BA964B6126831AF1003265F2 /* README.md */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = net.daringfireball.markdown; name = README.md; path = ../README.md; sourceTree = "<group>"; };
+		BA964B6426831BB6003265F2 /* QGVAPlayer.podspec */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; name = QGVAPlayer.podspec; path = ../../QGVAPlayer.podspec; sourceTree = "<group>"; };
 /* End PBXFileReference section */
 
 /* Begin PBXFrameworksBuildPhase section */
@@ -132,6 +138,9 @@
 		63BAD37A22F09B1200EAD4C4 = {
 			isa = PBXGroup;
 			children = (
+				BA964B6426831BB6003265F2 /* QGVAPlayer.podspec */,
+				BA964B6126831AF1003265F2 /* README.md */,
+				BA964B5D268315E6003265F2 /* CHANGELOG.md */,
 				63BAD3B822F09BAF00EAD4C4 /* Resource */,
 				63BAD38522F09B1200EAD4C4 /* QGVAPlayerDemo */,
 				63BAD39E22F09B1500EAD4C4 /* QGVAPlayerDemoTests */,
@@ -332,7 +341,10 @@
 			isa = PBXResourcesBuildPhase;
 			buildActionMask = 2147483647;
 			files = (
+				BA964B6526831BB6003265F2 /* QGVAPlayer.podspec in Resources */,
+				BA964B6326831AF1003265F2 /* README.md in Resources */,
 				63BAD3B922F09BAF00EAD4C4 /* Resource in Resources */,
+				BA964B5F268315E6003265F2 /* CHANGELOG.md in Resources */,
 				63BAD39322F09B1500EAD4C4 /* LaunchScreen.storyboard in Resources */,
 				63BAD39022F09B1500EAD4C4 /* Assets.xcassets in Resources */,
 				63BAD38E22F09B1200EAD4C4 /* Main.storyboard in Resources */,

+ 91 - 7
iOS/QGVAPlayerDemo/QGVAPlayerDemo/ViewController.m

@@ -15,11 +15,17 @@
 
 #import "ViewController.h"
 #import "UIView+VAP.h"
+#import "QGVAPWrapView.h"
 
-@interface ViewController () <HWDMP4PlayDelegate>
+#import <AVFoundation/AVFoundation.h>
+
+@interface ViewController () <HWDMP4PlayDelegate, VAPWrapViewDelegate>
 
 @property (nonatomic, strong) UIButton *vapButton;
 @property (nonatomic, strong) UIButton *vapxButton;
+@property (nonatomic, strong) UIButton *vapWrapViewButton;
+
+@property (nonatomic, strong) VAPView *vapView;
 
 @end
 
@@ -44,6 +50,7 @@ void qg_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const
 
 - (void)viewDidLoad {
     [super viewDidLoad];
+    [self setupAudioSession];
     
     //日志
     [UIView registerHWDLog:qg_VAP_Logger_handler];
@@ -51,16 +58,35 @@ void qg_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const
     //vap-经典效果
     _vapButton = [[UIButton alloc] initWithFrame:CGRectMake(0, 100, CGRectGetWidth(self.view.frame), 90)];
     _vapButton.backgroundColor = [UIColor lightGrayColor];
-    [_vapButton setTitle:@"电竞方案" forState:UIControlStateNormal];
+    [_vapButton setTitle:@"电竞方案(退后台结束)" forState:UIControlStateNormal];
     [_vapButton addTarget:self action:@selector(playVap) forControlEvents:UIControlEventTouchUpInside];
     [self.view addSubview:_vapButton];
     
     //vapx-融合效果
     _vapxButton = [[UIButton alloc] initWithFrame:CGRectMake(0, CGRectGetMaxY(_vapButton.frame)+60, CGRectGetWidth(self.view.frame), 90)];
     _vapxButton.backgroundColor = [UIColor lightGrayColor];
-    [_vapxButton setTitle:@"融合特效" forState:UIControlStateNormal];
+    [_vapxButton setTitle:@"融合特效(退后台暂停/恢复)" forState:UIControlStateNormal];
     [_vapxButton addTarget:self action:@selector(playVapx) forControlEvents:UIControlEventTouchUpInside];
     [self.view addSubview:_vapxButton];
+    
+    //使用WrapView,支持ContentMode
+    _vapWrapViewButton = [[UIButton alloc] initWithFrame:CGRectMake(0, CGRectGetMaxY(_vapxButton.frame)+60, CGRectGetWidth(self.view.frame), 90)];
+    _vapWrapViewButton.backgroundColor = [UIColor lightGrayColor];
+    [_vapWrapViewButton setTitle:@"WrapView-ContentMode" forState:UIControlStateNormal];
+    [_vapWrapViewButton addTarget:self action:@selector(playVapWithWrapView) forControlEvents:UIControlEventTouchUpInside];
+    [self.view addSubview:_vapWrapViewButton];
+}
+
+- (void)setupAudioSession {
+    AVAudioSession* avsession = [AVAudioSession sharedInstance];
+    NSError *error = nil;
+    if (![avsession setCategory:AVAudioSessionCategoryPlayback withOptions:0 error:&error]) {
+        if (error) NSLog(@"AVAudioSession setCategory failed : %ld, %s", (long)error.code, [error.localizedDescription UTF8String]);
+        return;
+    }
+    if (![avsession setActive:YES error:&error]) {
+        if (error) NSLog(@"AVAudioSession setActive failed : %ld, %s", (long)error.code, [error.localizedDescription UTF8String]);
+    }
 }
 
 #pragma mark - 各种类型的播放
@@ -68,29 +94,65 @@ void qg_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const
 - (void)playVap {
     VAPView *mp4View = [[VAPView alloc] initWithFrame:CGRectMake(0, 0, 752/2, 752/2)];
     //默认使用metal渲染,使用OpenGL请打开下面这个开关
-    //mp4View.renderByOpenGL = YES;
+//    mp4View.hwd_renderByOpenGL = YES;
     mp4View.center = self.view.center;
     [self.view addSubview:mp4View];
     mp4View.userInteractionEnabled = YES;
+    mp4View.hwd_enterBackgroundOP = HWDMP4EBOperationTypeStop;
     UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(onImageviewTap:)];
     [mp4View addGestureRecognizer:tap];
-    NSString *resPath = [NSString stringWithFormat:@"%@/Resource/test.mp4", [[NSBundle mainBundle] resourcePath]];
+    NSString *resPath = [NSString stringWithFormat:@"%@/Resource/demo.mp4", [[NSBundle mainBundle] resourcePath]];
     //单纯播放的接口
     //[mp4View playHWDMp4:resPath];
     //指定素材混合模式,重复播放次数,delegate的接口
+    
+    //注意若素材不含vapc box,则必须用调用如下接口设置enable才可播放
+    [mp4View enableOldVersion:YES];
     [mp4View playHWDMP4:resPath repeatCount:-1 delegate:self];
 }
 
 //vap动画
 - (void)playVapx {
-    NSString *mp4Path = [NSString stringWithFormat:@"%@/Resource/vap1.mp4", [[NSBundle mainBundle] resourcePath]];
+    NSString *mp4Path = [NSString stringWithFormat:@"%@/Resource/vap.mp4", [[NSBundle mainBundle] resourcePath]];
     VAPView *mp4View = [[VAPView alloc] initWithFrame:self.view.bounds];
     [self.view addSubview:mp4View];
     mp4View.center = self.view.center;
     mp4View.userInteractionEnabled = YES;
+    mp4View.hwd_enterBackgroundOP = HWDMP4EBOperationTypePauseAndResume; // ⚠️ 建议设置该选项时对机型进行判断,屏蔽低端机
     UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(onImageviewTap:)];
     [mp4View addGestureRecognizer:tap];
+    [mp4View setMute:YES];
     [mp4View playHWDMP4:mp4Path repeatCount:-1 delegate:self];
+    
+    dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(1 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+        [mp4View pauseHWDMP4];
+        dispatch_after(dispatch_time(DISPATCH_TIME_NOW, (int64_t)(2 * NSEC_PER_SEC)), dispatch_get_main_queue(), ^{
+            [mp4View resumeHWDMP4];
+        });
+    });
+}
+
+/// 使用WrapView,支持ContentMode
+- (void)playVapWithWrapView {
+    static BOOL pause = NO;
+    QGVAPWrapView *wrapView = [[QGVAPWrapView alloc] initWithFrame:self.view.bounds];
+    wrapView.center = self.view.center;
+    wrapView.contentMode = QGVAPWrapViewContentModeAspectFit;
+    wrapView.autoDestoryAfterFinish = YES;
+    [self.view addSubview:wrapView];
+    NSString *resPath = [NSString stringWithFormat:@"%@/Resource/vap.mp4", [[NSBundle mainBundle] resourcePath]];
+    [wrapView setMute:YES];
+    [wrapView playHWDMP4:resPath repeatCount:-1 delegate:self];
+    UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(doNothingonImageviewTap:)];
+    
+    __weak __typeof(wrapView) weakWrapView = wrapView;
+    [wrapView addVapGesture:tap callback:^(UIGestureRecognizer *gestureRecognizer, BOOL insideSource, QGVAPSourceDisplayItem *source) {
+        if ((pause = !pause)) {
+            [weakWrapView pauseHWDMP4];
+        } else {
+            [weakWrapView resumeHWDMP4];
+        }
+    }];
 }
 
 #pragma mark -  mp4 hwd delegate
@@ -111,7 +173,7 @@ void qg_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const
 - (void)viewDidStopPlayMP4:(NSInteger)lastFrameIndex view:(UIView *)container {
     //note:在子线程被调用
     dispatch_async(dispatch_get_main_queue(), ^{
-        //do something
+        [container removeFromSuperview];
     });
 }
 
@@ -153,4 +215,26 @@ void qg_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const
     [ges.view removeFromSuperview];
 }
 
+- (void)doNothingonImageviewTap:(UIGestureRecognizer *)ges {
+    
+}
+
+#pragma mark - WrapViewDelegate
+
+//provide the content for tags, maybe text or url string ...
+- (NSString *)vapWrapview_contentForVapTag:(NSString *)tag resource:(QGVAPSourceInfo *)info {
+    NSDictionary *extraInfo = @{@"[sImg1]" : @"http://shp.qlogo.cn/pghead/Q3auHgzwzM6GuU0Y6q6sKHzq3MjY1aGibIzR4xrJc1VY/60",
+                                @"[textAnchor]" : @"我是主播名",
+                                @"[textUser]" : @"我是用户名😂😂",};
+    return extraInfo[tag];
+}
+
+//provide image for url from tag content
+- (void)vapWrapView_loadVapImageWithURL:(NSString *)urlStr context:(NSDictionary *)context completion:(VAPImageCompletionBlock)completionBlock {
+    dispatch_async(dispatch_get_main_queue(), ^{
+        UIImage *image = [UIImage imageNamed:[NSString stringWithFormat:@"%@/Resource/qq.png", [[NSBundle mainBundle] resourcePath]]];
+        completionBlock(image, nil, urlStr);
+    });
+}
+
 @end

BIN
iOS/QGVAPlayerDemo/Resource/b_frame.mp4


BIN
iOS/QGVAPlayerDemo/Resource/demo.mp4


BIN
iOS/QGVAPlayerDemo/Resource/destroy.mp4


BIN
iOS/QGVAPlayerDemo/Resource/test.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap1.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap_264_classical.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap_265.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap_265_classical.mp4


Niektóre pliki nie zostały wyświetlone z powodu dużej ilości zmienionych plików