Browse Source

支持265

张禹 5 years ago
parent
commit
0e05ef39f6

+ 64 - 9
iOS/QGVAPlayer/QGVAPlayer/Classes/Controllers/Decoders/QGMP4FrameHWDecoder.m

@@ -91,6 +91,8 @@
 @property (atomic, strong) dispatch_queue_t decodeQueue; //dispatch decode task
 @property (atomic, strong) dispatch_queue_t decodeQueue; //dispatch decode task
 @property (nonatomic, strong) NSData *ppsData; //Picture Parameter Set
 @property (nonatomic, strong) NSData *ppsData; //Picture Parameter Set
 @property (nonatomic, strong) NSData *spsData; //Sequence Parameter Set
 @property (nonatomic, strong) NSData *spsData; //Sequence Parameter Set
+/** Video Parameter Set */
+@property (nonatomic, strong) NSData *vpsData;
 
 
 @end
 @end
 
 
@@ -191,6 +193,8 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
         return;
         return;
     }
     }
     
     
+    uint64_t currentPts = [_mp4Parser.videoSamples[frameIndex] pts];
+    
     CVPixelBufferRef outputPixelBuffer = NULL;
     CVPixelBufferRef outputPixelBuffer = NULL;
     // 4. get NALUnit payload into a CMBlockBuffer,
     // 4. get NALUnit payload into a CMBlockBuffer,
     CMBlockBufferRef blockBuffer = NULL;
     CMBlockBufferRef blockBuffer = NULL;
@@ -241,10 +245,22 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
             NSTimeInterval decodeTime = [[NSDate date] timeIntervalSinceDate:startDate]*1000;
             NSTimeInterval decodeTime = [[NSDate date] timeIntervalSinceDate:startDate]*1000;
             newFrame.decodeTime = decodeTime;
             newFrame.decodeTime = decodeTime;
             newFrame.defaultFps =(int) strongSelf->_mp4Parser.fps;
             newFrame.defaultFps =(int) strongSelf->_mp4Parser.fps;
+            newFrame.pts = currentPts;
             
             
             //8. insert into buffer
             //8. insert into buffer
             NSInteger index = frameIndex % (strongSelf->_buffers.count);
             NSInteger index = frameIndex % (strongSelf->_buffers.count);
             strongSelf->_buffers[index] = newFrame;
             strongSelf->_buffers[index] = newFrame;
+            
+            // 9. sort
+//            [strongSelf->_buffers sortUsingComparator:^NSComparisonResult(QGMP4AnimatedImageFrame * _Nonnull obj1, QGMP4AnimatedImageFrame * _Nonnull obj2) {
+//                return [@(obj1.pts) compare:@(obj2.pts)];
+//            }];
+            
+//            if (frameIndex == 70) {
+//                for (int i = 0; i < strongSelf->_buffers.count; i++) {
+//                    NSLog(@"aarony - %lld", [strongSelf->_buffers[i] pts]);
+//                }
+//            }
         });
         });
     } else {
     } else {
         // 7. use VTDecompressionSessionDecodeFrame
         // 7. use VTDecompressionSessionDecodeFrame
@@ -266,6 +282,7 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
         newFrame.decodeTime = decodeTime;
         newFrame.decodeTime = decodeTime;
         newFrame.defaultFps = (int)_mp4Parser.fps;
         newFrame.defaultFps = (int)_mp4Parser.fps;
         
         
+        
         //8. insert into buffer
         //8. insert into buffer
         NSInteger index = frameIndex%_buffers.count;
         NSInteger index = frameIndex%_buffers.count;
         _buffers[index] = newFrame;
         _buffers[index] = newFrame;
@@ -307,6 +324,7 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
     }
     }
     
     
     _isFinish = NO;
     _isFinish = NO;
+    self.vpsData = nil;
     self.spsData = nil;
     self.spsData = nil;
     self.ppsData = nil;
     self.ppsData = nil;
     _outputWidth = (int)_mp4Parser.picWidth;
     _outputWidth = (int)_mp4Parser.picWidth;
@@ -323,18 +341,54 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
         VAP_Error(kQGVAPModuleCommon, @"sps&pps is already has value.");
         VAP_Error(kQGVAPModuleCommon, @"sps&pps is already has value.");
         return YES;
         return YES;
     }
     }
+    
     self.spsData = _mp4Parser.spsData;
     self.spsData = _mp4Parser.spsData;
     self.ppsData = _mp4Parser.ppsData;
     self.ppsData = _mp4Parser.ppsData;
+    self.vpsData = _mp4Parser.vpsData;
     
     
     // 2. create  CMFormatDescription
     // 2. create  CMFormatDescription
-    if (self.spsData != nil && self.ppsData != nil) {
-        const uint8_t* const parameterSetPointers[2] = { (const uint8_t*)[self.spsData bytes], (const uint8_t*)[self.ppsData bytes] };
-        const size_t parameterSetSizes[2] = { [self.spsData length], [self.ppsData length] };
-        _status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault, 2, parameterSetPointers, parameterSetSizes, 4, &_mFormatDescription);
-        if (_status != noErr) {
-            VAP_Event(kQGVAPModuleCommon, @"CMVideoFormatDescription. Creation: %@.", (_status == noErr) ? @"successfully." : @"failed.");
-            _constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBDesc userInfo:[self errorUserInfo]];
-            return NO;
+    if (self.spsData != nil && self.ppsData != nil && _mp4Parser.videoCodecID != QGMP4VideoStreamCodecIDUnknown) {
+        if (_mp4Parser.videoCodecID == QGMP4VideoStreamCodecIDH264) {
+            const uint8_t* const parameterSetPointers[2] = { (const uint8_t*)[self.spsData bytes], (const uint8_t*)[self.ppsData bytes] };
+            const size_t parameterSetSizes[2] = { [self.spsData length], [self.ppsData length] };
+            
+            _status = CMVideoFormatDescriptionCreateFromH264ParameterSets(kCFAllocatorDefault,
+                                                                          2,
+                                                                          parameterSetPointers,
+                                                                          parameterSetSizes,
+                                                                          4,
+                                                                          &_mFormatDescription);
+            if (_status != noErr) {
+                VAP_Event(kQGVAPModuleCommon, @"CMVideoFormatDescription. Creation: %@.", (_status == noErr) ? @"successfully." : @"failed.");
+                _constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBDesc userInfo:[self errorUserInfo]];
+                return NO;
+            }
+        } else if (_mp4Parser.videoCodecID == QGMP4VideoStreamCodecIDH265) {
+            if (@available(iOS 11.0, *)) {
+                if(VTIsHardwareDecodeSupported(kCMVideoCodecType_HEVC)) {
+                    const uint8_t* const parameterSetPointers[3] = {(const uint8_t*)[self.vpsData bytes], (const uint8_t*)[self.spsData bytes], (const uint8_t*)[self.ppsData bytes]};
+                    const size_t parameterSetSizes[3] = {[self.vpsData length], [self.spsData length], [self.ppsData length]};
+                    
+                    _status = CMVideoFormatDescriptionCreateFromHEVCParameterSets(kCFAllocatorDefault,
+                                                                                  3,                    // parameter_set_count
+                                                                                  parameterSetPointers, // &parameter_set_pointers
+                                                                                  parameterSetSizes,    // &parameter_set_sizes
+                                                                                  4,                    // nal_unit_header_length
+                                                                                  NULL,
+                                                                                  &_mFormatDescription);
+                    if (_status != noErr) {
+                        VAP_Event(kQGVAPModuleCommon, @"CMVideoFormatDescription. Creation: %@.", (_status == noErr) ? @"successfully." : @"failed.");
+                        _constructErr = [NSError errorWithDomain:QGMP4HWDErrorDomain code:QGMP4HWDErrorCode_ErrorCreateVTBDesc userInfo:[self errorUserInfo]];
+                        return NO;
+                    }
+                } else {
+                    VAP_Event(kQGVAPModuleCommon, @"H.265 decoding is un-supported because of the hardware");
+                    return NO;
+                }
+            } else {
+                VAP_Event(kQGVAPModuleCommon, @"System version is too low to support H.265 decoding");
+                return NO;
+            }
         }
         }
     }
     }
     
     
@@ -390,9 +444,10 @@ NSString *const QGMP4HWDErrorDomain = @"QGMP4HWDErrorDomain";
         CFRelease(_mDecodeSession);
         CFRelease(_mDecodeSession);
         _mDecodeSession = NULL;
         _mDecodeSession = NULL;
     }
     }
-    if (self.spsData || self.ppsData) {
+    if (self.spsData || self.ppsData || self.vpsData) {
         self.spsData = nil;
         self.spsData = nil;
         self.ppsData = nil;
         self.ppsData = nil;
+        self.vpsData = nil;
     }
     }
     if (_mFormatDescription) {
     if (_mFormatDescription) {
         CFRelease(_mFormatDescription);
         CFRelease(_mFormatDescription);

+ 34 - 1
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Box.h

@@ -75,9 +75,29 @@ typedef NS_ENUM(NSUInteger, QGMP4BoxType) {
     QGMP4BoxType_wide           =   ATOM_TYPE('w','i','d','e'),//0x77696465,
     QGMP4BoxType_wide           =   ATOM_TYPE('w','i','d','e'),//0x77696465,
     QGMP4BoxType_loci           =   ATOM_TYPE('l','o','c','i'),//0x6c6f6369,
     QGMP4BoxType_loci           =   ATOM_TYPE('l','o','c','i'),//0x6c6f6369,
     QGMP4BoxType_smhd           =   ATOM_TYPE('s','m','h','d'),//0x736d6864,
     QGMP4BoxType_smhd           =   ATOM_TYPE('s','m','h','d'),//0x736d6864,
-    QGMP4BoxType_vapc           =   ATOM_TYPE('v','a','p','c')//0x76617063,//vap专属,存储json配置信息
+    QGMP4BoxType_vapc           =   ATOM_TYPE('v','a','p','c'),//0x76617063,//vap专属,存储json配置信息
+    QGMP4BoxType_hvc1           =   ATOM_TYPE('h','v','c','1'),
+    QGMP4BoxType_hvcC           =   ATOM_TYPE('h','v','c','C')
 };
 };
 
 
+/** Codec of the video track, detected from the sample-description entry
+ *  (avc1 → H.264, hvc1 → H.265; Unknown until one of those boxes is seen). */
+typedef NS_ENUM(NSUInteger, QGMP4VideoStreamCodecID) {
+    QGMP4VideoStreamCodecIDUnknown = 0,
+    QGMP4VideoStreamCodecIDH264,
+    QGMP4VideoStreamCodecIDH265
+};
+ 
+/**
+ * QGCttsEntry — one entry of the ctts (composition-time-to-sample) table.
+ * Each entry describes a run of samples sharing one composition offset.
+ */
+@interface QGCttsEntry : NSObject
+
+/** Number of consecutive samples this entry covers. */
+@property (nonatomic, assign) uint32_t sampleCount;
+/** Composition offset (pts − dts) for those samples. */
+@property (nonatomic, assign) uint32_t compositionOffset;
+
+@end
+
 @interface QGMP4BoxFactory : NSObject
 @interface QGMP4BoxFactory : NSObject
 
 
 + (BOOL)isTypeValueValid:(QGMP4BoxType)type;
 + (BOOL)isTypeValueValid:(QGMP4BoxType)type;
@@ -115,6 +135,9 @@ typedef NS_ENUM(NSUInteger, QGMP4BoxType) {
 @interface QGMP4AvccBox : QGMP4Box
 @interface QGMP4AvccBox : QGMP4Box
 @end
 @end
 
 
+/** hvcC box — carries the HEVC decoder configuration record (vps/sps/pps). */
+@interface QGMP4HvccBox : QGMP4Box
+@end
+
 @interface QGMP4MvhdBox : QGMP4Box
 @interface QGMP4MvhdBox : QGMP4Box
 @end
 @end
 
 
@@ -145,6 +168,16 @@ The table is compactly coded. Each entry gives the index of the first chunk of a
 
 
 @end
 @end
 
 
+/**
+ * ctts box — composition time to sample. Lets the player derive each
+ * sample's pts from its dts (needed when the stream contains B-frames).
+ */
+@interface QGMP4CttsBox : QGMP4Box
+
+/** Flat per-sample composition offsets (pts − dts), one entry per sample
+ *  in decode order (expanded from the run-length-coded table). */
+@property (nonatomic, strong) NSMutableArray<NSNumber *> *compositionOffsets;
+
+@end
+
 //This box contains a compact version of a table that allows indexing from decoding time to sample number. Other tables give sample sizes and pointers, from the sample number. Each entry in the table gives the number of consecutive samples with the same time delta, and the delta of those samples. By adding the deltas a complete time-to-sample map may be built.
 //This box contains a compact version of a table that allows indexing from decoding time to sample number. Other tables give sample sizes and pointers, from the sample number. Each entry in the table gives the number of consecutive samples with the same time delta, and the delta of those samples. By adding the deltas a complete time-to-sample map may be built.
 @interface QGSttsEntry : NSObject
 @interface QGSttsEntry : NSObject
 
 

+ 42 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Box.m

@@ -107,6 +107,44 @@ NSInteger const kQGBoxTypeLengthInBytes = 4;
 
 
 @end
 @end
 
 
+#pragma mark -- hvcc box
+/**
+ * QGMP4HvccBox — intentionally empty: the parser reads the raw hvcC
+ * payload itself (see parseHvccDecoderConfigRecord in QGMP4Parser.m).
+ */
+@implementation QGMP4HvccBox
+@end
+
+/**
+ * QGCttsEntry — plain data holder for one ctts table entry,
+ * used when computing pts from dts.
+ */
+@implementation QGCttsEntry
+
+@end
+
+/**
+ * QGMP4CttsBox — ctts (composition time to sample) box.
+ * Expands the run-length-coded table into one composition offset per
+ * sample, so that pts = dts + compositionOffsets[sampleIndex].
+ */
+@implementation QGMP4CttsBox
+
+/// Parses the ctts payload.
+/// Layout: box header (8 bytes) + version/flags (4) + entryCount (4),
+/// then entryCount pairs of (sampleCount, compositionOffset), 8 bytes each.
+- (void)boxDidParsed:(QGMp4BoxDataFetcher)datablock {
+    if (!_compositionOffsets) {
+        _compositionOffsets = [NSMutableArray new];
+    }
+    // A re-parse must not append to stale results.
+    [_compositionOffsets removeAllObjects];
+
+    NSData *cttsData = datablock(self);
+    // Need at least header(8) + version/flags(4) + entryCount(4).
+    if (cttsData.length < 16) {
+        return;
+    }
+    const char *bytes = cttsData.bytes;
+    uint32_t entryCount = READ32BIT(&bytes[12]);
+
+    for (uint32_t i = 0; i < entryCount; ++i) {
+        NSUInteger entryOffset = 16 + (NSUInteger)i * 8;
+        // A malformed/truncated box may claim more entries than the payload
+        // holds; stop instead of reading out of bounds.
+        if (entryOffset + 8 > cttsData.length) {
+            break;
+        }
+        uint32_t sampleCount = READ32BIT(&bytes[entryOffset]);
+        uint32_t compositionOffset = READ32BIT(&bytes[entryOffset + 4]);
+        for (uint32_t j = 0; j < sampleCount; j++) {
+            [_compositionOffsets addObject:@(compositionOffset)];
+        }
+    }
+}
+
+@end
+
 #pragma mark -- mdat box
 #pragma mark -- mdat box
 @implementation QGMP4MdatBox
 @implementation QGMP4MdatBox
 
 
@@ -331,6 +369,10 @@ stts记录了sample的时间信息,⾥⾯有多个entry,每个entry⾥⾯的
             return [QGMP4SttsBox class];
             return [QGMP4SttsBox class];
         case QGMP4BoxType_stco:
         case QGMP4BoxType_stco:
             return [QGMP4StcoBox class];
             return [QGMP4StcoBox class];
+        case QGMP4BoxType_hvcC:
+            return [QGMP4HvccBox class];
+        case QGMP4BoxType_ctts:
+            return [QGMP4CttsBox class];
         default:
         default:
             return nil;
             return nil;
     }
     }

+ 4 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Parser.h

@@ -52,6 +52,10 @@
 @property (nonatomic, strong) QGMP4Box *rootBox;        //mp4文件根box
 @property (nonatomic, strong) QGMP4Box *rootBox;        //mp4文件根box
 @property (nonatomic, strong) QGMP4TrackBox *videoTrackBox;     //视频track
 @property (nonatomic, strong) QGMP4TrackBox *videoTrackBox;     //视频track
 @property (nonatomic, strong) QGMP4TrackBox *audioTrackBox;     //音频track
 @property (nonatomic, strong) QGMP4TrackBox *audioTrackBox;     //音频track
+/** vps */
+@property (nonatomic, strong) NSData *vpsData;
+/** 视频流编码器ID类型 */
+@property (nonatomic, assign) QGMP4VideoStreamCodecID videoCodecID;
 
 
 - (void)parse;
 - (void)parse;
 - (NSData *)readPacketOfSample:(NSInteger)sampleIndex;
 - (NSData *)readPacketOfSample:(NSInteger)sampleIndex;

+ 121 - 25
iOS/QGVAPlayer/QGVAPlayer/Classes/MP4Parser/QGMP4Parser.m

@@ -79,14 +79,14 @@
         offset = calBox.superBox ? (calBox.startIndexInBytes + kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes) : 0;
         offset = calBox.superBox ? (calBox.startIndexInBytes + kQGBoxSizeLengthInBytes + kQGBoxTypeLengthInBytes) : 0;
         
         
         //avcbox特殊处理
         //avcbox特殊处理
-        if (calBox.type == QGMP4BoxType_avc1 || calBox.type == QGMP4BoxType_stsd) {
+        if (calBox.type == QGMP4BoxType_avc1 || calBox.type == QGMP4BoxType_hvc1 || calBox.type == QGMP4BoxType_stsd) {
             unsigned long long avcOffset = calBox.startIndexInBytes+kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes;
             unsigned long long avcOffset = calBox.startIndexInBytes+kQGBoxSizeLengthInBytes+kQGBoxTypeLengthInBytes;
             unsigned long long avcEdge = calBox.startIndexInBytes+calBox.length-kQGBoxSizeLengthInBytes-kQGBoxTypeLengthInBytes;
             unsigned long long avcEdge = calBox.startIndexInBytes+calBox.length-kQGBoxSizeLengthInBytes-kQGBoxTypeLengthInBytes;
             unsigned long long avcLength = 0;
             unsigned long long avcLength = 0;
             QGMP4BoxType avcType = QGMP4BoxType_unknown;
             QGMP4BoxType avcType = QGMP4BoxType_unknown;
             for (; avcOffset < avcEdge; avcOffset++) {
             for (; avcOffset < avcEdge; avcOffset++) {
                 readBoxTypeAndLength(_fileHandle, avcOffset, &avcType, &avcLength);
                 readBoxTypeAndLength(_fileHandle, avcOffset, &avcType, &avcLength);
-                if (avcType == QGMP4BoxType_avc1 || avcType == QGMP4BoxType_avcC) {
+                if (avcType == QGMP4BoxType_avc1 || avcType == QGMP4BoxType_avcC || avcType == QGMP4BoxType_hvc1 || avcType == QGMP4BoxType_hvcC) {
                     QGMP4Box *avcBox = [QGMP4BoxFactory createBoxForType:avcType startIndex:avcOffset length:avcLength];
                     QGMP4Box *avcBox = [QGMP4BoxFactory createBoxForType:avcType startIndex:avcOffset length:avcLength];
                     if (!calBox.subBoxes) {
                     if (!calBox.subBoxes) {
                         calBox.subBoxes = [NSMutableArray new];
                         calBox.subBoxes = [NSMutableArray new];
@@ -240,21 +240,21 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
     return _duration;
     return _duration;
 }
 }
 
 
-- (NSData *)spsData {
-    
-    if (!_spsData) {
-        _spsData = [self readSPSData];
-    }
-    return _spsData;
-}
-
-- (NSData *)ppsData {
-    
-    if (!_ppsData) {
-        _ppsData = [self readPPSData];
-    }
-    return _ppsData;
-}
+//- (NSData *)spsData {
+//
+//    if (!_spsData) {
+//        _spsData = [self readSPSData];
+//    }
+//    return _spsData;
+//}
+//
+//- (NSData *)ppsData {
+//
+//    if (!_ppsData) {
+//        _ppsData = [self readPPSData];
+//    }
+//    return _ppsData;
+//}
 
 
 - (NSArray *)videoSamples {
 - (NSArray *)videoSamples {
     
     
@@ -270,20 +270,22 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
     QGMP4StszBox *stszBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsz];
     QGMP4StszBox *stszBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsz];
     QGMP4StscBox *stscBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsc];
     QGMP4StscBox *stscBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stsc];
     QGMP4StcoBox *stcoBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stco];
     QGMP4StcoBox *stcoBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_stco];
+    QGMP4CttsBox *cttsBox = [self.videoTrackBox subBoxOfType:QGMP4BoxType_ctts];
     for (int i = 0; i < sttsBox.entries.count; ++i) {
     for (int i = 0; i < sttsBox.entries.count; ++i) {
         QGSttsEntry *entry = sttsBox.entries[i];
         QGSttsEntry *entry = sttsBox.entries[i];
-        tmp += entry.sampleDelta;
         for (int j = 0; j < entry.sampleCount; ++j) {
         for (int j = 0; j < entry.sampleCount; ++j) {
             QGMP4Sample *sample = [QGMP4Sample new];
             QGMP4Sample *sample = [QGMP4Sample new];
             sample.sampleDelta = entry.sampleDelta;
             sample.sampleDelta = entry.sampleDelta;
             sample.codecType = QGMP4CodecTypeVideo;
             sample.codecType = QGMP4CodecTypeVideo;
             sample.sampleIndex = sampIdx;
             sample.sampleIndex = sampIdx;
+            sample.pts = tmp + [cttsBox.compositionOffsets[j] unsignedLongLongValue];
             if (sampIdx < stszBox.sampleSizes.count) {
             if (sampIdx < stszBox.sampleSizes.count) {
                 sample.sampleSize = (int32_t)[stszBox.sampleSizes[sampIdx] integerValue];
                 sample.sampleSize = (int32_t)[stszBox.sampleSizes[sampIdx] integerValue];
             }
             }
             [videoSamples addObject:sample];
             [videoSamples addObject:sample];
             start_play_time += entry.sampleDelta;
             start_play_time += entry.sampleDelta;
             sampIdx++;
             sampIdx++;
+            tmp += entry.sampleDelta;
         }
         }
         
         
         NSMutableArray<QGChunkOffsetEntry *> *chunkOffsets = [NSMutableArray new];
         NSMutableArray<QGChunkOffsetEntry *> *chunkOffsets = [NSMutableArray new];
@@ -341,11 +343,81 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
     
     
     [_parser parse];
     [_parser parse];
     _rootBox = _parser.rootBox;
     _rootBox = _parser.rootBox;
+    
+    // 解析视频解码配置信息
+    [self parseVideoDecoderConfigRecord];
 }
 }
 
 
-- (NSData *)readSPSData {
+#pragma mark - Private
+
+/// Dispatches parameter-set extraction based on the detected codec.
+/// No-op when the codec is unknown (vps/sps/pps remain nil).
+- (void)parseVideoDecoderConfigRecord {
+    if (self.videoCodecID == QGMP4VideoStreamCodecIDH264) {
+        [self parseAvccDecoderConfigRecord];
+    } else if (self.videoCodecID == QGMP4VideoStreamCodecIDH265) {
+        [self parseHvccDecoderConfigRecord];
+    }
+}
+
+/// H.264: read sps/pps from the avcC box (AVC streams carry no vps).
+- (void)parseAvccDecoderConfigRecord {
+    self.spsData = [self parseAvccSPSData];
+    self.ppsData = [self parseAvccPPSData];
+}
+
+/// H.265: extract vps/sps/pps from the hvcC box
+/// (HEVCDecoderConfigurationRecord, ISO/IEC 14496-15 §8.3.3.1).
+/// Layout: 8-byte box header, 22-byte record prefix (ending with
+/// lengthSizeMinusOne), then numOfArrays followed by the NAL arrays.
+- (void)parseHvccDecoderConfigRecord {
+    NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_hvcC]];
+    NSUInteger length = extraData.length;
+    // numOfArrays lives at offset 30; anything shorter is unusable
+    // (the previous `<= 8` guard still allowed out-of-bounds reads).
+    if (length < 31) {
+        return;
+    }
+
+    const char *bytes = extraData.bytes;
+    NSUInteger index = 30; // 8 (box header) + 22 (record prefix, incl. lengthSizeMinusOne)
+    NSUInteger arrayNum = bytes[index++] & 0xff;
+
+    // One array per NAL-unit type (vps / sps / pps, possibly SEI).
+    for (NSUInteger i = 0; i < arrayNum; i++) {
+        // Need the type byte plus the 2-byte NAL-unit count.
+        if (index + 3 > length) {
+            return; // truncated record — keep whatever was parsed so far
+        }
+        int naluType = bytes[index++] & 0x3F;
+        // Count of NAL units of this type in the array.
+        NSUInteger naluNum = ((bytes[index] & 0xff) << 8) + (bytes[index + 1] & 0xff);
+        index += 2;
+
+        for (NSUInteger j = 0; j < naluNum; j++) {
+            if (index + 2 > length) {
+                return;
+            }
+            NSUInteger naluLength = ((bytes[index] & 0xff) << 8) + (bytes[index + 1] & 0xff);
+            index += 2;
+            if (index + naluLength > length) {
+                return; // declared payload runs past the box
+            }
+            NSData *paramData = [NSData dataWithBytes:&bytes[index] length:naluLength];
+
+            if (naluType == 32) {        // VPS_NUT
+                self.vpsData = paramData;
+            } else if (naluType == 33) { // SPS_NUT
+                self.spsData = paramData;
+            } else if (naluType == 34) { // PPS_NUT
+                self.ppsData = paramData;
+            }
+
+            index += naluLength;
+        }
+    }
+}
+
+//- (NSData *)readSPSData {
+//    if (self.videoCodecID == QGMP4VideoStreamCodecIDH264) {
+//        return [self parseAvccSPSData];
+//    } else if (self.videoCodecID == QGMP4VideoStreamCodecIDH265) {
+//        // h.265
+//        return nil;
+//    } else {
+//        return nil;
+//    }
+//}
+
+- (NSData *)parseAvccSPSData {
     //boxsize(32)+boxtype(32)+prefix(40)+预留(3)+spsCount(5)+spssize(16)+...+ppscount(8)+ppssize(16)+...
     //boxsize(32)+boxtype(32)+prefix(40)+预留(3)+spsCount(5)+spssize(16)+...+ppscount(8)+ppssize(16)+...
+    //QGMP4BoxType boxType = self.videoCodecID == QGMP4VideoStreamCodecIDH264 ? QGMP4BoxType_avcC : QGMP4BoxType_hvcC;
     NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_avcC]];
     NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_avcC]];
     if (extraData.length <= 8) {
     if (extraData.length <= 8) {
         return nil;
         return nil;
@@ -362,8 +434,19 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
     return spsData;
     return spsData;
 }
 }
 
 
-- (NSData *)readPPSData {
-    
+//- (NSData *)readPPSData {
+//    if (self.videoCodecID == QGMP4VideoStreamCodecIDH264) {
+//        return [self parseAvccPPSData];
+//    } else if (self.videoCodecID == QGMP4VideoStreamCodecIDH265) {
+//        // h.265
+//        return nil;
+//    } else {
+//        return nil;
+//    }
+//}
+
+- (NSData *)parseAvccPPSData {
+    //QGMP4BoxType boxType = self.videoCodecID == QGMP4VideoStreamCodecIDH264 ? QGMP4BoxType_avcC : QGMP4BoxType_hvcC;
     NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_avcC]];
     NSData *extraData = [_parser readDataForBox:[self.videoTrackBox subBoxOfType:QGMP4BoxType_avcC]];
     if (extraData.length <= 8) {
     if (extraData.length <= 8) {
         return nil;
         return nil;
@@ -397,10 +480,14 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
 }
 }
 
 
 - (NSInteger)readPicWidth {
 - (NSInteger)readPicWidth {
+    if (self.videoCodecID == QGMP4VideoStreamCodecIDUnknown) {
+        return 0;
+    }
     
     
+    QGMP4BoxType boxType = self.videoCodecID == QGMP4VideoStreamCodecIDH264 ? QGMP4BoxType_avc1 : QGMP4BoxType_hvc1;
     NSInteger sizeIndex = 32;
     NSInteger sizeIndex = 32;
     NSUInteger readLength = 2;
     NSUInteger readLength = 2;
-    QGMP4Box *avc1 = [self.videoTrackBox subBoxOfType:QGMP4BoxType_avc1];
+    QGMP4Box *avc1 = [self.videoTrackBox subBoxOfType:boxType];
     [_parser.fileHandle seekToFileOffset:avc1.startIndexInBytes+sizeIndex];
     [_parser.fileHandle seekToFileOffset:avc1.startIndexInBytes+sizeIndex];
     NSData *widthData = [_parser.fileHandle readDataOfLength:readLength];
     NSData *widthData = [_parser.fileHandle readDataOfLength:readLength];
     
     
@@ -414,10 +501,14 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
 }
 }
 
 
 - (NSInteger)readPicHeight {
 - (NSInteger)readPicHeight {
+    if (self.videoCodecID == QGMP4VideoStreamCodecIDUnknown) {
+        return 0;
+    }
     
     
+    QGMP4BoxType boxType = self.videoCodecID == QGMP4VideoStreamCodecIDH264 ? QGMP4BoxType_avc1 : QGMP4BoxType_hvc1;
     NSInteger sizeIndex = 34;
     NSInteger sizeIndex = 34;
     NSUInteger readLength = 2;
     NSUInteger readLength = 2;
-    QGMP4Box *avc1 = [self.videoTrackBox subBoxOfType:QGMP4BoxType_avc1];
+    QGMP4Box *avc1 = [self.videoTrackBox subBoxOfType:boxType];
     [_parser.fileHandle seekToFileOffset:avc1.startIndexInBytes+sizeIndex];
     [_parser.fileHandle seekToFileOffset:avc1.startIndexInBytes+sizeIndex];
     NSData *heightData = [_parser.fileHandle readDataOfLength:readLength];
     NSData *heightData = [_parser.fileHandle readDataOfLength:readLength];
     
     
@@ -501,8 +592,13 @@ void readBoxTypeAndLength(NSFileHandle *fileHandle, unsigned long long offset, Q
                 default:
                 default:
                     break;
                     break;
             }
             }
-        }
-            break;
+        } break;
+        case QGMP4BoxType_avc1: {
+            self.videoCodecID = QGMP4VideoStreamCodecIDH264;
+        } break;
+        case QGMP4BoxType_hvc1: {
+            self.videoCodecID = QGMP4VideoStreamCodecIDH265;
+        } break;
         default:
         default:
             break;
             break;
     }
     }

+ 2 - 0
iOS/QGVAPlayer/QGVAPlayer/Classes/Models/QGBaseAnimatedImageFrame.h

@@ -19,5 +19,7 @@
 
 
 @property (atomic, assign) NSInteger frameIndex;         //当前帧索引
 @property (atomic, assign) NSInteger frameIndex;         //当前帧索引
 @property (atomic, assign) NSTimeInterval duration;      //播放时长
 @property (atomic, assign) NSTimeInterval duration;      //播放时长
+/** pts */
+@property (atomic, assign) uint64_t pts;
 
 
 @end
 @end

+ 3 - 2
iOS/QGVAPlayerDemo/QGVAPlayerDemo/ViewController.m

@@ -83,14 +83,15 @@ void qg_VAP_Logger_handler(VAPLogLevel level, const char* file, int line, const
 
 
 //vap动画
 //vap动画
 - (void)playVapx {
 - (void)playVapx {
-    NSString *mp4Path = [NSString stringWithFormat:@"%@/Resource/vap1.mp4", [[NSBundle mainBundle] resourcePath]];
+    NSString *mp4Path = [NSString stringWithFormat:@"%@/Resource/vap_265_hvc1.mp4", [[NSBundle mainBundle] resourcePath]];
     VAPView *mp4View = [[VAPView alloc] initWithFrame:self.view.bounds];
     VAPView *mp4View = [[VAPView alloc] initWithFrame:self.view.bounds];
     [self.view addSubview:mp4View];
     [self.view addSubview:mp4View];
     mp4View.center = self.view.center;
     mp4View.center = self.view.center;
     mp4View.userInteractionEnabled = YES;
     mp4View.userInteractionEnabled = YES;
     UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(onImageviewTap:)];
     UITapGestureRecognizer *tap = [[UITapGestureRecognizer alloc] initWithTarget:self action:@selector(onImageviewTap:)];
     [mp4View addGestureRecognizer:tap];
     [mp4View addGestureRecognizer:tap];
-    [mp4View playHWDMP4:mp4Path repeatCount:-1 delegate:self];
+    [mp4View playHWDMP4:mp4Path delegate:self];
+    //[mp4View playHWDMP4:mp4Path blendMode:QGHWDTextureBlendMode_AlphaRight delegate:self];
 }
 }
 
 
 #pragma mark -  mp4 hwd delegate
 #pragma mark -  mp4 hwd delegate

BIN
iOS/QGVAPlayerDemo/Resource/b_frame.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap_265.mp4


BIN
iOS/QGVAPlayerDemo/Resource/vap_265_hvc1.mp4