While developing with ReplayKit I ran a functional test: I tapped the "Start Live" button and turned the microphone on. When a WeChat voice call came in, I hung up the call and continued recording. After a while I stopped the recording and found that the recorded file could not be played.

This is my code:


#import "ZGBroadcastManager.h"
#import "SharePath.h"


static ZGBroadcastManager *_sharedManager = nil;

@interface ZGBroadcastManager ()

@property (nonatomic, weak) RPBroadcastSampleHandler *sampleHandler;

// Writer and inputs used to save the MP4 recording
@property (strong, nonatomic) AVAssetWriter *assetWriter;
@property (strong, nonatomic) AVAssetWriterInput *videoInput;
@property (strong, nonatomic) AVAssetWriterInput *audioAppInput;
@property (strong, nonatomic) AVAssetWriterInput *audioMicInput;
@end


@implementation ZGBroadcastManager

+ (instancetype)sharedManager {
    if (!_sharedManager) {
        @synchronized (self) {
            if (!_sharedManager) {
                _sharedManager = [[self alloc] init];
            }
        }
    }
    return _sharedManager;
}

- (void)setupAssetWriter {
    if ([self.assetWriter canAddInput:self.videoInput]) {
        [self.assetWriter addInput:self.videoInput];
    } else {
        NSAssert(false, @"Failed to add video input");
    }
    if ([self.assetWriter canAddInput:self.audioAppInput]) {
        [self.assetWriter addInput:self.audioAppInput];
    } else {
        NSAssert(false, @"Failed to add app-audio input");
    }
    if ([self.assetWriter canAddInput:self.audioMicInput]) {
        [self.assetWriter addInput:self.audioMicInput];
    } else {
        NSAssert(false, @"Failed to add mic-audio input");
    }
}


- (NSString *)timestamp {
    long long timeinterval = (long long)([NSDate timeIntervalSinceReferenceDate] * 1000);
    return [NSString stringWithFormat:@"%lld", timeinterval];
}

- (AVAssetWriter *)assetWriter {
    if (!_assetWriter) {
        NSError *error = nil;
        NSUserDefaults *sharedDefaults = [[NSUserDefaults alloc] initWithSuiteName:GroupIDKey];
        NSString *fileName = [self timestamp];
        [sharedDefaults setObject:fileName forKey:@"FileKey"];
        [sharedDefaults synchronize];
        // The file is saved in the shared App Group container.
        NSURL *filePathURL = [SharePath filePathUrlWithFileName:fileName];
        self.filePath = filePathURL.path;
        NSLog(@"sampleHandler --- filePath: %@", self.filePath);
        _assetWriter = [[AVAssetWriter alloc] initWithURL:filePathURL fileType:AVFileTypeMPEG4 error:&error];
        NSAssert(!error, @"Failed to initialize _assetWriter");
    }
    return _assetWriter;
}

- (AVAssetWriterInput *)videoInput {
    if (!_videoInput) {
        CGSize size = [UIScreen mainScreen].bounds.size;
        // Output video size (points * 2 below as a rough pixel size)
        NSInteger numPixels = size.width * size.height;
        // Bits per pixel
        CGFloat bitsPerPixel = 10;
        NSInteger bitsPerSecond = numPixels * bitsPerPixel;
        // Bitrate and frame-rate settings
        NSDictionary *compressionProperties = @{
            AVVideoAverageBitRateKey : @(bitsPerSecond), // average bits per second
            AVVideoExpectedSourceFrameRateKey : @(15), // expected frame rate; should be set when AVVideoProfileLevelKey is used, otherwise frames may be dropped to satisfy the bitstream
            AVVideoMaxKeyFrameIntervalKey : @(15), // maximum keyframe interval
            AVVideoProfileLevelKey : AVVideoProfileLevelH264HighAutoLevel,
            AVVideoH264EntropyModeKey : AVVideoH264EntropyModeCABAC,
        };

        NSDictionary *videoOutputSettings = @{
            AVVideoCodecKey : AVVideoCodecTypeH264,
            AVVideoScalingModeKey : AVVideoScalingModeResizeAspect,
            AVVideoWidthKey : @(size.width * 2),
            AVVideoHeightKey : @(size.height * 2),
            AVVideoCompressionPropertiesKey : compressionProperties
        };
        _videoInput = [[AVAssetWriterInput alloc] initWithMediaType:AVMediaTypeVideo outputSettings:videoOutputSettings];
        _videoInput.expectsMediaDataInRealTime = YES; // real-time capture
    }
    return _videoInput;
}

- (AVAssetWriterInput *)audioAppInput {
    if (!_audioAppInput) {
        NSDictionary *audioCompressionSettings = @{ AVEncoderBitRatePerChannelKey : @(28000),
                                                    AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                                                    AVNumberOfChannelsKey : @(2),
                                                    AVSampleRateKey : @(22050) };

        _audioAppInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
        _audioAppInput.expectsMediaDataInRealTime = YES; // real-time capture
    }
    return _audioAppInput;
}

- (AVAssetWriterInput *)audioMicInput {
    if (!_audioMicInput) {
        NSDictionary *audioCompressionSettings = @{ AVEncoderBitRatePerChannelKey : @(28000),
                                                    AVFormatIDKey : @(kAudioFormatMPEG4AAC),
                                                    AVNumberOfChannelsKey : @(2),
                                                    AVSampleRateKey : @(22050) };

        _audioMicInput = [AVAssetWriterInput assetWriterInputWithMediaType:AVMediaTypeAudio outputSettings:audioCompressionSettings];
        _audioMicInput.expectsMediaDataInRealTime = YES; // real-time capture
    }
    return _audioMicInput;
}

- (void)startBroadcast:(RPBroadcastSampleHandler *)sampleHandler {
    self.sampleHandler = sampleHandler;
    NSUserDefaults *share = [[NSUserDefaults alloc] initWithSuiteName:GroupIDKey];
    [self setupAssetWriter];

    NSLog(@"Start Live was tapped");
    [share setInteger:1 forKey:@"isStart"];

    // Add an observer for the stop-broadcast notification
    CFNotificationCenterAddObserver(CFNotificationCenterGetDarwinNotifyCenter(),
                                    (__bridge const void *)(self),
                                    onBroadcastFinish,
                                    (CFStringRef)@"ZGFinishBroadcastUploadExtensionProcessNotification",
                                    NULL,
                                    CFNotificationSuspensionBehaviorDeliverImmediately);

    // Pass self as the observer so the matching CFNotificationCenterRemoveObserver
    // call in stopBroadcast can actually remove this registration.
    CFNotificationCenterAddObserver(CFNotificationCenterGetDarwinNotifyCenter(),
                                    (__bridge const void *)(self),
                                    updateEnabled,
                                    NotificationOff,
                                    NULL,
                                    CFNotificationSuspensionBehaviorDeliverImmediately);
    // Do some business logic when starting screen capture here.
}
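// Note: updateEnabled and NotificationOff are referenced above but defined
// elsewhere in the project (they are not shown in this file). The callback is
// assumed to have the standard CFNotificationCallback signature:
//
//   void updateEnabled(CFNotificationCenterRef center, void *observer,
//                      CFStringRef name, const void *object,
//                      CFDictionaryRef userInfo);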

- (void)stopBroadcast {
    [self stopWriting];
    // Remove observer for stop broadcast notification
    CFNotificationCenterRemoveObserver(CFNotificationCenterGetDarwinNotifyCenter(),
                                       (__bridge const void *)(self),
                                       (CFStringRef)@"ZGFinishBroadcastUploadExtensionProcessNotification",
                                       NULL);
    
    CFNotificationCenterRemoveObserver(CFNotificationCenterGetDarwinNotifyCenter(),
                                       (__bridge const void *)(self),
                                       (CFStringRef)NotificationOff,
                                       NULL);
    
    
    NSUserDefaults *sharedDefaults = [[NSUserDefaults alloc] initWithSuiteName:GroupIDKey];
    NSString *fileName = [sharedDefaults objectForKey:@"FileKey"];
    if (fileName != nil && fileName.length > 0) {
        CFNotificationCenterPostNotification(CFNotificationCenterGetDarwinNotifyCenter(), (CFStringRef)@"ScreenDidFinishNotif", NULL, nil, YES);
    }
    NSLog(@"停止录屏!");

    // Do some business logic when finishing screen capture here.
}

- (void)handleSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    
    AVAssetWriterStatus status = self.assetWriter.status;
    if (status == AVAssetWriterStatusFailed || status == AVAssetWriterStatusCompleted || status == AVAssetWriterStatusCancelled) {
        NSAssert(false, @"Screen recording: asset writer is in a terminal state, error: %@", self.assetWriter.error);
        return;
    }
    if (status == AVAssetWriterStatusUnknown) {
        [self.assetWriter startWriting];
        CMTime time = CMSampleBufferGetPresentationTimeStamp(sampleBuffer);
        [self.assetWriter startSessionAtSourceTime:time];
        // Refresh the local status so the very first sample buffer is not
        // dropped by the status checks below.
        status = self.assetWriter.status;
    }
    
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Handle video sample buffer
            @autoreleasepool {
                if (status == AVAssetWriterStatusWriting) {
                    if (self.videoInput.isReadyForMoreMediaData) {
                       BOOL success = [self.videoInput appendSampleBuffer:sampleBuffer];
                        if (!success) {
                            [self stopWriting];
                        }
                    }
                }
            }
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio
            @autoreleasepool {
                if (status == AVAssetWriterStatusWriting) {
                    if (self.audioAppInput.isReadyForMoreMediaData) {
                        BOOL success = [self.audioAppInput appendSampleBuffer:sampleBuffer];
                        NSLog(@"Appended app-audio sample buffer");
                        if (!success) {
                            [self stopWriting];
                        }
                    }
                }
            }
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            @autoreleasepool {
                if (status == AVAssetWriterStatusWriting) {
                    if (self.audioMicInput.isReadyForMoreMediaData) {
                        BOOL success = [self.audioMicInput appendSampleBuffer:sampleBuffer];
                        NSLog(@"Appended mic-audio sample buffer");
                        if (!success) {
                            [self stopWriting];
                        }
                    }
                }
            }
            break;
        default:
            break;
    }
}


#pragma mark - Finish broadcast function

// Handle stop broadcast notification from main app process
void onBroadcastFinish(CFNotificationCenterRef center, void *observer, CFStringRef name, const void *object, CFDictionaryRef userInfo) {

    // Stop broadcast
    [[ZGBroadcastManager sharedManager] stopBroadcast];

    RPBroadcastSampleHandler *handler = [ZGBroadcastManager sharedManager].sampleHandler;
    if (handler) {
        // Finish broadcast extension process with no error
        #pragma clang diagnostic push
        #pragma clang diagnostic ignored "-Wnonnull"
        [handler finishBroadcastWithError:nil];
        #pragma clang diagnostic pop
    } else {
        NSLog(@"⚠️ RPBroadcastSampleHandler is null, can not stop broadcast upload extension process");
    }
}
- (void)stopWriting {
    if (self.assetWriter.status == AVAssetWriterStatusWriting) {
        [self.videoInput markAsFinished];
        [self.audioAppInput markAsFinished];
        [self.audioMicInput markAsFinished];
        // -finishWriting is deprecated and can return before the file is
        // finalized. Use the asynchronous API and block until the MP4's
        // moov atom has actually been written; otherwise the file can end
        // up unplayable when the extension process exits right afterwards.
        dispatch_semaphore_t semaphore = dispatch_semaphore_create(0);
        [self.assetWriter finishWritingWithCompletionHandler:^{
            dispatch_semaphore_signal(semaphore);
        }];
        dispatch_semaphore_wait(semaphore, DISPATCH_TIME_FOREVER);
        self.videoInput = nil;
        self.audioAppInput = nil;
        self.audioMicInput = nil;
        self.assetWriter = nil;
    }
}

@end
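
For context, this manager is driven from the broadcast upload extension's RPBroadcastSampleHandler subclass. The forwarding below is a sketch of the assumed wiring; the callback names are ReplayKit's standard ones, but only startBroadcast:, stopBroadcast, and handleSampleBuffer:withType: are confirmed by the code above:

#import "SampleHandler.h"
#import "ZGBroadcastManager.h"

@implementation SampleHandler

- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
    // Hand the sample handler to the manager so it can finish the broadcast later.
    [[ZGBroadcastManager sharedManager] startBroadcast:self];
}

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    // Forward every video / app-audio / mic-audio buffer to the writer.
    [[ZGBroadcastManager sharedManager] handleSampleBuffer:sampleBuffer withType:sampleBufferType];
}

- (void)broadcastFinished {
    [[ZGBroadcastManager sharedManager] stopBroadcast];
}

@end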

How can I handle the call interruption so that the recorded video is still complete and playable after I hang up?
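
One direction I am considering (an untested sketch, and it assumes the incoming call reaches the extension as the broadcastPaused / broadcastResumed callbacks) is to finalize the current writer when the broadcast is paused, so the part already recorded gets a valid MP4 header, and to let the lazy getters create a fresh writer and file when recording resumes. stopWriting would need to be exposed in ZGBroadcastManager.h for this:

// In SampleHandler.m -- hypothetical interruption handling, not verified.
- (void)broadcastPaused {
    // Finalize the current file so it stays playable even if the
    // broadcast never resumes (e.g. the user stays on the call).
    [[ZGBroadcastManager sharedManager] stopWriting];
}

- (void)broadcastResumed {
    // Nothing to do here: stopWriting nils out the writer and inputs, so
    // the lazy getters build a new AVAssetWriter (with a new timestamped
    // file name) on the next sample buffer, and handleSampleBuffer:withType:
    // starts a new session when it sees AVAssetWriterStatusUnknown.
}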
