About iOS 12+ Screen Sharing: Summary

Earlier posts covered launching screen sharing, inter-process communication via CFNotificationCenterGetDarwinNotifyCenter, and inter-process communication via App Groups.
This post builds on the screen-share launch and App Groups IPC pieces to capture the screen stream and push it to the remote side (using AliRTC).

Because the earlier socket-based transport was never stable, I switched the transport mechanism.

This post uses AliRTC 1.19 or later, which opens a dedicated screen-share channel (AliRtcVideosourceScreenShareType). On versions below 1.19 that channel does not exist yet, so keep using the camera-large channel (AliRtcVideosourceCameraLargeType) and push the screen stream into it instead.
See the official documentation for details.
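
In code, the version difference only changes the source type passed when pushing frames; a minimal sketch (self.engine and dataSample are set up as in the host-app code later in this post):

// AliRTC >= 1.19: push into the dedicated screen-share channel
[self.engine pushExternalVideoFrame:dataSample sourceType:AliRtcVideosourceScreenShareType];
// AliRTC < 1.19: no screen-share channel yet, reuse the camera-large channel instead
// [self.engine pushExternalVideoFrame:dataSample sourceType:AliRtcVideosourceCameraLargeType];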

Video stream processing

//
//  AppGroupData.h
//  Demo
//
//  Created by 斌小狼 on 2021/4/1.
//  Copyright © 2021 bxl. All rights reserved.
//

#import <Foundation/Foundation.h>
#import <CoreMedia/CoreMedia.h>

static NSString * _Nonnull kUserDefaultFrame = @"KEY_BXL_DEFAULT_FRAME"; // key observed for screen-share video frames
static NSString * _Nonnull kUserDefaultState = @"KEY_BXL_DEFAULT_STATE"; // key observed for screen-share start/stop state

static NSString * _Nonnull kPropFormat = @"format";
static NSString * _Nonnull kPropWidth = @"width";
static NSString * _Nonnull kPropHeight = @"height";
static NSString * _Nonnull kPropStrideY = @"strideY";
static NSString * _Nonnull kPropStrideU = @"strideU";
static NSString * _Nonnull kPropStrideV = @"strideV";
static NSString * _Nonnull kPropDataLength = @"dataLength";
static NSString * _Nonnull kPropData = @"data";
static NSString * _Nonnull kPropRotation = @"rotation";

@interface AppGroupData : NSObject

/// Packs a ReplayKit video sample buffer into a plist-safe frame dictionary.
/// Returns nil for unsupported pixel formats.
+ (NSDictionary * _Nullable)packetWithSampleBuffer:(CMSampleBufferRef _Nullable)sampleBuffer;

@end
//
//  AppGroupData.m
//  Demo
//
//  Created by 斌小狼 on 2021/4/1.
//  Copyright © 2021 bxl. All rights reserved.
//

#import "AppGroupData.h"
#import <ReplayKit/ReplayKit.h> // RPVideoSampleOrientationKey
#import <CoreVideo/CoreVideo.h> // CVPixelBuffer APIs
#import <ImageIO/ImageIO.h>     // kCGImagePropertyOrientation* values

@interface AppGroupData ()

@end

@implementation AppGroupData

+ (NSDictionary * _Nullable)packetWithSampleBuffer:(CMSampleBufferRef _Nullable)sampleBuffer{
    if (sampleBuffer == NULL) {
        return nil;
    }
    // output data
    int16_t format = -1;
    int32_t strideY = -1;
    int32_t strideU = -1;
    int32_t strideV = -1;
    uint8_t * dataPtr = NULL;
    int32_t width = -1;
    int32_t height = -1;
    uint32_t dataLength = 0;
    int32_t rotation = 0;
    // get the CVPixelBuffer
    CVImageBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (pixelBuffer == NULL) {
        return nil;
    }
    const OSType pixel_format = CVPixelBufferGetPixelFormatType(pixelBuffer);
    // CVPixelBuffer to yuv(nv12) data
    if(pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange ||
       pixel_format == kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange) {
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        size_t w = CVPixelBufferGetWidth(pixelBuffer);
        size_t h = CVPixelBufferGetHeight(pixelBuffer);
        size_t src_y_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0);
        size_t src_uv_stride = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 1);
       
        size_t bufferSize = w * h * 3 / 2;
        // destination buffer for tightly packed NV12
        uint8_t *buffer = (uint8_t *)malloc(bufferSize);
        unsigned char *dst = buffer;
        unsigned char *src_y = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0);
        unsigned char *src_uv = (unsigned char *)CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 1);
        // copy the Y plane row by row, dropping any stride padding
        size_t height_y = h;
        for (unsigned int rIdx = 0; rIdx < height_y; ++rIdx, dst += w, src_y += src_y_stride) {
            memcpy(dst, src_y, w);
        }
        // copy the interleaved CbCr plane (half height, full-width rows)
        size_t height_uv = h >> 1;
        for (unsigned int rIdx = 0; rIdx < height_uv; ++rIdx, dst += w, src_uv += src_uv_stride) {
            memcpy(dst, src_uv, w);
        }
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        
        // frame
        format = 3; // AliRtcVideoFormat_NV12;
     
        strideY = (int32_t)w;
        strideU = (int32_t)w;
        strideV = (int32_t)w / 2;
        dataPtr = buffer;
        width = (int32_t)w;
        height = (int32_t)h;
        dataLength = (uint32_t)bufferSize;
        
    } else if(pixel_format == kCVPixelFormatType_32BGRA){ // CVPixelBuffer to BGRA data
        CVPixelBufferLockBaseAddress(pixelBuffer, 0);
        size_t w = CVPixelBufferGetWidth(pixelBuffer);
        size_t h = CVPixelBufferGetHeight(pixelBuffer);
        uint8_t *base = (uint8_t *)CVPixelBufferGetBaseAddress(pixelBuffer);
        size_t stride = CVPixelBufferGetBytesPerRow(pixelBuffer);
        // Copy the pixels out: the base address is only valid while the buffer
        // is locked, and dataWithBytesNoCopy: below takes ownership of (and
        // eventually frees) the pointer it is handed.
        size_t bufferSize = stride * h;
        uint8_t *buffer = (uint8_t *)malloc(bufferSize);
        memcpy(buffer, base, bufferSize);
        CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
        
        // frame
        format = 0; // AliRtcVideoFormat_BGRA;
        strideY = (int32_t)stride;
        strideU = 0;
        strideV = 0;
        dataPtr = buffer;
        width = (int32_t)w;
        height = (int32_t)h;
        dataLength = (uint32_t)bufferSize;
    } else {
        NSLog(@"(Error) unsupported pixel buffer format");
        return nil;
    }
    if(format == -1 || dataPtr == NULL || width == -1 || height == -1 || dataLength == 0){
        NSLog(@"(Error) invalid output params");
        return nil;
    }
    int32_t orientation = ((NSNumber *)CMGetAttachment(sampleBuffer,
                                                       (__bridge CFStringRef)RPVideoSampleOrientationKey,
                                                       NULL)).intValue;
    switch (orientation) {
        case kCGImagePropertyOrientationUp:
            rotation = 0;
            break;
        case kCGImagePropertyOrientationLeft:
            rotation = 90;
            break;
        case kCGImagePropertyOrientationDown:
            rotation = 180;
            break;
        case kCGImagePropertyOrientationRight:
            rotation = 270;
            break;
        default:
            break;
    }
    //int64_t timeStampNs = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sampleBuffer)) * 1000000000;
    // dataWithBytesNoCopy: takes ownership of the malloc'd buffer and frees it along with the NSData
    NSData *data = [NSData dataWithBytesNoCopy:dataPtr length:dataLength];
    // Assemble the frame dictionary (screen capture normally delivers NV12)
    NSDictionary *nv12Frame = @{
        kPropFormat: @(format),
        kPropWidth: @(width),
        kPropHeight: @(height),
        kPropStrideY: @(strideY),
        kPropStrideU: @(strideU),
        kPropStrideV: @(strideV),
        kPropDataLength: @(dataLength),
        kPropData: data,
        kPropRotation: @(rotation),
    };
    return nv12Frame;
}
@end
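
As a sanity check on the buffer math above: NV12 stores a full-resolution Y plane followed by a half-height interleaved CbCr plane, which is where w * h * 3 / 2 comes from. The frame size below is illustrative:

size_t w = 828, h = 1792;       // illustrative portrait screen size
size_t ySize  = w * h;          // Y plane:    1,483,776 bytes
size_t uvSize = w * (h / 2);    // CbCr plane:   741,888 bytes (full-width rows, half height)
size_t total  = w * h * 3 / 2;  // 2,225,664 bytes, roughly 2.1 MB per frame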

Capturing the data stream in the Extension

//
//  SampleHandler.m
//  Demo
//
//  Created by 斌小狼 on 2021/4/1.
//  Copyright © 2021 bxl. All rights reserved.
//


#import "SampleHandler.h"
#import "AppGroupData.h"

static NSString * _Nonnull kAppGroup = @"your App Group identifier";

@interface SampleHandler()

@property (nonatomic, strong) NSUserDefaults *userDefaults;

@end

@implementation SampleHandler

- (void)broadcastStartedWithSetupInfo:(NSDictionary *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied but optional.
    self.userDefaults = [[NSUserDefaults alloc] initWithSuiteName:kAppGroup];
    [self.userDefaults setObject:@{@"state":@"started"} forKey:kUserDefaultState]; // signal "broadcast started"
}

- (void)broadcastPaused {
    // User has requested to pause the broadcast. Samples will stop being delivered.
    NSLog(@"broadcast paused");
}

- (void)broadcastResumed {
    // User has requested to resume the broadcast. Samples delivery will resume.
    NSLog(@"broadcast resumed");
}

- (void)broadcastFinished {
    // User has requested to finish the broadcast.
    [self.userDefaults setObject:@{@"state":@"stopped"} forKey:kUserDefaultState]; // signal "broadcast stopped"
    [self.userDefaults synchronize]; // flush before the extension is torn down
}
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    // Route incoming sample buffers:
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            @autoreleasepool {
                NSDictionary *nv12Frame = [AppGroupData packetWithSampleBuffer:sampleBuffer];
                if (nv12Frame != nil) { // packetWithSampleBuffer: returns nil on failure
                    [self.userDefaults setObject:nv12Frame forKey:kUserDefaultFrame]; // screen frame payload
                    [self.userDefaults synchronize];
                }
            }
            // Handle video sample buffer
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio
            //            NSLog(@"--app audio---");
            // App audio stream: 44100 Hz, stereo, 16-bit
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            //            NSLog(@"--- mic audio ---");
            // Mic audio stream: 48000 Hz, mono, 16-bit
            //            [self sendAudioData:sampleBuffer];
            break;
        default:
            break;
    }
}

@end
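
One practical caveat about the handler above: it serializes and writes every captured frame into the shared NSUserDefaults and forces a synchronize, which is heavy for a broadcast extension (they are limited to roughly 50 MB of memory). A minimal throttling sketch; kMinFrameInterval and the _lastPushTime ivar are names introduced here for illustration:

static const CFTimeInterval kMinFrameInterval = 0.1; // cap writes at ~10 fps (illustrative value)

// At the top of the RPSampleBufferTypeVideo branch, before packing the frame:
CFTimeInterval now = CFAbsoluteTimeGetCurrent();
if (now - _lastPushTime < kMinFrameInterval) {
    return; // drop this frame; App Group IPC cannot keep up with the full capture rate
}
_lastPushTime = now; // _lastPushTime is a CFTimeInterval ivar added for this sketch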

Receiving and using the screen stream in the host app

//
//  RTCSampleChatViewController.m
//  RtcSample
//
//  Created by 斌小狼 on 2021/4/1.
//  Copyright © 2021 bxl. All rights reserved.
//

#import "RTCSampleChatViewController.h"
#import <AliRTCSdk/AliRtcEngine.h> // AliRTC SDK (header path may differ by SDK version)
#import "AppGroupData.h"
#import <ReplayKit/ReplayKit.h>

static NSString * _Nonnull kAppGroup = @"your App Group identifier";
static void *KVOContext = &KVOContext;

@interface RTCSampleChatViewController ()
//....
//....
//....
@property (nonatomic, strong) NSUserDefaults *userDefaults;
@end

@implementation RTCSampleChatViewController

- (void)viewDidLoad {
    [super viewDidLoad];
    //....
    //....
    // Set up the NSUserDefaults data channel to the extension
    [self setupUserDefaults];
}

- (void)setupUserDefaults{
    // Use the shared NSUserDefaults as the data channel that receives video frames from the Extension
    self.userDefaults = [[NSUserDefaults alloc] initWithSuiteName:kAppGroup];
    [self.userDefaults setObject:@{@"state":@"x"} forKey:kUserDefaultState]; // seed the state with a default value
    [self.userDefaults addObserver:self forKeyPath:kUserDefaultState options:NSKeyValueObservingOptionNew context:KVOContext];
    [self.userDefaults addObserver:self forKeyPath:kUserDefaultFrame options:NSKeyValueObservingOptionNew context:KVOContext];
}

- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context{
    if ([keyPath isEqualToString:kUserDefaultState]) {
        NSDictionary *stateInfo = change[NSKeyValueChangeNewKey];
        if ([stateInfo[@"state"] isEqual:@"started"]) {
            // Open the RTC external video input channel and start pushing the screen stream (configLocalScreenPublish)
            [self screenShareStart];
        }
        if ([stateInfo[@"state"] isEqual:@"stopped"]) {
            // Close the RTC external video input channel and stop pushing the screen stream
            [self screenShareStop];
        }
        return;
    }
    // startScreen is the flag toggled by screenShareStart/screenShareStop above
    if (![keyPath isEqualToString:kUserDefaultFrame] || startScreen == 0) {
        return;
    }
    NSDictionary *nv12Frame = change[NSKeyValueChangeNewKey];
    // Rebuild the NV12 video frame
    AliRtcVideoDataSample *dataSample = [[AliRtcVideoDataSample alloc] init];
    dataSample.dataPtr = (long)[nv12Frame[kPropData] bytes];
    dataSample.format = (AliRtcVideoFormat)[nv12Frame[kPropFormat] intValue];
    dataSample.width = [nv12Frame[kPropWidth] unsignedIntValue];
    dataSample.height = [nv12Frame[kPropHeight] unsignedIntValue];
    dataSample.strideY = [nv12Frame[kPropStrideY] unsignedIntValue];
    dataSample.strideU = [nv12Frame[kPropStrideU] unsignedIntValue];
    dataSample.strideV = [nv12Frame[kPropStrideV] unsignedIntValue];
    dataSample.dataLength = [nv12Frame[kPropDataLength] unsignedIntValue];
    dataSample.rotation = [nv12Frame[kPropRotation] unsignedIntValue];
    // Feed the YUV frame into the RTC engine:
    // push the screen stream to the AliRtcVideosourceScreenShareType channel
    // (on RTC versions below 1.19, push to AliRtcVideosourceCameraLargeType instead)
    [self.engine pushExternalVideoFrame:dataSample sourceType:AliRtcVideosourceScreenShareType];
}
@end
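
One detail the controller above leaves out: the KVO observers added in setupUserDefaults must be removed before the controller is deallocated, otherwise the next cross-process write would notify a dangling observer. A minimal cleanup sketch:

- (void)dealloc {
    // Balance the addObserver: calls from setupUserDefaults
    [self.userDefaults removeObserver:self forKeyPath:kUserDefaultState context:KVOContext];
    [self.userDefaults removeObserver:self forKeyPath:kUserDefaultFrame context:KVOContext];
}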

About the screen-sharing series:

Thanks to everyone for your help and support.
Even more, I hope you'll bring up meaningful questions and suggestions; talking things through helps us grow together~
