iOS Screen Sharing with a Real-Time Audio/Video SDK — Part 4
1.7 Utility Class
//
//  RongRTCBufferUtil.m
//  SealRTC
//
//  Created by Sun on 2020/5/8.
//  Copyright © 2020 RongCloud. All rights reserved.
//

#import "RongRTCBufferUtil.h"

// Be sure to release the objects these methods create. Some are released
// inside the method; others must be released by the caller, otherwise the
// class will leak memory heavily.
@implementation RongRTCBufferUtil

+ (UIImage *)imageFromBuffer:(CMSampleBufferRef)buffer {
    CVPixelBufferRef pixelBuffer = (CVPixelBufferRef)CMSampleBufferGetImageBuffer(buffer);
    CIImage *ciImage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
    CIContext *temporaryContext = [CIContext contextWithOptions:nil];
    CGImageRef videoImage = [temporaryContext createCGImage:ciImage
                                                   fromRect:CGRectMake(0, 0, CVPixelBufferGetWidth(pixelBuffer), CVPixelBufferGetHeight(pixelBuffer))];
    UIImage *image = [UIImage imageWithCGImage:videoImage];
    CGImageRelease(videoImage);
    return image;
}

+ (UIImage *)compressImage:(UIImage *)image newWidth:(CGFloat)newImageWidth {
    if (!image) return nil;
    float imageWidth = image.size.width;
    float imageHeight = image.size.height;
    float width = newImageWidth;
    float height = image.size.height / (image.size.width / width);
    float widthScale = imageWidth / width;
    float heightScale = imageHeight / height;
    UIGraphicsBeginImageContext(CGSizeMake(width, height));
    if (widthScale > heightScale) {
        [image drawInRect:CGRectMake(0, 0, imageWidth / heightScale, height)];
    } else {
        [image drawInRect:CGRectMake(0, 0, width, imageHeight / widthScale)];
    }
    UIImage *newImage = UIGraphicsGetImageFromCurrentImageContext();
    UIGraphicsEndImageContext();
    return newImage;
}

+ (CVPixelBufferRef)CVPixelBufferRefFromUiImage:(UIImage *)img {
    CGSize size = img.size;
    CGImageRef image = [img CGImage];
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey,
                             nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault,
                                          size.width,
                                          size.height,
                                          kCVPixelFormatType_32ARGB,
                                          (__bridge CFDictionaryRef)options,
                                          &pxbuffer);
    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4 * size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);
    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);
    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);
    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    // Returned with a +1 retain count; the caller must call CVPixelBufferRelease().
    return pxbuffer;
}

+ (CMSampleBufferRef)sampleBufferFromPixbuffer:(CVPixelBufferRef)pixbuffer time:(CMTime)time {
    CMSampleBufferRef sampleBuffer = NULL;
    // Build the video format description for this pixel buffer
    CMVideoFormatDescriptionRef videoInfo = NULL;
    OSStatus result = CMVideoFormatDescriptionCreateForImageBuffer(NULL, pixbuffer, &videoInfo);
    CMTime currentTime = time;
    CMSampleTimingInfo timing = {currentTime, currentTime, kCMTimeInvalid};
    result = CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixbuffer, true, NULL, NULL, videoInfo, &timing, &sampleBuffer);
    CFRelease(videoInfo);
    // Returned with a +1 retain count; the caller must call CFRelease().
    return sampleBuffer;
}

+ (size_t)getCMTimeSize {
    size_t size = sizeof(CMTime);
    return size;
}

@end
All the conversions in this utility class run on the CPU. When converting CMSampleBufferRef to UIImage, UIImage to CVPixelBufferRef, CVPixelBufferRef to CMSampleBufferRef, or when cropping images, take care to release each object as soon as you are done with it; otherwise memory leaks accumulate quickly.
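As a minimal caller-side sketch (srcSampleBuffer here is a stand-in for whatever buffer ReplayKit delivered): the returned UIImage objects are autoreleased, but the CVPixelBufferRef and CMSampleBufferRef come back with a +1 retain count and must be released explicitly:

CMTime pts = CMSampleBufferGetPresentationTimeStamp(srcSampleBuffer);
UIImage *image  = [RongRTCBufferUtil imageFromBuffer:srcSampleBuffer];
UIImage *scaled = [RongRTCBufferUtil compressImage:image newWidth:540];
CVPixelBufferRef pixelBuffer = [RongRTCBufferUtil CVPixelBufferRefFromUiImage:scaled];
CMSampleBufferRef outBuffer  = [RongRTCBufferUtil sampleBufferFromPixbuffer:pixelBuffer time:pts];
// ... use outBuffer (encode it, or hand it to the socket) ...
CVPixelBufferRelease(pixelBuffer); // +1 from CVPixelBufferCreate
CFRelease(outBuffer);              // +1 from CMSampleBufferCreateForImageBuffer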
2. Sending Video
2.1 Preparation
Using RongCloud's RongRTCLib requires an AppKey, which you can obtain from the official website (https://www.rongcloud.cn/). With the AppKey you obtain a token, connect to IM with it, and join the RTC room once the connection succeeds. This is the preparation stage for sending the screen share.
- (void)broadcastStartedWithSetupInfo:(NSDictionary<NSString *, NSObject *> *)setupInfo {
    // User has requested to start the broadcast. Setup info from the UI extension can be supplied but is optional.
    // Fill in your AppKey
    self.appKey = @"";
    // Fill in the user's token
    self.token = @"";
    // Specify the room ID
    self.roomId = @"123456";
    [[RCIMClient sharedRCIMClient] initWithAppKey:self.appKey];
    [[RCIMClient sharedRCIMClient] setLogLevel:RC_Log_Level_Verbose];
    // Connect to IM
    [[RCIMClient sharedRCIMClient] connectWithToken:self.token
                                           dbOpened:^(RCDBErrorCode code) {
        NSLog(@"dbOpened: %zd", code);
    } success:^(NSString *userId) {
        NSLog(@"connectWithToken success userId: %@", userId);
        // Join the RTC room
        [[RCRTCEngine sharedInstance] joinRoom:self.roomId
                                    completion:^(RCRTCRoom * _Nullable room, RCRTCCode code) {
            self.room = room;
            self.room.delegate = self;
            [self publishScreenStream];
        }];
    } error:^(RCConnectErrorCode errorCode) {
        NSLog(@"ERROR status: %zd", errorCode);
    }];
}
The above is the full flow of connecting to IM and joining an RTC room. It also includes the call that publishes the custom video stream, [self publishScreenStream];, which may only be made after joining the room succeeds.
- (void)publishScreenStream {
    RongRTCStreamParams *param = [[RongRTCStreamParams alloc] init];
    param.videoSizePreset = RongRTCVideoSizePreset1280x720;
    self.videoOutputStream = [[RongRTCAVOutputStream alloc] initWithParameters:param tag:@"RongRTCScreenVideo"];
    [self.room publishAVStream:self.videoOutputStream extra:@"" completion:^(BOOL isSuccess, RongRTCCode desc) {
        if (isSuccess) {
            NSLog(@"Publishing the custom stream succeeded");
        }
    }];
}
Simply define a custom RongRTCAVOutputStream and use that stream to send the screen-share data.
2.2 Start Sending Screen-Share Data
We have now connected to RongCloud IM, joined the RTC room, and defined a custom stream for the screen share. Next, how do we publish data on this stream?
- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    switch (sampleBufferType) {
        case RPSampleBufferTypeVideo:
            // Handle video sample buffer
            [self.videoOutputStream write:sampleBuffer error:nil];
            break;
        case RPSampleBufferTypeAudioApp:
            // Handle audio sample buffer for app audio
            break;
        case RPSampleBufferTypeAudioMic:
            // Handle audio sample buffer for mic audio
            break;
        default:
            break;
    }
}
When we receive the buffers the system reports, we call the write method of RongRTCAVOutputStream to send each sampleBuffer to the remote end. With that, the screen-share data is on its way:
[self.videoOutputStream write:sampleBuffer error:nil];
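The call above passes nil for the out-error. As a hedged sketch, assuming write:error: follows the standard Cocoa out-error convention, you can also surface failures:

NSError *error = nil;
[self.videoOutputStream write:sampleBuffer error:&error];
if (error) {
    NSLog(@"Failed to write screen-share buffer: %@", error);
}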
RongCloud's core flow is exactly what we covered above: connect to IM, join a room, publish a custom stream, then push each sampleBuffer out through the custom stream's write method.
Whether the screen video comes from ReplayKit directly or is shuttled between processes over a socket, everything ultimately serves that final write call.
Summary
Extension memory is limited to 50 MB at most, so take extra care to release memory promptly when processing data in the extension (see the autoreleasepool sketch after this list);
If VideoToolbox keeps failing to decode in the background, simply restart the VideoToolbox session; this step is reflected in the code earlier in this series (a sketch also follows this list);
If you do not need to pass the extension's data to the main app, you can publish the stream directly from the extension via RongRTCLib. The drawback is that the user publishing the custom stream in the extension is not the same user as in the main app, which is exactly the problem that passing the data to the main app over a socket solves;
If the main app needs the screen-share data for its own processing, first send the stream to the main app over the socket, then publish it from the main app via RongRTCLib.
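For the memory cap, a common mitigation (a general sketch, not specific to RongRTCLib) is to wrap per-frame work in an @autoreleasepool so intermediate images and buffers are drained on every frame:

- (void)processSampleBuffer:(CMSampleBufferRef)sampleBuffer withType:(RPSampleBufferType)sampleBufferType {
    @autoreleasepool {
        // Intermediate UIImage/CIImage objects are released here each frame
        // instead of piling up until the extension hits the 50 MB cap.
        [self.videoOutputStream write:sampleBuffer error:nil];
    }
}

And for the VideoToolbox point, "restarting" usually means invalidating the failed session and recreating it on the next frame. A hedged sketch with illustrative names (self.decodeSession is hypothetical):

if (status == kVTInvalidSessionErr || status == kVTVideoDecoderMalfunctionErr) {
    VTDecompressionSessionInvalidate(self.decodeSession);
    CFRelease(self.decodeSession);
    self.decodeSession = NULL; // recreated before decoding the next frame
}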
Finally, the Demo is attached.