#ios #video-streaming #avfoundation #gpuimage #red5pro
Question:
Hi, I am rotating frames and applying image filters with GPUImage on a live video stream. The processing takes longer than expected, which causes the iPhone to overheat. Can someone help me optimize my code? Below is the code I am currently using:
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    // Return if the sample buffer is invalid
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return;
    }
    // Get a CGImage from the sample buffer
    CGImageRef cgImageFromBuffer = [self cgImageFromSampleBuffer:sampleBuffer];
    if (!cgImageFromBuffer) {
        return;
    }
    // Perform the rotation we need
    UIImage *rotatedPlainImage = [UIUtils rotateImage:[UIImage imageWithCGImage:cgImageFromBuffer] byDegree:90];
    if (rotatedPlainImage == nil) {
        CFRelease(cgImageFromBuffer);
        return;
    }
    // Apply the image filter to the CGImage using GPUImage
    CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
    // Convert back into a CMSampleBuffer
    CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];
    // Pass to Red5Pro's custom encoder for the live stream
    [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
    // Release the objects we own
    CFRelease(outputBuffer);
    CFRelease(filteredCGImage);
    CFRelease(cgImageFromBuffer);
}
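A likely cost in the method above is that every frame makes a CPU round-trip: CVPixelBuffer to CGImage to UIImage (rotated on the CPU) and back. GPUImage can instead rotate on the GPU as part of a live-camera pipeline. This is a minimal sketch of that idea under assumed setup, not code from the question:

// Sketch: an all-GPU pipeline; GPUImageVideoCamera applies orientation on the GPU,
// so no per-frame UIImage rotation is needed.
GPUImageVideoCamera *camera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720
                                                                  cameraPosition:AVCaptureDevicePositionBack];
camera.outputImageOrientation = UIInterfaceOrientationPortrait; // replaces the 90-degree CPU rotate
[camera addTarget:self.selectedPublishFilter];
[camera startCameraCapture];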
- (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer // Create a CGImageRef from sample buffer data
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get information about the image
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    /* CVBufferRelease(imageBuffer); */ // do not call this -- the buffer is owned by the sample buffer!
    return newImage;
}
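Note that the bitmap context above (32-bit little-endian, premultiplied-first alpha) only matches 32BGRA pixel buffers, so the capture output must be configured to deliver that format. A minimal sketch of the assumed setup; `captureSession` and `captureQueue` are hypothetical names not in the question:

// Assumed capture configuration: deliver frames as 32BGRA so the CGBitmapContext flags above match.
AVCaptureVideoDataOutput *videoOutput = [[AVCaptureVideoDataOutput alloc] init];
videoOutput.videoSettings = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA) };
[videoOutput setSampleBufferDelegate:self queue:captureQueue];
if ([captureSession canAddOutput:videoOutput]) {
    [captureSession addOutput:videoOutput];
}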
- (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CIImage *nm = [CIImage imageWithCGImage:imageRef];
    CVPixelBufferRef pixelBuffer;
    CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)nm.extent.size.width, (size_t)nm.extent.size.height, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    // Note: creating a CIContext per frame is expensive; Answer 1 below caches one instead.
    CIContext *ciContext = [CIContext contextWithOptions:nil];
    [ciContext render:nm toCVPixelBuffer:pixelBuffer];
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    CMSampleBufferRef oBuf;
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);
    return oBuf;
}
Answer #1:
I used OpenGL ES 2.0 and the Accelerate framework: Accelerate (vImage) to rotate the CMSampleBuffer, and an OpenGL-backed CIContext for fast rendering of the CIImage into a CVPixelBuffer. Without a filter a frame now takes 3-8 milliseconds; with filters it takes 7-21 milliseconds.
@implementation ColorsVideoSource {
    CIContext *coreImageContext;
}
- (instancetype)init {
    if ((self = [super init]) != nil) {
        EAGLContext *glContext = [[EAGLContext alloc] initWithAPI:kEAGLRenderingAPIOpenGLES2];
        GLKView *glView = [[GLKView alloc] initWithFrame:CGRectMake(0.0, 0.0, 360.0, 480.0) context:glContext];
        coreImageContext = [CIContext contextWithEAGLContext:glView.context];
    }
    return self;
}
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    if (!CMSampleBufferIsValid(sampleBuffer)) {
        return;
    }
    CVPixelBufferRef rotateBuffer = [self correctBufferOrientation:sampleBuffer];
    CGImageRef cgImageFromBuffer = [self cgImageFromImageBuffer:rotateBuffer];
    if (!cgImageFromBuffer) {
        CFRelease(rotateBuffer); // don't leak the rotated buffer on early return
        return;
    }
    UIImage *rotatedPlainImage = [UIImage imageWithCGImage:cgImageFromBuffer];
    if (rotatedPlainImage == nil) {
        CFRelease(rotateBuffer);
        CFRelease(cgImageFromBuffer);
        return;
    }
    if (_currentFilterType == SWPublisherFilterNone) {
        if (_needPreviewImage) {
            _previewImage = rotatedPlainImage;
        }
        CMSampleTimingInfo sampleTime = {
            .duration = CMSampleBufferGetDuration(sampleBuffer),
            .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
            .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
        };
        CMVideoFormatDescriptionRef videoInfo = NULL;
        CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, &videoInfo);
        CMSampleBufferRef oBuf;
        CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, rotateBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
        CFRelease(videoInfo);
        if (!self.pauseEncoding) {
            @try {
                [self.encoder encodeFrame:oBuf ofType:r5_media_type_video_custom];
            } @catch (NSException *exception) {
                NSLog(@"Encoder error: %@", exception);
            }
        }
        CFRelease(oBuf);
    }
    else {
        CGImageRef filteredCGImage = [self.selectedPublishFilter newCGImageByFilteringCGImage:rotatedPlainImage.CGImage];
        if (_needPreviewImage) {
            _previewImage = [UIImage imageWithCGImage:filteredCGImage];
        }
        CMSampleBufferRef outputBuffer = [self getSampleBufferUsingCIByCGInput:filteredCGImage andProvidedSampleBuffer:sampleBuffer];
        if (!self.pauseEncoding) {
            @try {
                [self.encoder encodeFrame:outputBuffer ofType:r5_media_type_video_custom];
            } @catch (NSException *exception) {
                NSLog(@"Encoder error: %@", exception);
            }
        }
        CFRelease(outputBuffer);
        CFRelease(filteredCGImage);
    }
    CFRelease(rotateBuffer);
    CFRelease(cgImageFromBuffer);
}
#pragma mark - Methods Refactored GPUImage - Devanshu
- (CVPixelBufferRef)correctBufferOrientation:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    size_t currSize = bytesPerRow * height * sizeof(unsigned char);
    size_t bytesPerRowOut = 4 * height * sizeof(unsigned char); // after a 90-degree rotation, the output row spans the input height
    void *srcBuff = CVPixelBufferGetBaseAddress(imageBuffer);
    /* rotationConstant:
     * 0 -- rotate 0 degrees (simply copy the data from src to dest)
     * 1 -- rotate 90 degrees counterclockwise
     * 2 -- rotate 180 degrees
     * 3 -- rotate 270 degrees counterclockwise
     */
    uint8_t rotationConstant = 3;
    unsigned char *dstBuff = (unsigned char *)malloc(currSize);
    vImage_Buffer inbuff = {srcBuff, height, width, bytesPerRow};
    vImage_Buffer outbuff = {dstBuff, width, height, bytesPerRowOut};
    uint8_t bgColor[4] = {0, 0, 0, 0};
    vImage_Error err = vImageRotate90_ARGB8888(&inbuff, &outbuff, rotationConstant, bgColor, 0);
    if (err != kvImageNoError) NSLog(@"%ld", err);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    CVPixelBufferRef rotatedBuffer = NULL;
    CVPixelBufferCreateWithBytes(NULL,
                                 height,   // rotated width  = original height
                                 width,    // rotated height = original width
                                 kCVPixelFormatType_32BGRA,
                                 outbuff.data,
                                 bytesPerRowOut,
                                 freePixelBufferDataAfterRelease,
                                 NULL,
                                 NULL,
                                 &rotatedBuffer);
    return rotatedBuffer;
}
void freePixelBufferDataAfterRelease(void *releaseRefCon, const void *baseAddress)
{
    // Free the memory we malloc'ed for the vImage rotation
    free((void *)baseAddress);
}
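The malloc-per-frame above works, but it churns memory at capture frame rate. A pool-based variant may reduce that churn; this is a sketch of an assumed alternative, not part of the original answer (create the pool once, e.g. in an ivar, not per frame):

// Sketch: reuse pool-backed destination buffers instead of malloc'ing per frame.
// The pool may align rows differently, so read bytes-per-row back from the buffer.
NSDictionary *poolAttrs = @{ (id)kCVPixelBufferPixelFormatTypeKey : @(kCVPixelFormatType_32BGRA),
                             (id)kCVPixelBufferWidthKey  : @(height), // rotated: dimensions swap
                             (id)kCVPixelBufferHeightKey : @(width) };
CVPixelBufferPoolRef bufferPool = NULL; // keep in an ivar, created once
CVPixelBufferPoolCreate(kCFAllocatorDefault, NULL, (__bridge CFDictionaryRef)poolAttrs, &bufferPool);
CVPixelBufferRef rotatedBuffer = NULL;
CVPixelBufferPoolCreatePixelBuffer(kCFAllocatorDefault, bufferPool, &rotatedBuffer);
CVPixelBufferLockBaseAddress(rotatedBuffer, 0);
vImage_Buffer outbuff = {CVPixelBufferGetBaseAddress(rotatedBuffer), width, height,
                         CVPixelBufferGetBytesPerRow(rotatedBuffer)};
// ... run vImageRotate90_ARGB8888 into outbuff as above, then unlock ...
CVPixelBufferUnlockBaseAddress(rotatedBuffer, 0);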
- (CGImageRef)cgImageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer // Create a CGImageRef from sample buffer data
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    return [self cgImageFromImageBuffer:imageBuffer];
}
- (CGImageRef)cgImageFromImageBuffer:(CVImageBufferRef)imageBuffer // Create a CGImageRef from image buffer data
{
    CVPixelBufferLockBaseAddress(imageBuffer, 0); // Lock the image buffer
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0); // Get information about the image
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    CGImageRef newImage = CGBitmapContextCreateImage(newContext);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    return newImage;
}
- (CMSampleBufferRef)getSampleBufferUsingCIByCGInput:(CGImageRef)imageRef andProvidedSampleBuffer:(CMSampleBufferRef)sampleBuffer {
    CIImage *theCoreImage = [CIImage imageWithCGImage:imageRef];
    CVPixelBufferRef pixelBuffer;
    CVPixelBufferCreate(kCFAllocatorSystemDefault, (size_t)theCoreImage.extent.size.width, (size_t)theCoreImage.extent.size.height, kCVPixelFormatType_32BGRA, NULL, &pixelBuffer);
    CVPixelBufferLockBaseAddress(pixelBuffer, 0);
    [coreImageContext render:theCoreImage toCVPixelBuffer:pixelBuffer]; // the GL-backed context cached in init
    CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
    CMSampleTimingInfo sampleTime = {
        .duration = CMSampleBufferGetDuration(sampleBuffer),
        .presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
        .decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
    };
    CMVideoFormatDescriptionRef videoInfo = NULL;
    CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, &videoInfo);
    CMSampleBufferRef oBuf;
    CMSampleBufferCreateForImageBuffer(kCFAllocatorDefault, pixelBuffer, true, NULL, NULL, videoInfo, &sampleTime, &oBuf);
    CVPixelBufferRelease(pixelBuffer);
    CFRelease(videoInfo);
    return oBuf;
}
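One further possible squeeze: the CVPixelBufferCreate call above passes NULL attributes. When a GPU-backed CIContext renders into the buffer, requesting IOSurface backing can avoid an extra copy between GPU and CPU memory. A sketch of that assumption, not from the original answer:

// Sketch: IOSurface-backed destination buffer for GPU rendering via CIContext.
NSDictionary *attrs = @{ (id)kCVPixelBufferIOSurfacePropertiesKey : @{} };
CVPixelBufferRef pixelBuffer = NULL;
CVPixelBufferCreate(kCFAllocatorDefault, width, height, kCVPixelFormatType_32BGRA,
                    (__bridge CFDictionaryRef)attrs, &pixelBuffer);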
Comments:
1. Doesn't this create memory problems when you use it while recording video?
2. No, I didn't hit any; you can try it yourself, just release every object that is no longer needed, dear.
Answer #2:
NSLog(@"start rotate");
CFAbsoluteTime t0 = CFAbsoluteTimeGetCurrent();
CVPixelBufferRef pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
CIImage *ciimage = [CIImage imageWithCVPixelBuffer:pixelBuffer];
CFAbsoluteTime t1 = CFAbsoluteTimeGetCurrent();
NSLog(@"dur to ciimage: %@", @(t1-t0));
CVPixelBufferLockBaseAddress(pixelBuffer, 0);
CIImage *newImage = [ciimage imageByApplyingCGOrientation:kCGImagePropertyOrientationRight];
CFAbsoluteTime t2 = CFAbsoluteTimeGetCurrent();
NSLog(@"dur rotate ciimage: %@", @(t2-t1));
CVPixelBufferRef newPixcelBuffer = nil;
size_t width = CVPixelBufferGetWidth(pixelBuffer);
size_t height = CVPixelBufferGetHeight(pixelBuffer);
CVPixelBufferCreate(kCFAllocatorDefault, height, width, kCVPixelFormatType_32BGRA, nil, amp;newPixcelBuffer);
CFAbsoluteTime t3 = CFAbsoluteTimeGetCurrent();
NSLog(@"dur alloc pixel: %@", @(t3-t2));
[_ciContext render:newImage toCVPixelBuffer:newPixcelBuffer];
CVPixelBufferUnlockBaseAddress(pixelBuffer, 0);
CFAbsoluteTime t4 = CFAbsoluteTimeGetCurrent();
NSLog(@"dur render pixel: %@", @(t4-t3));
//
CMSampleTimingInfo sampleTimingInfo = {
.duration = CMSampleBufferGetDuration(sampleBuffer),
.presentationTimeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer),
.decodeTimeStamp = CMSampleBufferGetDecodeTimeStamp(sampleBuffer)
};
//
CMVideoFormatDescriptionRef videoInfo = nil;
CMVideoFormatDescriptionCreateForImageBuffer(kCFAllocatorDefault, newPixcelBuffer, amp;videoInfo);
CMSampleBufferRef newSampleBuffer = nil;
CMSampleBufferCreateForImageBuffer(kCFAllocatorMalloc, newPixcelBuffer, true, nil, nil, videoInfo, amp;sampleTimingInfo, amp;newSampleBuffer);
CFAbsoluteTime t5 = CFAbsoluteTimeGetCurrent();
NSLog(@"dur create CMSample: %@", @(t5-t4));
// release
CVPixelBufferRelease(newPixcelBuffer);
CFAbsoluteTime t6 = CFAbsoluteTimeGetCurrent();
NSLog(@"dur end rotate: %@", @(t6-t0));
return newSampleBuffer;
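The snippet above uses an `_ciContext` ivar without showing its creation, and creating a CIContext per frame would dominate the timings it logs. A minimal sketch of an assumed one-time, Metal-backed setup (hypothetical, not from the answer):

// Assumed one-time setup for the _ciContext ivar, e.g. in init; requires Metal (iOS 9+).
#import <Metal/Metal.h>
id<MTLDevice> device = MTLCreateSystemDefaultDevice();
_ciContext = [CIContext contextWithMTLDevice:device];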
Comments:
1. Please explain the lines of your code so that other users can understand their functionality. Thank you!