Commit 010de02: Threading fix + new camera

rekrutik committed Sep 28, 2019
1 parent 2d5b16f

Showing 3 changed files with 115 additions and 20 deletions.
12 changes: 6 additions & 6 deletions framework/Source/GPUImageStillCamera.m
@@ -78,9 +78,9 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD
[self.captureSession beginConfiguration];

photoOutput = [[AVCaptureStillImageOutput alloc] init];

// Having a still photo input set to BGRA and video to YUV doesn't work well, so since I don't have YUV resizing for iPhone 4 yet, kick back to BGRA for that device
-// if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
+// if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
if (captureAsYUV && [GPUImageContext deviceSupportsRedTextures])
{
BOOL supportsFullYUVRange = NO;
@@ -113,7 +113,7 @@ - (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureD

[self.captureSession commitConfiguration];

-self.jpegCompressionQuality = 0.8;
+self.jpegCompressionQuality = 0.92;

return self;
}
@@ -297,7 +297,7 @@ - (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)fi
dispatch_semaphore_signal(frameRenderingSemaphore);
}

-block(dataForPNGFile, error);
+block(dataForPNGFile, error);
}];

return;
@@ -337,11 +337,11 @@ - (void)capturePhotoProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFi

[photoOutput captureStillImageAsynchronouslyFromConnection:[[photoOutput connections] objectAtIndex:0] completionHandler:^(CMSampleBufferRef imageSampleBuffer, NSError *error) {
dispatch_semaphore_wait(frameRenderingSemaphore, DISPATCH_TIME_FOREVER);
-if(imageSampleBuffer == NULL){
+if(imageSampleBuffer == NULL){
block(error);
return;
}

// For now, resize photos to fit within the max texture size of the GPU
CVImageBufferRef cameraFrame = CMSampleBufferGetImageBuffer(imageSampleBuffer);

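The quality bump above feeds straight into the JPEG capture path: jpegCompressionQuality is the factor the still camera applies when encoding the processed photo as JPEG. A minimal usage sketch against the still-camera API in this file (the finalFilter variable and the output path are illustrative):

    GPUImageStillCamera *stillCamera = [[GPUImageStillCamera alloc] init];
    stillCamera.jpegCompressionQuality = 0.92; // the new default set above; 1.0 is highest quality
    [stillCamera capturePhotoAsJPEGProcessedUpToFilter:finalFilter
                                 withCompletionHandler:^(NSData *processedJPEG, NSError *error) {
        if (error == nil)
        {
            // Persist the encoded JPEG; the destination path is illustrative.
            [processedJPEG writeToFile:@"/tmp/photo.jpg" atomically:YES];
        }
    }];
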
94 changes: 90 additions & 4 deletions framework/Source/GPUImageVideoCamera.m
@@ -248,11 +248,11 @@ - (id)initWithFrameRate:(NSUInteger)rate desiredSize:(CGSize)size cameraPosition
frameRenderingSemaphore = dispatch_semaphore_create(1);

_frameRate = 0; // This will not set frame rate unless this value gets set to 1 or above
-_runBenchmark = YES;
+_runBenchmark = NO;
capturePaused = NO;
outputRotation = kGPUImageNoRotation;
internalRotation = kGPUImageNoRotation;
-captureAsYUV = NO;
+captureAsYUV = YES;
_preferredConversion = kColorConversion709;
_inputCamera = nil;

@@ -295,8 +295,92 @@ - (id)initWithFrameRate:(NSUInteger)rate desiredSize:(CGSize)size cameraPosition

// Add the video frame output
videoOutput = [[AVCaptureVideoDataOutput alloc] init];
-[videoOutput setAlwaysDiscardsLateVideoFrames: NO];
-videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey: @(kCVPixelFormatType_32BGRA)};
+[videoOutput setAlwaysDiscardsLateVideoFrames: YES];

+if (captureAsYUV && [GPUImageContext supportsFastTextureUpload])
+{
+BOOL supportsFullYUVRange = NO;
+NSArray *supportedPixelFormats = videoOutput.availableVideoCVPixelFormatTypes;
+for (NSNumber *currentPixelFormat in supportedPixelFormats)
+{
+if ([currentPixelFormat intValue] == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
+{
+supportsFullYUVRange = YES;
+}
+}
+
+if (supportsFullYUVRange)
+{
+[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarFullRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
+isFullYUVRange = YES;
+}
+else
+{
+[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
+isFullYUVRange = NO;
+}
+}
+else
+{
+[videoOutput setVideoSettings:[NSDictionary dictionaryWithObject:[NSNumber numberWithInt:kCVPixelFormatType_32BGRA] forKey:(id)kCVPixelBufferPixelFormatTypeKey]];
+}
+
+runSynchronouslyOnVideoProcessingQueue(^{
+
+if (captureAsYUV)
+{
+[GPUImageContext useImageProcessingContext];
+// if ([GPUImageContext deviceSupportsRedTextures])
+// {
+// yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForRGFragmentShaderString];
+// }
+// else
+// {
+if (isFullYUVRange)
+{
+yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVFullRangeConversionForLAFragmentShaderString];
+}
+else
+{
+yuvConversionProgram = [[GPUImageContext sharedImageProcessingContext] programForVertexShaderString:kGPUImageVertexShaderString fragmentShaderString:kGPUImageYUVVideoRangeConversionForLAFragmentShaderString];
+}
+
+// }
+
+if (!yuvConversionProgram.initialized)
+{
+[yuvConversionProgram addAttribute:@"position"];
+[yuvConversionProgram addAttribute:@"inputTextureCoordinate"];
+
+if (![yuvConversionProgram link])
+{
+NSString *progLog = [yuvConversionProgram programLog];
+NSLog(@"Program link log: %@", progLog);
+NSString *fragLog = [yuvConversionProgram fragmentShaderLog];
+NSLog(@"Fragment shader compile log: %@", fragLog);
+NSString *vertLog = [yuvConversionProgram vertexShaderLog];
+NSLog(@"Vertex shader compile log: %@", vertLog);
+yuvConversionProgram = nil;
+NSAssert(NO, @"Filter shader link failed");
+}
+}
+
+yuvConversionPositionAttribute = [yuvConversionProgram attributeIndex:@"position"];
+yuvConversionTextureCoordinateAttribute = [yuvConversionProgram attributeIndex:@"inputTextureCoordinate"];
+yuvConversionLuminanceTextureUniform = [yuvConversionProgram uniformIndex:@"luminanceTexture"];
+yuvConversionChrominanceTextureUniform = [yuvConversionProgram uniformIndex:@"chrominanceTexture"];
+yuvConversionMatrixUniform = [yuvConversionProgram uniformIndex:@"colorConversionMatrix"];
+
+[GPUImageContext setActiveShaderProgram:yuvConversionProgram];
+
+glEnableVertexAttribArray(yuvConversionPositionAttribute);
+glEnableVertexAttribArray(yuvConversionTextureCoordinateAttribute);
+}
+});

if (!_inputCamera) {
return nil;
@@ -909,6 +993,8 @@ - (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
{
CFAbsoluteTime currentFrameTime = (CFAbsoluteTimeGetCurrent() - startTime);
totalFrameTimeDuringCapture += currentFrameTime;
NSLog(@"Average frame time : %f ms", [self averageFrameDurationDuringCapture]);
NSLog(@"Current frame time : %f ms", 1000.0 * currentFrameTime);
}
}
}
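
Two pieces of the block above have to stay in agreement: the pixel format requested from AVFoundation and the YUV conversion shader chosen afterwards, because full-range and video-range 4:2:0 data need different conversion matrices. Reduced to its core, the format negotiation looks roughly like this (a standalone sketch; the helper name is illustrative):

    #import <AVFoundation/AVFoundation.h>

    static void configureYUVPixelFormat(AVCaptureVideoDataOutput *videoOutput)
    {
        // Prefer full-range YUV when the hardware offers it, else fall back to video-range.
        OSType chosenFormat = kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange;
        for (NSNumber *format in videoOutput.availableVideoCVPixelFormatTypes)
        {
            if (format.intValue == kCVPixelFormatType_420YpCbCr8BiPlanarFullRange)
            {
                chosenFormat = kCVPixelFormatType_420YpCbCr8BiPlanarFullRange;
                break;
            }
        }
        // The conversion shader selected later must match this range choice.
        videoOutput.videoSettings = @{(id)kCVPixelBufferPixelFormatTypeKey : @(chosenFormat)};
    }
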
29 changes: 19 additions & 10 deletions framework/Source/iOS/GPUImageView.m
@@ -16,6 +16,7 @@ @interface GPUImageView ()
GLProgram *displayProgram;
GLint displayPositionAttribute, displayTextureCoordinateAttribute;
GLint displayInputTextureUniform;
+CAEAGLLayer *_layer;

CGSize inputImageSize;
GLfloat imageVertices[8];
@@ -25,6 +26,7 @@ @interface GPUImageView ()
}

@property (assign, nonatomic) NSUInteger aspectRatio;
+@property (assign, atomic) CGRect threadSafeBounds;

// Initialization and teardown
- (void)commonInit;
@@ -91,6 +93,7 @@ - (void)commonInit;
CAEAGLLayer *eaglLayer = (CAEAGLLayer *)self.layer;
eaglLayer.opaque = YES;
eaglLayer.drawableProperties = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:NO], kEAGLDrawablePropertyRetainedBacking, kEAGLColorFormatRGBA8, kEAGLDrawablePropertyColorFormat, nil];
+_layer = eaglLayer;

self.enabled = YES;

@@ -134,13 +137,13 @@ - (void)layoutSubviews {
[super layoutSubviews];

// The frame buffer needs to be trashed and re-created when the view size changes.
-if (!CGSizeEqualToSize(self.bounds.size, boundsSizeAtFrameBufferEpoch) &&
-!CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {
+if (!CGSizeEqualToSize(self.threadSafeBounds.size, boundsSizeAtFrameBufferEpoch) &&
+!CGSizeEqualToSize(self.threadSafeBounds.size, CGSizeZero)) {
runSynchronouslyOnVideoProcessingQueue(^{
[self destroyDisplayFramebuffer];
[self createDisplayFramebuffer];
});
-} else if (!CGSizeEqualToSize(self.bounds.size, CGSizeZero)) {
+} else if (!CGSizeEqualToSize(self.threadSafeBounds.size, CGSizeZero)) {
[self recalculateViewGeometry];
}
}
@@ -165,7 +168,7 @@ - (void)createDisplayFramebuffer;
glGenRenderbuffers(1, &displayRenderbuffer);
glBindRenderbuffer(GL_RENDERBUFFER, displayRenderbuffer);

-[[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable:(CAEAGLLayer*)self.layer];
+[[[GPUImageContext sharedImageProcessingContext] context] renderbufferStorage:GL_RENDERBUFFER fromDrawable: _layer];

GLint backingWidth, backingHeight;

@@ -186,8 +189,8 @@
glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, displayRenderbuffer);

__unused GLuint framebufferCreationStatus = glCheckFramebufferStatus(GL_FRAMEBUFFER);
-NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation for display of size: %f, %f", self.bounds.size.width, self.bounds.size.height);
-boundsSizeAtFrameBufferEpoch = self.bounds.size;
+NSAssert(framebufferCreationStatus == GL_FRAMEBUFFER_COMPLETE, @"Failure with display framebuffer generation for display of size: %f, %f", self.threadSafeBounds.size.width, self.threadSafeBounds.size.height);
+boundsSizeAtFrameBufferEpoch = self.threadSafeBounds.size;

[self recalculateViewGeometry];
}
@@ -235,12 +238,12 @@ - (void)recalculateViewGeometry;
runSynchronouslyOnVideoProcessingQueue(^{
CGFloat heightScaling, widthScaling;

-CGSize currentViewSize = self.bounds.size;
+CGSize currentViewSize = self.threadSafeBounds.size;

// CGFloat imageAspectRatio = inputImageSize.width / inputImageSize.height;
// CGFloat viewAspectRatio = currentViewSize.width / currentViewSize.height;

-CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.bounds);
+CGRect insetRect = AVMakeRectWithAspectRatioInsideRect(inputImageSize, self.threadSafeBounds);

switch(_fillMode)
{
@@ -432,15 +435,21 @@ - (CGSize)maximumOutputSize;
{
if ([self respondsToSelector:@selector(setContentScaleFactor:)])
{
-CGSize pointSize = self.bounds.size;
+CGSize pointSize = self.threadSafeBounds.size;
return CGSizeMake(self.contentScaleFactor * pointSize.width, self.contentScaleFactor * pointSize.height);
}
else
{
-return self.bounds.size;
+return self.threadSafeBounds.size;
}
}

+- (void)setBounds:(CGRect)bounds;
+{
+[super setBounds:bounds];
+self.threadSafeBounds = bounds;
+}

- (void)endProcessing
{
}
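
The threadSafeBounds substitution above is the threading fix named in the commit title: UIView's bounds may only be touched on the main thread, yet createDisplayFramebuffer and recalculateViewGeometry run on the video processing queue (see the runSynchronouslyOnVideoProcessingQueue calls). Overriding setBounds: keeps an atomic copy that background code can read without hopping queues. The pattern in isolation (a sketch; the class name is hypothetical):

    #import <UIKit/UIKit.h>

    @interface ThreadSafeBoundsView : UIView
    // Atomic, so the synthesized accessors hand back a tear-free CGRect
    // even when read from a background queue.
    @property (assign, atomic) CGRect threadSafeBounds;
    @end

    @implementation ThreadSafeBoundsView
    - (void)setBounds:(CGRect)bounds
    {
        [super setBounds:bounds];       // UIKit call: main thread only
        self.threadSafeBounds = bounds; // cached copy for off-main-thread readers
    }
    @end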
