Created
April 18, 2013 21:37
-
-
Save oliland/5416438 to your computer and use it in GitHub Desktop.
An example of using CIFilters to mess with UIViews on iOS.
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Demonstrates applying a Core Image filter (CIGaussianBlur) to a UIView's rendered content.
    // Great tutorial: http://www.raywenderlich.com/22167/beginning-core-image-in-ios-6
    // Official docs: https://developer.apple.com/library/ios/#documentation/GraphicsImaging/Conceptual/CoreImaging/ci_intro/ci_intro.html#//apple_ref/doc/uid/TP30001185-CH1-TPXREF101
    // Alt-click on function names for more!

    // Make any old label, with our frame set to the view so we know it's there.
    UILabel *label = [[UILabel alloc] initWithFrame:self.view.frame];
    label.text = @"HELLO";
    label.textAlignment = NSTextAlignmentCenter;
    // Uncomment to render the unfiltered label.
    // [self.view addSubview:label];

    // To apply a filter to any UIView, we first need to get a UIImage of it.
    // To do this, we create an offscreen "canvas" to draw our label into.
    // Our "canvas" in this case is a UIGraphicsImageContext, sized to the label.
    //
    // FIX: use the ...WithOptions variant. Plain UIGraphicsBeginImageContext() renders
    // at 1x scale, so on Retina displays the snapshot is pixelated before we even blur.
    // Passing a scale of 0.0 means "use the device's main screen scale"; NO means the
    // context is not opaque (we keep the label's transparent background).
    UIGraphicsBeginImageContextWithOptions(label.bounds.size, NO, 0.0);

    // We then render the label's CALayer into the canvas.
    // To access label.layer, we need to import QuartzCore at the top of the file,
    // because QuartzCore is responsible for the Core Animation classes.
    // Why can't we just get an image from the layer? Because one of iOS's design
    // principles is that "everything is animatable" - every UIView is built on a CALayer.
    // Alt-click "renderInContext" for more.
    [label.layer renderInContext:UIGraphicsGetCurrentContext()];

    // Having drawn the label on the canvas, we take a UIImage "screenshot" of it.
    UIImage *image = UIGraphicsGetImageFromCurrentImageContext();

    // Now we're finished with our canvas, free it up.
    // We hold a strong reference to the screenshot, so ending the context is safe.
    UIGraphicsEndImageContext();

    // It's good to make sure we have an image.
    // NOTE: assert() compiles out under NDEBUG, so release builds would continue
    // silently with a nil image here - fine for a demo, not for production.
    assert(image);

    // Log the list of "blur" filters available on the device; this is how we
    // discover that "CIGaussianBlur" exists. It's also worth playing with in the
    // debugger: p [CIFilter filterNamesInCategory:kCICategoryBlur]
    // What other categories can you find?
    NSLog(@"%@", [CIFilter filterNamesInCategory:kCICategoryBlur]);

    // Now create the filter. Logging a CIFilter's attributes prints everything
    // about the filter and the values each input key accepts.
    CIFilter *blurFilter = [CIFilter filterWithName:@"CIGaussianBlur"];
    NSLog(@"%@", [blurFilter attributes]);
    [blurFilter setDefaults];

    // This part is weird. Our UIImage does have a CIImage property, but a UIImage
    // created from a bitmap context is CGImage-backed, so its CIImage is nil.
    // Option-click on "CIImage" for the details.
    assert(image.CIImage == 0);

    // That's OK - we can make a CIImage from the UIImage's CGImage instead. Hooray!
    CIImage *imageToBlur = [CIImage imageWithCGImage:image.CGImage];
    assert(imageToBlur);

    // iOS provides a nice constant for the input-image key...
    [blurFilter setValue:imageToBlur forKey:kCIInputImageKey];
    // ...but not for anything else, so "inputRadius" is a raw string
    // (its name and accepted range come from the attributes log above).
    [blurFilter setValue:@3.0 forKey:@"inputRadius"];

    // Now get the output image. Hope it works!
    CIImage *outputImage = blurFilter.outputImage;
    assert(outputImage);

    UIImageView *blurredImageView = [[UIImageView alloc] initWithFrame:self.view.frame];
    // UIImage imageWithCIImage was added recently. Convenient!
    // NOTE(review): CIImage-backed UIImages are not honored by every UIKit drawing
    // path; if the view renders blank, the robust route is to render through a
    // CIContext (createCGImage:fromRect:) and wrap the resulting CGImage instead.
    blurredImageView.image = [UIImage imageWithCIImage:outputImage];
    [self.view addSubview:blurredImageView];

    // Apple's documentation gives a pretty good explanation of why this is so
    // involved: on iOS, image filters run on the GPU and can be adjusted in real
    // time. Some food for thought: can you create a slider to dynamically change
    // the blur radius on the UILabel?
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment