Here we need two frameworks: CoreImage and QuartzCore.
Let's start.
.h
#import <UIKit/UIKit.h>
/// View controller that displays a photo and, on a button tap, runs
/// Core Image face detection and overlays a rectangle on each face.
@interface ViewController : UIViewController

/// Button that triggers face detection.
/// FIX: `retain` is a pre-ARC attribute; under ARC the equivalent
/// ownership qualifier is `strong`.
@property (strong, nonatomic) IBOutlet UIButton *btnScanFace;

/// Action wired to `btnScanFace`; detects faces in `imgView`'s image.
- (IBAction)btnScanFace:(id)sender;

/// Image view showing the picture to scan (set in `viewDidLoad`).
@property (strong, nonatomic) IBOutlet UIImageView *imgView;

@end
.m
#import "ViewController.h"
#import <CoreImage/CoreImage.h>
#import <QuartzCore/QuartzCore.h>
// Load the sample photo into the image view once the view hierarchy exists.
- (void)viewDidLoad
{
    [super viewDidLoad];

    // Image that the face detector will later scan.
    UIImage *photo = [UIImage imageNamed:@"family.png"];
    self.imgView.image = photo;
}
/// Detects every face in the displayed image and overlays a black
/// 1-pt rectangle on each one.
- (IBAction)btnScanFace:(id)sender {
    // Wrap the previously loaded picture in a CIImage so Core Image
    // can analyze it.
    CIImage *image1 = [CIImage imageWithCGImage:self.imgView.image.CGImage];

    // Create a face detector — since speed is not an issue we'll use a
    // high-accuracy detector.
    CIDetector *detector1 =
        [CIDetector detectorOfType:CIDetectorTypeFace
                           context:nil
                           options:@{CIDetectorAccuracy : CIDetectorAccuracyHigh}];

    // All faces detected in the image.
    NSArray *feature1 = [detector1 featuresInImage:image1];
    // FIX: -count returns NSUInteger; %d is the wrong format specifier.
    NSLog(@"%lu", (unsigned long)[feature1 count]);

    // Overlay container sized to the image; face rectangles are added
    // to it as subviews.
    UIView *frameview =
        [[UIView alloc] initWithFrame:CGRectMake(self.imgView.bounds.origin.x,
                                                 self.imgView.bounds.origin.y,
                                                 self.imgView.image.size.width,
                                                 self.imgView.image.size.height)];

    // Iterate through every detected face. CIFaceFeature provides the
    // bounds of the entire face (plus eye/mouth positions we don't use here).
    for (CIFaceFeature *faceFeature1 in feature1) {
        // FIX: this allocation was commented out in the original, leaving
        // faceView1 undeclared (and it referenced the wrong loop variable).
        UIView *faceView1 = [[UIView alloc] initWithFrame:faceFeature1.bounds];
        faceView1.layer.borderWidth = 1;
        faceView1.layer.borderColor = [[UIColor blackColor] CGColor];
        [frameview addSubview:faceView1];
    }

    // Core Image's coordinate system is flipped vertically relative to
    // UIKit, so mirror the overlay to line the rectangles up with the photo.
    // FIX: these two statements sat outside the method's closing brace
    // in the original and never compiled.
    [frameview setTransform:CGAffineTransformMakeScale(1, -1)];
    [self.view addSubview:frameview];
}
No comments:
Post a Comment