Pergunta

i am trying to detect face in UIImageview and place image on mouth. i have tried this method, but i can't transform CoreImage Coordination system to UIkit coordination system. here is my code:

code updated but still not functioning, just rotating view

@interface ProcessImageViewController ()

@end

@implementation ProcessImageViewController

@synthesize receivedImageData;
@synthesize renderImageView;
@synthesize viewToRender;
@synthesize preview;
@synthesize pancontrol;
@synthesize pinchcontrol;
@synthesize rotatecontrol;

- (BOOL)prefersStatusBarHidden {
return YES;
}

- (void)viewDidLoad
{


[super viewDidLoad];



renderImageView.image = receivedImageData;
renderImageView.contentMode  = UIViewContentModeScaleToFill;
 }


-(void)tryAddCliparts
{
NSLog(@"button clicked");

[self performSelectorInBackground:@selector(markFaces:) withObject:renderImageView];

}


- (IBAction)handlePan:(UIPanGestureRecognizer *)recognizer {

CGFloat firstX = recognizer.view.center.x;
CGFloat firstY = recognizer.view.center.y;

CGPoint translationPoint = [recognizer translationInView:self.view];
CGPoint translatedPoint = CGPointMake(firstX + translationPoint.x, firstY+ translationPoint.y);
CGFloat viewW = renderImageView.frame.size.width;
CGFloat viewH = renderImageView.frame.size.height;

if (translatedPoint.x<0 || translatedPoint.x>viewW)
    translatedPoint.x = renderImageView.frame.origin.x;

if (translatedPoint.y<0|| translatedPoint.y>viewH)
    translatedPoint.y = renderImageView.frame.origin.y;

recognizer.view.center = CGPointMake(translatedPoint.x, translatedPoint.y);
[recognizer setTranslation:CGPointMake(0, 0) inView:self.view];


  }
- (IBAction)handlePinch:(UIPinchGestureRecognizer *)recognizer {
recognizer.view.transform = CGAffineTransformScale(recognizer.view.transform, recognizer.scale, recognizer.scale);
recognizer.scale = 1;
 }

- (IBAction)handleRotate:(UIRotationGestureRecognizer *)recognizer {
recognizer.view.transform = CGAffineTransformRotate(recognizer.view.transform, recognizer.rotation);
recognizer.rotation = 0;
 }

- (BOOL)gestureRecognizer:(UIGestureRecognizer *)gestureRecognizer shouldRecognizeSimultaneouslyWithGestureRecognizer:(UIGestureRecognizer *)otherGestureRecognizer {
 return YES;
}

 -(void)markFaces:(UIImageView *)facePicture
{
NSLog(@"face detection started");
// draw a ci image from view
CIImage *image = [CIImage imageWithCGImage:facePicture.image.CGImage];


// Create face detector with high accuracy
 CIDetector* detector = [CIDetector detectorOfType:CIDetectorTypeFace
                                          context:nil options:[NSDictionary   dictionaryWithObject:CIDetectorAccuracyHigh forKey:CIDetectorAccuracy]];


CGAffineTransform transform = CGAffineTransformMakeScale(1, -1);
transform = CGAffineTransformTranslate(transform,
                                       0,-facePicture.bounds.size.height);

  // Get features from the image
  NSArray* features = [detector featuresInImage:image];
  for(CIFaceFeature* faceFeature in features) {



    // Transform CoreImage coordinates to UIKit
    CGRect faceRect = CGRectApplyAffineTransform(faceFeature.bounds, transform);


                UIImage *mustache = [UIImage imageNamed:@"mustacheok.png"];

                UIImageView *mustacheview = [[UIImageView alloc] initWithImage:mustache];


                 mustacheview.contentMode = UIViewContentModeScaleAspectFill;
                [mustacheview.layer setBorderColor:[[UIColor whiteColor] CGColor]];
                [mustacheview.layer setBorderWidth:3];
                [mustacheview addGestureRecognizer:pancontrol];
                [mustacheview addGestureRecognizer:pinchcontrol];
                [mustacheview addGestureRecognizer:rotatecontrol];
                mustacheview.userInteractionEnabled=YES;

                    CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);


                [mustacheview setFrame:CGRectMake(mouthPos.x, mouthPos.y,     mustacheview.frame.size.width, mustacheview.frame.size.height)];

    [viewToRender addSubview:mustacheview];
    [viewToRender bringSubviewToFront:mustacheview];



}

}




@end
Foi útil?

Solução

CGAffineTransform transform = CGAffineTransformMakeScale(1, -1);
transform = CGAffineTransformTranslate(transform,
                                       0,-facePicture.bounds.size.height);
for (CIFaceFeature *faceFeature in features) {

    // Transform CoreImage coordinates to UIKit
    CGRect faceRect = CGRectApplyAffineTransform(faceFeature.bounds, transform);

    if (faceFeature.hasMouthPosition) {

        // Transform CoreImage coordinates to UIKit
        CGPoint mouthPos = CGPointApplyAffineTransform(faceFeature.mouthPosition, transform);

    }

}

the only thing I see wrong on your code is this:

[mustacheview setFrame:CGRectMake(mouthPos.x, mouthPos.y,     mustacheview.frame.size.width, mustacheview.frame.size.height)];

you should use:

[mustacheview setCenter:mouthPos];

because the detector returns the mouth center point.

Outras dicas

CoreImage uses the same coordinate system as CoreGraphics, a bottom left coordinate system, as opposed to the top left coordinate system of UIKit.

So you basically have to flip along the Y-axis (multiply with -1 and offset the height of the screen)

CGAffineTransformation flipVertical = 
       CGAffineTransformMake(1, 0, 0, -1, 0, self.bounds.size.height);
Licenciado em: CC-BY-SA com atribuição
Não afiliado a StackOverflow
scroll top