
*This is a repost of an article originally published on another blog.

I couldn't find any reference examples of optical flow on the iPhone online, so I went ahead and gave it a try myself. It uses cvCalcOpticalFlowFarneback.
I referred to the page below, among others; the AVCapture-related parts are taken from it almost verbatim.

・OpenCVを利用したリアルタイムフィルタリングの基本 (Basics of real-time filtering using OpenCV)

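For reference, here is the core call in isolation with its parameters spelled out. This is only a minimal sketch against the OpenCV 2.x C API; the function name and its image arguments below are placeholders, not part of the project code.

#import <opencv2/opencv.hpp>

// prevGray and currGray are same-sized single-channel (IPL_DEPTH_8U) frames.
// Returns a CV_32FC2 matrix holding one (dx, dy) displacement per pixel;
// the caller releases it with cvReleaseMat(&flow).
static CvMat *denseFlowSketch(IplImage *prevGray, IplImage *currGray)
{
    CvMat *flow = cvCreateMat(prevGray->height, prevGray->width, CV_32FC2);
    cvCalcOpticalFlowFarneback(prevGray,   // previous frame (grayscale)
                               currGray,   // current frame (grayscale)
                               flow,       // output flow field
                               0.5,        // pyr_scale: scale between pyramid levels
                               3,          // levels: number of pyramid levels
                               15,         // winsize: averaging window size
                               3,          // iterations per pyramid level
                               5,          // poly_n: neighborhood size for the polynomial expansion
                               1.1,        // poly_sigma: Gaussian sigma for that expansion
                               0);         // flags
    return flow;
}
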
The full code is below. Everything is placed in the ViewController.

//
//  ViewController.h
//  optFlow
//

#import "UIKit/UIKit.h"
#import "Foundation/Foundation.h"
#import "opencv2/opencv.hpp"
#import "AVFoundation/AVFoundation.h"
#import "CoreVideo/CoreVideo.h"
@interface ViewController : UIViewController {
    
}
@end
************************************************
//
//  ViewController.m
//  optFlow
//
#ifdef __cplusplus
#import "opencv2/opencv.hpp"
#endif

#import "ViewController.h"

@interface ViewController () <AVCaptureVideoDataOutputSampleBufferDelegate>

@property (strong, nonatomic) AVCaptureSession *session;
@property (strong, nonatomic) AVCaptureDeviceInput *videoInput;
@property (strong, nonatomic) AVCaptureVideoDataOutput *videoDataOutput;
@property (nonatomic) dispatch_queue_t videoDataOutputQueue;
@property (strong, nonatomic) CALayer *previewLayer;
@end

UIImage *prevImage;   // the previous frame

@implementation ViewController

- (void)viewDidLoad
{
    [super viewDidLoad];
	// Do any additional setup after loading the view, typically from a nib.
    self.previewLayer = [CALayer layer];
    self.previewLayer.frame = self.view.bounds;
    [self.view.layer addSublayer:self.previewLayer];
}

- (void)viewWillAppear:(BOOL)animated
{
    [super viewWillAppear:animated];
    
    [self setupAVCapture];
}

- (void)viewDidDisappear:(BOOL)animated
{
    [super viewDidDisappear:animated];
    
    [self teardownAVCapture];
}

- (void)didReceiveMemoryWarning
{
    [super didReceiveMemoryWarning];
    // Dispose of any resources that can be recreated.
}

- (void)setupAVCapture
{
    NSError *error = nil;
    // Create a capture session from the input and output
    self.session = [[AVCaptureSession alloc] init];
    self.session.sessionPreset = AVCaptureSessionPresetLow;
    // Create the input from the camera
    AVCaptureDevice *camera = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    // Add it to the capture session
    self.videoInput = [AVCaptureDeviceInput deviceInputWithDevice:camera error:&error];
    [self.session addInput:self.videoInput];
    // Create the video data output
    self.videoDataOutput = [[AVCaptureVideoDataOutput alloc] init];
    // Add it to the capture session
    [self.session addOutput:self.videoDataOutput];
    // Set up the queue on which captured frames are delivered
    self.videoDataOutputQueue = dispatch_queue_create("myQueue", NULL);
    [self.videoDataOutput setAlwaysDiscardsLateVideoFrames:YES];
    [self.videoDataOutput setSampleBufferDelegate:self queue:self.videoDataOutputQueue];
    // Have the video output deliver frames as BGRA
    self.videoDataOutput.videoSettings = @{
                                           (id)kCVPixelBufferPixelFormatTypeKey : [NSNumber numberWithInt:kCVPixelFormatType_32BGRA]
                                           };
    // Get the AVCaptureConnection for the video output
    AVCaptureConnection *videoConnection = [self.videoDataOutput connectionWithMediaType:AVMediaTypeVideo];
    if ([videoConnection isVideoOrientationSupported]) {
        AVCaptureVideoOrientation orientation = AVCaptureVideoOrientationPortrait;
        [videoConnection setVideoOrientation:orientation];
    }
    // Constrain the capture rate to roughly 8-16 frames per second
    if ( YES == [camera lockForConfiguration:NULL] )
    {
        [camera setActiveVideoMinFrameDuration:CMTimeMake(1,16)];
        [camera setActiveVideoMaxFrameDuration:CMTimeMake(1,8)];
        [camera unlockForConfiguration];
    }
    // Start capturing
    [self.session startRunning];
}

- (void)teardownAVCapture
{
	self.videoDataOutput = nil;
	if (self.videoDataOutputQueue) {
#if __IPHONE_OS_VERSION_MIN_REQUIRED < 60000
		dispatch_release(self.videoDataOutputQueue);
#endif
    }
}

// AVCaptureVideoDataOutputSampleBufferDelegate protocol method.
// Called whenever a new captured frame becomes available.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
       fromConnection:(AVCaptureConnection *)connection
{
    // Create a UIImage from the captured frame
    UIImage *image = [self imageFromSampleBuffer:sampleBuffer];
    // On the very first frame there is no previous image yet, so seed it with the current frame
    if (prevImage == nil) prevImage = [[UIImage alloc] initWithCGImage:image.CGImage];
    // Process and display the image on the main thread
    dispatch_async(dispatch_get_main_queue(), ^{
        [self process:image];
    });
}

// Image processing
- (void)process:(UIImage *)image
{
    // Compute and draw the optical flow
    UIImage *processedImage = [self optFlow:image];
    // Show the processed image in the preview layer
    self.previewLayer.contents = (__bridge id)(processedImage.CGImage);
    // Keep the current frame as the previous frame for the next pass
    prevImage = [[UIImage alloc] initWithCGImage:image.CGImage];
}

- (UIImage *)optFlow:(UIImage *)image
{
    // Create IplImages for the current and previous frames, plus grayscale copies of each
    IplImage *srcImage     = [self IplImageFromUIImage:image];
    IplImage *graySrcImage = cvCreateImage(cvGetSize(srcImage), IPL_DEPTH_8U, 1);
    cvCvtColor(srcImage, graySrcImage, CV_BGR2GRAY);
    IplImage *prvImage     = [self IplImageFromUIImage:prevImage];
    IplImage *grayPrvImage = cvCreateImage(cvGetSize(prvImage), IPL_DEPTH_8U, 1);
    cvCvtColor(prvImage, grayPrvImage, CV_BGR2GRAY);
    // Flow field: one (dx, dy) float pair per pixel
    CvMat *flow = cvCreateMat(cvGetSize(srcImage).height, cvGetSize(srcImage).width, CV_32FC2);
    
    // Dense optical flow from the previous frame to the current one
    cvCalcOpticalFlowFarneback(grayPrvImage, graySrcImage, flow, 0.5, 3, 15, 3, 5, 1.1, 0);
    
    // Draw the flow vectors onto the current frame on a 10px grid (amplified x2 for visibility)
    cv::Mat matImage(srcImage);
    int width  = cvGetSize(srcImage).width;
    int height = cvGetSize(srcImage).height;
    for ( int i = 0; i < height; i += 10 ){
        for ( int j = 0; j < width; j += 10 ){
            cv::Point pt1(j, i);
            int jj = flow->data.fl[(i*width+j)*2];      // x component of the flow at (j, i)
            int ii = flow->data.fl[(i*width+j)*2+1];    // y component of the flow at (j, i)
            cv::Point pt2(j + jj*2, i + ii*2);
            cv::line(matImage,
                     pt1,
                     pt2,
                     cv::Scalar(255,255,0,255), 2, 8, 0);
        }
    }
    
    UIImage *effectedImage = [self UIImageFromIplImage:srcImage];
    
    cvReleaseImage(&srcImage);
    cvReleaseImage(&graySrcImage);
    cvReleaseImage(&prvImage);
    cvReleaseImage(&grayPrvImage);
    cvReleaseMat(&flow);
    
    return effectedImage;
}

- (IplImage *)IplImageFromUIImage:(UIImage *)image
{
    CGImageRef imageRef = image.CGImage;
    
    // Create an RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    
    // Create a temporary 4-channel IplImage
    IplImage *iplimage = cvCreateImage(cvSize(image.size.width,image.size.height), IPL_DEPTH_8U, 4);
    
    // Create a CGBitmapContext backed by the IplImage's pixel buffer
    CGContextRef contextRef = CGBitmapContextCreate(
                                                    iplimage->imageData,
                                                    iplimage->width,
                                                    iplimage->height,
                                                    iplimage->depth,
                                                    iplimage->widthStep,
                                                    colorSpace,
                                                    kCGImageAlphaPremultipliedLast|kCGBitmapByteOrderDefault);
    
    // Draw the CGImage into the bitmap context
    CGContextDrawImage(contextRef,
                       CGRectMake(0, 0, image.size.width, image.size.height),
                       imageRef);
    
    // Release the bitmap context and color space
    CGContextRelease(contextRef);
    CGColorSpaceRelease(colorSpace);
    
    // Create the final 3-channel IplImage
    IplImage *ret = cvCreateImage(cvGetSize(iplimage), IPL_DEPTH_8U, 3);
    
    // Convert RGBA to BGR, then release the temporary image
    cvCvtColor(iplimage, ret, CV_RGBA2BGR);
    cvReleaseImage(&iplimage);
    
    return ret;
}

- (UIImage *)UIImageFromIplImage:(IplImage*)image
{
    CGColorSpaceRef colorSpace;
    if (image->nChannels == 1) {
        colorSpace = CGColorSpaceCreateDeviceGray();
    } else {
        colorSpace = CGColorSpaceCreateDeviceRGB();
        // The data is in BGR order, so convert it to RGB
        cvCvtColor(image, image, CV_BGR2RGB);
    }
    
    // Create an NSData wrapping the IplImage's pixel buffer
    NSData *data = [NSData dataWithBytes:image->imageData length:image->imageSize];
    
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    
    // Create a CGImage
    CGImageRef imageRef = CGImageCreate(image->width,
                                        image->height,
                                        image->depth,
                                        image->depth * image->nChannels,
                                        image->widthStep,
                                        colorSpace,
                                        kCGImageAlphaNone|kCGBitmapByteOrderDefault,
                                        provider,
                                        NULL,
                                        false,
                                        kCGRenderingIntentDefault
                                        );
    
    // Create the UIImage
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    
    return ret;
}

- (UIImage *)imageFromSampleBuffer:(CMSampleBufferRef)sampleBuffer
{
    CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    
    // Lock the base address of the pixel buffer
    CVPixelBufferLockBaseAddress(imageBuffer, 0);
    
    // Get the pixel data and dimensions of the image
    uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddressOfPlane(imageBuffer, 0);
    
    size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
    size_t width = CVPixelBufferGetWidth(imageBuffer);
    size_t height = CVPixelBufferGetHeight(imageBuffer);
    
    // RGB color space
    CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
    
    CGContextRef newContext = CGBitmapContextCreate(baseAddress,
                                                    width,
                                                    height,
                                                    8,
                                                    bytesPerRow,
                                                    colorSpace,
                                                    kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
    
    CGImageRef imageRef = CGBitmapContextCreateImage(newContext);
    UIImage *ret = [UIImage imageWithCGImage:imageRef];
    
    CGImageRelease(imageRef);
    CGContextRelease(newContext);
    CGColorSpaceRelease(colorSpace);
    CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
    
    return ret;
}

- (AVCaptureVideoOrientation)videoOrientationFromDeviceOrientation:(UIDeviceOrientation)deviceOrientation
{
    AVCaptureVideoOrientation orientation;
    switch (deviceOrientation) {
        case UIDeviceOrientationUnknown:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationPortrait:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationPortraitUpsideDown:
            orientation = AVCaptureVideoOrientationPortraitUpsideDown;
            break;
        case UIDeviceOrientationLandscapeLeft:
            orientation = AVCaptureVideoOrientationLandscapeRight;
            break;
        case UIDeviceOrientationLandscapeRight:
            orientation = AVCaptureVideoOrientationLandscapeLeft;
            break;
        case UIDeviceOrientationFaceUp:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
        case UIDeviceOrientationFaceDown:
            orientation = AVCaptureVideoOrientationPortrait;
            break;
    }
    return orientation;
}
@end
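Note that because the OpenCV C++ headers are pulled in, the implementation file has to be compiled as Objective-C++ (for example by renaming ViewController.m to ViewController.mm), and opencv2.framework plus AVFoundation, CoreVideo and CoreMedia need to be linked into the project.

If you prefer the C++ API over the legacy C API used above, the same flow computation and per-pixel lookup can be written roughly like this. This is a hedged sketch only: cv::calcOpticalFlowFarneback is the C++ counterpart of the call above, and the function and variable names here are placeholders.

#import <opencv2/opencv.hpp>

// prevGray and currGray are same-sized single-channel (CV_8UC1) cv::Mat frames;
// colorFrame is the image the flow vectors are drawn onto.
static void drawFlowSketch(const cv::Mat &prevGray, const cv::Mat &currGray, cv::Mat &colorFrame)
{
    cv::Mat flow;   // CV_32FC2, filled by the call below
    cv::calcOpticalFlowFarneback(prevGray, currGray, flow,
                                 0.5, 3, 15, 3, 5, 1.1, 0);
    for (int y = 0; y < flow.rows; y += 10) {
        for (int x = 0; x < flow.cols; x += 10) {
            // flow.at<cv::Point2f>(y, x) is the (dx, dy) displacement at pixel (x, y)
            cv::Point2f d = flow.at<cv::Point2f>(y, x);
            cv::line(colorFrame,
                     cv::Point(x, y),
                     cv::Point(cvRound(x + d.x * 2), cvRound(y + d.y * 2)),
                     cv::Scalar(255, 255, 0, 255), 2);
        }
    }
}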