我在 iOS 上的 OpenCV 項目中又遇到了一個問題：使用 iOS 的 OpenCV 框架時出現 Apple Mach-O 鏈接器（ld）錯誤。
所以我寫了一個小項目來重現它：在 Objective-C++（.mm）文件裏用 C++ 實現了兩個方法：
我的 ViewController.hh 文件：
#ifdef __cplusplus
// OpenCV must be imported before Apple headers so its MIN/MAX and related
// macros do not clash with Foundation's definitions. Without this import the
// header's cv::Mat references below do not compile at all.
#import <opencv2/opencv.hpp>
#endif
#import <UIKit/UIKit.h>
#import <CoreData/CoreData.h>

@interface ViewController : UIViewController
@end

// The conversion helpers take cv::Mat (a C++ reference type), so they are
// only declared for Objective-C++ translation units; guarding them keeps this
// header importable from plain Objective-C (.m) files.
// NOTE(review): `static` at file scope gives every importing .mm its own
// private copy of these functions — each such file must provide a definition,
// or the declarations go unused. Consider a non-static C++ utility header.
#ifdef __cplusplus
static UIImage* MatToUIImage(const cv::Mat& m);
static void UIImageToMat(const UIImage* image, cv::Mat& m);
#endif
和我的 ViewController.mm 文件：
#import "ViewController.hh"
@interface ViewController()
@end
@implementation ViewController
// Converts an 8-bit cv::Mat (1 channel = grayscale, otherwise RGB(A)) into a
// UIImage. The pixel data is copied, so the returned image remains valid
// after the Mat is released.
static UIImage* MatToUIImage(const cv::Mat& m) {
    CV_Assert(m.depth() == CV_8U);
    // Copy the Mat's backing buffer; NSData keeps it alive for the CGImage.
    NSData *data = [NSData dataWithBytes:m.data length:m.elemSize()*m.total()];
    CGColorSpaceRef colorSpace = m.channels() == 1 ?
        CGColorSpaceCreateDeviceGray() : CGColorSpaceCreateDeviceRGB();
    CGDataProviderRef provider = CGDataProviderCreateWithCFData((__bridge CFDataRef)data);
    // BUG FIX: the second argument of CGImageCreate is the HEIGHT; the
    // original passed m.cols twice, which distorts/truncates any non-square
    // Mat. It must be m.rows.
    CGImageRef imageRef = CGImageCreate(m.cols,            // width
                                        m.rows,            // height
                                        m.elemSize1()*8,   // bits per component
                                        m.elemSize()*8,    // bits per pixel
                                        m.step[0],         // bytes per row
                                        colorSpace,
                                        kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault,
                                        provider, NULL, false, kCGRenderingIntentDefault);
    UIImage *finalImage = [UIImage imageWithCGImage:imageRef];
    // Release everything created above; the UIImage retains what it needs.
    CGImageRelease(imageRef);
    CGDataProviderRelease(provider);
    CGColorSpaceRelease(colorSpace);
    return finalImage;
}
// Renders a UIImage into `m`, (re)allocating it as an 8UC4 Mat of the
// image's pixel dimensions.
static void UIImageToMat(const UIImage* image, cv::Mat& m) {
    // CGImageGetColorSpace follows the Core Foundation "Get Rule": the caller
    // does NOT own the returned object and must not release it. The original
    // code called CGColorSpaceRelease on it, an over-release that can crash
    // later when the image's colorspace is used again.
    CGColorSpaceRef colorSpace = CGImageGetColorSpace(image.CGImage);
    CGFloat cols = image.size.width, rows = image.size.height;
    m.create(rows, cols, CV_8UC4); // 8 bits per component, 4 channels
    // Bitmap context draws directly into the Mat's pixel buffer.
    CGContextRef contextRef = CGBitmapContextCreate(m.data, m.cols, m.rows,
                                                    8,          // bits per component
                                                    m.step[0],  // bytes per row
                                                    colorSpace,
                                                    kCGImageAlphaNoneSkipLast | kCGBitmapByteOrderDefault);
    CGContextDrawImage(contextRef, CGRectMake(0, 0, cols, rows), image.CGImage);
    // Only the context was created by us; the colorspace is not ours to free.
    CGContextRelease(contextRef);
}
- (void)viewDidLoad
{
    [super viewDidLoad];
    // Load the bundled sample image, round-trip it through a cv::Mat, and
    // display the result as the view's tiled background.
    NSString *imagePath = [[NSBundle mainBundle] pathForResource:@"cmc7" ofType:@"jpg"];
    UIImage *sourceImage = [UIImage imageWithContentsOfFile:imagePath];

    cv::Mat pixels;
    UIImageToMat(sourceImage, pixels);
    UIImage *roundTripped = MatToUIImage(pixels);

    self.view.backgroundColor = [UIColor colorWithPatternImage:roundTripped];
}
我是這樣包含我的 OpenCV 框架的：
#import <Availability.h>
// Warn loudly when building against an SDK older than iOS 5.0.
#ifndef __IPHONE_5_0
#warning "This project uses features only available in iOS SDK 5.0 and later."
#endif
// OpenCV is a C++ library, so the import is guarded to keep this prefix
// header safe for plain C / Objective-C translation units. Importing it
// before UIKit/Foundation also avoids macro clashes (e.g. MIN/MAX) with
// Apple headers.
#ifdef __cplusplus
#import <opencv2/opencv.hpp>
#endif
// Standard framework imports for Objective-C translation units.
#ifdef __OBJC__
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#endif
需要說明的是：我有一個類似的項目，包含的頭文件完全相同，只是那兩個方法用 Objective-C 而不是 C++ 編寫，那個項目可以正常工作。
下面是我的錯誤日誌：
Undefined symbols for architecture i386:
"cv::Exception::Exception(int, std::string const&, std::string const&, std::string const&, int)", referenced from:
__ZL12MatToUIImageRKN2cv3MatE in ViewController.o
ld: symbol(s) not found for architecture i386
clang: error: linker command failed with exit code 1 (use -v to see invocation)
那麼，問題出在哪裏呢？我更希望用 C++ 來編寫這些方法，因爲使用 OpenCV 時，C++ 比 Objective-C 更爲常見。
非常感謝。
你有沒有在「Build Phases」→「Link Binary With Libraries」中添加你自己的框架？ – edzio27 2013-02-26 16:54:23
是的，我添加了，它出現在 Frameworks 文件夾中。 – Tiffado 2013-02-27 09:00:37