2
A
回答
4
您可以通過使用AVFoundation框架同時捕獲攝像頭畫面,並將每一幀顯示到四個圖層上,例如:
/// Sets up an AVCaptureSession that delivers BGRA frames to this object's
/// sample-buffer delegate, and installs four CALayers in a 2x2 grid that the
/// delegate pushes each captured frame into.
- (void)initCapture {
// Capture input from the default video camera.  The original code passed
// error:nil and ignored failure; a nil input (simulator, or camera access
// denied) would raise NSInvalidArgumentException when added to the session.
NSError *inputError = nil;
AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput
deviceInputWithDevice:[AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo]
error:&inputError];
if (!captureInput) {
NSLog(@"initCapture: could not create camera input: %@", inputError);
return;
}
AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
// Drop late frames instead of queueing them; we only show a live preview.
captureOutput.alwaysDiscardsLateVideoFrames = YES;
// Deliver sample buffers on a private serial queue.
dispatch_queue_t queue = dispatch_queue_create("cameraQueue", NULL);
[captureOutput setSampleBufferDelegate:self queue:queue];
dispatch_release(queue); // the output retains the queue (pre-ARC GCD)
// Request BGRA pixels so the delegate can feed them straight to CoreGraphics.
NSString *key = (NSString *)kCVPixelBufferPixelFormatTypeKey;
NSNumber *value = [NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA];
NSDictionary *videoSettings = [NSDictionary dictionaryWithObject:value forKey:key];
[captureOutput setVideoSettings:videoSettings];
// NOTE(review): under MRC a retain property plus alloc/init leaks one
// reference here; original code did the same — confirm ownership policy.
self.captureSession = [[AVCaptureSession alloc] init];
[self.captureSession setSessionPreset:AVCaptureSessionPresetLow];
// Guard with canAdd… so an incompatible input/output fails soft, not with
// an exception.
if ([self.captureSession canAddInput:captureInput]) {
[self.captureSession addInput:captureInput];
} else {
NSLog(@"initCapture: session cannot accept camera input");
}
if ([self.captureSession canAddOutput:captureOutput]) {
[self.captureSession addOutput:captureOutput];
} else {
NSLog(@"initCapture: session cannot accept video data output");
}
[self.captureSession startRunning];
// Four preview layers in a 2x2 grid (the -25 / +25 offsets compensate for
// the 90-degree rotation each layer applies).
self.customLayer = [self xyz_previewLayerWithFrame:CGRectMake(5 - 25, 25, 200, 150)];
[self.view.layer insertSublayer:self.customLayer atIndex:4]; // original z-order kept
self.customLayer1 = [self xyz_previewLayerWithFrame:CGRectMake(165 - 25, 25, 200, 150)];
[self.view.layer addSublayer:self.customLayer1];
self.customLayer2 = [self xyz_previewLayerWithFrame:CGRectMake(5 - 25, 210 + 25, 200, 150)];
[self.view.layer addSublayer:self.customLayer2];
self.customLayer3 = [self xyz_previewLayerWithFrame:CGRectMake(165 - 25, 210 + 25, 200, 150)];
[self.view.layer addSublayer:self.customLayer3];
}
/// Creates one preview layer for the 2x2 camera grid.
/// @param frame The layer's frame in the parent view's coordinates.
/// @return An autoreleased CALayer rotated 90 degrees so landscape camera
///         frames display upright.
- (CALayer *)xyz_previewLayerWithFrame:(CGRect)frame {
CALayer *layer = [CALayer layer];
layer.frame = frame;
layer.transform = CATransform3DRotate(CATransform3DIdentity, M_PI / 2.0f, 0, 0, 1);
return layer;
}
#pragma mark -
#pragma mark AVCaptureSession delegate
/// AVCaptureVideoDataOutput delegate callback.  Runs on the background
/// "cameraQueue"; converts each BGRA sample buffer into a CGImage and pushes
/// it into all four preview layers on the main thread.
- (void)captureOutput:(AVCaptureOutput *)captureOutput
didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer
fromConnection:(AVCaptureConnection *)connection
{
// MRC-era code: a local pool drains the temporaries created per frame on
// this non-main thread.
NSAutoreleasePool * pool = [[NSAutoreleasePool alloc] init];
CVImageBufferRef imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
/*Lock the image buffer*/
CVPixelBufferLockBaseAddress(imageBuffer,0);
/*Get information about the image*/
uint8_t *baseAddress = (uint8_t *)CVPixelBufferGetBaseAddress(imageBuffer);
size_t bytesPerRow = CVPixelBufferGetBytesPerRow(imageBuffer);
size_t width = CVPixelBufferGetWidth(imageBuffer);
size_t height = CVPixelBufferGetHeight(imageBuffer);
/*Create a CGImageRef from the CVImageBufferRef*/
// 32Little + PremultipliedFirst matches the kCVPixelFormatType_32BGRA
// layout requested in initCapture's video settings.
CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
CGContextRef newContext = CGBitmapContextCreate(baseAddress, width, height, 8, bytesPerRow, colorSpace, kCGBitmapByteOrder32Little | kCGImageAlphaPremultipliedFirst);
CGImageRef newImage2 = CGBitmapContextCreateImage(newContext);
/*We release some components*/
CGContextRelease(newContext);
CGColorSpaceRelease(colorSpace);
// The same image feeds all four layers.  waitUntilDone:YES matters: it
// guarantees the main thread has retained the image as each layer's
// contents before the CGImageRelease below.
[self.customLayer performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
[self.customLayer1 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
[self.customLayer2 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
[self.customLayer3 performSelectorOnMainThread:@selector(setContents:) withObject: (id) newImage2 waitUntilDone:YES];
// UIImage *image= [UIImage imageWithCGImage:newImage scale:1.0 orientation:UIImageOrientationRight];
/*We relase the CGImageRef*/
CGImageRelease(newImage2);
// [self.imageView performSelectorOnMainThread:@selector(setImage:) withObject:image waitUntilDone:YES];
/*We unlock the image buffer*/
// Unlock only after all reads of baseAddress are complete.
CVPixelBufferUnlockBaseAddress(imageBuffer,0);
[pool drain];
}
它工作得很好,能做到這一點。
http://crayoncoding.blogspot.com/2011/04/iphone-4-camera-views-at-once.html
看到上面的鏈接查看詳細代碼
0
你可以嘗試擁有4個UIImagePickerControllers。不知道它是否會奏效,但值得一試。
相關問題
- 1. 用iPhone SDK訪問攝像頭
- 2. 訪問攝像頭
- 3. iPhone SDK 3.0攝像機訪問
- 4. 通過iPhone Web App訪問攝像頭
- 5. iphone攝像頭訪問移動網頁
- 6. 從uiwebview訪問攝像頭?
- 7. HTML5 - 訪問攝像頭
- 8. 訪問攝像頭失敗
- 9. HTML5:攝像頭訪問
- 10. 從JavaFx訪問攝像頭
- 11. iPhone攝像頭流
- 12. 管理攝像頭 - 無法訪問攝像頭
- 13. 作爲網絡攝像頭訪問手機攝像頭C++
- 14. 實時訪問iPhone的攝像頭圖像
- 15. iPhone:從Web應用程序訪問攝像頭和相冊
- 16. 只能通過API訪問iphone攝像頭的數據
- 17. 如何從服務器端網頁訪問iphone攝像頭?
- 18. 如何使用AS3訪問iPhone攝像頭
- 19. 您可以從Mobile Safari訪問iPhone攝像頭嗎?
- 20. 用於IOS Iphone 3G的JavaScript API用於攝像頭訪問
- 21. 在iphone上的遠程位置訪問攝像頭
- 22. 訪問iPhone的攝像頭,在一個應用程序
- 23. 使用Xcode的訪問後置攝像頭,iPhone的iOS5
- 24. 如何訪問MacBook上的攝像頭?
- 25. 訪問設備攝像頭與getUserMedia
- 26. 如何從Webview中訪問攝像頭?
- 27. 如何使用JavaScript訪問攝像頭
- 28. 在瀏覽器中訪問攝像頭?
- 29. 無閃存訪問攝像頭
- 30. 訪問網絡攝像頭使用vb.net
我假設你想對每一個預覽應用效果,比如Mac上的Photo Booth那樣。由於需要使用GPU來提供此功能,因此不確定這對iPhone 4是否還能起作用。 – 2011-03-03 07:44:38