since: 2012/02/28
update: 2012/02/28
reference: 1. The iOS 5 Developer's Cookbook
2. I touchs: Filter4Cam 學習之 Getting Raw Video Data
A. 開啓 ViewController.h 檔案, 修改如下:
....
//@add
#import "Filter4CamHelper.h"
//@add:Cameras defined
// Camera selector constants passed to -establishCamera:.
// Typedef'd so the type can appear in method signatures. The original
// declaration (`enum { ... } availableCameras;`) defined a *variable* of
// anonymous enum type in the header, which produces a duplicate symbol in
// every translation unit that imports this file; the stray variable is
// removed, the constant names and values are unchanged.
typedef enum {
    kCameraNone = -1, // no camera available
    kCameraFront,     // front-facing camera (0)
    kCameraBack,      // rear-facing camera  (1)
} AvailableCameras;
//@interface ViewController : GLKViewController
//@update
@interface ViewController : GLKViewController <AVCaptureVideoDataOutputSampleBufferDelegate>
{
....
// AVCaptureSession: Used to coordinate the flow of data from
// AV input devices to outputs.
AVCaptureSession *session;
BOOL isUsingFrontCamera;
}
//@add
@property (strong, nonatomic) EAGLContext *glContext;
@property (strong, nonatomic) CIContext *coreImageContext;
@property (strong, nonatomic) GLKView *glView;
@property (strong) AVCaptureSession *session;
@property (assign) BOOL isUsingFrontCamera;
//@add for Render
- (void)establishRender; // establish Render
//@add for Camera
- (void)establishCamera:(uint)whichCamera; // build the camera capture session
- (void)startRunningSession; // start the camera capture session
- (void)stopRunningSession; // stop the camera capture session
//@add for test
- (void)helperTest;
-----------------------------------------------------------------------------------
B. 開啓 ViewController.m 檔案, 修改如下:
@implementation ViewController
//@add
// Explicit @synthesize directives mapping each property to an
// underscore-prefixed backing ivar.
// NOTE(review): with Xcode 4.4+ these are auto-synthesized and could be
// removed; kept for the iOS 5-era toolchain this tutorial targets.
@synthesize glContext = _glContext;
@synthesize coreImageContext = _coreImageContext;
@synthesize glView = _glView;
@synthesize session = _session;
@synthesize isUsingFrontCamera = _isUsingFrontCamera;
....
#pragma mark Camera
//@add:啟動相機 Session
//@add: start the camera capture session.
// No-op when the session is already running (or when self.session is nil,
// since messaging nil does nothing).
- (void)startRunningSession
{
    if (!self.session.running)
    {
        [self.session startRunning];
    }
}
//@add:停止相機 Session
//@add: stop the camera capture session.
// Mirrors -startRunningSession: bail out early when the session is not
// running, so both methods follow the same guard pattern.
- (void)stopRunningSession
{
    if (!self.session.running) return;
    [self.session stopRunning];
}
//@add:建立相機 Session
//@add: build the camera capture session.
// Configures self.session to deliver 640x480 32BGRA video frames to this
// object on the main queue. whichCamera selects kCameraFront or kCameraBack;
// falls back to the back camera when a front camera was requested but is
// unavailable. Does nothing when the device has no camera at all.
//
// FIX(review): the original created the session and called
// beginConfiguration *before* validating the device input, so the failure
// path returned with a half-configured session assigned to self.session and
// an open (never committed) configuration transaction. The device input is
// now resolved first; the session is only created and configured once the
// input is known to be good. canAddInput:/canAddOutput: checks added.
- (void)establishCamera:(uint)whichCamera
{
    NSError *error = nil;

    // Is a camera available at all?
    if (![Filter4CamHelper numberOfCameras]) return;

    // Choose camera: use the front camera only when it was requested AND
    // the hardware actually has one.
    self.isUsingFrontCamera = NO;
    if ((whichCamera == kCameraFront) && [Filter4CamHelper frontCameraAvailable])
    {
        self.isUsingFrontCamera = YES;
    }

    // Retrieve the selected capture device.
    // (The default device could instead be fetched with
    //  [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo].)
    AVCaptureDevice *device = self.isUsingFrontCamera ? [Filter4CamHelper frontCamera] : [Filter4CamHelper backCamera];

    // Create the capture input. deviceInputWithDevice:error: returns nil on
    // failure and fills in 'error'.
    AVCaptureDeviceInput *captureInput = [AVCaptureDeviceInput deviceInputWithDevice:device error:&error];
    if (!captureInput)
    {
        NSLog(@"Error establishing device input: %@", error);
        return;
    }

    // Create the session. 640x480 keeps Core Image's per-frame workload
    // low; higher presets (720p / 1080p) cost proportionally more, though a
    // single simple filter can still keep up at high resolution.
    self.session = [[AVCaptureSession alloc] init];
    [self.session beginConfiguration];
    [self.session setSessionPreset:AVCaptureSessionPreset640x480];

    // Attach the camera input.
    if ([self.session canAddInput:captureInput])
    {
        [self.session addInput:captureInput];
    }

    // Video data output: drop late frames rather than queueing them, and
    // request 32BGRA pixel buffers (the format Core Image consumes directly).
    AVCaptureVideoDataOutput *captureOutput = [[AVCaptureVideoDataOutput alloc] init];
    [captureOutput setAlwaysDiscardsLateVideoFrames:YES];
    NSDictionary *settings = [NSDictionary dictionaryWithObject:[NSNumber numberWithUnsignedInt:kCVPixelFormatType_32BGRA] forKey:(NSString *)kCVPixelBufferPixelFormatTypeKey];
    [captureOutput setVideoSettings:settings];

    // Deliver each captured frame to this object's
    // captureOutput:didOutputSampleBuffer:fromConnection: on the main queue.
    [captureOutput setSampleBufferDelegate:self queue:dispatch_get_main_queue()];
    if ([self.session canAddOutput:captureOutput])
    {
        [self.session addOutput:captureOutput];
    }

    // Close the configuration transaction; changes take effect atomically.
    [self.session commitConfiguration];
}
#pragma mark Implementation Delegate Method
//@add implementation method for
// <AVCaptureVideoDataOutputSampleBufferDelegate>
// Callback receiving each raw video frame (raw pixel data) captured by the
// session, delivered on the main queue as configured in -establishCamera:.
// sampleBuffer holds the 32BGRA pixel data requested in the output settings.
-(void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection {
//@TODO: extract the CVPixelBuffer from sampleBuffer and render it.
}
沒有留言:
張貼留言
注意:只有此網誌的成員可以留言。