How to compile Objective-C code that uses AVCapture in the Mac terminal

Problem description:

I am a beginner with Objective-C and need to compile, from the Mac terminal, a program that uses AVCapture to grab images from two webcams. The code is shown below.

#import <AVFoundation/AVFoundation.h> 
#import <AppKit/AppKit.h> 

@interface Capture : NSObject <AVCaptureVideoDataOutputSampleBufferDelegate> 
@property (weak) AVCaptureSession* session; 
- (void) captureOutput: (AVCaptureOutput*) output 
didOutputSampleBuffer: (CMSampleBufferRef) buffer 
     fromConnection: (AVCaptureConnection*) connection; 
//- (void) captureOutput: (AVCaptureOutput*) output 
// didDropSampleBuffer: (CMSampleBufferRef) buffer 
//  fromConnection: (AVCaptureConnection*) connection; 
@end 
@interface Capture() 
{ 
    CVImageBufferRef head; 
    CFRunLoopRef runLoop; 
    int count; 
    int secret; 
} 
- (void) save; 
@end 

@implementation Capture 
@synthesize session; 

- (id) initWithInteger: (int) s 
{ 
    self = [super init]; 
    runLoop = CFRunLoopGetCurrent(); 
    head = nil; 
    count = 0; 
    secret = s; 
    return self; 
} 

- (void) dealloc 
{ 
    @synchronized (self) {
        CVBufferRelease(head);
    }
    NSLog(@"capture released"); 
} 

- (void) save 
{ 
    @synchronized (self) {
        CIImage* ciImage =
            [CIImage imageWithCVImageBuffer: head];
        NSBitmapImageRep* bitmapRep =
            [[NSBitmapImageRep alloc] initWithCIImage: ciImage];

        NSData* jpgData =
            [bitmapRep representationUsingType:NSJPEGFileType properties: nil];
        NSString* filename = [NSString stringWithFormat: @"result_%d.jpg", secret];
        [jpgData writeToFile: filename atomically: NO];
        //NSData* pngData =
        //    [bitmapRep representationUsingType:NSPNGFileType properties: nil];
        //[pngData writeToFile: @"result.png" atomically: NO];
    }
    NSLog(@"Saved");
} 

- (void) captureOutput: (AVCaptureOutput*) output 
    didOutputSampleBuffer: (CMSampleBufferRef) buffer 
     fromConnection: (AVCaptureConnection*) connection 
{ 
#pragma unused (output) 
#pragma unused (connection) 
    CVImageBufferRef frame = CMSampleBufferGetImageBuffer(buffer);
    CVImageBufferRef prev;
    CVBufferRetain(frame);
    @synchronized (self) {
        prev = head;
        head = frame;
        count++;
        NSLog(@"Captured");
    }
    CVBufferRelease(prev);
    if (count == 5) {
        // after skipping 5 frames
        [self save];
        [self.session stopRunning];
        CFRunLoopStop(runLoop);
    }
} 
//- (void) captureOutput: (AVCaptureOutput*) output 
// didDropSampleBuffer: (CMSampleBufferRef) buffer 
//  fromConnection: (AVCaptureConnection*) connection 
//{ 
//#pragma unused (output) 
//#pragma unused (buffer) 
//#pragma unused (connection) 
//} 
@end 


int quit(NSError * error) 
{ 
    NSLog(@"[error] %@", [error localizedDescription]); 
    return 1; 
} 

int main() 
{ 
    NSError* error = nil; 
    Capture* capture_1 = [[Capture alloc] initWithInteger: 1]; 
    Capture* capture_2 = [[Capture alloc] initWithInteger: 2]; 

    //NSArray* devices = 
    // [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo]; 
    //AVCaptureDevice* device = [devices objectAtIndex: 0]; 
    NSArray* devices = [AVCaptureDevice devicesWithMediaType: AVMediaTypeVideo]; 
    for (id obj in devices) {
        NSLog(@"[XX device] %@", obj);
    }

    AVCaptureDevice* logitech_1 = [AVCaptureDevice deviceWithUniqueID:@"0x14344000046d081b"]; 
    AVCaptureDevice* logitech_2 = [AVCaptureDevice deviceWithUniqueID:@"0x14342000046d081b"]; 

    NSLog(@"[check device] %@", logitech_1); 
    NSLog(@"[check device] %@", logitech_2); 

    /*AVCaptureDevice* device = 
    [AVCaptureDevice defaultDeviceWithMediaType: AVMediaTypeVideo];*/ 
    AVCaptureDevice* device_1 = logitech_1; 
    AVCaptureDevice* device_2 = logitech_2; 

    //NSLog(@"[device] %@", device); 

    AVCaptureDeviceInput* input_1 = 
    [AVCaptureDeviceInput deviceInputWithDevice: logitech_1 error: &error]; 
    NSLog(@"[input] %@", input_1); 

    AVCaptureDeviceInput* input_2 = 
    [AVCaptureDeviceInput deviceInputWithDevice: logitech_2 error: &error]; 
    NSLog(@"[input] %@", input_2); 

    AVCaptureVideoDataOutput* output_1 = 
    [[AVCaptureVideoDataOutput alloc] init]; 
    [output_1 setSampleBufferDelegate: capture_1 queue: dispatch_get_main_queue()]; 
    NSLog(@"[output] %@", output_1); 
    AVCaptureVideoDataOutput* output_2 = 
    [[AVCaptureVideoDataOutput alloc] init]; 
    [output_2 setSampleBufferDelegate: capture_2 queue: dispatch_get_main_queue()]; 
    NSLog(@"[output] %@", output_2); 

    AVCaptureSession* session_1 = [[AVCaptureSession alloc] init]; 
    [session_1 addInput: input_1]; 
    [session_1 addOutput: output_1]; 

    AVCaptureSession* session_2 = [[AVCaptureSession alloc] init]; 
    [session_2 addInput: input_2]; 
    [session_2 addOutput: output_2]; 

    capture_1.session = session_1; 
    capture_2.session = session_2; 
    [session_2 startRunning]; 

    NSLog(@"Started"); 
    CFRunLoopRun(); 

    [session_1 startRunning]; 
    CFRunLoopRun(); 

    NSLog(@"Stopped"); 
    return 0; 
} 

Compile command:

clang -fobjc-arc -Wall -Wextra -pedantic avcapture.m \
    -framework Cocoa -framework AVFoundation -framework CoreMedia \
    -framework QuartzCore -o avcapture

When I run this command, I get warnings and the following error:

clang: error: linker command failed with exit code 1 (use -v to see invocation) 

Could someone please help me?

Using the command clang -fobjc-arc -framework AVFoundation -framework CoreMedia -framework CoreImage -framework Cocoa -framework QuartzCore avcapture.m, your file compiles and produces an a.out executable.
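For reference, the same suggestion can be combined with the warning flags and explicit output name from the original question (a sketch of the merged command, not verbatim from the answer):

clang -fobjc-arc -Wall -Wextra -pedantic avcapture.m \
    -framework AVFoundation -framework CoreMedia -framework CoreImage \
    -framework Cocoa -framework QuartzCore -o avcapture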


I tried the command, but I get the message: ld: framework not found CoreImage; clang: error: linker command failed with exit code 1 (use -v to see invocation) – Ron


It looks like CoreImage first shipped as a standalone framework in OS X 10.11. Are you using the latest version of macOS? – faffaffaff
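A quick way to check whether the standalone framework exists on a given machine (my own suggestion, not from the original thread; on releases before 10.11 CoreImage was bundled inside the QuartzCore umbrella, so linking only QuartzCore may be the fallback there):

ls /System/Library/Frameworks/CoreImage.framework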


Thanks for your help, it all works well now. – Ron