Commit ab1f4412 authored by Arnaud Blanchard's avatar Arnaud Blanchard
Browse files

Initial files for camera acquisition on Mac OSX

parents
/*
* blQTKit.h
* blQTKitlib
*
* Created by Arnaud Blanchard on 31/01/15.
* Copyright 2015 ETIS. All rights reserved.
*
*/
#ifndef BLQTKIT_H
#define BLQTKIT_H
#include "blc_core.h"
#include <QuartzCore/QuartzCore.h>
START_EXTERN_C
/* Starts video capture on the default camera and enters the application run
 * loop. 'callback' is invoked once per captured frame with the frame wrapped
 * in a blc_array and the caller-supplied 'user_data'; a callback return of 0
 * stops the acquisition (the application terminates).
 * NOTE(review): this call blocks — it does not return while capturing. */
void init_capture(int (*callback)(blc_array *image, void*), void *user_data);
END_EXTERN_C
#endif
\ No newline at end of file
//
// Created by Arnaud Blanchard on 06/11/14.
// Copyright ETIS 2014. All rights reserved.
//
#include "blQTKit.h"
#include "blc_image.h"
#import <QTKit/QTKit.h>
#import <CoreVideo/CVPixelBuffer.h>
/* Delegate object receiving decompressed video frames from QTKit.
 * The blc_array geometry is configured lazily on the first frame, then every
 * frame is forwarded to the user callback. */
@interface capture:NSObject
{
@public
QTCaptureSession *capture_session; // kept so the session outlives init_capture's local scope
blc_array image; // frame descriptor; .data points into the locked CVPixelBuffer during the callback
int initialized; // non-zero once the image geometry has been derived from the first frame
void *user_data; // opaque pointer forwarded unchanged to 'callback'
int (*callback)(blc_array *image, void *user_data); // user callback; returning 0 stops acquisition
}
@end
@implementation capture

/* Designated initializer.
 * FIX: the original returned self without chaining to [super init], skipping
 * NSObject initialization. */
- (id)init
{
    self = [super init];
    if (self) initialized = 0; /* redundant after +alloc zero-fill, kept explicit */
    return self;
}

/* Required QTKit delegate method for dropped late frames; intentionally empty. */
-(void)captureOutput:(QTCaptureOutput *)captureOutput didDropVideoFrameWithSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
    (void)captureOutput;
    (void)sampleBuffer;
    (void)connection;
}

/* Called by QTKit for every decompressed frame.
 * First frame: derive the pixel type/format and the dimensions from the
 * CVPixelBuffer and record them in 'image'. Every frame: lock the pixel
 * buffer, point image.data at it, invoke the user callback, unlock. A
 * callback return of 0 terminates the application (ending the run loop
 * started by init_capture). */
-(void)captureOutput:(QTCaptureOutput *)captureOutput didOutputVideoFrame:(CVImageBufferRef)videoFrame withSampleBuffer:(QTSampleBuffer *)sampleBuffer fromConnection:(QTCaptureConnection *)connection
{
    int bytes_per_pixel;
    int continue_run = 1;

    (void)captureOutput;
    (void)sampleBuffer;
    (void)connection;
    if (!initialized)
    {
        image.type = 'UIN8';
        /* CoreVideo stores the FourCC big-endian; convert to host order so the
         * blc_image helpers interpret it correctly. */
        image.format = ntohl(CVPixelBufferGetPixelFormatType(videoFrame));
        bytes_per_pixel = blc_image_get_bytes_per_pixel(&image);
        switch (bytes_per_pixel){
        case -1: EXIT_ON_ARRAY_ERROR(&image, "Variable pixel size (i.e. compression) not yet managed."); //Variable pixel size i.e:JPEG
            break;
        case 1: break; /* single byte per pixel: no extra channel dimension */
        default: image.add_dim(bytes_per_pixel);
        }
        image.add_dim(CVPixelBufferGetWidth(videoFrame));
        image.add_dim(CVPixelBufferGetHeight(videoFrame));
        initialized = 1;
    }
    CVPixelBufferLockBaseAddress(videoFrame, 0);
    image.data = CVPixelBufferGetBaseAddress(videoFrame);
    continue_run = callback(&image, user_data);
    CVPixelBufferUnlockBaseAddress(videoFrame, 0);
    if (!continue_run) [NSApp terminate:NULL];
}
@end
START_EXTERN_C
/* Configures a QTKit capture session on the default video device and starts
 * the Cocoa run loop. 'callback' receives every decompressed frame wrapped in
 * a blc_array together with 'user_data'; returning 0 from the callback
 * terminates the application. This function does not return while the
 * acquisition is running. */
void init_capture(int (*callback)(blc_array *image, void*), void *user_data)
{
    NSError *error;
    QTCaptureSession *capture_session;
    QTCaptureDevice *device;
    QTCaptureDeviceInput *device_input;
    QTCaptureDecompressedVideoOutput *decompressed_video_output;
    capture *capture_instance = [[capture alloc] init];

    capture_instance->callback = callback;
    capture_instance->user_data = user_data;
    capture_session = [QTCaptureSession new];
    device = [QTCaptureDevice defaultInputDeviceWithMediaType:QTMediaTypeVideo];
    if (![device open:&error]) EXIT_ON_ERROR("Opening device", [error localizedDescription]);
    device_input = [[QTCaptureDeviceInput alloc] initWithDevice:device];
    /* FIX: the original reported "Setting Decompressed output" here (a
     * copy-paste of the addOutput error below), mislabelling input failures. */
    if (![capture_session addInput:device_input error:&error]) EXIT_ON_ERROR("Adding device input.\n\t", [error localizedDescription]);
    decompressed_video_output = [QTCaptureDecompressedVideoOutput new];
    if(![capture_session addOutput:decompressed_video_output error:&error]) EXIT_ON_ERROR("Setting Decompressed output.\n\t", [error localizedDescription]);
    [decompressed_video_output setAutomaticallyDropsLateVideoFrames:YES];
    [decompressed_video_output setDelegate:capture_instance];
    [NSApplication sharedApplication];
    [NSApp setActivationPolicy:NSApplicationActivationPolicyRegular]; //does not work on SDK 10.5
    [NSApp activateIgnoringOtherApps:YES];
    capture_instance->capture_session = capture_session;
    // printf("%s\n", [[decompressed_video_output pixelBufferAttributes] valueForKey:(id)kCVPixelBufferPixelFormatTypeKey]); try to print format
    [capture_session startRunning];
    [NSApp run]; /* blocks until [NSApp terminate:] */
}
END_EXTERN_C
\ No newline at end of file
# Set the minimum version of cmake required to build this project
cmake_minimum_required(VERSION 2.6)
project(i_AV_camera)

find_package(blc_channel)
find_package(blc_image)
find_package(blc_program)

# FIX: the CoreMedia/CoreVideo cache variables were misspelled *_LIBRRY.
# They worked because the typo was consistent, but the misspelled names are
# hard to find/override in the CMake cache; spell them conventionally.
find_library(AVFOUNDATION_LIBRARY AVFoundation)
find_library(APPKIT_LIBRARY AppKit)
find_library(COREMEDIA_LIBRARY CoreMedia)
find_library(COREVIDEO_LIBRARY CoreVideo)

add_definitions(${BL_DEFINITIONS} -Wall -Wextra)
include_directories(${BL_INCLUDE_DIRS})

add_executable(i_AV_camera i_AV_camera.mm)
target_link_libraries(i_AV_camera ${APPKIT_LIBRARY} ${COREVIDEO_LIBRARY} ${COREMEDIA_LIBRARY} ${AVFOUNDATION_LIBRARY} ${BL_LIBRARIES})
//
// Created by Arnaud Blanchard on 06/11/14.
// Copyright ETIS 2014. All rights reserved.
//
#include <AppKit/AppKit.h>
#include <AVFoundation/AVFoundation.h>
#import <CoreVideo/CVPixelBuffer.h>
#include <sys/signal.h>
#include <stdlib.h>
#include <unistd.h>
#include "blc_core.h"
#include "blc_channel.h"
#include "blc_program.h"
#include "blc_image.h"
#include <pthread.h>
/**
@main
Exemple of using the console to display the camera
*/
static blc_channel image, channel; // 'image' wraps the camera frame; 'channel' is the published output
static char const *output_name; // output blc_channel name; defaults to ":<program name>" in main()
static char const *output_format_option, *output_type_option; // "NDEF" means: inherit from the camera frame
static int initialized=0; // set after the first frame configures geometry and creates the channel
static uint32_t type, format; // resolved output data type and pixel format (FourCC-style uint32)
static pthread_mutex_t mutex_busy, mutex_new_image; // only used by the (currently disabled) treat_image thread
static int drop_images_nb=0; // frames dropped since the last frame actually sent
static int dropping=0; // when non-zero, frames are skipped while the reader is not ready
/* Worker-thread entry point, currently unused (its pthread_create is commented
 * out in the capture callback). Intended to convert the latest captured frame
 * into the output channel off the capture queue.
 * NOTE(review): mutex_new_image is locked on every iteration but never
 * unlocked here — presumably the capture callback was meant to unlock it to
 * signal a fresh frame; verify that protocol before re-enabling this thread. */
void* treat_image(void *){
/* if (sem_trywait(channel.sem_new_data)==0){//Nobody listen
sem_post(channel.sem_ack_data);
}*/
while (blc_command_loop_start()){
BLC_PTHREAD_CHECK(pthread_mutex_lock(&mutex_new_image), NULL); // wait for a new-frame signal
BLC_PTHREAD_CHECK(pthread_mutex_lock(&mutex_busy), NULL); // exclusive access to 'image'
blc_image_convert(&channel, &image);
BLC_PTHREAD_CHECK(pthread_mutex_unlock(&mutex_busy), NULL);
blc_command_loop_end();
}
[NSApp terminate:NULL];
return NULL;
}
/* Delegate receiving frames from the AVFoundation capture session.
 * All state lives in the file-scope statics, so the object itself carries no
 * instance variables. */
@interface capture:NSObject <AVCaptureVideoDataOutputSampleBufferDelegate>
{
@public
}
@end
@implementation capture

/* Designated initializer.
 * FIX: the original returned self without chaining to [super init], skipping
 * NSObject initialization. */
- (id)init
{
    self = [super init];
    return self;
}

/* Called on the capture queue for every frame.
 * First frame: derive pixel type/format and dimensions from the CVPixelBuffer,
 * allocate 'image', create and publish the output blc_channel, and initialize
 * the command loop. Subsequent frames: when 'dropping' is set, skip frames the
 * reader has not acknowledged; otherwise convert the frame into the channel
 * inside the command loop. When the loop stops, terminate the application. */
- (void)captureOutput:(AVCaptureOutput *)captureOutput didOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer fromConnection:(AVCaptureConnection *)connection
{
    CVImageBufferRef imageBuffer;
    int bytes_per_pixel;
    int width, height;
    int ret; /* FIX: removed unused local 'acquisition' */

    (void)captureOutput;
    (void)connection;
    imageBuffer = CMSampleBufferGetImageBuffer(sampleBuffer);
    if (!initialized)
    {
        image.type = 'UIN8';
        /* CoreVideo stores the FourCC big-endian; convert to host order. */
        image.format = ntohl(CVPixelBufferGetPixelFormatType(imageBuffer));
        bytes_per_pixel = blc_image_get_bytes_per_pixel(&image);
        switch (bytes_per_pixel){
        case -1: EXIT_ON_ARRAY_ERROR(&image, "Variable pixel size (i.e. compression) not yet managed."); //Variable pixel size i.e:JPEG
            break;
        case 1: break; /* single byte per pixel: no extra channel dimension */
        default: image.add_dim(bytes_per_pixel);
        }
        width = CVPixelBufferGetWidth(imageBuffer);
        height = CVPixelBufferGetHeight(imageBuffer);
        image.add_dim(width);
        image.add_dim(height);
        image.allocate();
        initialized = 1;
        /* "NDEF" options mean: inherit type/format from the camera frame. */
        if (strcmp(output_type_option, "NDEF")==0) type=image.type;
        else type=STRING_TO_UINT32(output_type_option);
        if (strcmp(output_format_option, "NDEF")==0) format=image.format;
        else format=STRING_TO_UINT32(output_format_option);
        blc_image_def(&channel, type, format, width, height); //A simplifier
        channel.create_or_open(output_name, BLC_CHANNEL_WRITE);
        channel.publish();
        blc_loop_try_add_waiting_semaphore(channel.sem_ack_data);
        blc_loop_try_add_posting_semaphore(channel.sem_new_data);/*
        sem_post(channel.sem_new_data);
        */
        blc_command_loop_init(0); //As fast as possible
        // BLC_PTHREAD_CHECK(pthread_mutex_lock(&mutex_new_image), NULL);
        // BLC_PTHREAD_CHECK(pthread_create(&thread, NULL, treat_image, NULL), NULL);
    }
    ret = 1;
    if (dropping){
        /* Drop the frame if the reader has not acknowledged the previous one. */
        if (channel.sem_ack_data) {
            ret = sem_trywait(channel.sem_ack_data);
            if ((ret==-1) && (errno==EAGAIN)) { /* reader is not ready to receive data */
                drop_images_nb++;
                return;
            } else if (ret==-1) EXIT_ON_SYSTEM_ERROR(NULL);
        }
        /* Drop the frame if the previous one has not been consumed yet. */
        if (channel.sem_new_data) {
            ret = sem_trywait(channel.sem_new_data);
            if (ret==0) {
                sem_post(channel.sem_new_data);
                if (channel.sem_ack_data) sem_post(channel.sem_ack_data); /* we took it just before */
                drop_images_nb++;
                return;
            } else if (errno!=EAGAIN) EXIT_ON_SYSTEM_ERROR(NULL);
        }
    }
    if (blc_command_loop_start()){
        if (drop_images_nb) {
            fprintf(stderr, "drop %d images\n", drop_images_nb);
            drop_images_nb = 0;
        }
        CVPixelBufferLockBaseAddress(imageBuffer, 0);
        image.data = CVPixelBufferGetBaseAddress(imageBuffer);
        blc_image_convert(&channel, &image);
        CVPixelBufferUnlockBaseAddress(imageBuffer, 0);
        // if (channel.sem_new_data) sem_post(channel.sem_new_data);
        blc_command_loop_end();
    } else {
        image.data = NULL; //avoid "pointer being freed was not allocated"
        [NSApp terminate:NULL];
    }
}
@end
/* Lists on stderr the available video capture devices and the CoreVideo pixel
 * formats a decompressed video output can deliver. */
void display_infos(){
    NSArray *devices = [AVCaptureDevice devices];
    uint32_t format_uint32;
    AVCaptureVideoDataOutput *videoDataOutput = [AVCaptureVideoDataOutput new];

    for (AVCaptureDevice *device in devices) {
        if ([device hasMediaType:AVMediaTypeVideo]) {
            fprintf(stderr,"device %s" , [[device localizedName] UTF8String]);
            fprintf(stderr, "\npixels format: ");
            for (NSNumber *format in [videoDataOutput availableVideoCVPixelFormatTypes]){
                /* FIX: convert the big-endian FourCC to host order before printing
                 * its bytes — consistent with the ntohl() applied when the format
                 * is stored in image.format. Without it the four characters print
                 * reversed on little-endian machines. */
                format_uint32 = ntohl((uint32_t)[format unsignedIntegerValue]);
                fprintf(stderr, "'%.4s' ", (char*)&format_uint32);
            }
            fprintf(stderr, "\n");
        }
    }
}
/* Opens the default video device, attaches the frame delegate on a serial
 * dispatch queue and enters the Cocoa run loop. Does not return while the
 * acquisition is running. */
void start_acquisition()
{
    NSError *error;
    AVCaptureDevice *selected_device;
    capture *capture_instance;
    dispatch_queue_t videoDataOutputQueue;
    AVCaptureVideoDataOutput *videoDataOutput = [AVCaptureVideoDataOutput new];
    AVCaptureSession *session = [AVCaptureSession new];
    AVCaptureDeviceInput *input;

    /* FIX: initialize the mutexes BEFORE the session may deliver frames; the
     * original initialized them after -startRunning, leaving a window where a
     * worker thread could use uninitialized mutexes. */
    pthread_mutex_init(&mutex_busy, NULL);
    pthread_mutex_init(&mutex_new_image, NULL);
    selected_device = [AVCaptureDevice defaultDeviceWithMediaType:AVMediaTypeVideo];
    fprintf(stderr, "Device: %s\n", [[selected_device localizedName] UTF8String]);
    input = [AVCaptureDeviceInput deviceInputWithDevice:selected_device error:&error];
    if (!input) EXIT_ON_ERROR("Acquiring input device");
    [session addInput:input];
    fprintf(stderr, "Preset %s\n", [[session sessionPreset] UTF8String]); /* FIX: typo "Presset" */
    videoDataOutputQueue = dispatch_queue_create("VideoDataOutputQueue", DISPATCH_QUEUE_SERIAL);
    capture_instance = [capture new];
    [videoDataOutput setSampleBufferDelegate:capture_instance queue:videoDataOutputQueue];
    [videoDataOutput setAlwaysDiscardsLateVideoFrames:NO];
    [session addOutput:videoDataOutput];
    [session startRunning];
    [NSApplication sharedApplication];
    // [NSApp setActivationPolicy:NSApplicationActivationPolicyRegular]; //does not work on SDK 10.5
    // [NSApp activateIgnoringOtherApps:YES];
    [NSApp run]; /* blocks until [NSApp terminate:] */
}
/* Parses command-line options, optionally prints device information, then
 * starts the acquisition loop. The output channel name defaults to
 * ":<program name>" when not given. */
int main(int argc, char **argv){
    char const *infos;

    /* FIX: "ouput" typos in the user-visible option help strings. */
    blc_program_add_option(&output_format_option, 'f', "format", "Y800", "set output video format", "NDEF");
    blc_program_add_option(&output_name, 'o', "output", "blc_channel", "output channel", NULL);
    blc_program_add_option(&output_type_option, 't', "type", "UIN8|FL32", "set output data type", "NDEF");
    blc_program_add_option(&infos, 'i', "infos", NULL, "list infos: pixel formats of the device", NULL);
    blc_program_init(&argc, &argv, blc_quit);
    if (infos) {
        display_infos();
        exit(EXIT_SUCCESS);
    }
    if (output_name==NULL) asprintf((char**)&output_name, ":%s", blc_program_name);
    start_acquisition();
    return EXIT_SUCCESS;
}
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.