An app that uses iSight to generate CSV files for importing into Booklog and MediaMarker
| Rev. | deec2c46d1e4b89d37ea473242737695cab87712 |
|---|---|
| Size | 16,402 bytes |
| Time | 2012-03-25 16:55:28 |
| Author | masakih |
| Log Message | [Mod] Dropped PPC support |
/* SampleCIView.m - simple OpenGL based CoreImage view */
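/* Usage sketch (assumptions, not part of this file: `ciView` is an outlet to
 * this view and `imageBuffer` is a CVImageBuffer delivered by a capture
 * delegate, e.g. a QTCaptureDecompressedVideoOutput):
 *
 *   [ciView setCaptureSize:CGSizeMake(640.0, 480.0)];
 *   [ciView setMirrored:YES];
 *   [ciView setCropRect:CGRectZero];   // zero width disables cropping; overlay built for 640x480
 *
 *   CIImage *frame = [CIImage imageWithCVImageBuffer:imageBuffer];
 *   [ciView setImage:frame];           // marks the view dirty; -drawRect: composites the overlays
 */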
#import "SampleCIView.h"
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
static CGRect centerSizeWithinRect(CGSize size, CGRect rect);
@interface SampleCIView (Private)
- (CIImage *)lineImageWithColor:(NSColor *)aColor;
@end
@implementation SampleCIView
- (id)initWithFrame:(NSRect)frameRect {
self = [super initWithFrame:frameRect];
if (self != nil) {
//lock = [[NSRecursiveLock alloc] init];
}
return self;
}
- (void)dealloc {
[defaultColor release];
[mirroredFilter release];
[redLinesFilter release];
[greenLinesFilter release];
//[lock release];
[theImage release];
[context release];
[super dealloc];
}
+ (NSOpenGLPixelFormat *)defaultPixelFormat
{
static NSOpenGLPixelFormat *pf;
if (pf == nil)
{
/* Making sure the context's pixel format doesn't have a recovery
* renderer is important - otherwise CoreImage may not be able to
* create deeper contexts that share textures with this one. */
static const NSOpenGLPixelFormatAttribute attr[] = {
NSOpenGLPFAAccelerated,
NSOpenGLPFANoRecovery,
NSOpenGLPFAColorSize, 32,
0
};
pf = [[NSOpenGLPixelFormat alloc] initWithAttributes:(void *)&attr];
}
return pf;
}
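/* When goodScan is YES, -drawRect: composites the green scan-area outline over the frame instead of the red one. */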
- (void)setGoodScan:(BOOL)aBool {
goodScan = aBool;
}
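/* Lazily builds a CIAffineTransform filter that flips the incoming frame so the preview behaves like a mirror. Assumes captureSize has already been set via -setCaptureSize:. */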
- (void)setMirrored:(BOOL)aBool {
mirrored = aBool;
if (mirrored && mirroredFilter == nil) {
mirroredFilter = [[CIFilter filterWithName:@"CIAffineTransform"] retain];
NSAffineTransform *rotateTransform = [NSAffineTransform transform];
[rotateTransform rotateByDegrees:180.0];
[rotateTransform scaleXBy:1.0 yBy:-1.0];
[rotateTransform translateXBy:-captureSize.width yBy:0 ];
[mirroredFilter setValue:rotateTransform forKey:@"inputTransform"];
}
//NSLog(@"Mirrored: %d, filter: %@", mirrored, mirroredFilter);
}
/*
- (void)setDefaultColor:(NSColor *)aColor {
[defaultColor release];
defaultColor = [aColor retain];
}
*/
// If any kind of crop rect is set, the new zbarScanner is in use; whether bars or a box is shown might need to become a display preference.
- (void)setCropRect:(CGRect)aCropRect {
if (aCropRect.size.width) {
cropImage = YES;
cropRect = aCropRect;
//defaultColor = [NSColor greenColor];
}
else {
//if (defaultColor == nil)
//defaultColor = [NSColor redColor];
cropImage = NO;
}
CIImage *redLines = [self lineImageWithColor:[NSColor redColor]];
CIImage *greenLines = [self lineImageWithColor:[NSColor greenColor]];
// Release any filters from a previous call before creating new ones to avoid leaking them.
[redLinesFilter release];
redLinesFilter = [[CIFilter filterWithName:@"CISourceOverCompositing"] retain];
[redLinesFilter setValue:redLines forKey:@"inputImage"];
[greenLinesFilter release];
greenLinesFilter = [[CIFilter filterWithName:@"CISourceOverCompositing"] retain];
[greenLinesFilter setValue:greenLines forKey:@"inputImage"];
//Add Lines
}
/*
- (void)setHighResiSight:(BOOL)aBool {
cropImage = aBool;
// The value of FIRST_LINE_SCAN_HIGH in MybarcodeScanner has to change as well if the value below changes
// cropRect = CGRectMake(0.0, 0.0, 640.0, 480.0); //Capture bottom left portion
// cropRect = CGRectMake(320.0, 240.0, 640.0, 4800.0); //To capture middle
//cropRect = CGRectMake(0.0, 544.0, 640.0, 480.0); //Capture top left portion
cropRect = CGRectMake(320.0, 544.0, 640.0, 480.0); //Capture top middle
CIImage *redLines = [self lineImageWithColor:[NSColor redColor]];
CIImage *greenLines = [self lineImageWithColor:[NSColor greenColor]];
redLinesFilter = [[CIFilter filterWithName:@"CISourceOverCompositing"] retain];
[redLinesFilter setValue:redLines forKey:@"inputImage"];
greenLinesFilter = [[CIFilter filterWithName:@"CISourceOverCompositing"] retain];
[greenLinesFilter setValue:greenLines forKey:@"inputImage"];
//NSLog(@"HighRes: %d, size: %f %f %f %f", cropImage, cropRect.origin.x, cropRect.origin.y, cropRect.size.width, cropRect.size.height);
}
*/
#define NUMBER_OF_LINES_SCANNED 14
#define SPACING_BETWEEN_SCAN_LINE 8
#define LINE_WIDTH 250.0
//#define FIRST_LINE_SCAN
//#define FIRST_LINE_SCAN (752 - (NUMBER_OF_LINES_SCANNED /2 * SPACING_BETWEEN_SCAN_LINE))
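/* Builds an image containing a rectangular outline (stroked in aColor) marking the scan area, sized to the current capture/crop settings, for compositing over the live frame. */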
- (CIImage *)lineImageWithColor:(NSColor *)aColor {
NSImage *linesImage;
NSBezierPath *linesDetails = [NSBezierPath bezierPath];
//NSBezierPath *barsDetails= [NSBezierPath bezierPath];
[linesDetails setLineJoinStyle: NSRoundLineJoinStyle];
NSRect aRect;
if (cropImage) {
//linesImage = [[NSImage alloc] initWithSize:NSMakeSize(770.0, 1024.0)];
//middleLine.y = (784 - (NUMBER_OF_LINES_SCANNED /2 * SPACING_BETWEEN_SCAN_LINE));
//middleLine.x = 520.0;
//middleLine.y = (240 - (NUMBER_OF_LINES_SCANNED /2 * SPACING_BETWEEN_SCAN_LINE));
linesImage = [[NSImage alloc] initWithSize:NSMakeSize(captureSize.width, captureSize.height)];
//middleLine.y = (cropRect.origin.y + (cropRect.size.height /2) - (NUMBER_OF_LINES_SCANNED /2 * SPACING_BETWEEN_SCAN_LINE));
//middleLine.x = cropRect.origin.x + (cropRect.size.width * .25);
aRect.origin.x = cropRect.origin.x + (cropRect.size.width * .2);
aRect.origin.y = cropRect.origin.y + (cropRect.size.height * .3);
aRect.size.width = floor(cropRect.size.width * .6);
aRect.size.height = floor(cropRect.size.height * .4);
}
else {
linesImage = [[NSImage alloc] initWithSize:NSMakeSize(640.0, 480.0)];
cropRect = CGRectMake(0, 0, 640, 480);
aRect.origin.x = cropRect.origin.x + (cropRect.size.width * .25);
aRect.origin.y = cropRect.origin.y + (cropRect.size.height * .3);
aRect.size.width = floor(cropRect.size.width * .5);
aRect.size.height = floor(cropRect.size.height * .4);
/*
NSPoint middleLine;
middleLine.y = (240 - (NUMBER_OF_LINES_SCANNED /2 * SPACING_BETWEEN_SCAN_LINE));
middleLine.x = 200.0;
float lineWidth = floor(cropRect.size.width * .4);
//[linesDetails setLineWidth:1];
int i;
for (i = 0; i < NUMBER_OF_LINES_SCANNED; i++) {
i * SPACING_BETWEEN_SCAN_LINE;
[linesDetails moveToPoint:NSMakePoint(middleLine.x, middleLine.y + (i * SPACING_BETWEEN_SCAN_LINE) +1)];
[linesDetails lineToPoint:NSMakePoint(middleLine.x + lineWidth, middleLine.y + (i * SPACING_BETWEEN_SCAN_LINE) +1)];
}
*/
}
[linesDetails setLineWidth:2];
[linesDetails moveToPoint:NSMakePoint(aRect.origin.x, aRect.origin.y)];
[linesDetails lineToPoint:NSMakePoint(aRect.origin.x, aRect.origin.y + aRect.size.height)];
[linesDetails lineToPoint:NSMakePoint(aRect.origin.x + aRect.size.width, aRect.origin.y + aRect.size.height)];
[linesDetails lineToPoint:NSMakePoint(aRect.origin.x + aRect.size.width, aRect.origin.y)];
[linesDetails closePath];
[linesImage lockFocus];
[aColor set];
[linesDetails stroke];
[linesImage unlockFocus];
CIImage *tempImage = [CIImage imageWithData:[linesImage TIFFRepresentation]];
[linesImage release];
return tempImage;
}
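/* Returns the current frame, passed through the mirror filter when mirroring is enabled. */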
- (CIImage *)image
{
if (mirrored) {
[mirroredFilter setValue:theImage forKey:@"inputImage"];
return [mirroredFilter valueForKey:@"outputImage"];
}
else
return [[theImage retain] autorelease];
}
/*
- (void)setImage:(CIImage *)image dirtyRect:(CGRect)r
{
if (theImage != image)
{
//[lock lock];
[theImage release];
theImage = [image retain];
if (CGRectIsInfinite (r))
[self setNeedsDisplay:YES];
else
[self setNeedsDisplayInRect:*(NSRect *)&r];
//[lock unlock];
}
}
*/
- (void)setImage:(CIImage *)image
{
if (theImage != image)
{
//[lock lock];
[theImage release];
theImage = [image retain];
[self setNeedsDisplay:YES];
//[lock unlock];
}
//[self setImage:image dirtyRect:CGRectInfinite];
}
/*
- (void)setCleanRect:(CGRect)cleanRect
{
_cleanRect = cleanRect;
}
*/
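/* Records the native frame size of the capture device; used when building the mirror transform and the overlay image. */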
- (void)setCaptureSize:(CGSize)aSize
{
captureSize = aSize;
//cropRect = CGRectZero;
//cropRect.size = captureSize;
}
- (void)prepareOpenGL
{
const GLint parm = 1;
// file:///Developer/Documentation/DocSets/com.apple.ADC_Reference_Library.CoreReference.docset/Contents/Resources/Documents/documentation/GraphicsImaging/Conceptual/CoreVideo/CVProg_Intro/chapter_1_section_1.html
// file:///Developer/Documentation/DocSets/com.apple.ADC_Reference_Library.CoreReference.docset/Contents/Resources/Documents/documentation/GraphicsImaging/Conceptual/CoreImaging/ci_tasks/chapter_3_section_8.html
/* Enable beam-synced updates. */
[[self openGLContext] setValues:&parm forParameter:NSOpenGLCPSwapInterval];
/* Make sure that everything we don't need is disabled. Some of these
* are enabled by default and can slow down rendering. */
glDisable (GL_ALPHA_TEST);
glDisable (GL_DEPTH_TEST);
glDisable (GL_SCISSOR_TEST);
glDisable (GL_BLEND);
glDisable (GL_DITHER);
glDisable (GL_CULL_FACE);
glColorMask (GL_TRUE, GL_TRUE, GL_TRUE, GL_TRUE);
glDepthMask (GL_FALSE);
glStencilMask (0);
glClearColor (0.0f, 0.0f, 0.0f, 0.0f);
glHint (GL_TRANSFORM_HINT_APPLE, GL_FASTEST);
}
- (void)viewBoundsDidChange:(NSRect)bounds
{
#pragma unused(bounds)
/* For subclasses. */
}
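/* Reinstalls the viewport and a pixel-aligned orthographic projection whenever the view's bounds change. */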
- (void)updateMatrices
{
NSRect r = [self bounds];
if (!NSEqualRects (r, lastBounds))
{
[[self openGLContext] update];
/* Install an orthographic projection matrix (no perspective)
* with the origin in the bottom left and one unit equal to one
* device pixel. */
glViewport (0, 0, r.size.width, r.size.height);
glMatrixMode (GL_PROJECTION);
glLoadIdentity ();
glOrtho (0, r.size.width, 0, r.size.height, -1, 1);
glMatrixMode (GL_MODELVIEW);
glLoadIdentity ();
lastBounds = r;
[self viewBoundsDidChange:r];
}
}
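/* Renders the latest frame through CoreImage into the view's OpenGL context, applying the mirror transform and the red/green scan-area overlay. */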
- (void)drawRect:(NSRect)r
{
//[lock lock];
CGRect ir, rr;
//CGImageRef cgImage;
[[self openGLContext] makeCurrentContext];
/* Allocate a CoreImage rendering context using the view's OpenGL
* context as its destination if none already exists. */
if (context == nil)
{
NSOpenGLPixelFormat *pf;
pf = [self pixelFormat];
if (pf == nil)
pf = [[self class] defaultPixelFormat];
context = [[CIContext contextWithCGLContext:CGLGetCurrentContext()
                                pixelFormat:[pf CGLPixelFormatObj]
                                    options:nil] retain];
//CGColorSpaceCreateDeviceGray()
}
ir = CGRectIntegral (*(CGRect *)&r);
//if ([NSGraphicsContext currentContextDrawingToScreen]) {
[self updateMatrices];
/* Clear the specified subrect of the OpenGL surface then
* render the image into the view. Use the GL scissor test to
* clip to the subrect. Ask CoreImage to generate an extra
* pixel in case it has to interpolate (allow for hardware
* inaccuracies). */
rr = CGRectIntersection (CGRectInset (ir, -1.0f, -1.0f), *(CGRect *)&lastBounds);
glScissor (ir.origin.x, ir.origin.y, ir.size.width, ir.size.height);
glEnable (GL_SCISSOR_TEST);
glClear (GL_COLOR_BUFFER_BIT);
//NSLog(@"Display %@", theImage);
if (theImage != nil)
{
CIImage *displayImage = [[theImage retain] autorelease];
/*
NSCIImageRep *imageRep = [NSCIImageRep imageRepWithCIImage:[CIImage
imageWithCVImageBuffer:imageBuffer]];
NSImage *image = [[[NSImage alloc] initWithSize:[imageRep size]]
autorelease];
*/
//mirror filter
if (mirrored) {
[mirroredFilter setValue:displayImage forKey:@"inputImage"];
displayImage = [mirroredFilter valueForKey:@"outputImage"];
}
//Add lines showing scan area
if (goodScan) {
[greenLinesFilter setValue:displayImage forKey:@"inputBackgroundImage"];
displayImage = [greenLinesFilter valueForKey:@"outputImage"];
}
else if (redLinesFilter) { // May be nil if this is a capture window
[redLinesFilter setValue:displayImage forKey:@"inputBackgroundImage"];
displayImage = [redLinesFilter valueForKey:@"outputImage"];
}
/*
CIFilter *transitionFilter = [CIFilter filterWithName:@"CIAffineTransform"];
[transitionFilter setValue:displayImage forKey:@"inputImage"];
NSAffineTransform *rotateTransform = [NSAffineTransform transform];
//[rotateTransform scaleXBy:2.0 yBy:2.0];
[rotateTransform translateXBy:0 yBy:-512.0];
[transitionFilter setValue:rotateTransform forKey:@"inputTransform"];
displayImage = [transitionFilter valueForKey:@"outputImage"];
*/
/*
CIFilter *noiseReduction = [CIFilter filterWithName:@"CINoiseReduction"];
[noiseReduction setValue:displayImage forKey:@"inputImage"];
[noiseReduction setValue:[NSNumber numberWithFloat: 0.1] forKey:@"inputNoiseLevel"];
[noiseReduction setValue:[NSNumber numberWithFloat: 0.2] forKey:@"inputSharpness"];
displayImage = [noiseReduction valueForKey:@"outputImage"];
*/
/*
CIFilter *noiseReduction = [CIFilter filterWithName:@"CISharpenLuminance"];
[noiseReduction setValue:displayImage forKey:@"inputImage"];
[noiseReduction setValue:[NSNumber numberWithFloat: 0.2] forKey:@"inputSharpness"];
displayImage = [noiseReduction valueForKey:@"outputImage"];
*/
/*
CIFilter *noiseReduction = [CIFilter filterWithName:@"CIUnsharpMask"];
[noiseReduction setValue:displayImage forKey:@"inputImage"];
// [noiseReduction setValue:[NSNumber numberWithFloat: 4.0] forKey:@"inputRadius"];
//[noiseReduction setValue:[NSNumber numberWithFloat: 1.0] forKey:@"inputIntensity"];
displayImage = [noiseReduction valueForKey:@"outputImage"];
*/
/*
CIFilter *noiseReduction = [CIFilter filterWithName:@"CILineOverlay"];
[noiseReduction setValue:displayImage forKey:@"inputImage"];
// [noiseReduction setValue:[NSNumber numberWithFloat: 4.0] forKey:@"inputRadius"];
//[noiseReduction setValue:[NSNumber numberWithFloat: 1.0] forKey:@"inputIntensity"];
displayImage = [noiseReduction valueForKey:@"outputImage"];
*/
/*
CIFilter *hueAdjust = [CIFilter filterWithName:@"CIHueAdjust"];
[hueAdjust setDefaults];
[hueAdjust setValue: theImage forKey: @"inputImage"];
[hueAdjust setValue: [NSNumber numberWithFloat: 2.094] forKey: @"inputAngle"];
CIImage *rotatedImage = [hueAdjust valueForKey: @"outputImage"];
*/
/*
CIFilter *hueAdjust = [CIFilter filterWithName:@"CIEdgeWork"];
[hueAdjust setDefaults];
[hueAdjust setValue: theImage forKey: @"inputImage"];
[hueAdjust setValue: [NSNumber numberWithFloat: 0.3] forKey: @"inputRadius"];
displayImage = [hueAdjust valueForKey: @"outputImage"];
*/
//RELEVANT_SECTION
//Display the center part of a high resolution grab
if (cropImage) {
//[context drawImage:displayImage atPoint:rr.origin fromRect:rr];
[context drawImage:displayImage atPoint:rr.origin fromRect:cropRect];
}
else {
[context drawImage:displayImage atPoint:rr.origin fromRect:rr];
}
// use the commented out method if you want to perform scaling
//CGRect where = centerSizeWithinRect(captureSize, *(CGRect *)&lastBounds);
//[context drawImage:displayImage inRect:where fromRect:_cleanRect];
}
glDisable (GL_SCISSOR_TEST);
/* Flush the OpenGL command stream. If the view is double
* buffered this should be replaced by [[self openGLContext]
* flushBuffer]. */
glFlush ();
/*}
else
{
if (theImage != nil)
{
cgImage = [context createCGImage:theImage fromRect:ir];
if (cgImage != NULL)
{
CGContextDrawImage ([[NSGraphicsContext currentContext]
graphicsPort], ir, cgImage);
CGImageRelease (cgImage);
}
}
}
*/
//[lock unlock];
}
#pragma mark Mouse
- (void)mouseDown:(NSEvent *)theEvent {
/*if (captureMode) {
//MyiSightWindow *theWindow = (MyiSightWindow *)[self window];
id delegate = [[self window] delegate];
if ([delegate respondsToSelector:@selector(tooglePause:)]) {
[delegate tooglePause:self];
}
}
*/
[super mouseDown:theEvent];
}
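/* Capture mode turns cropping off; captureMode itself is only consulted by the (commented-out) mouse handling above. */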
- (void)setCapture:(BOOL)aMode {
cropImage = NO;
captureMode = aMode;
}
@end
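/* Aspect-fits `size` within `rect` and centers the result; currently only referenced from the commented-out scaling path in -drawRect:. */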
static CGRect centerSizeWithinRect(CGSize size, CGRect rect)
{
float delta;
if( CGRectGetHeight(rect) / CGRectGetWidth(rect) > size.height / size.width ) {
// rect is taller: fit width
delta = rect.size.height - size.height * CGRectGetWidth(rect) / size.width;
rect.size.height -= delta;
rect.origin.y += delta/2;
}
else {
// rect is wider: fit height
delta = rect.size.width - size.width * CGRectGetHeight(rect) / size.height;
rect.size.width -= delta;
rect.origin.x += delta/2;
}
return rect;
}