Comparing changes

base fork: HalfdanJ/ViljensTriumf2, base: 7f2795c127
head fork: HalfdanJ/ViljensTriumf2, compare: 8b5fa79cf4
  • 4 commits
  • 14 files changed
  • 0 commit comments
  • 1 contributor
Commits on Oct 16, 2012
@HalfdanJ a bc90588
Commits on Oct 19, 2012
@HalfdanJ 20 okt (20 Oct) 2b050e6
Commits on Oct 23, 2012
@HalfdanJ tirsdag (Tuesday) eb15a36
Commits on Oct 24, 2012
@HalfdanJ movie recorder works 8b5fa79
4 ViljensTriumf.xcodeproj/project.pbxproj
@@ -23,6 +23,7 @@
DD303C54162605FF00A4764C /* ChromaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = DD303C50162605FE00A4764C /* ChromaFilter.m */; };
DD303C55162605FF00A4764C /* deinterlaceFilter.cikernel in Resources */ = {isa = PBXBuildFile; fileRef = DD303C51162605FF00A4764C /* deinterlaceFilter.cikernel */; };
DD303C56162605FF00A4764C /* DeinterlaceFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = DD303C53162605FF00A4764C /* DeinterlaceFilter.m */; };
+ DD6F95061636AC4C0032288A /* alphaOver.cikernel in Resources */ = {isa = PBXBuildFile; fileRef = DD6F95051636AC4C0032288A /* alphaOver.cikernel */; };
DD9C845D1628053600089567 /* MavController.mm in Sources */ = {isa = PBXBuildFile; fileRef = DD9C845C1628053600089567 /* MavController.mm */; };
DDDC241F1626B10400FBC44F /* Foundation.framework in Frameworks */ = {isa = PBXBuildFile; fileRef = DD303C0B1625FA4400A4764C /* Foundation.framework */; };
DDDC24201626B11800FBC44F /* BeamSync.m in Sources */ = {isa = PBXBuildFile; fileRef = DDDC241D1626B0C000FBC44F /* BeamSync.m */; };
@@ -61,6 +62,7 @@
DD303C51162605FF00A4764C /* deinterlaceFilter.cikernel */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = deinterlaceFilter.cikernel; sourceTree = "<group>"; };
DD303C52162605FF00A4764C /* DeinterlaceFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = DeinterlaceFilter.h; sourceTree = "<group>"; };
DD303C53162605FF00A4764C /* DeinterlaceFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; path = DeinterlaceFilter.m; sourceTree = "<group>"; };
+ DD6F95051636AC4C0032288A /* alphaOver.cikernel */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = text; path = alphaOver.cikernel; sourceTree = "<group>"; };
DD9C845B1628053600089567 /* MavController.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; path = MavController.h; sourceTree = "<group>"; };
DD9C845C1628053600089567 /* MavController.mm */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.cpp.objcpp; path = MavController.mm; sourceTree = "<group>"; };
DDDC241C1626B0C000FBC44F /* BeamSync.h */ = {isa = PBXFileReference; lastKnownFileType = sourcecode.c.h; path = BeamSync.h; sourceTree = "<group>"; };
@@ -174,6 +176,7 @@
DD303C4E162605FE00A4764C /* filters */ = {
isa = PBXGroup;
children = (
+ DD6F95051636AC4C0032288A /* alphaOver.cikernel */,
DD303C4F162605FE00A4764C /* ChromaFilter.h */,
DD303C50162605FE00A4764C /* ChromaFilter.m */,
DD303C51162605FF00A4764C /* deinterlaceFilter.cikernel */,
@@ -238,6 +241,7 @@
DD303C171625FA4400A4764C /* Credits.rtf in Resources */,
DD303C1D1625FA4400A4764C /* MainMenu.xib in Resources */,
DD303C55162605FF00A4764C /* deinterlaceFilter.cikernel in Resources */,
+ DD6F95061636AC4C0032288A /* alphaOver.cikernel in Resources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
BIN  ...xcodeproj/project.xcworkspace/xcuserdata/jonas.xcuserdatad/UserInterfaceState.xcuserstate
Binary file not shown
21 ViljensTriumf/AppDelegate.h
@@ -22,7 +22,7 @@
{
@public
CIImage * cameras[3];
- float chromaMinSet, chromaMaxSet;
+ float chromaMinSet, chromaMaxSet, chromaValSet, chromaSatSet;
bool _recording;
bool _playVideo;
@@ -47,6 +47,8 @@
float transitionTime;
int transitionImageSourceSelector;
+
+ int _outSelector;
}
@property (unsafe_unretained) IBOutlet NSWindow *mainOutputWindow;
@@ -59,6 +61,7 @@
@property (weak) IBOutlet NSView *videoView;
@property (strong) DeinterlaceFilter * deinterlaceFilter;
+@property (strong) CIFilter * noiseReductionFilter;
@property (strong) CIFilter * colorControlsFilter;
@property (strong) CIFilter * gammaAdjustFilter;
@property (strong) CIFilter * toneCurveFilter;
@@ -68,7 +71,12 @@
@property (strong) CIFilter * constantColorFilter;
@property (strong) CIFilter * widescreenFilter;
@property (strong) CIFilter * dslrFilter;
+@property (strong) CIFilter * perspectiveFilter;
+@property (strong) CIFilter * perspectiveFilterMovie;
+@property (strong) CIFilter * chromaGaussian;
+@property (strong) CIFilter * chromaTransform;
+@property (strong) CIFilter * chromaCrop;
@property (readwrite) int outSelector;
@@ -90,6 +98,17 @@
@property (readwrite) int decklink2input;
@property (readwrite) int decklink3input;
+@property (readonly) bool out1selected;
+@property (readonly) bool out2selected;
+@property (readonly) bool out3selected;
+
+@property (readonly) NSString * out1name;
+@property (readonly) NSString * out2name;
+@property (readonly) NSString * out3name;
+
+
-(void) newFrame:(DecklinkCallback*)callback;
+- (IBAction)updateKeystone:(id)sender;
+-(CVPixelBufferRef) createCVImageBufferFromCallback:(DecklinkCallback*)callback;
@end
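
A note on the new readonly out1selected/out2selected/out3selected and out*name properties: the implementation below signals them by hand with willChangeValueForKey:/didChangeValueForKey:. A terser option, sketched here under the assumption that outSelector, decklink1input and cameraInputs stay KVO-compliant properties, is KVO's dependent-key mechanism. It would not cover changes made directly to the transitionImageSourceSelector ivar, which may be why the manual calls were chosen.

    // Sketch only, not part of this compare. These class methods would live in
    // AppDelegate's @implementation; KVO then re-reads the derived properties
    // whenever the keys they depend on change through their accessors.
    + (NSSet *)keyPathsForValuesAffectingOut1selected {
        return [NSSet setWithObject:@"outSelector"];
    }
    + (NSSet *)keyPathsForValuesAffectingOut1name {
        return [NSSet setWithObjects:@"decklink1input", @"cameraInputs", nil];
    }
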
658 ViljensTriumf/AppDelegate.mm
@@ -42,6 +42,8 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
//
//Init filters
//
+ self.noiseReductionFilter = [CIFilter filterWithName:@"CINoiseReduction"] ;
+ [self.noiseReductionFilter setDefaults];
self.colorControlsFilter = [CIFilter filterWithName:@"CIColorControls"] ;
[self.colorControlsFilter setDefaults];
@@ -55,11 +57,24 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
self.dissolveFilter = [CIFilter filterWithName:@"CIDissolveTransition"];
[self.dissolveFilter setDefaults];
+ self.perspectiveFilter = [CIFilter filterWithName:@"CIPerspectiveTransform"];
+ [self.perspectiveFilter setDefaults];
+
+ self.perspectiveFilterMovie= [CIFilter filterWithName:@"CIPerspectiveTransform"];
+ [self.perspectiveFilterMovie setDefaults];
+ [self updateKeystone:self];
+
+
+
self.constantColorFilter = [CIFilter filterWithName:@"CIConstantColorGenerator"];
+ [self.constantColorFilter setValue:[CIColor colorWithRed:1.0 green:0.0 blue:0.0 alpha:1.0] forKey:@"inputColor"];
+
+
self.sourceOverFilter = [CIFilter filterWithName:@"CISourceOverCompositing"];
self.deinterlaceFilter = [[DeinterlaceFilter alloc] init];
- [self.deinterlaceFilter setDefaults];
+
+ // [self.deinterlaceFilter setDefaults];
self.widescreenFilter = [CIFilter filterWithName:@"CIAffineTransform"];
@@ -69,6 +84,19 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
[transform translateXBy:-120 yBy:0];
[self.widescreenFilter setValue:transform forKey:@"inputTransform"];
+
+ self.chromaTransform = [CIFilter filterWithName:@"CIAffineTransform"];
+ [self.chromaTransform setDefaults];
+
+ self.chromaCrop = [CIFilter filterWithName:@"CICrop"];
+ [self.chromaCrop setDefaults];
+
+ self.chromaGaussian = [CIFilter filterWithName:@"CIGaussianBlur"];
+ [self.chromaGaussian setDefaults];
+
+ [self updateChromaTransform];
+
+
self.dslrFilter = [CIFilter filterWithName:@"CIAffineTransform"];
[self.dslrFilter setDefaults];
transform = [NSAffineTransform transform];
@@ -76,7 +104,7 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
[transform translateXBy:-100 yBy:-100];
[transform scaleBy:1.28];
[self.dslrFilter setValue:transform forKey:@"inputTransform"];
-
+
self.chromaFilter = [[ChromaFilter alloc] init];
self.master = 1.0;
@@ -89,71 +117,40 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
transitionTime = -1;
- /*
- NSError * error;
- self.mMovie = [[QTMovie alloc] initToWritableData:[NSMutableData data] error:&error];
- if (!self.mMovie) {
- [[NSAlert alertWithError:error] runModal];
- }*/
-
//Shortcuts
-
[NSEvent addLocalMonitorForEventsMatchingMask:(NSKeyDownMask) handler:^(NSEvent *incomingEvent) {
NSLog(@"Events: %@",incomingEvent);
-
- // if ([NSEvent modifierFlags] & NSAlternateKeyMask) {
switch ([incomingEvent keyCode]) {
case 82:
self.outSelector = 0;
transitionTime = 0;
-
break;
case 83:
self.outSelector = 1;
transitionTime = 0;
- /* serial.writeByte('1');
- serial.writeByte('*');
- serial.writeByte('4');
- serial.writeByte('!');*/
break;
case 84:
self.outSelector = 2;
transitionTime = 0;
- /* serial.writeByte('2');
- serial.writeByte('*');
- serial.writeByte('4');
- serial.writeByte('!');*/
break;
case 85:
self.outSelector = 3;
transitionTime = 0;
- /* serial.writeByte('3');
- serial.writeByte('*');
- serial.writeByte('4');
- serial.writeByte('!');*/
break;
- /* case 86:
- if(recordMovie){
- [self stopRecording];
- } else {
- [self startRecording];
- }
- break;*/
- /* case 86:
- {
- self.recording = false;
-
- self.outSelector = 4;
- break;
- }*/
+ case 76:
+ [avPlayer advanceToNextItem];
+ break;
default:
return incomingEvent;
break;
}
+
+
+
return (NSEvent*)nil;
}];
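
The shortcut handling above relies on NSEvent's local event monitor, which swallows a key by returning nil and passes it on by returning the event. Key codes 82-85 are the numeric-keypad 0-3 keys and 76 is keypad Enter. A minimal standalone sketch of the pattern (handler contents are placeholders):

    [NSEvent addLocalMonitorForEventsMatchingMask:NSKeyDownMask
                                          handler:^NSEvent *(NSEvent *event) {
        switch ([event keyCode]) {
            case 82:                 // keypad 0: e.g. cut to output 0
                return nil;          // handled, swallow the key
            case 76:                 // keypad Enter: e.g. advance the movie queue
                return nil;
            default:
                return event;        // not ours, let AppKit deliver it
        }
    }];
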
@@ -166,13 +163,18 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
[inputs addObject:@{@"name":@"2 Main B"}];
[inputs addObject:@{@"name":@"3 Main C"}];
[inputs addObject:@{@"name":@"4 Dolly"}];
- [inputs addObject:@{@"name":@"5 PTZ"}];
- [inputs addObject:@{@"name":@"6 Jonas"}];
- [inputs addObject:@{@"name":@"7 Top"}];
- [inputs addObject:@{@"name":@"8 Bagscene"}];
- [inputs addObject:@{@"name":@"9 Ude"}];
- [inputs addObject:@{@"name":@"10 DSLR"}];
-
+ [inputs addObject:@{@"name":@"5"}];
+ [inputs addObject:@{@"name":@"6 Cam 5 Model"}];
+ [inputs addObject:@{@"name":@"7 Lærred"}];
+ [inputs addObject:@{@"name":@"8 Ude"}];
+ [inputs addObject:@{@"name":@"9 Greenscreen"}];
+ [inputs addObject:@{@"name":@"10"}];
+ [inputs addObject:@{@"name":@"11 ---- "}];
+ [inputs addObject:@{@"name":@"12 PTZ"}];
+ [inputs addObject:@{@"name":@"13 Cam 5 kort "}];
+ [inputs addObject:@{@"name":@"14 Mercedes front"}];
+ [inputs addObject:@{@"name":@"15 Model (quad)"}];
+ [inputs addObject:@{@"name":@"16 Mercedes overshoulder"}];
self.cameraInputs = inputs;
self.decklink1input = -1;
@@ -205,6 +207,61 @@ - (void)applicationDidFinishLaunching:(NSNotification *)aNotification
}
+
+ avPlayerLayerPreview = [[AVPlayerLayer alloc] init];
+
+}
+
+-(void)setOutSelector:(int)outSelector{
+ [self willChangeValueForKey:@"out1selected"];
+ [self willChangeValueForKey:@"out2selected"];
+ [self willChangeValueForKey:@"out3selected"];
+
+ _outSelector = outSelector;
+
+ [self didChangeValueForKey:@"out1selected"];
+ [self didChangeValueForKey:@"out2selected"];
+ [self didChangeValueForKey:@"out3selected"];
+
+}
+
+-(void)updateChromaTransform{
+ NSUserDefaults * defaults = [NSUserDefaults standardUserDefaults];
+
+ /*NSAffineTransform * transform = [NSAffineTransform transform];
+ [transform translateXBy:[defaults floatForKey:@"chromaX"] yBy:[defaults floatForKey:@"chromaY"]];
+ [transform scaleXBy:[defaults floatForKey:@"chromaScale"] yBy:[defaults floatForKey:@"chromaScale"]];
+ [self.chromaTransform setValue:transform forKey:@"inputTransform"];
+ */
+
+ CIVector * vec = [CIVector vectorWithX:[defaults floatForKey:@"chromaX"] Y:[defaults floatForKey:@"chromaY"] Z:720*[defaults floatForKey:@"chromaScale"] W:576*[defaults floatForKey:@"chromaScale"]];
+ [self.chromaCrop setValue:vec forKey:@"inputRectangle"];
+
+
+}
+- (IBAction)updateKeystone:(id)sender {
+ NSUserDefaults * settings = [NSUserDefaults standardUserDefaults];
+ [self.perspectiveFilter setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c1x"]*720.0 Y:[settings floatForKey:@"c1y"]*576.0] forKey:@"inputTopLeft"];
+
+ [self.perspectiveFilter setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c2x"]*720.0 Y:[settings floatForKey:@"c2y"]*576.0] forKey:@"inputTopRight"];
+
+ [self.perspectiveFilter setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c3x"]*720.0 Y:[settings floatForKey:@"c3y"]*576.0] forKey:@"inputBottomRight"];
+
+ [self.perspectiveFilter setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c4x"]*720.0 Y:[settings floatForKey:@"c4y"]*576.0] forKey:@"inputBottomLeft"];
+
+ [self.perspectiveFilterMovie setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c1x"]*1024 Y:[settings floatForKey:@"c1y"]*768] forKey:@"inputTopLeft"];
+
+ [self.perspectiveFilterMovie setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c2x"]*1024 Y:[settings floatForKey:@"c2y"]*768] forKey:@"inputTopRight"];
+
+ [self.perspectiveFilterMovie setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c3x"]*1024 Y:[settings floatForKey:@"c3y"]*768] forKey:@"inputBottomRight"];
+
+ [self.perspectiveFilterMovie setValue:[[CIVector alloc] initWithX:[settings floatForKey:@"c4x"]*1024 Y:[settings floatForKey:@"c4y"]*768] forKey:@"inputBottomLeft"];
+
+
+}
+
+-(int)outSelector{
+ return _outSelector;
}
-(void)applicationWillTerminate:(NSNotification *)notification{
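
The updateKeystone: action above drives CIPerspectiveTransform from normalized corner values (c1x through c4y) stored in NSUserDefaults, scaled by 720x576 for the live frames and 1024x768 for the movie layer. For reference, a minimal sketch of the filter with hypothetical corner values; the four input keys take pixel coordinates in the source image:

    CIFilter *perspective = [CIFilter filterWithName:@"CIPerspectiveTransform"];
    [perspective setDefaults];
    [perspective setValue:sourceImage forKey:@"inputImage"];          // some CIImage
    [perspective setValue:[CIVector vectorWithX:0.02 * 720.0 Y:0.97 * 576.0]
                   forKey:@"inputTopLeft"];
    [perspective setValue:[CIVector vectorWithX:0.98 * 720.0 Y:0.96 * 576.0]
                   forKey:@"inputTopRight"];
    [perspective setValue:[CIVector vectorWithX:0.99 * 720.0 Y:0.03 * 576.0]
                   forKey:@"inputBottomRight"];
    [perspective setValue:[CIVector vectorWithX:0.01 * 720.0 Y:0.02 * 576.0]
                   forKey:@"inputBottomLeft"];
    CIImage *keystoned = [perspective valueForKey:@"outputImage"];
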
@@ -218,37 +275,57 @@ -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NS
if([keyPath isEqualToString:@"input"]){
NSNumber * output = (__bridge NSNumber*) context;
- // NSLog(@"Change %@-> %@",[object valueForKey:@"input"], output);
-
dispatch_async(dispatch_get_main_queue(), ^{
if([output intValue] == 0){
+ [self willChangeValueForKey:@"out1name"];
self.decklink1input = [[object valueForKey:@"input"] intValue];
+ [self didChangeValueForKey:@"out1name"];
+
}
if([output intValue] == 1){
+ [self willChangeValueForKey:@"out2name"];
self.decklink2input = [[object valueForKey:@"input"] intValue];
+ [self didChangeValueForKey:@"out2name"];
+
}
if([output intValue] == 2){
+ [self willChangeValueForKey:@"out3name"];
self.decklink3input = [[object valueForKey:@"input"] intValue];
+ [self didChangeValueForKey:@"out3name"];
+
}
});
-
}
+
+
if(context == &ItemStatusContext){
if(avPlayer.error){
NSLog(@"Error loading %@",avPlayer.error);
+ } else {
+ NSLog(@"Loaded player item");
}
// [avPlayer play];
[self.mainOutput setWantsLayer:YES];
avPlayerLayer.backgroundColor = [[NSColor colorWithCalibratedWhite:0.0 alpha:1.0] CGColor];
avPlayerLayer.videoGravity = AVLayerVideoGravityResize;
- [avPlayerLayer setFrame:[[self.mainOutput layer] bounds]];
+ // [avPlayerLayer setFrame:[[self.mainOutput layer] bounds]];
+ NSRect frame = [[self.mainOutput layer] bounds];
+ NSRect bounds = [[self.mainOutput layer] bounds];
+// bounds.size.width *= 0.5;
+ // bounds.size.height *= 0.5;
+ [avPlayerLayer setFrame:frame];
+ [avPlayerLayer setBounds:bounds];
+ //xavPlayerLayer.transform
+ // avPlayerLayer.contentsScale = 2.0;
[avPlayerLayer setAutoresizingMask:kCALayerWidthSizable | kCALayerHeightSizable];
[[self.mainOutput layer] addSublayer:avPlayerLayer];
[avPlayer play];
NSLog(@"Play %lli", [avPlayer.currentItem duration].value);
}
+
+
if(context == &SelectionContext){
NSLog(@"Selection");
avPlayerBoundaryPreview = nil;
@@ -262,7 +339,7 @@ -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NS
//NSLog(@"%@",selection);
AVPlayerItem * item = [AVPlayerItem playerItemWithURL:[NSURL URLWithString:[NSString stringWithFormat:@"file://%@",[selection valueForKey:@"path"]]]];
- NSNumber * inTime = [selection valueForKey:@"inTime"];
+ // NSNumber * inTime = [selection valueForKey:@"inTime"];
/* if(inTime){
[item seekToTime:CMTimeMake([inTime floatValue], 25)];
}
@@ -281,7 +358,7 @@ -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NS
avPlayerPreview = [AVPlayer playerWithPlayerItem:item];
[avPlayerPreview play];
- avPlayerLayerPreview = [AVPlayerLayer playerLayerWithPlayer:avPlayerPreview];
+ [avPlayerLayerPreview setPlayer:avPlayerPreview];
[self.videoView setWantsLayer:YES];
avPlayerLayerPreview.backgroundColor = [[NSColor colorWithCalibratedWhite:0.0 alpha:1.0] CGColor];
@@ -291,7 +368,9 @@ -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NS
[[self.videoView layer] addSublayer:avPlayerLayerPreview];
- [self updateInOutTime:self];
+
+ [self performSelector:@selector(updateInOutTime:) withObject:self afterDelay:0];
+ // [self updateInOutTime:self];
/*
CATextLayer *textLayer=[CATextLayer layer];
@@ -306,16 +385,24 @@ -(void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NS
}
}
+- (IBAction)clearVideos:(id)sender {
+ [self willChangeValueForKey:@"recordings"];
+
+ self.recordingIndex = 0;
+ [self.recordings removeAllObjects];
+ [self didChangeValueForKey:@"recordings"];
+
+}
- (IBAction)loadLastVideos:(id)sender {
[self willChangeValueForKey:@"recordings"];
self.recordingIndex = [[NSUserDefaults standardUserDefaults] integerForKey:@"recordingIndex"] ;
- for(int i=0;i<self.recordingIndex-1;i++){
- NSString * path = [NSString stringWithFormat:@"/Users/jonas/Desktop/triumf%i.mov",i+1];
+ for(int i=1;i<self.recordingIndex;i++){
+ NSString * path = [NSString stringWithFormat:@"/Users/jonas/Desktop/triumf%i.mov",i];
//AVPlayerItem * item = [AVPlayerItem playerItemWithURL:[NSURL URLWithString:[NSString stringWithFormat:@"file://%@",path]]];
- NSMutableDictionary * dict = [@{@"path":path, @"name":[NSString stringWithFormat:@"Old Rec %i", i], @"inTime":@(0), @"outTime":@(0)} mutableCopy];
+ NSMutableDictionary * dict = [@{@"path":path, @"active": @(YES), @"name":[NSString stringWithFormat:@"Old Rec %i", i], @"inTime":@(0), @"outTime":@(0)} mutableCopy];
[self.recordings addObject:dict];
@@ -324,11 +411,14 @@ - (IBAction)loadLastVideos:(id)sender {
[self didChangeValueForKey:@"recordings"];
}
+
+
- (IBAction)updateInOutTime:(id)sender {
NSDictionary * selection = self.recordingsArrayController.selectedObjects[0];
NSNumber * inTime = [selection valueForKey:@"inTime"];
NSNumber * outTime = [selection valueForKey:@"outTime"];
if(inTime && outTime){
+ NSLog(@"Update inout %@ %@",inTime, outTime);
if([inTime floatValue] == 0){
inTime = @(1);
}
@@ -343,20 +433,28 @@ - (IBAction)updateInOutTime:(id)sender {
// [avPlayerPreview.currentItem a]
- if(avPlayerBoundaryPreview){
- [avPlayerPreview removeTimeObserver:avPlayerBoundaryPreview];
- avPlayerBoundaryPreview = nil;
- }
-
- long long time = avPlayerPreview.currentItem.duration.value - [outTime floatValue];
- if(time > 0){
- NSValue * val = [NSValue valueWithCMTime:CMTimeMake(time, 600)];
-
- __block AppDelegate *dp = self;
- avPlayerBoundaryPreview= [avPlayerPreview addBoundaryTimeObserverForTimes:@[val] queue: NULL usingBlock:^{
- [dp->avPlayerPreview pause];
- }];
- }
+ /* if(avPlayerBoundaryPreview){
+ [avPlayerPreview removeTimeObserver:avPlayerBoundaryPreview];
+ avPlayerBoundaryPreview = nil;
+ }
+
+ long long time = avPlayerPreview.currentItem.duration.value - [outTime floatValue];
+ if(time < 0){
+ time = 1;
+ }
+ if(time > 0){
+ NSValue * val = [NSValue valueWithCMTime:CMTimeMake(time, 600)];
+ NSLog(@"Block %@",val);
+ __block AppDelegate *dp = self;
+ avPlayerBoundaryPreview= [avPlayerPreview addBoundaryTimeObserverForTimes:@[val] queue: NULL usingBlock:^{
+ [dp->avPlayerPreview pause];
+
+ [dp->avPlayerPreview removeTimeObserver:dp->avPlayerBoundaryPreview];
+ dp->avPlayerBoundaryPreview = nil;
+
+ NSLog(@"Block ping");
+ }];
+ }*/
[avPlayerPreview play];
}
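
The boundary-time-observer code above is left commented out. For reference, a sketch of the out-point behaviour it is after: pause when playback reaches duration minus the out-time, in the 600-per-second timescale used throughout this file. The names installOutPoint, boundaryToken and outTicks are placeholders, not project symbols.

    static id boundaryToken = nil;               // keep the token so it can be removed

    static void installOutPoint(AVPlayer *player, long long outTicks) {
        if (boundaryToken) {                     // never stack observers
            [player removeTimeObserver:boundaryToken];
            boundaryToken = nil;
        }
        long long stop = player.currentItem.duration.value - outTicks;
        if (stop <= 0) return;
        NSValue *when = [NSValue valueWithCMTime:CMTimeMake(stop, 600)];
        __weak AVPlayer *weakPlayer = player;
        boundaryToken = [player addBoundaryTimeObserverForTimes:@[when]
                                                          queue:NULL   // main queue
                                                     usingBlock:^{
            [weakPlayer pause];
            if (boundaryToken) {
                [weakPlayer removeTimeObserver:boundaryToken];
                boundaryToken = nil;
            }
        }];
    }
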
@@ -374,53 +472,75 @@ -(void)setPlayVideo:(bool)playVideo{
NSMutableArray * outTimes = [NSMutableArray array];
for(NSDictionary * recording in self.recordings){
- AVPlayerItem * item = [AVPlayerItem playerItemWithURL:[NSURL URLWithString:[NSString stringWithFormat:@"file://%@",[recording valueForKey:@"path"]]]];
-
- NSNumber * inTime = [recording valueForKey:@"inTime"];
- NSNumber * outTime = [recording valueForKey:@"outTime"];
- if([inTime floatValue] == 0){
- inTime = @(100);
+ if([[recording valueForKey:@"active"] boolValue] == YES){
+ AVPlayerItem * item = [AVPlayerItem playerItemWithURL:[NSURL URLWithString:[NSString stringWithFormat:@"file://%@",[recording valueForKey:@"path"]]]];
+
+ double inTime = [[recording valueForKey:@"inTime"] doubleValue];
+ double outTime = item.duration.value - [[recording valueForKey:@"outTime"] doubleValue];
+
+ if(inTime == 0){
+ inTime = 100;
+ }
+ if(inTime >= avPlayerPreview.currentItem.duration.value){
+ inTime = avPlayerPreview.currentItem.duration.value;
+ }
+
+
+ /* if(outTime >= avPlayerPreview.currentItem.duration.value){
+ outTime = avPlayerPreview.currentItem.duration.value;
+ }
+ if(outTime == 0){
+ outTime = 200;
+ }*/
+ [item seekToTime:CMTimeMake(inTime, 600) toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];
+ [outTimes addObject:[NSValue valueWithCMTime:CMTimeMake(outTime, 600) ]];
+
+ if(item.error){
+ NSLog(@"Error loading %@",item.error);
+ }
+ if(i==0){
+ [item addObserver:self forKeyPath:@"status" options:0 context:&ItemStatusContext];
+ }
+ [items addObject:item];
+ i++;
}
- if([inTime floatValue] >= avPlayerPreview.currentItem.duration.value){
- inTime = @(avPlayerPreview.currentItem.duration.value);
- }
-
-
- if([outTime floatValue] >= avPlayerPreview.currentItem.duration.value){
- outTime = @(avPlayerPreview.currentItem.duration.value);
- }
- if([outTime floatValue] == 0){
- outTime = @(200);
- }
- [item seekToTime:CMTimeMake([inTime floatValue], 600) toleranceBefore:kCMTimeZero toleranceAfter:kCMTimeZero];
- [outTimes addObject:[NSValue valueWithCMTime:CMTimeMake([outTime floatValue], 600) ]];
-
- if(item.error){
- NSLog(@"Error loading %@",item.error);
- }
- if(i==0){
- [item addObserver:self forKeyPath:@"status" options:0 context:&ItemStatusContext];
- }
- [items addObject:item];
- i++;
}
avPlayer = [[AVQueuePlayer alloc] initWithItems:items];
// [avPlayer play];
avPlayerLayer = [AVPlayerLayer playerLayerWithPlayer:avPlayer];
- /*
- __block AppDelegate *dp = self;
+
+ /* __block AppDelegate *dp = self;
+
+ void (^blockPointer)(void);
+
void (^block)(void) = ^(void){
[dp->avPlayer removeTimeObserver:dp->avPlayerBoundaryPreview];
[dp->avPlayer advanceToNextItem];
+
+ [outTimes removeObjectAtIndex:0];
+ if([outTimes count] > 0){
+ NSLog(@"New time boundary");
+ avPlayerBoundaryPreview = [avPlayer addBoundaryTimeObserverForTimes:@[outTimes[0]] queue:NULL usingBlock:blockPointer];
+ }
NSLog(@"Ping");
};
+ blockPointer = block;
+
- avPlayerBoundaryPreview = [avPlayer addBoundaryTimeObserverForTimes:@[outTimes[0]] queue:NULL usingBlock:block];
+ avPlayerBoundaryPreview = [avPlayer addBoundaryTimeObserverForTimes:@[outTimes[0]] queue:NULL usingBlock:blockPointer];
*/
- avPlayerLayer.filters = @[self.colorControlsFilter, self.gammaAdjustFilter];
+
+
+ // avPlayerLayer.filters = @[self.deinterlaceFilter, self.colorControlsFilter, self.gammaAdjustFilter, self.perspectiveFilter];
+
+ [self.constantColorFilter setValue:[CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1.0] forKey:@"inputColor"];
+ [self.sourceOverFilter setValue:[self.constantColorFilter valueForKey:@"outputImage"] forKey:@"inputBackgroundImage"];
+
+ [self.chromaFilter setInputBackgroundImage:[self imageForSelector:2]];
+ avPlayerLayer.filters = @[ self.deinterlaceFilter, self.colorControlsFilter, self.gammaAdjustFilter];//, self.perspectiveFilterMovie, self.sourceOverFilter];
} else {
[avPlayerLayer removeFromSuperlayer];
[self.mainOutput setWantsLayer:NO];
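
A condensed sketch of the queue construction in this hunk, under the same assumptions the code makes: recordings is an array of dictionaries with path, active and inTime keys, times use a timescale of 600, and Core Image filters are attached directly to the AVPlayerLayer. An AVPlayerItem's duration is generally not known until its asset has loaded, which is presumably what the status observer above is waiting for.

    NSMutableArray *items = [NSMutableArray array];
    for (NSDictionary *rec in recordings) {
        if (![[rec valueForKey:@"active"] boolValue]) continue;      // skip deselected takes
        NSURL *url = [NSURL fileURLWithPath:[rec valueForKey:@"path"]];
        AVPlayerItem *item = [AVPlayerItem playerItemWithURL:url];
        [item seekToTime:CMTimeMake([[rec valueForKey:@"inTime"] longLongValue], 600)
         toleranceBefore:kCMTimeZero
          toleranceAfter:kCMTimeZero];                               // exact in-point
        [items addObject:item];
    }
    AVQueuePlayer *queuePlayer = [[AVQueuePlayer alloc] initWithItems:items];
    AVPlayerLayer *playerLayer = [AVPlayerLayer playerLayerWithPlayer:queuePlayer];
    CIFilter *color = [CIFilter filterWithName:@"CIColorControls"];
    [color setDefaults];
    playerLayer.filters = @[color];                                  // filters render on the layer
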
@@ -449,7 +569,7 @@ -(void)setRecording:(bool)recording{
[videoWriter finishWriting];
// [self.recordings addObject:@{@"path":path, @"name":[NSString stringWithFormat:@"Rec %i", self.recordingIndex-1]}];
- NSMutableDictionary * dict = [@{@"path":path, @"name":[NSString stringWithFormat:@"Old Rec %i", self.recordingIndex-1], @"inTime":@(0), @"outTime":@(0)} mutableCopy];
+ NSMutableDictionary * dict = [@{@"path":path, @"active": @(YES), @"name":[NSString stringWithFormat:@"Old Rec %i", self.recordingIndex-1], @"inTime":@(0), @"outTime":@(0)} mutableCopy];
[self.recordings addObject:dict];
@@ -506,7 +626,7 @@ -(void)setRecording:(bool)recording{
[NSThread sleepForTimeInterval:0.1];
}
- }
+ }
}
-(bool)recording{
@@ -518,14 +638,16 @@ -(bool)recording{
static dispatch_once_t onceToken;
-(void) newFrame:(DecklinkCallback*)callback{
- dispatch_sync(dispatch_get_main_queue(), ^{
+ dispatch_async(dispatch_get_main_queue(), ^{
//NSLog(@"%lld",[avPlayer currentTime].value);
-
+ //NSLog(@"New frame in %i",callback);
+
[callback->lock lock];
callback->delegateBusy = YES;
- CVPixelBufferRef buffer = [self createCVImageBufferFromCallback:callback];
-
+// CVPixelBufferRef buffer = [self createCVImageBufferFromCallback:callback];
+ CVPixelBufferRef buffer = callback->buffer;
+ NSLog(@"%i",buffer);
int num = -1;
if(callback == [self.blackMagicController callbacks:0]){
@@ -561,34 +683,47 @@ -(void) newFrame:(DecklinkCallback*)callback{
if(!self.recording){
- if(num == 0 && [[NSUserDefaults standardUserDefaults] boolForKey:@"chromaKey"]){
- image = [self chromaKey:image backgroundImage:cameras[1]];
+
+
+
+ if(num == 0 && [[NSUserDefaults standardUserDefaults] boolForKey:@"chromaKey"] && self.decklink1input == 8){
+
+ [self.noiseReductionFilter setValue:[[NSUserDefaults standardUserDefaults] valueForKey:@"noiseReduction"] forKey:@"inputNoiseLevel"];
+ [self.noiseReductionFilter setValue:[[NSUserDefaults standardUserDefaults] valueForKey:@"sharpness"] forKey:@"inputSharpness"];
+ [self.noiseReductionFilter setValue:image forKey:@"inputImage"];
+// image = [self.noiseReductionFilter valueForKey:@"outputImage"];
+
+
+ image = [self chromaKey:image backgroundImage:cameras[1] alphaImage:[self.noiseReductionFilter valueForKey:@"outputImage"]];
+ }
+ if(num == 0 && [[NSUserDefaults standardUserDefaults] floatForKey:@"chromaScale"] != 1 && self.decklink1input == 8){
+ [self updateChromaTransform];
+ // [self.chromaTransform setValue:image forKey:@"inputImage"];
+ // image = [self.chromaTransform valueForKey:@"outputImage"];
+
+ [self.chromaCrop setValue:image forKey:@"inputImage"];
+ image = [self.chromaCrop valueForKey:@"outputImage"];
}
image = [self filterCIImage:image];
+
+
cameras[num] = image;
// dispatch_async(dispatch_get_main_queue(), ^{
- if(!preview.needsDisplay){ //Spar på energien
+
+ if(num == self.outSelector-1 || self.outSelector == 0 || self.outSelector > 3){
+ self.mainOutput.ciImage = [self outputImage];
+ //if(![self.mainOutput needsDisplay])
+ [self.mainOutput setNeedsDisplay:YES];
+ }
+// if(!self.mainOutput.needsDisplay){ //Spar på energien
preview.ciImage = [self imageForSelector:num+1];
- // preview.ciImage = image;
+ // [preview performSelector:@selector(setNeedsDisplay:) withObject:YES afterDelay:1];
[preview setNeedsDisplay:YES];
- }
- if(!self.mainOutput.needsDisplay){
- if(num == 0){
- self.mainOutput.ciImage = [self outputImage];
- if(![self.mainOutput needsDisplay])
- [self.mainOutput setNeedsDisplay:YES];
- }
- }
-
- if(num==0){
- if(self.outSelector == 4){
- //[self updateMovie];
- }
- }
- // });
+ // }
+ //[NSThread sleepForTimeInterval:0.01];
}
if(self.recording && num == self.outSelector - 1){
@@ -608,8 +743,8 @@ -(void) newFrame:(DecklinkCallback*)callback{
append_ok = [adaptor appendPixelBuffer:buffer withPresentationTime:frameTime];
- if(buffer)
- CVBufferRelease(buffer);
+ //if(buffer)
+ // CVBufferRelease(buffer);
[NSThread sleepForTimeInterval:0.035];
}
else
@@ -624,46 +759,12 @@ -(void) newFrame:(DecklinkCallback*)callback{
}
// });
- /*dispatch_once(&onceToken, ^{
- recordImage = [[NSImage alloc] initWithSize:NSMakeSize(720, 576)];
- });
-
- NSTimeInterval diff = [NSDate timeIntervalSinceReferenceDate] - self.lastRecordTime;
- if(self.lastRecordTime == -1){
- diff = 0;
- }
- diff *= 600;
-
- self.lastRecordTime = [NSDate timeIntervalSinceReferenceDate];
-
- NSBitmapImageRep * bitmap;
-
- unsigned char * bytes = callback->bytes;
- bitmap = [[NSBitmapImageRep alloc] initWithBitmapDataPlanes:&bytes
- pixelsWide:callback->w pixelsHigh:callback->h
- bitsPerSample:8 samplesPerPixel:4
- hasAlpha:YES isPlanar:NO
- colorSpaceName:NSDeviceRGBColorSpace
- bitmapFormat:1
- bytesPerRow:4*callback->w bitsPerPixel:8*4];
-
-
-
- [recordImage addRepresentation:bitmap];
-
- //dispatch_sync(dispatch_get_main_queue(), ^{
- // [self.mMovie addImage:recordImage forDuration:QTMakeTime(diff, 600) withAttributes:[NSDictionary dictionaryWithObjectsAndKeys: @"jpeg", QTAddImageCodecType, nil]];
- // [mMovie addImage:recordImage forDuration:QTMakeTime(timeDiff, 1000) withAttributes:nil];
- // [self.mMovie setCurrentTime:[self.mMovie duration]];
- [recordImage removeRepresentation:bitmap];
- //});
- */
-
-
}
callback->delegateBusy = NO;
[callback->lock unlock];
+// NSLog(@"New frame out %i",callback);
+
});
}
/*
@@ -709,82 +810,166 @@ -(CIImage*) imageForSelector:(int)selector{
inputSelector = self.decklink2input;
if(selector == 3)
inputSelector = self.decklink3input;
-
- if(inputSelector == 5){ //6 Jonas
+
+ if(inputSelector == 15){ //6 Jonas
[self.widescreenFilter setValue:img forKey:@"inputImage"];
img = [self.widescreenFilter valueForKey:@"outputImage"];
}
- if(inputSelector == 9){ //10 DSLR
+/* if(inputSelector == 9){ //10 DSLR
[self.dslrFilter setValue:img forKey:@"inputImage"];
img = [self.dslrFilter valueForKey:@"outputImage"];
- }
+ }*/
return img;
}
return nil;
}
-(void)updateTransitionTime {
+
transitionTime += 0.51-self.fadeTime*0.5;
if(transitionTime < 1)
[self performSelector:@selector(updateTransitionTime) withObject:nil afterDelay:0.01];
+
+}
+
+-(bool)out1selected{
+ return self.outSelector == 1 || transitionImageSourceSelector == 1;
+}
+-(bool)out2selected{
+ return self.outSelector == 2 || transitionImageSourceSelector == 2;;
+}
+-(bool)out3selected{
+ return self.outSelector == 3 || transitionImageSourceSelector == 3;
+}
+-(NSString *)out1name{
+ if(self.decklink1input < [self.cameraInputs count])
+ return [self.cameraInputs[self.decklink1input] valueForKey:@"name"];
+ return @"";
+}
+-(NSString *)out2name{
+ if(self.decklink2input < [self.cameraInputs count])
+ return [self.cameraInputs[self.decklink2input] valueForKey:@"name"];
+ return @"";
+}
+-(NSString *)out3name{
+ if(self.decklink3input < [self.cameraInputs count])
+ return [self.cameraInputs[self.decklink3input] valueForKey:@"name"];
+ return @"";
}
-(CIImage*) outputImage {
CIImage * _outputImage;
if(transitionTime >= 1){
+ [self willChangeValueForKey:@"out1selected"];
+ [self willChangeValueForKey:@"out2selected"];
+ [self willChangeValueForKey:@"out3selected"];
+
transitionImageSourceSelector = self.outSelector;
transitionTime = -1;
+
+ [self didChangeValueForKey:@"out1selected"];
+ [self didChangeValueForKey:@"out2selected"];
+ [self didChangeValueForKey:@"out3selected"];
+
}
if(self.fadeTime > 0 && transitionTime != -1){
if(transitionTime == 0){
[self updateTransitionTime];
-// [self performSelector:@selector(updateTransitionTime) withObject:nil afterDelay:0.1];
+ // [self performSelector:@selector(updateTransitionTime) withObject:nil afterDelay:0.1];
}
[self.dissolveFilter setValue:[self imageForSelector:transitionImageSourceSelector] forKey:@"inputImage"];
[self.dissolveFilter setValue:[self imageForSelector:self.outSelector] forKey:@"inputTargetImage"];
-
+
[self.dissolveFilter setValue:@(transitionTime) forKey:@"inputTime"];
_outputImage = [self.dissolveFilter valueForKey:@"outputImage"];
-
+
} else {
- if(self.outSelector == 0){
- [self.constantColorFilter setValue:[CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1.0] forKey:@"inputColor"];
- return [self.constantColorFilter valueForKey:@"outputImage"];
- }
+ /* if(self.outSelector == 0){
+ [self.constantColorFilter setValue:[CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1.0] forKey:@"inputColor"];
+ return [self.constantColorFilter valueForKey:@"outputImage"];
+ }*/
_outputImage = [self imageForSelector:self.outSelector];
-/* if(self.outSelector > 0 && self.outSelector <= 3){
- _outputImage = cameras[self.outSelector-1];
- }*/
+
+ [self willChangeValueForKey:@"out1selected"];
+ [self willChangeValueForKey:@"out2selected"];
+ [self willChangeValueForKey:@"out3selected"];
+ transitionImageSourceSelector = self.outSelector;
+ [self didChangeValueForKey:@"out1selected"];
+ [self didChangeValueForKey:@"out2selected"];
+ [self didChangeValueForKey:@"out3selected"];
+
+
+ if(self.outSelector == 0){
+ return _outputImage;
+ }
+ /* if(self.outSelector > 0 && self.outSelector <= 3){
+ _outputImage = cameras[self.outSelector-1];
+ }*/
}
+
+
+ [self.perspectiveFilter setValue:_outputImage forKey:@"inputImage"];
+ _outputImage = [self.perspectiveFilter valueForKey:@"outputImage"];
+
+
+
+
+ //----
+
+
+ [self.constantColorFilter setValue:[CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1.0] forKey:@"inputColor"];
+ [self.sourceOverFilter setValue: _outputImage forKey:@"inputImage"];
+ [self.sourceOverFilter setValue:[self.constantColorFilter valueForKey:@"outputImage"] forKey:@"inputBackgroundImage"];
+ _outputImage = [self.sourceOverFilter valueForKey:@"outputImage"];
+
+
+
+ //----
+
[self.constantColorFilter setValue:[CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1-self.master] forKey:@"inputColor"];
[self.sourceOverFilter setValue:_outputImage forKey:@"inputBackgroundImage"];
[self.sourceOverFilter setValue:[self.constantColorFilter valueForKey:@"outputImage"] forKey:@"inputImage"];
_outputImage = [self.sourceOverFilter valueForKey:@"outputImage"];
+
+
+
+
return _outputImage;
}
--(CIImage*) chromaKey:(CIImage*)image backgroundImage:(CIImage*)background{
+-(CIImage*) chromaKey:(CIImage*)image backgroundImage:(CIImage*)background alphaImage:(CIImage*)alpha{
CIImage * retImage = image;
NSUserDefaults * defaults = [NSUserDefaults standardUserDefaults];
float chromaMin = [defaults floatForKey:@"chromaMin"];
float chromaMax = [defaults floatForKey:@"chromaMax"];
-
- if(chromaMin != chromaMinSet || chromaMax != chromaMaxSet){
+ float chromaVal = [defaults floatForKey:@"chromaVal"];
+ float chromaSat = [defaults floatForKey:@"chromaSat"];
+
+ float chromaBlur = [defaults floatForKey:@"chromaBlur"];
+
+ if(chromaMin != chromaMinSet || chromaMax != chromaMaxSet || chromaSat != chromaSatSet || chromaVal != chromaValSet){
chromaMinSet = chromaMin;
chromaMaxSet = chromaMax;
- [self.chromaFilter setMinHueAngle:chromaMinSet maxHueAngle:chromaMaxSet];
+ chromaSatSet = chromaSat;
+ chromaValSet = chromaVal;
+ [self.chromaFilter setMinHueAngle:chromaMinSet maxHueAngle:chromaMaxSet minValue:chromaVal minSaturation:chromaSat];
}
- self.chromaFilter.backgroundImage = background;
- self.chromaFilter.inputImage = image;
+ [self.chromaGaussian setValue:@(chromaBlur) forKey:@"inputRadius"];
+ [self.chromaGaussian setValue:alpha forKey:@"inputImage"];
+ image = [self.chromaGaussian valueForKey:@"outputImage"];
+
+ self.chromaFilter.inputBackgroundImage = background;
+ self.chromaFilter.inputImage = alpha;
+ self.chromaFilter.inputForegroundImage = image;
retImage = [self.chromaFilter outputImage];
return retImage;
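
The end of outputImage in this hunk implements the master fader: the program image is composited over constant black, then a constant black image with alpha 1 - master is composited on top, so pulling the MIDI master fader down fades the output to black. An isolated sketch of that last step, where program and master are placeholders:

    CIFilter *black = [CIFilter filterWithName:@"CIConstantColorGenerator"];
    [black setValue:[CIColor colorWithRed:0.0 green:0.0 blue:0.0 alpha:1.0 - master]
             forKey:@"inputColor"];

    CIFilter *over = [CIFilter filterWithName:@"CISourceOverCompositing"];
    [over setValue:[black valueForKey:@"outputImage"] forKey:@"inputImage"];   // black on top
    [over setValue:program forKey:@"inputBackgroundImage"];                    // program below
    CIImage *faded = [over valueForKey:@"outputImage"];
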
@@ -793,25 +978,17 @@ -(CIImage*) chromaKey:(CIImage*)image backgroundImage:(CIImage*)background{
-(CIImage*) filterCIImage:(CIImage*)inputImage{
__block CIImage * _outputImage = inputImage;
- // if(PropB(@"deinterlace")){
[self.deinterlaceFilter setInputImage:_outputImage];
_outputImage = [self.deinterlaceFilter valueForKey:@"outputImage"];
- // }
- /* if(PropB(@"chromaKey") && inputImage == [self imageForSelector:1]){
- [chromaFilter setInputImage:_outputImage];
- [chromaFilter setBackgroundImage:[self imageForSelector:2]];
- _outputImage = [chromaFilter outputImage];
- }*/
- /* [blurFilter setValue:[NSNumber numberWithFloat:PropF(@"blur")] forKey:@"inputRadius"];
- [blurFilter setValue:_outputImage forKey:@"inputImage"];
- _outputImage = [blurFilter valueForKey:@"outputImage"];*/
-
- // dispatch_sync(dispatch_get_main_queue(), ^{
+ NSUserDefaults * defaults = [NSUserDefaults standardUserDefaults];
+// [self.noiseReductionFilter setValue:[defaults valueForKey:@"noiseReduction"] forKey:@"inputNoiseLevel"];
+// [self.noiseReductionFilter setValue:[defaults valueForKey:@"sharpness"] forKey:@"inputSharpness"];
+// [self.noiseReductionFilter setValue:_outputImage forKey:@"inputImage"];
+// _outputImage = [self.noiseReductionFilter valueForKey:@"outputImage"];
- NSUserDefaults * defaults = [NSUserDefaults standardUserDefaults];
[self.colorControlsFilter setValue:[defaults valueForKey:@"saturation"] forKey:@"inputSaturation"];
/*[self.colorControlsFilter setValue:[NSNumber numberWithFloat:PropF(@"contrast")] forKey:@"inputContrast"];
@@ -819,14 +996,6 @@ -(CIImage*) filterCIImage:(CIImage*)inputImage{
[self.colorControlsFilter setValue:_outputImage forKey:@"inputImage"];
_outputImage = [self.colorControlsFilter valueForKey:@"outputImage"];
- /* [self.dissolveFilter setValue:_outputImage forKey:@"inputImage"];
- [self.dissolveFilter setValue:@(self.master) forKey:@"inputTime"];
- _outputImage = [self.dissolveFilter valueForKey:@"outputImage"];
- */
-
-
- //});
-
[self.gammaAdjustFilter setValue:[defaults valueForKey:@"gamma"] forKey:@"inputPower"];
[self.gammaAdjustFilter setValue:_outputImage forKey:@"inputImage"];
_outputImage = [self.gammaAdjustFilter valueForKey:@"outputImage"];
@@ -839,18 +1008,27 @@ -(CIImage*) filterCIImage:(CIImage*)inputImage{
[toneCurveFilter setValue:_outputImage forKey:@"inputImage"];
_outputImage = [toneCurveFilter valueForKey:@"outputImage"];*/
-
return _outputImage;
}
+void MyPixelBufferReleaseCallback(void *releaseRefCon, const void *baseAddress){
+ // NSLog(@" release %i",baseAddress);
+ delete baseAddress;
+}
+
-(CVPixelBufferRef) createCVImageBufferFromCallback:(DecklinkCallback*)callback{
int w = callback->w;
int h = callback->h;
- unsigned char * bytes = callback->bytes;
+ // unsigned char * bytes = callback->bytes;
+ unsigned char * bytes = (unsigned char * ) malloc(callback->w*callback->h*4 * sizeof(unsigned char)) ;
+ memcpy(bytes, callback->bytes, callback->w*callback->h*4);
+// NSLog(@" create %i",bytes);
+
+
NSDictionary *d = [NSDictionary dictionaryWithObjectsAndKeys:[NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey, [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
CVPixelBufferRef buffer = NULL;
- CVPixelBufferCreateWithBytes(kCFAllocatorDefault, w, h, k32ARGBPixelFormat, bytes, 4*w, (CVPixelBufferReleaseBytesCallback )nil, (void*)nil, (__bridge CFDictionaryRef)d, &buffer);
+ CVPixelBufferCreateWithBytes(kCFAllocatorDefault, w, h, k32ARGBPixelFormat, bytes, 4*w, (CVPixelBufferReleaseBytesCallback )MyPixelBufferReleaseCallback, (void*)bytes, (__bridge CFDictionaryRef)d, &buffer);
return buffer;
}
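
createCVImageBufferFromCallback now copies the frame bytes (malloc plus memcpy) and hands ownership to the pixel buffer through a release callback. A self-contained sketch of that pattern follows; storage obtained with malloc() is conventionally returned with free() (the callback above uses delete, which pairs with new). kCVPixelFormatType_32ARGB is CoreVideo's name for the same 32-bit ARGB layout as the k32ARGBPixelFormat constant used above.

    #import <CoreVideo/CoreVideo.h>

    static void ReleaseCopiedBytes(void *releaseRefCon, const void *baseAddress) {
        free((void *)baseAddress);                     // matches the malloc below
    }

    static CVPixelBufferRef CreateBufferByCopy(const unsigned char *src, int w, int h) {
        size_t bytesPerRow = 4 * (size_t)w;
        unsigned char *copy = (unsigned char *)malloc(bytesPerRow * (size_t)h);
        memcpy(copy, src, bytesPerRow * (size_t)h);

        NSDictionary *attrs = @{ (__bridge id)kCVPixelBufferCGImageCompatibilityKey : @YES,
                                 (__bridge id)kCVPixelBufferCGBitmapContextCompatibilityKey : @YES };
        CVPixelBufferRef buffer = NULL;
        CVPixelBufferCreateWithBytes(kCFAllocatorDefault, w, h, kCVPixelFormatType_32ARGB,
                                     copy, bytesPerRow,
                                     ReleaseCopiedBytes, copy,
                                     (__bridge CFDictionaryRef)attrs, &buffer);
        return buffer;                                 // caller releases with CVPixelBufferRelease()
    }
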
@@ -859,23 +1037,55 @@ static void MyMIDIReadProc(const MIDIPacketList *pklist, void *refCon, void *con
AppDelegate * ad = (__bridge AppDelegate*)refCon;
MIDIPacket * packet = (MIDIPacket*)pklist->packet;
- Byte midiCommand = packet->data[0] >> 4;
- if(midiCommand==11){//CC
- int channel = (packet->data[0] & 0xF) + 1;
- int number = packet->data[1] & 0x7F;
- int value = packet->data[2] & 0x7F;
- // NSLog(@"%i %i %i",channel, number, value);
-
- dispatch_async(dispatch_get_main_queue(), ^{
- if(channel == 1 && number == 0){
- ad.fadeTime = value / 128.0;
- }
- if(channel == 1 && number == 7){
- ad.master = value / 128.0;
+ for (int i = 0; i < pklist->numPackets; ++i) {
+ for (int j = 0; j < packet->length; j+=3) {
+
+
+ Byte midiCommand = packet->data[0+j] >> 4;
+
+ if(midiCommand==11){//CC
+ int channel = (packet->data[0+j] & 0xF) + 1;
+ int number = packet->data[1+j] & 0x7F;
+ int value = packet->data[2+j] & 0x7F;
+ // NSLog(@"%i %i %i",channel, number, value);
+
+ dispatch_async(dispatch_get_main_queue(), ^{
+ if(channel == 1 && number == 1){
+ ad.fadeTime = value / 128.0;
+ }
+ if(channel == 1 && number == 0){
+ ad.master = value / 128.0;
+ }
+
+
+ if(channel == 2){
+ if(number == 0){
+ ad.outSelector = value;
+ }
+
+ if(number == 1){
+ ad.decklink1input = value-1;
+ }
+ if(number == 2){
+ ad.decklink2input = value-1;
+ }
+ if(number == 3){
+ ad.decklink3input = value-1;
+ }
+ }
+
+ if (channel == 3) {
+ if(number == 0){
+ [[NSUserDefaults standardUserDefaults] setBool:value forKey:@"chromaKey"];
+ }
+ }
+ });
+ // if()
}
- });
-// if()
+ }
+
+ packet = MIDIPacketNext(packet);
}
}
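
The MIDI handler now walks every packet in the list (MIDIPacketNext) and every 3-byte message inside a packet, instead of reading only the first message. A standalone sketch of that iteration, assuming all messages are 3-byte channel messages as the stride implies; control change is status nibble 0xB:

    #import <CoreMIDI/CoreMIDI.h>

    static void SketchReadProc(const MIDIPacketList *list, void *refCon, void *srcConn) {
        const MIDIPacket *packet = &list->packet[0];
        for (UInt32 i = 0; i < list->numPackets; i++) {
            for (int j = 0; j + 2 < packet->length; j += 3) {
                Byte status = packet->data[j];
                if ((status >> 4) == 0x0B) {                   // control change
                    int channel = (status & 0x0F) + 1;
                    int number  = packet->data[j + 1] & 0x7F;
                    int value   = packet->data[j + 2] & 0x7F;  // 0..127
                    printf("CC ch %d  #%d = %d\n", channel, number, value);
                }
            }
            packet = MIDIPacketNext(packet);
        }
    }
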
1  ViljensTriumf/CoreImageViewer.h
@@ -13,4 +13,5 @@
CIContext *ciContext;
}
@property (strong) CIImage * ciImage;
+@property CALayer * activeLayer;
@end
9 ViljensTriumf/CoreImageViewer.m
@@ -15,6 +15,15 @@ - (id)initWithFrame:(NSRect)frame
self = [super initWithFrame:frame];
if (self) {
// Initialization code here.
+// self.activeLayer = [CALayer layer];
+// self.activeLayer.backgroundColor = [[NSColor redColor] CGColor];
+//
+// NSRect rect = frame;
+// rect.size.height = 30;
+// self.activeLayer.bounds = rect;
+//
+// // [self setWantsLayer:YES];
+// // [self.layer addSublayer:self.activeLayer];
}
return self;
5 ViljensTriumf/DecklinkCallback.h
@@ -47,9 +47,12 @@ class DecklinkCallback : public IDeckLinkInputCallback{
void YuvToRgbChunk(unsigned char *yuv, unsigned char * rgb, unsigned int offset, unsigned int chunk_size);
unsigned char * YuvToRgb(IDeckLinkVideoInputFrame* pArrivedFrame);
- NSRecursiveLock * lock;
+ NSLock * lock;
id delegate;
bool delegateBusy;
+
+ CVPixelBufferRef buffer;
+
};
19 ViljensTriumf/DecklinkCallback.mm
@@ -152,7 +152,7 @@
int num_workers = 8;
int a;
- unsigned t0=clock(),t1;
+ //unsigned t0=clock(),t1;
// split up the image into memory-aligned chunks so they take advantage of
// the CPU cache
@@ -169,7 +169,7 @@
}
dispatch_group_wait(group, DISPATCH_TIME_FOREVER);
- t1=clock()-t0;
+ // t1=clock()-t0;
//printf("%i\n",t1);
return rgb;
@@ -192,7 +192,7 @@ void bwFrames(unsigned char * bytes, int size){
bytes = 0;
CreateLookupTables();
- lock = [[NSRecursiveLock alloc] init];
+ lock = [[NSLock alloc] init];
};
@@ -247,6 +247,7 @@ void bwFrames(unsigned char * bytes, int size){
{
@autoreleasepool {
if(!delegateBusy){
+ // NSLog(@"Frame in %i",this);
[lock lock];
// BMDPixelFormat pixelFormat = videoFrame->GetPixelFormat();
BMDTimeValue frameTime, frameDuration;
@@ -254,7 +255,7 @@ void bwFrames(unsigned char * bytes, int size){
HRESULT theResult;
videoFrame->GetStreamTime(&frameTime, &frameDuration, 600);
- theResult = decklinkOutput->ScheduleVideoFrame(videoFrame, frameTime, frameDuration, 600);
+ decklinkOutput->ScheduleVideoFrame(videoFrame, frameTime, frameDuration, 600);
//if (theResult != S_OK)
// printf("Scheduling failed with error = %08x\n", (unsigned int)theResult);
@@ -269,6 +270,12 @@ void bwFrames(unsigned char * bytes, int size){
delete bytes;
}*/
bytes = YuvToRgb(videoFrame);
+
+ if(buffer){
+ CVPixelBufferRelease(buffer);
+ }
+ buffer = [delegate createCVImageBufferFromCallback:this];
+
/*imageRep = [[NSBitmapImageRep alloc] initWithBitmapDataPlanes:&bytes
pixelsWide:w pixelsHigh:h
bitsPerSample:8 samplesPerPixel:3
@@ -281,12 +288,12 @@ void bwFrames(unsigned char * bytes, int size){
newFrame = true;
dispatch_queue_t queue = dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_HIGH, 0ul);
- dispatch_async(queue, ^{
+ dispatch_sync(queue, ^{
[delegate newFrame:this];
});
[lock unlock];
-
+ // NSLog(@"Frame out %i",this);
} else {
// NSLog(@"busy delegate");
}
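
The callback now keeps a single cached CVPixelBufferRef per input: each new frame releases the previous buffer before storing the freshly created one, and the consumer side (newFrame: in AppDelegate.mm) no longer releases it, so ownership stays entirely inside the callback. Sketch of that single-slot cache as a hypothetical helper on the class:

    // Hypothetical helper, not in this compare; shows the intended ownership rule.
    void DecklinkCallback::storeFrame(CVPixelBufferRef newBuffer) {
        if (buffer) {
            CVPixelBufferRelease(buffer);   // drop the previous frame's +1 reference
        }
        buffer = newBuffer;                 // keep the new +1 reference until next frame
    }
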
3  ViljensTriumf/MavController.mm
@@ -21,7 +21,7 @@ - (id)init
waitingForData = NO;
readThreadRunning = FALSE;
incommingString = [NSMutableString string];
- NSString * serialPort = @"/dev/tty.usbserial-FT5CHURVA";
+ NSString * serialPort = @"/dev/tty.usbserial-FT5CHUS4B";
/* NSDirectoryEnumerator *enumerator = [[NSFileManager defaultManager] enumeratorAtPath:@"/dev/"];
@@ -199,6 +199,7 @@ - (void)serialReadThread: (NSThread *) parentThread {
}
}
}
+ [NSThread sleepForTimeInterval:0.1];
}
// make sure the serial port is closed
if (serialFileDescriptor != -1) {
9,161 ViljensTriumf/en.lproj/MainMenu.xib
6,562 additions, 2,599 deletions not shown
8 ViljensTriumf/filters/ChromaFilter.h
@@ -10,15 +10,17 @@
@interface ChromaFilter : CIFilter{
CIImage * _inputImage;
+ CIImage * _inputForegroundImage;
CIImage * _backgroundImage;
CIFilter *colorCube;
CIFilter * sourceOverFilter;
}
@property (strong) CIImage *inputImage;
-@property (strong) CIImage *backgroundImage;
+@property (strong) CIImage *inputForegroundImage;
+@property (strong) CIImage *inputBackgroundImage;
--(void) setMinHueAngle:(float)minHueAngle maxHueAngle:(float)maxHueAngle;
-- (CIImage *)outputImage;
+-(void) setMinHueAngle:(float)minHueAngle maxHueAngle:(float)maxHueAngle minValue:(float)minValue minSaturation:(float)minSaturation;
+-(CIImage*)outputImage;
@end
51 ViljensTriumf/filters/ChromaFilter.m
@@ -57,10 +57,12 @@ void rgb2hsv(float * rgb, float * hsv)
}
+static CIKernel *alphaOverKernel = nil;
+
@implementation ChromaFilter
@synthesize inputImage = _inputImage;
-@synthesize backgroundImage = _backgroundImage;
-
+@synthesize inputBackgroundImage = _inputBackgroundImage;
+@synthesize inputForegroundImage = _inputForegroundImage;
- (id)init
{
@@ -68,10 +70,21 @@ - (id)init
colorCube = [CIFilter filterWithName:@"CIColorCube"];
sourceOverFilter = [CIFilter filterWithName:@"CISourceOverCompositing"];
+
+ if(alphaOverKernel == nil)// 1
+ {
+ NSBundle *bundle = [NSBundle bundleForClass: [self class]];// 2
+ NSString *code = [NSString stringWithContentsOfFile: [bundle// 3
+ pathForResource: @"alphaOver"
+ ofType: @"cikernel"]];
+ NSArray *kernels = [CIKernel kernelsWithString: code];// 4
+ alphaOverKernel = [kernels objectAtIndex:0];// 5
+ }
+
return self;
}
--(void) setMinHueAngle:(float)minHueAngle maxHueAngle:(float)maxHueAngle{
+-(void) setMinHueAngle:(float)minHueAngle maxHueAngle:(float)maxHueAngle minValue:(float)minValue minSaturation:(float)minSaturation{
// Allocate memory
@@ -90,12 +103,25 @@ -(void) setMinHueAngle:(float)minHueAngle maxHueAngle:(float)maxHueAngle{
// Convert RGB to HSV
// You can find publicly available rgbToHSV functions on the Internet
rgb2hsv(rgb,hsv);
+
// Use the hue value to determine which to make transparent
// The minimum and maximum hue angle depends on
// the color you want to remove
// printf("%f %f %f = %f\n",rgb[0], rgb[1], rgb[2], hsv[0]);
- float alpha = (hsv[0] > minHueAngle && hsv[0] < maxHueAngle) ? 0.0f: 1.0f;
+ float alpha = 1.0;
+ if(hsv[0] > minHueAngle && hsv[0] < maxHueAngle && hsv[1] > minSaturation && hsv[2] > minValue)
+ alpha = 0;
+
+/* float hueWidth = 10;
+ if(hsv[0] > minHueAngle - hueWidth && hsv[0] < minHueAngle + 10){
+ float i = (hsv[0]-minHueAngle-hueWidth)/(hueWidth*2.0);
+ alpha *= 1-i;
+ } else if(hsv[0] > minHueAngle && hsv[0] < maxHueAngle){
+ alpha *= 0;
+ }
+*/
+
// Calculate premultiplied alpha values for the cube
c[0] = rgb[0] * alpha;
c[1] = rgb[1] * alpha;
@@ -115,18 +141,31 @@ -(void) setMinHueAngle:(float)minHueAngle maxHueAngle:(float)maxHueAngle{
[colorCube setValue:data forKey:@"inputCubeData"];
}
+-(NSArray *)inputKeys{
+ return @[@"inputImage", @"inputBackgroundImage", @"inputForegroundImage"];
+}
+
- (CIImage *)outputImage
{
if(self.inputImage == nil)
return nil;
[colorCube setValue:self.inputImage forKey:@"inputImage"];
-
+ /*
[sourceOverFilter setValue:[colorCube valueForKey:@"outputImage"] forKey:@"inputImage"];
- [sourceOverFilter setValue:self.backgroundImage forKey:@"inputBackgroundImage"];
+ [sourceOverFilter setValue:self.inputBackgroundImage forKey:@"inputBackgroundImage"];
return [sourceOverFilter valueForKey:@"outputImage"];
+
+*/
+
+ CISampler *foreground = [CISampler samplerWithImage: self.inputForegroundImage];
+ CISampler *background = [CISampler samplerWithImage: self.inputBackgroundImage];
+ CISampler *alpha = [CISampler samplerWithImage: [colorCube valueForKey:@"outputImage"]];
+ // NSAssert(src, @" Nor Src");
+ return [self apply: alphaOverKernel, foreground, alpha, background, kCIApplyOptionDefinition, [foreground definition], nil];
+
}
@end
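
outputImage now bypasses CISourceOverCompositing and applies a custom alphaOver CIKernel to three samplers: the foreground, the keyed image produced by the color cube, and the background. The alphaOver.cikernel file is added to the project in this compare, but its source is not shown; a kernel with a matching three-sampler signature might look like this sketch, mixing foreground and background by the keyed image's alpha:

    // Hypothetical sketch; the real alphaOver.cikernel source is not in this diff.
    static NSString * const kAlphaOverSketch =
        @"kernel vec4 alphaOver(sampler fg, sampler keyed, sampler bg)\n"
        @"{\n"
        @"  vec4 f = sample(fg, samplerCoord(fg));\n"
        @"  vec4 k = sample(keyed, samplerCoord(keyed));\n"
        @"  vec4 b = sample(bg, samplerCoord(bg));\n"
        @"  return f * k.a + b * (1.0 - k.a);\n"
        @"}";

    // Compiled the same way as in -init above:
    // NSArray *kernels = [CIKernel kernelsWithString:kAlphaOverSketch];
    // CIKernel *alphaOver = [kernels objectAtIndex:0];
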
5 ViljensTriumf/filters/DeinterlaceFilter.m
@@ -30,9 +30,12 @@ - (id)init
- (CIImage *)outputImage
{
CISampler *src = [CISampler samplerWithImage: self.inputImage];
-
+ // NSAssert(src, @" Nor Src");
return [self apply: deinterlaceKernel, src, kCIApplyOptionDefinition, [src definition], nil];
}
+-(NSArray *)inputKeys{
+ return @[@"inputImage"];
+}
@end
6 ViljensTriumf/filters/deinterlaceFilter.cikernel
@@ -32,4 +32,8 @@ kernel vec4 v002SimpleDeinterlace(sampler image)
// final output
return oddresult;
// return mix(oddresult, evenresult, isodd);
-}
+}
+
+
+
+

No commit comments for this range
