Skip to content

HTTPS clone URL

Subversion checkout URL

You can clone with HTTPS or Subversion.

Download ZIP
Browse files

Merge remote-tracking branch 'upstream/master'

Conflicts:
	framework/GPUImage.xcodeproj/project.pbxproj
  • Loading branch information...
commit b10f83f5d9a28f3f171a0817d5f3a8edf526c007 2 parents 621d5da + fa31df5
@fattjake fattjake authored
Showing with 2,240 additions and 831 deletions.
  1. +6 −1 .gitignore
  2. +32 −23 README.md
  3. +2 −3 examples/BenchmarkSuite/BenchmarkSuite/VideoFilteringDisplayController.m
  4. +1 −1  examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.h
  5. +10 −14 examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.m
  6. +2 −3 examples/CubeExample/Classes/ES2Renderer.m
  7. +7 −4 examples/FilterShowcase/FilterShowcase/ShowcaseFilterListController.m
  8. +3 −0  examples/FilterShowcase/FilterShowcase/ShowcaseFilterViewController.h
  9. +77 −18 examples/FilterShowcase/FilterShowcase/ShowcaseFilterViewController.m
  10. +6 −9 examples/MultiViewFilterExample/MultiViewFilterExample/MultiViewViewController.m
  11. +6 −7 examples/SimpleImageFilter/SimpleImageFilter/SimpleImageViewController.m
  12. +11 −26 examples/SimplePhotoFilter/SimplePhotoFilter/PhotoViewController.m
  13. +1 −3 examples/SimpleVideoFileFilter/SimpleVideoFileFilter/SimpleVideoFileFilterViewController.m
  14. +3 −5 examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
  15. +8 −8 examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.xib
  16. +95 −26 framework/GPUImage.xcodeproj/project.pbxproj
  17. +4 −1 framework/Source/GPUImage.h
  18. +2 −7 framework/Source/GPUImage3x3ConvolutionFilter.h
  19. +1 −59 framework/Source/GPUImage3x3ConvolutionFilter.m
  20. +18 −0 framework/Source/GPUImage3x3TextureSamplingFilter.h
  21. +107 −0 framework/Source/GPUImage3x3TextureSamplingFilter.m
  22. +8 −5 framework/Source/GPUImageAdaptiveThresholdFilter.m
  23. +2 −2 framework/Source/GPUImageAlphaBlendFilter.h
  24. +3 −2 framework/Source/GPUImageAlphaBlendFilter.m
  25. +5 −0 framework/Source/GPUImageBilateralFilter.h
  26. +87 −0 framework/Source/GPUImageBilateralFilter.m
  27. +10 −2 framework/Source/GPUImageBoxBlurFilter.m
  28. +4 −3 framework/Source/GPUImageBulgeDistortionFilter.h
  29. +10 −2 framework/Source/GPUImageBulgeDistortionFilter.m
  30. +3 −3 framework/Source/GPUImageCannyEdgeDetectionFilter.h
  31. +10 −10 framework/Source/GPUImageCannyEdgeDetectionFilter.m
  32. +2 −2 framework/Source/GPUImageChromaKeyBlendFilter.h
  33. +3 −2 framework/Source/GPUImageChromaKeyBlendFilter.m
  34. +2 −2 framework/Source/GPUImageColorBurnBlendFilter.h
  35. +3 −2 framework/Source/GPUImageColorBurnBlendFilter.m
  36. +2 −2 framework/Source/GPUImageColorDodgeBlendFilter.h
  37. +2 −1  framework/Source/GPUImageColorDodgeBlendFilter.m
  38. +1 −0  framework/Source/GPUImageCropFilter.h
  39. +119 −7 framework/Source/GPUImageCropFilter.m
  40. +8 −0 framework/Source/GPUImageCrosshatchFilter.h
  41. +88 −29 framework/Source/GPUImageCrosshatchFilter.m
  42. +2 −2 framework/Source/GPUImageDarkenBlendFilter.h
  43. +3 −2 framework/Source/GPUImageDarkenBlendFilter.m
  44. +2 −2 framework/Source/GPUImageDifferenceBlendFilter.h
  45. +3 −2 framework/Source/GPUImageDifferenceBlendFilter.m
  46. +2 −2 framework/Source/GPUImageDissolveBlendFilter.h
  47. +4 −3 framework/Source/GPUImageDissolveBlendFilter.m
  48. +2 −2 framework/Source/GPUImageExclusionBlendFilter.h
  49. +3 −2 framework/Source/GPUImageExclusionBlendFilter.m
  50. +3 −2 framework/Source/GPUImageFastBlurFilter.h
  51. +11 −3 framework/Source/GPUImageFastBlurFilter.m
  52. +38 −6 framework/Source/GPUImageFilter.h
  53. +142 −49 framework/Source/GPUImageFilter.m
  54. +10 −2 framework/Source/GPUImageFilterGroup.m
  55. +1 −3 framework/Source/GPUImageGaussianBlurFilter.h
  56. +13 −17 framework/Source/GPUImageGaussianBlurFilter.m
  57. +9 −8 framework/Source/GPUImageGaussianSelectiveBlurFilter.h
  58. +56 −69 framework/Source/GPUImageGaussianSelectiveBlurFilter.m
  59. +2 −0  framework/Source/GPUImageGrayscaleFilter.h
  60. +2 −2 framework/Source/GPUImageHardLightBlendFilter.h
  61. +3 −2 framework/Source/GPUImageHardLightBlendFilter.m
  62. +15 −0 framework/Source/GPUImageHistogramFilter.m
  63. +3 −1 framework/Source/GPUImageKuwaharaFilter.h
  64. +3 −2 framework/Source/GPUImageLightenBlendFilter.h
  65. +3 −2 framework/Source/GPUImageLightenBlendFilter.m
  66. +5 −0 framework/Source/GPUImageMedianFilter.h
  67. +119 −0 framework/Source/GPUImageMedianFilter.m
  68. +9 −5 framework/Source/GPUImageMovie.m
  69. +1 −0  framework/Source/GPUImageMovieWriter.h
  70. +8 −2 framework/Source/GPUImageMovieWriter.m
  71. +2 −2 framework/Source/GPUImageMultiplyBlendFilter.h
  72. +3 −2 framework/Source/GPUImageMultiplyBlendFilter.m
  73. +10 −2 framework/Source/GPUImageNonMaximumSuppressionFilter.m
  74. +7 −1 framework/Source/GPUImageOpenGLESContext.h
  75. +23 −0 framework/Source/GPUImageOpenGLESContext.m
  76. +55 −3 framework/Source/GPUImageOutput.h
  77. +38 −4 framework/Source/GPUImageOutput.m
  78. +2 −2 framework/Source/GPUImageOverlayBlendFilter.h
  79. +3 −2 framework/Source/GPUImageOverlayBlendFilter.m
  80. +17 −3 framework/Source/GPUImagePicture.m
  81. +11 −3 framework/Source/GPUImagePinchDistortionFilter.m
  82. +9 −2 framework/Source/GPUImagePolarPixellateFilter.m
  83. +14 −0 framework/Source/GPUImageRGBFilter.h
  84. +77 −0 framework/Source/GPUImageRGBFilter.m
  85. +1 −0  framework/Source/GPUImageRawData.h
  86. +7 −1 framework/Source/GPUImageRawData.m
  87. +0 −13 framework/Source/GPUImageRotationFilter.h
  88. +0 −105 framework/Source/GPUImageRotationFilter.m
  89. +2 −2 framework/Source/GPUImageScreenBlendFilter.h
  90. +3 −2 framework/Source/GPUImageScreenBlendFilter.m
  91. +1 −0  framework/Source/GPUImageSepiaFilter.h
  92. +4 −0 framework/Source/GPUImageSketchFilter.h
  93. +9 −6 framework/Source/GPUImageSmoothToonFilter.h
  94. +10 −10 framework/Source/GPUImageSmoothToonFilter.m
  95. +4 −4 framework/Source/GPUImageSobelEdgeDetectionFilter.h
  96. +14 −14 framework/Source/GPUImageSobelEdgeDetectionFilter.m
  97. +2 −2 framework/Source/GPUImageSoftLightBlendFilter.h
  98. +3 −2 framework/Source/GPUImageSoftLightBlendFilter.m
  99. +5 −1 framework/Source/GPUImageStillCamera.h
  100. +153 −5 framework/Source/GPUImageStillCamera.m
  101. +9 −4 framework/Source/GPUImageStretchDistortionFilter.m
  102. +2 −2 framework/Source/GPUImageSubtractBlendFilter.h
  103. +3 −2 framework/Source/GPUImageSubtractBlendFilter.m
  104. +5 −3 framework/Source/GPUImageSwirlFilter.h
  105. +11 −3 framework/Source/GPUImageSwirlFilter.m
  106. +4 −1 framework/Source/GPUImageTextureInput.m
  107. +5 −1 framework/Source/GPUImageTextureOutput.m
  108. +5 −4 framework/Source/GPUImageTiltShiftFilter.h
  109. +6 −4 framework/Source/GPUImageTiltShiftFilter.m
  110. +2 −8 framework/Source/GPUImageToonFilter.h
  111. +1 −53 framework/Source/GPUImageToonFilter.m
  112. +2 −9 framework/Source/GPUImageTransformFilter.m
  113. +15 −0 framework/Source/GPUImageTwoInputFilter.h
  114. +158 −0 framework/Source/GPUImageTwoInputFilter.m
  115. +1 −9 framework/Source/GPUImageTwoPassFilter.m
  116. +4 −2 framework/Source/GPUImageUnsharpMaskFilter.m
  117. +55 −11 framework/Source/GPUImageVideoCamera.h
  118. +76 −18 framework/Source/GPUImageVideoCamera.m
  119. +15 −3 framework/Source/GPUImageView.h
  120. +88 −11 framework/Source/GPUImageView.m
  121. +33 −4 framework/Source/GPUImageVignetteFilter.m
View
7 .gitignore
@@ -1,5 +1,6 @@
# Exclude the build directory
build/*
+examples/FilterShowcase/build*
# Exclude temp nibs and swap files
*~.nib
@@ -16,4 +17,8 @@ build/*
*.perspectivev3
*.pbxuser
*.xcworkspace
-xcuserdata
+xcuserdata
+
+# Documentation
+documentation/*
+
View
55 README.md
@@ -46,6 +46,10 @@ For example, an application that takes in live video from the camera, converts t
GPUImageVideoCamera -> GPUImageSepiaFilter -> GPUImageView
+## Documentation ##
+
+Documentation is generated from header comments using appledoc. To build the documentation, switch to the "Documentation" scheme in Xcode. You should ensure that "APPLEDOC_PATH" (a User-Defined build setting) points to an appledoc binary, available on <a href="https://github.com/tomaz/appledoc">Github</a> or through <a href="https://github.com/mxcl/homebrew">Homebrew</a>. It will also build and install a .docset file, which you can view with your favorite documentation tool.
+
## Built-in filters ##
### Color adjustments ###
@@ -88,8 +92,6 @@ For example, an application that takes in live video from the camera, converts t
### Image processing ###
-- **GPUImageRotationFilter**: This lets you rotate an image left or right by 90 degrees, or flip it horizontally or vertically
-
- **GPUImageTransformFilter**: This applies an arbitrary 2-D or 3-D transformation to an image
- *affineTransform*: This takes in a CGAffineTransform to adjust an image in 2-D
- *transform3D*: This takes in a CATransform3D to manipulate an image in 3-D
@@ -174,30 +176,34 @@ For example, an application that takes in live video from the camera, converts t
- *center*: The center about which to apply the pixellation, defaulting to (0.5, 0.5)
- *pixelSize*: The fractional pixel size, split into width and height components. The default is (0.05, 0.05)
+- **GPUImageCrosshatchFilter**: This converts an image into a black-and-white crosshatch pattern
+ - *crossHatchSpacing*: The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03.
+ - *lineWidth*: A relative width for the crosshatch lines. The default is 0.003.
+
- **GPUImageSobelEdgeDetectionFilter**: Sobel edge detection, with edges highlighted in white
- - *imageWidthFactor*:
- - *imageHeightFactor*: These parameters affect the visibility of the detected edges
+ - *texelWidth*:
+ - *texelHeight*: These parameters affect the visibility of the detected edges
- **GPUImageCannyEdgeDetectionFilter**: This uses a Gaussian blur before applying a Sobel operator to highlight edges
- - *imageWidthFactor*:
- - *imageHeightFactor*: These parameters affect the visibility of the detected edges
+ - *texelWidth*:
+ - *texelHeight*: These parameters affect the visibility of the detected edges
- *blurSize*: A multiplier for the prepass blur size, ranging from 0.0 on up, with a default of 1.0
- *threshold*: Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.5 as the default
- **GPUImageSketchFilter**: Converts video to look like a sketch. This is just the Sobel edge detection filter with the colors inverted
- *intensity*: The degree to which the original image colors are replaced by the detected edges (0.0 - 1.0, with 1.0 as the default)
- - *imageWidthFactor*:
- - *imageHeightFactor*: These parameters affect the visibility of the detected edges
+ - *texelWidth*:
+ - *texelHeight*: These parameters affect the visibility of the detected edges
- **GPUImageToonFilter**: This uses Sobel edge detection to place a black border around objects, and then it quantizes the colors present in the image to give a cartoon-like quality to the image.
- - *imageWidthFactor*:
- - *imageHeightFactor*: These parameters affect the visibility of the detected edges
+ - *texelWidth*:
+ - *texelHeight*: These parameters affect the visibility of the detected edges
- *threshold*: The sensitivity of the edge detection, with lower values being more sensitive. Ranges from 0.0 to 1.0, with 0.2 as the default
- *quantizationLevels*: The number of color levels to represent in the final image. Default is 10.0
- **GPUImageSmoothToonFilter**: This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
- - *imageWidthFactor*:
- - *imageHeightFactor*: These parameters affect the visibility of the detected edges
+ - *texelWidth*:
+ - *texelHeight*: These parameters affect the visibility of the detected edges
- *blurSize*: A multiplier for the prepass blur size, ranging from 0.0 on up, with a default of 0.5
- *threshold*: The sensitivity of the edge detection, with lower values being more sensitive. Ranges from 0.0 to 1.0, with 0.2 as the default
- *quantizationLevels*: The number of color levels to represent in the final image. Default is 10.0
@@ -225,7 +231,7 @@ For example, an application that takes in live video from the camera, converts t
- **GPUImageVignetteFilter**: Performs a vignetting effect, fading out the image at the edges
- *x*:
- - *y*: The directional intensity of the vignetting, with a default of x = 0.5, y = 0.75
+ - *y*: The directional intensity of the vignetting, with a default of x = 0.75, y = 0.5
- **GPUImageKuwaharaFilter**: Kuwahara image abstraction, drawn from the work of Kyprianidis, et al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
 - *radius*: An integer specifying the number of pixels out from the center pixel to test when applying the filter, with a default of 4. A higher value creates a more abstracted image, but at the cost of much greater processing time.
@@ -264,6 +270,8 @@ Additionally, this is an ARC-enabled framework, so if you want to use this withi
To filter live video from an iOS device's camera, you can use code like the following:
GPUImageVideoCamera *videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
+
GPUImageFilter *customFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"CustomShader"];
GPUImageView *filteredVideoView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, viewWidth, viewHeight)];
@@ -274,7 +282,7 @@ To filter live video from an iOS device's camera, you can use code like the foll
[videoCamera startCameraCapture];
-This sets up a video source coming from the iOS device's back-facing camera, using a preset that tries to capture at 640x480. A custom filter, using code from the file CustomShader.fsh, is then set as the target for the video frames from the camera. These filtered video frames are finally displayed onscreen with the help of a UIView subclass that can present the filtered OpenGL ES texture that results from this pipeline.
+This sets up a video source coming from the iOS device's back-facing camera, using a preset that tries to capture at 640x480. This video is captured with the interface being in portrait mode, where the landscape-left-mounted camera needs to have its video frames rotated before display. A custom filter, using code from the file CustomShader.fsh, is then set as the target for the video frames from the camera. These filtered video frames are finally displayed onscreen with the help of a UIView subclass that can present the filtered OpenGL ES texture that results from this pipeline.
The fill mode of the GPUImageView can be altered by setting its fillMode property, so that if the aspect ratio of the source video is different from that of the view, the video will either be stretched, centered with black bars, or zoomed to fill.
@@ -290,11 +298,10 @@ Also, if you wish to enable microphone audio capture for recording to a movie, y
To capture and filter still photos, you can use a process similar to the one for filtering video. Instead of a GPUImageVideoCamera, you use a GPUImageStillCamera:
stillCamera = [[GPUImageStillCamera alloc] init];
+ stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
+
filter = [[GPUImageGammaFilter alloc] init];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
-
- [stillCamera addTarget:rotationFilter];
- [rotationFilter addTarget:filter];
+ [stillCamera addTarget:filter];
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];
@@ -380,14 +387,12 @@ One thing to note when adding fragment shaders to your Xcode project is that Xco
Movies can be loaded into the framework via the GPUImageMovie class, filtered, and then written out using a GPUImageMovieWriter. GPUImageMovieWriter is also fast enough to record video in realtime from an iPhone 4's camera at 640x480, so a direct filtered video source can be fed into it.
-The following is an example of how you would load a sample movie, pass it through a pixellation and rotation filter, then record the result to disk as a 480 x 640 h.264 movie:
+The following is an example of how you would load a sample movie, pass it through a pixellation filter, then record the result to disk as a 480 x 640 h.264 movie:
movieFile = [[GPUImageMovie alloc] initWithURL:sampleURL];
pixellateFilter = [[GPUImagePixellateFilter alloc] init];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
- [movieFile addTarget:rotationFilter];
- [rotationFilter addTarget:pixellateFilter];
+ [movieFile addTarget:pixellateFilter];
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
unlink([pathToMovie UTF8String]);
@@ -428,6 +433,10 @@ A bundled JPEG image is loaded into the application at launch, a filter is appli
A pixellate filter is applied to a live video stream, with a UISlider control that lets you adjust the pixel size on the live video.
+### SimpleVideoFileFilter ###
+
+A movie file is loaded from disk, an unsharp mask filter is applied to it, and the filtered result is re-encoded as another movie.
+
### MultiViewFilterExample ###
From a single camera feed, four views are populated with realtime filters applied to camera. One is just the straight camera video, one is a preprogrammed sepia tone, and two are custom filters based on shader programs.
@@ -450,4 +459,4 @@ In other words, the path of this application is camera -> sepia tone filter -> c
A version of my ColorTracking example from http://www.sunsetlakesoftware.com/2010/10/22/gpu-accelerated-video-processing-mac-and-ios ported across to use GPUImage, this application uses color in a scene to track objects from a live camera feed. The four views you can switch between include the raw camera feed, the camera feed with pixels matching the color threshold in white, the processed video where positions are encoded as colors within the pixels passing the threshold test, and finally the live video feed with a dot that tracks the selected color. Tapping the screen changes the color to track to match the color of the pixels under your finger. Tapping and dragging on the screen makes the color threshold more or less forgiving. This is most obvious on the second, color thresholding view.
-Currently, all processing for the color averaging in the last step is done on the CPU, so this is part is extremely slow.
+Currently, all processing for the color averaging in the last step is done on the CPU, so this part is extremely slow.
View
5 examples/BenchmarkSuite/BenchmarkSuite/VideoFilteringDisplayController.m
@@ -170,12 +170,11 @@ - (void)displayVideoForGPUImage;
NSLog(@"Start GPU Image");
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
videoCamera.runBenchmark = YES;
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
sepiaFilter = [[GPUImageSepiaFilter alloc] init];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
- [videoCamera addTarget:rotationFilter];
- [rotationFilter addTarget:sepiaFilter];
+ [videoCamera addTarget:sepiaFilter];
filterView = [[GPUImageView alloc] initWithFrame:self.view.bounds];
[self.view addSubview:filterView];
[sepiaFilter addTarget:filterView];
View
2  examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.h
@@ -8,7 +8,7 @@ typedef enum { PASSTHROUGH_VIDEO, SIMPLE_THRESHOLDING, POSITION_THRESHOLDING, OB
CALayer *trackingDot;
GPUImageVideoCamera *videoCamera;
- GPUImageFilter *rotationFilter, *thresholdFilter, *positionFilter;
+ GPUImageFilter *thresholdFilter, *positionFilter;
GPUImageRawData *positionRawData, *videoRawData;
GPUImageView *filteredVideoView;
View
24 examples/ColorObjectTracking/ColorObjectTracking/ColorTrackingViewController.m
@@ -49,6 +49,7 @@ - (void)configureVideoFiltering;
{
CGRect mainScreenFrame = [[UIScreen mainScreen] applicationFrame];
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
filteredVideoView = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, mainScreenFrame.size.width, mainScreenFrame.size.height)];
[self.view addSubview:filteredVideoView];
@@ -58,7 +59,6 @@ - (void)configureVideoFiltering;
positionFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"PositionColor"];
[positionFilter setFloat:thresholdSensitivity forUniform:@"threshold"];
[positionFilter setFloatVec3:thresholdColor forUniform:@"inputColor"];
- rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
// CGSize videoPixelSize = filteredVideoView.bounds.size;
// videoPixelSize.width *= [filteredVideoView contentScaleFactor];
@@ -72,12 +72,8 @@ - (void)configureVideoFiltering;
videoRawData = [[GPUImageRawData alloc] initWithImageSize:videoPixelSize];
videoRawData.delegate = self;
- [videoCamera addTarget:rotationFilter];
- [rotationFilter addTarget:filteredVideoView];
- [rotationFilter addTarget:videoRawData];
-// [rotationFilter addTarget:positionFilter];
-// [positionFilter addTarget:filteredVideoView];
-// [positionFilter addTarget:videoRawData];
+ [videoCamera addTarget:filteredVideoView];
+ [videoCamera addTarget:videoRawData];
[videoCamera startCameraCapture];
}
@@ -147,31 +143,31 @@ - (void)handleSwitchOfDisplayMode:(id)sender;
trackingDot.opacity = 0.0f;
}
- [rotationFilter removeAllTargets];
+ [videoCamera removeAllTargets];
[positionFilter removeAllTargets];
[thresholdFilter removeAllTargets];
- [rotationFilter addTarget:videoRawData];
+ [videoCamera addTarget:videoRawData];
switch(displayMode)
{
case PASSTHROUGH_VIDEO:
{
- [rotationFilter addTarget:filteredVideoView];
+ [videoCamera addTarget:filteredVideoView];
}; break;
case SIMPLE_THRESHOLDING:
{
- [rotationFilter addTarget:thresholdFilter];
+ [videoCamera addTarget:thresholdFilter];
[thresholdFilter addTarget:filteredVideoView];
}; break;
case POSITION_THRESHOLDING:
{
- [rotationFilter addTarget:positionFilter];
+ [videoCamera addTarget:positionFilter];
[positionFilter addTarget:filteredVideoView];
}; break;
case OBJECT_TRACKING:
{
- [rotationFilter addTarget:filteredVideoView];
- [rotationFilter addTarget:positionFilter];
+ [videoCamera addTarget:filteredVideoView];
+ [videoCamera addTarget:positionFilter];
[positionFilter addTarget:positionRawData];
}; break;
}
View
5 examples/CubeExample/Classes/ES2Renderer.m
@@ -72,13 +72,12 @@ - (id)initWithSize:(CGSize)newSize;
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
inputFilter = [[GPUImageSepiaFilter alloc] init];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
textureOutput = [[GPUImageTextureOutput alloc] init];
textureOutput.delegate = self;
- [videoCamera addTarget:rotationFilter];
- [rotationFilter addTarget:inputFilter];
+ [videoCamera addTarget:inputFilter];
[inputFilter addTarget:textureOutput];
[videoCamera startCameraCapture];
View
11 examples/FilterShowcase/FilterShowcase/ShowcaseFilterListController.m
@@ -65,6 +65,7 @@ - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(N
case GPUIMAGE_CONTRAST: cell.textLabel.text = @"Contrast"; break;
case GPUIMAGE_BRIGHTNESS: cell.textLabel.text = @"Brightness"; break;
case GPUIMAGE_EXPOSURE: cell.textLabel.text = @"Exposure"; break;
+ case GPUIMAGE_RGB: cell.textLabel.text = @"RGB"; break;
case GPUIMAGE_SHARPEN: cell.textLabel.text = @"Sharpen"; break;
case GPUIMAGE_UNSHARPMASK: cell.textLabel.text = @"Unsharp mask"; break;
case GPUIMAGE_GAMMA: cell.textLabel.text = @"Gamma"; break;
@@ -115,10 +116,12 @@ - (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(N
case GPUIMAGE_SOFTLIGHTBLEND: cell.textLabel.text = @"Soft light blend"; break;
case GPUIMAGE_KUWAHARA: cell.textLabel.text = @"Kuwahara"; break;
case GPUIMAGE_VIGNETTE: cell.textLabel.text = @"Vignette"; break;
- case GPUIMAGE_GAUSSIAN: cell.textLabel.text = @"Gaussian Blur"; break;
- case GPUIMAGE_FASTBLUR: cell.textLabel.text = @"Fast Blur"; break;
- case GPUIMAGE_BOXBLUR: cell.textLabel.text = @"Box Blur"; break;
- case GPUIMAGE_GAUSSIAN_SELECTIVE: cell.textLabel.text = @"Gaussian Selective Blur"; break;
+ case GPUIMAGE_GAUSSIAN: cell.textLabel.text = @"Gaussian blur"; break;
+ case GPUIMAGE_FASTBLUR: cell.textLabel.text = @"Fast blur"; break;
+ case GPUIMAGE_MEDIAN: cell.textLabel.text = @"Median (3x3)"; break;
+ case GPUIMAGE_BILATERAL: cell.textLabel.text = @"Bilateral blur"; break;
+ case GPUIMAGE_BOXBLUR: cell.textLabel.text = @"Box blur"; break;
+ case GPUIMAGE_GAUSSIAN_SELECTIVE: cell.textLabel.text = @"Gaussian selective blur"; break;
case GPUIMAGE_CUSTOM: cell.textLabel.text = @"Custom"; break;
case GPUIMAGE_FILECONFIG: cell.textLabel.text = @"Filter Chain"; break;
case GPUIMAGE_FILTERGROUP: cell.textLabel.text = @"Filter Group"; break;
View
3  examples/FilterShowcase/FilterShowcase/ShowcaseFilterViewController.h
@@ -6,6 +6,7 @@ typedef enum {
GPUIMAGE_CONTRAST,
GPUIMAGE_BRIGHTNESS,
GPUIMAGE_EXPOSURE,
+ GPUIMAGE_RGB,
GPUIMAGE_SHARPEN,
GPUIMAGE_UNSHARPMASK,
GPUIMAGE_TRANSFORM,
@@ -42,6 +43,8 @@ typedef enum {
GPUIMAGE_GAUSSIAN_SELECTIVE,
GPUIMAGE_FASTBLUR,
GPUIMAGE_BOXBLUR,
+ GPUIMAGE_MEDIAN,
+ GPUIMAGE_BILATERAL,
GPUIMAGE_SWIRL,
GPUIMAGE_BULGE,
GPUIMAGE_PINCH,
View
95 examples/FilterShowcase/FilterShowcase/ShowcaseFilterViewController.m
@@ -51,7 +51,10 @@ - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interface
- (void)setupFilter;
{
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
+// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionFront];
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
+
+ BOOL needsSecondImage = NO;
switch (filterType)
{
@@ -91,7 +94,11 @@ - (void)setupFilter;
case GPUIMAGE_CROSSHATCH:
{
self.title = @"Crosshatch";
- self.filterSettingsSlider.hidden = YES;
+ self.filterSettingsSlider.hidden = NO;
+
+ [self.filterSettingsSlider setValue:0.03];
+ [self.filterSettingsSlider setMinimumValue:0.01];
+ [self.filterSettingsSlider setMaximumValue:0.06];
filter = [[GPUImageCrosshatchFilter alloc] init];
}; break;
@@ -142,6 +149,17 @@ - (void)setupFilter;
filter = [[GPUImageBrightnessFilter alloc] init];
}; break;
+ case GPUIMAGE_RGB:
+ {
+ self.title = @"RGB";
+ self.filterSettingsSlider.hidden = NO;
+
+ [self.filterSettingsSlider setMinimumValue:0.0];
+ [self.filterSettingsSlider setMaximumValue:2.0];
+ [self.filterSettingsSlider setValue:1.0];
+
+ filter = [[GPUImageRGBFilter alloc] init];
+ }; break;
case GPUIMAGE_EXPOSURE:
{
self.title = @"Exposure";
@@ -233,21 +251,21 @@ - (void)setupFilter;
self.title = @"Crop";
self.filterSettingsSlider.hidden = NO;
- [self.filterSettingsSlider setMinimumValue:0.3];
+ [self.filterSettingsSlider setMinimumValue:0.2];
[self.filterSettingsSlider setMaximumValue:1.0];
[self.filterSettingsSlider setValue:0.5];
- filter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.0, 0.0, 0.5, 0.25)];
+ filter = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.0, 0.0, 1.0, 0.25)];
}; break;
case GPUIMAGE_MASK:
{
self.title = @"Mask";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageMaskFilter alloc] init];
[(GPUImageFilter*)filter setBackgroundColorRed:0.0 green:1.0 blue:0.0 alpha:1.0];
-
}; break;
case GPUIMAGE_TRANSFORM:
{
@@ -464,6 +482,7 @@ - (void)setupFilter;
{
self.title = @"Chroma Key (Green)";
self.filterSettingsSlider.hidden = NO;
+ needsSecondImage = YES;
[self.filterSettingsSlider setMinimumValue:0.0];
[self.filterSettingsSlider setMaximumValue:1.0];
@@ -476,6 +495,7 @@ - (void)setupFilter;
{
self.title = @"Multiply Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageMultiplyBlendFilter alloc] init];
}; break;
@@ -483,6 +503,7 @@ - (void)setupFilter;
{
self.title = @"Overlay Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageOverlayBlendFilter alloc] init];
}; break;
@@ -490,6 +511,7 @@ - (void)setupFilter;
{
self.title = @"Lighten Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageLightenBlendFilter alloc] init];
}; break;
@@ -498,12 +520,14 @@ - (void)setupFilter;
self.title = @"Darken Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageDarkenBlendFilter alloc] init];
}; break;
case GPUIMAGE_DISSOLVE:
{
self.title = @"Dissolve Blend";
self.filterSettingsSlider.hidden = NO;
+ needsSecondImage = YES;
[self.filterSettingsSlider setMinimumValue:0.0];
[self.filterSettingsSlider setMaximumValue:1.0];
@@ -515,6 +539,7 @@ - (void)setupFilter;
{
self.title = @"Screen Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageScreenBlendFilter alloc] init];
}; break;
@@ -522,6 +547,7 @@ - (void)setupFilter;
{
self.title = @"Color Burn Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageColorBurnBlendFilter alloc] init];
}; break;
@@ -529,6 +555,7 @@ - (void)setupFilter;
{
self.title = @"Color Dodge Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageColorDodgeBlendFilter alloc] init];
}; break;
@@ -536,6 +563,7 @@ - (void)setupFilter;
{
self.title = @"Exclusion Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageExclusionBlendFilter alloc] init];
}; break;
@@ -543,6 +571,7 @@ - (void)setupFilter;
{
self.title = @"Difference Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageDifferenceBlendFilter alloc] init];
}; break;
@@ -550,6 +579,7 @@ - (void)setupFilter;
{
self.title = @"Subtract Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageSubtractBlendFilter alloc] init];
}; break;
@@ -557,6 +587,7 @@ - (void)setupFilter;
{
self.title = @"Hard Light Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageHardLightBlendFilter alloc] init];
}; break;
@@ -564,6 +595,7 @@ - (void)setupFilter;
{
self.title = @"Soft Light Blend";
self.filterSettingsSlider.hidden = YES;
+ needsSecondImage = YES;
filter = [[GPUImageSoftLightBlendFilter alloc] init];
}; break;
@@ -625,6 +657,13 @@ - (void)setupFilter;
filter = [[GPUImageBoxBlurFilter alloc] init];
}; break;
+ case GPUIMAGE_MEDIAN:
+ {
+ self.title = @"Median";
+ self.filterSettingsSlider.hidden = YES;
+
+ filter = [[GPUImageMedianFilter alloc] init];
+ }; break;
case GPUIMAGE_GAUSSIAN_SELECTIVE:
{
self.title = @"Selective Blur";
@@ -637,6 +676,17 @@ - (void)setupFilter;
filter = [[GPUImageGaussianSelectiveBlurFilter alloc] init];
[(GPUImageGaussianSelectiveBlurFilter*)filter setExcludeCircleRadius:40.0/320.0];
}; break;
+ case GPUIMAGE_BILATERAL:
+ {
+ self.title = @"Bilateral Blur";
+ self.filterSettingsSlider.hidden = NO;
+
+ [self.filterSettingsSlider setMinimumValue:0.0];
+ [self.filterSettingsSlider setMaximumValue:10.0];
+ [self.filterSettingsSlider setValue:1.0];
+
+ filter = [[GPUImageBilateralFilter alloc] init];
+ }; break;
case GPUIMAGE_FILTERGROUP:
{
self.title = @"Filter Group";
@@ -668,15 +718,14 @@ - (void)setupFilter;
pipeline = [[GPUImageFilterPipeline alloc] initWithConfigurationFile:[[NSBundle mainBundle] URLForResource:@"SampleConfiguration" withExtension:@"plist"]
input:videoCamera output:(GPUImageView*)self.view];
- [pipeline addFilter:rotationFilter atIndex:0];
+// [pipeline addFilter:rotationFilter atIndex:0];
}
else
{
- [videoCamera addTarget:rotationFilter];
- [rotationFilter addTarget:filter];
+ [videoCamera addTarget:filter];
videoCamera.runBenchmark = YES;
- if ( (filterType != GPUIMAGE_UNSHARPMASK) && (filterType != GPUIMAGE_TILTSHIFT) )
+ if (needsSecondImage)
{
UIImage *inputImage;
@@ -693,33 +742,40 @@ - (void)setupFilter;
sourcePicture = [[GPUImagePicture alloc] initWithImage:inputImage smoothlyScaleOutput:YES];
[sourcePicture addTarget:filter];
+ [sourcePicture processImage];
}
GPUImageView *filterView = (GPUImageView *)self.view;
if (filterType == GPUIMAGE_HISTOGRAM)
{
+ // I'm adding an intermediary filter because glReadPixels() requires something to be rendered for its read operation to work
+ [videoCamera removeTarget:filter];
+ GPUImageGammaFilter *gammaFilter = [[GPUImageGammaFilter alloc] init];
+ [videoCamera addTarget:gammaFilter];
+ [gammaFilter addTarget:filter];
+
GPUImageHistogramGenerator *histogramGraph = [[GPUImageHistogramGenerator alloc] init];
[histogramGraph forceProcessingAtSize:CGSizeMake(256.0, 330.0)];
+ [filter addTarget:histogramGraph];
GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
- blendFilter.mix = 0.75;
-
- [filter addTarget:histogramGraph];
+ blendFilter.mix = 0.75;
- [rotationFilter addTarget:blendFilter];
+ [videoCamera addTarget:blendFilter];
[histogramGraph addTarget:blendFilter];
- rotationFilter.targetToIgnoreForUpdates = blendFilter; // Avoid double-updating the blend
-
+ videoCamera.targetToIgnoreForUpdates = blendFilter; // Avoid double-updating the blend
+
[blendFilter addTarget:filterView];
}
else if (filterType == GPUIMAGE_HARRISCORNERDETECTION)
{
GPUImageAlphaBlendFilter *blendFilter = [[GPUImageAlphaBlendFilter alloc] init];
- [rotationFilter addTarget:blendFilter];
+
+ [videoCamera addTarget:blendFilter];
[filter addTarget:blendFilter];
- rotationFilter.targetToIgnoreForUpdates = blendFilter; // Avoid double-updating the blend
+ videoCamera.targetToIgnoreForUpdates = blendFilter; // Avoid double-updating the blend
[blendFilter addTarget:filterView];
}
@@ -746,11 +802,13 @@ - (IBAction)updateFilterFromSlider:(id)sender;
case GPUIMAGE_CONTRAST: [(GPUImageContrastFilter *)filter setContrast:[(UISlider *)sender value]]; break;
case GPUIMAGE_BRIGHTNESS: [(GPUImageBrightnessFilter *)filter setBrightness:[(UISlider *)sender value]]; break;
case GPUIMAGE_EXPOSURE: [(GPUImageExposureFilter *)filter setExposure:[(UISlider *)sender value]]; break;
+ case GPUIMAGE_RGB: [(GPUImageRGBFilter *)filter setGreen:[(UISlider *)sender value]]; break;
case GPUIMAGE_SHARPEN: [(GPUImageSharpenFilter *)filter setSharpness:[(UISlider *)sender value]]; break;
case GPUIMAGE_HISTOGRAM: [(GPUImageHistogramFilter *)filter setDownsamplingFactor:round([(UISlider *)sender value])]; break;
case GPUIMAGE_UNSHARPMASK: [(GPUImageUnsharpMaskFilter *)filter setIntensity:[(UISlider *)sender value]]; break;
// case GPUIMAGE_UNSHARPMASK: [(GPUImageUnsharpMaskFilter *)filter setBlurSize:[(UISlider *)sender value]]; break;
case GPUIMAGE_GAMMA: [(GPUImageGammaFilter *)filter setGamma:[(UISlider *)sender value]]; break;
+ case GPUIMAGE_CROSSHATCH: [(GPUImageCrosshatchFilter *)filter setCrossHatchSpacing:[(UISlider *)sender value]]; break;
case GPUIMAGE_POSTERIZE: [(GPUImagePosterizeFilter *)filter setColorLevels:round([(UISlider*)sender value])]; break;
case GPUIMAGE_HAZE: [(GPUImageHazeFilter *)filter setDistance:[(UISlider *)sender value]]; break;
case GPUIMAGE_THRESHOLD: [(GPUImageLuminanceThresholdFilter *)filter setThreshold:[(UISlider *)sender value]]; break;
@@ -767,11 +825,12 @@ - (IBAction)updateFilterFromSlider:(id)sender;
case GPUIMAGE_PINCH: [(GPUImagePinchDistortionFilter *)filter setScale:[(UISlider *)sender value]]; break;
case GPUIMAGE_VIGNETTE: [(GPUImageVignetteFilter *)filter setY:[(UISlider *)sender value]]; break;
case GPUIMAGE_GAUSSIAN: [(GPUImageGaussianBlurFilter *)filter setBlurSize:[(UISlider*)sender value]]; break;
+ case GPUIMAGE_BILATERAL: [(GPUImageBilateralFilter *)filter setBlurSize:[(UISlider*)sender value]]; break;
case GPUIMAGE_FASTBLUR: [(GPUImageFastBlurFilter *)filter setBlurPasses:round([(UISlider*)sender value])]; break;
// case GPUIMAGE_FASTBLUR: [(GPUImageFastBlurFilter *)filter setBlurSize:[(UISlider*)sender value]]; break;
case GPUIMAGE_GAUSSIAN_SELECTIVE: [(GPUImageGaussianSelectiveBlurFilter *)filter setExcludeCircleRadius:[(UISlider*)sender value]]; break;
case GPUIMAGE_FILTERGROUP: [(GPUImagePixellateFilter *)[(GPUImageFilterGroup *)filter filterAtIndex:1] setFractionalWidthOfAPixel:[(UISlider *)sender value]]; break;
- case GPUIMAGE_CROP: [(GPUImageCropFilter *)filter setCropRegion:CGRectMake(0.0, 0.0, [(UISlider*)sender value], [(UISlider*)sender value] * 0.5)]; break;
+ case GPUIMAGE_CROP: [(GPUImageCropFilter *)filter setCropRegion:CGRectMake(0.0, 0.0, 1.0, [(UISlider*)sender value])]; break;
case GPUIMAGE_TRANSFORM: [(GPUImageTransformFilter *)filter setAffineTransform:CGAffineTransformMakeRotation([(UISlider*)sender value])]; break;
case GPUIMAGE_TRANSFORM3D:
{
View
15 examples/MultiViewFilterExample/MultiViewFilterExample/MultiViewViewController.m
@@ -25,7 +25,8 @@ - (void)loadView
self.view = primaryView;
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
-
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
+
CGFloat halfWidth = round(mainScreenFrame.size.width / 2.0);
CGFloat halfHeight = round(mainScreenFrame.size.height / 2.0);
view1 = [[GPUImageView alloc] initWithFrame:CGRectMake(0.0, 0.0, halfWidth, halfHeight)];
@@ -37,7 +38,6 @@ - (void)loadView
[self.view addSubview:view3];
[self.view addSubview:view4];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
GPUImageFilter *filter1 = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"Shader1"];
GPUImageFilter *filter2 = [[GPUImageFilter alloc] initWithFragmentShaderFromFile:@"Shader2"];
GPUImageSepiaFilter *filter3 = [[GPUImageSepiaFilter alloc] init];
@@ -46,19 +46,16 @@ - (void)loadView
// This is to avoid wasting processing time on larger frames than will be displayed.
// You'll need to use -forceProcessingAtSize: with a zero size to re-enable full frame processing of video.
- [rotationFilter forceProcessingAtSize:view1.sizeInPixels];
[filter1 forceProcessingAtSize:view2.sizeInPixels];
[filter2 forceProcessingAtSize:view3.sizeInPixels];
[filter3 forceProcessingAtSize:view4.sizeInPixels];
- [videoCamera addTarget:rotationFilter];
-
- [rotationFilter addTarget:view1];
- [rotationFilter addTarget:filter1];
+ [videoCamera addTarget:view1];
+ [videoCamera addTarget:filter1];
[filter1 addTarget:view2];
- [rotationFilter addTarget:filter2];
+ [videoCamera addTarget:filter2];
[filter2 addTarget:view3];
- [rotationFilter addTarget:filter3];
+ [videoCamera addTarget:filter3];
[filter3 addTarget:view4];
[videoCamera startCameraCapture];
View
13 examples/SimpleImageFilter/SimpleImageFilter/SimpleImageViewController.m
@@ -45,6 +45,7 @@ - (void)setupDisplayFiltering;
sourcePicture = [[GPUImagePicture alloc] initWithImage:inputImage smoothlyScaleOutput:YES];
sepiaFilter = [[GPUImageSepiaFilter alloc] init];
+
GPUImageView *imageView = (GPUImageView *)self.view;
[sepiaFilter forceProcessingAtSize:imageView.sizeInPixels]; // This is now needed to make the filter run at the smaller output size
@@ -59,15 +60,13 @@ - (void)setupImageFilteringToDisk;
{
// Set up a manual image filtering chain
UIImage *inputImage = [UIImage imageNamed:@"Lambeau.jpg"];
-
+
GPUImagePicture *stillImageSource = [[GPUImagePicture alloc] initWithImage:inputImage];
GPUImageSepiaFilter *stillImageFilter = [[GPUImageSepiaFilter alloc] init];
GPUImageVignetteFilter *vignetteImageFilter = [[GPUImageVignetteFilter alloc] init];
vignetteImageFilter.x = 0.6;
vignetteImageFilter.y = 0.4;
-// GPUImageSketchFilter *stillImageFilter = [[GPUImageSketchFilter alloc] init];
-
// There's a problem with the Kuwahara filter where it doesn't finish rendering before the image is extracted from it.
// It looks like it only gets through certain tiles before glReadPixels() is called. Odd.
// GPUImageKuwaharaFilter *stillImageFilter = [[GPUImageKuwaharaFilter alloc] init];
@@ -76,18 +75,18 @@ - (void)setupImageFilteringToDisk;
[stillImageSource addTarget:stillImageFilter];
[stillImageFilter addTarget:vignetteImageFilter];
[vignetteImageFilter prepareForImageCapture];
- [stillImageSource processImage];
- UIImage *currentFilteredVideoFrame = [vignetteImageFilter imageFromCurrentlyProcessedOutput];
+ [stillImageSource processImage];
+ UIImage *currentFilteredImage = [vignetteImageFilter imageFromCurrentlyProcessedOutput];
+
// Do a simpler image filtering
-// GPUImageSepiaFilter *stillImageFilter2 = [[GPUImageSepiaFilter alloc] init];
GPUImageSketchFilter *stillImageFilter2 = [[GPUImageSketchFilter alloc] init];
UIImage *quickFilteredImage = [stillImageFilter2 imageByFilteringImage:inputImage];
// Write images to disk, as proof
- NSData *dataForPNGFile = UIImagePNGRepresentation(currentFilteredVideoFrame);
+ NSData *dataForPNGFile = UIImagePNGRepresentation(currentFilteredImage);
NSData *dataForPNGFile2 = UIImagePNGRepresentation(quickFilteredImage);
NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
View
37 examples/SimplePhotoFilter/SimplePhotoFilter/PhotoViewController.m
@@ -50,19 +50,18 @@ - (void)viewDidLoad
[super viewDidLoad];
stillCamera = [[GPUImageStillCamera alloc] init];
+// stillCamera = [[GPUImageStillCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionFront];
+ stillCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
// filter = [[GPUImageGammaFilter alloc] init];
filter = [[GPUImageSketchFilter alloc] init];
- [(GPUImageSketchFilter *)filter setImageHeightFactor:1024.0];
- [(GPUImageSketchFilter *)filter setImageWidthFactor:768.0];
+ [(GPUImageSketchFilter *)filter setTexelHeight:(1.0 / 1024.0)];
+ [(GPUImageSketchFilter *)filter setTexelWidth:(1.0 / 768.0)];
// filter = [[GPUImageSmoothToonFilter alloc] init];
// filter = [[GPUImageSepiaFilter alloc] init];
[filter prepareForImageCapture];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
-
- [stillCamera addTarget:rotationFilter];
- [rotationFilter addTarget:filter];
+ [stillCamera addTarget:filter];
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];
@@ -94,28 +93,15 @@ - (IBAction)takePhoto:(id)sender;
{
[photoCaptureButton setEnabled:NO];
- [stillCamera capturePhotoProcessedUpToFilter:filter withCompletionHandler:^(UIImage *processedImage, NSError *error){
-
- // Having both this and the asset library saving uses twice the memory sometimes
-// NSData *dataForPNGFile = UIImageJPEGRepresentation(processedImage, 0.8);
-//
-// NSArray *paths = NSSearchPathForDirectoriesInDomains(NSDocumentDirectory, NSUserDomainMask, YES);
-// NSString *documentsDirectory = [paths objectAtIndex:0];
-//
-// NSError *error2 = nil;
-// if (![dataForPNGFile writeToFile:[documentsDirectory stringByAppendingPathComponent:@"FilteredPhoto.jpg"] options:NSAtomicWrite error:&error2])
-// {
-// return;
-// }
+ [stillCamera capturePhotoAsJPEGProcessedUpToFilter:filter withCompletionHandler:^(NSData *processedJPEG, NSError *error){
// Save to assets library
ALAssetsLibrary *library = [[ALAssetsLibrary alloc] init];
-
- CGImageRef imageRef = [processedImage CGImage];
- CGImageRetain(imageRef);
-
- [library writeImageToSavedPhotosAlbum:imageRef metadata:nil completionBlock:^(NSURL *assetURL, NSError *error2)
+// report_memory(@"After asset library creation");
+
+ [library writeImageDataToSavedPhotosAlbum:processedJPEG metadata:nil completionBlock:^(NSURL *assetURL, NSError *error2)
{
+// report_memory(@"After writing to library");
if (error2) {
NSLog(@"ERROR: the image failed to be written");
}
@@ -123,9 +109,8 @@ - (IBAction)takePhoto:(id)sender;
NSLog(@"PHOTO SAVED - assetURL: %@", assetURL);
}
- CGImageRelease(imageRef);
-
runOnMainQueueWithoutDeadlocking(^{
+// report_memory(@"Operation completed");
[photoCaptureButton setEnabled:YES];
});
}];
View
4 examples/SimpleVideoFileFilter/SimpleVideoFileFilter/SimpleVideoFileFilterViewController.m
@@ -27,14 +27,12 @@ - (void)viewDidLoad
movieFile.runBenchmark = YES;
// filter = [[GPUImagePixellateFilter alloc] init];
filter = [[GPUImageUnsharpMaskFilter alloc] init];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
[movieFile addTarget:filter];
// Only rotate the video for display, leave orientation the same for recording
GPUImageView *filterView = (GPUImageView *)self.view;
- [filter addTarget:rotationFilter];
- [rotationFilter addTarget:filterView];
+ [filter addTarget:filterView];
// In addition to displaying to the screen, write out a processed version of the movie to disk
NSString *pathToMovie = [NSHomeDirectory() stringByAppendingPathComponent:@"Documents/Movie.m4v"];
View
8 examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.m
@@ -22,6 +22,7 @@ - (void)viewDidLoad
[super viewDidLoad];
videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionBack];
+ videoCamera.outputImageOrientation = UIInterfaceOrientationPortrait;
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset640x480 cameraPosition:AVCaptureDevicePositionFront];
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1280x720 cameraPosition:AVCaptureDevicePositionBack];
// videoCamera = [[GPUImageVideoCamera alloc] initWithSessionPreset:AVCaptureSessionPreset1920x1080 cameraPosition:AVCaptureDevicePositionBack];
@@ -35,11 +36,9 @@ - (void)viewDidLoad
// filter = [[GPUImageSketchFilter alloc] init];
// filter = [[GPUImageSmoothToonFilter alloc] init];
- GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRight];
// GPUImageRotationFilter *rotationFilter = [[GPUImageRotationFilter alloc] initWithRotation:kGPUImageRotateRightFlipVertical];
- [videoCamera addTarget:rotationFilter];
- [rotationFilter addTarget:filter];
+ [videoCamera addTarget:filter];
GPUImageView *filterView = (GPUImageView *)self.view;
[filter addTarget:filterView];
// filterView.fillMode = kGPUImageFillModeStretch;
@@ -100,8 +99,7 @@ - (BOOL)shouldAutorotateToInterfaceOrientation:(UIInterfaceOrientation)interface
- (IBAction)updateSliderValue:(id)sender
{
- [(GPUImagePixellateFilter *)filter setFractionalWidthOfAPixel:[(UISlider *)sender value]];
-// [(GPUImageSketchFilter *)filter setIntensity:1.0];
+ [(GPUImageSepiaFilter *)filter setIntensity:[(UISlider *)sender value]];
}
@end
View
16 examples/SimpleVideoFilter/SimpleVideoFilter/SimpleVideoFilterViewController.xib
@@ -3,12 +3,12 @@
<data>
<int key="IBDocument.SystemTarget">1280</int>
<string key="IBDocument.SystemVersion">11D50</string>
- <string key="IBDocument.InterfaceBuilderVersion">2177</string>
+ <string key="IBDocument.InterfaceBuilderVersion">2182</string>
<string key="IBDocument.AppKitVersion">1138.32</string>
<string key="IBDocument.HIToolboxVersion">568.00</string>
<object class="NSMutableDictionary" key="IBDocument.PluginVersions">
<string key="NS.key.0">com.apple.InterfaceBuilder.IBCocoaTouchPlugin</string>
- <string key="NS.object.0">1173</string>
+ <string key="NS.object.0">1179</string>
</object>
<array key="IBDocument.IntegratedClassDependencies">
<string>IBProxyObject</string>
@@ -41,13 +41,13 @@
<string key="NSFrame">{{18, 418}, {284, 23}}</string>
<reference key="NSSuperview" ref="191373211"/>
<reference key="NSWindow"/>
+ <reference key="NSNextKeyView"/>
<string key="NSReuseIdentifierKey">_NS:623</string>
<bool key="IBUIOpaque">NO</bool>
<string key="targetRuntimeIdentifier">IBCocoaTouchFramework</string>
<int key="IBUIContentHorizontalAlignment">0</int>
<int key="IBUIContentVerticalAlignment">0</int>
- <float key="IBUIValue">0.10000000149011612</float>
- <float key="IBUIMaxValue">0.30000001192092896</float>
+ <float key="IBUIValue">1</float>
</object>
</array>
<string key="NSFrame">{{0, 20}, {320, 460}}</string>
@@ -148,13 +148,13 @@
<string key="className">SimpleVideoFilterViewController</string>
<string key="superclassName">UIViewController</string>
<object class="NSMutableDictionary" key="actions">
- <string key="NS.key.0">updatePixelWidth:</string>
+ <string key="NS.key.0">updateSliderValue:</string>
<string key="NS.object.0">id</string>
</object>
<object class="NSMutableDictionary" key="actionInfosByName">
- <string key="NS.key.0">updatePixelWidth:</string>
+ <string key="NS.key.0">updateSliderValue:</string>
<object class="IBActionInfo" key="NS.object.0">
- <string key="name">updatePixelWidth:</string>
+ <string key="name">updateSliderValue:</string>
<string key="candidateClassName">id</string>
</object>
</object>
@@ -169,6 +169,6 @@
<string key="IBDocument.TargetRuntimeIdentifier">IBCocoaTouchFramework</string>
<bool key="IBDocument.PluginDeclaredDependenciesTrackSystemTargetVersion">YES</bool>
<int key="IBDocument.defaultPropertyAccessControl">3</int>
- <string key="IBCocoaTouchPluginVersion">1173</string>
+ <string key="IBCocoaTouchPluginVersion">1179</string>
</data>
</archive>
View
121 framework/GPUImage.xcodeproj/project.pbxproj 100755 → 100644
@@ -6,6 +6,20 @@
objectVersion = 46;
objects = {
+/* Begin PBXAggregateTarget section */
+ BC552B361558C6F4001F3FFA /* Documentation */ = {
+ isa = PBXAggregateTarget;
+ buildConfigurationList = BC552B371558C6F4001F3FFA /* Build configuration list for PBXAggregateTarget "Documentation" */;
+ buildPhases = (
+ BC552B3A1558C6FC001F3FFA /* ShellScript */,
+ );
+ dependencies = (
+ );
+ name = Documentation;
+ productName = Documentation;
+ };
+/* End PBXAggregateTarget section */
+
/* Begin PBXBuildFile section */
0DF3FA2B14FA00C9006AF7D9 /* GPUImageVignetteFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 0DF3FA2914FA00C9006AF7D9 /* GPUImageVignetteFilter.h */; };
0DF3FA2C14FA00C9006AF7D9 /* GPUImageVignetteFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = 0DF3FA2A14FA00C9006AF7D9 /* GPUImageVignetteFilter.m */; };
@@ -19,6 +33,12 @@
83AE9FCE1540E92800F7FC13 /* GPUImageMaskFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = 83AE9FCC1540E92800F7FC13 /* GPUImageMaskFilter.m */; };
B81521A214F1BA6A00F105F8 /* GPUImageColorMatrixFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = B81521A014F1BA6A00F105F8 /* GPUImageColorMatrixFilter.h */; };
B81521A314F1BA6A00F105F8 /* GPUImageColorMatrixFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = B81521A114F1BA6A00F105F8 /* GPUImageColorMatrixFilter.m */; };
+ BC01E82D155CA3F1004C75C3 /* GPUImageMedianFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC01E82B155CA3F1004C75C3 /* GPUImageMedianFilter.h */; };
+ BC01E82E155CA3F1004C75C3 /* GPUImageMedianFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC01E82C155CA3F1004C75C3 /* GPUImageMedianFilter.m */; };
+ BC01E832155CA5E2004C75C3 /* GPUImage3x3TextureSamplingFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC01E830155CA5E1004C75C3 /* GPUImage3x3TextureSamplingFilter.h */; };
+ BC01E833155CA5E2004C75C3 /* GPUImage3x3TextureSamplingFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC01E831155CA5E1004C75C3 /* GPUImage3x3TextureSamplingFilter.m */; };
+ BC114898155AF65400F107AF /* GPUImageTwoInputFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC114896155AF65400F107AF /* GPUImageTwoInputFilter.h */; };
+ BC114899155AF65400F107AF /* GPUImageTwoInputFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC114897155AF65400F107AF /* GPUImageTwoInputFilter.m */; };
BC1A47F514FC759D00D552E8 /* GPUImageGaussianBlurFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = 0D04CB7B14FB2A29001D6733 /* GPUImageGaussianBlurFilter.m */; };
BC1A483E14FD1EF900D552E8 /* GPUImageGaussianBlurFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = 0D04CB7A14FB2A29001D6733 /* GPUImageGaussianBlurFilter.h */; };
BC1B715714F49DAA00ACA2AB /* GPUImageRawData.h in Headers */ = {isa = PBXBuildFile; fileRef = BC1B715514F49DAA00ACA2AB /* GPUImageRawData.h */; };
@@ -36,6 +56,8 @@
BC54D564151904FF003F4A41 /* GPUImageChromaKeyBlendFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC54D562151904FF003F4A41 /* GPUImageChromaKeyBlendFilter.m */; };
BC6ED9C21549CA0600966798 /* GPUImageHistogramFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC6ED9C01549CA0600966798 /* GPUImageHistogramFilter.h */; };
BC6ED9C31549CA0600966798 /* GPUImageHistogramFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC6ED9C11549CA0600966798 /* GPUImageHistogramFilter.m */; };
+ BC7CC63515605A3500468085 /* GPUImageBilateralFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC7CC63315605A3500468085 /* GPUImageBilateralFilter.h */; };
+ BC7CC63615605A3500468085 /* GPUImageBilateralFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BC7CC63415605A3500468085 /* GPUImageBilateralFilter.m */; };
BC7D95D51523EE67000DF037 /* GPUImageStillCamera.h in Headers */ = {isa = PBXBuildFile; fileRef = BC7D95D31523EE67000DF037 /* GPUImageStillCamera.h */; };
BC7D95D61523EE67000DF037 /* GPUImageStillCamera.m in Sources */ = {isa = PBXBuildFile; fileRef = BC7D95D41523EE67000DF037 /* GPUImageStillCamera.m */; };
BC982B5314F07F790001FF6F /* GPUImageColorInvertFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BC982B4F14F07F790001FF6F /* GPUImageColorInvertFilter.h */; };
@@ -102,8 +124,6 @@
BCB5E7C314E4B6D400701302 /* GPUImageSepiaFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB5E7C114E4B6D400701302 /* GPUImageSepiaFilter.m */; };
BCB5E7DA14E6003400701302 /* GPUImagePicture.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB5E7D814E6003400701302 /* GPUImagePicture.h */; };
BCB5E7DB14E6003400701302 /* GPUImagePicture.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB5E7D914E6003400701302 /* GPUImagePicture.m */; };
- BCB5E85514E63BBB00701302 /* GPUImageRotationFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB5E85314E63BBB00701302 /* GPUImageRotationFilter.h */; };
- BCB5E85614E63BBB00701302 /* GPUImageRotationFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB5E85414E63BBB00701302 /* GPUImageRotationFilter.m */; };
BCB6B837150400030041703B /* GPUImageExposureFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB6B833150400030041703B /* GPUImageExposureFilter.h */; };
BCB6B838150400030041703B /* GPUImageExposureFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCB6B834150400030041703B /* GPUImageExposureFilter.m */; };
BCB6B839150400030041703B /* GPUImageSharpenFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCB6B835150400030041703B /* GPUImageSharpenFilter.h */; };
@@ -178,10 +198,8 @@
BCF3D723153E0E0C009A1FE5 /* GPUImageThresholdEdgeDetection.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF3D721153E0E0B009A1FE5 /* GPUImageThresholdEdgeDetection.m */; };
BCF3D730153F0D6F009A1FE5 /* GPUImageSmoothToonFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BCF3D72E153F0D6E009A1FE5 /* GPUImageSmoothToonFilter.h */; };
BCF3D731153F0D6F009A1FE5 /* GPUImageSmoothToonFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BCF3D72F153F0D6F009A1FE5 /* GPUImageSmoothToonFilter.m */; };
- F2C5D53D1552ED9800B963A1 /* GPUImageJFAVoroni.h in Headers */ = {isa = PBXBuildFile; fileRef = F2C5D5391552ED9800B963A1 /* GPUImageJFAVoroni.h */; };
- F2C5D53E1552ED9800B963A1 /* GPUImageJFAVoroni.m in Sources */ = {isa = PBXBuildFile; fileRef = F2C5D53A1552ED9800B963A1 /* GPUImageJFAVoroni.m */; };
- F2C5D53F1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = F2C5D53B1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.h */; };
- F2C5D5401552ED9800B963A1 /* GPUImageVoroniConsumerFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = F2C5D53C1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.m */; };
+ BEBE83B5155C092A00EEF8C3 /* GPUImageRGBFilter.h in Headers */ = {isa = PBXBuildFile; fileRef = BEBE83B3155C092A00EEF8C3 /* GPUImageRGBFilter.h */; };
+ BEBE83B6155C092A00EEF8C3 /* GPUImageRGBFilter.m in Sources */ = {isa = PBXBuildFile; fileRef = BEBE83B4155C092A00EEF8C3 /* GPUImageRGBFilter.m */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
@@ -213,6 +231,12 @@
83AE9FCC1540E92800F7FC13 /* GPUImageMaskFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMaskFilter.m; path = Source/GPUImageMaskFilter.m; sourceTree = SOURCE_ROOT; };
B81521A014F1BA6A00F105F8 /* GPUImageColorMatrixFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorMatrixFilter.h; path = Source/GPUImageColorMatrixFilter.h; sourceTree = SOURCE_ROOT; };
B81521A114F1BA6A00F105F8 /* GPUImageColorMatrixFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageColorMatrixFilter.m; path = Source/GPUImageColorMatrixFilter.m; sourceTree = SOURCE_ROOT; };
+ BC01E82B155CA3F1004C75C3 /* GPUImageMedianFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageMedianFilter.h; path = Source/GPUImageMedianFilter.h; sourceTree = SOURCE_ROOT; };
+ BC01E82C155CA3F1004C75C3 /* GPUImageMedianFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageMedianFilter.m; path = Source/GPUImageMedianFilter.m; sourceTree = SOURCE_ROOT; };
+ BC01E830155CA5E1004C75C3 /* GPUImage3x3TextureSamplingFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImage3x3TextureSamplingFilter.h; path = Source/GPUImage3x3TextureSamplingFilter.h; sourceTree = SOURCE_ROOT; };
+ BC01E831155CA5E1004C75C3 /* GPUImage3x3TextureSamplingFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImage3x3TextureSamplingFilter.m; path = Source/GPUImage3x3TextureSamplingFilter.m; sourceTree = SOURCE_ROOT; };
+ BC114896155AF65400F107AF /* GPUImageTwoInputFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageTwoInputFilter.h; path = Source/GPUImageTwoInputFilter.h; sourceTree = SOURCE_ROOT; };
+ BC114897155AF65400F107AF /* GPUImageTwoInputFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageTwoInputFilter.m; path = Source/GPUImageTwoInputFilter.m; sourceTree = SOURCE_ROOT; };
BC1B715514F49DAA00ACA2AB /* GPUImageRawData.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRawData.h; path = Source/GPUImageRawData.h; sourceTree = SOURCE_ROOT; };
BC1B715614F49DAA00ACA2AB /* GPUImageRawData.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRawData.m; path = Source/GPUImageRawData.m; sourceTree = SOURCE_ROOT; };
BC1B717A14F566E200ACA2AB /* GPUImageSketchFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSketchFilter.h; path = Source/GPUImageSketchFilter.h; sourceTree = SOURCE_ROOT; };
@@ -228,6 +252,8 @@
BC54D562151904FF003F4A41 /* GPUImageChromaKeyBlendFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageChromaKeyBlendFilter.m; path = Source/GPUImageChromaKeyBlendFilter.m; sourceTree = SOURCE_ROOT; };
BC6ED9C01549CA0600966798 /* GPUImageHistogramFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageHistogramFilter.h; path = Source/GPUImageHistogramFilter.h; sourceTree = SOURCE_ROOT; };
BC6ED9C11549CA0600966798 /* GPUImageHistogramFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageHistogramFilter.m; path = Source/GPUImageHistogramFilter.m; sourceTree = SOURCE_ROOT; };
+ BC7CC63315605A3500468085 /* GPUImageBilateralFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageBilateralFilter.h; path = Source/GPUImageBilateralFilter.h; sourceTree = SOURCE_ROOT; };
+ BC7CC63415605A3500468085 /* GPUImageBilateralFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageBilateralFilter.m; path = Source/GPUImageBilateralFilter.m; sourceTree = SOURCE_ROOT; };
BC7D95D31523EE67000DF037 /* GPUImageStillCamera.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageStillCamera.h; path = Source/GPUImageStillCamera.h; sourceTree = SOURCE_ROOT; };
BC7D95D41523EE67000DF037 /* GPUImageStillCamera.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageStillCamera.m; path = Source/GPUImageStillCamera.m; sourceTree = SOURCE_ROOT; };
BC982B4F14F07F790001FF6F /* GPUImageColorInvertFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageColorInvertFilter.h; path = Source/GPUImageColorInvertFilter.h; sourceTree = SOURCE_ROOT; };
@@ -294,8 +320,6 @@
BCB5E7C114E4B6D400701302 /* GPUImageSepiaFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSepiaFilter.m; path = Source/GPUImageSepiaFilter.m; sourceTree = SOURCE_ROOT; };
BCB5E7D814E6003400701302 /* GPUImagePicture.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImagePicture.h; path = Source/GPUImagePicture.h; sourceTree = SOURCE_ROOT; };
BCB5E7D914E6003400701302 /* GPUImagePicture.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImagePicture.m; path = Source/GPUImagePicture.m; sourceTree = SOURCE_ROOT; };
- BCB5E85314E63BBB00701302 /* GPUImageRotationFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRotationFilter.h; path = Source/GPUImageRotationFilter.h; sourceTree = SOURCE_ROOT; };
- BCB5E85414E63BBB00701302 /* GPUImageRotationFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRotationFilter.m; path = Source/GPUImageRotationFilter.m; sourceTree = SOURCE_ROOT; };
BCB6B833150400030041703B /* GPUImageExposureFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageExposureFilter.h; path = Source/GPUImageExposureFilter.h; sourceTree = SOURCE_ROOT; };
BCB6B834150400030041703B /* GPUImageExposureFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageExposureFilter.m; path = Source/GPUImageExposureFilter.m; sourceTree = SOURCE_ROOT; };
BCB6B835150400030041703B /* GPUImageSharpenFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSharpenFilter.h; path = Source/GPUImageSharpenFilter.h; sourceTree = SOURCE_ROOT; };
@@ -369,10 +393,8 @@
BCF3D721153E0E0B009A1FE5 /* GPUImageThresholdEdgeDetection.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageThresholdEdgeDetection.m; path = Source/GPUImageThresholdEdgeDetection.m; sourceTree = SOURCE_ROOT; };
BCF3D72E153F0D6E009A1FE5 /* GPUImageSmoothToonFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageSmoothToonFilter.h; path = Source/GPUImageSmoothToonFilter.h; sourceTree = SOURCE_ROOT; };
BCF3D72F153F0D6F009A1FE5 /* GPUImageSmoothToonFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageSmoothToonFilter.m; path = Source/GPUImageSmoothToonFilter.m; sourceTree = SOURCE_ROOT; };
- F2C5D5391552ED9800B963A1 /* GPUImageJFAVoroni.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageJFAVoroni.h; path = Source/GPUImageJFAVoroni.h; sourceTree = SOURCE_ROOT; };
- F2C5D53A1552ED9800B963A1 /* GPUImageJFAVoroni.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageJFAVoroni.m; path = Source/GPUImageJFAVoroni.m; sourceTree = SOURCE_ROOT; };
- F2C5D53B1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageVoroniConsumerFilter.h; path = Source/GPUImageVoroniConsumerFilter.h; sourceTree = SOURCE_ROOT; };
- F2C5D53C1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageVoroniConsumerFilter.m; path = Source/GPUImageVoroniConsumerFilter.m; sourceTree = SOURCE_ROOT; };
+ BEBE83B3155C092A00EEF8C3 /* GPUImageRGBFilter.h */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.h; name = GPUImageRGBFilter.h; path = Source/GPUImageRGBFilter.h; sourceTree = SOURCE_ROOT; };
+ BEBE83B4155C092A00EEF8C3 /* GPUImageRGBFilter.m */ = {isa = PBXFileReference; fileEncoding = 4; lastKnownFileType = sourcecode.c.objc; name = GPUImageRGBFilter.m; path = Source/GPUImageRGBFilter.m; sourceTree = SOURCE_ROOT; };
/* End PBXFileReference section */
/* Begin PBXFrameworksBuildPhase section */
@@ -415,6 +437,8 @@
BC1B715D14F4AFFF00ACA2AB /* Color processing */ = {
isa = PBXGroup;
children = (
+ BEBE83B3155C092A00EEF8C3 /* GPUImageRGBFilter.h */,
+ BEBE83B4155C092A00EEF8C3 /* GPUImageRGBFilter.m */,
BC982B7714F098CC0001FF6F /* GPUImageBrightnessFilter.h */,
BC982B7814F098CC0001FF6F /* GPUImageBrightnessFilter.m */,
BCB6B833150400030041703B /* GPUImageExposureFilter.h */,
@@ -487,10 +511,6 @@
BC1B715F14F4B06600ACA2AB /* Effects */ = {
isa = PBXGroup;
children = (
- F2C5D5391552ED9800B963A1 /* GPUImageJFAVoroni.h */,
- F2C5D53A1552ED9800B963A1 /* GPUImageJFAVoroni.m */,
- F2C5D53B1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.h */,
- F2C5D53C1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.m */,
BCB5DE0B14E87F32000AF3C2 /* GPUImagePixellateFilter.h */,
BCB5DE0C14E87F32000AF3C2 /* GPUImagePixellateFilter.m */,
BCABED8A15263CF20098A93E /* GPUImagePolarPixellateFilter.h */,
@@ -540,10 +560,14 @@
children = (
BC245DC814DDBED7009FE7EB /* GPUImageFilter.h */,
BC245DC914DDBED7009FE7EB /* GPUImageFilter.m */,
+ BC114896155AF65400F107AF /* GPUImageTwoInputFilter.h */,
+ BC114897155AF65400F107AF /* GPUImageTwoInputFilter.m */,
BCC93A1C1501E42E00958B26 /* GPUImageTwoPassFilter.h */,
BCC93A1D1501E42F00958B26 /* GPUImageTwoPassFilter.m */,
BCC1E5B6151E83700006EFA5 /* GPUImageFilterGroup.h */,
BCC1E5B7151E83700006EFA5 /* GPUImageFilterGroup.m */,
+ BC01E830155CA5E1004C75C3 /* GPUImage3x3TextureSamplingFilter.h */,
+ BC01E831155CA5E1004C75C3 /* GPUImage3x3TextureSamplingFilter.m */,
BC1B715D14F4AFFF00ACA2AB /* Color processing */,
BCC93A5215031B1700958B26 /* Image processing */,
BC1B715E14F4B04800ACA2AB /* Blends */,
@@ -587,22 +611,24 @@
BCC93A5215031B1700958B26 /* Image processing */ = {
isa = PBXGroup;
children = (
- BCB5E85314E63BBB00701302 /* GPUImageRotationFilter.h */,
- BCB5E85414E63BBB00701302 /* GPUImageRotationFilter.m */,
BCC94CF815101EB3002F9BC5 /* GPUImageTransformFilter.h */,
BCC94CF915101EB3002F9BC5 /* GPUImageTransformFilter.m */,
BCB6B9021507CA8C0041703B /* GPUImageCropFilter.h */,
BCB6B9031507CA8C0041703B /* GPUImageCropFilter.m */,
BCB6B835150400030041703B /* GPUImageSharpenFilter.h */,
BCB6B836150400030041703B /* GPUImageSharpenFilter.m */,
+ BCC1E5C5151EA12B0006EFA5 /* GPUImageBoxBlurFilter.h */,
+ BCC1E5C6151EA12B0006EFA5 /* GPUImageBoxBlurFilter.m */,
BCC93A0D1501D1BF00958B26 /* GPUImageFastBlurFilter.h */,
BCC93A0E1501D1BF00958B26 /* GPUImageFastBlurFilter.m */,
0D04CB7A14FB2A29001D6733 /* GPUImageGaussianBlurFilter.h */,
0D04CB7B14FB2A29001D6733 /* GPUImageGaussianBlurFilter.m */,
0D9D91AD15011CA200A5BC83 /* GPUImageGaussianSelectiveBlurFilter.h */,
0D9D91AE15011CA200A5BC83 /* GPUImageGaussianSelectiveBlurFilter.m */,
- BCC1E5C5151EA12B0006EFA5 /* GPUImageBoxBlurFilter.h */,
- BCC1E5C6151EA12B0006EFA5 /* GPUImageBoxBlurFilter.m */,
+ BC01E82B155CA3F1004C75C3 /* GPUImageMedianFilter.h */,
+ BC01E82C155CA3F1004C75C3 /* GPUImageMedianFilter.m */,
+ BC7CC63315605A3500468085 /* GPUImageBilateralFilter.h */,
+ BC7CC63415605A3500468085 /* GPUImageBilateralFilter.m */,
BCC1E60E152156620006EFA5 /* GPUImageUnsharpMaskFilter.h */,
BCC1E60F152156620006EFA5 /* GPUImageUnsharpMaskFilter.m */,
BCF3D6DB153CFF61009A1FE5 /* GPUImageTiltShiftFilter.h */,
@@ -715,7 +741,6 @@
BCB5E79414E3275200701302 /* GLProgram.h in Headers */,
BCB5E7C214E4B6D400701302 /* GPUImageSepiaFilter.h in Headers */,
BCB5E7DA14E6003400701302 /* GPUImagePicture.h in Headers */,
- BCB5E85514E63BBB00701302 /* GPUImageRotationFilter.h in Headers */,
BCB5DE0D14E87F32000AF3C2 /* GPUImagePixellateFilter.h in Headers */,
BC982B5314F07F790001FF6F /* GPUImageColorInvertFilter.h in Headers */,
BC982B5514F07F790001FF6F /* GPUImageSaturationFilter.h in Headers */,
@@ -786,8 +811,11 @@
BCAD0981154F7B2800278521 /* GPUImageHarrisCornerDetectionFilter.h in Headers */,
BCAD0985154F931C00278521 /* GPUImageAlphaBlendFilter.h in Headers */,
BCAD099F15506F6F00278521 /* GPUImageNonMaximumSuppressionFilter.h in Headers */,
- F2C5D53D1552ED9800B963A1 /* GPUImageJFAVoroni.h in Headers */,
- F2C5D53F1552ED9800B963A1 /* GPUImageVoroniConsumerFilter.h in Headers */,
+ BC114898155AF65400F107AF /* GPUImageTwoInputFilter.h in Headers */,
+ BEBE83B5155C092A00EEF8C3 /* GPUImageRGBFilter.h in Headers */,
+ BC01E82D155CA3F1004C75C3 /* GPUImageMedianFilter.h in Headers */,
+ BC01E832155CA5E2004C75C3 /* GPUImage3x3TextureSamplingFilter.h in Headers */,
+ BC7CC63515605A3500468085 /* GPUImageBilateralFilter.h in Headers */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -853,6 +881,7 @@
targets = (
BCF1A33314DDB1EC00852800 /* GPUImage */,
BCF1A34314DDB1EC00852800 /* GPUImageTests */,
+ BC552B361558C6F4001F3FFA /* Documentation */,
);
};
/* End PBXProject section */
@@ -869,6 +898,19 @@
/* End PBXResourcesBuildPhase section */
/* Begin PBXShellScriptBuildPhase section */
+ BC552B3A1558C6FC001F3FFA /* ShellScript */ = {
+ isa = PBXShellScriptBuildPhase;
+ buildActionMask = 2147483647;
+ files = (
+ );
+ inputPaths = (
+ );
+ outputPaths = (
+ );
+ runOnlyForDeploymentPostprocessing = 0;
+ shellPath = /bin/sh;
+ shellScript = "# docsetutil was moved in Xcode 4.3\n# if you're using Xcode 4.2 or earlier, remove --docsetutil-path\n\n# to show missing documentation warnings, add --logformat xcode\n${APPLEDOC_PATH} \\\n--project-name \"GPUImage\" \\\n--project-company \"Sunset Lake Software\" \\\n--company-id \"com.sunsetlakesoftware\" \\\n--output \"${SOURCE_ROOT}/../documentation\" \\\n--docsetutil-path \"/Applications/Xcode.app/Contents/Developer/usr/bin/docsetutil\" \\\n--keep-undocumented-objects \\\n--keep-undocumented-members \\\n--create-html \\\n--install-docset \\\n--keep-intermediate-files \\\n--no-repeat-first-par \\\n--exit-threshold 9999 \\\n--clean-output \\\n--ignore .m \\\n${SOURCE_ROOT}";
+ };
BCF1A34214DDB1EC00852800 /* ShellScript */ = {
isa = PBXShellScriptBuildPhase;
buildActionMask = 2147483647;
@@ -897,7 +939,6 @@
BCB5E79514E3275200701302 /* GLProgram.m in Sources */,
BCB5E7C314E4B6D400701302 /* GPUImageSepiaFilter.m in Sources */,
BCB5E7DB14E6003400701302 /* GPUImagePicture.m in Sources */,
- BCB5E85614E63BBB00701302 /* GPUImageRotationFilter.m in Sources */,
BCB5DE0E14E87F32000AF3C2 /* GPUImagePixellateFilter.m in Sources */,
BC982B5414F07F790001FF6F /* GPUImageColorInvertFilter.m in Sources */,
BC982B5614F07F790001FF6F /* GPUImageSaturationFilter.m in Sources */,
@@ -968,8 +1009,11 @@
BCAD0982154F7B2800278521 /* GPUImageHarrisCornerDetectionFilter.m in Sources */,
BCAD0986154F931C00278521 /* GPUImageAlphaBlendFilter.m in Sources */,
BCAD09A015506F6F00278521 /* GPUImageNonMaximumSuppressionFilter.m in Sources */,
- F2C5D53E1552ED9800B963A1 /* GPUImageJFAVoroni.m in Sources */,
- F2C5D5401552ED9800B963A1 /* GPUImageVoroniConsumerFilter.m in Sources */,
+ BC114899155AF65400F107AF /* GPUImageTwoInputFilter.m in Sources */,
+ BEBE83B6155C092A00EEF8C3 /* GPUImageRGBFilter.m in Sources */,
+ BC01E82E155CA3F1004C75C3 /* GPUImageMedianFilter.m in Sources */,
+ BC01E833155CA5E2004C75C3 /* GPUImage3x3TextureSamplingFilter.m in Sources */,
+ BC7CC63615605A3500468085 /* GPUImageBilateralFilter.m in Sources */,
);
runOnlyForDeploymentPostprocessing = 0;
};
@@ -1003,6 +1047,22 @@
/* End PBXVariantGroup section */
/* Begin XCBuildConfiguration section */
+ BC552B381558C6F4001F3FFA /* Debug */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ APPLEDOC_PATH = /usr/local/bin/appledoc;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Debug;
+ };
+ BC552B391558C6F4001F3FFA /* Release */ = {
+ isa = XCBuildConfiguration;
+ buildSettings = {
+ APPLEDOC_PATH = /usr/local/bin/appledoc;
+ PRODUCT_NAME = "$(TARGET_NAME)";
+ };
+ name = Release;
+ };
BCF1A35614DDB1EC00852800 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
@@ -1114,6 +1174,15 @@
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
+ BC552B371558C6F4001F3FFA /* Build configuration list for PBXAggregateTarget "Documentation" */ = {
+ isa = XCConfigurationList;
+ buildConfigurations = (
+ BC552B381558C6F4001F3FFA /* Debug */,
+ BC552B391558C6F4001F3FFA /* Release */,
+ );
+ defaultConfigurationIsVisible = 0;
+ defaultConfigurationName = Release;
+ };
BCF1A32E14DDB1EC00852800 /* Build configuration list for PBXProject "GPUImage" */ = {
isa = XCConfigurationList;
buildConfigurations = (
View
5 framework/Source/GPUImage.h
@@ -17,7 +17,7 @@
// Filters
#import "GPUImageFilter.h"
-#import "GPUImageRotationFilter.h"
+#import "GPUImageTwoInputFilter.h"
#import "GPUImagePixellateFilter.h"
#import "GPUImageSepiaFilter.h"
#import "GPUImageColorInvertFilter.h"
@@ -79,3 +79,6 @@
#import "GPUImageHarrisCornerDetectionFilter.h"
#import "GPUImageAlphaBlendFilter.h"
#import "GPUImageNonMaximumSuppressionFilter.h"
+#import "GPUImageRGBFilter.h"
+#import "GPUImageMedianFilter.h"
+#import "GPUImageBilateralFilter.h"
View
9 framework/Source/GPUImage3x3ConvolutionFilter.h
@@ -1,13 +1,8 @@
-#import "GPUImageFilter.h"
+#import "GPUImage3x3TextureSamplingFilter.h"
-extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
-
-@interface GPUImage3x3ConvolutionFilter : GPUImageFilter
+@interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
{
GLint convolutionMatrixUniform;
- GLint imageWidthFactorUniform, imageHeightFactorUniform;
-
- CGFloat imageWidthFactor, imageHeightFactor;
}
// The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels. The matrix is specified in row-major order,
View
60 framework/Source/GPUImage3x3ConvolutionFilter.m
@@ -1,50 +1,5 @@
#import "GPUImage3x3ConvolutionFilter.h"
-// Override vertex shader to remove dependent texture reads
-NSString *const kGPUImageNearbyTexelSamplingVertexShaderString = SHADER_STRING
-(
- attribute vec4 position;
- attribute vec4 inputTextureCoordinate;
-
- uniform highp float imageWidthFactor;
- uniform highp float imageHeightFactor;
-
- varying vec2 textureCoordinate;
- varying vec2 leftTextureCoordinate;
- varying vec2 rightTextureCoordinate;
-
- varying vec2 topTextureCoordinate;
- varying vec2 topLeftTextureCoordinate;
- varying vec2 topRightTextureCoordinate;
-
- varying vec2 bottomTextureCoordinate;
- varying vec2 bottomLeftTextureCoordinate;
- varying vec2 bottomRightTextureCoordinate;
-
- void main()
- {
- gl_Position = position;
-
- vec2 widthStep = vec2(imageWidthFactor, 0.0);
- vec2 heightStep = vec2(0.0, imageHeightFactor);
- vec2 widthHeightStep = vec2(imageWidthFactor, imageHeightFactor);
- vec2 widthNegativeHeightStep = vec2(imageWidthFactor, -imageHeightFactor);
-
- textureCoordinate = inputTextureCoordinate.xy;
- leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
- rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
-
- topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
- topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;
- topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;
-
- bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
- bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;
- bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;
- }
-);
-
-
NSString *const kGPUImage3x3ConvolutionFragmentShaderString = SHADER_STRING
(
precision highp float;
@@ -94,14 +49,12 @@ @implementation GPUImage3x3ConvolutionFilter
- (id)init;
{
- if (!(self = [super initWithVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString fragmentShaderFromString:kGPUImage3x3ConvolutionFragmentShaderString]))
+ if (!(self = [super initWithFragmentShaderFromString:kGPUImage3x3ConvolutionFragmentShaderString]))
{
return nil;
}
convolutionMatrixUniform = [filterProgram uniformIndex:@"convolutionMatrix"];
- imageWidthFactorUniform = [filterProgram uniformIndex:@"imageWidthFactor"];
- imageHeightFactorUniform = [filterProgram uniformIndex:@"imageHeightFactor"];
self.convolutionKernel = (GPUMatrix3x3){
{0.f, 0.f, 0.f},
@@ -112,17 +65,6 @@ - (id)init;
return self;
}
-- (void)setupFilterForSize:(CGSize)filterFrameSize;
-{
- imageWidthFactor = filterFrameSize.width;
- imageHeightFactor = filterFrameSize.height;
-
- [GPUImageOpenGLESContext useImageProcessingContext];
- [filterProgram use];
- glUniform1f(imageWidthFactorUniform, 1.0 / imageWidthFactor);
- glUniform1f(imageHeightFactorUniform, 1.0 / imageHeightFactor);
-}
-
#pragma mark -
#pragma mark Accessors
View
18 framework/Source/GPUImage3x3TextureSamplingFilter.h
@@ -0,0 +1,18 @@
+#import "GPUImageFilter.h"
+
+extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;
+
+@interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
+{
+ GLint texelWidthUniform, texelHeightUniform;
+
+ CGFloat texelWidth, texelHeight;
+ BOOL hasOverriddenImageSizeFactor;
+}
+
+// The texel width and height determine how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
+@property(readwrite, nonatomic) CGFloat texelWidth;
+@property(readwrite, nonatomic) CGFloat texelHeight;
+
+
+@end
View
107 framework/Source/GPUImage3x3TextureSamplingFilter.m
@@ -0,0 +1,107 @@
+#import "GPUImage3x3TextureSamplingFilter.h"
+
+// Override vertex shader to remove dependent texture reads
+NSString *const kGPUImageNearbyTexelSamplingVertexShaderString = SHADER_STRING
+(
+ attribute vec4 position;
+ attribute vec4 inputTextureCoordinate;
+
+ uniform highp float texelWidth;
+ uniform highp float texelHeight;
+
+ varying vec2 textureCoordinate;
+ varying vec2 leftTextureCoordinate;
+ varying vec2 rightTextureCoordinate;
+
+ varying vec2 topTextureCoordinate;
+ varying vec2 topLeftTextureCoordinate;
+ varying vec2 topRightTextureCoordinate;
+
+ varying vec2 bottomTextureCoordinate;
+ varying vec2 bottomLeftTextureCoordinate;
+ varying vec2 bottomRightTextureCoordinate;
+
+ void main()
+ {
+ gl_Position = position;
+
+ vec2 widthStep = vec2(texelWidth, 0.0);
+ vec2 heightStep = vec2(0.0, texelHeight);
+ vec2 widthHeightStep = vec2(texelWidth, texelHeight);
+ vec2 widthNegativeHeightStep = vec2(texelWidth, -texelHeight);
+
+ textureCoordinate = inputTextureCoordinate.xy;
+ leftTextureCoordinate = inputTextureCoordinate.xy - widthStep;
+ rightTextureCoordinate = inputTextureCoordinate.xy + widthStep;
+
+ topTextureCoordinate = inputTextureCoordinate.xy - heightStep;
+ topLeftTextureCoordinate = inputTextureCoordinate.xy - widthHeightStep;
+ topRightTextureCoordinate = inputTextureCoordinate.xy + widthNegativeHeightStep;
+
+ bottomTextureCoordinate = inputTextureCoordinate.xy + heightStep;
+ bottomLeftTextureCoordinate = inputTextureCoordinate.xy - widthNegativeHeightStep;
+ bottomRightTextureCoordinate = inputTextureCoordinate.xy + widthHeightStep;
+ }
+);
+
+
+@implementation GPUImage3x3TextureSamplingFilter
+
+@synthesize texelWidth = _texelWidth;
+@synthesize texelHeight = _texelHeight;
+
+#pragma mark -
+#pragma mark Initialization and teardown
+
+- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
+{
+ if (!(self = [super initWithVertexShaderFromString:kGPUImageNearbyTexelSamplingVertexShaderString fragmentShaderFromString:fragmentShaderString]))
+ {
+ return nil;
+ }
+
+ texelWidthUniform = [filterProgram uniformIndex:@"texelWidth"];
+ texelHeightUniform = [filterProgram uniformIndex:@"texelHeight"];
+
+ return self;
+}
+
+- (void)setupFilterForSize:(CGSize)filterFrameSize;
+{
+ if (!hasOverriddenImageSizeFactor)
+ {
+ _texelWidth = 1.0 / filterFrameSize.width;
+ _texelHeight = 1.0 / filterFrameSize.height;
+
+ [GPUImageOpenGLESContext useImageProcessingContext];
+ [filterProgram use];
+ glUniform1f(texelWidthUniform, _texelWidth);
+ glUniform1f(texelHeightUniform, _texelHeight);
+ }
+}
+
+#pragma mark -
+#pragma mark Accessors
+
+- (void)setTexelWidth:(CGFloat)newValue;
+{
+ hasOverriddenImageSizeFactor = YES;
+ _texelWidth = newValue;
+
+ [GPUImageOpenGLESContext useImageProcessingContext];
+ [filterProgram use];
+ glUniform1f(texelWidthUniform, _texelWidth);
+}
+
+- (void)setTexelHeight:(CGFloat)newValue;
+{
+ hasOverriddenImageSizeFactor = YES;
+ _texelHeight = newValue;
+
+ [GPUImageOpenGLESContext useImageProcessingContext];
+ [filterProgram use];
+ glUniform1f(texelHeightUniform, _texelHeight);
+}
+
+
+@end
View
13 framework/Source/GPUImageAdaptiveThresholdFilter.m
@@ -1,23 +1,26 @@
#import "GPUImageAdaptiveThresholdFilter.h"
#import "GPUImageFilter.h"
+#import "GPUImageTwoInputFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImageBoxBlurFilter.h"
NSString *const kGPUImageAdaptiveThresholdFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
+ varying highp vec2 textureCoordinate2;
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
- highp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- highp float localLuminance = texture2D(inputImageTexture2, textureCoordinate).r;
- highp float thresholdResult = step(localLuminance - 0.05, textureColor.r);
+ highp float blurredInput = texture2D(inputImageTexture, textureCoordinate).r;
+ highp float localLuminance = texture2D(inputImageTexture2, textureCoordinate2).r;
+ highp float thresholdResult = step(blurredInput - 0.05, localLuminance);
- gl_FragColor = vec4(vec3(thresholdResult), textureColor.w);
+ gl_FragColor = vec4(vec3(thresholdResult), 1.0);
// gl_FragColor = vec4(localLuminance, textureColor.r, 0.0, textureColor.w);
+// gl_FragColor = vec4(localLuminance, localLuminance, localLuminance, 1.0);
}
);
@@ -39,7 +42,7 @@ - (id)init;
[self addFilter:boxBlurFilter];
// Third pass: compare the blurred background luminance to the local value
- GPUImageFilter *adaptiveThresholdFilter = [[GPUImageFilter alloc] initWithFragmentShaderFromString:kGPUImageAdaptiveThresholdFragmentShaderString];
+ GPUImageFilter *adaptiveThresholdFilter = [[GPUImageTwoInputFilter alloc] initWithFragmentShaderFromString:kGPUImageAdaptiveThresholdFragmentShaderString];
[self addFilter:adaptiveThresholdFilter];
[luminanceFilter addTarget:boxBlurFilter];
View
4 framework/Source/GPUImageAlphaBlendFilter.h
@@ -1,6 +1,6 @@
-#import "GPUImageFilter.h"
+#import "GPUImageTwoInputFilter.h"
-@interface GPUImageAlphaBlendFilter : GPUImageFilter
+@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
{
GLint mixUniform;
}
View
5 framework/Source/GPUImageAlphaBlendFilter.m
@@ -3,7 +3,8 @@
NSString *const kGPUImageAlphaBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
-
+ varying highp vec2 textureCoordinate2;
+
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
@@ -12,7 +13,7 @@
void main()
{
lowp vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);
+ lowp vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
gl_FragColor = vec4(mix(textureColor.rgb, textureColor2.rgb, textureColor2.a * mixturePercent), textureColor.a);
}
View
5 framework/Source/GPUImageBilateralFilter.h
@@ -0,0 +1,5 @@
+#import "GPUImageGaussianBlurFilter.h"
+
+@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
+
+@end
View
87 framework/Source/GPUImageBilateralFilter.m
@@ -0,0 +1,87 @@
+#import "GPUImageBilateralFilter.h"
+
+NSString *const kGPUImageBilateralFilterFragmentShaderString = SHADER_STRING
+(
+ uniform sampler2D inputImageTexture;
+
+ const lowp int GAUSSIAN_SAMPLES = 9;
+
+ varying highp vec2 textureCoordinate;
+ varying highp vec2 blurCoordinates[GAUSSIAN_SAMPLES];
+
+ const mediump float distanceNormalizationFactor = 0.6933613;
+
+ void main() {
+ lowp vec4 centralColor = texture2D(inputImageTexture, blurCoordinates[4]);
+ lowp float gaussianWeightTotal = 0.18;
+ lowp vec4 sum = centralColor * 0.18;
+
+ lowp vec4 sampleColor = texture2D(inputImageTexture, blurCoordinates[0]);
+ mediump float distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ lowp float gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[1]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[2]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[3]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[5]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.15 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[6]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.12 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[7]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.09 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ sampleColor = texture2D(inputImageTexture, blurCoordinates[8]);
+ distanceFromCentralColor = distance(centralColor, sampleColor) * distanceNormalizationFactor;
+ gaussianWeight = 0.05 * (1.0 - distanceFromCentralColor);
+ gaussianWeightTotal += gaussianWeight;
+ sum += sampleColor * gaussianWeight;
+
+ gl_FragColor = sum / gaussianWeightTotal;
+ }
+);
+
+@implementation GPUImageBilateralFilter
+
+- (id)init;
+{
+
+ if (!(self = [super initWithFirstStageVertexShaderFromString:nil
+ firstStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString
+ secondStageVertexShaderFromString:nil
+ secondStageFragmentShaderFromString:kGPUImageBilateralFilterFragmentShaderString])) {
+ return nil;
+ }
+
+
+ return self;
+}
+
+@end
View
12 framework/Source/GPUImageBoxBlurFilter.m
@@ -96,8 +96,16 @@ - (void)setupFilterForSize:(CGSize)filterFrameSize;
{
[GPUImageOpenGLESContext useImageProcessingContext];
[filterProgram use];
- glUniform1f(verticalPassTexelWidthOffsetUniform, 0.0);
- glUniform1f(verticalPassTexelHeightOffsetUniform, 1.0 / filterFrameSize.height);
+ if (GPUImageRotationSwapsWidthAndHeight(inputRotation))
+ {
+ glUniform1f(verticalPassTexelWidthOffsetUniform, 1.0 / filterFrameSize.height);
+ glUniform1f(verticalPassTexelHeightOffsetUniform, 0.0);
+ }
+ else
+ {
+ glUniform1f(verticalPassTexelWidthOffsetUniform, 0.0);
+ glUniform1f(verticalPassTexelHeightOffsetUniform, 1.0 / filterFrameSize.height);
+ }
[secondFilterProgram use];
glUniform1f(horizontalPassTexelWidthOffsetUniform, 1.0 / filterFrameSize.width);
View
7 framework/Source/GPUImageBulgeDistortionFilter.h
@@ -1,15 +1,16 @@
#import "GPUImageFilter.h"
+/// Creates a bulge distortion on the image
@interface GPUImageBulgeDistortionFilter : GPUImageFilter
{
GLint radiusUniform, centerUniform, scaleUniform;
}
-// The center about which to apply the distortion, with a default of (0.5, 0.5)
+/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
-// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
+/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
@property(readwrite, nonatomic) CGFloat radius;
-// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
+/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
@property(readwrite, nonatomic) CGFloat scale;
@end
View
12 framework/Source/GPUImageBulgeDistortionFilter.m
@@ -59,6 +59,12 @@ - (id)init;
#pragma mark -
#pragma mark Accessors
+- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
+{
+ [super setInputRotation:newInputRotation atIndex:textureIndex];
+ [self setCenter:self.center];
+}
+
- (void)setRadius:(CGFloat)newValue;
{
_radius = newValue;
@@ -84,9 +90,11 @@ - (void)setCenter:(CGPoint)newValue;
[GPUImageOpenGLESContext useImageProcessingContext];
[filterProgram use];
+ CGPoint rotatedPoint = [self rotatedPoint:_center forRotation:inputRotation];
+
GLfloat centerPosition[2];
- centerPosition[0] = _center.x;
- centerPosition[1] = _center.y;
+ centerPosition[0] = rotatedPoint.x;
+ centerPosition[1] = rotatedPoint.y;
glUniform2fv(centerUniform, 1, centerPosition);
}
View
6 framework/Source/GPUImageCannyEdgeDetectionFilter.h
@@ -11,9 +11,9 @@
// GPUImageSketchFilter *edgeDetectionFilter;
}
-// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
-@property(readwrite, nonatomic) CGFloat imageWidthFactor;
-@property(readwrite, nonatomic) CGFloat imageHeightFactor;
+// The texel width and height tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels
+@property(readwrite, nonatomic) CGFloat texelWidth;
+@property(readwrite, nonatomic) CGFloat texelHeight;
// A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
@property (readwrite, nonatomic) CGFloat blurSize;
View
20 framework/Source/GPUImageCannyEdgeDetectionFilter.m
@@ -7,8 +7,8 @@ @implementation GPUImageCannyEdgeDetectionFilter
@synthesize threshold;
@synthesize blurSize;
-@synthesize imageWidthFactor;
-@synthesize imageHeightFactor;
+@synthesize texelWidth;
+@synthesize texelHeight;
- (id)init;
{
@@ -51,24 +51,24 @@ - (CGFloat)blurSize;
return blurFilter.blurSize;
}
-- (void)setImageWidthFactor:(CGFloat)newValue;
+- (void)setTexelWidth:(CGFloat)newValue;
{
- edgeDetectionFilter.imageWidthFactor = newValue;
+ edgeDetectionFilter.texelWidth = newValue;
}
-- (CGFloat)imageWidthFactor;
+- (CGFloat)texelWidth;
{
- return edgeDetectionFilter.imageWidthFactor;
+ return edgeDetectionFilter.texelWidth;
}
-- (void)setImageHeightFactor:(CGFloat)newValue;
+- (void)setTexelHeight:(CGFloat)newValue;
{
- edgeDetectionFilter.imageHeightFactor = newValue;
+ edgeDetectionFilter.texelHeight = newValue;
}
-- (CGFloat)imageHeightFactor;
+- (CGFloat)texelHeight;
{
- return edgeDetectionFilter.imageHeightFactor;
+ return edgeDetectionFilter.texelHeight;
}
- (void)setThreshold:(CGFloat)newValue;
View
4 framework/Source/GPUImageChromaKeyBlendFilter.h
@@ -1,6 +1,6 @@
-#import "GPUImageFilter.h"
+#import "GPUImageTwoInputFilter.h"
-@interface GPUImageChromaKeyBlendFilter : GPUImageFilter
+@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
{
GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
}
View
5 framework/Source/GPUImageChromaKeyBlendFilter.m
@@ -7,7 +7,8 @@
precision highp float;
varying highp vec2 textureCoordinate;
-
+ varying highp vec2 textureCoordinate2;
+
uniform float thresholdSensitivity;
uniform float smoothing;
uniform vec3 colorToReplace;
@@ -17,7 +18,7 @@
void main()
{
vec3 textureColor = texture2D(inputImageTexture, textureCoordinate).rgb;
- vec3 textureColor2 = texture2D(inputImageTexture2, textureCoordinate).rgb;
+ vec3 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2).rgb;
float maskY = 0.2989 * colorToReplace.r + 0.5866 * colorToReplace.g + 0.1145 * colorToReplace.b;
float maskCr = 0.7132 * (colorToReplace.r - maskY);
View
4 framework/Source/GPUImageColorBurnBlendFilter.h
@@ -1,6 +1,6 @@
-#import "GPUImageFilter.h"
+#import "GPUImageTwoInputFilter.h"
-@interface GPUImageColorBurnBlendFilter : GPUImageFilter
+@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
{
}
View
5 framework/Source/GPUImageColorBurnBlendFilter.m
@@ -3,14 +3,15 @@
NSString *const kGPUImageColorBurnBlendFragmentShaderString = SHADER_STRING
(
varying highp vec2 textureCoordinate;
-
+ varying highp vec2 textureCoordinate2;
+
uniform sampler2D inputImageTexture;
uniform sampler2D inputImageTexture2;
void main()
{
mediump vec4 textureColor = texture2D(inputImageTexture, textureCoordinate);
- mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate);
+ mediump vec4 textureColor2 = texture2D(inputImageTexture2, textureCoordinate2);
mediump vec4 whiteColor = vec4(1.0);
gl_FragColor = whiteColor - (whiteColor - textureColor) / textureColor2;
}
View
4 framework/Source/GPUImageColorDodgeBlendFilter.h
@@ -1,6 +1,6 @@
-#import "GPUImageFilter.h"
+#import "GPUImageTwoInputFilter.h"
-@interface GPUImageColorDodgeBlendFilter : GPUImageFilter
+@interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter
{
}
View
3  framework/Source/GPUImageColorDodgeBlendFilter.m