text
stringlengths 2
104M
| meta
dict |
---|---|
//
// UIView(Extention).m
// CoreImageDemo
//
// Created by Gejiaxin on 16/12/28.
// Copyright © 2016 VincentJac. All rights reserved.
//
#import "UIView+Extention.h"
@implementation UIView (Extention)

#pragma mark - Frame shortcuts

/// Y coordinate of the frame's origin.
- (CGFloat)top {
    return self.frame.origin.y;
}

/// Moves the view vertically; size is unchanged.
- (void)setTop:(CGFloat)y {
    CGRect frame = self.frame;
    frame.origin.y = y;
    self.frame = frame;
}

/// X coordinate of the frame's origin.
- (CGFloat)left {
    return self.frame.origin.x;
}

/// Moves the view horizontally; size is unchanged.
- (void)setLeft:(CGFloat)x {
    CGRect frame = self.frame;
    frame.origin.x = x;
    self.frame = frame;
}

/// Maximum Y of the frame (top + height).
- (CGFloat)bottom {
    return self.frame.origin.y + self.frame.size.height;
}

/// Positions the bottom edge at `bottom`; size is unchanged.
- (void)setBottom:(CGFloat)bottom {
    CGRect frame = self.frame;
    frame.origin.y = bottom - frame.size.height;
    self.frame = frame;
}

/// Maximum X of the frame (left + width).
- (CGFloat)right {
    return self.frame.origin.x + self.frame.size.width;
}

/// Positions the right edge at `right`; size is unchanged.
- (void)setRight:(CGFloat)right {
    CGRect frame = self.frame;
    frame.origin.x = right - frame.size.width;
    self.frame = frame;
}

/// X coordinate of the view's center.
- (CGFloat)centerX {
    return self.center.x;
}

- (void)setCenterX:(CGFloat)centerX {
    self.center = CGPointMake(centerX, self.center.y);
}

/// Y coordinate of the view's center.
- (CGFloat)centerY {
    return self.center.y;
}

- (void)setCenterY:(CGFloat)centerY {
    self.center = CGPointMake(self.center.x, centerY);
}

/// Frame width.
- (CGFloat)width {
    return self.frame.size.width;
}

- (void)setWidth:(CGFloat)width {
    CGRect frame = self.frame;
    frame.size.width = width;
    self.frame = frame;
}

/// Frame height.
- (CGFloat)height {
    return self.frame.size.height;
}

- (void)setHeight:(CGFloat)height {
    CGRect frame = self.frame;
    frame.size.height = height;
    self.frame = frame;
}

#pragma mark - Screen-relative positions

/// X position accumulated over the whole superview chain (ignores scroll offsets).
- (CGFloat)screenX {
    CGFloat x = 0.0f;
    for (UIView *view = self; view; view = view.superview) {
        x += view.left;
    }
    return x;
}

/// Y position accumulated over the whole superview chain (ignores scroll offsets).
- (CGFloat)screenY {
    CGFloat y = 0.0f;
    for (UIView *view = self; view; view = view.superview) {
        y += view.top;
    }
    return y;
}

/// Like screenX, but compensates for the content offset of enclosing scroll views.
- (CGFloat)screenViewX {
    CGFloat x = 0.0f;
    for (UIView *view = self; view; view = view.superview) {
        x += view.left;
        if ([view isKindOfClass:[UIScrollView class]]) {
            x -= ((UIScrollView *)view).contentOffset.x;
        }
    }
    return x;
}

/// Like screenY, but compensates for the content offset of enclosing scroll views.
- (CGFloat)screenViewY {
    CGFloat y = 0.0f;
    for (UIView *view = self; view; view = view.superview) {
        y += view.top;
        if ([view isKindOfClass:[UIScrollView class]]) {
            y -= ((UIScrollView *)view).contentOffset.y;
        }
    }
    return y;
}

#pragma mark - Origin / size

- (CGPoint)origin {
    return self.frame.origin;
}

- (void)setOrigin:(CGPoint)origin {
    CGRect frame = self.frame;
    frame.origin = origin;
    self.frame = frame;
}

- (CGSize)size {
    return self.frame.size;
}

- (void)setSize:(CGSize)size {
    CGRect frame = self.frame;
    frame.size = size;
    self.frame = frame;
}

#pragma mark - Orientation helpers

/// Width for the current interface orientation (swaps with height in landscape).
/// NOTE(review): statusBarOrientation is deprecated on modern iOS — consider the
/// window scene's interfaceOrientation when the deployment target allows.
- (CGFloat)orientationWidth {
    return UIInterfaceOrientationIsLandscape([UIApplication sharedApplication].statusBarOrientation)
        ? self.height : self.width;
}

/// Height for the current interface orientation (swaps with width in landscape).
- (CGFloat)orientationHeight {
    return UIInterfaceOrientationIsLandscape([UIApplication sharedApplication].statusBarOrientation)
        ? self.width : self.height;
}

/// Offset of this view relative to `otherView`, walking up the superview chain.
/// If `otherView` is not an ancestor, returns the accumulated offset of the
/// entire chain (same as screenX/screenY).
- (CGPoint)offsetFromView:(UIView *)otherView {
    CGFloat x = 0.0f, y = 0.0f;
    for (UIView *view = self; view && view != otherView; view = view.superview) {
        x += view.left;
        y += view.top;
    }
    return CGPointMake(x, y);
}

/// Frame in screen coordinates. NOTE(review): the size used is the SCREEN size
/// (see screenSize), not this view's own size — preserved from the original
/// implementation; confirm that is intentional. (Debug NSLog calls removed.)
- (CGRect)screenFrame {
    return CGRectMake(self.screenViewX, self.screenViewY, self.screenSize.width, self.screenSize.height);
}

/// Size of the main screen, swapped to landscape when the device (or, when the
/// device orientation is flat/unknown, the status bar) is in landscape.
- (CGSize)screenSize {
    CGRect screenBounds = [[UIScreen mainScreen] bounds];
    // CGFloat (was int): avoid truncating fractional point sizes.
    CGFloat w = screenBounds.size.width;
    CGFloat h = screenBounds.size.height;
    UIDeviceOrientation deviceOrientation = [[UIDevice currentDevice] orientation];
    UIInterfaceOrientation statusOrientation = [[UIApplication sharedApplication] statusBarOrientation];
    // Fix: use UIInterfaceOrientationIsLandscape for the interface orientation
    // (the original passed it through the UIDeviceOrientation macro).
    BOOL landscape = UIDeviceOrientationIsLandscape(deviceOrientation)
        || (!UIDeviceOrientationIsPortrait(deviceOrientation)
            && UIInterfaceOrientationIsLandscape(statusOrientation));
    if (landscape) {
        return CGSizeMake(h, w);
    }
    return CGSizeMake(w, h);
}

/// NOTE(review): asymmetric with the getter — the setter resizes this view's
/// frame while the getter reports the device screen size. Preserved as-is.
- (void)setScreenSize:(CGSize)screenSize {
    CGRect frame = self.frame;
    frame.size = screenSize;
    self.frame = frame;
}

#pragma mark - Short aliases

- (CGFloat)x {
    return self.left;
}

- (void)setX:(CGFloat)value {
    self.left = value;
}

- (CGFloat)y {
    return self.top;
}

- (void)setY:(CGFloat)value {
    self.top = value;
}

- (CGFloat)w {
    return self.width;
}

- (void)setW:(CGFloat)width {
    self.width = width;
}

- (CGFloat)h {
    return self.height;
}

- (void)setH:(CGFloat)height {
    self.height = height;
}

/// Removes every subview. Iterates from the end so each removal is cheap.
- (void)removeAllSubviews {
    while (self.subviews.count) {
        [self.subviews.lastObject removeFromSuperview];
    }
}

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// main.m
// CoreImageDemo
//
// Created by Gejiaxin on 16/12/25.
// Copyright © 2016年 VincentJac. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "AppDelegate.h"
/// App entry point: boots UIKit with AppDelegate as the application delegate.
int main(int argc, char * argv[]) {
    @autoreleasepool {
        NSString *delegateClassName = NSStringFromClass([AppDelegate class]);
        return UIApplicationMain(argc, argv, nil, delegateClassName);
    }
}
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// SnapseedDropMenu.h
// CoreImageDemo
//
// Created by Gejiaxin on 16/12/28.
// Copyright © 2016 VincentJac. All rights reserved.
//
#import <UIKit/UIKit.h>
#import "UIView+Extention.h"
/// Model for a single drop-menu row: a title plus the adjustable value's
/// default and allowed range.
@interface SnapseedDropMenuModel : NSObject
@property (nonatomic, assign) CGFloat defaultValue;  // initial value shown for the row
@property (nonatomic, assign) CGFloat maxValue;      // upper bound for adjustments
@property (nonatomic, assign) CGFloat minValue;      // lower bound for adjustments
@property (nonatomic, copy) NSString * title;        // row label
/// Designated initializer populating all four fields.
- (instancetype)initWithTitle:(NSString *)title defaultValue:(CGFloat)defaultValue maxValue:(CGFloat)maxValue minValue:(CGFloat)minValue;
@end
@class SnapseedDropMenu;
/// Callbacks reporting selection and value changes driven by pan gestures.
@protocol SnapseedDropMenuDelegate<NSObject>
/// Vertical pan ended: a row was selected; reports its current value.
- (void)snapseedDropMenu:(SnapseedDropMenu*)sender didSelectCellAtIndex:(NSInteger)index value:(CGFloat)value;
/// Horizontal pan in progress: the selected row's value is being adjusted.
- (void)snapseedDropMenu:(SnapseedDropMenu*)sender atIndex:(NSInteger)index isChanging:(CGFloat)value;
/// Horizontal pan ended: the selected row's value is final.
- (void)snapseedDropMenu:(SnapseedDropMenu*)sender atIndex:(NSInteger)index valueDidChange:(CGFloat)value;
@end
/// Snapseed-style drop menu: a table view revealed and driven entirely by a
/// pan gesture attached to a host view (vertical pan selects a row,
/// horizontal pan adjusts the selected value).
@interface SnapseedDropMenu : UITableView
@property (nonatomic, weak) id<SnapseedDropMenuDelegate> dropMenuDelegate;
@property (nonatomic, assign) NSInteger selectNum;  // index of the currently highlighted row
- (instancetype)initWithArray:(NSArray *)array viewCenterPoint:(CGPoint)origin inView:(UIView *)superView;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// SnapseedDropMenu.m
// CoreImageDemo
//
// Created by Gejiaxin on 16/12/28.
// Copyright © 2016 VincentJac. All rights reserved.
//
#import "SnapseedDropMenu.h"
#import "SnapseedDropMenuTableViewCell.h"
#import <math.h>
@implementation SnapseedDropMenuModel

/// Designated initializer. `title` is copied (the property is declared `copy`).
- (instancetype)initWithTitle:(NSString *)title defaultValue:(CGFloat)defaultValue maxValue:(CGFloat)maxValue minValue:(CGFloat)minValue {
    self = [super init];
    // Fix: check the result of [super init] before touching state, and assign
    // ivars directly — accessors should not be invoked from init.
    if (self) {
        _title = [title copy];
        _defaultValue = defaultValue;
        _maxValue = maxValue;
        _minValue = minValue;
    }
    return self;
}

@end
// Direction state for the pan gesture driving the menu.
typedef NS_ENUM(NSInteger, PanGestureDirection) {
    NoGestureDirection = 0,  // direction not yet determined
    LeftOrRight = 1,         // locked to horizontal movement (value adjustment)
    UpOrDown,                // locked to vertical movement (row selection)
    PanGestureUp,            // the four cases below are not referenced in this file
    PanGestureDown,
    PanGestureleft,          // NOTE(review): lowercase 'l' typo; left as-is — renaming could break other users
    PanGestureRight
};
// Minimum translation (in points) before a pan is classified as horizontal or vertical.
#define MoveZoom 10
// Private state for SnapseedDropMenu.
@interface SnapseedDropMenu()<UITableViewDelegate,UITableViewDataSource> {
    CGFloat * _intValueArray;  // per-row current value, malloc'ed in init (length == dataArray.count)
}
@property (nonatomic, copy)NSArray<SnapseedDropMenuModel*> * dataArray;  // row models
@property (nonatomic, strong) UIView * superView;  // view hosting the pan gesture recognizer
@property (nonatomic, assign) PanGestureDirection lastGestureDirecttion;  // NOTE(review): typo'd name; not read in this file
@property (nonatomic, assign) PanGestureDirection gestureLock;  // direction the current pan is locked to
@property (nonatomic, assign) NSInteger originY;  // menu's rest position (top edge Y)
@property (nonatomic, assign) NSInteger curValue;  // NOTE(review): not read in this file
@end
@implementation SnapseedDropMenu

/// Builds the menu (one row per model in `array`), centers it at `origin`,
/// and installs a pan gesture recognizer on `superView` that drives it.
- (instancetype)initWithArray:(NSArray *)array viewCenterPoint:(CGPoint)origin inView:(UIView *)superView {
    self = [super initWithFrame:CGRectMake(0, 0, SnapseedDropMenuCellWidth, SnapseedDropMenuCellHeight * array.count)];
    // Fix: guard on [super init...] before configuring state.
    if (self) {
        _dataArray = array;
        self.centerX = origin.x;
        self.centerY = origin.y;
        self.originY = origin.y - (self.height / 2);
        self.dataSource = self;
        self.delegate = self;
        self.rowHeight = SnapseedDropMenuCellHeight;
        self.backgroundColor = [UIColor clearColor];
        self.scrollEnabled = NO;  // vertical movement is driven by the pan gesture, not scrolling
        self.hidden = YES;        // revealed only while a vertical pan is active
        _superView = superView;
        _selectNum = 0;
        [self reloadData];
        UIPanGestureRecognizer *panGesture = [[UIPanGestureRecognizer alloc] initWithTarget:self
                                                                                    action:@selector(panGesture:)];
        [_superView addGestureRecognizer:panGesture];
        self.separatorStyle = UITableViewCellSeparatorStyleNone;
        // Current value per row, seeded with each model's default.
        _intValueArray = (CGFloat *)malloc(sizeof(CGFloat) * array.count);
        for (int i = 0; i < _dataArray.count; i++) {
            SnapseedDropMenuModel *model = [_dataArray objectAtIndex:i];
            _intValueArray[i] = model.defaultValue;
        }
    }
    return self;
}

/// Fix: _intValueArray is malloc'ed in the initializer but was never freed,
/// leaking on every menu deallocation.
- (void)dealloc {
    free(_intValueArray);
    _intValueArray = NULL;
}

/// Maps the menu's current vertical offset from its rest position to a row
/// index and refreshes the highlight.
- (void)selectCellByOffsetY {
    CGFloat distant = self.y - self.originY;
    NSInteger selectNum = 0;
    if (distant >= 0) {
        selectNum = distant / SnapseedDropMenuCellHeight;
    } else {
        selectNum = (-distant) / SnapseedDropMenuCellHeight;
    }
    _selectNum = selectNum;
    [self reloadData];
}

#pragma mark - UITableView

- (NSInteger)tableView:(UITableView *)tableView numberOfRowsInSection:(NSInteger)section {
    return _dataArray.count;
}

- (UITableViewCell *)tableView:(UITableView *)tableView cellForRowAtIndexPath:(NSIndexPath *)indexPath {
    SnapseedDropMenuModel *model = [_dataArray objectAtIndex:indexPath.row];
    SnapseedDropMenuTableViewCell *cell = [tableView dequeueReusableCellWithIdentifier:@"SnapseedDrowMenuCell"];
    if (!cell) {
        cell = [[SnapseedDropMenuTableViewCell alloc] initWithStyle:UITableViewCellStyleDefault
                                                    reuseIdentifier:@"SnapseedDrowMenuCell"];
        cell.backgroundColor = [UIColor clearColor];
    }
    cell.title.text = model.title;
    cell.valueLab.text = [NSString stringWithFormat:@"%.1f",_intValueArray[indexPath.row]];
    // Highlight the currently selected row.
    cell.mainView.backgroundColor = (indexPath.row == _selectNum) ? COLOR_14 : COLOR_20;
    return cell;
}

#pragma mark - Gesture handling

/// Vertical pans move the menu and change the selected row; horizontal pans
/// adjust the selected row's value. The first significant movement locks the
/// gesture into one of the two modes until it ends.
- (void)panGesture:(id)sender {
    UIPanGestureRecognizer *panGesture = sender;
    if (!_superView) {
        return;
    }
    switch (panGesture.state) {
        case UIGestureRecognizerStateEnded: {
            // Report the final selection/value to the delegate, then hide.
            if (self.dropMenuDelegate && _gestureLock == UpOrDown) {
                if ([self.dropMenuDelegate respondsToSelector:@selector(snapseedDropMenu:didSelectCellAtIndex:value:)]) {
                    [self.dropMenuDelegate snapseedDropMenu:self
                                       didSelectCellAtIndex:_selectNum
                                                      value:_intValueArray[_selectNum]];
                }
            } else if (self.dropMenuDelegate && _gestureLock == LeftOrRight) {
                if ([self.dropMenuDelegate respondsToSelector:@selector(snapseedDropMenu:atIndex:valueDidChange:)]) {
                    [self.dropMenuDelegate snapseedDropMenu:self atIndex:_selectNum valueDidChange:_intValueArray[_selectNum]];
                }
            }
            self.hidden = YES;
            _gestureLock = NoGestureDirection;
        }
            break;
        case UIGestureRecognizerStateBegan: {
            _gestureLock = NoGestureDirection;
        }
            break;
        case UIGestureRecognizerStateChanged: {
            if (_gestureLock == UpOrDown) {
                self.hidden = NO;
            }
        }
        default:
            break;
    }
    // Read only the incremental translation since the previous event.
    CGPoint movePoint = [panGesture translationInView:_superView];
    [panGesture setTranslation:CGPointZero inView:_superView];
    if (_gestureLock == UpOrDown) {
        // Locked to vertical movement: slide the menu, clamped to its travel range.
        self.y += movePoint.y;
        if (self.y <= (self.originY - self.height + SnapseedDropMenuCellHeight)) {
            self.y = self.originY - self.height + SnapseedDropMenuCellHeight;
        } else if (self.y >= self.originY) {
            self.y = self.originY;
        }
        [self selectCellByOffsetY];
    } else if (_gestureLock == LeftOrRight) {
        // Locked to horizontal movement: adjust the selected value, clamped to [min, max].
        SnapseedDropMenuModel *model = [_dataArray objectAtIndex:_selectNum];
        CGFloat range = model.maxValue - model.minValue;
        // Fix: guard against a zero range (division by zero in the original).
        CGFloat value = (range != 0) ? movePoint.x / range / 50 : 0;
        if (_intValueArray[_selectNum] + value > model.maxValue) {
            _intValueArray[_selectNum] = model.maxValue;
        } else if (_intValueArray[_selectNum] + value < model.minValue) {
            _intValueArray[_selectNum] = model.minValue;
        } else {
            _intValueArray[_selectNum] += value;
        }
        if ([self.dropMenuDelegate respondsToSelector:@selector(snapseedDropMenu:atIndex:isChanging:)]) {
            [self.dropMenuDelegate snapseedDropMenu:self atIndex:_selectNum isChanging:_intValueArray[_selectNum]];
        }
    } else {
        // First movement: decide whether the user intends a vertical or
        // horizontal pan. (Debug NSLog calls removed.)
        if (movePoint.x > -MoveZoom && movePoint.x < MoveZoom) {
            if (movePoint.y <= -MoveZoom || movePoint.y >= MoveZoom) {
                _gestureLock = UpOrDown;
            }
        } else {
            _gestureLock = LeftOrRight;
        }
    }
}

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11134" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="BYZ-38-t0r">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11106"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="tne-QT-ifu">
<objects>
<viewController id="BYZ-38-t0r" customClass="ViewController" customModuleProvider="" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="y3c-jy-aDJ"/>
<viewControllerLayoutGuide type="bottom" id="wfy-db-euE"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="8bC-Xf-vdC">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="dkx-z0-nzr" sceneMemberID="firstResponder"/>
</objects>
</scene>
</scenes>
</document>
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
<document type="com.apple.InterfaceBuilder3.CocoaTouch.Storyboard.XIB" version="3.0" toolsVersion="11134" systemVersion="15F34" targetRuntime="iOS.CocoaTouch" propertyAccessControl="none" useAutolayout="YES" launchScreen="YES" useTraitCollections="YES" colorMatched="YES" initialViewController="01J-lp-oVM">
<dependencies>
<plugIn identifier="com.apple.InterfaceBuilder.IBCocoaTouchPlugin" version="11106"/>
</dependencies>
<scenes>
<!--View Controller-->
<scene sceneID="EHf-IW-A2E">
<objects>
<viewController id="01J-lp-oVM" sceneMemberID="viewController">
<layoutGuides>
<viewControllerLayoutGuide type="top" id="Llm-lL-Icb"/>
<viewControllerLayoutGuide type="bottom" id="xb3-aO-Qok"/>
</layoutGuides>
<view key="view" contentMode="scaleToFill" id="Ze5-6b-2t3">
<rect key="frame" x="0.0" y="0.0" width="375" height="667"/>
<autoresizingMask key="autoresizingMask" widthSizable="YES" heightSizable="YES"/>
<color key="backgroundColor" red="1" green="1" blue="1" alpha="1" colorSpace="custom" customColorSpace="sRGB"/>
</view>
</viewController>
<placeholder placeholderIdentifier="IBFirstResponder" id="iYj-Kq-Ea1" userLabel="First Responder" sceneMemberID="firstResponder"/>
</objects>
<point key="canvasLocation" x="53" y="375"/>
</scene>
</scenes>
</document>
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
# MBProgressHUD [](https://travis-ci.org/matej/MBProgressHUD)
MBProgressHUD is an iOS drop-in class that displays a translucent HUD with an indicator and/or labels while work is being done in a background thread. The HUD is meant as a replacement for the undocumented, private UIKit UIProgressHUD with some additional features.
[](http://dl.dropbox.com/u/378729/MBProgressHUD/1.png)
[](http://dl.dropbox.com/u/378729/MBProgressHUD/2.png)
[](http://dl.dropbox.com/u/378729/MBProgressHUD/3.png)
[](http://dl.dropbox.com/u/378729/MBProgressHUD/4.png)
[](http://dl.dropbox.com/u/378729/MBProgressHUD/5.png)
[](http://dl.dropbox.com/u/378729/MBProgressHUD/6.png)
[](http://dl.dropbox.com/u/378729/MBProgressHUD/7.png)
## Requirements
MBProgressHUD works on any iOS version and is compatible with both ARC and non-ARC projects. It depends on the following Apple frameworks, which should already be included with most Xcode templates:
* Foundation.framework
* UIKit.framework
* CoreGraphics.framework
You will need the latest developer tools in order to build MBProgressHUD. Old Xcode versions might work, but compatibility will not be explicitly maintained.
## Adding MBProgressHUD to your project
### Cocoapods
[CocoaPods](http://cocoapods.org) is the recommended way to add MBProgressHUD to your project.
1. Add a pod entry for MBProgressHUD to your Podfile `pod 'MBProgressHUD', '~> 0.9.2'`
2. Install the pod(s) by running `pod install`.
3. Include MBProgressHUD wherever you need it with `#import "MBProgressHUD.h"`.
### Source files
Alternatively you can directly add the `MBProgressHUD.h` and `MBProgressHUD.m` source files to your project.
1. Download the [latest code version](https://github.com/matej/MBProgressHUD/archive/master.zip) or add the repository as a git submodule to your git-tracked project.
2. Open your project in Xcode, then drag and drop `MBProgressHUD.h` and `MBProgressHUD.m` onto your project (use the "Product Navigator view"). Make sure to select Copy items when asked if you extracted the code archive outside of your project.
3. Include MBProgressHUD wherever you need it with `#import "MBProgressHUD.h"`.
### Static library
You can also add MBProgressHUD as a static library to your project or workspace.
1. Download the [latest code version](https://github.com/matej/MBProgressHUD/downloads) or add the repository as a git submodule to your git-tracked project.
2. Open your project in Xcode, then drag and drop `MBProgressHUD.xcodeproj` onto your project or workspace (use the "Product Navigator view").
3. Select your target and go to the Build phases tab. In the Link Binary With Libraries section select the add button. On the sheet find and add `libMBProgressHUD.a`. You might also need to add `MBProgressHUD` to the Target Dependencies list.
4. Include MBProgressHUD wherever you need it with `#import <MBProgressHUD/MBProgressHUD.h>`.
## Usage
The main guideline you need to follow when dealing with MBProgressHUD while running long-running tasks is keeping the main thread work-free, so the UI can be updated promptly. The recommended way of using MBProgressHUD is therefore to set it up on the main thread and then spin off the task that you want to perform onto a new thread.
```objective-c
[MBProgressHUD showHUDAddedTo:self.view animated:YES];
dispatch_async(dispatch_get_global_queue( DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
// Do something...
dispatch_async(dispatch_get_main_queue(), ^{
[MBProgressHUD hideHUDForView:self.view animated:YES];
});
});
```
If you need to configure the HUD you can do this by using the MBProgressHUD reference that showHUDAddedTo:animated: returns.
```objective-c
MBProgressHUD *hud = [MBProgressHUD showHUDAddedTo:self.view animated:YES];
hud.mode = MBProgressHUDModeAnnularDeterminate;
hud.labelText = @"Loading";
[self doSomethingInBackgroundWithProgressCallback:^(float progress) {
hud.progress = progress;
} completionCallback:^{
[hud hide:YES];
}];
```
UI updates should always be done on the main thread. Some MBProgressHUD setters are however considered "thread safe" and can be called from background threads. Those also include `setMode:`, `setCustomView:`, `setLabelText:`, `setLabelFont:`, `setDetailsLabelText:`, `setDetailsLabelFont:` and `setProgress:`.
If you need to run your long-running task in the main thread, you should perform it with a slight delay, so UIKit will have enough time to update the UI (i.e., draw the HUD) before you block the main thread with your task.
```objective-c
[MBProgressHUD showHUDAddedTo:self.view animated:YES];
dispatch_time_t popTime = dispatch_time(DISPATCH_TIME_NOW, 0.01 * NSEC_PER_SEC);
dispatch_after(popTime, dispatch_get_main_queue(), ^(void){
// Do something...
[MBProgressHUD hideHUDForView:self.view animated:YES];
});
```
You should be aware that any HUD updates issued inside the above block won't be displayed until the block completes.
For more examples, including how to use MBProgressHUD with asynchronous operations such as NSURLConnection, take a look at the bundled demo project. Extensive API documentation is provided in the header file (MBProgressHUD.h).
## License
This code is distributed under the terms and conditions of the [MIT license](LICENSE).
## Change-log
A brief summary of each MBProgressHUD release can be found on the [wiki](https://github.com/matej/MBProgressHUD/wiki/Change-log).
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// MBProgressHUD.m
// Version 0.9.2
// Created by Matej Bukovinski on 2.4.09.
//
#import "MBProgressHUD.h"
#import <tgmath.h>
#if __has_feature(objc_arc)
#define MB_AUTORELEASE(exp) exp
#define MB_RELEASE(exp) exp
#define MB_RETAIN(exp) exp
#else
#define MB_AUTORELEASE(exp) [exp autorelease]
#define MB_RELEASE(exp) [exp release]
#define MB_RETAIN(exp) [exp retain]
#endif
#if __IPHONE_OS_VERSION_MIN_REQUIRED >= 60000
#define MBLabelAlignmentCenter NSTextAlignmentCenter
#else
#define MBLabelAlignmentCenter UITextAlignmentCenter
#endif
#if __IPHONE_OS_VERSION_MIN_REQUIRED >= 70000
#define MB_TEXTSIZE(text, font) [text length] > 0 ? [text \
sizeWithAttributes:@{NSFontAttributeName:font}] : CGSizeZero;
#else
#define MB_TEXTSIZE(text, font) [text length] > 0 ? [text sizeWithFont:font] : CGSizeZero;
#endif
#if __IPHONE_OS_VERSION_MIN_REQUIRED >= 70000
#define MB_MULTILINE_TEXTSIZE(text, font, maxSize, mode) [text length] > 0 ? [text \
boundingRectWithSize:maxSize options:(NSStringDrawingUsesLineFragmentOrigin) \
attributes:@{NSFontAttributeName:font} context:nil].size : CGSizeZero;
#else
#define MB_MULTILINE_TEXTSIZE(text, font, maxSize, mode) [text length] > 0 ? [text \
sizeWithFont:font constrainedToSize:maxSize lineBreakMode:mode] : CGSizeZero;
#endif
#ifndef kCFCoreFoundationVersionNumber_iOS_7_0
#define kCFCoreFoundationVersionNumber_iOS_7_0 847.20
#endif
#ifndef kCFCoreFoundationVersionNumber_iOS_8_0
#define kCFCoreFoundationVersionNumber_iOS_8_0 1129.15
#endif
static const CGFloat kPadding = 4.f;
static const CGFloat kLabelFontSize = 16.f;
static const CGFloat kDetailsLabelFontSize = 12.f;
// Private state for MBProgressHUD.
@interface MBProgressHUD () {
    BOOL useAnimation;          // animation flag captured at show/hide time, reused by timer callbacks
    SEL methodForExecution;     // selector run on a background thread by showWhileExecuting:...
    id targetForExecution;      // retained target for methodForExecution
    id objectForExecution;      // retained argument for methodForExecution
    UILabel *label;             // main label
    UILabel *detailsLabel;      // secondary (details) label
    BOOL isFinished;            // set once the HUD has completely hidden
    CGAffineTransform rotationTransform;  // transform combined with the zoom show/hide animations
}
@property (atomic, MB_STRONG) UIView *indicator;
@property (atomic, MB_STRONG) NSTimer *graceTimer;    // defers showing until graceTime elapses
@property (atomic, MB_STRONG) NSTimer *minShowTimer;  // enforces minShowTime before hiding
@property (atomic, MB_STRONG) NSDate *showStarted;    // timestamp of the last show
@end
@implementation MBProgressHUD
#pragma mark - Properties
@synthesize animationType;
@synthesize delegate;
@synthesize opacity;
@synthesize color;
@synthesize labelFont;
@synthesize labelColor;
@synthesize detailsLabelFont;
@synthesize detailsLabelColor;
@synthesize indicator;
@synthesize xOffset;
@synthesize yOffset;
@synthesize minSize;
@synthesize square;
@synthesize margin;
@synthesize dimBackground;
@synthesize graceTime;
@synthesize minShowTime;
@synthesize graceTimer;
@synthesize minShowTimer;
@synthesize taskInProgress;
@synthesize removeFromSuperViewOnHide;
@synthesize customView;
@synthesize showStarted;
@synthesize mode;
@synthesize labelText;
@synthesize detailsLabelText;
@synthesize progress;
@synthesize size;
@synthesize activityIndicatorColor;
#if NS_BLOCKS_AVAILABLE
@synthesize completionBlock;
#endif
#pragma mark - Class methods
/// Creates a HUD, adds it to `view`, shows it, and returns it.
/// The HUD removes itself from `view` when hidden.
+ (MB_INSTANCETYPE)showHUDAddedTo:(UIView *)view animated:(BOOL)animated {
    MBProgressHUD *progressHUD = [[self alloc] initWithView:view];
    progressHUD.removeFromSuperViewOnHide = YES;
    [view addSubview:progressHUD];
    [progressHUD show:animated];
    return MB_AUTORELEASE(progressHUD);
}
/// Hides the topmost HUD in `view`, if any. Returns YES when a HUD was found.
+ (BOOL)hideHUDForView:(UIView *)view animated:(BOOL)animated {
    MBProgressHUD *foundHUD = [self HUDForView:view];
    if (foundHUD == nil) {
        return NO;
    }
    foundHUD.removeFromSuperViewOnHide = YES;
    [foundHUD hide:animated];
    return YES;
}
/// Hides every HUD found in `view` and returns how many were hidden.
+ (NSUInteger)hideAllHUDsForView:(UIView *)view animated:(BOOL)animated {
    NSArray *allHUDs = [MBProgressHUD allHUDsForView:view];
    for (MBProgressHUD *eachHUD in allHUDs) {
        eachHUD.removeFromSuperViewOnHide = YES;
        [eachHUD hide:animated];
    }
    return [allHUDs count];
}
/// Returns the topmost HUD among `view`'s subviews, or nil if none is present.
+ (MB_INSTANCETYPE)HUDForView:(UIView *)view {
    // Walk subviews topmost-first so the most recently added HUD wins.
    for (UIView *candidate in [view.subviews reverseObjectEnumerator]) {
        if ([candidate isKindOfClass:self]) {
            return (MBProgressHUD *)candidate;
        }
    }
    return nil;
}
/// Returns all HUDs among `view`'s subviews (possibly empty), as an immutable array.
+ (NSArray *)allHUDsForView:(UIView *)view {
    NSMutableArray *matches = [NSMutableArray array];
    for (UIView *candidate in view.subviews) {
        if ([candidate isKindOfClass:self]) {
            [matches addObject:candidate];
        }
    }
    return [NSArray arrayWithArray:matches];
}
#pragma mark - Lifecycle
/// Designated initializer: applies all appearance/behavior defaults and
/// registers for KVO and notifications. The HUD starts fully transparent.
- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        // Set default values for properties
        self.animationType = MBProgressHUDAnimationFade;
        self.mode = MBProgressHUDModeIndeterminate;
        self.labelText = nil;
        self.detailsLabelText = nil;
        self.opacity = 0.8f;
        self.color = nil;
        self.labelFont = [UIFont boldSystemFontOfSize:kLabelFontSize];
        self.labelColor = [UIColor whiteColor];
        self.detailsLabelFont = [UIFont boldSystemFontOfSize:kDetailsLabelFontSize];
        self.detailsLabelColor = [UIColor whiteColor];
        self.activityIndicatorColor = [UIColor whiteColor];
        self.xOffset = 0.0f;
        self.yOffset = 0.0f;
        self.dimBackground = NO;
        self.margin = 20.0f;
        self.cornerRadius = 10.0f;
        self.graceTime = 0.0f;
        self.minShowTime = 0.0f;
        self.removeFromSuperViewOnHide = NO;
        self.minSize = CGSizeZero;
        self.square = NO;
        self.contentMode = UIViewContentModeCenter;
        // Keep the HUD centered when the superview resizes.
        self.autoresizingMask = UIViewAutoresizingFlexibleTopMargin | UIViewAutoresizingFlexibleBottomMargin
        | UIViewAutoresizingFlexibleLeftMargin | UIViewAutoresizingFlexibleRightMargin;
        // Transparent background
        self.opaque = NO;
        self.backgroundColor = [UIColor clearColor];
        // Make it invisible for now; show:/hide: animate alpha.
        self.alpha = 0.0f;
        taskInProgress = NO;
        rotationTransform = CGAffineTransformIdentity;
        [self setupLabels];
        [self updateIndicators];
        [self registerForKVO];
        [self registerForNotifications];
    }
    return self;
}
/// Convenience initializer: sizes the HUD to fill `view`'s bounds.
- (id)initWithView:(UIView *)view {
    NSAssert(view, @"View must not be nil.");
    CGRect targetBounds = view.bounds;
    return [self initWithFrame:targetBounds];
}
/// Convenience initializer: a window is just a view, so delegate to initWithView:.
- (id)initWithWindow:(UIWindow *)window {
    return [self initWithView:window];
}
/// Tears down KVO/notification observers; under MRC also releases owned objects.
- (void)dealloc {
    [self unregisterFromNotifications];
    [self unregisterFromKVO];
#if !__has_feature(objc_arc)
    // Manual-retain-release builds only; ARC handles these automatically.
    [color release];
    [indicator release];
    [label release];
    [detailsLabel release];
    [labelText release];
    [detailsLabelText release];
    [graceTimer release];
    [minShowTimer release];
    [showStarted release];
    [customView release];
    [labelFont release];
    [labelColor release];
    [detailsLabelFont release];
    [detailsLabelColor release];
#if NS_BLOCKS_AVAILABLE
    [completionBlock release];
#endif
    [super dealloc];
#endif
}
#pragma mark - Show & hide
/// Shows the HUD, honoring graceTime: when set, display is deferred so the
/// HUD never flashes for tasks that finish almost immediately.
- (void)show:(BOOL)animated {
    NSAssert([NSThread isMainThread], @"MBProgressHUD needs to be accessed on the main thread.");
    useAnimation = animated;
    // If the grace time is set postpone the HUD display
    if (self.graceTime > 0.0) {
        NSTimer *newGraceTimer = [NSTimer timerWithTimeInterval:self.graceTime target:self selector:@selector(handleGraceTimer:) userInfo:nil repeats:NO];
        // Common modes so the timer still fires during scrolling/tracking.
        [[NSRunLoop currentRunLoop] addTimer:newGraceTimer forMode:NSRunLoopCommonModes];
        self.graceTimer = newGraceTimer;
    }
    // ... otherwise show the HUD immediately
    else {
        [self showUsingAnimation:useAnimation];
    }
}
/// Hides the HUD, honoring minShowTime: when set, hiding is deferred until the
/// HUD has been visible at least that long (avoids flicker for short tasks).
- (void)hide:(BOOL)animated {
    NSAssert([NSThread isMainThread], @"MBProgressHUD needs to be accessed on the main thread.");
    useAnimation = animated;
    // If the minShow time is set, calculate how long the hud was shown,
    // and postpone the hiding operation if necessary
    if (self.minShowTime > 0.0 && showStarted) {
        NSTimeInterval interv = [[NSDate date] timeIntervalSinceDate:showStarted];
        if (interv < self.minShowTime) {
            self.minShowTimer = [NSTimer scheduledTimerWithTimeInterval:(self.minShowTime - interv) target:self
                selector:@selector(handleMinShowTimer:) userInfo:nil repeats:NO];
            return;
        }
    }
    // ... otherwise hide the HUD immediately
    [self hideUsingAnimation:useAnimation];
}
/// Schedules a hide after `delay` seconds (cancelled if the HUD shows again first).
- (void)hide:(BOOL)animated afterDelay:(NSTimeInterval)delay {
    NSNumber *animatedFlag = [NSNumber numberWithBool:animated];
    [self performSelector:@selector(hideDelayed:) withObject:animatedFlag afterDelay:delay];
}
/// Target of the delayed perform in hide:afterDelay:; unboxes the flag and hides.
- (void)hideDelayed:(NSNumber *)animated {
    BOOL shouldAnimate = [animated boolValue];
    [self hide:shouldAnimate];
}
#pragma mark - Timer callbacks
/// Fired when the grace period expires; shows the HUD only if the task is
/// still running (otherwise the HUD is skipped entirely).
- (void)handleGraceTimer:(NSTimer *)theTimer {
    if (!taskInProgress) {
        return;
    }
    [self showUsingAnimation:useAnimation];
}
/// Fired once the minimum show time has elapsed; performs the deferred hide.
- (void)handleMinShowTimer:(NSTimer *)theTimer {
    [self hideUsingAnimation:useAnimation];
}
#pragma mark - View Hierrarchy
/// Re-aligns the HUD with the current orientation when attached to a superview.
- (void)didMoveToSuperview {
    [self updateForCurrentOrientationAnimated:NO];
}
#pragma mark - Internal show & hide operations
/// Performs the actual show: sets up the zoom start transform (if any),
/// records showStarted for minShowTime, then fades/zooms to full alpha.
- (void)showUsingAnimation:(BOOL)animated {
    // Cancel any scheduled hideDelayed: calls
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    [self setNeedsDisplay];
    // Pre-scale for zoom animations; the animation below restores the identity scale.
    if (animated && animationType == MBProgressHUDAnimationZoomIn) {
        self.transform = CGAffineTransformConcat(rotationTransform, CGAffineTransformMakeScale(0.5f, 0.5f));
    } else if (animated && animationType == MBProgressHUDAnimationZoomOut) {
        self.transform = CGAffineTransformConcat(rotationTransform, CGAffineTransformMakeScale(1.5f, 1.5f));
    }
    self.showStarted = [NSDate date];
    // Fade in
    if (animated) {
        [UIView beginAnimations:nil context:NULL];
        [UIView setAnimationDuration:0.30];
        self.alpha = 1.0f;
        if (animationType == MBProgressHUDAnimationZoomIn || animationType == MBProgressHUDAnimationZoomOut) {
            self.transform = rotationTransform;
        }
        [UIView commitAnimations];
    }
    else {
        self.alpha = 1.0f;
    }
}
/// Performs the actual hide: fades/zooms out, then finishes in done (invoked
/// via the animation-did-stop callback, or immediately when not animating).
- (void)hideUsingAnimation:(BOOL)animated {
    // Fade out
    if (animated && showStarted) {
        [UIView beginAnimations:nil context:NULL];
        [UIView setAnimationDuration:0.30];
        [UIView setAnimationDelegate:self];
        [UIView setAnimationDidStopSelector:@selector(animationFinished:finished:context:)];
        // Alpha 0.02 (instead of 0) prevents the HUD from passing touches through
        // during the animation; the HUD gets completely hidden in the done method.
        if (animationType == MBProgressHUDAnimationZoomIn) {
            self.transform = CGAffineTransformConcat(rotationTransform, CGAffineTransformMakeScale(1.5f, 1.5f));
        } else if (animationType == MBProgressHUDAnimationZoomOut) {
            self.transform = CGAffineTransformConcat(rotationTransform, CGAffineTransformMakeScale(0.5f, 0.5f));
        }
        self.alpha = 0.02f;
        [UIView commitAnimations];
    }
    else {
        self.alpha = 0.0f;
        [self done];
    }
    self.showStarted = nil;
}
/// Animation-did-stop callback for the hide animation; completes the hide.
- (void)animationFinished:(NSString *)animationID finished:(BOOL)finished context:(void*)context {
    [self done];
}
/// Final hide step: cancels pending delayed hides, fully hides the view,
/// optionally removes it from its superview, then fires the completion block
/// and the delegate's hudWasHidden: callback.
- (void)done {
    [NSObject cancelPreviousPerformRequestsWithTarget:self];
    isFinished = YES;
    self.alpha = 0.0f;
    if (removeFromSuperViewOnHide) {
        [self removeFromSuperview];
    }
#if NS_BLOCKS_AVAILABLE
    if (self.completionBlock) {
        self.completionBlock();
        // Clear so the block (and anything it captures) is released promptly.
        self.completionBlock = NULL;
    }
#endif
    if ([delegate respondsToSelector:@selector(hudWasHidden:)]) {
        [delegate performSelector:@selector(hudWasHidden:) withObject:self];
    }
}
#pragma mark - Threading
/// Shows the HUD while `method` runs on `target` in a freshly detached thread;
/// the HUD hides itself (via cleanUp) once the method returns.
- (void)showWhileExecuting:(SEL)method onTarget:(id)target withObject:(id)object animated:(BOOL)animated {
    methodForExecution = method;
    // Retained so they outlive the caller; released in cleanUp under MRC.
    targetForExecution = MB_RETAIN(target);
    objectForExecution = MB_RETAIN(object);
    // Launch execution in new thread
    self.taskInProgress = YES;
    [NSThread detachNewThreadSelector:@selector(launchExecution) toTarget:self withObject:nil];
    // Show HUD view
    [self show:animated];
}
#if NS_BLOCKS_AVAILABLE
// Convenience: runs `block` on the default-priority global queue with no
// completion block. See the designated onQueue:completionBlock: variant.
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block {
    [self showAnimated:animated
   whileExecutingBlock:block
               onQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
       completionBlock:NULL];
}
// Convenience: runs `block` on the default-priority global queue and invokes
// `completion` on the main queue once the block has finished.
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block completionBlock:(void (^)())completion {
    [self showAnimated:animated
   whileExecutingBlock:block
               onQueue:dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0)
       completionBlock:completion];
}
// Convenience: same as the designated variant, but without a completion block.
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block onQueue:(dispatch_queue_t)queue {
    MBProgressHUDCompletionBlock noCompletion = NULL;
    [self showAnimated:animated whileExecutingBlock:block onQueue:queue completionBlock:noCompletion];
}
// Designated block-based show: marks the task in progress, dispatches `block`
// to `queue`, and shows the HUD. When the block returns, cleanUp is scheduled
// on the main queue, which hides the HUD and fires `completion`.
// NOTE: the block intentionally captures self strongly so the HUD stays alive
// until the background work finishes — this is not a retain cycle bug.
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block onQueue:(dispatch_queue_t)queue
     completionBlock:(MBProgressHUDCompletionBlock)completion {
    self.taskInProgress = YES;
    self.completionBlock = completion;
    dispatch_async(queue, ^(void) {
        block();
        dispatch_async(dispatch_get_main_queue(), ^(void) {
            [self cleanUp];
        });
    });
    [self show:animated];
}
#endif
// Thread entry point used by showWhileExecuting:onTarget:withObject:animated:.
// Wraps the task in an autorelease pool (required for a detached thread) and
// hops back to the main thread for UI cleanup when the task returns.
- (void)launchExecution {
    @autoreleasepool {
#pragma clang diagnostic push
#pragma clang diagnostic ignored "-Warc-performSelector-leaks"
        // Start executing the requested task
        [targetForExecution performSelector:methodForExecution withObject:objectForExecution];
#pragma clang diagnostic pop
        // Task completed, update view in main thread (note: view operations should
        // be done only in the main thread)
        [self performSelectorOnMainThread:@selector(cleanUp) withObject:nil waitUntilDone:NO];
    }
}
// Tears down after a threaded/block task: releases the retained target and
// object under MRC (balancing MB_RETAIN in showWhileExecuting:...), or nils
// them under ARC, then hides the HUD.
- (void)cleanUp {
    taskInProgress = NO;
#if !__has_feature(objc_arc)
    [targetForExecution release];
    [objectForExecution release];
#else
    targetForExecution = nil;
    objectForExecution = nil;
#endif
    [self hide:useAnimation];
}
#pragma mark - UI
// Creates and configures the title and details labels and adds them as
// subviews. Sizing and positioning happen later in layoutSubviews.
- (void)setupLabels {
    label = [[UILabel alloc] initWithFrame:self.bounds];
    label.adjustsFontSizeToFitWidth = NO;
    label.textAlignment = MBLabelAlignmentCenter;
    label.opaque = NO;
    label.backgroundColor = [UIColor clearColor];
    label.textColor = self.labelColor;
    label.font = self.labelFont;
    label.text = self.labelText;
    [self addSubview:label];
    detailsLabel = [[UILabel alloc] initWithFrame:self.bounds];
    detailsLabel.adjustsFontSizeToFitWidth = NO;
    detailsLabel.textAlignment = MBLabelAlignmentCenter;
    detailsLabel.opaque = NO;
    detailsLabel.backgroundColor = [UIColor clearColor];
    detailsLabel.textColor = self.detailsLabelColor;
    // numberOfLines == 0 lets the details text wrap onto multiple lines.
    detailsLabel.numberOfLines = 0;
    // Font was previously assigned twice (before and after numberOfLines);
    // a single assignment is sufficient.
    detailsLabel.font = self.detailsLabelFont;
    detailsLabel.text = self.detailsLabelText;
    [self addSubview:detailsLabel];
}
// Swaps the indicator subview to match the current mode, reusing the existing
// indicator when it is already of the right class. Called from KVO when
// `mode`, `customView`, or `activityIndicatorColor` change.
- (void)updateIndicators {
    BOOL isActivityIndicator = [indicator isKindOfClass:[UIActivityIndicatorView class]];
    BOOL isRoundIndicator = [indicator isKindOfClass:[MBRoundProgressView class]];
    if (mode == MBProgressHUDModeIndeterminate) {
        if (!isActivityIndicator) {
            // Update to indeterminate indicator
            [indicator removeFromSuperview];
            self.indicator = MB_AUTORELEASE([[UIActivityIndicatorView alloc]
                                             initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleWhiteLarge]);
            [(UIActivityIndicatorView *)indicator startAnimating];
            [self addSubview:indicator];
        }
#if __IPHONE_OS_VERSION_MIN_REQUIRED >= 50000
        [(UIActivityIndicatorView *)indicator setColor:self.activityIndicatorColor];
#endif
    }
    else if (mode == MBProgressHUDModeDeterminateHorizontalBar) {
        // Update to bar determinate indicator
        [indicator removeFromSuperview];
        self.indicator = MB_AUTORELEASE([[MBBarProgressView alloc] init]);
        [self addSubview:indicator];
    }
    else if (mode == MBProgressHUDModeDeterminate || mode == MBProgressHUDModeAnnularDeterminate) {
        if (!isRoundIndicator) {
            // Update to determinante indicator
            [indicator removeFromSuperview];
            self.indicator = MB_AUTORELEASE([[MBRoundProgressView alloc] init]);
            [self addSubview:indicator];
        }
        if (mode == MBProgressHUDModeAnnularDeterminate) {
            [(MBRoundProgressView *)indicator setAnnular:YES];
        }
        [(MBRoundProgressView *)indicator setProgressTintColor:self.activityIndicatorColor];
        [(MBRoundProgressView *)indicator setBackgroundTintColor:[self.activityIndicatorColor colorWithAlphaComponent:0.1f]];
    }
    else if (mode == MBProgressHUDModeCustomView && customView != indicator) {
        // Update custom view indicator
        [indicator removeFromSuperview];
        self.indicator = customView;
        [self addSubview:indicator];
    } else if (mode == MBProgressHUDModeText) {
        // Text-only mode shows no indicator at all.
        [indicator removeFromSuperview];
        self.indicator = nil;
    }
}
#pragma mark - Layout
// Measures the indicator and both labels, stacks them vertically (separated by
// kPadding) centered in the parent, then stores the resulting box size in the
// `size` ivar for drawRect: to draw the rounded background.
- (void)layoutSubviews {
    [super layoutSubviews];
    // Entirely cover the parent view
    UIView *parent = self.superview;
    if (parent) {
        self.frame = parent.bounds;
    }
    CGRect bounds = self.bounds;
    // Determine the total width and height needed
    CGFloat maxWidth = bounds.size.width - 4 * margin;
    CGSize totalSize = CGSizeZero;
    CGRect indicatorF = indicator.bounds;
    indicatorF.size.width = MIN(indicatorF.size.width, maxWidth);
    totalSize.width = MAX(totalSize.width, indicatorF.size.width);
    totalSize.height += indicatorF.size.height;
    CGSize labelSize = MB_TEXTSIZE(label.text, label.font);
    labelSize.width = MIN(labelSize.width, maxWidth);
    totalSize.width = MAX(totalSize.width, labelSize.width);
    totalSize.height += labelSize.height;
    // Only pad between elements that are actually visible (non-zero height).
    if (labelSize.height > 0.f && indicatorF.size.height > 0.f) {
        totalSize.height += kPadding;
    }
    CGFloat remainingHeight = bounds.size.height - totalSize.height - kPadding - 4 * margin;
    CGSize maxSize = CGSizeMake(maxWidth, remainingHeight);
    CGSize detailsLabelSize = MB_MULTILINE_TEXTSIZE(detailsLabel.text, detailsLabel.font, maxSize, detailsLabel.lineBreakMode);
    totalSize.width = MAX(totalSize.width, detailsLabelSize.width);
    totalSize.height += detailsLabelSize.height;
    if (detailsLabelSize.height > 0.f && (indicatorF.size.height > 0.f || labelSize.height > 0.f)) {
        totalSize.height += kPadding;
    }
    totalSize.width += 2 * margin;
    totalSize.height += 2 * margin;
    // Position elements (xOffset/yOffset shift the whole stack off-center)
    CGFloat yPos = round(((bounds.size.height - totalSize.height) / 2)) + margin + yOffset;
    CGFloat xPos = xOffset;
    indicatorF.origin.y = yPos;
    indicatorF.origin.x = round((bounds.size.width - indicatorF.size.width) / 2) + xPos;
    indicator.frame = indicatorF;
    yPos += indicatorF.size.height;
    if (labelSize.height > 0.f && indicatorF.size.height > 0.f) {
        yPos += kPadding;
    }
    CGRect labelF;
    labelF.origin.y = yPos;
    labelF.origin.x = round((bounds.size.width - labelSize.width) / 2) + xPos;
    labelF.size = labelSize;
    label.frame = labelF;
    yPos += labelF.size.height;
    if (detailsLabelSize.height > 0.f && (indicatorF.size.height > 0.f || labelSize.height > 0.f)) {
        yPos += kPadding;
    }
    CGRect detailsLabelF;
    detailsLabelF.origin.y = yPos;
    detailsLabelF.origin.x = round((bounds.size.width - detailsLabelSize.width) / 2) + xPos;
    detailsLabelF.size = detailsLabelSize;
    detailsLabel.frame = detailsLabelF;
    // Enforce min size and square rules
    if (square) {
        CGFloat max = MAX(totalSize.width, totalSize.height);
        if (max <= bounds.size.width - 2 * margin) {
            totalSize.width = max;
        }
        if (max <= bounds.size.height - 2 * margin) {
            totalSize.height = max;
        }
    }
    if (totalSize.width < minSize.width) {
        totalSize.width = minSize.width;
    }
    if (totalSize.height < minSize.height) {
        totalSize.height = minSize.height;
    }
    // Consumed by drawRect: to size the rounded background box.
    size = totalSize;
}
#pragma mark BG Drawing
// Draws the optional dimmed radial-gradient backdrop and the rounded HUD
// background box (sized by layoutSubviews via the `size` ivar).
- (void)drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    UIGraphicsPushContext(context);
    if (self.dimBackground) {
        //Gradient colours: transparent center fading to 75% black at the edge
        size_t gradLocationsNum = 2;
        CGFloat gradLocations[2] = {0.0f, 1.0f};
        CGFloat gradColors[8] = {0.0f,0.0f,0.0f,0.0f,0.0f,0.0f,0.0f,0.75f};
        CGColorSpaceRef colorSpace = CGColorSpaceCreateDeviceRGB();
        CGGradientRef gradient = CGGradientCreateWithColorComponents(colorSpace, gradColors, gradLocations, gradLocationsNum);
        CGColorSpaceRelease(colorSpace);
        //Gradient center
        CGPoint gradCenter= CGPointMake(self.bounds.size.width/2, self.bounds.size.height/2);
        //Gradient radius
        float gradRadius = MIN(self.bounds.size.width , self.bounds.size.height) ;
        //Gradient draw
        CGContextDrawRadialGradient (context, gradient, gradCenter,
                                     0, gradCenter, gradRadius,
                                     kCGGradientDrawsAfterEndLocation);
        CGGradientRelease(gradient);
    }
    // Set background rect color (falls back to gray at `opacity` if unset)
    if (self.color) {
        CGContextSetFillColorWithColor(context, self.color.CGColor);
    } else {
        CGContextSetGrayFillColor(context, 0.0f, self.opacity);
    }
    // Center HUD
    CGRect allRect = self.bounds;
    // Draw rounded HUD background rect: four corner arcs joined into one path
    CGRect boxRect = CGRectMake(round((allRect.size.width - size.width) / 2) + self.xOffset,
                                round((allRect.size.height - size.height) / 2) + self.yOffset, size.width, size.height);
    float radius = self.cornerRadius;
    CGContextBeginPath(context);
    CGContextMoveToPoint(context, CGRectGetMinX(boxRect) + radius, CGRectGetMinY(boxRect));
    CGContextAddArc(context, CGRectGetMaxX(boxRect) - radius, CGRectGetMinY(boxRect) + radius, radius, 3 * (float)M_PI / 2, 0, 0);
    CGContextAddArc(context, CGRectGetMaxX(boxRect) - radius, CGRectGetMaxY(boxRect) - radius, radius, 0, (float)M_PI / 2, 0);
    CGContextAddArc(context, CGRectGetMinX(boxRect) + radius, CGRectGetMaxY(boxRect) - radius, radius, (float)M_PI / 2, (float)M_PI, 0);
    CGContextAddArc(context, CGRectGetMinX(boxRect) + radius, CGRectGetMinY(boxRect) + radius, radius, (float)M_PI, 3 * (float)M_PI / 2, 0);
    CGContextClosePath(context);
    CGContextFillPath(context);
    UIGraphicsPopContext();
}
#pragma mark - KVO
// Observes every keypath in observableKeypaths so the UI mirrors property
// changes (handled in observeValueForKeyPath:...).
- (void)registerForKVO {
    [[self observableKeypaths] enumerateObjectsUsingBlock:^(NSString *keyPath, NSUInteger idx, BOOL *stop) {
        [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:NULL];
    }];
}
// Balances registerForKVO; must run before deallocation.
- (void)unregisterFromKVO {
    [[self observableKeypaths] enumerateObjectsUsingBlock:^(NSString *keyPath, NSUInteger idx, BOOL *stop) {
        [self removeObserver:self forKeyPath:keyPath];
    }];
}
// Keypaths whose changes must trigger a UI refresh (see updateUIForKeypath:).
// Uses a modern array literal instead of arrayWithObjects:...,nil.
- (NSArray *)observableKeypaths {
    return @[@"mode", @"customView", @"labelText", @"labelFont", @"labelColor",
             @"detailsLabelText", @"detailsLabelFont", @"detailsLabelColor", @"progress", @"activityIndicatorColor"];
}
// Funnels every observed property change onto the main thread before touching
// any UIKit state.
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    if ([NSThread isMainThread]) {
        [self updateUIForKeypath:keyPath];
    } else {
        [self performSelectorOnMainThread:@selector(updateUIForKeypath:) withObject:keyPath waitUntilDone:NO];
    }
}
// Applies a single observed property change to the corresponding subview,
// then requests layout and redraw. Must run on the main thread (enforced by
// observeValueForKeyPath:...). The "progress" branch returns early because a
// progress change needs no re-layout of the HUD box.
- (void)updateUIForKeypath:(NSString *)keyPath {
    if ([keyPath isEqualToString:@"mode"] || [keyPath isEqualToString:@"customView"] ||
        [keyPath isEqualToString:@"activityIndicatorColor"]) {
        [self updateIndicators];
    } else if ([keyPath isEqualToString:@"labelText"]) {
        label.text = self.labelText;
    } else if ([keyPath isEqualToString:@"labelFont"]) {
        label.font = self.labelFont;
    } else if ([keyPath isEqualToString:@"labelColor"]) {
        label.textColor = self.labelColor;
    } else if ([keyPath isEqualToString:@"detailsLabelText"]) {
        detailsLabel.text = self.detailsLabelText;
    } else if ([keyPath isEqualToString:@"detailsLabelFont"]) {
        detailsLabel.font = self.detailsLabelFont;
    } else if ([keyPath isEqualToString:@"detailsLabelColor"]) {
        detailsLabel.textColor = self.detailsLabelColor;
    } else if ([keyPath isEqualToString:@"progress"]) {
        // Forward via KVC only when the indicator actually supports progress.
        if ([indicator respondsToSelector:@selector(setProgress:)]) {
            [(id)indicator setValue:@(progress) forKey:@"progress"];
        }
        return;
    }
    [self setNeedsLayout];
    [self setNeedsDisplay];
}
#pragma mark - Notifications
// Subscribes to status bar orientation changes so the HUD can re-orient
// itself. Compiled out on tvOS, which has no status bar orientation.
- (void)registerForNotifications {
#if !TARGET_OS_TV
    NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];
    [nc addObserver:self selector:@selector(statusBarOrientationDidChange:)
               name:UIApplicationDidChangeStatusBarOrientationNotification object:nil];
#endif
}
// Balances registerForNotifications; must run before deallocation.
- (void)unregisterFromNotifications {
#if !TARGET_OS_TV
    NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];
    [nc removeObserver:self name:UIApplicationDidChangeStatusBarOrientationNotification object:nil];
#endif
}
#if !TARGET_OS_TV
// Re-orients the HUD when the status bar orientation changes. Skips the work
// when the HUD is not attached to a view hierarchy.
- (void)statusBarOrientationDidChange:(NSNotification *)notification {
    if (!self.superview) {
        return;
    }
    [self updateForCurrentOrientationAnimated:YES];
}
#endif
// Syncs the HUD's bounds with its superview and — on pre-iOS 8, when attached
// directly to a UIWindow — applies a rotation transform matching the status
// bar orientation (window coordinates did not auto-rotate before iOS 8).
- (void)updateForCurrentOrientationAnimated:(BOOL)animated {
    // Stay in sync with the superview in any case
    if (self.superview) {
        self.bounds = self.superview.bounds;
        [self setNeedsDisplay];
    }
    // Not needed on iOS 8+, compile out when the deployment target allows,
    // to avoid sharedApplication problems on extension targets
#if __IPHONE_OS_VERSION_MIN_REQUIRED < 80000
    // Only needed pre iOS 7 when added to a window
    BOOL iOS8OrLater = kCFCoreFoundationVersionNumber >= kCFCoreFoundationVersionNumber_iOS_8_0;
    if (iOS8OrLater || ![self.superview isKindOfClass:[UIWindow class]]) return;
    UIInterfaceOrientation orientation = [UIApplication sharedApplication].statusBarOrientation;
    CGFloat radians = 0;
    if (UIInterfaceOrientationIsLandscape(orientation)) {
        if (orientation == UIInterfaceOrientationLandscapeLeft) { radians = -(CGFloat)M_PI_2; }
        else { radians = (CGFloat)M_PI_2; }
        // Window coordinates differ!
        self.bounds = CGRectMake(0, 0, self.bounds.size.height, self.bounds.size.width);
    } else {
        if (orientation == UIInterfaceOrientationPortraitUpsideDown) { radians = (CGFloat)M_PI; }
        else { radians = 0; }
    }
    // rotationTransform is reused by the show/hide zoom animations.
    rotationTransform = CGAffineTransformMakeRotation(radians);
    if (animated) {
        [UIView beginAnimations:nil context:nil];
        [UIView setAnimationDuration:0.3];
    }
    [self setTransform:rotationTransform];
    if (animated) {
        [UIView commitAnimations];
    }
#endif
}
@end
// A round, pie-chart-like (or annular/ring) determinate progress view, used
// by MBProgressHUDModeDeterminate and MBProgressHUDModeAnnularDeterminate.
@implementation MBRoundProgressView
#pragma mark - Lifecycle
// Default size is 37x37 points (matches the large activity indicator).
- (id)init {
    return [self initWithFrame:CGRectMake(0.f, 0.f, 37.f, 37.f)];
}
// Designated initializer: transparent background, white progress tint over a
// 10%-white track, progress starting at 0, pie (non-annular) style.
- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        self.backgroundColor = [UIColor clearColor];
        self.opaque = NO;
        _progress = 0.f;
        _annular = NO;
        _progressTintColor = [[UIColor alloc] initWithWhite:1.f alpha:1.f];
        _backgroundTintColor = [[UIColor alloc] initWithWhite:1.f alpha:.1f];
        [self registerForKVO];
    }
    return self;
}
// Removes KVO observers; releases owned colors under MRC.
- (void)dealloc {
    [self unregisterFromKVO];
#if !__has_feature(objc_arc)
    [_progressTintColor release];
    [_backgroundTintColor release];
    [super dealloc];
#endif
}
#pragma mark - Drawing
// Draws either an annular ring (track + progress arc starting at 12 o'clock)
// or a filled pie slice inside a stroked circle, depending on `annular`.
- (void)drawRect:(CGRect)rect {
    CGRect allRect = self.bounds;
    CGRect circleRect = CGRectInset(allRect, 2.0f, 2.0f);
    CGContextRef context = UIGraphicsGetCurrentContext();
    if (_annular) {
        // Draw background (pre-iOS 7 used a thicker, round-capped ring)
        BOOL isPreiOS7 = kCFCoreFoundationVersionNumber < kCFCoreFoundationVersionNumber_iOS_7_0;
        CGFloat lineWidth = isPreiOS7 ? 5.f : 2.f;
        UIBezierPath *processBackgroundPath = [UIBezierPath bezierPath];
        processBackgroundPath.lineWidth = lineWidth;
        processBackgroundPath.lineCapStyle = kCGLineCapButt;
        CGPoint center = CGPointMake(self.bounds.size.width/2, self.bounds.size.height/2);
        CGFloat radius = (self.bounds.size.width - lineWidth)/2;
        CGFloat startAngle = - ((float)M_PI / 2); // 90 degrees
        CGFloat endAngle = (2 * (float)M_PI) + startAngle;
        [processBackgroundPath addArcWithCenter:center radius:radius startAngle:startAngle endAngle:endAngle clockwise:YES];
        [_backgroundTintColor set];
        [processBackgroundPath stroke];
        // Draw progress
        UIBezierPath *processPath = [UIBezierPath bezierPath];
        processPath.lineCapStyle = isPreiOS7 ? kCGLineCapRound : kCGLineCapSquare;
        processPath.lineWidth = lineWidth;
        endAngle = (self.progress * 2 * (float)M_PI) + startAngle;
        [processPath addArcWithCenter:center radius:radius startAngle:startAngle endAngle:endAngle clockwise:YES];
        [_progressTintColor set];
        [processPath stroke];
    } else {
        // Draw background
        [_progressTintColor setStroke];
        [_backgroundTintColor setFill];
        CGContextSetLineWidth(context, 2.0f);
        CGContextFillEllipseInRect(context, circleRect);
        CGContextStrokeEllipseInRect(context, circleRect);
        // Draw progress as a filled pie wedge from 12 o'clock
        CGPoint center = CGPointMake(allRect.size.width / 2, allRect.size.height / 2);
        CGFloat radius = (allRect.size.width - 4) / 2;
        CGFloat startAngle = - ((float)M_PI / 2); // 90 degrees
        CGFloat endAngle = (self.progress * 2 * (float)M_PI) + startAngle;
        [_progressTintColor setFill];
        CGContextMoveToPoint(context, center.x, center.y);
        CGContextAddArc(context, center.x, center.y, radius, startAngle, endAngle, 0);
        CGContextClosePath(context);
        CGContextFillPath(context);
    }
}
#pragma mark - KVO
// Observes own appearance properties and redraws on any change.
- (void)registerForKVO {
    for (NSString *keyPath in [self observableKeypaths]) {
        [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:NULL];
    }
}
- (void)unregisterFromKVO {
    for (NSString *keyPath in [self observableKeypaths]) {
        [self removeObserver:self forKeyPath:keyPath];
    }
}
- (NSArray *)observableKeypaths {
    return [NSArray arrayWithObjects:@"progressTintColor", @"backgroundTintColor", @"progress", @"annular", nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    [self setNeedsDisplay];
}
@end
// A rounded horizontal progress bar, used by
// MBProgressHUDModeDeterminateHorizontalBar.
@implementation MBBarProgressView
#pragma mark - Lifecycle
// Default size is 120x20 points.
- (id)init {
    return [self initWithFrame:CGRectMake(.0f, .0f, 120.0f, 20.0f)];
}
// Designated initializer: white border and fill over a clear remaining area.
- (id)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        _progress = 0.f;
        // NOTE(review): these singleton colors are assigned without a retain;
        // the MRC branch of dealloc releases them, which looks like an
        // over-release under non-ARC builds — confirm if MRC is ever used.
        _lineColor = [UIColor whiteColor];
        _progressColor = [UIColor whiteColor];
        _progressRemainingColor = [UIColor clearColor];
        self.backgroundColor = [UIColor clearColor];
        self.opaque = NO;
        [self registerForKVO];
    }
    return self;
}
// Removes KVO observers; releases colors under MRC (see NOTE in init).
- (void)dealloc {
    [self unregisterFromKVO];
#if !__has_feature(objc_arc)
    [_lineColor release];
    [_progressColor release];
    [_progressRemainingColor release];
    [super dealloc];
#endif
}
#pragma mark - Drawing
// Draws the bar in three passes: rounded background fill, rounded border
// stroke, then the progress fill clipped to one of three cases depending on
// whether the leading edge lies in the left cap, the middle, or the right cap.
- (void)drawRect:(CGRect)rect {
    CGContextRef context = UIGraphicsGetCurrentContext();
    CGContextSetLineWidth(context, 2);
    CGContextSetStrokeColorWithColor(context,[_lineColor CGColor]);
    CGContextSetFillColorWithColor(context, [_progressRemainingColor CGColor]);
    // Draw background
    float radius = (rect.size.height / 2) - 2;
    CGContextMoveToPoint(context, 2, rect.size.height/2);
    CGContextAddArcToPoint(context, 2, 2, radius + 2, 2, radius);
    CGContextAddLineToPoint(context, rect.size.width - radius - 2, 2);
    CGContextAddArcToPoint(context, rect.size.width - 2, 2, rect.size.width - 2, rect.size.height / 2, radius);
    CGContextAddArcToPoint(context, rect.size.width - 2, rect.size.height - 2, rect.size.width - radius - 2, rect.size.height - 2, radius);
    CGContextAddLineToPoint(context, radius + 2, rect.size.height - 2);
    CGContextAddArcToPoint(context, 2, rect.size.height - 2, 2, rect.size.height/2, radius);
    CGContextFillPath(context);
    // Draw border (same path again, stroked this time)
    CGContextMoveToPoint(context, 2, rect.size.height/2);
    CGContextAddArcToPoint(context, 2, 2, radius + 2, 2, radius);
    CGContextAddLineToPoint(context, rect.size.width - radius - 2, 2);
    CGContextAddArcToPoint(context, rect.size.width - 2, 2, rect.size.width - 2, rect.size.height / 2, radius);
    CGContextAddArcToPoint(context, rect.size.width - 2, rect.size.height - 2, rect.size.width - radius - 2, rect.size.height - 2, radius);
    CGContextAddLineToPoint(context, radius + 2, rect.size.height - 2);
    CGContextAddArcToPoint(context, 2, rect.size.height - 2, 2, rect.size.height/2, radius);
    CGContextStrokePath(context);
    CGContextSetFillColorWithColor(context, [_progressColor CGColor]);
    radius = radius - 2;
    float amount = self.progress * rect.size.width;
    // Progress in the middle area
    if (amount >= radius + 4 && amount <= (rect.size.width - radius - 4)) {
        CGContextMoveToPoint(context, 4, rect.size.height/2);
        CGContextAddArcToPoint(context, 4, 4, radius + 4, 4, radius);
        CGContextAddLineToPoint(context, amount, 4);
        CGContextAddLineToPoint(context, amount, radius + 4);
        CGContextMoveToPoint(context, 4, rect.size.height/2);
        CGContextAddArcToPoint(context, 4, rect.size.height - 4, radius + 4, rect.size.height - 4, radius);
        CGContextAddLineToPoint(context, amount, rect.size.height - 4);
        CGContextAddLineToPoint(context, amount, radius + 4);
        CGContextFillPath(context);
    }
    // Progress in the right arc
    else if (amount > radius + 4) {
        float x = amount - (rect.size.width - radius - 4);
        CGContextMoveToPoint(context, 4, rect.size.height/2);
        CGContextAddArcToPoint(context, 4, 4, radius + 4, 4, radius);
        CGContextAddLineToPoint(context, rect.size.width - radius - 4, 4);
        float angle = -acos(x/radius);
        if (isnan(angle)) angle = 0;
        CGContextAddArc(context, rect.size.width - radius - 4, rect.size.height/2, radius, M_PI, angle, 0);
        CGContextAddLineToPoint(context, amount, rect.size.height/2);
        CGContextMoveToPoint(context, 4, rect.size.height/2);
        CGContextAddArcToPoint(context, 4, rect.size.height - 4, radius + 4, rect.size.height - 4, radius);
        CGContextAddLineToPoint(context, rect.size.width - radius - 4, rect.size.height - 4);
        angle = acos(x/radius);
        if (isnan(angle)) angle = 0;
        CGContextAddArc(context, rect.size.width - radius - 4, rect.size.height/2, radius, -M_PI, angle, 1);
        CGContextAddLineToPoint(context, amount, rect.size.height/2);
        CGContextFillPath(context);
    }
    // Progress is in the left arc
    else if (amount < radius + 4 && amount > 0) {
        CGContextMoveToPoint(context, 4, rect.size.height/2);
        CGContextAddArcToPoint(context, 4, 4, radius + 4, 4, radius);
        CGContextAddLineToPoint(context, radius + 4, rect.size.height/2);
        CGContextMoveToPoint(context, 4, rect.size.height/2);
        CGContextAddArcToPoint(context, 4, rect.size.height - 4, radius + 4, rect.size.height - 4, radius);
        CGContextAddLineToPoint(context, radius + 4, rect.size.height/2);
        CGContextFillPath(context);
    }
}
#pragma mark - KVO
// Observes own appearance properties and redraws on any change.
- (void)registerForKVO {
    for (NSString *keyPath in [self observableKeypaths]) {
        [self addObserver:self forKeyPath:keyPath options:NSKeyValueObservingOptionNew context:NULL];
    }
}
- (void)unregisterFromKVO {
    for (NSString *keyPath in [self observableKeypaths]) {
        [self removeObserver:self forKeyPath:keyPath];
    }
}
- (NSArray *)observableKeypaths {
    return [NSArray arrayWithObjects:@"lineColor", @"progressRemainingColor", @"progressColor", @"progress", nil];
}
- (void)observeValueForKeyPath:(NSString *)keyPath ofObject:(id)object change:(NSDictionary *)change context:(void *)context {
    [self setNeedsDisplay];
}
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// MBProgressHUD.h
// Version 0.9.2
// Created by Matej Bukovinski on 2.4.09.
//
// This code is distributed under the terms and conditions of the MIT license.
// Copyright (c) 2009-2015 Matej Bukovinski
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#import <Foundation/Foundation.h>
#import <UIKit/UIKit.h>
#import <CoreGraphics/CoreGraphics.h>
@protocol MBProgressHUDDelegate;
/// Operation modes for MBProgressHUD; selects which indicator view is shown.
typedef NS_ENUM(NSInteger, MBProgressHUDMode) {
    /** Progress is shown using an UIActivityIndicatorView. This is the default. */
    MBProgressHUDModeIndeterminate,
    /** Progress is shown using a round, pie-chart like, progress view. */
    MBProgressHUDModeDeterminate,
    /** Progress is shown using a horizontal progress bar */
    MBProgressHUDModeDeterminateHorizontalBar,
    /** Progress is shown using a ring-shaped progress view. */
    MBProgressHUDModeAnnularDeterminate,
    /** Shows a custom view */
    MBProgressHUDModeCustomView,
    /** Shows only labels */
    MBProgressHUDModeText
};
/// Animation styles used when showing and hiding the HUD.
typedef NS_ENUM(NSInteger, MBProgressHUDAnimation) {
    /** Opacity animation */
    MBProgressHUDAnimationFade,
    /** Opacity + scale animation */
    MBProgressHUDAnimationZoom,
    /** Opacity + scale animation, zooming out on hide. Alias for MBProgressHUDAnimationZoom. */
    MBProgressHUDAnimationZoomOut = MBProgressHUDAnimationZoom,
    /** Opacity + scale animation, zooming in on hide. */
    MBProgressHUDAnimationZoomIn
};
// MB_INSTANCETYPE: `instancetype` where the compiler supports it, `id` otherwise.
#ifndef MB_INSTANCETYPE
#if __has_feature(objc_instancetype)
#define MB_INSTANCETYPE instancetype
#else
#define MB_INSTANCETYPE id
#endif
#endif
// MB_STRONG: property ownership attribute — `strong` under ARC, `retain` under MRC.
#ifndef MB_STRONG
#if __has_feature(objc_arc)
#define MB_STRONG strong
#else
#define MB_STRONG retain
#endif
#endif
// MB_WEAK: zeroing `weak` where available; `unsafe_unretained` under ARC
// without weak references; `assign` under MRC.
#ifndef MB_WEAK
#if __has_feature(objc_arc_weak)
#define MB_WEAK weak
#elif __has_feature(objc_arc)
#define MB_WEAK unsafe_unretained
#else
#define MB_WEAK assign
#endif
#endif
#if NS_BLOCKS_AVAILABLE
/// A block invoked after the HUD has been completely hidden. Takes no
/// arguments. `(void)` replaces the deprecated empty C prototype `()`,
/// which clang flags under -Wstrict-prototypes; callers are unaffected.
typedef void (^MBProgressHUDCompletionBlock)(void);
#endif
/**
* Displays a simple HUD window containing a progress indicator and two optional labels for short messages.
*
* This is a simple drop-in class for displaying a progress HUD view similar to Apple's private UIProgressHUD class.
* The MBProgressHUD window spans over the entire space given to it by the initWithFrame constructor and catches all
* user input on this region, thereby preventing the user operations on components below the view. The HUD itself is
* drawn centered as a rounded semi-transparent view which resizes depending on the user specified content.
*
 * This view supports several modes of operation, including:
* - MBProgressHUDModeIndeterminate - shows a UIActivityIndicatorView
* - MBProgressHUDModeDeterminate - shows a custom round progress indicator
* - MBProgressHUDModeAnnularDeterminate - shows a custom annular progress indicator
* - MBProgressHUDModeCustomView - shows an arbitrary, user specified view (see `customView`)
*
 * All modes can have optional labels assigned:
* - If the labelText property is set and non-empty then a label containing the provided content is placed below the
* indicator view.
* - If also the detailsLabelText property is set then another label is placed below the first label.
*/
@interface MBProgressHUD : UIView
/**
* Creates a new HUD, adds it to provided view and shows it. The counterpart to this method is hideHUDForView:animated:.
*
* @note This method sets `removeFromSuperViewOnHide`. The HUD will automatically be removed from the view hierarchy when hidden.
*
* @param view The view that the HUD will be added to
* @param animated If set to YES the HUD will appear using the current animationType. If set to NO the HUD will not use
* animations while appearing.
* @return A reference to the created HUD.
*
* @see hideHUDForView:animated:
* @see animationType
*/
+ (MB_INSTANCETYPE)showHUDAddedTo:(UIView *)view animated:(BOOL)animated;
/**
* Finds the top-most HUD subview and hides it. The counterpart to this method is showHUDAddedTo:animated:.
*
* @note This method sets `removeFromSuperViewOnHide`. The HUD will automatically be removed from the view hierarchy when hidden.
*
* @param view The view that is going to be searched for a HUD subview.
* @param animated If set to YES the HUD will disappear using the current animationType. If set to NO the HUD will not use
* animations while disappearing.
* @return YES if a HUD was found and removed, NO otherwise.
*
* @see showHUDAddedTo:animated:
* @see animationType
*/
+ (BOOL)hideHUDForView:(UIView *)view animated:(BOOL)animated;
/**
* Finds all the HUD subviews and hides them.
*
* @note This method sets `removeFromSuperViewOnHide`. The HUDs will automatically be removed from the view hierarchy when hidden.
*
* @param view The view that is going to be searched for HUD subviews.
* @param animated If set to YES the HUDs will disappear using the current animationType. If set to NO the HUDs will not use
* animations while disappearing.
* @return the number of HUDs found and removed.
*
* @see hideHUDForView:animated:
* @see animationType
*/
+ (NSUInteger)hideAllHUDsForView:(UIView *)view animated:(BOOL)animated;
/**
* Finds the top-most HUD subview and returns it.
*
* @param view The view that is going to be searched.
* @return A reference to the last HUD subview discovered.
*/
+ (MB_INSTANCETYPE)HUDForView:(UIView *)view;
/**
* Finds all HUD subviews and returns them.
*
* @param view The view that is going to be searched.
* @return All found HUD views (array of MBProgressHUD objects).
*/
+ (NSArray *)allHUDsForView:(UIView *)view;
/**
* A convenience constructor that initializes the HUD with the window's bounds. Calls the designated constructor with
* window.bounds as the parameter.
*
* @param window The window instance that will provide the bounds for the HUD. Should be the same instance as
* the HUD's superview (i.e., the window that the HUD will be added to).
*/
- (id)initWithWindow:(UIWindow *)window;
/**
* A convenience constructor that initializes the HUD with the view's bounds. Calls the designated constructor with
* view.bounds as the parameter
*
* @param view The view instance that will provide the bounds for the HUD. Should be the same instance as
* the HUD's superview (i.e., the view that the HUD will be added to).
*/
- (id)initWithView:(UIView *)view;
/**
* Display the HUD. You need to make sure that the main thread completes its run loop soon after this method call so
* the user interface can be updated. Call this method when your task is already set-up to be executed in a new thread
* (e.g., when using something like NSOperation or calling an asynchronous call like NSURLRequest).
*
* @param animated If set to YES the HUD will appear using the current animationType. If set to NO the HUD will not use
* animations while appearing.
*
* @see animationType
*/
- (void)show:(BOOL)animated;
/**
* Hide the HUD. This still calls the hudWasHidden: delegate. This is the counterpart of the show: method. Use it to
* hide the HUD when your task completes.
*
* @param animated If set to YES the HUD will disappear using the current animationType. If set to NO the HUD will not use
* animations while disappearing.
*
* @see animationType
*/
- (void)hide:(BOOL)animated;
/**
* Hide the HUD after a delay. This still calls the hudWasHidden: delegate. This is the counterpart of the show: method. Use it to
* hide the HUD when your task completes.
*
* @param animated If set to YES the HUD will disappear using the current animationType. If set to NO the HUD will not use
* animations while disappearing.
* @param delay Delay in seconds until the HUD is hidden.
*
* @see animationType
*/
- (void)hide:(BOOL)animated afterDelay:(NSTimeInterval)delay;
/**
* Shows the HUD while a background task is executing in a new thread, then hides the HUD.
*
* This method also takes care of autorelease pools so your method does not have to be concerned with setting up a
* pool.
*
* @param method The method to be executed while the HUD is shown. This method will be executed in a new thread.
* @param target The object that the target method belongs to.
* @param object An optional object to be passed to the method.
* @param animated If set to YES the HUD will (dis)appear using the current animationType. If set to NO the HUD will not use
* animations while (dis)appearing.
*/
- (void)showWhileExecuting:(SEL)method onTarget:(id)target withObject:(id)object animated:(BOOL)animated;
#if NS_BLOCKS_AVAILABLE
/**
* Shows the HUD while a block is executing on a background queue, then hides the HUD.
*
* @see showAnimated:whileExecutingBlock:onQueue:completionBlock:
*/
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block;
/**
* Shows the HUD while a block is executing on a background queue, then hides the HUD.
*
* @see showAnimated:whileExecutingBlock:onQueue:completionBlock:
*/
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block completionBlock:(MBProgressHUDCompletionBlock)completion;
/**
* Shows the HUD while a block is executing on the specified dispatch queue, then hides the HUD.
*
* @see showAnimated:whileExecutingBlock:onQueue:completionBlock:
*/
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block onQueue:(dispatch_queue_t)queue;
/**
* Shows the HUD while a block is executing on the specified dispatch queue, executes completion block on the main queue, and then hides the HUD.
*
* @param animated If set to YES the HUD will (dis)appear using the current animationType. If set to NO the HUD will
* not use animations while (dis)appearing.
* @param block The block to be executed while the HUD is shown.
* @param queue The dispatch queue on which the block should be executed.
* @param completion The block to be executed on completion.
*
* @see completionBlock
*/
- (void)showAnimated:(BOOL)animated whileExecutingBlock:(dispatch_block_t)block onQueue:(dispatch_queue_t)queue
completionBlock:(MBProgressHUDCompletionBlock)completion;
/**
* A block that gets called after the HUD was completely hidden.
*/
@property (copy) MBProgressHUDCompletionBlock completionBlock;
#endif
/**
* MBProgressHUD operation mode. The default is MBProgressHUDModeIndeterminate.
*
* @see MBProgressHUDMode
*/
@property (assign) MBProgressHUDMode mode;
/**
* The animation type that should be used when the HUD is shown and hidden.
*
* @see MBProgressHUDAnimation
*/
@property (assign) MBProgressHUDAnimation animationType;
/**
* The UIView (e.g., a UIImageView) to be shown when the HUD is in MBProgressHUDModeCustomView.
* For best results use a 37 by 37 pixel view (so the bounds match the built in indicator bounds).
*/
@property (MB_STRONG) UIView *customView;
/**
* The HUD delegate object.
*
* @see MBProgressHUDDelegate
*/
@property (MB_WEAK) id<MBProgressHUDDelegate> delegate;
/**
* An optional short message to be displayed below the activity indicator. The HUD is automatically resized to fit
* the entire text. If the text is too long it will get clipped by displaying "..." at the end. If left unchanged or
* set to @"", then no message is displayed.
*/
@property (copy) NSString *labelText;
/**
* An optional details message displayed below the labelText message. This message is displayed only if the labelText
* property is also set and is different from an empty string (@""). The details text can span multiple lines.
*/
@property (copy) NSString *detailsLabelText;
/**
* The opacity of the HUD window. Defaults to 0.8 (80% opacity).
*/
@property (assign) float opacity;
/**
* The color of the HUD window. Defaults to black. If this property is set, color is set using
* this UIColor and the opacity property is not used. using retain because performing copy on
* UIColor base colors (like [UIColor greenColor]) cause problems with the copyZone.
*/
@property (MB_STRONG) UIColor *color;
/**
* The x-axis offset of the HUD relative to the centre of the superview.
*/
@property (assign) float xOffset;
/**
* The y-axis offset of the HUD relative to the centre of the superview.
*/
@property (assign) float yOffset;
/**
* The amount of space between the HUD edge and the HUD elements (labels, indicators or custom views).
* Defaults to 20.0
*/
@property (assign) float margin;
/**
* The corner radius for the HUD
* Defaults to 10.0
*/
@property (assign) float cornerRadius;
/**
* Cover the HUD background view with a radial gradient.
*/
@property (assign) BOOL dimBackground;
/**
 * Grace period is the time (in seconds) that the invoked method may be run without
 * showing the HUD. If the task finishes before the grace time runs out, the HUD will
 * not be shown at all.
 * This may be used to prevent HUD display for very short tasks.
 * Defaults to 0 (no grace time).
 * Grace time functionality is only supported when the task status is known!
 * @see taskInProgress
 */
@property (assign) float graceTime;
/**
 * The minimum time (in seconds) that the HUD is shown.
 * This avoids the problem of the HUD being shown and then instantly hidden.
 * Defaults to 0 (no minimum show time).
 */
@property (assign) float minShowTime;
/**
* Indicates that the executed operation is in progress. Needed for correct graceTime operation.
* If you don't set a graceTime (different than 0.0) this does nothing.
* This property is automatically set when using showWhileExecuting:onTarget:withObject:animated:.
* When threading is done outside of the HUD (i.e., when the show: and hide: methods are used directly),
* you need to set this property when your task starts and completes in order to have normal graceTime
* functionality.
*/
@property (assign) BOOL taskInProgress;
/**
* Removes the HUD from its parent view when hidden.
* Defaults to NO.
*/
@property (assign) BOOL removeFromSuperViewOnHide;
/**
* Font to be used for the main label. Set this property if the default is not adequate.
*/
@property (MB_STRONG) UIFont* labelFont;
/**
* Color to be used for the main label. Set this property if the default is not adequate.
*/
@property (MB_STRONG) UIColor* labelColor;
/**
* Font to be used for the details label. Set this property if the default is not adequate.
*/
@property (MB_STRONG) UIFont* detailsLabelFont;
/**
* Color to be used for the details label. Set this property if the default is not adequate.
*/
@property (MB_STRONG) UIColor* detailsLabelColor;
/**
* The color of the activity indicator. Defaults to [UIColor whiteColor]
* Does nothing on pre iOS 5.
*/
@property (MB_STRONG) UIColor *activityIndicatorColor;
/**
* The progress of the progress indicator, from 0.0 to 1.0. Defaults to 0.0.
*/
@property (assign) float progress;
/**
* The minimum size of the HUD bezel. Defaults to CGSizeZero (no minimum size).
*/
@property (assign) CGSize minSize;
/**
* The actual size of the HUD bezel.
* You can use this to limit touch handling on the bezel area only.
* @see https://github.com/jdg/MBProgressHUD/pull/200
*/
@property (atomic, assign, readonly) CGSize size;
/**
* Force the HUD dimensions to be equal if possible.
*/
@property (assign, getter = isSquare) BOOL square;
@end
/**
 * Delegate protocol for receiving MBProgressHUD lifecycle callbacks.
 */
@protocol MBProgressHUDDelegate <NSObject>
@optional
/**
 * Called after the HUD was fully hidden from the screen.
 * @param hud The HUD instance that was hidden.
 */
- (void)hudWasHidden:(MBProgressHUD *)hud;
@end
/**
 * A progress view for showing definite progress by filling up a circle (pie chart).
 */
@interface MBRoundProgressView : UIView
/**
 * Progress (0.0 to 1.0)
 */
@property (nonatomic, assign) float progress;
/**
 * Indicator progress color.
 * Defaults to white [UIColor whiteColor]
 */
@property (nonatomic, MB_STRONG) UIColor *progressTintColor;
/**
 * Indicator background (non-progress) color.
 * Defaults to translucent white (alpha 0.1)
 */
@property (nonatomic, MB_STRONG) UIColor *backgroundTintColor;
/**
 * Display mode - NO = round or YES = annular. Defaults to round.
 */
@property (nonatomic, assign, getter = isAnnular) BOOL annular;
@end
/**
 * A flat bar progress view.
 */
@interface MBBarProgressView : UIView
/**
 * Progress (0.0 to 1.0)
 */
@property (nonatomic, assign) float progress;
/**
 * Bar border line color.
 * Defaults to white [UIColor whiteColor].
 */
@property (nonatomic, MB_STRONG) UIColor *lineColor;
/**
 * Bar background color.
 * Defaults to clear [UIColor clearColor].
 */
@property (nonatomic, MB_STRONG) UIColor *progressRemainingColor;
/**
 * Bar progress color.
 * Defaults to white [UIColor whiteColor].
 */
@property (nonatomic, MB_STRONG) UIColor *progressColor;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
typedef enum { kGPUImageHistogramRed, kGPUImageHistogramGreen, kGPUImageHistogramBlue, kGPUImageHistogramRGB, kGPUImageHistogramLuminance} GPUImageHistogramType;
/**
 * Generates a histogram of the input image.
 */
@interface GPUImageHistogramFilter : GPUImageFilter
{
    GPUImageHistogramType histogramType; // Which channel(s) the histogram is computed for
    GLubyte *vertexSamplingCoordinates; // Raw buffer of sampling positions fed to the vertex stage
    GLProgram *secondFilterProgram, *thirdFilterProgram; // Additional shader passes; presumably used for multi-channel modes — confirm in the .m
    GLint secondFilterPositionAttribute, thirdFilterPositionAttribute; // Position attributes for the extra passes
}
// Rather than sampling every pixel, this dictates what fraction of the image is sampled. By default, this is 16 with a minimum of 1.
@property(readwrite, nonatomic) NSUInteger downsamplingFactor;
// Initialization and teardown
/** Designated way to create the filter for a given histogram channel mode. */
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
/** Sets up the vertex attributes for the secondary shader programs. */
- (void)initializeSecondaryAttributes;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Adjusts the contrast of the image
 */
@interface GPUImageContrastFilter : GPUImageFilter
{
    GLint contrastUniform; // Handle of the contrast uniform in the fragment shader
}
/** Contrast ranges from 0.0 to 4.0 (max contrast), with 1.0 as the normal level
 */
@property(readwrite, nonatomic) CGFloat contrast;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageOutput.h"
// Stringification helpers: SHADER_STRING(text) turns raw GLSL source written
// in-line into an NSString literal at compile time.
#define STRINGIZE(x) #x
#define STRINGIZE2(x) STRINGIZE(x)
#define SHADER_STRING(text) @ STRINGIZE2(text)
// Helpers for emitting a literal '#' character from inside a macro-wrapped
// shader string (the preprocessor would otherwise treat it as an operator).
#define GPUImageHashIdentifier #
#define GPUImageWrappedLabel(x) x
#define GPUImageEscapedHashIdentifier(a) GPUImageWrappedLabel(GPUImageHashIdentifier)a
// Default vertex and passthrough fragment shader sources shared by the filter classes.
extern NSString *const kGPUImageVertexShaderString;
extern NSString *const kGPUImagePassthroughFragmentShaderString;
// Plain C structs used to marshal vector and matrix uniform values to shaders.
// Fields are named positionally (one..four) rather than semantically.
struct GPUVector4 {
    GLfloat one;
    GLfloat two;
    GLfloat three;
    GLfloat four;
};
typedef struct GPUVector4 GPUVector4;
struct GPUVector3 {
    GLfloat one;
    GLfloat two;
    GLfloat three;
};
typedef struct GPUVector3 GPUVector3;
// 4x4 matrix as four row/column vectors (orientation not established here — see shader usage).
struct GPUMatrix4x4 {
    GPUVector4 one;
    GPUVector4 two;
    GPUVector4 three;
    GPUVector4 four;
};
typedef struct GPUMatrix4x4 GPUMatrix4x4;
struct GPUMatrix3x3 {
    GPUVector3 one;
    GPUVector3 two;
    GPUVector3 three;
};
typedef struct GPUMatrix3x3 GPUMatrix3x3;
/** GPUImage's base filter class
 Filters and other subsequent elements in the chain conform to the GPUImageInput protocol, which lets them take in the supplied or processed texture from the previous link in the chain and do something with it. Objects one step further down the chain are considered targets, and processing can be branched by adding multiple targets to a single output or filter.
 */
@interface GPUImageFilter : GPUImageOutput <GPUImageInput>
{
    GPUImageFramebuffer *firstInputFramebuffer; // Framebuffer received from the previous link in the chain
    GLProgram *filterProgram; // Compiled shader program for this filter pass
    GLint filterPositionAttribute, filterTextureCoordinateAttribute; // Vertex attribute handles
    GLint filterInputTextureUniform; // Uniform handle for the input texture sampler
    GLfloat backgroundColorRed, backgroundColorGreen, backgroundColorBlue, backgroundColorAlpha; // Clear color components
    BOOL isEndProcessing;
    CGSize currentFilterSize;
    GPUImageRotationMode inputRotation; // Rotation applied to incoming texture coordinates
    BOOL currentlyReceivingMonochromeInput;
    NSMutableDictionary *uniformStateRestorationBlocks; // Blocks re-applied to restore uniform state per pass
    dispatch_semaphore_t imageCaptureSemaphore;
}
@property(readonly) CVPixelBufferRef renderTarget;
// When YES, rendering of this filter is skipped.
@property(readwrite, nonatomic) BOOL preventRendering;
@property(readwrite, nonatomic) BOOL currentlyReceivingMonochromeInput;
/// @name Initialization and teardown
/**
 Initialize with vertex and fragment shaders
 You make take advantage of the SHADER_STRING macro to write your shaders in-line.
 @param vertexShaderString Source code of the vertex shader to use
 @param fragmentShaderString Source code of the fragment shader to use
 */
- (id)initWithVertexShaderFromString:(NSString *)vertexShaderString fragmentShaderFromString:(NSString *)fragmentShaderString;
/**
 Initialize with a fragment shader
 You may take advantage of the SHADER_STRING macro to write your shader in-line.
 @param fragmentShaderString Source code of fragment shader to use
 */
- (id)initWithFragmentShaderFromString:(NSString *)fragmentShaderString;
/**
 Initialize with a fragment shader
 @param fragmentShaderFilename Filename of fragment shader to load
 */
- (id)initWithFragmentShaderFromFile:(NSString *)fragmentShaderFilename;
- (void)initializeAttributes;
- (void)setupFilterForSize:(CGSize)filterFrameSize;
- (CGSize)rotatedSize:(CGSize)sizeToRotate forIndex:(NSInteger)textureIndex;
- (CGPoint)rotatedPoint:(CGPoint)pointToRotate forRotation:(GPUImageRotationMode)rotation;
/// @name Managing the display FBOs
/** Size of the frame buffer object
 */
- (CGSize)sizeOfFBO;
/// @name Rendering
+ (const GLfloat *)textureCoordinatesForRotation:(GPUImageRotationMode)rotationMode;
- (void)renderToTextureWithVertices:(const GLfloat *)vertices textureCoordinates:(const GLfloat *)textureCoordinates;
- (void)informTargetsAboutNewFrameAtTime:(CMTime)frameTime;
- (CGSize)outputFrameSize;
/// @name Input parameters
// Uniform setters addressed by name look up the uniform in filterProgram.
// NOTE(review): the naming is inconsistent (forUniformName: vs forUniform:) but is
// long-standing public API, so it is preserved for source compatibility.
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
- (void)setInteger:(GLint)newInteger forUniformName:(NSString *)uniformName;
- (void)setFloat:(GLfloat)newFloat forUniformName:(NSString *)uniformName;
- (void)setSize:(CGSize)newSize forUniformName:(NSString *)uniformName;
- (void)setPoint:(CGPoint)newPoint forUniformName:(NSString *)uniformName;
- (void)setFloatVec3:(GPUVector3)newVec3 forUniformName:(NSString *)uniformName;
- (void)setFloatVec4:(GPUVector4)newVec4 forUniform:(NSString *)uniformName;
- (void)setFloatArray:(GLfloat *)array length:(GLsizei)count forUniform:(NSString*)uniformName;
// Uniform setters addressed by GLint handle, for an explicit shader program.
- (void)setMatrix3f:(GPUMatrix3x3)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setMatrix4f:(GPUMatrix4x4)matrix forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloat:(GLfloat)floatValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setPoint:(CGPoint)pointValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setSize:(CGSize)sizeValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setVec3:(GPUVector3)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setVec4:(GPUVector4)vectorValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setFloatArray:(GLfloat *)arrayValue length:(GLsizei)arrayLength forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setInteger:(GLint)intValue forUniform:(GLint)uniform program:(GLProgram *)shaderProgram;
- (void)setAndExecuteUniformStateCallbackAtIndex:(GLint)uniform forProgram:(GLProgram *)shaderProgram toBlock:(dispatch_block_t)uniformStateBlock;
- (void)setUniformsForProgramAtIndex:(NSUInteger)programIndex;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
extern NSString *const kGPUImageTwoInputTextureVertexShaderString;
/**
 * Base class for filters that combine two input textures (e.g. blend filters).
 */
@interface GPUImageTwoInputFilter : GPUImageFilter
{
    GPUImageFramebuffer *secondInputFramebuffer; // Framebuffer for the second input in the chain
    GLint filterSecondTextureCoordinateAttribute; // Vertex attribute for the second texture's coordinates
    GLint filterInputTextureUniform2; // Sampler uniform for the second texture
    GPUImageRotationMode inputRotation2; // Rotation applied to the second input
    CMTime firstFrameTime, secondFrameTime; // Timestamps of the most recent frame on each input
    BOOL hasSetFirstTexture, hasReceivedFirstFrame, hasReceivedSecondFrame, firstFrameWasVideo, secondFrameWasVideo;
    BOOL firstFrameCheckDisabled, secondFrameCheckDisabled; // When set, rendering does not wait on that input
}
/** Stop waiting for a new frame on the first input before rendering. */
- (void)disableFirstFrameCheck;
/** Stop waiting for a new frame on the second input before rendering. */
- (void)disableSecondFrameCheck;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassFilter.h"
/**
 * Two-pass Sobel edge detection.
 */
@interface GPUImageSobelEdgeDetectionFilter : GPUImageTwoPassFilter
{
    GLint texelWidthUniform, texelHeightUniform, edgeStrengthUniform; // Shader uniform handles
    BOOL hasOverriddenImageSizeFactor; // YES once texelWidth/texelHeight were set explicitly
}
// The texel width and height factors tweak the appearance of the edges. By default, they match the inverse of the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;
// The filter strength property affects the dynamic range of the filter. High values can make edges more visible, but can lead to saturation. Default of 1.0.
@property(readwrite, nonatomic) CGFloat edgeStrength;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"
@class GPUImageErosionFilter;
@class GPUImageDilationFilter;
// A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
// This helps to filter out smaller bright elements.
// A filter that first performs an erosion on the red channel of an image, followed by a dilation of the same radius.
// This helps to filter out smaller bright elements.
@interface GPUImageOpeningFilter : GPUImageFilterGroup
{
    GPUImageErosionFilter *erosionFilter;   // First stage: erosion
    GPUImageDilationFilter *dilationFilter; // Second stage: dilation with the same radius
}
// Texel spacing of the component filters, in pixels.
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
/** Creates the group with erosion and dilation stages of the given radius. */
- (id)initWithRadius:(NSUInteger)radius;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageDifferenceBlendFilter : GPUImageTwoInputFilter
{
}
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
// This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
// dimensions, or this won't work correctly
// This outputs an image with a constant color. You need to use -forceProcessingAtSize: in order to set the output image
// dimensions, or this won't work correctly
@interface GPUImageSolidColorGenerator : GPUImageFilter
{
    GLint colorUniform;            // Uniform handle for the fill color
    GLint useExistingAlphaUniform; // Uniform handle for the useExistingAlpha flag
}
// This color dictates what the output image will be filled with
@property(readwrite, nonatomic) GPUVector4 color;
@property(readwrite, nonatomic, assign) BOOL useExistingAlpha; // whether to use the alpha of the existing image or not, default is NO
/** Convenience setter for the fill color from individual components (normalized to 1.0). */
- (void)setColorRed:(CGFloat)redComponent green:(CGFloat)greenComponent blue:(CGFloat)blueComponent alpha:(CGFloat)alphaComponent;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageMaskFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageSaturationBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3TextureSamplingFilter.h"
@interface GPUImageColorLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Selectively replaces a color in the first image with the second image
*/
@interface GPUImageChromaKeyBlendFilter : GPUImageTwoInputFilter
{
GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
}
/** The threshold sensitivity controls how similar pixels need to be colored to be replaced
The default value is 0.3
*/
@property(readwrite, nonatomic) CGFloat thresholdSensitivity;
/** The degree of smoothing controls how gradually similar colors are replaced in the image
The default value is 0.1
*/
@property(readwrite, nonatomic) CGFloat smoothing;
/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).
The default is green: (0.0, 1.0, 0.0).
@param redComponent Red component of color to be replaced
@param greenComponent Green component of color to be replaced
@param blueComponent Blue component of color to be replaced
*/
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3ConvolutionFilter.h"
@interface GPUImageEmbossFilter : GPUImage3x3ConvolutionFilter
// The strength of the embossing, from 0.0 to 4.0, with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat intensity;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageOverlayBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Applies gamma adjustment to the image. */
@interface GPUImageGammaFilter : GPUImageFilter
{
    GLint gammaUniform; // Uniform handle for the gamma value
}
// Gamma ranges from 0.0 to 3.0, with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat gamma;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <UIKit/UIKit.h>
#import "GPUImageOutput.h"
/**
 * A GPUImageOutput that sources a processing chain from a still image
 * (UIImage, CGImage, or a URL).
 */
@interface GPUImagePicture : GPUImageOutput
{
    CGSize pixelSizeOfImage;  // Size of the source image in pixels
    BOOL hasProcessedImage;   // YES once processImage has been run
    dispatch_semaphore_t imageUpdateSemaphore; // Guards concurrent processing of the image
}
// Initialization and teardown
- (id)initWithURL:(NSURL *)url;
- (id)initWithImage:(UIImage *)newImageSource;
- (id)initWithCGImage:(CGImageRef)newImageSource;
// smoothlyScaleOutput: when YES, the image is scaled with smoothing (see .m for details).
- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput;
// removePremultiplication: when YES, premultiplied alpha is removed from the source.
- (id)initWithImage:(UIImage *)newImageSource removePremultiplication:(BOOL)removePremultiplication;
- (id)initWithCGImage:(CGImageRef)newImageSource removePremultiplication:(BOOL)removePremultiplication;
- (id)initWithImage:(UIImage *)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;
- (id)initWithCGImage:(CGImageRef)newImageSource smoothlyScaleOutput:(BOOL)smoothlyScaleOutput removePremultiplication:(BOOL)removePremultiplication;
// Image rendering
/** Synchronously pushes the image through all attached targets and filters. */
- (void)processImage;
- (CGSize)outputImageSize;
/**
 * Process image with all targets and filters asynchronously
 * The completion handler is called after processing finished in the
 * GPU's dispatch queue - and only if this method did not return NO.
 *
 * @returns NO if resource is blocked and processing is discarded, YES otherwise
 */
- (BOOL)processImageWithCompletionHandler:(void (^)(void))completion;
/** Processes up to the given filter and returns the resulting UIImage via the block. */
- (void)processImageUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage))block;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageSobelEdgeDetectionFilter.h"
@interface GPUImagePrewittEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Adjusts the saturation of an image
*/
@interface GPUImageSaturationFilter : GPUImageFilter
{
GLint saturationUniform;
}
/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 1.0 as the normal level
*/
@property(readwrite, nonatomic) CGFloat saturation;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// GPUImagePicture+TextureSubimage.h
// GPUImage
//
// Created by Jack Wu on 2014-05-28.
// Copyright (c) 2014 Brad Larson. All rights reserved.
//
#import "GPUImagePicture.h"
/**
 * Category for replacing a sub-region of the picture's backing texture
 * without re-uploading the whole image.
 */
@interface GPUImagePicture (TextureSubimage)
/** Replaces the full texture region with the given subimage. */
- (void)replaceTextureWithSubimage:(UIImage*)subimage;
- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource;
/** Replaces only the given rect of the texture with the subimage. */
- (void)replaceTextureWithSubimage:(UIImage*)subimage inRect:(CGRect)subRect;
- (void)replaceTextureWithSubCGImage:(CGImageRef)subimageSource inRect:(CGRect)subRect;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Converts the image to a single-color (monochrome) version, tinted by `color`. */
@interface GPUImageMonochromeFilter : GPUImageFilter
{
    GLint intensityUniform, filterColorUniform; // Shader uniform handles
}
// Degree of the effect; presumably 0.0 (original) to 1.0 (fully monochrome) — confirm in the .m
@property(readwrite, nonatomic) CGFloat intensity;
// The tint color used for the monochrome output
@property(readwrite, nonatomic) GPUVector4 color;
/** Convenience setter for the tint color from individual components. */
- (void)setColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <Foundation/Foundation.h>
#import "GPUImageOutput.h"
/**
 * Manages an ordered chain of filters between a single input and a single output,
 * and supports building the chain from a configuration dictionary or plist URL.
 */
@interface GPUImageFilterPipeline : NSObject
{
    NSString *stringValue;
}
// NOTE(review): exposed as a mutable array; callers should mutate via the add/remove/replace
// methods below rather than the array directly.
@property (strong) NSMutableArray *filters;
@property (strong) GPUImageOutput *input;
@property (strong) id <GPUImageInput> output;
- (id) initWithOrderedFilters:(NSArray*) filters input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfiguration:(NSDictionary*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
- (id) initWithConfigurationFile:(NSURL*) configuration input:(GPUImageOutput*)input output:(id <GPUImageInput>)output;
// Chain mutation; each call rewires the input/output connections.
- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) addFilter:(GPUImageOutput<GPUImageInput> *)filter atIndex:(NSUInteger)insertIndex;
- (void) replaceFilterAtIndex:(NSUInteger)index withFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) replaceAllFilters:(NSArray *) newFilters;
- (void) removeFilter:(GPUImageOutput<GPUImageInput> *)filter;
- (void) removeFilterAtIndex:(NSUInteger)index;
- (void) removeAllFilters;
// Snapshot of the current filtered output.
- (UIImage *) currentFilteredFrame;
- (UIImage *) currentFilteredFrameWithOrientation:(UIImageOrientation)imageOrientation;
/** Returns a new CGImage of the current filtered frame; caller owns (must release) it. */
- (CGImageRef) newCGImageFromCurrentFilteredFrame;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/// Creates a bulge distortion on the image
@interface GPUImageBulgeDistortionFilter : GPUImageFilter
{
GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
}
/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
@property(readwrite, nonatomic) CGFloat radius;
/// The amount of distortion to apply, from -1.0 to 1.0, with a default of 0.5
@property(readwrite, nonatomic) CGFloat scale;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"
@interface GPUImageAverageLuminanceThresholdFilter : GPUImageFilterGroup
// This is multiplied by the continually calculated average image luminosity to arrive at the final threshold. Default is 1.0.
@property(readwrite, nonatomic) CGFloat thresholdMultiplier;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3TextureSamplingFilter.h"
@interface GPUImageWeakPixelInclusionFilter : GPUImage3x3TextureSamplingFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageDivideBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/// Blends two images by taking the maximum value of each color component between the images
@interface GPUImageLightenBlendFilter : GPUImageTwoInputFilter
{
}
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageSourceOverBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
@interface GPUImageMultiplyBlendFilter : GPUImageTwoInputFilter
{
}
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
@interface GPUImageMotionBlurFilter : GPUImageFilter
/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
*/
@property (readwrite, nonatomic) CGFloat blurSize;
/** The angular direction of the blur, in degrees. 0 degrees by default
*/
@property (readwrite, nonatomic) CGFloat blurAngle;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"
#import "GPUImageLowPassFilter.h"
#import "GPUImageAverageColor.h"
/**
 * Detects motion between frames by low-pass filtering the video and comparing
 * each new frame against the filtered history.
 */
@interface GPUImageMotionDetector : GPUImageFilterGroup
{
    GPUImageLowPassFilter *lowPassFilter;           // Running average of past frames
    GPUImageTwoInputFilter *frameComparisonFilter;  // Compares current frame vs. low-pass output
    GPUImageAverageColor *averageColor;             // Reduces the comparison image to a motion measure
}
// This controls the low pass filter strength used to compare the current frame with previous ones to detect motion. This ranges from 0.0 to 1.0, with a default of 0.5.
@property(readwrite, nonatomic) CGFloat lowPassFilterStrength;
// For every frame, this will feed back the calculated centroid of the motion, as well as a relative intensity.
@property(nonatomic, copy) void(^motionDetectionBlock)(CGPoint motionCentroid, CGFloat motionIntensity, CMTime frameTime);
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <Foundation/Foundation.h>
#import "GPUImageContext.h"
// A single RGBA pixel as raw bytes (one byte per channel).
struct GPUByteColorVector {
    GLubyte red;
    GLubyte green;
    GLubyte blue;
    GLubyte alpha;
};
typedef struct GPUByteColorVector GPUByteColorVector;
@protocol GPUImageRawDataProcessor;
// NOTE(review): both branches of this conditional declare the identical interface;
// the split presumably existed for a platform-specific ivar that has since been
// removed — candidate for simplification, verify against project history.
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
/** Terminal chain element that exposes the rendered frame as raw bytes on the CPU. */
@interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
    CGSize imageSize;                 // Expected size of incoming frames
    GPUImageRotationMode inputRotation;
    BOOL outputBGRA;                  // YES when bytes are delivered in BGRA order
}
#else
@interface GPUImageRawDataOutput : NSObject <GPUImageInput> {
    CGSize imageSize;
    GPUImageRotationMode inputRotation;
    BOOL outputBGRA;
}
#endif
// Pointer to the latest frame's pixel bytes; lock the framebuffer while reading.
@property(readonly) GLubyte *rawBytesForImage;
@property(nonatomic, copy) void(^newFrameAvailableBlock)(void);
@property(nonatomic) BOOL enabled;
// Initialization and teardown
- (id)initWithImageSize:(CGSize)newImageSize resultsInBGRAFormat:(BOOL)resultsInBGRAFormat;
// Data access
/** Returns the RGBA value of the pixel at the given location in the image. */
- (GPUByteColorVector)colorAtLocation:(CGPoint)locationInImage;
/** Row stride of rawBytesForImage in bytes (may exceed width * 4 due to padding). */
- (NSUInteger)bytesPerRowInOutput;
- (void)setImageSize:(CGSize)newImageSize;
// Bracket reads of rawBytesForImage with these to keep the framebuffer valid.
- (void)lockFramebufferForReading;
- (void)unlockFramebufferAfterReading;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassFilter.h"
/** A two-pass (vertical then horizontal) filter whose shaders sample neighboring
 texels at a configurable spacing, used as a base class for separable blurs and
 morphological filters.
 */
@interface GPUImageTwoPassTextureSamplingFilter : GPUImageTwoPassFilter
{
GLint verticalPassTexelWidthOffsetUniform, verticalPassTexelHeightOffsetUniform, horizontalPassTexelWidthOffsetUniform, horizontalPassTexelHeightOffsetUniform;
GLfloat verticalPassTexelWidthOffset, verticalPassTexelHeightOffset, horizontalPassTexelWidthOffset, horizontalPassTexelHeightOffset;
CGFloat _verticalTexelSpacing, _horizontalTexelSpacing;
}
// This sets the spacing between texels (in pixels) when sampling in each pass. By default, this is 1.0
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <Foundation/Foundation.h>
#import <QuartzCore/QuartzCore.h>
#import "GPUImageFramebuffer.h"
/** Pools GPUImageFramebuffer instances so that framebuffers/textures of a given
 size and configuration can be reused instead of reallocated per frame.
 */
@interface GPUImageFramebufferCache : NSObject
// Framebuffer management
/// Returns a cached framebuffer matching the size/options, creating one if none is available.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)textureOptions onlyTexture:(BOOL)onlyTexture;
/// Convenience variant using default texture options.
- (GPUImageFramebuffer *)fetchFramebufferForSize:(CGSize)framebufferSize onlyTexture:(BOOL)onlyTexture;
/// Makes a framebuffer available for reuse.
- (void)returnFramebufferToCache:(GPUImageFramebuffer *)framebuffer;
/// Frees all framebuffers not currently in use (e.g. in response to a memory warning).
- (void)purgeAllUnassignedFramebuffers;
/// Keeps a framebuffer alive while its contents are being read back for image capture.
- (void)addFramebufferToActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
- (void)removeFramebufferFromActiveImageCaptureList:(GPUImageFramebuffer *)framebuffer;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Applies Photoshop-style tone curves to an image, with independent control
 points for the red, green, blue, and composite RGB channels. Curves can also be
 loaded from Photoshop ACV files.
 Control points are NSValue-wrapped CGPoints with coordinates normalized to 0-1.
 */
@interface GPUImageToneCurveFilter : GPUImageFilter
@property(readwrite, nonatomic, copy) NSArray *redControlPoints;
@property(readwrite, nonatomic, copy) NSArray *greenControlPoints;
@property(readwrite, nonatomic, copy) NSArray *blueControlPoints;
@property(readwrite, nonatomic, copy) NSArray *rgbCompositeControlPoints;
// Initialization and teardown
- (id)initWithACVData:(NSData*)data;
- (id)initWithACV:(NSString*)curveFilename;
- (id)initWithACVURL:(NSURL*)curveFileURL;
// This lets you set all three red, green, and blue tone curves at once.
// NOTE: Deprecated this function because this effect can be accomplished
// using the rgbComposite channel rather than setting all 3 R, G, and B channels.
- (void)setRGBControlPoints:(NSArray *)points DEPRECATED_ATTRIBUTE;
- (void)setPointsWithACV:(NSString*)curveFilename;
- (void)setPointsWithACVURL:(NSURL*)curveFileURL;
// Curve calculation
- (NSMutableArray *)getPreparedSplineCurve:(NSArray *)points;
- (NSMutableArray *)splineCurve:(NSArray *)points;
- (NSMutableArray *)secondDerivative:(NSArray *)cgPoints;
- (void)updateToneCurveTexture;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Draws a set of lines (given as slope/intercept pairs) over a transparent
 background, typically used to visualize detected lines from a Hough transform.
 */
@interface GPUImageLineGenerator : GPUImageFilter
{
GLint lineWidthUniform, lineColorUniform;
GLfloat *lineCoordinates;
}
// The width of the displayed lines, in pixels. The default is 1.
@property(readwrite, nonatomic) CGFloat lineWidth;
// The color of the lines is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
- (void)setLineColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
// Rendering
/// Renders numberOfLines lines from an array of (slope, intercept) float pairs for the given frame time.
- (void)renderLinesFromArray:(GLfloat *)lineSlopeAndIntercepts count:(NSUInteger)numberOfLines frameTime:(CMTime)frameTime;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassTextureSamplingFilter.h"
/** A Gaussian blur filter
 Interpolated optimization based on Daniel Rákos' work at http://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
 */
@interface GPUImageGaussianBlurFilter : GPUImageTwoPassTextureSamplingFilter
{
BOOL shouldResizeBlurRadiusWithImageSize;
CGFloat _blurRadiusInPixels;
}
/** A multiplier for the spacing between texels, ranging from 0.0 on up, with a default of 1.0. Adjusting this may slightly increase the blur strength, but will introduce artifacts in the result.
 */
@property (readwrite, nonatomic) CGFloat texelSpacingMultiplier;
/** A radius in pixels to use for the blur, with a default of 2.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;
/** Setting these properties will allow the blur radius to scale with the size of the image. These properties are mutually exclusive; setting either will set the other to 0.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageWidth;
@property (readwrite, nonatomic) CGFloat blurRadiusAsFractionOfImageHeight;
/// The number of times to sequentially blur the incoming image. The more passes, the slower the filter.
@property(readwrite, nonatomic) NSUInteger blurPasses;
// Shader generation: these build vertex/fragment shader source for a given radius and sigma,
// in both the straightforward ("standard") and linear-sampling-optimized forms.
+ (NSString *)vertexShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForStandardBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)vertexShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
+ (NSString *)fragmentShaderForOptimizedBlurOfRadius:(NSUInteger)blurRadius sigma:(CGFloat)sigma;
/// Swaps the filter's current shader program for the given vertex/fragment pair.
- (void)switchToVertexShader:(NSString *)newVertexShader fragmentShader:(NSString *)newFragmentShader;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageGaussianBlurFilter.h"
/** A hardware-accelerated box blur of an image
 */
@interface GPUImageBoxBlurFilter : GPUImageGaussianBlurFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassTextureSamplingFilter.h"
// For each pixel, this sets it to the maximum value of each color channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
// This extends out brighter colors, and can be used for abstraction of color images.
@interface GPUImageRGBDilationFilter : GPUImageTwoPassTextureSamplingFilter
// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)dilationRadius;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Sharpens (or, with negative values, softens) the incoming image.
 */
@interface GPUImageSharpenFilter : GPUImageFilter
{
GLint sharpnessUniform;
GLint imageWidthFactorUniform, imageHeightFactorUniform;
}
// Sharpness ranges from -4.0 to 4.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat sharpness;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
// This is Jeff LaMarche's GLProgram OpenGL shader wrapper class from his OpenGL ES 2.0 book.
// A description of this can be found at his page on the topic:
// http://iphonedevelopment.blogspot.com/2010/11/opengl-es-20-for-ios-chapter-4.html
// I've extended this to be able to take programs as NSStrings in addition to files, for baked-in shaders
#import <Foundation/Foundation.h>
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif
/** Wrapper around an OpenGL (ES) shader program: compiles a vertex/fragment
 shader pair, links them, and provides attribute/uniform index lookup.
 Shaders may be supplied as strings or as bundle filenames.
 */
@interface GLProgram : NSObject
{
NSMutableArray *attributes;
NSMutableArray *uniforms;
GLuint program,
vertShader,
fragShader;
}
@property(readwrite, nonatomic) BOOL initialized;
// Compile/link logs, populated on failure for debugging.
@property(readwrite, copy, nonatomic) NSString *vertexShaderLog;
@property(readwrite, copy, nonatomic) NSString *fragmentShaderLog;
@property(readwrite, copy, nonatomic) NSString *programLog;
- (id)initWithVertexShaderString:(NSString *)vShaderString
fragmentShaderString:(NSString *)fShaderString;
- (id)initWithVertexShaderString:(NSString *)vShaderString
fragmentShaderFilename:(NSString *)fShaderFilename;
- (id)initWithVertexShaderFilename:(NSString *)vShaderFilename
fragmentShaderFilename:(NSString *)fShaderFilename;
/// Registers an attribute name; must be called before -link so indices can be bound.
- (void)addAttribute:(NSString *)attributeName;
- (GLuint)attributeIndex:(NSString *)attributeName;
- (GLuint)uniformIndex:(NSString *)uniformName;
/// Links the program; returns NO on failure (check the log properties).
- (BOOL)link;
/// Makes this program the active one for subsequent GL draw calls.
- (void)use;
- (void)validate;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3TextureSamplingFilter.h"
/** Sobel edge detection that also preserves the gradient direction of each edge.
 */
@interface GPUImageDirectionalSobelEdgeDetectionFilter : GPUImage3x3TextureSamplingFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Applies a 2-D affine or full 3-D transform to the incoming image.
 */
@interface GPUImageTransformFilter : GPUImageFilter
{
GLint transformMatrixUniform, orthographicMatrixUniform;
GPUMatrix4x4 orthographicMatrix;
}
// You can either set the transform to apply to be a 2-D affine transform or a 3-D transform. The default is the identity transform (the output image is identical to the input).
@property(readwrite, nonatomic) CGAffineTransform affineTransform;
@property(readwrite, nonatomic) CATransform3D transform3D;
// This applies the transform to the raw frame data if set to YES, the default of NO takes the aspect ratio of the image input into account when rotating
@property(readwrite, nonatomic) BOOL ignoreAspectRatio;
// sets the anchor point to top left corner
@property(readwrite, nonatomic) BOOL anchorTopLeft;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Remaps image colors using a 512x512 RGB lookup table supplied as the second input.
 */
@interface GPUImageLookupFilter : GPUImageTwoInputFilter
{
GLint intensityUniform;
}
// How To Use:
// 1) Use your favourite photo editing application to apply a filter to lookup.png from GPUImage/framework/Resources.
// For this to work properly each pixel color must not depend on other pixels (e.g. blur will not work).
// If you need more complex filter you can create as many lookup tables as required.
// E.g. color_balance_lookup_1.png -> GPUImageGaussianBlurFilter -> color_balance_lookup_2.png
// 2) Use your new lookup.png file as a second input for GPUImageLookupFilter.
// See GPUImageAmatorkaFilter, GPUImageMissEtikateFilter, and GPUImageSoftEleganceFilter for example.
// Additional Info:
// Lookup texture is organised as 8x8 quads of 64x64 pixels representing all possible RGB colors:
//for (int by = 0; by < 8; by++) {
// for (int bx = 0; bx < 8; bx++) {
// for (int g = 0; g < 64; g++) {
// for (int r = 0; r < 64; r++) {
// image.setPixel(r + bx * 64, g + by * 64, qRgb((int)(r * 255.0 / 63.0 + 0.5),
// (int)(g * 255.0 / 63.0 + 0.5),
// (int)((bx + by * 8.0) * 255.0 / 63.0 + 0.5)));
// }
// }
// }
//}
// Opacity/intensity of lookup filter ranges from 0.0 to 1.0, with 1.0 as the normal setting
@property(readwrite, nonatomic) CGFloat intensity;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// GPUImageKuwaharaRadius3Filter.h
#import "GPUImageFilter.h"
/** Kuwahara image abstraction filter with a fixed radius of 3 (a faster
 specialization of the general Kuwahara filter).
 */
@interface GPUImageKuwaharaRadius3Filter : GPUImageFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import <CoreMedia/CoreMedia.h>
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#import "GPUImageColorConversion.h"
//Optionally override the YUV to RGB matrices
void setColorConversion601( GLfloat conversionMatrix[9] );
void setColorConversion601FullRange( GLfloat conversionMatrix[9] );
void setColorConversion709( GLfloat conversionMatrix[9] );
// Delegate protocol giving access to raw camera sample buffers (e.g. for face detection).
@protocol GPUImageVideoCameraDelegate <NSObject>
@optional
/// Called with each raw video sample buffer before GPU processing.
- (void)willOutputSampleBuffer:(CMSampleBufferRef)sampleBuffer;
@end
/**
 A GPUImageOutput that provides frames from either camera
*/
@interface GPUImageVideoCamera : GPUImageOutput <AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate>
{
NSUInteger numberOfFramesCaptured;
CGFloat totalFrameTimeDuringCapture;
AVCaptureSession *_captureSession;
AVCaptureDevice *_inputCamera;
AVCaptureDevice *_microphone;
AVCaptureDeviceInput *videoInput;
AVCaptureVideoDataOutput *videoOutput;
BOOL capturePaused;
GPUImageRotationMode outputRotation, internalRotation;
dispatch_semaphore_t frameRenderingSemaphore;
BOOL captureAsYUV;
GLuint luminanceTexture, chrominanceTexture;
__unsafe_unretained id<GPUImageVideoCameraDelegate> _delegate;
}
/// Whether or not the underlying AVCaptureSession is running
@property(readonly, nonatomic) BOOL isRunning;
/// The AVCaptureSession used to capture from the camera
@property(readonly, retain, nonatomic) AVCaptureSession *captureSession;
/// This enables the capture session preset to be changed on the fly
@property (readwrite, nonatomic, copy) NSString *captureSessionPreset;
/// This sets the frame rate of the camera (iOS 5 and above only)
/**
 Setting this to 0 or below will set the frame rate back to the default setting for a particular preset.
 */
@property (readwrite) int32_t frameRate;
/// Easy way to tell which cameras are present on device
@property (readonly, getter = isFrontFacingCameraPresent) BOOL frontFacingCameraPresent;
@property (readonly, getter = isBackFacingCameraPresent) BOOL backFacingCameraPresent;
/// This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
@property(readwrite, nonatomic) BOOL runBenchmark;
/// Use this property to manage camera settings. Focus point, exposure point, etc.
@property(readonly) AVCaptureDevice *inputCamera;
/// This determines the rotation applied to the output image, based on the source material
@property(readwrite, nonatomic) UIInterfaceOrientation outputImageOrientation;
/// These properties determine whether or not the two camera orientations should be mirrored. By default, both are NO.
@property(readwrite, nonatomic) BOOL horizontallyMirrorFrontFacingCamera, horizontallyMirrorRearFacingCamera;
@property(nonatomic, assign) id<GPUImageVideoCameraDelegate> delegate;
/// @name Initialization and teardown
/** Begin a capture session
 See AVCaptureSession for acceptable values
 @param sessionPreset Session preset to use
 @param cameraPosition Camera to capture from
 */
- (id)initWithSessionPreset:(NSString *)sessionPreset cameraPosition:(AVCaptureDevicePosition)cameraPosition;
/** Add audio capture to the session. Adding inputs and outputs freezes the capture session momentarily, so you
 can use this method to add the audio inputs and outputs early, if you're going to set the audioEncodingTarget
 later. Returns YES if the audio inputs and outputs were added, or NO if they had already been added.
 */
- (BOOL)addAudioInputsAndOutputs;
/** Remove the audio capture inputs and outputs from this session. Returns YES if the audio inputs and outputs
 were removed, or NO if they hadn't already been added.
 */
- (BOOL)removeAudioInputsAndOutputs;
/** Tear down the capture session
 */
- (void)removeInputsAndOutputs;
/// @name Manage the camera video stream
/** Start camera capturing
 */
- (void)startCameraCapture;
/** Stop camera capturing
 */
- (void)stopCameraCapture;
/** Pause camera capturing
 */
- (void)pauseCameraCapture;
/** Resume camera capturing
 */
- (void)resumeCameraCapture;
/** Process a video sample
 @param sampleBuffer Buffer to process
 */
- (void)processVideoSampleBuffer:(CMSampleBufferRef)sampleBuffer;
/** Process an audio sample
 @param sampleBuffer Buffer to process
 */
- (void)processAudioSampleBuffer:(CMSampleBufferRef)sampleBuffer;
/** Get the position (front, rear) of the source camera
 */
- (AVCaptureDevicePosition)cameraPosition;
/** Get the AVCaptureConnection of the source camera
 */
- (AVCaptureConnection *)videoCaptureConnection;
/** This flips between the front and rear cameras
 */
- (void)rotateCamera;
/// @name Benchmarking
/** When benchmarking is enabled, this will keep a running average of the time from uploading, processing, and final recording or display
 */
- (CGFloat)averageFrameDurationDuringCapture;
- (void)resetBenchmarkAverage;
+ (BOOL)isBackFacingCameraPresent;
+ (BOOL)isFrontFacingCameraPresent;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Adjusts the individual red, green, and blue channels of an image by
 independent multipliers.
 */
@interface GPUImageRGBFilter : GPUImageFilter
{
GLint redUniform;
GLint greenUniform;
GLint blueUniform;
}
// Normalized values by which each color channel is multiplied. The range is from 0.0 up, with 1.0 as the default.
@property (readwrite, nonatomic) CGFloat red;
@property (readwrite, nonatomic) CGFloat green;
@property (readwrite, nonatomic) CGFloat blue;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Performs non-maximum suppression along the gradient direction, with
 dual-threshold hysteresis (used as a stage in Canny-style edge detection).
 */
@interface GPUImageDirectionalNonMaximumSuppressionFilter : GPUImageFilter
{
GLint texelWidthUniform, texelHeightUniform;
GLint upperThresholdUniform, lowerThresholdUniform;
BOOL hasOverriddenImageSizeFactor;
}
// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;
// These thresholds set cutoffs for the intensities that definitely get registered (upper threshold) and those that definitely don't (lower threshold)
@property(readwrite, nonatomic) CGFloat upperThreshold;
@property(readwrite, nonatomic) CGFloat lowerThreshold;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Combines its two input images using a linear burn blend mode.
 */
@interface GPUImageLinearBurnBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Pixellates the image within a circular region around a configurable center,
 leaving the rest of the image untouched.
 */
@interface GPUImagePixellatePositionFilter : GPUImageFilter
{
GLint fractionalWidthOfAPixelUniform, aspectRatioUniform, centerUniform, radiusUniform;
}
// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;
// the center point to start pixelation in texture coordinates, default 0.5, 0.5
@property(readwrite, nonatomic) CGPoint center;
// the radius (0.0 - 1.0) in which to pixelate, default 1.0
@property(readwrite, nonatomic) CGFloat radius;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Independently adjusts the shadow and highlight regions of an image.
 */
@interface GPUImageHighlightShadowFilter : GPUImageFilter
{
GLint shadowsUniform, highlightsUniform;
}
/**
 * 0 - 1, increase to lighten shadows.
 * @default 0
 */
@property(readwrite, nonatomic) CGFloat shadows;
/**
 * 0 - 1, decrease to darken highlights.
 * @default 1
 */
@property(readwrite, nonatomic) CGFloat highlights;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Two-input filter whose shader samples texels in a cross (up/down/left/right)
 pattern around each pixel, at a configurable texel spacing.
 */
@interface GPUImageTwoInputCrossTextureSamplingFilter : GPUImageTwoInputFilter
{
GLint texelWidthUniform, texelHeightUniform;
CGFloat texelWidth, texelHeight;
BOOL hasOverriddenImageSizeFactor;
}
// The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"
@class GPUImageRGBErosionFilter;
@class GPUImageRGBDilationFilter;
// A filter that first performs a dilation on each color channel of an image, followed by an erosion of the same radius.
// This helps to filter out smaller dark elements.
@interface GPUImageRGBClosingFilter : GPUImageFilterGroup
{
GPUImageRGBErosionFilter *erosionFilter;
GPUImageRGBDilationFilter *dilationFilter;
}
/// radius sets the sampling distance in pixels for both stages; see GPUImageRGBDilationFilter for acceptable values.
- (id)initWithRadius:(NSUInteger)radius;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Combines its two input images using a hue blend mode.
 */
@interface GPUImageHueBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/**
 * Levels like Photoshop.
 *
 * The min, max, minOut and maxOut parameters are floats in the range [0, 1].
 * If you have parameters from Photoshop in the range [0, 255] you must first
 * convert them to be [0, 1].
 * The gamma/mid parameter is a float >= 0. This matches the value from Photoshop.
 *
 * If you want to apply levels to RGB as well as individual channels you need to use
 * this filter twice - first for the individual channels and then for all channels.
 */
@interface GPUImageLevelsFilter : GPUImageFilter
{
GLint minUniform;
GLint midUniform;
GLint maxUniform;
GLint minOutputUniform;
GLint maxOutputUniform;
GPUVector3 minVector, midVector, maxVector, minOutputVector, maxOutputVector;
}
/** Set levels for the red channel */
- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
/// Convenience variant using the default 0/1 output range.
- (void)setRedMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
/** Set levels for the green channel */
- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
- (void)setGreenMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
/** Set levels for the blue channel */
- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
- (void)setBlueMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
/** Set levels for all channels at once */
- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max minOut:(CGFloat)minOut maxOut:(CGFloat)maxOut;
- (void)setMin:(CGFloat)min gamma:(CGFloat)mid max:(CGFloat)max;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Combines its two input images using a soft light blend mode.
 */
@interface GPUImageSoftLightBlendFilter : GPUImageTwoInputFilter
{
}
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"
/** Combines its two input images using a subtract blend mode.
 */
@interface GPUImageSubtractBlendFilter : GPUImageTwoInputFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageHarrisCornerDetectionFilter.h"
/** Noble corner detector
 This is the Noble variant on the Harris detector, from
 Alison Noble, "Descriptions of Image Surfaces", PhD thesis, Department of Engineering Science, Oxford University 1989, p45.
 */
@interface GPUImageNobleCornerDetectionFilter : GPUImageHarrisCornerDetectionFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
//
// GPUImageHistogramEqualizationFilter.h
// FilterShowcase
//
// Created by Adam Marcus on 19/08/2014.
// Copyright (c) 2014 Sunset Lake Software LLC. All rights reserved.
//
#import "GPUImageFilterGroup.h"
#import "GPUImageHistogramFilter.h"
#import "GPUImageRawDataOutput.h"
#import "GPUImageRawDataInput.h"
#import "GPUImageTwoInputFilter.h"
/** Equalizes an image's histogram to improve contrast, implemented as a group:
 a histogram computation stage whose raw output is read back and fed, via a raw
 data input, into a final remapping stage.
 */
@interface GPUImageHistogramEqualizationFilter : GPUImageFilterGroup
{
GPUImageHistogramFilter *histogramFilter;
GPUImageRawDataOutput *rawDataOutputFilter;
GPUImageRawDataInput *rawDataInputFilter;
}
/// Downsampling applied before computing the histogram (higher is faster, less precise).
@property(readwrite, nonatomic) NSUInteger downsamplingFactor;
/// newHistogramType selects which channel(s) the histogram is computed over.
- (id)initWithHistogramType:(GPUImageHistogramType)newHistogramType;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassTextureSamplingFilter.h"
// For each pixel, this sets it to the maximum value of the red channel in a rectangular neighborhood extending out dilationRadius pixels from the center.
// This extends out bright features, and is most commonly used with black-and-white thresholded images.
// Prebuilt vertex shaders for each supported radius:
extern NSString *const kGPUImageDilationRadiusOneVertexShaderString;
extern NSString *const kGPUImageDilationRadiusTwoVertexShaderString;
extern NSString *const kGPUImageDilationRadiusThreeVertexShaderString;
extern NSString *const kGPUImageDilationRadiusFourVertexShaderString;
@interface GPUImageDilationFilter : GPUImageTwoPassTextureSamplingFilter
// Acceptable values for dilationRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)dilationRadius;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassTextureSamplingFilter.h"
/** Resamples an image using Lanczos interpolation for higher-quality
 downsampling than simple bilinear filtering.
 */
@interface GPUImageLanczosResamplingFilter : GPUImageTwoPassTextureSamplingFilter
/// The pre-resampling size of the source image.
@property(readwrite, nonatomic) CGSize originalImageSize;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageGaussianBlurFilter.h"
/** An edge-preserving (bilateral) blur: smooths regions of similar color while
 keeping strong color boundaries intact.
 */
@interface GPUImageBilateralFilter : GPUImageGaussianBlurFilter
{
// NOTE(review): these "*Uniform" ivars are CGFloat, unlike the GLint uniform
// handles used elsewhere in this framework — verify against the implementation.
CGFloat firstDistanceNormalizationFactorUniform;
CGFloat secondDistanceNormalizationFactorUniform;
}
// A normalization factor for the distance between central color and sample color.
@property(nonatomic, readwrite) CGFloat distanceNormalizationFactor;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Adjusts the alpha (opacity) of the incoming image.
 */
@interface GPUImageOpacityFilter : GPUImageFilter
{
GLint opacityUniform;
}
// Opacity ranges from 0.0 to 1.0, with 1.0 as the normal setting
@property(readwrite, nonatomic) CGFloat opacity;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
/** Internal filter that packs neighboring texel values into the color channels
 of a reduced-size output. Presumably used as a downsampling stage by other
 filters in this framework — confirm against its implementation.
 */
@interface GPUImageColorPackingFilter : GPUImageFilter
{
GLint texelWidthUniform, texelHeightUniform;
CGFloat texelWidth, texelHeight;
}
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
/** Protocol for getting Movie played callback.
 */
@protocol GPUImageMovieDelegate <NSObject>
/// Called when the movie has finished playing.
- (void)didCompletePlayingMovie;
@end
/** Source object for filtering movies
 Reads frames from an AVAsset, AVPlayerItem, or file URL and feeds them into a
 filter chain.
 */
@interface GPUImageMovie : GPUImageOutput
@property (readwrite, retain) AVAsset *asset;
@property (readwrite, retain) AVPlayerItem *playerItem;
@property(readwrite, retain) NSURL *url;
/** This enables the benchmarking mode, which logs out instantaneous and average frame times to the console
 */
@property(readwrite, nonatomic) BOOL runBenchmark;
/** This determines whether to play back a movie as fast as the frames can be processed, or if the original speed of the movie should be respected. Defaults to NO.
 */
@property(readwrite, nonatomic) BOOL playAtActualSpeed;
/** This determines whether the video should repeat (loop) at the end and restart from the beginning. Defaults to NO.
 */
@property(readwrite, nonatomic) BOOL shouldRepeat;
/** This specifies the progress of the process on a scale from 0 to 1.0. A value of 0 means the process has not yet begun, A value of 1.0 means the conversion is complete.
 This property is not key-value observable.
 */
@property(readonly, nonatomic) float progress;
/** This is used to send the delegate the movie-did-complete-playing callback
 */
@property (readwrite, nonatomic, assign) id <GPUImageMovieDelegate>delegate;
@property (readonly, nonatomic) AVAssetReader *assetReader;
@property (readonly, nonatomic) BOOL audioEncodingIsFinished;
@property (readonly, nonatomic) BOOL videoEncodingIsFinished;
/// @name Initialization and teardown
- (id)initWithAsset:(AVAsset *)asset;
- (id)initWithPlayerItem:(AVPlayerItem *)playerItem;
- (id)initWithURL:(NSURL *)url;
/// Sets up the YUV-to-RGB conversion pipeline for movie frames.
- (void)yuvConversionSetup;
/// @name Movie processing
/// Synchronizes frame reading with the given movie writer for offline re-encoding.
- (void)enableSynchronizedEncodingUsingMovieWriter:(GPUImageMovieWriter *)movieWriter;
/// Reads the next video frame; returns NO when the track is exhausted.
- (BOOL)readNextVideoFrameFromOutput:(AVAssetReaderOutput *)readerVideoTrackOutput;
/// Reads the next audio sample; returns NO when the track is exhausted.
- (BOOL)readNextAudioSampleFromOutput:(AVAssetReaderOutput *)readerAudioTrackOutput;
- (void)startProcessing;
- (void)endProcessing;
- (void)cancelProcessing;
- (void)processMovieFrame:(CMSampleBufferRef)movieSampleBuffer;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"
// Shared luminance fragment shader source, defined in the implementation file.
extern NSString *const kGPUImageLuminanceFragmentShaderString;
/** Converts an image to grayscale (a slightly faster implementation of the saturation filter, without the ability to vary the color contribution)
 */
@interface GPUImageGrayscaleFilter : GPUImageFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageThresholdEdgeDetectionFilter.h"
/** Sketch-style rendering built on thresholded edge detection; inherits its threshold behavior from the superclass.
 */
@interface GPUImageThresholdSketchFilter : GPUImageThresholdEdgeDetectionFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageSphereRefractionFilter.h"
/** Glass-sphere variant of the sphere refraction effect; configuration properties come from the superclass.
 */
@interface GPUImageGlassSphereFilter : GPUImageSphereRefractionFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageOutput.h"

// The bytes passed into this input are not copied or retained, but you are free to deallocate them after they are used by this filter.
// The bytes are uploaded and stored within a texture, so nothing is kept locally.
// The default format for input bytes is GPUPixelFormatBGRA, unless specified with pixelFormat:
// The default type for input bytes is GPUPixelTypeUByte, unless specified with pixelType:

// Pixel channel layouts accepted for uploaded bytes, mapped directly to OpenGL ES format enums.
typedef enum {
	GPUPixelFormatBGRA = GL_BGRA,
	GPUPixelFormatRGBA = GL_RGBA,
	GPUPixelFormatRGB = GL_RGB,
    GPUPixelFormatLuminance = GL_LUMINANCE
} GPUPixelFormat;

// Per-channel storage types for uploaded bytes, mapped directly to OpenGL ES type enums.
typedef enum {
	GPUPixelTypeUByte = GL_UNSIGNED_BYTE,
	GPUPixelTypeFloat = GL_FLOAT
} GPUPixelType;

/** Feeds raw in-memory pixel buffers into a GPUImage filter chain as a texture source.
 */
@interface GPUImageRawDataInput : GPUImageOutput
{
	CGSize uploadedImageSize;            // Size (in pixels) of the most recently uploaded buffer
	
	dispatch_semaphore_t dataUpdateSemaphore;  // Guards against overlapping uploads/processing
}

// Initialization and teardown
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat;
- (id)initWithBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize pixelFormat:(GPUPixelFormat)pixelFormat type:(GPUPixelType)pixelType;

/** Input data pixel format
 */
@property (readwrite, nonatomic) GPUPixelFormat pixelFormat;
// Input data per-channel type (see GPUPixelType above)
@property (readwrite, nonatomic) GPUPixelType pixelType;

// Image rendering
- (void)updateDataFromBytes:(GLubyte *)bytesToUpload size:(CGSize)imageSize;
- (void)processData;
- (void)processDataForTimestamp:(CMTime)frameTime;
- (CGSize)outputImageSize;
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"

// Forward declarations for the internal filter stages; full headers are imported in the .m.
// (The duplicate @class GPUImageGaussianBlurFilter; from the original has been removed.)
@class GPUImageGaussianBlurFilter;
@class GPUImageXYDerivativeFilter;
@class GPUImageGrayscaleFilter;
@class GPUImageThresholdedNonMaximumSuppressionFilter;
@class GPUImageColorPackingFilter;

//#define DEBUGFEATUREDETECTION

/** Harris corner detector
 
 First pass: reduce to luminance and take the derivative of the luminance texture (GPUImageXYDerivativeFilter)
 
 Second pass: blur the derivative (GPUImageGaussianBlurFilter)
 
 Third pass: apply the Harris corner detection calculation
 
 This is the Harris corner detector, as described in 
 
 C. Harris and M. Stephens. A Combined Corner and Edge Detector. Proc. Alvey Vision Conf., Univ. Manchester, pp. 147-151, 1988.
 */
@interface GPUImageHarrisCornerDetectionFilter : GPUImageFilterGroup
{
    GPUImageXYDerivativeFilter *derivativeFilter;
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageFilter *harrisCornerDetectionFilter;
    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    GPUImageColorPackingFilter *colorPackingFilter;
    GLfloat *cornersArray;      // Reused C buffer for extracted corner coordinates
    GLubyte *rawImagePixels;    // Reused readback buffer for the packed result texture
}

/** The radius of the underlying Gaussian blur. The default is 2.0.
 */
@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;

// This changes the dynamic range of the Harris corner detector by amplifying small cornerness values. Default is 5.0.
@property(readwrite, nonatomic) CGFloat sensitivity;

// A threshold value at which a point is recognized as being a corner after the non-maximum suppression. Default is 0.20.
@property(readwrite, nonatomic) CGFloat threshold;

// This block is called on the detection of new corner points, usually on every processed frame. A C array containing normalized coordinates in X, Y pairs is passed in, along with a count of the number of corners detected and the current timestamp of the video frame
@property(nonatomic, copy) void(^cornersDetectedBlock)(GLfloat* cornerArray, NSUInteger cornersDetected, CMTime frameTime);

// These images are only enabled when built with DEBUGFEATUREDETECTION defined, and are used to examine the intermediate states of the feature detector
@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;

// Initialization and teardown
- (id)initWithCornerDetectionFragmentShader:(NSString *)cornerDetectionFragmentShader;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"

/** Maps image luminance onto a two-color gradient (false color).
 */
@interface GPUImageFalseColorFilter : GPUImageFilter
{
    GLint firstColorUniform, secondColorUniform;
}

// The first and second colors specify what colors replace the dark and light areas of the image, respectively. The defaults are (0.0, 0.0, 0.5) and (1.0, 0.0, 0.0).
@property(readwrite, nonatomic) GPUVector4 firstColor;
@property(readwrite, nonatomic) GPUVector4 secondColor;

// Convenience setters taking individual normalized RGB components.
- (void)setFirstColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;
- (void)setSecondColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"

/** Compresses the luminance range of an image by a configurable factor.
 */
@interface GPUImageLuminanceRangeFilter : GPUImageFilter
{
    GLint rangeReductionUniform;
}

/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
 */
@property(readwrite, nonatomic) CGFloat rangeReductionFactor;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3TextureSamplingFilter.h"

/** Non-maximum suppression over a 3x3 neighborhood, with a cutoff threshold applied to surviving maxima.
 */
@interface GPUImageThresholdedNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
{
    GLint thresholdUniform;
}

/** Any local maximum above this threshold will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.8 as the default
 */
@property(readwrite, nonatomic) CGFloat threshold;

// Use the packed-colorspace variant when the input comes from a color-packing stage (e.g. corner detection pipelines).
- (id)initWithPackedColorspace:(BOOL)inputUsesPackedColorspace;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3TextureSamplingFilter.h"

/** 3x3 median filter, useful for removing salt-and-pepper style noise.
 */
@interface GPUImageMedianFilter : GPUImage3x3TextureSamplingFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import <Foundation/Foundation.h>
#import <AVFoundation/AVFoundation.h>
#import "GPUImageContext.h"

// Fragment shader used to swizzle colors into the writer's expected channel order; defined in the .m.
extern NSString *const kGPUImageColorSwizzlingFragmentShaderString;

@protocol GPUImageMovieWriterDelegate <NSObject>

@optional
- (void)movieRecordingCompleted;
- (void)movieRecordingFailedWithError:(NSError*)error;

@end

/** Sink that records frames from a GPUImage filter chain into a movie file via AVAssetWriter.
 */
@interface GPUImageMovieWriter : NSObject <GPUImageInput>
{
    BOOL alreadyFinishedRecording;       // Prevents double-finishing the asset writer
    
    NSURL *movieURL;
    NSString *fileType;
	AVAssetWriter *assetWriter;
	AVAssetWriterInput *assetWriterAudioInput;
	AVAssetWriterInput *assetWriterVideoInput;
    AVAssetWriterInputPixelBufferAdaptor *assetWriterPixelBufferInput;
    
    GPUImageContext *_movieWriterContext;  // Dedicated GL context for encoding
    CVPixelBufferRef renderTarget;
    CVOpenGLESTextureRef renderTexture;

    CGSize videoSize;
    GPUImageRotationMode inputRotation;
}

@property(readwrite, nonatomic) BOOL hasAudioTrack;
@property(readwrite, nonatomic) BOOL shouldPassthroughAudio;
@property(readwrite, nonatomic) BOOL shouldInvalidateAudioSampleWhenDone;
@property(nonatomic, copy) void(^completionBlock)(void);
@property(nonatomic, copy) void(^failureBlock)(NSError*);
// NOTE(review): `assign` (not `weak`) matches this library's pre-ARC heritage; callers must
// nil this out before the delegate deallocates.
@property(nonatomic, assign) id<GPUImageMovieWriterDelegate> delegate;
@property(readwrite, nonatomic) BOOL encodingLiveVideo;
// Callbacks used by synchronized encoding to pull frames when the writer inputs are ready.
@property(nonatomic, copy) BOOL(^videoInputReadyCallback)(void);
@property(nonatomic, copy) BOOL(^audioInputReadyCallback)(void);
// Hook for inspecting/modifying raw audio samples before they are written.
@property(nonatomic, copy) void(^audioProcessingCallback)(SInt16 **samplesRef, CMItemCount numSamplesInBuffer);
@property(nonatomic) BOOL enabled;
@property(nonatomic, readonly) AVAssetWriter *assetWriter;
@property(nonatomic, readonly) CMTime duration;
@property(nonatomic, assign) CGAffineTransform transform;
@property(nonatomic, copy) NSArray *metaData;
@property(nonatomic, assign, getter = isPaused) BOOL paused;
@property(nonatomic, retain) GPUImageContext *movieWriterContext;

// Initialization and teardown
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize;
- (id)initWithMovieURL:(NSURL *)newMovieURL size:(CGSize)newSize fileType:(NSString *)newFileType outputSettings:(NSDictionary *)outputSettings;

- (void)setHasAudioTrack:(BOOL)hasAudioTrack audioSettings:(NSDictionary *)audioOutputSettings;

// Movie recording
- (void)startRecording;
- (void)startRecordingInOrientation:(CGAffineTransform)orientationTransform;
- (void)finishRecording;
- (void)finishRecordingWithCompletionHandler:(void (^)(void))handler;
- (void)cancelRecording;
- (void)processAudioBuffer:(CMSampleBufferRef)audioBuffer;
- (void)enableSynchronizationCallbacks;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"

/** Blends the second image over the first using its alpha channel, scaled by the mix factor.
 */
@interface GPUImageAlphaBlendFilter : GPUImageTwoInputFilter
{
    GLint mixUniform;
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat mix;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageSobelEdgeDetectionFilter.h"

/** Computes per-pixel X and Y derivatives; used as the first stage of corner detectors.
 */
@interface GPUImageXYDerivativeFilter : GPUImageSobelEdgeDetectionFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageVideoCamera.h"

// C helpers used by the still-photo capture path; defined in the .m.
void stillImageDataReleaseCallback(void *releaseRefCon, const void *baseAddress);
void GPUImageCreateResizedSampleBuffer(CVPixelBufferRef cameraFrame, CGSize finalSize, CMSampleBufferRef *sampleBuffer);

/** Still-photo capture source; runs captured photos through a filter chain before returning them.
 */
@interface GPUImageStillCamera : GPUImageVideoCamera

/** The JPEG compression quality to use when capturing a photo as a JPEG.
 */
@property CGFloat jpegCompressionQuality;

// Only reliably set inside the context of the completion handler of one of the capture methods
@property (readonly) NSDictionary *currentCaptureMetadata;

// Photography controls
// Each capture method processes the photo up to (and including) the given final filter, then
// delivers the result asynchronously via its completion block in the requested representation.
- (void)capturePhotoAsSampleBufferWithCompletionHandler:(void (^)(CMSampleBufferRef imageSampleBuffer, NSError *error))block;
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
- (void)capturePhotoAsImageProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(UIImage *processedImage, NSError *error))block;
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
- (void)capturePhotoAsJPEGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedJPEG, NSError *error))block;
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;
- (void)capturePhotoAsPNGProcessedUpToFilter:(GPUImageOutput<GPUImageInput> *)finalFilterInChain withOrientation:(UIImageOrientation)orientation withCompletionHandler:(void (^)(NSData *processedPNG, NSError *error))block;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"

// Vertex shader used by the multi-stage reduction; defined in the .m.
extern NSString *const kGPUImageColorAveragingVertexShaderString;

/** Reduces an image to its average color via successive downsampling stages.
 */
@interface GPUImageAverageColor : GPUImageFilter
{
    GLint texelWidthUniform, texelHeightUniform;
    
    NSUInteger numberOfStages;      // Number of reduction passes for the current input size
    
    GLubyte *rawImagePixels;        // Reused readback buffer for the final small stage
    CGSize finalStageSize;
}

// This block is called on the completion of color averaging for a frame
@property(nonatomic, copy) void(^colorAverageProcessingFinishedBlock)(CGFloat redComponent, CGFloat greenComponent, CGFloat blueComponent, CGFloat alphaComponent, CMTime frameTime);

- (void)extractAverageColorAtFrameTime:(CMTime)frameTime;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImage3x3TextureSamplingFilter.h"

/** Keeps only pixels that are the maximum of their 3x3 neighborhood; all others are zeroed.
 */
@interface GPUImageNonMaximumSuppressionFilter : GPUImage3x3TextureSamplingFilter
@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputFilter.h"

/** Cross-dissolves between two input images according to the mix factor.
 */
@interface GPUImageDissolveBlendFilter : GPUImageTwoInputFilter
{
    GLint mixUniform;
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2), with 0.5 (half of either) as the normal level
@property(readwrite, nonatomic) CGFloat mix;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassTextureSamplingFilter.h"

// For each pixel, this sets it to the minimum value of each color channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
// This extends out dark features, and can be used for abstraction of color images.
@interface GPUImageRGBErosionFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)erosionRadius;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"

/** Draws crosshair markers at supplied normalized coordinates, e.g. for visualizing detected corners.
 */
@interface GPUImageCrosshairGenerator : GPUImageFilter
{
    GLint crosshairWidthUniform, crosshairColorUniform;
}

// The width of the displayed crosshairs, in pixels. Currently this only works well for odd widths. The default is 5.
@property(readwrite, nonatomic) CGFloat crosshairWidth;

// The color of the crosshairs is specified using individual red, green, and blue components (normalized to 1.0). The default is green: (0.0, 1.0, 0.0).
- (void)setCrosshairColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

// Rendering
// crosshairCoordinates holds normalized X,Y pairs; numberOfCrosshairs is the pair count.
- (void)renderCrosshairsFromArray:(GLfloat *)crosshairCoordinates count:(NSUInteger)numberOfCrosshairs frameTime:(CMTime)frameTime;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"

@class GPUImageSaturationFilter;
@class GPUImageGaussianBlurFilter;
@class GPUImageLuminanceRangeFilter;

/** Replicates the iOS 7 control-center style blur: desaturate, blur, and compress luminance range.
 */
@interface GPUImageiOSBlurFilter : GPUImageFilterGroup
{
    GPUImageSaturationFilter *saturationFilter;
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageLuminanceRangeFilter *luminanceRangeFilter;
}

/** A radius in pixels to use for the blur, with a default of 12.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/** Saturation ranges from 0.0 (fully desaturated) to 2.0 (max saturation), with 0.8 as the normal level
 */
@property (readwrite, nonatomic) CGFloat saturation;

/** The degree to which to downsample, then upsample the incoming image to minimize computations within the Gaussian blur, default of 4.0
 */
@property (readwrite, nonatomic) CGFloat downsampling;

/** The degree to reduce the luminance range, from 0.0 to 1.0. Default is 0.6.
 */
@property (readwrite, nonatomic) CGFloat rangeReductionFactor;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"

@class GPUImageErosionFilter;
@class GPUImageDilationFilter;

// A filter that first performs a dilation on the red channel of an image, followed by an erosion of the same radius.
// This helps to filter out smaller dark elements.
@interface GPUImageClosingFilter : GPUImageFilterGroup
{
    GPUImageErosionFilter *erosionFilter;
    GPUImageDilationFilter *dilationFilter;
}

// Texel spacing multipliers applied to both internal stages.
@property(readwrite, nonatomic) CGFloat verticalTexelSpacing, horizontalTexelSpacing;

// radius sets the sampling distance for both the dilation and erosion stages.
- (id)initWithRadius:(NSUInteger)radius;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageHarrisCornerDetectionFilter.h"

/** Shi-Tomasi feature detector
 
 This is the Shi-Tomasi feature detector, as described in 
 
 J. Shi and C. Tomasi. Good features to track. Proceedings of the IEEE Conference on Computer Vision and Pattern Recognition, pages 593-600, June 1994.
 */
@interface GPUImageShiTomasiFeatureDetectionFilter : GPUImageHarrisCornerDetectionFilter

// Compared to the Harris corner detector, the default sensitivity value for this detector is set to 1.5

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"

/** Applies a pixellation (mosaic) effect to the image.
 */
@interface GPUImagePixellateFilter : GPUImageFilter
{
    GLint fractionalWidthOfAPixelUniform, aspectRatioUniform;
}

// The fractional width of the image to use as a size for the pixels in the resulting image. Values below one pixel width in the source image are ignored.
@property(readwrite, nonatomic) CGFloat fractionalWidthOfAPixel;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoPassTextureSamplingFilter.h"

/** A more generalized 9x9 Gaussian blur filter
 */
@interface GPUImageGaussianBlurPositionFilter : GPUImageTwoPassTextureSamplingFilter
{
    GLint blurCenterUniform, blurRadiusUniform, aspectRatioUniform;
}

/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;

/** Center for the blur, defaults to 0.5, 0.5
 */
@property (readwrite, nonatomic) CGPoint blurCenter;

/** Radius for the blur, defaults to 1.0
 */
@property (readwrite, nonatomic) CGFloat blurRadius;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
#import "GPUImageFilterGroup.h"

/** Poisson (gradient-domain) blend of two images, solved iteratively on the GPU.
 */
@interface GPUImagePoissonBlendFilter : GPUImageTwoInputCrossTextureSamplingFilter
{
    GLint mixUniform;
    
    GPUImageFramebuffer *secondOutputFramebuffer;   // Ping-pong buffer for the iterative solve
}

// Mix ranges from 0.0 (only image 1) to 1.0 (only image 2 gradients), with 1.0 as the normal level
@property(readwrite, nonatomic) CGFloat mix;

// The number of times to propagate the gradients.
// Crank this up to 100 or even 1000 if you want to get anywhere near convergence. Yes, this will be slow.
@property(readwrite, nonatomic) NSUInteger numIterations;

@end
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilterGroup.h"

@class GPUImageGrayscaleFilter;
@class GPUImageSingleComponentGaussianBlurFilter;
@class GPUImageDirectionalSobelEdgeDetectionFilter;
@class GPUImageDirectionalNonMaximumSuppressionFilter;
@class GPUImageWeakPixelInclusionFilter;

/** This applies the edge detection process described by John Canny in
 
 Canny, J., A Computational Approach To Edge Detection, IEEE Trans. Pattern Analysis and Machine Intelligence, 8(6):679–698, 1986.
 
 and implemented in OpenGL ES by
 
 A. Ensor, S. Hall. GPU-based Image Analysis on Mobile Devices. Proceedings of Image and Vision Computing New Zealand 2011.
 
 It starts with a conversion to luminance, followed by an accelerated 9-hit Gaussian blur. A Sobel operator is applied to obtain the overall
 gradient strength in the blurred image, as well as the direction (in texture sampling steps) of the gradient. A non-maximum suppression filter
 acts along the direction of the gradient, highlighting strong edges that pass the threshold and completely removing those that fail the lower
 threshold. Finally, pixels from in-between these thresholds are either included in edges or rejected based on neighboring pixels.
 */
@interface GPUImageCannyEdgeDetectionFilter : GPUImageFilterGroup
{
    GPUImageGrayscaleFilter *luminanceFilter;
    GPUImageSingleComponentGaussianBlurFilter *blurFilter;
    GPUImageDirectionalSobelEdgeDetectionFilter *edgeDetectionFilter;
    GPUImageDirectionalNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    GPUImageWeakPixelInclusionFilter *weakPixelInclusionFilter;
}

/** The image width and height factors tweak the appearance of the edges.
 
 These parameters affect the visibility of the detected edges
 
 By default, they match the inverse of the filter size in pixels
 */
@property(readwrite, nonatomic) CGFloat texelWidth;
/** The image width and height factors tweak the appearance of the edges.
 
 These parameters affect the visibility of the detected edges
 
 By default, they match the inverse of the filter size in pixels
 */
@property(readwrite, nonatomic) CGFloat texelHeight;

/** The underlying blur radius for the Gaussian blur. Default is 2.0.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/** The underlying blur texel spacing multiplier. Default is 1.0.
 */
@property (readwrite, nonatomic) CGFloat blurTexelSpacingMultiplier;

/** Any edge with a gradient magnitude above this threshold will pass and show up in the final result.
 */
@property(readwrite, nonatomic) CGFloat upperThreshold;

/** Any edge with a gradient magnitude below this threshold will fail and be removed from the final result.
 */
@property(readwrite, nonatomic) CGFloat lowerThreshold;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageThreeInputFilter.h"

// Vertex shader adding a fourth texture coordinate attribute; defined in the .m.
extern NSString *const kGPUImageFourInputTextureVertexShaderString;

/** Filter base class that consumes four input textures; extends the three-input variant with a fourth slot.
 */
@interface GPUImageFourInputFilter : GPUImageThreeInputFilter
{
    GPUImageFramebuffer *fourthInputFramebuffer;

    GLint filterFourthTextureCoordinateAttribute;
    GLint filterInputTextureUniform4;
    GPUImageRotationMode inputRotation4;
    GLuint filterSourceTexture4;
    CMTime fourthFrameTime;
    
    BOOL hasSetThirdTexture, hasReceivedFourthFrame, fourthFrameWasVideo;
    BOOL fourthFrameCheckDisabled;   // When set, rendering does not wait for the fourth input
}

// Disables waiting on the fourth input before rendering a frame.
- (void)disableFourthFrameCheck;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
#import "GPUImageFilter.h"

/** Pixellation effect applied in polar coordinates around a configurable center point.
 */
@interface GPUImagePolarPixellateFilter : GPUImageFilter {
    GLint centerUniform, pixelSizeUniform;
}

// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;
// The amount of distortion to apply, from (-2.0, -2.0) to (2.0, 2.0), with a default of (0.05, 0.05)
@property(readwrite, nonatomic) CGSize pixelSize;

@end
| {
"repo_name": "filelife/SnapseedImitation",
"stars": "83",
"repo_language": "Objective-C",
"file_name": "GPUImageParallelCoordinateLineTransformFilter.h",
"mime_type": "text/x-objective-c"
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.