#import <Foundation/Foundation.h>

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <OpenGLES/EAGL.h>
#import <OpenGLES/ES2/gl.h>
#import <OpenGLES/ES2/glext.h>
#else
#import <OpenGL/OpenGL.h>
#import <OpenGL/gl.h>
#endif

#import <QuartzCore/QuartzCore.h>
#import <CoreMedia/CoreMedia.h>

typedef struct GPUTextureOptions {
    GLenum minFilter;
    GLenum magFilter;
    GLenum wrapS;
    GLenum wrapT;
    GLenum internalFormat;
    GLenum format;
    GLenum type;
} GPUTextureOptions;

@interface GPUImageFramebuffer : NSObject

@property(readonly) CGSize size;
@property(readonly) GPUTextureOptions textureOptions;
@property(readonly) GLuint texture;
@property(readonly) BOOL missingFramebuffer;

// Initialization and teardown
- (id)initWithSize:(CGSize)framebufferSize;
- (id)initWithSize:(CGSize)framebufferSize textureOptions:(GPUTextureOptions)fboTextureOptions onlyTexture:(BOOL)onlyGenerateTexture;
- (id)initWithSize:(CGSize)framebufferSize overriddenTexture:(GLuint)inputTexture;

// Usage
- (void)activateFramebuffer;

// Reference counting
- (void)lock;
- (void)unlock;
- (void)clearAllLocks;
- (void)disableReferenceCounting;
- (void)enableReferenceCounting;

// Image capture
- (CGImageRef)newCGImageFromFramebufferContents;
- (void)restoreRenderTarget;

// Raw data bytes
- (void)lockForReading;
- (void)unlockAfterReading;
- (NSUInteger)bytesPerRow;
- (GLubyte *)byteBuffer;
- (CVPixelBufferRef)pixelBuffer;

@end
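// A minimal sketch of the raw-byte read path above, assuming `framebuffer` is a
// GPUImageFramebuffer that has already been rendered into (for example, one
// obtained from a filter's -framebufferForOutput):
static void ReadBackFramebufferBytes(GPUImageFramebuffer *framebuffer)
{
    [framebuffer lockForReading];                    // map the pixel buffer for CPU access
    GLubyte *bytes    = [framebuffer byteBuffer];    // start of the (possibly padded) pixel rows
    NSUInteger stride = [framebuffer bytesPerRow];   // rows can be padded, so index with this
    for (NSUInteger y = 0; y < (NSUInteger)framebuffer.size.height; y++)
    {
        GLubyte *row = bytes + (y * stride);
        (void)row; // inspect row[0 .. 4 * width - 1] here as 8-bit color components
    }
    [framebuffer unlockAfterReading];                // always balance the read lock
}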
#import "GPUImageFilterGroup.h"

@class GPUImagePicture;

/** A photo filter based on Photoshop action by Amatorka
    http://amatorka.deviantart.com/art/Amatorka-Action-2-121069631
 */

// Note: If you want to use this effect you have to add lookup_amatorka.png
//       from the Resources folder to your application bundle.

@interface GPUImageAmatorkaFilter : GPUImageFilterGroup
{
    GPUImagePicture *lookupImageSource;
}

@end
#import "GPUImageTwoInputFilter.h"

@interface GPUImageScreenBlendFilter : GPUImageTwoInputFilter
{
}

@end
#import "GPUImageTwoInputFilter.h"

@interface GPUImageHardLightBlendFilter : GPUImageTwoInputFilter
{
}

@end
#import "GPUImageFilterGroup.h"

@class GPUImageGaussianBlurFilter;
@class GPUImageToonFilter;

/** This uses a similar process as the GPUImageToonFilter, only it precedes the toon effect with a Gaussian blur to smooth out noise.
 */
@interface GPUImageSmoothToonFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageToonFilter *toonFilter;
}

/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelWidth;
/// The image width and height factors tweak the appearance of the edges. By default, they match the filter size in pixels
@property(readwrite, nonatomic) CGFloat texelHeight;

/// The radius of the underlying Gaussian blur. The default is 2.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/// The threshold at which to apply the edges, default of 0.2
@property(readwrite, nonatomic) CGFloat threshold;

/// The levels of quantization for the posterization of colors within the scene, with a default of 10.0
@property(readwrite, nonatomic) CGFloat quantizationLevels;

@end
#import "GPUImageFilter.h"

@interface GPUImageCropFilter : GPUImageFilter
{
    GLfloat cropTextureCoordinates[8];
}

// The crop region is the rectangle within the image to crop. It is normalized to a coordinate space from 0.0 to 1.0, with (0.0, 0.0) being the upper left corner of the image
@property(readwrite, nonatomic) CGRect cropRegion;

// Initialization and teardown
- (id)initWithCropRegion:(CGRect)newCropRegion;

@end
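// Because cropRegion is normalized, the same rectangle works for any input resolution.
// A hedged usage sketch that keeps the middle quarter of an image; the capture sequence
// relies on -useNextFrameForImageCapture / -imageFromCurrentFramebuffer from GPUImageOutput.
#import "GPUImagePicture.h"

static UIImage *CroppedCenterQuarterOfImage(UIImage *inputImage)
{
    // (0.25, 0.25) is the upper-left corner of the crop; 0.5 x 0.5 is its size,
    // all normalized to the image dimensions.
    GPUImagePicture *source = [[GPUImagePicture alloc] initWithImage:inputImage];
    GPUImageCropFilter *crop = [[GPUImageCropFilter alloc] initWithCropRegion:CGRectMake(0.25, 0.25, 0.5, 0.5)];

    [source addTarget:crop];
    [crop useNextFrameForImageCapture]; // required before -processImage for still capture
    [source processImage];
    return [crop imageFromCurrentFramebuffer];
}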
//  Created by Jorge Garcia on 9/5/12.
//

#import "GPUImageTwoInputFilter.h"

@interface GPUImageNormalBlendFilter : GPUImageTwoInputFilter

@end
#import "GPUImageFilter.h"

@interface GPUImageColorInvertFilter : GPUImageFilter
{
}

@end
// This needs a little more work; it's rotating the input tileset and there are some artifacts (I think from GL_LINEAR interpolation), but it's working

#import "GPUImageTwoInputFilter.h"
#import "GPUImagePicture.h"

@interface GPUImageMosaicFilter : GPUImageTwoInputFilter
{
    GLint inputTileSizeUniform, numTilesUniform, displayTileSizeUniform, colorOnUniform;
    GPUImagePicture *pic;
}

// This filter takes an input tileset; the tiles must ascend in luminance.
// It looks at the input image and replaces each display tile with an input tile
// according to the luminance of that tile. The idea was to replicate the ASCII
// video filters seen in other apps, but the tileset can be anything.
@property(readwrite, nonatomic) CGSize inputTileSize;
@property(readwrite, nonatomic) float numTiles;
@property(readwrite, nonatomic) CGSize displayTileSize;
@property(readwrite, nonatomic) BOOL colorOn;
@property(readwrite, nonatomic, copy) NSString *tileSet;

@end
#import "GPUImage3x3TextureSamplingFilter.h"

@interface GPUImageLocalBinaryPatternFilter : GPUImage3x3TextureSamplingFilter
@end
#ifndef GPUImageColorConversion_h
#define GPUImageColorConversion_h

extern GLfloat *kColorConversion601;
extern GLfloat *kColorConversion601FullRange;
extern GLfloat *kColorConversion709;
extern NSString *const kGPUImageYUVVideoRangeConversionForRGFragmentShaderString;
extern NSString *const kGPUImageYUVFullRangeConversionForLAFragmentShaderString;
extern NSString *const kGPUImageYUVVideoRangeConversionForLAFragmentShaderString;

#endif /* GPUImageColorConversion_h */
#import "GPUImageFilter.h"

@interface GPUImageTwoPassFilter : GPUImageFilter
{
    GPUImageFramebuffer *secondOutputFramebuffer;

    GLProgram *secondFilterProgram;
    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;

    NSMutableDictionary *secondProgramUniformStateRestorationBlocks;
}

// Initialization and teardown
- (id)initWithFirstStageVertexShaderFromString:(NSString *)firstStageVertexShaderString firstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageVertexShaderFromString:(NSString *)secondStageVertexShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;
- (id)initWithFirstStageFragmentShaderFromString:(NSString *)firstStageFragmentShaderString secondStageFragmentShaderFromString:(NSString *)secondStageFragmentShaderString;

- (void)initializeSecondaryAttributes;

@end
//
//  GPUImageMovieComposition.h
//  Givit
//
//  Created by Sean Meiners on 2013/01/25.
//
//

#import "GPUImageMovie.h"

@interface GPUImageMovieComposition : GPUImageMovie

@property (readwrite, retain) AVComposition *compositon;
@property (readwrite, retain) AVVideoComposition *videoComposition;
@property (readwrite, retain) AVAudioMix *audioMix;

- (id)initWithComposition:(AVComposition*)compositon
      andVideoComposition:(AVVideoComposition*)videoComposition
              andAudioMix:(AVAudioMix*)audioMix;

@end
//
//  GPUImageHighlightShadowTintFilter.h
//
//
//  Created by github.com/r3mus on 8/14/15.
//
//

#import "GPUImageFilter.h"

@interface GPUImageHighlightShadowTintFilter : GPUImageFilter
{
    GLint shadowTintIntensityUniform, highlightTintIntensityUniform, shadowTintColorUniform, highlightTintColorUniform;
}

// The shadowTint and highlightTint colors specify what colors replace the dark and light areas of the image, respectively. The defaults are black for shadows and white for highlights.
@property(readwrite, nonatomic) GLfloat shadowTintIntensity;
@property(readwrite, nonatomic) GPUVector4 shadowTintColor;
@property(readwrite, nonatomic) GLfloat highlightTintIntensity;
@property(readwrite, nonatomic) GPUVector4 highlightTintColor;

- (void)setShadowTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;
- (void)setHighlightTintColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;

@end
#import "GPUImageFilterGroup.h"
#import "GPUImageThresholdEdgeDetectionFilter.h"
#import "GPUImageParallelCoordinateLineTransformFilter.h"
#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
#import "GPUImageCannyEdgeDetectionFilter.h"

// This applies a Hough transform to detect lines in a scene. It starts with a thresholded Sobel edge detection pass,
// then takes those edge points in and applies a Hough transform to convert them to lines. The intersection of these lines
// is then determined via blending and accumulation, and a non-maximum suppression filter is applied to find local maxima.
// These local maxima are then converted back into lines in normal space and returned via a callback block.
//
// Rather than using one of the standard Hough transform types, this filter uses parallel coordinate space which is far more efficient
// to rasterize on a GPU.
//
// This approach is based entirely on the PC lines process developed by the Graph@FIT research group at the Brno University of Technology
// and described in their publications:
//
// M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7.
// http://medusa.fit.vutbr.cz/public/data/papers/2011-SCCG-Dubska-Real-Time-Line-Detection-Using-PC-and-OpenGL.pdf
// M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489-1494.
// http://medusa.fit.vutbr.cz/public/data/papers/2011-CVPR-Dubska-PClines.pdf

//#define DEBUGLINEDETECTION

@interface GPUImageHoughTransformLineDetector : GPUImageFilterGroup
{
    GPUImageOutput<GPUImageInput> *thresholdEdgeDetectionFilter;
//    GPUImageThresholdEdgeDetectionFilter *thresholdEdgeDetectionFilter;
    GPUImageParallelCoordinateLineTransformFilter *parallelCoordinateLineTransformFilter;
    GPUImageThresholdedNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;

    GLfloat *linesArray;
    GLubyte *rawImagePixels;
}

// A threshold value for which a point is detected as belonging to an edge for determining lines. Default is 0.9.
@property(readwrite, nonatomic) CGFloat edgeThreshold;

// A threshold value for which a local maximum is detected as belonging to a line in parallel coordinate space. Default is 0.20.
@property(readwrite, nonatomic) CGFloat lineDetectionThreshold;

// This block is called on the detection of lines, usually on every processed frame. A C array containing normalized slopes and intercepts in m, b pairs (y = mx + b) is passed in, along with a count of the number of lines detected and the current timestamp of the video frame.
@property(nonatomic, copy) void(^linesDetectedBlock)(GLfloat *lineArray, NSUInteger linesDetected, CMTime frameTime);

// These images are only enabled when built with DEBUGLINEDETECTION defined, and are used to examine the intermediate states of the Hough transform
@property(nonatomic, readonly, strong) NSMutableArray *intermediateImages;

@end
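// A hedged sketch of consuming linesDetectedBlock; per the comment above, each
// detected line occupies two consecutive floats in the array: slope m, then
// intercept b (y = mx + b), in normalized coordinates.
static void LogDetectedLines(GPUImageHoughTransformLineDetector *lineDetector)
{
    [lineDetector setLinesDetectedBlock:^(GLfloat *lineArray, NSUInteger linesDetected, CMTime frameTime) {
        for (NSUInteger i = 0; i < linesDetected; i++)
        {
            GLfloat slope     = lineArray[(i * 2)];
            GLfloat intercept = lineArray[(i * 2) + 1];
            NSLog(@"line %lu: y = %f * x + %f", (unsigned long)i, slope, intercept);
        }
    }];
}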
#import "GPUImageFilterGroup.h"

// This generates image-wide feature descriptors using the ColourFAST process, as developed and described in
//
// A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129.
//
// Seth Hall, "GPU accelerated feature algorithms for mobile devices", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014.
// http://aut.researchgateway.ac.nz/handle/10292/7991

@class GPUImageColourFASTSamplingOperation;
@class GPUImageBoxBlurFilter;

@interface GPUImageColourFASTFeatureDetector : GPUImageFilterGroup
{
    GPUImageBoxBlurFilter *blurFilter;
    GPUImageColourFASTSamplingOperation *colourFASTSamplingOperation;
}

// The blur radius of the underlying box blur. The default is 3.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

@end
#import "GPUImageFilter.h"

@interface GPUImageCGAColorspaceFilter : GPUImageFilter
@end
#import "GPUImageFilter.h"

@interface GPUImageExposureFilter : GPUImageFilter
{
    GLint exposureUniform;
}

// Exposure ranges from -10.0 to 10.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat exposure;

@end
#import "GPUImageTwoInputFilter.h"

@interface GPUImageExclusionBlendFilter : GPUImageTwoInputFilter
{
}

@end
#import "GPUImageFilter.h"

@interface GPUImageChromaKeyFilter : GPUImageFilter
{
    GLint colorToReplaceUniform, thresholdSensitivityUniform, smoothingUniform;
}

/** The threshold sensitivity controls how similar pixels need to be colored to be replaced

 The default value is 0.3
 */
@property(readwrite, nonatomic) CGFloat thresholdSensitivity;

/** The degree of smoothing controls how gradually similar colors are replaced in the image

 The default value is 0.1
 */
@property(readwrite, nonatomic) CGFloat smoothing;

/** The color to be replaced is specified using individual red, green, and blue components (normalized to 1.0).

 The default is green: (0.0, 1.0, 0.0).

 @param redComponent Red component of color to be replaced
 @param greenComponent Green component of color to be replaced
 @param blueComponent Blue component of color to be replaced
 */
- (void)setColorToReplaceRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent;

@end
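// A hedged configuration sketch for keying out a green background; the values
// below are illustrative starting points, not library recommendations.
static GPUImageChromaKeyFilter *GreenScreenKeyFilter(void)
{
    GPUImageChromaKeyFilter *chromaKey = [[GPUImageChromaKeyFilter alloc] init];
    [chromaKey setColorToReplaceRed:0.0 green:1.0 blue:0.0]; // pure green (also the default)
    chromaKey.thresholdSensitivity = 0.4; // how close a pixel must be to the key color (default 0.3)
    chromaKey.smoothing            = 0.1; // softness of the matte edge (default 0.1)
    return chromaKey;
}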
#import "GPUImageFilterGroup.h"

@class GPUImageGaussianBlurFilter;

/** A Gaussian blur that preserves focus within a circular region
 */
@interface GPUImageGaussianSelectiveBlurFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageFilter *selectiveFocusFilter;
    BOOL hasOverriddenAspectRatio;
}

/** The radius of the circular area being excluded from the blur
 */
@property (readwrite, nonatomic) CGFloat excludeCircleRadius;

/** The center of the circular area being excluded from the blur
 */
@property (readwrite, nonatomic) CGPoint excludeCirclePoint;

/** The size of the area between the blurred portion and the clear circle
 */
@property (readwrite, nonatomic) CGFloat excludeBlurSize;

/** A radius in pixels to use for the blur, with a default of 5.0. This adjusts the sigma variable in the Gaussian distribution function.
 */
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

/** The aspect ratio of the image, used to adjust the circularity of the in-focus region. By default, this matches the image aspect ratio, but you can override this value.
 */
@property (readwrite, nonatomic) CGFloat aspectRatio;

@end
#import "GPUImagePixellateFilter.h"

@interface GPUImagePolkaDotFilter : GPUImagePixellateFilter
{
    GLint dotScalingUniform;
}

@property(readwrite, nonatomic) CGFloat dotScaling;

@end
#import "GPUImageContext.h"
#import "GPUImageFramebuffer.h"

#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
#import <UIKit/UIKit.h>
#else
// For now, just redefine this on the Mac
typedef NS_ENUM(NSInteger, UIImageOrientation) {
    UIImageOrientationUp,            // default orientation
    UIImageOrientationDown,          // 180 deg rotation
    UIImageOrientationLeft,          // 90 deg CCW
    UIImageOrientationRight,         // 90 deg CW
    UIImageOrientationUpMirrored,    // as above but image mirrored along other axis. horizontal flip
    UIImageOrientationDownMirrored,  // horizontal flip
    UIImageOrientationLeftMirrored,  // vertical flip
    UIImageOrientationRightMirrored, // vertical flip
};
#endif

dispatch_queue_attr_t GPUImageDefaultQueueAttribute(void);
void runOnMainQueueWithoutDeadlocking(void (^block)(void));
void runSynchronouslyOnVideoProcessingQueue(void (^block)(void));
void runAsynchronouslyOnVideoProcessingQueue(void (^block)(void));
void runSynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
void runAsynchronouslyOnContextQueue(GPUImageContext *context, void (^block)(void));
void reportAvailableMemoryForGPUImage(NSString *tag);

@class GPUImageMovieWriter;

/** GPUImage's base source object

 Images or frames of video are uploaded from source objects, which are subclasses of GPUImageOutput. These include:

 - GPUImageVideoCamera (for live video from an iOS camera)
 - GPUImageStillCamera (for taking photos with the camera)
 - GPUImagePicture (for still images)
 - GPUImageMovie (for movies)

 Source objects upload still image frames to OpenGL ES as textures, then hand those textures off to the next objects in the processing chain.
 */
@interface GPUImageOutput : NSObject
{
    GPUImageFramebuffer *outputFramebuffer;

    NSMutableArray *targets, *targetTextureIndices;

    CGSize inputTextureSize, cachedMaximumOutputSize, forcedMaximumSize;

    BOOL overrideInputSize;

    BOOL allTargetsWantMonochromeData;
    BOOL usingNextFrameForImageCapture;
}

@property(readwrite, nonatomic) BOOL shouldSmoothlyScaleOutput;
@property(readwrite, nonatomic) BOOL shouldIgnoreUpdatesToThisTarget;
@property(readwrite, nonatomic, retain) GPUImageMovieWriter *audioEncodingTarget;
@property(readwrite, nonatomic, unsafe_unretained) id<GPUImageInput> targetToIgnoreForUpdates;
@property(nonatomic, copy) void(^frameProcessingCompletionBlock)(GPUImageOutput*, CMTime);
@property(nonatomic) BOOL enabled;
@property(readwrite, nonatomic) GPUTextureOptions outputTextureOptions;

/// @name Managing targets
- (void)setInputFramebufferForTarget:(id<GPUImageInput>)target atIndex:(NSInteger)inputTextureIndex;
- (GPUImageFramebuffer *)framebufferForOutput;
- (void)removeOutputFramebuffer;
- (void)notifyTargetsAboutNewOutputTexture;

/** Returns an array of the current targets.
 */
- (NSArray*)targets;

/** Adds a target to receive notifications when new frames are available.

 The target will be asked for its next available texture.

 See [GPUImageInput newFrameReadyAtTime:]

 @param newTarget Target to be added
 */
- (void)addTarget:(id<GPUImageInput>)newTarget;

/** Adds a target to receive notifications when new frames are available.

 See [GPUImageInput newFrameReadyAtTime:]

 @param newTarget Target to be added
 */
- (void)addTarget:(id<GPUImageInput>)newTarget atTextureLocation:(NSInteger)textureLocation;

/** Removes a target. The target will no longer receive notifications when new frames are available.

 @param targetToRemove Target to be removed
 */
- (void)removeTarget:(id<GPUImageInput>)targetToRemove;

/** Removes all targets.
 */
- (void)removeAllTargets;

/// @name Manage the output texture
- (void)forceProcessingAtSize:(CGSize)frameSize;
- (void)forceProcessingAtSizeRespectingAspectRatio:(CGSize)frameSize;

/// @name Still image processing
- (void)useNextFrameForImageCapture;
- (CGImageRef)newCGImageFromCurrentlyProcessedOutput;
- (CGImageRef)newCGImageByFilteringCGImage:(CGImageRef)imageToFilter;

// Platform-specific image output methods
// If you're trying to use these methods, remember that you need to set -useNextFrameForImageCapture before running -processImage or running video and calling any of these methods, or you will get a nil image
#if TARGET_IPHONE_SIMULATOR || TARGET_OS_IPHONE
- (UIImage *)imageFromCurrentFramebuffer;
- (UIImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (UIImage *)imageByFilteringImage:(UIImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(UIImage *)imageToFilter;
#else
- (NSImage *)imageFromCurrentFramebuffer;
- (NSImage *)imageFromCurrentFramebufferWithOrientation:(UIImageOrientation)imageOrientation;
- (NSImage *)imageByFilteringImage:(NSImage *)imageToFilter;
- (CGImageRef)newCGImageByFilteringImage:(NSImage *)imageToFilter;
#endif

- (BOOL)providesMonochromeOutput;

@end
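// The comment above the platform-specific capture methods encodes an ordering
// requirement that is easy to miss. A minimal sketch, assuming stillImageSource
// is a GPUImagePicture and sepiaFilter is the terminal filter of a chain:
static UIImage *CaptureFilteredStill(GPUImagePicture *stillImageSource, GPUImageFilter *sepiaFilter)
{
    [stillImageSource addTarget:sepiaFilter];
    [sepiaFilter useNextFrameForImageCapture]; // must precede -processImage, or the capture returns nil
    [stillImageSource processImage];
    return [sepiaFilter imageFromCurrentFramebuffer];
}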
#import "GPUImageTwoInputFilter.h"

extern NSString *const kGPUImageThreeInputTextureVertexShaderString;

@interface GPUImageThreeInputFilter : GPUImageTwoInputFilter
{
    GPUImageFramebuffer *thirdInputFramebuffer;

    GLint filterThirdTextureCoordinateAttribute;
    GLint filterInputTextureUniform3;
    GPUImageRotationMode inputRotation3;
    GLuint filterSourceTexture3;
    CMTime thirdFrameTime;

    BOOL hasSetSecondTexture, hasReceivedThirdFrame, thirdFrameWasVideo;
    BOOL thirdFrameCheckDisabled;
}

- (void)disableThirdFrameCheck;

@end
#import "GPUImageFilter.h"

/** Kuwahara image abstraction, drawn from the work of Kyprianidis et al. in their publication "Anisotropic Kuwahara Filtering on the GPU" within the GPU Pro collection. This produces an oil-painting-like image, but it is extremely computationally expensive, so it can take seconds to render a frame on an iPad 2. This might be best used for still images.
 */
@interface GPUImageKuwaharaFilter : GPUImageFilter
{
    GLint radiusUniform;
}

/// The radius to sample from when creating the brush-stroke effect, with a default of 3. The larger the radius, the slower the filter.
@property(readwrite, nonatomic) NSUInteger radius;

@end
#import "GPUImageFilter.h"

extern NSString *const kGPUImageNearbyTexelSamplingVertexShaderString;

@interface GPUImage3x3TextureSamplingFilter : GPUImageFilter
{
    GLint texelWidthUniform, texelHeightUniform;

    CGFloat texelWidth, texelHeight;
    BOOL hasOverriddenImageSizeFactor;
}

// The texel width and height determine how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects.
@property(readwrite, nonatomic) CGFloat texelWidth;
@property(readwrite, nonatomic) CGFloat texelHeight;

@end
#import "GPUImageFilterGroup.h"

@class GPUImageGaussianBlurFilter;

@interface GPUImageUnsharpMaskFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageFilter *unsharpMaskFilter;
}

// The blur radius of the underlying Gaussian blur. The default is 4.0.
@property (readwrite, nonatomic) CGFloat blurRadiusInPixels;

// The strength of the sharpening, from 0.0 on up, with a default of 1.0
@property(readwrite, nonatomic) CGFloat intensity;

@end
#import "GPUImageFilter.h"

/** This reduces the color dynamic range into the number of steps specified, leading to a cartoon-like simple shading of the image.
 */
@interface GPUImagePosterizeFilter : GPUImageFilter
{
    GLint colorLevelsUniform;
}

/** The number of color levels to reduce the image space to. This ranges from 1 to 256, with a default of 10.
 */
@property(readwrite, nonatomic) NSUInteger colorLevels;

@end
#import "GPUImageOutput.h"

@interface GPUImageTextureInput : GPUImageOutput
{
    CGSize textureSize;
}

// Initialization and teardown
- (id)initWithTexture:(GLuint)newInputTexture size:(CGSize)newTextureSize;

// Image rendering
- (void)processTextureWithFrameTime:(CMTime)frameTime;

@end
#import "GPUImageTwoInputFilter.h"

/** Applies a color burn blend of two images
 */
@interface GPUImageColorBurnBlendFilter : GPUImageTwoInputFilter
{
}

@end
#import "GPUImageFilter.h"

@interface GPUImageBrightnessFilter : GPUImageFilter
{
    GLint brightnessUniform;
}

// Brightness ranges from -1.0 to 1.0, with 0.0 as the normal level
@property(readwrite, nonatomic) CGFloat brightness;

@end
#import "GPUImageTwoInputFilter.h"

@interface GPUImageDarkenBlendFilter : GPUImageTwoInputFilter
{
}

@end
#import "GPUImageTwoPassTextureSamplingFilter.h"

// For each pixel, this sets it to the minimum value of the red channel in a rectangular neighborhood extending out erosionRadius pixels from the center.
// This extends out dark features, and is most commonly used with black-and-white thresholded images.

@interface GPUImageErosionFilter : GPUImageTwoPassTextureSamplingFilter

// Acceptable values for erosionRadius, which sets the distance in pixels to sample out from the center, are 1, 2, 3, and 4.
- (id)initWithRadius:(NSUInteger)erosionRadius;

@end
#import "GPUImageTwoInputFilter.h"

@interface GPUImageAddBlendFilter : GPUImageTwoInputFilter
@end
#import "GPUImageFilter.h"

/**
 * Created by Alaric Cole
 * Allows adjustment of color temperature in terms of what an image was effectively shot in. This means higher Kelvin values will warm the image, while lower values will cool it.
 */
@interface GPUImageWhiteBalanceFilter : GPUImageFilter
{
    GLint temperatureUniform, tintUniform;
}

// choose color temperature, in degrees Kelvin
@property(readwrite, nonatomic) CGFloat temperature;

// adjust tint to compensate
@property(readwrite, nonatomic) CGFloat tint;

@end
#import "GPUImageFilter.h"

@interface GPUImageJFAVoronoiFilter : GPUImageFilter
{
    GLuint secondFilterOutputTexture;
    GLuint secondFilterFramebuffer;

    GLint sampleStepUniform;
    GLint sizeUniform;
    NSUInteger numPasses;
}

@property (nonatomic, readwrite) CGSize sizeInPixels;

@end
#import "GPUImageFilter.h"

/*
 * The haze filter can be used to add or remove haze (similar to a UV filter)
 *
 * @author Alaric Cole
 * @creationDate 03/10/12
 *
 */

/** The haze filter can be used to add or remove haze

 This is similar to a UV filter
 */
@interface GPUImageHazeFilter : GPUImageFilter
{
    GLint distanceUniform;
    GLint slopeUniform;
}

/** Strength of the color applied. Default 0. Values between -.3 and .3 are best
 */
@property(readwrite, nonatomic) CGFloat distance;

/** Amount of color change. Default 0. Values between -.3 and .3 are best
 */
@property(readwrite, nonatomic) CGFloat slope;

@end
#import "GPUImageTwoInputFilter.h"

@interface GPUImageVoronoiConsumerFilter : GPUImageTwoInputFilter
{
    GLint sizeUniform;
}

@property (nonatomic, readwrite) CGSize sizeInPixels;

@end
#import "GPUImageFilter.h"

/** Performs a vignetting effect, fading out the image at the edges
 */
@interface GPUImageVignetteFilter : GPUImageFilter
{
    GLint vignetteCenterUniform, vignetteColorUniform, vignetteStartUniform, vignetteEndUniform;
}

// The center for the vignette in tex coords (defaults to 0.5, 0.5)
@property (nonatomic, readwrite) CGPoint vignetteCenter;

// The color to use for the vignette (defaults to black)
@property (nonatomic, readwrite) GPUVector3 vignetteColor;

// The normalized distance from the center where the vignette effect starts. Default of 0.5.
@property (nonatomic, readwrite) CGFloat vignetteStart;

// The normalized distance from the center where the vignette effect ends. Default of 0.75.
@property (nonatomic, readwrite) CGFloat vignetteEnd;

@end
#import "GPUImage3x3TextureSamplingFilter.h"

/** Runs a 3x3 convolution kernel against the image
 */
@interface GPUImage3x3ConvolutionFilter : GPUImage3x3TextureSamplingFilter
{
    GLint convolutionMatrixUniform;
}

/** Convolution kernel to run against the image

 The convolution kernel is a 3x3 matrix of values to apply to the pixel and its 8 surrounding pixels. The matrix is specified in row-major order, with the top left element being one.one and the bottom right being three.three. If the values in the matrix don't add up to 1.0, the image could be brightened or darkened.
 */
@property(readwrite, nonatomic) GPUMatrix3x3 convolutionKernel;

@end
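// A hedged sketch of loading a kernel; it assumes GPUMatrix3x3 can be
// brace-initialized row by row, as its declaration in GPUImageFilter.h suggests.
// This standard sharpen kernel sums to 1.0, so overall brightness is preserved.
static GPUImage3x3ConvolutionFilter *SharpenConvolutionFilter(void)
{
    GPUImage3x3ConvolutionFilter *convolution = [[GPUImage3x3ConvolutionFilter alloc] init];
    GPUMatrix3x3 sharpenKernel = {
        {  0.0f, -1.0f,  0.0f },
        { -1.0f,  5.0f, -1.0f },
        {  0.0f, -1.0f,  0.0f }
    };
    convolution.convolutionKernel = sharpenKernel;
    return convolution;
}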
#import "GPUImagePixellateFilter.h"

@interface GPUImageHalftoneFilter : GPUImagePixellateFilter
@end
#import "GPUImageAverageColor.h"

@interface GPUImageLuminosity : GPUImageAverageColor
{
    GLProgram *secondFilterProgram;
    GLint secondFilterPositionAttribute, secondFilterTextureCoordinateAttribute;
    GLint secondFilterInputTextureUniform, secondFilterInputTextureUniform2;
    GLint secondFilterTexelWidthUniform, secondFilterTexelHeightUniform;
}

// This block is called on the completion of color averaging for a frame
@property(nonatomic, copy) void(^luminosityProcessingFinishedBlock)(CGFloat luminosity, CMTime frameTime);

- (void)extractLuminosityAtFrameTime:(CMTime)frameTime;
- (void)initializeSecondaryAttributes;

@end
#import "GPUImageFilter.h"

/** Creates a pinch distortion of the image
 */
@interface GPUImagePinchDistortionFilter : GPUImageFilter
{
    GLint aspectRatioUniform, radiusUniform, centerUniform, scaleUniform;
}

/** The center about which to apply the distortion, with a default of (0.5, 0.5)
 */
@property(readwrite, nonatomic) CGPoint center;

/** The radius of the distortion, ranging from 0.0 to 2.0, with a default of 1.0
 */
@property(readwrite, nonatomic) CGFloat radius;

/** The amount of distortion to apply, from -2.0 to 2.0, with a default of 0.5
 */
@property(readwrite, nonatomic) CGFloat scale;

@end
#import "GPUImageSobelEdgeDetectionFilter.h"

/** Converts video to look like a sketch.

 This is just the Sobel edge detection filter with the colors inverted.
 */
@interface GPUImageSketchFilter : GPUImageSobelEdgeDetectionFilter
{
}

@end
//
//  GPUImageVibranceFilter.h
//
//
//  Created by github.com/r3mus on 8/14/15.
//
//

#import "GPUImageFilter.h"

@interface GPUImageVibranceFilter : GPUImageFilter
{
    GLint vibranceUniform;
}

// Modifies the saturation of desaturated colors, leaving saturated colors unmodified.
// Value -1 to 1 (-1 is minimum vibrance, 0 is no change, and 1 is maximum vibrance)
@property (readwrite, nonatomic) GLfloat vibrance;

@end
#import "GLProgram.h"
#import "GPUImageFramebuffer.h"
#import "GPUImageFramebufferCache.h"

#define GPUImageRotationSwapsWidthAndHeight(rotation) ((rotation) == kGPUImageRotateLeft || (rotation) == kGPUImageRotateRight || (rotation) == kGPUImageRotateRightFlipVertical || (rotation) == kGPUImageRotateRightFlipHorizontal)

typedef NS_ENUM(NSUInteger, GPUImageRotationMode) {
    kGPUImageNoRotation,
    kGPUImageRotateLeft,
    kGPUImageRotateRight,
    kGPUImageFlipVertical,
    kGPUImageFlipHorizonal,
    kGPUImageRotateRightFlipVertical,
    kGPUImageRotateRightFlipHorizontal,
    kGPUImageRotate180
};

@interface GPUImageContext : NSObject

@property(readonly, nonatomic) dispatch_queue_t contextQueue;
@property(readwrite, retain, nonatomic) GLProgram *currentShaderProgram;
@property(readonly, retain, nonatomic) EAGLContext *context;
@property(readonly) CVOpenGLESTextureCacheRef coreVideoTextureCache;
@property(readonly) GPUImageFramebufferCache *framebufferCache;

+ (void *)contextKey;
+ (GPUImageContext *)sharedImageProcessingContext;
+ (dispatch_queue_t)sharedContextQueue;
+ (GPUImageFramebufferCache *)sharedFramebufferCache;
+ (void)useImageProcessingContext;
- (void)useAsCurrentContext;
+ (void)setActiveShaderProgram:(GLProgram *)shaderProgram;
- (void)setContextShaderProgram:(GLProgram *)shaderProgram;
+ (GLint)maximumTextureSizeForThisDevice;
+ (GLint)maximumTextureUnitsForThisDevice;
+ (GLint)maximumVaryingVectorsForThisDevice;
+ (BOOL)deviceSupportsOpenGLESExtension:(NSString *)extension;
+ (BOOL)deviceSupportsRedTextures;
+ (BOOL)deviceSupportsFramebufferReads;
+ (CGSize)sizeThatFitsWithinATextureForSize:(CGSize)inputSize;

- (void)presentBufferForDisplay;
- (GLProgram *)programForVertexShaderString:(NSString *)vertexShaderString fragmentShaderString:(NSString *)fragmentShaderString;

- (void)useSharegroup:(EAGLSharegroup *)sharegroup;

// Manage fast texture upload
+ (BOOL)supportsFastTextureUpload;

@end

@protocol GPUImageInput <NSObject>
- (void)newFrameReadyAtTime:(CMTime)frameTime atIndex:(NSInteger)textureIndex;
- (void)setInputFramebuffer:(GPUImageFramebuffer *)newInputFramebuffer atIndex:(NSInteger)textureIndex;
- (NSInteger)nextAvailableTextureIndex;
- (void)setInputSize:(CGSize)newSize atIndex:(NSInteger)textureIndex;
- (void)setInputRotation:(GPUImageRotationMode)newInputRotation atIndex:(NSInteger)textureIndex;
- (CGSize)maximumOutputSize;
- (void)endProcessing;
- (BOOL)shouldIgnoreUpdatesToThisTarget;
- (BOOL)enabled;
- (BOOL)wantsMonochromeInput;
- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;
@end
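// The GPUImageRotationSwapsWidthAndHeight macro above answers a recurring question
// when sizing output buffers: whether a rotation mode transposes the frame.
// A small sketch built on it; outputSizeForRotation is a hypothetical helper,
// not part of GPUImage.
static CGSize outputSizeForRotation(CGSize inputSize, GPUImageRotationMode rotation)
{
    if (GPUImageRotationSwapsWidthAndHeight(rotation))
    {
        // 90-degree rotations (left, right, and their flipped variants) exchange the axes.
        return CGSizeMake(inputSize.height, inputSize.width);
    }
    return inputSize;
}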
#import "GPUImageFilter.h"

/** Pixels with a luminance above the threshold will invert their color
 */
@interface GPUImageSolarizeFilter : GPUImageFilter
{
    GLint thresholdUniform;
}

/** Anything above this luminance will be inverted, and anything below will remain unchanged. Ranges from 0.0 to 1.0, with 0.5 as the default
 */
@property(readwrite, nonatomic) CGFloat threshold;

@end
#import "GPUImageGaussianBlurFilter.h"

// This filter merely performs the standard Gaussian blur on the red color channel (assuming a luminance image)

@interface GPUImageSingleComponentGaussianBlurFilter : GPUImageGaussianBlurFilter
@end
#import "GPUImageFilter.h"

@interface GPUImageSphereRefractionFilter : GPUImageFilter
{
    GLint radiusUniform, centerUniform, aspectRatioUniform, refractiveIndexUniform;
}

/// The center about which to apply the distortion, with a default of (0.5, 0.5)
@property(readwrite, nonatomic) CGPoint center;

/// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.25
@property(readwrite, nonatomic) CGFloat radius;

/// The index of refraction for the sphere, with a default of 0.71
@property(readwrite, nonatomic) CGFloat refractiveIndex;

@end
#import "GPUImageFilter.h"

/** Creates a stretch distortion of the image
 */
@interface GPUImageStretchDistortionFilter : GPUImageFilter
{
    GLint centerUniform;
}

/** The center about which to apply the distortion, with a default of (0.5, 0.5)
 */
@property(readwrite, nonatomic) CGPoint center;

@end
#import "GPUImageFilterGroup.h"

@class GPUImageGrayscaleFilter;
@class GPUImage3x3TextureSamplingFilter;
@class GPUImageNonMaximumSuppressionFilter;

/*
 An implementation of the Features from Accelerated Segment Test (FAST) feature detector as described in the following publications:

 E. Rosten and T. Drummond. Fusing points and lines for high performance tracking. IEEE International Conference on Computer Vision, 2005.
 E. Rosten and T. Drummond. Machine learning for high-speed corner detection. European Conference on Computer Vision, 2006.

 For more about the FAST feature detector, see the resources here:
 http://www.edwardrosten.com/work/fast.html
 */

typedef enum {
    kGPUImageFAST12Contiguous,
    kGPUImageFAST12ContiguousNonMaximumSuppressed
} GPUImageFASTDetectorType;

@interface GPUImageFASTCornerDetectionFilter : GPUImageFilterGroup
{
    GPUImageGrayscaleFilter *luminanceReductionFilter;
    GPUImage3x3TextureSamplingFilter *featureDetectionFilter;
    GPUImageNonMaximumSuppressionFilter *nonMaximumSuppressionFilter;
    // Generate a lookup texture based on the bit patterns

    // Step 1: convert to monochrome if necessary
    // Step 2: do a lookup at each pixel based on the Bresenham circle, encode comparison in two color components
    // Step 3: do non-maximum suppression of close corner points
}

- (id)initWithFASTDetectorVariant:(GPUImageFASTDetectorType)detectorType;

@end
#import "GPUImageFilterGroup.h"

@class GPUImageGaussianBlurFilter;

/// A simulated tilt shift lens effect
@interface GPUImageTiltShiftFilter : GPUImageFilterGroup
{
    GPUImageGaussianBlurFilter *blurFilter;
    GPUImageFilter *tiltShiftFilter;
}

/// The radius of the underlying blur, in pixels. This is 7.0 by default.
@property(readwrite, nonatomic) CGFloat blurRadiusInPixels;

/// The normalized location of the top of the in-focus area in the image; this value should be lower than bottomFocusLevel, default 0.4
@property(readwrite, nonatomic) CGFloat topFocusLevel;

/// The normalized location of the bottom of the in-focus area in the image; this value should be higher than topFocusLevel, default 0.6
@property(readwrite, nonatomic) CGFloat bottomFocusLevel;

/// The rate at which the image gets blurry away from the in-focus region, default 0.2
@property(readwrite, nonatomic) CGFloat focusFallOffRate;

@end
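// A hedged configuration sketch for a miniature-faking look; the values are illustrative.
static GPUImageTiltShiftFilter *MiniatureTiltShiftFilter(void)
{
    GPUImageTiltShiftFilter *tiltShift = [[GPUImageTiltShiftFilter alloc] init];
    tiltShift.blurRadiusInPixels = 7.0;
    tiltShift.topFocusLevel      = 0.4; // keep the band from 40% ...
    tiltShift.bottomFocusLevel   = 0.6; // ... to 60% of the image height in focus
    tiltShift.focusFallOffRate   = 0.2; // how quickly blur ramps up outside the band
    return tiltShift;
}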
#import <Foundation/Foundation.h>

#import "GPUImageContext.h"

@protocol GPUImageTextureOutputDelegate;

@interface GPUImageTextureOutput : NSObject <GPUImageInput>
{
    GPUImageFramebuffer *firstInputFramebuffer;
}

@property(readwrite, unsafe_unretained, nonatomic) id<GPUImageTextureOutputDelegate> delegate;
@property(readonly) GLuint texture;
@property(nonatomic) BOOL enabled;

- (void)doneWithTexture;

@end

@protocol GPUImageTextureOutputDelegate
- (void)newFrameReadyFromTextureOutput:(GPUImageTextureOutput *)callbackTextureOutput;
@end
#import "GPUImageFilter.h"

@interface GPUImageHistogramGenerator : GPUImageFilter
{
    GLint backgroundColorUniform;
}

@end
#import "GPUImageColorMatrixFilter.h"

@interface GPUImageHSBFilter : GPUImageColorMatrixFilter

/** Reset the filter to have no transformations.
 */
- (void)reset;

/** Add a hue rotation to the filter.
 The hue rotation is in the range [-360, 360] with 0 being no-change.
 Note that this adjustment is additive, so use the reset method if you need to.
 */
- (void)rotateHue:(float)h;

/** Add a saturation adjustment to the filter.
 The saturation adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
 Note that this adjustment is additive, so use the reset method if you need to.
 */
- (void)adjustSaturation:(float)s;

/** Add a brightness adjustment to the filter.
 The brightness adjustment is in the range [0.0, 2.0] with 1.0 being no-change.
 Note that this adjustment is additive, so use the reset method if you need to.
 */
- (void)adjustBrightness:(float)b;

@end
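// Because the adjustments above are additive on the underlying color matrix,
// stale state must be cleared explicitly. A minimal sketch with illustrative values:
static GPUImageHSBFilter *ConfiguredHSBFilter(void)
{
    GPUImageHSBFilter *hsbFilter = [[GPUImageHSBFilter alloc] init];
    [hsbFilter reset];                 // drop any previously accumulated adjustments
    [hsbFilter rotateHue:45.0];        // degrees, in [-360, 360]
    [hsbFilter adjustSaturation:1.3];  // [0.0, 2.0]; 1.0 means unchanged
    [hsbFilter adjustBrightness:0.9];  // [0.0, 2.0]; 1.0 means unchanged
    return hsbFilter;
}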
#import "GPUImageFilter.h"

@interface GPUImageZoomBlurFilter : GPUImageFilter

/** A multiplier for the blur size, ranging from 0.0 on up, with a default of 1.0
 */
@property (readwrite, nonatomic) CGFloat blurSize;

/** The normalized center of the blur. (0.5, 0.5) by default
 */
@property (readwrite, nonatomic) CGPoint blurCenter;

@end
#import <UIKit/UIKit.h>

#import "GPUImageContext.h"

typedef NS_ENUM(NSUInteger, GPUImageFillModeType) {
    kGPUImageFillModeStretch,                   // Stretch to fill the full view, which may distort the image outside of its normal aspect ratio
    kGPUImageFillModePreserveAspectRatio,       // Maintains the aspect ratio of the source image, adding bars of the specified background color
    kGPUImageFillModePreserveAspectRatioAndFill // Maintains the aspect ratio of the source image, zooming in on its center to fill the view
};

/**
 UIView subclass to use as an endpoint for displaying GPUImage outputs
 */
@interface GPUImageView : UIView <GPUImageInput>
{
    GPUImageRotationMode inputRotation;
}

/** The fill mode dictates how images are fit in the view, with the default being kGPUImageFillModePreserveAspectRatio
 */
@property(readwrite, nonatomic) GPUImageFillModeType fillMode;

/** This calculates the current display size, in pixels, taking into account Retina scaling factors
 */
@property(readonly, nonatomic) CGSize sizeInPixels;

@property(nonatomic) BOOL enabled;

/** Handling fill mode

 @param redComponent Red component for background color
 @param greenComponent Green component for background color
 @param blueComponent Blue component for background color
 @param alphaComponent Alpha component for background color
 */
- (void)setBackgroundColorRed:(GLfloat)redComponent green:(GLfloat)greenComponent blue:(GLfloat)blueComponent alpha:(GLfloat)alphaComponent;

- (void)setCurrentlyReceivingMonochromeInput:(BOOL)newValue;

@end
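// A hedged sketch of wiring a GPUImageView as the display endpoint of a chain;
// parentView and lastFilter stand in for an existing view hierarchy and filter chain.
static GPUImageView *AttachPreviewView(UIView *parentView, GPUImageOutput *lastFilter)
{
    GPUImageView *previewView = [[GPUImageView alloc] initWithFrame:parentView.bounds];
    previewView.fillMode = kGPUImageFillModePreserveAspectRatioAndFill;    // zoom to fill, keep aspect
    [previewView setBackgroundColorRed:0.0 green:0.0 blue:0.0 alpha:1.0];  // color of any letterbox bars
    [parentView addSubview:previewView];
    [lastFilter addTarget:previewView];
    return previewView;
}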
#import "GPUImageTwoInputFilter.h"

@interface GPUImageLuminosityBlendFilter : GPUImageTwoInputFilter
@end
#import "GPUImage3x3ConvolutionFilter.h"

@interface GPUImageLaplacianFilter : GPUImage3x3ConvolutionFilter
@end
#import "GPUImageFilter.h"

/** Transforms the colors of an image by applying a matrix to them
 */
@interface GPUImageColorMatrixFilter : GPUImageFilter
{
    GLint colorMatrixUniform;
    GLint intensityUniform;
}

/** A 4x4 matrix used to transform each color in an image
 */
@property(readwrite, nonatomic) GPUMatrix4x4 colorMatrix;

/** The degree to which the new transformed color replaces the original color for each pixel
 */
@property(readwrite, nonatomic) CGFloat intensity;

@end
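// A hedged sketch of a channel-swapping matrix; it assumes GPUMatrix4x4 is
// brace-initializable row by row, like GPUMatrix3x3. This permutation matrix is
// symmetric, so it behaves the same under either row-vector or column-vector
// multiplication in the shader.
static GPUImageColorMatrixFilter *SwapRedAndBlueFilter(void)
{
    GPUImageColorMatrixFilter *colorMatrix = [[GPUImageColorMatrixFilter alloc] init];
    GPUMatrix4x4 swapRedBlue = {
        { 0.0f, 0.0f, 1.0f, 0.0f }, // R and B exchanged
        { 0.0f, 1.0f, 0.0f, 0.0f }, // G passes through
        { 1.0f, 0.0f, 0.0f, 0.0f },
        { 0.0f, 0.0f, 0.0f, 1.0f }  // alpha passes through
    };
    colorMatrix.colorMatrix = swapRedBlue;
    colorMatrix.intensity   = 1.0;  // fully replace the original colors
    return colorMatrix;
}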
#import "GPUImageOutput.h"

@interface GPUImageUIElement : GPUImageOutput

// Initialization and teardown
- (id)initWithView:(UIView *)inputView;
- (id)initWithLayer:(CALayer *)inputLayer;

// Layer management
- (CGSize)layerSizeInPixels;
- (void)update;
- (void)updateUsingCurrentTime;
- (void)updateWithTimestamp:(CMTime)frameTime;

@end
#import "GLProgram.h"

// Base classes
#import "GPUImageContext.h"
#import "GPUImageOutput.h"
#import "GPUImageView.h"
#import "GPUImageVideoCamera.h"
#import "GPUImageStillCamera.h"
#import "GPUImageMovie.h"
#import "GPUImagePicture.h"
#import "GPUImageRawDataInput.h"
#import "GPUImageRawDataOutput.h"
#import "GPUImageMovieWriter.h"
#import "GPUImageFilterPipeline.h"
#import "GPUImageTextureOutput.h"
#import "GPUImageFilterGroup.h"
#import "GPUImageTextureInput.h"
#import "GPUImageUIElement.h"
#import "GPUImageBuffer.h"
#import "GPUImageFramebuffer.h"
#import "GPUImageFramebufferCache.h"

// Filters
#import "GPUImageFilter.h"
#import "GPUImageTwoInputFilter.h"
#import "GPUImagePixellateFilter.h"
#import "GPUImagePixellatePositionFilter.h"
#import "GPUImageSepiaFilter.h"
#import "GPUImageColorInvertFilter.h"
#import "GPUImageSaturationFilter.h"
#import "GPUImageContrastFilter.h"
#import "GPUImageExposureFilter.h"
#import "GPUImageBrightnessFilter.h"
#import "GPUImageLevelsFilter.h"
#import "GPUImageSharpenFilter.h"
#import "GPUImageGammaFilter.h"
#import "GPUImageSobelEdgeDetectionFilter.h"
#import "GPUImageSketchFilter.h"
#import "GPUImageToonFilter.h"
#import "GPUImageSmoothToonFilter.h"
#import "GPUImageMultiplyBlendFilter.h"
#import "GPUImageDissolveBlendFilter.h"
#import "GPUImageKuwaharaFilter.h"
#import "GPUImageKuwaharaRadius3Filter.h"
#import "GPUImageVignetteFilter.h"
#import "GPUImageGaussianBlurFilter.h"
#import "GPUImageGaussianBlurPositionFilter.h"
#import "GPUImageGaussianSelectiveBlurFilter.h"
#import "GPUImageOverlayBlendFilter.h"
#import "GPUImageDarkenBlendFilter.h"
#import "GPUImageLightenBlendFilter.h"
#import "GPUImageSwirlFilter.h"
#import "GPUImageSourceOverBlendFilter.h"
#import "GPUImageColorBurnBlendFilter.h"
#import "GPUImageColorDodgeBlendFilter.h"
#import "GPUImageScreenBlendFilter.h"
#import "GPUImageExclusionBlendFilter.h"
#import "GPUImageDifferenceBlendFilter.h"
#import "GPUImageSubtractBlendFilter.h"
#import "GPUImageHardLightBlendFilter.h"
#import "GPUImageSoftLightBlendFilter.h"
#import "GPUImageColorBlendFilter.h"
#import "GPUImageHueBlendFilter.h"
#import "GPUImageSaturationBlendFilter.h"
#import "GPUImageLuminosityBlendFilter.h"
#import "GPUImageCropFilter.h"
#import "GPUImageGrayscaleFilter.h"
#import "GPUImageTransformFilter.h"
#import "GPUImageChromaKeyBlendFilter.h"
#import "GPUImageHazeFilter.h"
#import "GPUImageLuminanceThresholdFilter.h"
#import "GPUImagePosterizeFilter.h"
#import "GPUImageBoxBlurFilter.h"
#import "GPUImageAdaptiveThresholdFilter.h"
#import "GPUImageSolarizeFilter.h"
#import "GPUImageUnsharpMaskFilter.h"
#import "GPUImageBulgeDistortionFilter.h"
#import "GPUImagePinchDistortionFilter.h"
#import "GPUImageCrosshatchFilter.h"
#import "GPUImageCGAColorspaceFilter.h"
#import "GPUImagePolarPixellateFilter.h"
#import "GPUImageStretchDistortionFilter.h"
#import "GPUImagePerlinNoiseFilter.h"
#import "GPUImageJFAVoronoiFilter.h"
#import "GPUImageVoronoiConsumerFilter.h"
#import "GPUImageMosaicFilter.h"
#import "GPUImageTiltShiftFilter.h"
#import "GPUImage3x3ConvolutionFilter.h"
#import "GPUImageEmbossFilter.h"
#import "GPUImageCannyEdgeDetectionFilter.h"
#import "GPUImageThresholdEdgeDetectionFilter.h"
#import "GPUImageMaskFilter.h"
#import "GPUImageHistogramFilter.h"
#import "GPUImageHistogramGenerator.h"
#import "GPUImageHistogramEqualizationFilter.h"
#import "GPUImagePrewittEdgeDetectionFilter.h"
#import "GPUImageXYDerivativeFilter.h"
#import "GPUImageHarrisCornerDetectionFilter.h"
#import "GPUImageAlphaBlendFilter.h"
#import "GPUImageNormalBlendFilter.h"
#import "GPUImageNonMaximumSuppressionFilter.h"
#import "GPUImageRGBFilter.h"
#import "GPUImageMedianFilter.h"
#import "GPUImageBilateralFilter.h"
#import "GPUImageCrosshairGenerator.h"
#import "GPUImageToneCurveFilter.h"
#import "GPUImageNobleCornerDetectionFilter.h"
#import "GPUImageShiTomasiFeatureDetectionFilter.h"
#import "GPUImageErosionFilter.h"
#import "GPUImageRGBErosionFilter.h"
#import "GPUImageDilationFilter.h"
#import "GPUImageRGBDilationFilter.h"
#import "GPUImageOpeningFilter.h"
#import "GPUImageRGBOpeningFilter.h"
#import "GPUImageClosingFilter.h"
#import "GPUImageRGBClosingFilter.h"
#import "GPUImageColorPackingFilter.h"
#import "GPUImageSphereRefractionFilter.h"
#import "GPUImageMonochromeFilter.h"
#import "GPUImageOpacityFilter.h"
#import "GPUImageHighlightShadowFilter.h"
#import "GPUImageFalseColorFilter.h"
#import "GPUImageHSBFilter.h"
#import "GPUImageHueFilter.h"
#import "GPUImageGlassSphereFilter.h"
#import "GPUImageLookupFilter.h"
#import "GPUImageAmatorkaFilter.h"
#import "GPUImageMissEtikateFilter.h"
#import "GPUImageSoftEleganceFilter.h"
#import "GPUImageAddBlendFilter.h"
#import "GPUImageDivideBlendFilter.h"
#import "GPUImagePolkaDotFilter.h"
#import "GPUImageLocalBinaryPatternFilter.h"
#import "GPUImageColorLocalBinaryPatternFilter.h"
#import "GPUImageLanczosResamplingFilter.h"
#import "GPUImageAverageColor.h"
#import "GPUImageSolidColorGenerator.h"
#import "GPUImageLuminosity.h"
#import "GPUImageAverageLuminanceThresholdFilter.h"
#import "GPUImageWhiteBalanceFilter.h"
#import "GPUImageChromaKeyFilter.h"
#import "GPUImageLowPassFilter.h"
#import "GPUImageHighPassFilter.h"
#import "GPUImageMotionDetector.h"
#import "GPUImageHalftoneFilter.h"
#import "GPUImageThresholdedNonMaximumSuppressionFilter.h"
#import "GPUImageHoughTransformLineDetector.h"
#import "GPUImageParallelCoordinateLineTransformFilter.h"
#import "GPUImageThresholdSketchFilter.h"
#import "GPUImageLineGenerator.h"
#import "GPUImageLinearBurnBlendFilter.h"
#import "GPUImageGaussianBlurPositionFilter.h"
#import "GPUImagePixellatePositionFilter.h"
#import "GPUImageTwoInputCrossTextureSamplingFilter.h"
#import "GPUImagePoissonBlendFilter.h"
#import "GPUImageMotionBlurFilter.h"
#import "GPUImageZoomBlurFilter.h"
#import "GPUImageLaplacianFilter.h"
#import "GPUImageiOSBlurFilter.h"
#import "GPUImageLuminanceRangeFilter.h"
#import "GPUImageDirectionalNonMaximumSuppressionFilter.h"
#import "GPUImageDirectionalSobelEdgeDetectionFilter.h"
#import "GPUImageSingleComponentGaussianBlurFilter.h"
#import "GPUImageThreeInputFilter.h"
#import "GPUImageFourInputFilter.h"
#import "GPUImageWeakPixelInclusionFilter.h"
#import "GPUImageColorConversion.h"
#import "GPUImageColourFASTFeatureDetector.h"
#import "GPUImageColourFASTSamplingOperation.h"
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
//
// GPUImageSkinToneFilter.h
//
//
// Created by github.com/r3mus on 8/14/15.
//
//

#import "GPUImageTwoInputFilter.h"

typedef NS_ENUM(NSUInteger, GPUImageSkinToneUpperColor) {
    GPUImageSkinToneUpperColorGreen,
    GPUImageSkinToneUpperColorOrange
};

extern NSString *const kGPUImageSkinToneFragmentShaderString;

@interface GPUImageSkinToneFilter : GPUImageFilter
{
    GLint skinToneAdjustUniform;
    GLint skinHueUniform;
    GLint skinHueThresholdUniform;
    GLint maxHueShiftUniform;
    GLint maxSaturationShiftUniform;
    GLint upperSkinToneColorUniform;
}

// The amount of effect to apply, between -1.0 (pink) and +1.0 (orange OR green). Default is 0.0.
@property (nonatomic, readwrite) CGFloat skinToneAdjust;

// The initial hue of skin to adjust. Default is 0.05 (a common skin red).
@property (nonatomic, readwrite) CGFloat skinHue;

// The bell curve "breadth" of the skin hue adjustment (i.e. how far from the original skinHue the modifications will reach).
// Default is 40.0
@property (nonatomic, readwrite) CGFloat skinHueThreshold;

// The maximum amount of hue shift allowed in the adjustments that affect hue (pink, green). Default = 0.25.
@property (nonatomic, readwrite) CGFloat maxHueShift;

// The maximum amount of saturation shift allowed in the adjustments that affect saturation (orange). Default = 0.4.
@property (nonatomic, readwrite) CGFloat maxSaturationShift;

// Defines whether the upper range (> 0.0) will change the skin tone to green (hue) or orange (saturation)
@property (nonatomic, readwrite) GPUImageSkinToneUpperColor upperSkinToneColor;

@end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilterGroup.h" @class GPUImagePicture; /** A photo filter based on Soft Elegance Photoshop action http://h-d-stock.deviantart.com/art/H-D-A-soft-elegance-70107603 */ // Note: If you want to use this effect you have to add // lookup_soft_elegance_1.png and lookup_soft_elegance_2.png // from Resources folder to your application bundle. @interface GPUImageSoftEleganceFilter : GPUImageFilterGroup { GPUImagePicture *lookupImageSource1; GPUImagePicture *lookupImageSource2; } @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImage3x3TextureSamplingFilter.h" /** This uses Sobel edge detection to place a black border around objects, and then it quantizes the colors present in the image to give a cartoon-like quality to the image. */ @interface GPUImageToonFilter : GPUImage3x3TextureSamplingFilter { GLint thresholdUniform, quantizationLevelsUniform; } /** The threshold at which to apply the edges, default of 0.2 */ @property(readwrite, nonatomic) CGFloat threshold; /** The levels of quantization for the posterization of colors within the scene, with a default of 10.0 */ @property(readwrite, nonatomic) CGFloat quantizationLevels; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageColorMatrixFilter.h" /// Simple sepia tone filter @interface GPUImageSepiaFilter : GPUImageColorMatrixFilter @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageTwoInputFilter.h" /** Applies a color dodge blend of two images */ @interface GPUImageColorDodgeBlendFilter : GPUImageTwoInputFilter { } @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" @interface GPUImageHueFilter : GPUImageFilter { GLint hueAdjustUniform; } @property (nonatomic, readwrite) CGFloat hue; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilterGroup.h" @class GPUImagePicture; /** A photo filter based on Photoshop action by Miss Etikate: http://miss-etikate.deviantart.com/art/Photoshop-Action-15-120151961 */ // Note: If you want to use this effect you have to add lookup_miss_etikate.png // from Resources folder to your application bundle. @interface GPUImageMissEtikateFilter : GPUImageFilterGroup { GPUImagePicture *lookupImageSource; } @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilterGroup.h" @interface GPUImageAdaptiveThresholdFilter : GPUImageFilterGroup /** A multiplier for the background averaging blur radius in pixels, with a default of 4 */ @property(readwrite, nonatomic) CGFloat blurRadiusInPixels; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageTwoInputFilter.h" // This is the feature extraction phase of the ColourFAST feature detector, as described in: // // A. Ensor and S. Hall. ColourFAST: GPU-based feature point detection and tracking on mobile devices. 28th International Conference of Image and Vision Computing, New Zealand, 2013, p. 124-129. // // Seth Hall, "GPU accelerated feature algorithms for mobile devices", PhD thesis, School of Computing and Mathematical Sciences, Auckland University of Technology 2014. // http://aut.researchgateway.ac.nz/handle/10292/7991 @interface GPUImageColourFASTSamplingOperation : GPUImageTwoInputFilter { GLint texelWidthUniform, texelHeightUniform; CGFloat texelWidth, texelHeight; BOOL hasOverriddenImageSizeFactor; } // The texel width and height determines how far out to sample from this texel. By default, this is the normalized width of a pixel, but this can be overridden for different effects. @property(readwrite, nonatomic) CGFloat texelWidth; @property(readwrite, nonatomic) CGFloat texelHeight; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" @interface GPUImageBuffer : GPUImageFilter { NSMutableArray *bufferedFramebuffers; } @property(readwrite, nonatomic) NSUInteger bufferSize; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" /** Creates a swirl distortion on the image */ @interface GPUImageSwirlFilter : GPUImageFilter { GLint radiusUniform, centerUniform, angleUniform; } /// The center about which to apply the distortion, with a default of (0.5, 0.5) @property(readwrite, nonatomic) CGPoint center; /// The radius of the distortion, ranging from 0.0 to 1.0, with a default of 0.5 @property(readwrite, nonatomic) CGFloat radius; /// The amount of distortion to apply, with a minimum of 0.0 and a default of 1.0 @property(readwrite, nonatomic) CGFloat angle; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" @interface GPUImagePerlinNoiseFilter : GPUImageFilter { GLint scaleUniform, colorStartUniform, colorFinishUniform; } @property (readwrite, nonatomic) GPUVector4 colorStart; @property (readwrite, nonatomic) GPUVector4 colorFinish; @property (readwrite, nonatomic) float scale; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilterGroup.h" @class GPUImageRGBErosionFilter; @class GPUImageRGBDilationFilter; // A filter that first performs an erosion on each color channel of an image, followed by a dilation of the same radius. // This helps to filter out smaller bright elements. @interface GPUImageRGBOpeningFilter : GPUImageFilterGroup { GPUImageRGBErosionFilter *erosionFilter; GPUImageRGBDilationFilter *dilationFilter; } - (id)initWithRadius:(NSUInteger)radius; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilterGroup.h" #import "GPUImageBuffer.h" #import "GPUImageDissolveBlendFilter.h" @interface GPUImageLowPassFilter : GPUImageFilterGroup { GPUImageBuffer *bufferFilter; GPUImageDissolveBlendFilter *dissolveBlendFilter; } // This controls the degree by which the previous accumulated frames are blended with the current one. This ranges from 0.0 to 1.0, with a default of 0.5. @property(readwrite, nonatomic) CGFloat filterStrength; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageTwoInputFilter.h" @interface GPUImageColorBlendFilter : GPUImageTwoInputFilter @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" @interface GPUImageCrosshatchFilter : GPUImageFilter { GLint crossHatchSpacingUniform, lineWidthUniform; } // The fractional width of the image to use as the spacing for the crosshatch. The default is 0.03. @property(readwrite, nonatomic) CGFloat crossHatchSpacing; // A relative width for the crosshatch lines. The default is 0.003. @property(readwrite, nonatomic) CGFloat lineWidth; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageOutput.h" #import "GPUImageFilter.h" @interface GPUImageFilterGroup : GPUImageOutput <GPUImageInput> { NSMutableArray *filters; BOOL isEndProcessing; } @property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *terminalFilter; @property(readwrite, nonatomic, strong) NSArray *initialFilters; @property(readwrite, nonatomic, strong) GPUImageOutput<GPUImageInput> *inputFilterToIgnoreForUpdates; // Filter management - (void)addFilter:(GPUImageOutput<GPUImageInput> *)newFilter; - (GPUImageOutput<GPUImageInput> *)filterAtIndex:(NSUInteger)filterIndex; - (NSUInteger)filterCount; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageSobelEdgeDetectionFilter.h" @interface GPUImageThresholdEdgeDetectionFilter : GPUImageSobelEdgeDetectionFilter { GLint thresholdUniform; } /** Any edge above this threshold will be black, and anything below white. Ranges from 0.0 to 1.0, with 0.8 as the default */ @property(readwrite, nonatomic) CGFloat threshold; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" /** Pixels with a luminance above the threshold will appear white, and those below will be black */ @interface GPUImageLuminanceThresholdFilter : GPUImageFilter { GLint thresholdUniform; } /** Anything above this luminance will be white, and anything below black. Ranges from 0.0 to 1.0, with 0.5 as the default */ @property(readwrite, nonatomic) CGFloat threshold; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilterGroup.h" #import "GPUImageLowPassFilter.h" #import "GPUImageDifferenceBlendFilter.h" @interface GPUImageHighPassFilter : GPUImageFilterGroup { GPUImageLowPassFilter *lowPassFilter; GPUImageDifferenceBlendFilter *differenceBlendFilter; } // This controls the degree by which the previous accumulated frames are blended and then subtracted from the current one. This ranges from 0.0 to 1.0, with a default of 0.5. @property(readwrite, nonatomic) CGFloat filterStrength; @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
#import "GPUImageFilter.h" // This is an accumulator that uses a Hough transform in parallel coordinate space to identify probable lines in a scene. // // It is entirely based on the work of the Graph@FIT research group at the Brno University of Technology and their publications: // M. Dubská, J. Havel, and A. Herout. Real-Time Detection of Lines using Parallel Coordinates and OpenGL. Proceedings of SCCG 2011, Bratislava, SK, p. 7. // M. Dubská, J. Havel, and A. Herout. PClines — Line detection using parallel coordinates. 2011 IEEE Conference on Computer Vision and Pattern Recognition (CVPR), p. 1489- 1494. @interface GPUImageParallelCoordinateLineTransformFilter : GPUImageFilter { GLubyte *rawImagePixels; GLfloat *lineCoordinates; unsigned int maxLinePairsToRender, linePairsToRender; } @end
{ "repo_name": "filelife/SnapseedImitation", "stars": "83", "repo_language": "Objective-C", "file_name": "GPUImageParallelCoordinateLineTransformFilter.h", "mime_type": "text/x-objective-c" }
<!DOCTYPE HTML> <html> <head> <meta charset="utf-8"> <title>Vanilla Javascript tooltip</title> <link rel="stylesheet" href="styles/vanilla-js-tooltip.css" /> </head> <body> <div class="wrap"> <p>Narwhal swag seitan, fap vegan stumptown blue bottle irony tote bag wayfarers semiotics banh mi raw denim. Street art swag scenester bicycle rights. Distillery <span data-tooltip="Distillery squid authentic" data-position="right bottom">right bottom tooltip</span> authentic, meggings swag farm-to-table kinfolk. Schlitz blue bottle deep v, cronut banjo tattooed aesthetic migas. Bitters put a bird on it hammock microdosing, chambray locavore pickled umami. Poutine XOXO shoreditch synth heirloom. Hashtag neutra pop-up, scenester pour-over typewriter shabby chic godard try-hard.</p> <p>Freegan affogato yr, <span data-tooltip="pinterest <b style='color: #c00'>gluten-free</b> forage street">top center with HTML</span> hammock etsy quinoa everyday carry skateboard shabby chic ugh mumblecore vinyl. Heirloom food truck mustache, hashtag chartreuse art party godard. Tacos bitters affogato taxidermy, brunch dreamcatcher offal raw denim cred forage. Fashion axe squid pickled, blue bottle kogi four loko portland. Butcher <span data-tooltip="<img src='http://lorempixel.com/150/100/' alt='' />" data-position="right top">image</span> umami, mixtape pinterest gluten-free forage street art taxidermy hella. Fap viral twee scenester shoreditch, messenger bag distillery meditation 90's 8-bit bicycle rights. Kale chips gluten-free hoodie, cronut helvetica keytar YOLO waistcoat beard brooklyn chillwave locavore tousled meggings.</p> </div> <script src="scripts/vanilla-js-tooltip.min.js"></script> <script> var tooltip = new Tooltip({ theme: "dark" }); </script> </body> </html>
{ "repo_name": "zoltantothcom/vanilla-js-tooltip", "stars": "26", "repo_language": "HTML", "file_name": "vanilla-js-tooltip.scss", "mime_type": "text/plain" }
Vanilla Javascript tooltip.
-------

Vanilla Javascript tooltip. Accepts plain text and fancy HTML.

#### Demo
[http://zoltantothcom.github.io/vanilla-js-tooltip](http://zoltantothcom.github.io/vanilla-js-tooltip)

#### Settings

Option | Type | Default | Description
------ | ---- | ------- | -----------
theme | string | dark | Selects one of the pre-defined tooltip styles - light or dark.
dist | number | 10 | Specifies the distance in pixels from trigger to tooltip.
delay | number | 0 | Specifies how long (in milliseconds) the tooltip remains visible after the mouse leaves the trigger.

#### Example

HTML:
```html
Lorem <span data-tooltip="<img src='//lorempixel.com/150/100/' />" data-position="right top">ipsum</span> dolor sit amet.
```

Javascript:
```javascript
var tooltip = new Tooltip({
    theme: "dark",
    delay: 750
});
```

#### Browser support and dependencies

Browser | Support | Dependencies
------- | ------- | ------------
Chrome | yes | -
Firefox | yes | -
Safari | yes | -
Opera | yes | -
IE | yes* | -

\* _IE9 and up_

#### License
Free. [Unlicense](http://unlicense.org).
{ "repo_name": "zoltantothcom/vanilla-js-tooltip", "stars": "26", "repo_language": "HTML", "file_name": "vanilla-js-tooltip.scss", "mime_type": "text/plain" }
/**
 * @fileOverview
 * @author Zoltan Toth
 * @version 0.1
 */

/**
 * @description
 * Vanilla Javascript tooltip.
 *
 * @class
 * @param {string} [options.theme=dark] - Selects one of the pre-defined tooltip styles - light or dark.
 * @param {number} [options.dist=10] - Specifies the distance in pixels from trigger to tooltip.
 * @param {number} [options.delay=0] - Specifies how long (in milliseconds) the tooltip remains visible after the mouse leaves the trigger.
 */
Tooltip = function(options) {
    var theme = options.theme || "dark",
        delay = options.delay || 0,
        dist  = options.dist  || 10;

    /*
     * Attaching one mouseover and one mouseout listener to the document
     * instead of listeners for each trigger
     */
    document.body.addEventListener("mouseover", function(e) {
        if (!e.target.hasAttribute('data-tooltip')) return;

        var tooltip = document.createElement("div");
        tooltip.className = "b-tooltip " + "b-tooltip-" + theme;
        tooltip.innerHTML = e.target.getAttribute('data-tooltip');

        document.body.appendChild(tooltip);

        var pos = e.target.getAttribute('data-position') || "center top",
            posHorizontal = pos.split(" ")[0],
            posVertical = pos.split(" ")[1];

        positionAt(e.target, tooltip, posHorizontal, posVertical);
    });

    document.body.addEventListener("mouseout", function(e) {
        if (e.target.hasAttribute('data-tooltip')) {
            setTimeout(function() {
                // the tooltip may already have been removed, so check first
                var tooltip = document.querySelector(".b-tooltip");
                if (tooltip) document.body.removeChild(tooltip);
            }, delay);
        }
    });

    /**
     * Positions the tooltip.
     *
     * @param {object} parent - The trigger of the tooltip.
     * @param {object} tooltip - The tooltip itself.
     * @param {string} posHorizontal - Desired horizontal position of the tooltip relatively to the trigger (left/center/right)
     * @param {string} posVertical - Desired vertical position of the tooltip relatively to the trigger (top/center/bottom)
     */
    function positionAt(parent, tooltip, posHorizontal, posVertical) {
        var parentCoords = parent.getBoundingClientRect(), left, top;

        switch (posHorizontal) {
            case "left":
                left = parseInt(parentCoords.left) - dist - tooltip.offsetWidth;
                if (parseInt(parentCoords.left) - tooltip.offsetWidth < 0) {
                    left = dist;
                }
                break;

            case "right":
                left = parentCoords.right + dist;
                if (parseInt(parentCoords.right) + tooltip.offsetWidth > document.documentElement.clientWidth) {
                    left = document.documentElement.clientWidth - tooltip.offsetWidth - dist;
                }
                break;

            default:
            case "center":
                left = parseInt(parentCoords.left) + ((parent.offsetWidth - tooltip.offsetWidth) / 2);
        }

        switch (posVertical) {
            case "center":
                top = (parseInt(parentCoords.top) + parseInt(parentCoords.bottom)) / 2 - tooltip.offsetHeight / 2;
                break;

            case "bottom":
                top = parseInt(parentCoords.bottom) + dist;
                break;

            default:
            case "top":
                top = parseInt(parentCoords.top) - tooltip.offsetHeight - dist;
        }

        left = (left < 0) ? parseInt(parentCoords.left) : left;
        top  = (top < 0)  ? parseInt(parentCoords.bottom) + dist : top;

        tooltip.style.left = left + "px";
        tooltip.style.top  = top + pageYOffset + "px";
    }
};
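/* Usage sketch (illustrative, not part of the library source):
 *
 *   var tooltip = new Tooltip({ theme: "light", dist: 16, delay: 500 });
 *
 * Any element carrying a data-tooltip attribute then shows a tooltip,
 * optionally positioned via data-position ("left|center|right top|center|bottom"):
 *
 *   <span data-tooltip="Hello" data-position="right center">trigger</span>
 */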
{ "repo_name": "zoltantothcom/vanilla-js-tooltip", "stars": "26", "repo_language": "HTML", "file_name": "vanilla-js-tooltip.scss", "mime_type": "text/plain" }
.b-tooltip { border: 3px solid #fff; display: inline-block; font-size: .875em; padding: .75em; position: absolute; text-align: center; } .b-tooltip-light { background: #eaeaea; background: linear-gradient(to bottom, #fdfdfd 0%, #eaeaea 100%); box-shadow: 0px 0px 6px 2px rgba(110, 110, 110, .4); color: #242424; } .b-tooltip-dark { background: #242424; background: linear-gradient(to bottom, #6e6e6e 0%, #242424 100%); box-shadow: 0px 0px 6px 2px rgba(110, 110, 110, 1); color: #fff; } // page styles - not used in tooltip body { font-family: sans-serif; height: 2000px; } span { background: #ffe4e1; border: 2px dotted #c00; cursor: default; display: inline-block; padding: 5px; } img { display: block; } .wrap { margin: 0 auto; width: 800px; }
{ "repo_name": "zoltantothcom/vanilla-js-tooltip", "stars": "26", "repo_language": "HTML", "file_name": "vanilla-js-tooltip.scss", "mime_type": "text/plain" }
module.exports = function geneticAlgorithmConstructor(options) {

    function settingDefaults() {
        return {
            mutationFunction : function(phenotype) { return phenotype },
            crossoverFunction : function(a,b) { return [a,b] },
            fitnessFunction : function(phenotype) { return 0 },
            doesABeatBFunction : undefined,
            population : [],
            populationSize : 100,
        }
    }

    function settingWithDefaults( settings , defaults ) {
        settings = settings || {}

        settings.mutationFunction = settings.mutationFunction || defaults.mutationFunction
        settings.crossoverFunction = settings.crossoverFunction || defaults.crossoverFunction
        settings.fitnessFunction = settings.fitnessFunction || defaults.fitnessFunction
        settings.doesABeatBFunction = settings.doesABeatBFunction || defaults.doesABeatBFunction

        settings.population = settings.population || defaults.population
        if ( settings.population.length <= 0 ) throw Error("population must be an array and contain at least 1 phenotype")

        settings.populationSize = settings.populationSize || defaults.populationSize
        if ( settings.populationSize <= 0 ) throw Error("populationSize must be greater than 0")

        return settings
    }

    var settings = settingWithDefaults(options,settingDefaults())

    // fill the population up to populationSize with mutants of existing phenotypes
    function populate () {
        var size = settings.population.length
        while( settings.population.length < settings.populationSize ) {
            settings.population.push(
                mutate( cloneJSON( settings.population[ Math.floor( Math.random() * size ) ] ) )
            )
        }
    }

    function cloneJSON( object ) {
        return JSON.parse ( JSON.stringify ( object ) )
    }

    function mutate(phenotype) {
        return settings.mutationFunction(cloneJSON(phenotype))
    }

    function crossover(phenotype) {
        phenotype = cloneJSON(phenotype)
        var mate = settings.population[ Math.floor(Math.random() * settings.population.length ) ]
        mate = cloneJSON(mate)
        return settings.crossoverFunction(phenotype,mate)[0]
    }

    function doesABeatB(a,b) {
        if ( settings.doesABeatBFunction ) {
            return settings.doesABeatBFunction(a,b)
        } else {
            return settings.fitnessFunction(a) >= settings.fitnessFunction(b)
        }
    }

    // pair phenotypes off; the loser of each pairing is replaced by a
    // mutant or crossover child of the winner
    function compete( ) {
        var nextGeneration = []

        for( var p = 0 ; p < settings.population.length - 1 ; p+=2 ) {
            var phenotype = settings.population[p];
            var competitor = settings.population[p+1];

            nextGeneration.push(phenotype)
            if ( doesABeatB( phenotype , competitor )) {
                if ( Math.random() < 0.5 ) {
                    nextGeneration.push(mutate(phenotype))
                } else {
                    nextGeneration.push(crossover(phenotype))
                }
            } else {
                nextGeneration.push(competitor)
            }
        }

        settings.population = nextGeneration;
    }

    function randomizePopulationOrder( ) {
        for( var index = 0 ; index < settings.population.length ; index++ ) {
            var otherIndex = Math.floor( Math.random() * settings.population.length )
            var temp = settings.population[otherIndex]
            settings.population[otherIndex] = settings.population[index]
            settings.population[index] = temp
        }
    }

    return {
        evolve : function (options) {
            if ( options ) {
                settings = settingWithDefaults(options,settings)
            }
            populate()
            randomizePopulationOrder()
            compete()
            return this
        },
        best : function() {
            var scored = this.scoredPopulation()
            var result = scored.reduce(function(a,b){
                return a.score >= b.score ? a : b
            },scored[0]).phenotype
            return cloneJSON(result)
        },
        bestScore : function() {
            return settings.fitnessFunction( this.best() )
        },
        population : function() {
            return cloneJSON( this.config().population )
        },
        scoredPopulation : function() {
            return this.population().map(function(phenotype) {
                return {
                    phenotype : cloneJSON( phenotype ),
                    score : settings.fitnessFunction( phenotype )
                }
            })
        },
        config : function() {
            return cloneJSON( settings )
        },
        clone : function(options) {
            return geneticAlgorithmConstructor(
                settingWithDefaults(options,
                    settingWithDefaults( this.config(), settings )
                )
            )
        }
    }
}
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
[![Downloads][downloads-image]][downloads-url] [![Auto Test Status][travis-image]][travis-url] [![license][license-image]][license-url] [![Gitter chat][gitter-image]][gitter-url]

[Watch the demo video](https://youtu.be/kYFz03Cg3-A)

[<img src="./img/preview.png" alt="Demo Video" width="300"/>](https://youtu.be/kYFz03Cg3-A)

Genetic Algorithms are an attempt to mimic the benefits of Evolution. This package provides the calculation framework to execute and mimic artificial evolution. Genetic Algorithms have been used to solve many problems in Engineering and Mathematics, both for fun and profit.

A naive implementation will try to maximize a fitness function by applying mutations and cross-over, but fail to implement a diversity metric. What happens without a diversity metric is that most likely all your phenotypes will clump into one local optimum. When a diversity metric is added, a few phenotypes are allowed to 'explore' the local solution space without having to compete with the majority of the population that is overfit to a local maximum. Adding a diversity metric will allow these 'explorers' to avoid competition until they find something even better. To add diversity, use the [doesABeatBFunction](#doesABeatBFunction) instead of the [fitnessFunction](#fitnessFunction) and only allow A to beat B if A is more fit than B and B is close enough. See the [Islands](#Islands) example for sample code.

Section Links : [Construction](#construction) , [Execution](#execution) , [Examples](#examples) , [Phenotype](#phenotype) , [FAQ](#faq) , [Related](#related-ai-projects) , and [References](#references)

# Construction

### GeneticAlgorithm constructor
```js
var GeneticAlgorithmConstructor = require('geneticalgorithm')
var geneticalgorithm = GeneticAlgorithmConstructor( config )
```

The minimal configuration for constructing a GeneticAlgorithm calculator is like so:
```js
var config = {
    mutationFunction: aMutationFunctionYouSupply,
    crossoverFunction: yourCrossoverFunction,
    fitnessFunction: yourFitnessFunction,
    doesABeatBFunction: yourCompetitionFunction,
    population: [ /* one or more phenotypes */ ],
    populationSize: aDecimalNumberGreaterThanZero // defaults to 100
}
var GeneticAlgorithmConstructor = require('geneticalgorithm')
var geneticalgorithm = GeneticAlgorithmConstructor( config )
```

That creates one instance of a GeneticAlgorithm calculator which uses the initial configuration you supply. All configuration options are optional except *population*. If you don't specify a crossover function then GeneticAlgorithm will only do mutations, and similarly if you don't specify the mutation function it will only do crossovers. If you don't specify either then no evolution will happen, go figure.

That is all the configuration you need to get started. You can skip the next sections on advanced configuration and jump right to [execution](#execution), [functions](#functions) and [examples](#examples).

### geneticalgorithm.clone( )
Create another GeneticAlgorithm calculator based off of an existing configuration.
```js
var anotherGA = geneticalgorithm.clone()
```

### geneticalgorithm.clone( config )
Create another GeneticAlgorithm calculator based off of an existing configuration and override some or all of the configuration.
```js
var anotherWithLargePopulation = geneticalgorithm.clone({
    populationSize : 1000
})
```

### geneticalgorithm.config()
Get the current configuration of a GeneticAlgorithm. All defaults will be populated. Can be used for debugging or populating a new or cloned GeneticAlgorithm. A clone with a 10% larger population size could be created like so:
```js
var size = geneticalgorithm.config().populationSize
var biggerGeneticAlgorithm = geneticalgorithm.clone({ populationSize : size * 1.10 })
```
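For reference, a minimal sketch of inspecting the populated defaults; the printed values assume the minimal configuration above, with nothing overridden:
```js
var config = geneticalgorithm.config()
console.log( config.populationSize )    // 100 unless overridden
console.log( config.population.length ) // the phenotypes currently held
```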
# Execution

### geneticalgorithm.evolve( )
Do one generation of evolution like so:
```js
geneticalgorithm.evolve( )
```

The *.evolve()* moves the calculator ahead by one generation. Depending on the population size and the speed of the functions you provide in the configuration, this could be quick or take some time. *.evolve()* changes the geneticalgorithm and also returns it. This is for simplicity, so that you can chain calls like so:
```js
geneticalgorithm.evolve().evolve().best()
```
to do two evolutions and then get the best phenotype (see *.best()* below).

### geneticalgorithm.evolve( config )
Same as *.evolve()* but change the configuration prior to running the evolution calculations. In this example the populationSize is increased to 200:
```js
geneticalgorithm.evolve( { populationSize : 200 } )
```

### geneticalgorithm.best()
Retrieve the phenotype with the highest fitness score like so:
```js
var best = geneticalgorithm.best()
```

### geneticalgorithm.bestScore()
Retrieve the score of the best phenotype like so:
```js
var best = geneticalgorithm.bestScore()
```

### geneticalgorithm.population()
Retrieve the whole population like so:
```js
var phenotypeList = geneticalgorithm.population()
```

### geneticalgorithm.scoredPopulation()
Retrieve the whole population wrapped in score objects like so:
```js
var scoreList = geneticalgorithm.scoredPopulation()
console.log( scoreList[0].phenotype )
console.log( scoreList[0].score )
```

Each element of the *.scoredPopulation()* result has the following data structure:
```js
result = { phenotype : anItem , score : aNumber }
```
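Putting the execution calls together, here is a minimal end-to-end sketch; the mutation and fitness functions are purely illustrative, not part of the library:
```js
var GeneticAlgorithmConstructor = require('geneticalgorithm')

var geneticalgorithm = GeneticAlgorithmConstructor({
    mutationFunction: function(phenotype) {
        phenotype.x += Math.random() * 2 - 1  // nudge the value randomly
        return phenotype
    },
    fitnessFunction: function(phenotype) {
        return -Math.abs(phenotype.x - 42)    // closer to 42 scores higher
    },
    population: [ { x: 0 } ]
})

for (var i = 0; i < 100; i++) geneticalgorithm.evolve()
console.log(geneticalgorithm.best(), geneticalgorithm.bestScore())
```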
# Functions
This is the specification of the configuration functions you pass to GeneticAlgorithm.

### mutationFunction(phenotype)
> Must return a phenotype

The mutation function that you provide. It is a synchronous function that mutates the phenotype you pass in, like so:
```js
function mutationFunction (oldPhenotype) {
    var resultPhenotype = {}
    // use oldPhenotype and some random
    // function to make a change to your
    // phenotype
    return resultPhenotype
}
```

### crossoverFunction (phenoTypeA, phenoTypeB)
> Must return an array [] with 2 phenotypes

The crossover function that you provide. It is a synchronous function that swaps random sections between two phenotypes. Construct it like so:
```js
function crossoverFunction(phenoTypeA, phenoTypeB) {
    var result1 = {} , result2 = {}
    // use phenoTypeA and B to create phenotype result 1 and 2
    return [result1,result2]
}
```

### fitnessFunction (phenotype)
> Must return a number

```js
function fitnessFunction(phenotype) {
    var fitness = 0
    // use phenotype and possibly some other information
    // to determine the fitness number. Higher is better, lower is worse.
    return fitness;
}
```

### doesABeatBFunction (phenoTypeA, phenoTypeB)
> Must return truthy or falsy

This function, if specified, overrides using simply the fitness function to compare two phenotypes. There are situations where you will want to preserve a certain amount of genetic diversity, and so your doesABeatBFunction can return false if the two phenotypes are too different. When GeneticAlgorithm is comparing two phenotypes it *only* tests if A can beat B; if so, then B dies and is replaced with a mutant or crossover child of A. If A cannot beat B then nothing happens. This is an important note to consider.

Suppose A and B are very genetically different and you want to preserve diversity; then in your *doesABeatBFunction* you would check how diverse A and B are and simply return falsy if it crosses your threshold. The default implementation, if you don't supply one, is:
```js
function doesABeatBFunction(phenoTypeA, phenoTypeB) {
    return fitnessFunction(phenoTypeA) >= fitnessFunction(phenoTypeB)
}
```

Imagine you have implemented a *yourDiversityFunc(phenoTypeA, phenoTypeB)* that returns some numeric value, and you've identified that some *MINIMUM_SIMILARITY* value is necessary for A and B to even be compared, otherwise you want to preserve both. Your implementation may look something like this:
```js
function doesABeatBFunction(phenoTypeA, phenoTypeB) {
    // if too genetically different to consider
    if ( yourDiversityFunc(phenoTypeA, phenoTypeB) > MINIMUM_SIMILARITY ) {
        return false;
    }
    // if phenoTypeA isn't better than phenoTypeB
    if ( fitnessFunction(phenoTypeA) <= fitnessFunction(phenoTypeB) ) {
        return false;
    }
    // else phenoTypeA beats phenoTypeB
    return true;
}
```

# Examples
If you have installed this as a npm dependency, first change directory to *node_modules/geneticalgorithm/*.

### Template
The template is a boilerplate of how to get started. It has a dummy phenotype and all the functions stubbed out. Execute it like so:
```
node examples/template.js
```

### Islands
One issue that arises is when there are local maxima and the genetic algorithm gets stuck on one and does not explore and find the global maximum. In this example there are 25 local maxima but only one global maximum. One phenotype starts near the worst local maximum, and after a few generations the genetic algorithm is able to find the global maximum. The command line example:
```
node examples/islandHop.js
```

There is also a web example. Use something like `http-server -p 8000` or another web server. Then go to [/examples/islandHop.html](http://localhost:8000/examples/islandHop.html).

### Traveling Salesmen
For a list of XY coordinates, find the order that results in the least distance being traveled. Run the example like so:
```
node examples/travelingSalesmen.js
```

### Fixed Number Array
Execute the *Fixed Number Array* example. This example shows how to use the basic configurations. It starts with one array of zeros. The objective is to evolve the array of zeros to an array of 50's. Run the example using the command line like so:
```
# use the default of 10 numbers in the number array
node examples/fixedNumberArray.js

# change the array to be 30 numbers long
node examples/fixedNumberArray.js 30
```

### Variable Number Array
This is similar to the Fixed Number Array. The key difference is that the number array can mutate to be longer or shorter. It starts as the list [0,0,0]. The objective is to evolve the array to the target size with all values equal to 50. This is a great example if you are searching for a list of values but you also don't know how long the list needs to be. Run the example using the command line like so:
```
# use the default of 5 as the target length
node examples/variableNumberArray.js

# use 15 as the target length
node examples/variableNumberArray.js 15
```

# Phenotype
What is a phenotype? Any JSON object you want. GeneticAlgorithm doesn't care. Choose something that works well for your particular problem and your helper functions: mutation, crossover, and fitness. A phenotype could be a list of numbers, a dictionary of words, or a matrix of boolean values. It must be a JSON object though.
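For instance, any of these illustrative shapes (not prescribed by the library) would work:
```js
var asList   = { numbers: [0, 0, 0] }                        // a list of numbers
var asDict   = { words: { greeting: "hello", exit: "bye" } } // a dictionary of words
var asMatrix = { grid: [[true, false], [false, true]] }      // a matrix of boolean values
```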
# FAQ

**What is a phenotype?** A phenotype is a fancy name for the thing you want to evolve. Perhaps it is a list of numbers or a configuration file or a micro-instruction language. The key is that it is a JSON object and is the thing you want to evolve. It is just data. If it was called 'the data' instead of phenotype then it would get confusing when we want to talk about other data besides the thing we are trying to evolve. The name phenotype comes from evolution. If you are interested in that sort of thing, Wikipedia has a great write-up.

# Related AI Projects
This is part of a set of related projects.

* [AlphaBeta](https://www.npmjs.com/package/alphabeta)
* [Boosting](https://www.npmjs.com/package/boosting)
* [GeneticAlgorithm](https://www.npmjs.com/package/geneticalgorithm)
* [NearestNeighbour](https://www.npmjs.com/package/nearestneighbour)
* [NeuralNet](https://www.npmjs.com/package/neuralnet)

# References

* [Instructor: Patrick Winston from MIT](http://www.youtube.com/v/kHyNqSnzP8Y)
* [Wikipedia entry for Genetic Algorithms](https://en.wikipedia.org/wiki/Genetic_Algorithm)

[gitter-url]: https://gitter.im/panchishin/geneticalgorithm
[gitter-image]: https://badges.gitter.im/panchishin/geneticalgorithm.png
[downloads-image]: http://img.shields.io/npm/dm/geneticalgorithm.svg
[downloads-url]: https://www.npmjs.com/~panchishin
[travis-url]: https://travis-ci.org/panchishin/geneticalgorithm
[travis-image]: http://img.shields.io/travis/panchishin/geneticalgorithm.svg
[license-image]: https://img.shields.io/badge/license-Unlicense-green.svg
[license-url]: https://tldrlegal.com/license/unlicense
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
var geneticAlgorithmConstructor = require("../index.js") module.exports = { 'solve genetic diversity problem' : function(beforeExit, assert) { function cloneJSON( item ) { return JSON.parse ( JSON.stringify ( item ) ) } function mutationFunction(phenotype) { var result = cloneJSON(phenotype) result.number[Math.floor(Math.random() * 2)] += (Math.random()*2-1)*2; return result; } function crossoverFunction(a, b) { var x = { number : [ a.number[0] , b.number[1] ]} var y = { number : [ b.number[0] , a.number[1] ]} return Math.random() > 0.5 ? [ x , y ] : [ y , x ] } function score(phenotype) { var score = Math.sin( phenotype.number[0] * Math.PI ) * Math.sin( phenotype.number[1] * Math.PI ) score *= ( 10 + phenotype.number[0] + phenotype.number[1] ) / 10 return Math.abs(score) } function fitnessFunction(phenotype) { if ( phenotype.number[0] < 0 || phenotype.number[0] > 10 || phenotype.number[1] < 0 || phenotype.number[1] > 10 ) { return 0 } if ( phenotype.number[0] > 7.5 && phenotype.number[1] > 7.5 ) { return score(phenotype) } if ( phenotype.number[0] > 1 && phenotype.number[1] > 1 ) { return 0 } return score(phenotype) } function distance(a,b) { return Math.abs( a.number[0] - b.number[0] ) + Math.abs( a.number[1] - b.number[1] ) } function doesABeatBFunction(a,b) { var doesABeatB = ( distance(a,b) < 2 ) && ( fitnessFunction(a) >= fitnessFunction(b) ) return doesABeatB } var ga = geneticAlgorithmConstructor({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, doesABeatBFunction: doesABeatBFunction, population: [ { number : [.1,.1] } ] }); ga.evolve() var best = ga.best() for( var tries = 0 ; tries < 20 && ( best.number[0] < 3 || best.number[1] < 3 ) ; tries++ ) { for( var i = 0 ; i < 2000 ; i++ ) { ga.evolve() } best = ga.best() } assert.equal( true , best.number[0] >= 3 && best.number[1] >= 3 , "Error : untrue : " + best.number[0] + " >= 3 && " + best.number[1] + " >= 3 : " + JSON.stringify(ga.best())); } }
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
var geneticAlgorithmConstructor = require("../index.js") module.exports = { 'geneticalgorithm is a function' : function(beforeExit, assert) { assert.equal('function', typeof geneticAlgorithmConstructor) }, 'constructor creates basic config' : function(beforeExit, assert) { var geneticAlgorithm = geneticAlgorithmConstructor( { population : [ {} ] } ); assert.equal('object' , typeof geneticAlgorithm ) }, 'complete successfully for evolutions' : function(beforeExit, assert) { var config = { mutationFunction: function(phenotype) { return phenotype }, crossoverFunction: function(a,b) { return [a,b] }, fitnessFunction: function(phenotype) { return 0 }, population: [ { name : "bob" } ] } var geneticalgorithm = geneticAlgorithmConstructor( config ) geneticalgorithm.evolve() assert.equal( "bob" , geneticalgorithm.best().name ) }, 'solve number evolution' : function(beforeExit, assert) { var PhenotypeSize = 5; function mutationFunction(phenotype) { var gene = Math.floor( Math.random() * phenotype.numbers.length ); phenotype.numbers[gene] += Math.random() * 20 - 10; return phenotype; } function crossoverFunction(a, b) { function cloneJSON( item ) { return JSON.parse ( JSON.stringify ( item ) ) } var x = cloneJSON(a), y = cloneJSON(b), cross = false; for (var i in x.numbers) { if ( Math.random() * x.numbers.length <= 1 ) { cross = !cross } if (cross) { x.numbers[i] = b.numbers[i]; y.numbers[i] = a.numbers[i]; } } return [ x , y ]; } function fitnessFunction(phenotype) { var sumOfPowers = 0; for (var i in phenotype.numbers) { // assume perfect solution is '50.0' for all numbers sumOfPowers += Math.pow( 50 - phenotype.numbers[i], 2); } return 1 / Math.sqrt(sumOfPowers); } function createEmptyPhenotype() { var data = []; for (var i = 0; i < PhenotypeSize; i += 1) { data[i] = 0 } return { numbers : data } } var ga = geneticAlgorithmConstructor({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, population: [ createEmptyPhenotype() ] }); ga = ga.clone() ga = ga.clone( ga.config() ) ga.evolve() var lastScore = ga.bestScore() for( var i = 0 ; i < 4 && lastScore < 1 ; i++ ) { for( var j = 0 ; j < 4 * 5 * PhenotypeSize ; j++ ) ga.evolve() var bestScore = ga.bestScore() assert.equal( true , bestScore > lastScore , i + " " + j + " " + lastScore) lastScore = bestScore } assert.equal( true , ga.bestScore() > 1 , "Error : untrue : " + ga.bestScore() + " > 1"); } }
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
var GeneticAlgorithm = require('../index') if ( !process.argv[2] ) { console.log("No argument found. Using default of 10 as the Phenotype size.") } var PhenotypeSize = process.argv[2] || 10; function mutationFunction(phenotype) { var gene = Math.floor( Math.random() * phenotype.numbers.length ); phenotype.numbers[gene] += Math.random() * 20 - 10; return phenotype; } function crossoverFunction(a, b) { function cloneJSON( item ) { return JSON.parse ( JSON.stringify ( item ) ) } var x = cloneJSON(a), y = cloneJSON(b), cross = false; for (var i in a.numbers) { if ( Math.random() * a.numbers.length <= 1 ) { cross = !cross } if (cross) { x.numbers[i] = b.numbers[i]; y.numbers[i] = a.numbers[i]; } } return [ x , y ]; } function fitnessFunction(phenotype) { var sumOfPowers = 0; for (var i in phenotype.numbers) { // assume perfect solution is '50.0' for all numbers sumOfPowers += Math.pow( 50 - phenotype.numbers[i], 2); } return 1 / Math.sqrt(sumOfPowers); } function createEmptyPhenotype() { var data = []; for (var i = 0; i < PhenotypeSize; i += 1) { data[i] = 0 } return { numbers : data } } var ga = GeneticAlgorithm({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, population: [ createEmptyPhenotype() ], populationSize: PhenotypeSize * 10 }); for( var i = 0 ; i < 20 * PhenotypeSize ; i++ ) ga.evolve() console.log(ga.best());
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
function mutationFunction(phenotype) { phenotype = cloneJSON(phenotype) var chance = Math.random() var index = Math.floor( Math.random() * phenotype.numbers.length ) // remove one number if ( chance < 0.1 && phenotype.numbers.length > 0 ) { phenotype.numbers.splice( index , 1 ) return phenotype // add one number } else if ( chance < 0.2 ) { var numbers = phenotype.numbers.slice( 0 , index ) .concat( phenotype.numbers.slice( index , index+1 ) ) .concat( phenotype.numbers.slice( index ) ) phenotype.numbers = numbers return phenotype // mutate } else { phenotype.numbers[index] += Math.random() * 20 - 10; return phenotype; } } function cloneJSON( item ) { return JSON.parse ( JSON.stringify ( item ) ) } function crossoverFunction(a, b) { if ( a.numbers.length > b.numbers.length ) { var temp = a a = b b = temp } var x = cloneJSON(a), y = cloneJSON(b), cross = false, offset = 0; for (var i in a.numbers) { if ( a.numbers.length + offset < b.numbers.length ) { offset += Math.random() * a.numbers.length <= 1 ? 1 : 0 } if ( Math.random() * a.numbers.length <= 1 ) { cross = !cross } if (cross) { x.numbers[i] = b.numbers[i]; y.numbers[i+offset] = a.numbers[i]; } } return [ x , y ]; } function fitnessFunction(phenotype) { if ( phenotype.numbers.length == 0 ) { return 0 } var sumOfPowers = 0; for (var i in phenotype.numbers) { // assume perfect solution is '50.0' for all numbers sumOfPowers += Math.pow( 50 - phenotype.numbers[i], 2); } var result = 1 / Math.sqrt(sumOfPowers) result /= Math.max( Math.pow(0.01,TARGET_LENGTH) , Math.abs( phenotype.numbers.length - TARGET_LENGTH ) ) return result } function createEmptyPhenotype() { return { numbers : [ 0 ] } } if ( ! process.argv[2] ) { console.log("No arguments found. Using default of '5' for target length.") } var TARGET_LENGTH = process.argv[2] || 5; var geneticAlgorithm = require('../index')({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, population: [ { numbers : [ 0 , 0 , 0 ] } ] }); for( var i = 0 ; i < 40 * TARGET_LENGTH ; i++ ) geneticAlgorithm.evolve() console.log(geneticAlgorithm.best());
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
function mutationFunction(phenotype) { // make a random change to phenotype return phenotype } function crossoverFunction(phenotypeA, phenotypeB) { // move, copy, or append some values from a to b and from b to a return [ phenotypeA , phenotypeB ] } function fitnessFunction(phenotype) { var score = 0 // use your phenotype data to figure out a fitness score return score } var firstPhenotype = { dummyKey : "dummyValue" // enter phenotype data here } var geneticAlgorithmConstructor = require('../index') var geneticAlgorithm = geneticAlgorithmConstructor({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, population: [ firstPhenotype ] }); console.log("Starting with:") console.log( firstPhenotype ) for( var i = 0 ; i < 100 ; i++ ) geneticAlgorithm.evolve() var best = geneticAlgorithm.best() delete best.score console.log("Finished with:") console.log(best)
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
const canvas = document.getElementById("canvas"); const madeit = document.getElementById("madeit"); const context = canvas.getContext('2d'); MAX_SIZE = 600; let islands = [[0.5,0.5], // upper left [2.5,0.5],[4.5,0.5],[6.5,0.5],[8.5,0.5], // x column [0.5,2.5],[0.5,4.5],[0.5,6.5],[0.5,8.5], // y column [8.5,8.5]]; // sweet spot Math.sqr = function(x) { return Math.pow(x,2) } function cloneJSON( item ) { return JSON.parse ( JSON.stringify ( item ) ) } // ********************** GENETIC ALGO FUNCTIONS ************************* function mutationFunction(phenotype) { phenotype.x += 3*(Math.random()*2 - 1)*(Math.random()*2 - 1)*(Math.random()*2 - 1); phenotype.y += 3*(Math.random()*2 - 1)*(Math.random()*2 - 1)*(Math.random()*2 - 1); return phenotype; } function crossoverFunction(a, b) { let x = cloneJSON(a) let y = cloneJSON(b) x.x = b.x; y.y = a.y; return Math.random() < .5 ? [x,y] : [y,x]; } function positionScore(x,y){ return islands.map(function(island) { let islandValue = island[0]/2.+island[1] let distance = Math.sqrt( (Math.sqr(x - island[0]) + Math.sqr(y - island[1])) / 2 ) if (distance > .4) { return -10 } return Math.min(.4,.5 - distance) * islandValue }).reduce(function(a,b) { return Math.max(a,b) })*10 } function fitnessFunction(phenotype) { return positionScore(phenotype.x,phenotype.y) } function doesABeatBFunction(a,b) { let aScore = fitnessFunction(a) let bScore = fitnessFunction(b) let distance = Math.sqrt(Math.sqr(a.x - b.x) + Math.sqr(a.y - b.y)) if ((aScore >= 0)&&(bScore < 0)) return aScore if (aScore < 0) return 0 if (distance > 2 && Math.random() > .1/distance ) return 0 return aScore - bScore } let ga = 0; function basic_ga() { ga = geneticAlgorithmConstructor({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, population: !ga?[ { x: .3 , y: .5 } ]:ga.population(), populationSize: 500 }); } function diversity_ga() { ga = ga.clone({ doesABeatBFunction: doesABeatBFunction }); } function reset_population() { ga = ga.clone({ population: [ { x: .3 , y: .5 } ] }); } // ********************** UI STUFF ************************* function drawCircle(x,y,s,color) { context.fillStyle = "hsla("+color+",90%,40%,1)"; context.strokeStyle = "hsla("+color+",100%,20%,1)"; context.beginPath(); context.arc(x*MAX_SIZE/10., y*MAX_SIZE/10., s*MAX_SIZE/10., 0, Math.PI * 2, true); context.fill(); context.stroke(); } function drawIsland(x,y) { drawCircle(x,y,.4,90) drawCircle(x,y,.1,90) context.fillStyle = "hsla(0,0%,0%,1)"; context.strokeStyle = "hsla(0,0%,0%,1)"; context.fillText(Math.round(positionScore(x,y)),x*MAX_SIZE/10.-5, y*MAX_SIZE/10.-10); } function drawFrog(x,y) { let score = positionScore(x,y) drawCircle(x,y,.1,score > 0 ? 270: 320) } function draw() { // clear the screen context.fillStyle = "hsla(180,90%,40%,1)"; context.fillRect(0, 0, canvas.width, canvas.height); context.fill(); if (ga) { for(island of islands) { drawIsland(island[0],island[1]) } for(frog of ga.population()) { drawFrog(frog.x,frog.y) } } window.requestAnimationFrame(draw); } window.onload = function() { basic_ga(); window.requestAnimationFrame(draw); } var doSimulation = false setInterval(function(){ if (doSimulation) { for(let x=0 ; x<10; x++) ga.evolve() madeit.innerText = ga.population().map(function(p){ return Math.abs(p.x-8.5) < 1 && Math.abs(p.y-8.5) < 1 ? 1 : 0 }).reduce(function(a,b){ return a+b }) + " of " + ga.population().length } },50)
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
<!doctype html> <html lang="en"> <head> <meta charset="utf-8"> <title>Genetic Algorithm - Island Hop</title> </head> <style> body { margin: 10px; background-color: #ddd; color: black; } canvas { background-color: white; display: inline-block; } div { display: inline-block; height: 610px; vertical-align: top; margin: 10px; } div.side { width:200px; } button { display: block; } </style> <body> <h1>Genetic Algorithm - Island Hop Example</h1> <div><canvas id="canvas" width="600px" height="600px">what, no canvas? Yo browsa suks!</canvas></div> <div class="side"> <p>The objective is for the frogs to evolve to the island with the highest score.</p><p> The islands are to be though of as genetic attributes, not physical islands.</p><p> The X axis represents one attribute and the Y axis another.</p><p> Having both doesn't work until they are both really developed.</p><p> See how basic vs diversity genetic algorithms do. </p> <button onclick="basic_ga()">Basic GA</button> <button onclick="diversity_ga()">With Diversity</button> <p></p> <button onclick="doSimulation=true">Start Simulation</button> <button onclick="doSimulation=false">Stop Simulation</button> <p></p> <button onclick="reset_population()">Reset Population</button> <p>Number of frogs that made it </p> <p id="madeit">0</p> </div> <script> let module = {}; </script> <script src="/index.js"></script> <script> let geneticAlgorithmConstructor = module.exports; </script> <script src="islandHopBrowser.js"></script> </body> </html>
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
var mutationFunction = function( phenotype ) { var gene1_index = Math.floor(Math.random() * phenotype.length ) var gene2_index = Math.floor(Math.random() * phenotype.length ) var temp = phenotype[ gene1_index ] phenotype[ gene1_index ] = phenotype[ gene2_index ] phenotype[ gene2_index ] = temp //console.log("mutant = " + JSON.stringify(phenotype)) return phenotype } function helper_concat(index,phenotypeA,phenotypeB) { return phenotypeA.slice(0,index).concat( phenotypeB.slice(index) ).concat( phenotypeA.slice(index) ) } function helper_removeDuplicates(phenotype) { var duplicates = {} return phenotype.filter( function( item ) { if ( duplicates[JSON.stringify(item)] ) { return false } else { duplicates[JSON.stringify(item)] = true ; return true } }) } function crossoverFunction(phenotypeA, phenotypeB) { var index = Math.round( Math.random() * phenotypeA.length ) phenotypeX = helper_removeDuplicates( helper_concat(index,phenotypeA,phenotypeB) ) phenotypeY = helper_removeDuplicates( helper_concat(index,phenotypeB,phenotypeA) ) // move, copy, or append some values from a to b and from b to a return [ phenotypeX , phenotypeY ] } var fitnessFunction = function( phenotype ) { var calculateDistance = function( a , b ) { return Math.sqrt( Math.pow( a.x - b.x , 2 ) + Math.pow( a.y - b.y , 2 ) ) } var prev = phenotype[ 0 ] //console.log("The phenotype are " + JSON.stringify(phenotype)) var distances = phenotype.slice(1).map( function( item ) { result = [prev,item] ; prev = item ; return result } ) //console.log("The distances are " + JSON.stringify(distances)) var distance = distances.reduce( function( total, item ) { //console.log("item = " + JSON.stringify(item) ) return total + calculateDistance(item[0],item[1]) } , 0 ) //console.log("total = " + distance ) return -1 * distance } // outline a large square but not in order. var firstPhenotype = [] for (i=2;i<10;i++) { firstPhenotype.push( {x:i,y:1} ) firstPhenotype.push( {x:1,y:i} ) firstPhenotype.push( {x:i,y:10} ) firstPhenotype.push( {x:10,y:i} ) } var geneticAlgorithmConstructor = require('../index') var geneticAlgorithm = geneticAlgorithmConstructor({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, population: [ firstPhenotype ], populationSize:1000 }); console.log("Starting with:") console.log( firstPhenotype ) var best = [] var previousBestScore = 0 for( var a = 0 ; a < 100 ; a++ ) { for( var i = 0 ; i < 25 ; i++ ) geneticAlgorithm.evolve() var score = geneticAlgorithm.bestScore() if ( score == previousBestScore ) { break; } previousBestScore = score console.log("Distance is " + -1 * score) } best = geneticAlgorithm.best() console.log("Finished with:") console.log(best) console.log("Distance is " + -1 * fitnessFunction(best))
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
var GeneticAlgorithm = require('../index') console.log("\nTest to see how well GeneticAlgorithm does with islands.\n") console.log("There are 10 islands located at [0.5,0.5], [2.5,0.5] ... [8.5,8.5], like so") console.log("+----------------+") console.log("| A B D E G |") console.log("| C |") console.log("| F |") console.log("| H |") console.log("| I X |") console.log("+----------------+") console.log("A = [0.5,0.5] and is the least valuable genetic island.") console.log("X = [8.5,8.5] is the most valuable, but it is very far from anything.") console.log("One phenotype starts at [0.3,0.5] which is close to island A.") console.log("The space inbetween the islands is very undesirable.") console.log("The mutation function allows mutations that have a small chance of crossing") console.log("islands and is not large enough to reach X.\n") let islands = [[0.5,0.5], // upper left [2.5,0.5],[4.5,0.5],[6.5,0.5],[8.5,0.5], // x column [0.5,2.5],[0.5,4.5],[0.5,6.5],[0.5,8.5], // y column [8.5,8.5]]; // sweet spot Math.sqr = function(x) { return Math.pow(x,2) } function cloneJSON( item ) { return JSON.parse ( JSON.stringify ( item ) ) } // ********************** GENETIC ALGO FUNCTIONS ************************* function mutationFunction(phenotype) { phenotype.x += 3*(Math.random()*2 - 1)*(Math.random()*2 - 1)*(Math.random()*2 - 1); phenotype.y += 3*(Math.random()*2 - 1)*(Math.random()*2 - 1)*(Math.random()*2 - 1); return phenotype; } function crossoverFunction(a, b) { let x = cloneJSON(a) let y = cloneJSON(b) x.x = b.x; y.y = a.y; return Math.random() < .5 ? [x,y] : [y,x]; } function positionScore(x,y){ return islands.map(function(island) { let islandValue = island[0]/2.+island[1] let distance = Math.sqrt( (Math.sqr(x - island[0]) + Math.sqr(y - island[1])) / 2 ) if (distance > .4) { return -10 } return Math.min(.4,.5 - distance) * islandValue }).reduce(function(a,b) { return Math.max(a,b) })*10 } function fitnessFunction(phenotype) { return positionScore(phenotype.x,phenotype.y) } // This function implements genetic diversity. function doesABeatBFunction(a,b) { let aScore = fitnessFunction(a) let bScore = fitnessFunction(b) let distance = Math.sqrt(Math.sqr(a.x - b.x) + Math.sqr(a.y - b.y)) // if b isn't on an island and 'a' is, then a wins if (aScore >= 0 && bScore < 0) return aScore // if a isn't on an island, it can't beat b if (aScore < 0) return 0 // if it is far away, then there is little chance if (distance > 2 && Math.random() > .1/distance ) return 0 // otherwise, a beats b by the margin of difference return aScore - bScore } let ga = GeneticAlgorithm({ mutationFunction: mutationFunction, crossoverFunction: crossoverFunction, fitnessFunction: fitnessFunction, doesABeatBFunction: doesABeatBFunction, population: [ { x: .3 , y: .5 } ], populationSize: 500 }); function finished() { best_frog = ga.best(); return best_frog.x > 8 && best_frog.x < 9 && best_frog.y > 8 && best_frog.y < 9 ; } var done = finished() for( var loop = 1 ; loop <= 1000 && !done; loop++ ) { ga.evolve() if ( loop % 50 == 0 ) { process.stdout.write("Completed " + loop + " evolutions : ") console.log(ga.best()); done = finished() } } if ( finished() ) { console.log("\nSuccessfully hopped evolutionaryly difficult islands.\n") } else { console.log("\nFailed to hop evolutionaryly difficult islands.\n") }
{ "repo_name": "panchishin/geneticalgorithm", "stars": "95", "repo_language": "JavaScript", "file_name": "islandHop.js", "mime_type": "text/plain" }
# DML

<b>An Object Oriented Web Programming Framework</b>

DML is a new and radical approach to make web programming more efficient. Frameworks like Vue or React use Javascript to create dynamic content too, but they need their own, very special ecosystem. This adds complexity to web design - and a long and steep learning curve. DML is different: it was designed to make web programming easier and more efficient. Why use different languages like HTML, CSS, JS, AJAX, jQuery and PHP to solve a single task?

The initial approach was simple, but it turned out to be amazingly useful: DML exposes (most) HTML tags as Javascript functions. So, instead of creating an `<h1>Headline</h1>` tag, you can write `h1("Headline")` to create the DOM object directly in Javascript.

What is the advantage, if the result is identical? <b>All DML functions return a reference</b> to the newly created object, which can be assigned directly to a JS variable. In standard web sites - as HTML and JS are parsed by different systems - we need IDs to communicate between HTML and JS. The usual way is

```
<h1 id="MyHeadline">Headline</h1>
<script>
  var h = document.getElementById("MyHeadline");
  h.textContent = "New Headline";
</script>
```

But IDs are always globally scoped, so encapsulation is simply not possible in plain web sites. With DML, things are different: you can write the same script as follows

```
<script>
  let h = h1("MyHeadline");
  h.textContent = "New Headline";
</script>
```

This sounds like a small advantage, but in fact it is a huge step! DML brings Javascript closer to the DOM, which has a great impact: DOM elements are created as part of a Javascript object, so they can be fully encapsulated too. No need to create a virtual DOM, no need to use a shadow DOM. Please try it out to see the advantages. DML websites run smoothly, without unnecessary page refreshes, even with very complex content. The <a href="https://efpage.de/DML/DML_homepage/">DML homepage</a> was created using only DML!

Don't be fooled by the apparent simplicity: DML provides a complete design platform to create web components and templates - just in a very different way than you may be used to. This lib is made for OO programmers rather than for web designers. Just add the DML library to your website and start HTML programming... And let the power of OO be with you!

A minimal DML page will look like this:

```
<!DOCTYPE html>
<html lang="de">
  <head>
    <meta charset="utf-8">
    <title>title</title>
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <script src="../lib/DML.js"></script>
  </head>
  <body>
    <script>
      "use strict";
      h1("test")
      p("test")
    </script>
  </body>
</html>
```

## DML in a nutshell

1. DML implements only a very small set of "generic" functions that let you use the HTML DOM API in a more "natural" way. Most HTML tags are implemented as Javascript functions: h1() creates a headline, p() creates a paragraph, and so on.
2. All DML functions return a reference to the DOM element they just created. No need for IDs, classes and complicated Javascript functions to retrieve DOM references any more.
3. DOM references are returned as LOCAL variables, removing unwanted side effects and naming conflicts. DOM elements created inside a class are owned only by the instantiated object. True encapsulation can be realized.
4. Functions act as templates: groups of DOM elements can be created by functions. As functions can use any kind of logic, functional templating is far more flexible than static templates (see the first sketch after this list).
5. Classes can be used to implement DOM objects AND event logic. Class objects can therefore generate very complex web objects that are still completely encapsulated. This is the true basis for OO and for reusing web components (see the second sketch after this list).
6. The DML library is organized in multiple script libraries. The first - and always necessary - is DML.js. Functional units (like menus, table handlers etc.) are put together in separate units that usually contain only one class definition. General scripts should reside in the "lib" folder; project-specific scripts should be placed on the same level as your HTML files.
7. Yes - it's true: DML creates dynamic web pages by direct DOM manipulation, which is not search engine friendly (except with Google, which can handle this). But this is no different from React or Vue.

For more information see: https://efpage.de/DML/DML_homepage/
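As an illustration of item 4, here is a minimal sketch of a functional template. It assumes only the two DML functions already shown above, h1() and p(); the helper name labeledSection is made up for this example:

```
<script>
  "use strict";
  // A function acting as a template: each call creates a group of
  // DOM elements and returns the references the caller needs.
  function labeledSection(title, lines) {
    let refs = { headline: h1(title), paragraphs: [] }
    for (let line of lines)
      refs.paragraphs.push( p(line) )     // p() returns the new element
    return refs
  }

  let section = labeledSection("News", ["first entry", "second entry"])
  section.headline.style.color = "navy"   // plain DOM access, no IDs involved
</script>
```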
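And a sketch of item 5, an encapsulated class component, under the same assumptions (only h1() and p() from DML; the Counter class and its members are invented for this illustration):

```
<script>
  "use strict";
  class Counter {
    constructor(title) {
      // All DOM elements live in instance properties, so two Counter
      // objects never interfere - no global IDs, no shadow DOM.
      this.count = 0
      this.headline = h1(title)
      this.display = p("count: 0")
      // Event logic stays inside the class, next to the elements it drives.
      this.display.onclick = () => {
        this.count++
        this.display.textContent = "count: " + this.count
      }
    }
  }
  new Counter("Click the line below")
</script>
```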
{ "repo_name": "efpage/DML", "stars": "26", "repo_language": "JavaScript", "file_name": "DML_onlineEditor.js", "mime_type": "text/x-c++" }
// Browser check for ES6
function get_browser() {
    var ua = navigator.userAgent, tem,
        M = ua.match(/(opera|chrome|safari|firefox|msie|trident(?=\/))\/?\s*(\d+)/i) || [];
    if (/trident/i.test(M[1])) {
        tem = /\brv[ :]+(\d+)/g.exec(ua) || [];
        return { name: 'IE', version: (tem[1] || '') };
    }
    if (M[1] === 'Chrome') {
        // Opera and Edge report themselves as Chrome, so check for them explicitly
        tem = ua.match(/\b(OPR|Edge)\/(\d+)/)
        if (tem != null) { return { name: tem[1].replace('OPR', 'Opera'), version: tem[2] }; }
    }
    M = M[2] ? [M[1], M[2]] : [navigator.appName, navigator.appVersion, '-?'];
    if ((tem = ua.match(/version\/(\d+)/i)) != null) { M.splice(1, 1, tem[1]); }
    return { name: M[0], version: M[1] };
}

try {
    // Throws a SyntaxError on pre-ES6 browsers (default parameters, arrow functions)
    new Function("(a = 0) => a");
} catch (err) {
    document.writeln("<h3>Please update your Browser!</h3>");
    let b = get_browser();
    document.writeln("- Current: <b>" + b.name + ", version " + b.version + "</b><br><br>");
    document.writeln("<div style='padding: 10px; border: thin solid silver; background-color: #ffffee; display: inline-block;'>");
    document.writeln("Full ECMAScript 6 (ES2015) support required<br>");
    document.writeln("</div><br><br>");
}
{ "repo_name": "efpage/DML", "stars": "26", "repo_language": "JavaScript", "file_name": "DML_onlineEditor.js", "mime_type": "text/x-c++" }