├── Discord.x
├── Instagram.x
├── Makefile
├── PFPViewer.plist
├── README.md
├── ShareImageViewController.h
├── ShareImageViewController.m
├── TikTok.x
├── Twitch.x
├── VK.x
├── YYImage
├── YYAnimatedImageView.h
├── YYAnimatedImageView.m
├── YYFrameImage.h
├── YYFrameImage.m
├── YYImage.h
├── YYImage.m
├── YYImageCoder.h
├── YYImageCoder.m
├── YYSpriteSheetImage.h
└── YYSpriteSheetImage.m
├── YouTube.x
└── control
/Discord.x:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
// Alert-level helper window that hosts the presented share controller.
// Torn down via -didVCDismiss (or immediately if the download fails).
UIWindow *window;

/// Rewrites the "size=" query value of a Discord CDN URL to the maximum
/// (4096), shows a loading overlay, downloads the asset off the main thread,
/// and presents it in a ShareImageViewController on a dedicated window.
/// @param view      The image view the long-press originated from (hosts the spinner overlay).
/// @param delegate  Receives -didVCDismiss so it can tear the window down.
/// @param URLString The avatar/banner URL whose size parameter is upscaled.
static void showImageFromURL(UIView *view, id delegate, NSString *URLString) {
    // Request the largest available asset instead of the thumbnail-sized one.
    NSRegularExpression *regex = [NSRegularExpression regularExpressionWithPattern:@"(size=)\\d+" options:0 error:nil];
    NSRange range = NSMakeRange(0, [URLString length]);
    URLString = [regex stringByReplacingMatchesInString:URLString options:0 range:range withTemplate:@"size=4096"];

    dispatch_async(dispatch_get_main_queue(), ^{
        UIImpactFeedbackGenerator *haptic = [[UIImpactFeedbackGenerator alloc] initWithStyle:UIImpactFeedbackStyleMedium];
        [haptic prepare];
        [haptic impactOccurred];

        window = [[UIWindow alloc] initWithFrame:[UIScreen mainScreen].bounds];
        window.windowLevel = UIWindowLevelAlert + 1;
        window.rootViewController = [[UIViewController alloc] init];
        [window makeKeyAndVisible];

        UIView *bg = [[UIView alloc] initWithFrame:view.bounds];
        bg.backgroundColor = [[UIColor blackColor] colorWithAlphaComponent:0.7];
        [view addSubview:bg];

        UIActivityIndicatorView *activityIndicator = [[UIActivityIndicatorView alloc] initWithActivityIndicatorStyle:UIActivityIndicatorViewStyleLarge];
        activityIndicator.color = [UIColor colorWithRed:255/255.0 green:29/255.0 blue:83/255.0 alpha:1.0];
        // Center within the overlay's own bounds; view.center is expressed in
        // the superview's coordinate space and was only correct by accident.
        activityIndicator.center = CGPointMake(CGRectGetMidX(bg.bounds), CGRectGetMidY(bg.bounds));
        [bg addSubview:activityIndicator];
        [activityIndicator startAnimating];

        dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
            NSData *data = [NSData dataWithContentsOfURL:[NSURL URLWithString:URLString]];

            dispatch_async(dispatch_get_main_queue(), ^{
                [bg removeFromSuperview];

                if (!data) {
                    // Download failed: tear the helper window down; otherwise it
                    // stays key and silently swallows every touch in the app.
                    window.hidden = YES;
                    window = nil;
                    return;
                }

                NSString *extension = [[URLString componentsSeparatedByString:@"?size="].firstObject pathExtension];

                ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
                shareVC.delegate = delegate;
                UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
                navigationController.modalPresentationStyle = UIModalPresentationFormSheet;

                if ([extension isEqualToString:@"gif"]) {
                    // Keep the raw data so the animation survives; re-encoding
                    // through UIImage would flatten it to a single frame.
                    shareVC.gifDataToShare = data;
                } else {
                    // Round-trip through PNG so the shared file is a lossless still.
                    UIImage *image = [UIImage imageWithData:data];
                    NSData *pngData = UIImagePNGRepresentation(image);
                    shareVC.imageToShare = [UIImage imageWithData:pngData];
                }

                [window.rootViewController presentViewController:navigationController animated:YES completion:nil];
            });
        });
    });
}
59 |
%hook DCDFastImageView
// Run %orig first so the recognizer is attached to the object the
// initializer actually returns (it may substitute a different instance).
- (instancetype)initWithFrame:(CGRect)frame {
    self = %orig;
    if (self) {
        UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(showImage:)];
        longPress.minimumPressDuration = 0.3;
        [self addGestureRecognizer:longPress];
    }
    return self;
}

- (void)layoutSubviews {
    %orig;

    // Disable interaction with animated effect on profile picture
    if ([[self.source description] containsString:@"avatar-decoration-presets"]) {
        [self setUserInteractionEnabled:NO];
    }
}

%new
/// Long-press handler: hands the image source URL to the shared presenter.
- (void)showImage:(UILongPressGestureRecognizer *)sender {
    if (sender.state == UIGestureRecognizerStateBegan) {
        NSString *URLString = self.source.request.URL.absoluteString;
        if (URLString) showImageFromURL(self, self, URLString);
    }
}

%new
/// ShareImageViewController delegate callback: dismiss the helper window.
/// Hiding it (rather than -removeFromSuperview, which is a no-op for a
/// window that has no superview) releases key status back to the app.
- (void)didVCDismiss {
    window.hidden = YES;
    window = nil;
}
%end
93 |
%hook RCTImageView
// Run %orig first so the recognizer lands on the returned object.
- (id)initWithBridge:(id)arg1 {
    self = %orig;
    if (self) {
        UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(showImage:)];
        longPress.minimumPressDuration = 0.3;
        [self addGestureRecognizer:longPress];
    }
    return self;
}

%new
/// Long-press handler for banner images rendered by React Native.
- (void)showImage:(UILongPressGestureRecognizer *)sender {
    if (sender.state == UIGestureRecognizerStateBegan) {
        // firstObject is nil-safe; the original imageSources[0] would throw
        // NSRangeException when the sources array is empty.
        RCTImageSource *source = self.imageSources.firstObject;
        NSString *URLString = source.request.URL.absoluteString;
        if (URLString) showImageFromURL(self, self, URLString);
    }
}

%new
/// ShareImageViewController delegate callback: dismiss the helper window.
/// Hiding it (rather than -removeFromSuperview, a no-op for windows)
/// releases key status back to the app.
- (void)didVCDismiss {
    window.hidden = YES;
    window = nil;
}
%end
119 |
--------------------------------------------------------------------------------
/Instagram.x:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
// Minimal declarations of Instagram's private image classes used by the hook below.
@interface IGImageSpecifier : NSObject
@property(readonly, nonatomic) NSURL *url; // Remote URL of the image asset.
@end

@interface IGImageView : UIImageView
@property(retain, nonatomic) IGImageSpecifier *imageSpecifier; // Describes the displayed image.
@end

@interface IGProfilePicturePreviewViewController : UIViewController
{ IGImageView *_profilePictureView; } // Read via KVC in the hook below.
- (void)addHandleLongPress;
- (void)handleLongPress:(UILongPressGestureRecognizer *)sender;
@end
16 |
%hook IGProfilePicturePreviewViewController
- (void)viewDidLoad {
    %orig;
    [self addHandleLongPress];
}

%new
/// Installs the long-press recognizer on the preview's root view.
- (void)addHandleLongPress {
    UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)];
    longPress.minimumPressDuration = 0.3;
    [self.view addGestureRecognizer:longPress];
}

%new
/// Downloads the full-size avatar off the main thread and presents it in a
/// ShareImageViewController. The original performed the synchronous
/// -dataWithContentsOfURL: on the main thread, freezing the UI for the
/// whole duration of the request.
- (void)handleLongPress:(UILongPressGestureRecognizer *)sender {
    if (sender.state != UIGestureRecognizerStateBegan) return;

    IGImageView *profilePictureView = [self valueForKey:@"_profilePictureView"];
    NSURL *url = profilePictureView.imageSpecifier.url;
    if (!url) return;

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // Round-trip through PNG so the shared file is a lossless still.
        NSData *data = [NSData dataWithContentsOfURL:url];
        UIImage *image = [UIImage imageWithData:data];
        NSData *pngData = UIImagePNGRepresentation(image);
        UIImage *pngImage = [UIImage imageWithData:pngData];
        if (!pngImage) return; // Download or decode failed; nothing to show.

        dispatch_async(dispatch_get_main_queue(), ^{
            // Walk to the top-most presented controller so the sheet is not
            // presented from a controller that is already covered.
            UIViewController *currentController = [UIApplication sharedApplication].keyWindow.rootViewController;
            while (currentController.presentedViewController) currentController = currentController.presentedViewController;

            if ([currentController isKindOfClass:[UINavigationController class]]) {
                currentController = ((UINavigationController *)currentController).topViewController;
            }

            ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
            shareVC.imageToShare = pngImage;
            UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
            navigationController.navigationBar.tintColor = [UIColor colorWithRed:255/255.0 green:29/255.0 blue:83/255.0 alpha:1.0];
            navigationController.modalPresentationStyle = UIModalPresentationFormSheet;
            [currentController presentViewController:navigationController animated:YES completion:nil];
        });
    });
}
%end
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Build rootless packages with `make ROOTLESS=1`.
ifeq ($(ROOTLESS),1)
THEOS_PACKAGE_SCHEME=rootless
endif

# Release configuration: optimized, stripped final package for arm64, iOS 13+.
DEBUG=0
FINALPACKAGE=1
export ARCHS = arm64
TARGET := iphone:clang:latest:13.0

include $(THEOS)/makefiles/common.mk

TWEAK_NAME = PFPViewer
# Compile all Logos sources, ObjC sources, and the bundled YYImage sources.
$(TWEAK_NAME)_FILES = $(wildcard *.x *.m YYImage/*.m)
$(TWEAK_NAME)_FRAMEWORKS = UIKit Foundation Photos
# WebP.framework is required by YYImage for (animated) WebP decoding.
$(TWEAK_NAME)_EXTRA_FRAMEWORKS = WebP
$(TWEAK_NAME)_CFLAGS = -fobjc-arc

include $(THEOS_MAKE_PATH)/tweak.mk
19 |
--------------------------------------------------------------------------------
/PFPViewer.plist:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
	<key>Filter</key>
	<dict>
		<key>Bundles</key>
		<array>
			<string>com.burbn.instagram</string>
			<string>com.google.ios.youtube</string>
			<string>com.hammerandchisel.discord</string>
			<string>tv.twitch</string>
			<string>com.vk.vkclient</string>
			<string>com.zhiliaoapp.musically</string>
		</array>
	</dict>
</dict>
</plist>
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # PFPViewer
A tweak that allows viewing profile pictures with a long-press gesture
3 |
4 | Supported apps:
5 |
6 | - Discord: Profile pictures and banners (static and animated) via __[YYImage](https://github.com/ibireme/YYImage)__. Example with __[FLAnimatedImage](https://github.com/Flipboard/FLAnimatedImage)__ is available __[here](https://github.com/dayanch96/PFPViewer/tree/afe5071a82703f56bea4ee07fdf6dc596c39ccc4)__.
7 | - Twitch: Profile pictures
8 | - Instagram: Profile pictures
9 | - TikTok: Profile pictures (static and animated WebPs) via __[YYImage](https://github.com/ibireme/YYImage)__. WebP.framework required. See Installation on YYImage page
10 | - VK: Profile pictures in user profiles (cropped, if profile private) and in 'People you may know' section (full images)
11 | - YouTube: Profile pictures in profiles
12 |
There are no options to configure. Just install the tweak, or inject it into an app from the list above.
14 |
--------------------------------------------------------------------------------
/ShareImageViewController.h:
--------------------------------------------------------------------------------
#import <UIKit/UIKit.h>
#import <Foundation/Foundation.h>
#import <Photos/Photos.h>
4 | #import "YYImage/YYImage.h"
5 |
// Private taptic-engine interface used for haptic feedback on present.
@interface _UITapticEngine : NSObject
- (void)actuateFeedback:(NSInteger)count;
@end

@interface UIDevice (Private)
- (_UITapticEngine *)_tapticEngine;
@end

// Implemented by hooked views so they can tear down their helper window
// once the share controller goes away.
@protocol ShareImageViewControllerDelegate
- (void)didVCDismiss;
@end
@interface ShareImageViewController : UIViewController
@property (nonatomic, weak) id<ShareImageViewControllerDelegate> delegate;
@property (nonatomic, strong) UIImage *imageToShare;
@property (nonatomic, strong) NSData *gifDataToShare;
@end
23 |
@interface ShareImageViewController ()
@end

// Below: minimal declarations of each hooked app's private classes.
// Only the members the tweak actually touches are declared.

// Discord
// PFPs
@interface RCTImageSource : NSObject
@property (nonatomic, copy, readonly) NSURLRequest *request; // Request whose URL points at the avatar.
@end

@interface DCDFastImageView : UIImageView
@property (nonatomic, strong) RCTImageSource *source;
- (void)showImage:(UILongPressGestureRecognizer *)sender;
- (void)didVCDismiss;
@end

// Banners
@interface RCTImageView : UIImageView
@property (nonatomic, strong) NSArray *imageSources; // Elements are RCTImageSource.
- (void)showImage:(UILongPressGestureRecognizer *)sender;
- (void)didVCDismiss;
@end

// TikTok
@interface BDImage : UIImage
@property (nonatomic, strong, readwrite) NSURL *bd_webURL; // Source URL of the loaded image.
@end

@interface BDImageView : UIImageView
@property (nonatomic, strong, readwrite) BDImage *bd_baseImage;
@end

@interface AWEProfileImagePreviewView : UIView
@property (nonatomic, strong, readwrite) BDImageView *avatar;
- (void)showPFP:(UILongPressGestureRecognizer *)sender;
@end

@interface TTKAvatar : NSObject
@property (nonatomic, assign, readonly) BDImageView *avatarImageView;
@end

@interface TTKEnlargeAvatarViewController : UIViewController
@property (nonatomic, strong, readwrite) UIButton *closeButton;
@property (nonatomic, strong, readwrite) TTKAvatar *avatar;
- (void)showPFP:(UILongPressGestureRecognizer *)sender;
@end

// Twitch
@interface TWImageView : UIImageView
@property (nonatomic, strong, readwrite) UIImage *placeholder; // Used to detect avatar views by asset name.
@property (nonatomic, copy, readwrite) NSURL *url;
- (void)showPFP:(UILongPressGestureRecognizer *)sender;
@end

// VK
// People you may know
@interface WAImageView : UIImageView
@property (nonatomic, strong) NSString *url;
@end

@interface DimmingButton : UIControl
- (void)addHandleLongPress;
- (void)handleLongPress:(UILongPressGestureRecognizer *)sender;
- (WAImageView *)lookForWAImageView:(UIView *)view;
@end

//Old UI PFPs
@interface UserProfileMainPhoto : UIImageView
@property (nonatomic, strong) NSString *photoURL;
- (void)addHandleLongPress;
- (void)handleLongPress:(UILongPressGestureRecognizer *)sender;
@end

// YouTube
@interface ASNetworkImageNode : NSObject
@property (atomic, copy, readwrite) NSURL *URL;
@end

@interface _ASDisplayView : UIView
@property (nonatomic, strong, readwrite) ASNetworkImageNode *keepalive_node;
- (void)savePFP:(UILongPressGestureRecognizer *)sender;
@end
--------------------------------------------------------------------------------
/ShareImageViewController.m:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
@implementation ShareImageViewController

#pragma mark - Lifecycle

/// Builds the transparent, blurred preview UI with close/share bar buttons
/// and installs exactly one content view for the previewed image or GIF.
- (void)viewDidLoad {
    [super viewDidLoad];

    UIBarButtonItem *dismissButton = [[UIBarButtonItem alloc] initWithImage:[UIImage systemImageNamed:@"xmark"]
                                                                      style:UIBarButtonItemStylePlain
                                                                     target:self
                                                                     action:@selector(closeButtonTapped:)];

    UIBarButtonItem *shareButton = [[UIBarButtonItem alloc] initWithImage:[UIImage systemImageNamed:@"square.and.arrow.up"]
                                                                    style:UIBarButtonItemStylePlain
                                                                   target:self
                                                                   action:@selector(shareButtonTapped:)];

    self.title = @"💕";
    self.view.backgroundColor = [UIColor clearColor];
    self.navigationItem.leftBarButtonItem = dismissButton;
    self.navigationItem.rightBarButtonItem = shareButton;

    // Fully transparent navigation bar over the blurred backdrop.
    self.navigationController.navigationBar.translucent = YES;
    [self.navigationController.navigationBar setBackgroundImage:[UIImage new] forBarMetrics:UIBarMetricsDefault];
    self.navigationController.navigationBar.shadowImage = [UIImage new];
    [self.navigationController.navigationBar setTintColor:[UIColor colorWithRed:255/255.0 green:29/255.0 blue:83/255.0 alpha:1.0]];

    UIBlurEffect *blurEffect = [UIBlurEffect effectWithStyle:UIBlurEffectStyleLight];
    UIVisualEffectView *blurEffectView = [[UIVisualEffectView alloc] initWithEffect:blurEffect];
    blurEffectView.frame = self.view.bounds;
    blurEffectView.autoresizingMask = UIViewAutoresizingFlexibleWidth | UIViewAutoresizingFlexibleHeight;
    [self.view addSubview:blurEffectView];
    [self.view sendSubviewToBack:blurEffectView];

    [[UIDevice currentDevice]._tapticEngine actuateFeedback:1];

    // Mutually exclusive branches. The original's separate `if` statements
    // could stack an empty UIImageView on top of the GIF view because the
    // GIF branch and the still-image else-branch both executed.
    if (self.gifDataToShare) {
        YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithFrame:self.view.bounds];
        imageView.contentMode = UIViewContentModeScaleAspectFit;
        imageView.image = [YYImage imageWithData:self.gifDataToShare];
        [self.view addSubview:imageView];
    } else if ([self.imageToShare isKindOfClass:[YYImage class]] && ((YYImage *)self.imageToShare).animatedImageData) {
        YYAnimatedImageView *animatedImageView = [[YYAnimatedImageView alloc] initWithImage:(YYImage *)self.imageToShare];
        animatedImageView.frame = self.view.bounds;
        animatedImageView.contentMode = UIViewContentModeScaleAspectFit;
        [self.view addSubview:animatedImageView];
    } else {
        UIImageView *imageView = [[UIImageView alloc] initWithFrame:self.view.bounds];
        imageView.contentMode = UIViewContentModeScaleAspectFit;
        imageView.image = self.imageToShare;
        [self.view addSubview:imageView];
    }
}

- (void)viewDidDisappear:(BOOL)animated {
    [super viewDidDisappear:animated];

    // NOTE(review): "not being dismissed" reads inverted — delegates expect
    // -didVCDismiss when this controller goes away for good. Preserved as-is
    // because Discord's helper-window teardown relies on the current timing;
    // confirm against -isBeingDismissed semantics before changing.
    if (![self isBeingDismissed] && [self.delegate respondsToSelector:@selector(didVCDismiss)]) {
        [self.delegate didVCDismiss];
    }
}

#pragma mark - Actions

- (void)closeButtonTapped:(id)sender {
    [self dismissViewControllerAnimated:YES completion:nil];
}

/// Shares the previewed content. Animated WebP is transcoded to GIF first
/// because Photos and most share targets cannot handle animated WebP.
- (void)shareButtonTapped:(id)sender {
    if ([self.imageToShare isKindOfClass:[YYImage class]] && ((YYImage *)self.imageToShare).animatedImageData) {
        NSData *animatedWebPData = ((YYImage *)self.imageToShare).animatedImageData;

        // Decode every frame of the animated WebP.
        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:animatedWebPData scale:[UIScreen mainScreen].scale];
        NSMutableArray *frames = [NSMutableArray new];
        for (NSUInteger i = 0; i < decoder.frameCount; i++) {
            YYImageFrame *frame = [decoder frameAtIndex:i decodeForDisplay:YES];
            if (frame) {
                [frames addObject:frame];
            }
        }

        // Re-encode the decoded frames as an infinitely looping GIF.
        YYImageEncoder *gifEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
        gifEncoder.loopCount = 0;
        for (YYImageFrame *frame in frames) {
            [gifEncoder addImage:frame.image duration:frame.duration];
        }
        NSData *animatedGIFData = [gifEncoder encode];
        if (!animatedGIFData) return; // Encoding failed; @[nil] would throw.

        UIActivityViewController *activityVC = [[UIActivityViewController alloc] initWithActivityItems:@[animatedGIFData] applicationActivities:nil];
        [self presentViewController:activityVC animated:YES completion:nil];
    } else {
        NSData *imageData;
        if ([self.imageToShare isKindOfClass:[UIImage class]]) {
            imageData = UIImagePNGRepresentation(self.imageToShare);
        } else {
            imageData = (NSData *)self.imageToShare;
        }

        // Guard: building an NSArray literal with a nil element throws.
        NSArray *activityItems = self.gifDataToShare ? @[self.gifDataToShare] : (imageData ? @[imageData] : nil);
        if (!activityItems) return;

        UIActivityViewController *activityVC = [[UIActivityViewController alloc] initWithActivityItems:activityItems applicationActivities:nil];
        [self presentViewController:activityVC animated:YES completion:nil];
    }
}

@end
111 |
--------------------------------------------------------------------------------
/TikTok.x:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
/// Downloads a (possibly animated) WebP avatar off the main thread and
/// presents it in a ShareImageViewController. The original fetched the data
/// synchronously on the caller's (main) thread, blocking the UI, and would
/// present a blank sheet when the download failed.
/// @param URL The avatar's remote URL.
static void showImageFromURL(NSURL *URL) {
    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        NSData *data = [NSData dataWithContentsOfURL:URL];
        YYImage *webpImage = [YYImage imageWithData:data];
        if (!webpImage) return; // Download or decode failed; nothing to show.

        dispatch_async(dispatch_get_main_queue(), ^{
            // Walk to the top-most presented controller before presenting.
            UIViewController *currentController = [[UIApplication sharedApplication] delegate].window.rootViewController;
            while (currentController.presentedViewController) currentController = currentController.presentedViewController;

            if ([currentController isKindOfClass:[UINavigationController class]]) {
                currentController = ((UINavigationController *)currentController).topViewController;
            }

            ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
            shareVC.imageToShare = webpImage;
            UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
            navigationController.modalPresentationStyle = UIModalPresentationFormSheet;
            [currentController presentViewController:navigationController animated:YES completion:nil];
        });
    });
}
20 |
%hook AWEProfileImagePreviewView
// Run %orig first so the recognizer is attached to the object the
// initializer actually returns.
- (id)initWithFrame:(CGRect)arg1 image:(id)arg2 imageURL:(id)arg3 backgroundColor:(id)arg4 userID:(id)arg5 type:(NSUInteger)arg6 {
    self = %orig;
    if (self) {
        UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(showPFP:)];
        longPress.minimumPressDuration = 0.3;
        [self addGestureRecognizer:longPress];
    }
    return self;
}

%new
/// Long-press handler: presents the full-size avatar if its web URL is known.
- (void)showPFP:(UILongPressGestureRecognizer *)sender {
    if (sender.state == UIGestureRecognizerStateBegan) {
        NSURL *avatar = self.avatar.bd_baseImage.bd_webURL;
        if (avatar) showImageFromURL(avatar);
    }
}
%end
38 |
%hook TTKEnlargeAvatarViewController
/// Installs a long-press recognizer on the enlarged avatar's image view.
- (void)viewDidLoad {
    %orig;

    UILongPressGestureRecognizer *recognizer =
        [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(showPFP:)];
    recognizer.minimumPressDuration = 0.3;
    [self.avatar.avatarImageView addGestureRecognizer:recognizer];
}

%new
/// Presents the full-size avatar once the long press begins.
- (void)showPFP:(UILongPressGestureRecognizer *)sender {
    if (sender.state != UIGestureRecognizerStateBegan) return;

    NSURL *avatarURL = self.avatar.avatarImageView.bd_baseImage.bd_webURL;
    if (avatarURL) showImageFromURL(avatarURL);
}
%end
--------------------------------------------------------------------------------
/Twitch.x:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
%hook TWImageView
- (void)layoutSubviews {
    %orig;

    TWImageView *imageView = (TWImageView *)self;
    if ([[imageView.placeholder description] containsString:@"avatar_placeholder"]) {
        imageView.userInteractionEnabled = YES;

        // layoutSubviews runs many times over a view's lifetime; attach the
        // recognizer only once. The original added a fresh recognizer on
        // every pass, so identical recognizers piled up on the view.
        BOOL alreadyAttached = NO;
        for (UIGestureRecognizer *recognizer in imageView.gestureRecognizers) {
            if ([recognizer isKindOfClass:[UILongPressGestureRecognizer class]]) {
                alreadyAttached = YES;
                break;
            }
        }

        if (!alreadyAttached) {
            UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(showPFP:)];
            longPress.minimumPressDuration = 0.3;
            [imageView addGestureRecognizer:longPress];
        }
    }
}

%new
/// Downloads the avatar off the main thread and presents it for sharing.
/// The original performed a synchronous -dataWithContentsOfURL: on the main
/// thread, freezing the UI for the duration of the request.
- (void)showPFP:(UILongPressGestureRecognizer *)sender {
    if (sender.state != UIGestureRecognizerStateBegan) return;

    NSURL *imageURL = ((TWImageView *)self).url;
    if (!imageURL) return;

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        UIImage *image = [UIImage imageWithData:[NSData dataWithContentsOfURL:imageURL]];
        if (!image) return; // Download or decode failed; nothing to show.

        dispatch_async(dispatch_get_main_queue(), ^{
            ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
            shareVC.imageToShare = image;
            UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
            navigationController.modalPresentationStyle = UIModalPresentationFormSheet;
            [[[UIApplication sharedApplication] delegate].window.rootViewController presentViewController:navigationController animated:YES completion:nil];
        });
    });
}
%end
31 |
// Bind the TWImageView hook to Twitch's Swift-mangled class name at load time.
%ctor {
    %init(TWImageView = objc_getClass("TwitchCoreUI.TWImageView"));
}
--------------------------------------------------------------------------------
/VK.x:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
3 | // People you may know
%hook DimmingButton
- (instancetype)initWithFrame:(CGRect)frame {
    self = %orig(frame);
    if (self) [self addHandleLongPress];
    return self;
}

%new
/// Installs the long-press recognizer on the cell's dimming button.
- (void)addHandleLongPress {
    UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)];
    longPress.minimumPressDuration = 0.3;
    [self addGestureRecognizer:longPress];
}

%new
/// Finds the avatar image view under this button, downloads the image off
/// the main thread (the original blocked the main thread with a synchronous
/// fetch), and presents it in a ShareImageViewController.
- (void)handleLongPress:(UILongPressGestureRecognizer *)sender {
    if (sender.state != UIGestureRecognizerStateBegan) return;

    [[UIDevice currentDevice]._tapticEngine actuateFeedback:1];
    WAImageView *imageView = [self lookForWAImageView:self];
    if (!imageView) return;

    // `url` is declared NSString but may be another object at runtime,
    // hence the -description round-trip (matches the original behavior).
    NSString *urlString = [imageView.url description];
    if (!urlString) return;

    NSURL *url = [NSURL URLWithString:urlString];
    if (!url) return;

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // Round-trip through PNG so the shared file is a lossless still.
        NSData *data = [NSData dataWithContentsOfURL:url];
        UIImage *image = [UIImage imageWithData:data];
        NSData *pngData = UIImagePNGRepresentation(image);
        UIImage *pngImage = [UIImage imageWithData:pngData];
        if (!pngImage) return; // Download or decode failed.

        dispatch_async(dispatch_get_main_queue(), ^{
            ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
            shareVC.imageToShare = pngImage;
            UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
            navigationController.navigationBar.tintColor = [UIColor colorWithRed:255/255.0 green:29/255.0 blue:83/255.0 alpha:1.0];
            navigationController.modalPresentationStyle = UIModalPresentationFormSheet;
            [[UIApplication sharedApplication].keyWindow.rootViewController presentViewController:navigationController animated:YES completion:nil];
        });
    });
}

%new
/// Depth-first search for the first WAImageView in `view`'s subtree.
- (WAImageView *)lookForWAImageView:(UIView *)view {
    if ([view isKindOfClass:objc_lookUpClass("WAImageView")]) {
        return (WAImageView *)view;
    }

    for (UIView *subview in view.subviews) {
        WAImageView *imageView = [self lookForWAImageView:subview];
        if (imageView) return imageView;
    }
    return nil;
}
%end
61 |
62 | // Modern UI PFPs
%hook ProfilePhotoView
// Force interaction on so gesture recognizers on the photo can fire.
- (BOOL)isUserInteractionEnabled { return YES; }
%end
66 |
67 | // Old UI PFPs
%hook UserProfileMainPhoto
- (instancetype)initWithFrame:(CGRect)frame {
    self = %orig(frame);
    if (self) [self addHandleLongPress];
    return self;
}

%new
/// Installs the long-press recognizer on the old-UI profile photo.
- (void)addHandleLongPress {
    UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(handleLongPress:)];
    longPress.minimumPressDuration = 0.3;
    [self addGestureRecognizer:longPress];
}

%new
/// Downloads the profile photo off the main thread (the original performed a
/// synchronous fetch on the main thread) and presents it for sharing.
- (void)handleLongPress:(UILongPressGestureRecognizer *)sender {
    if (sender.state != UIGestureRecognizerStateBegan) return;

    [[UIDevice currentDevice]._tapticEngine actuateFeedback:1];

    // `photoURL` is declared NSString but may be another object at runtime,
    // hence the -description round-trip (matches the original behavior).
    NSString *urlString = [self.photoURL description];
    if (!urlString) return;

    NSURL *url = [NSURL URLWithString:urlString];
    if (!url) return;

    dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_DEFAULT, 0), ^{
        // Round-trip through PNG so the shared file is a lossless still.
        NSData *data = [NSData dataWithContentsOfURL:url];
        UIImage *image = [UIImage imageWithData:data];
        NSData *pngData = UIImagePNGRepresentation(image);
        UIImage *pngImage = [UIImage imageWithData:pngData];
        if (!pngImage) return; // Download or decode failed.

        dispatch_async(dispatch_get_main_queue(), ^{
            ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
            shareVC.imageToShare = pngImage;
            UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
            navigationController.navigationBar.tintColor = [UIColor colorWithRed:255/255.0 green:29/255.0 blue:83/255.0 alpha:1.0];
            navigationController.modalPresentationStyle = UIModalPresentationFormSheet;
            [[UIApplication sharedApplication].keyWindow.rootViewController presentViewController:navigationController animated:YES completion:nil];
        });
    });
}
%end
--------------------------------------------------------------------------------
/YYImage/YYAnimatedImageView.h:
--------------------------------------------------------------------------------
1 | //
2 | // YYAnimatedImageView.h
3 | // YYImage
4 | //
5 | // Created by ibireme on 14/10/19.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
#import <UIKit/UIKit.h>
13 |
14 | NS_ASSUME_NONNULL_BEGIN
15 |
16 | /**
17 | An image view for displaying animated image.
18 |
19 | @discussion It is a fully compatible `UIImageView` subclass.
20 | If the `image` or `highlightedImage` property adopt to the `YYAnimatedImage` protocol,
21 | then it can be used to play the multi-frame animation. The animation can also be
22 | controlled with the UIImageView methods `-startAnimating`, `-stopAnimating` and `-isAnimating`.
23 |
24 | This view request the frame data just in time. When the device has enough free memory,
25 | this view may cache some or all future frames in an inner buffer for lower CPU cost.
26 | Buffer size is dynamically adjusted based on the current state of the device memory.
27 |
28 | Sample Code:
29 |
30 | // ani@3x.gif
31 | YYImage *image = [YYImage imageNamed:@"ani"];
     YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
     [view addSubview:imageView];
34 | */
35 | @interface YYAnimatedImageView : UIImageView
36 |
37 | /**
38 | If the image has more than one frame, set this value to `YES` will automatically
39 | play/stop the animation when the view become visible/invisible.
40 |
41 | The default value is `YES`.
42 | */
43 | @property (nonatomic) BOOL autoPlayAnimatedImage;
44 |
45 | /**
46 | Index of the currently displayed frame (index from 0).
47 |
48 | Set a new value to this property will cause to display the new frame immediately.
49 | If the new value is invalid, this method has no effect.
50 |
51 | You can add an observer to this property to observe the playing status.
52 | */
53 | @property (nonatomic) NSUInteger currentAnimatedImageIndex;
54 |
55 | /**
56 | Whether the image view is playing animation currently.
57 |
58 | You can add an observer to this property to observe the playing status.
59 | */
60 | @property (nonatomic, readonly) BOOL currentIsPlayingAnimation;
61 |
62 | /**
63 | The animation timer's runloop mode, default is `NSRunLoopCommonModes`.
64 |
65 | Set this property to `NSDefaultRunLoopMode` will make the animation pause during
66 | UIScrollView scrolling.
67 | */
68 | @property (nonatomic, copy) NSString *runloopMode;
69 |
70 | /**
71 | The max size (in bytes) for inner frame buffer size, default is 0 (dynamically).
72 |
73 | When the device has enough free memory, this view will request and decode some or
74 | all future frame image into an inner buffer. If this property's value is 0, then
75 | the max buffer size will be dynamically adjusted based on the current state of
76 | the device free memory. Otherwise, the buffer size will be limited by this value.
77 |
78 | When receive memory warning or app enter background, the buffer will be released
79 | immediately, and may grow back at the right time.
80 | */
81 | @property (nonatomic) NSUInteger maxBufferSize;
82 |
83 | @end
84 |
85 |
86 |
87 | /**
88 | The YYAnimatedImage protocol declares the required methods for animated image
89 | display with YYAnimatedImageView.
90 |
91 | Subclass a UIImage and implement this protocol, so that instances of that class
92 | can be set to YYAnimatedImageView.image or YYAnimatedImageView.highlightedImage
93 | to display animation.
94 |
95 | See `YYImage` and `YYFrameImage` for example.
96 | */
97 | @protocol YYAnimatedImage
98 | @required
99 | /// Total animated frame count.
/// If the frame count is less than 1, then the methods below will be ignored.
101 | - (NSUInteger)animatedImageFrameCount;
102 |
103 | /// Animation loop count, 0 means infinite looping.
104 | - (NSUInteger)animatedImageLoopCount;
105 |
106 | /// Bytes per frame (in memory). It may used to optimize memory buffer size.
107 | - (NSUInteger)animatedImageBytesPerFrame;
108 |
109 | /// Returns the frame image from a specified index.
110 | /// This method may be called on background thread.
111 | /// @param index Frame index (zero based).
112 | - (nullable UIImage *)animatedImageFrameAtIndex:(NSUInteger)index;
113 |
/// Returns the frame's duration from a specified index.
115 | /// @param index Frame index (zero based).
116 | - (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index;
117 |
118 | @optional
119 | /// A rectangle in image coordinates defining the subrectangle of the image that
/// will be displayed. The rectangle should not be outside the image's bounds.
121 | /// It may used to display sprite animation with a single image (sprite sheet).
122 | - (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index;
123 | @end
124 |
125 | NS_ASSUME_NONNULL_END
126 |
--------------------------------------------------------------------------------
/YYImage/YYAnimatedImageView.m:
--------------------------------------------------------------------------------
1 | //
2 | // YYAnimatedImageView.m
3 | // YYImage
4 | //
5 | // Created by ibireme on 14/10/19.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
#import "YYAnimatedImageView.h"
#import "YYImageCoder.h"
#import <pthread.h>
#import <mach/mach.h>
16 |
17 |
18 | #define BUFFER_SIZE (10 * 1024 * 1024) // 10MB (minimum memory buffer size)
19 |
20 | #define LOCK(...) dispatch_semaphore_wait(self->_lock, DISPATCH_TIME_FOREVER); \
21 | __VA_ARGS__; \
22 | dispatch_semaphore_signal(self->_lock);
23 |
24 | #define LOCK_VIEW(...) dispatch_semaphore_wait(view->_lock, DISPATCH_TIME_FOREVER); \
25 | __VA_ARGS__; \
26 | dispatch_semaphore_signal(view->_lock);
27 |
28 |
/// Returns the device's total physical memory in bytes, or -1 if unavailable.
static int64_t _YYDeviceMemoryTotal() {
    int64_t total = (int64_t)[[NSProcessInfo processInfo] physicalMemory];
    return (total < -1) ? -1 : total;
}
34 |
/// Returns the current number of free VM bytes on the device, or -1 if either
/// mach call fails. Used to size the frame buffer dynamically.
static int64_t _YYDeviceMemoryFree() {
    mach_port_t host_port = mach_host_self();
    mach_msg_type_number_t host_size = sizeof(vm_statistics_data_t) / sizeof(integer_t);
    vm_size_t page_size;
    vm_statistics_data_t vm_stat;
    kern_return_t kern;

    kern = host_page_size(host_port, &page_size);
    if (kern != KERN_SUCCESS) return -1;
    kern = host_statistics(host_port, HOST_VM_INFO, (host_info_t)&vm_stat, &host_size);
    if (kern != KERN_SUCCESS) return -1;
    // free_count is in pages; convert to bytes.
    return vm_stat.free_count * page_size;
}
48 |
/**
 A proxy used to hold a weak object.
 It can be used to avoid retain cycles, such as the target in NSTimer or CADisplayLink.
 */
@interface _YYImageWeakProxy : NSProxy
@property (nonatomic, weak, readonly) id target;
- (instancetype)initWithTarget:(id)target;
+ (instancetype)proxyWithTarget:(id)target;
@end

@implementation _YYImageWeakProxy
// NSProxy has no -init; assign the ivar directly and return.
- (instancetype)initWithTarget:(id)target {
    _target = target;
    return self;
}
+ (instancetype)proxyWithTarget:(id)target {
    return [[_YYImageWeakProxy alloc] initWithTarget:target];
}
// Fast path: forward every message to the weakly-held target.
// Returns nil after the target deallocates, which triggers the slow path below.
- (id)forwardingTargetForSelector:(SEL)selector {
    return _target;
}
// Slow path, reached only when the target is gone: answer any message
// with a zero/nil return value instead of crashing.
- (void)forwardInvocation:(NSInvocation *)invocation {
    void *null = NULL;
    [invocation setReturnValue:&null];
}
// Any valid signature suffices to keep message resolution alive once the
// target is gone; -init's signature is used as a cheap placeholder.
- (NSMethodSignature *)methodSignatureForSelector:(SEL)selector {
    return [NSObject instanceMethodSignatureForSelector:@selector(init)];
}
- (BOOL)respondsToSelector:(SEL)aSelector {
    return [_target respondsToSelector:aSelector];
}
- (BOOL)isEqual:(id)object {
    return [_target isEqual:object];
}
- (NSUInteger)hash {
    return [_target hash];
}
- (Class)superclass {
    return [_target superclass];
}
- (Class)class {
    return [_target class];
}
- (BOOL)isKindOfClass:(Class)aClass {
    return [_target isKindOfClass:aClass];
}
- (BOOL)isMemberOfClass:(Class)aClass {
    return [_target isMemberOfClass:aClass];
}
- (BOOL)conformsToProtocol:(Protocol *)aProtocol {
    return [_target conformsToProtocol:aProtocol];
}
- (BOOL)isProxy {
    return YES;
}
- (NSString *)description {
    return [_target description];
}
- (NSString *)debugDescription {
    return [_target debugDescription];
}
@end
111 |
112 |
113 |
114 |
/// Identifies which of UIImageView's image slots an incoming image was assigned to.
typedef NS_ENUM(NSUInteger, YYAnimatedImageType) {
    YYAnimatedImageTypeNone = 0,
    YYAnimatedImageTypeImage,
    YYAnimatedImageTypeHighlightedImage,
    YYAnimatedImageTypeImages,
    YYAnimatedImageTypeHighlightedImages,
};

/// Private playback state: frame buffer, display link and loop bookkeeping.
@interface YYAnimatedImageView() {
    @package
    UIImage *_curAnimatedImage; ///< current visible image when it conforms to YYAnimatedImage
    
    dispatch_semaphore_t _lock; ///< lock for _buffer
    NSOperationQueue *_requestQueue; ///< image request queue, serial
    
    CADisplayLink *_link; ///< ticker for change frame
    NSTimeInterval _time; ///< time after last frame
    
    UIImage *_curFrame; ///< current frame to display
    NSUInteger _curIndex; ///< current frame index (from 0)
    NSUInteger _totalFrameCount; ///< total frame count
    
    BOOL _loopEnd; ///< whether the loop is end.
    NSUInteger _curLoop; ///< current loop count (from 0)
    NSUInteger _totalLoop; ///< total loop count, 0 means infinity
    
    NSMutableDictionary *_buffer; ///< frame buffer
    BOOL _bufferMiss; ///< whether miss frame on last opportunity
    NSUInteger _maxBufferCount; ///< maximum buffer count
    NSInteger _incrBufferCount; ///< current allowed buffer count (will increase by step)
    
    CGRect _curContentsRect; ///< sprite-sheet sub-rect of the current frame
    BOOL _curImageHasContentsRect; ///< image has implementated "animatedImageContentsRectAtIndex:"
}
@property (nonatomic, readwrite) BOOL currentIsPlayingAnimation; ///< YES while either playback mode is active
- (void)calcMaxBufferCount;
@end

/// An operation for image fetch
@interface _YYAnimatedImageViewFetchOperation : NSOperation
@property (nonatomic, weak) YYAnimatedImageView *view; ///< weak: must not keep the view alive
@property (nonatomic, assign) NSUInteger nextIndex; ///< first frame index to decode
@property (nonatomic, strong) UIImage *curImage; ///< source animated image to decode from
@end
159 |
@implementation _YYAnimatedImageViewFetchOperation
// Decodes up to the view's current allowed buffer count of frames, starting at
// `nextIndex` and wrapping around, into the view's frame buffer.
// Runs on the view's serial request queue; checks cancellation between steps.
- (void)main {
    __strong YYAnimatedImageView *view = _view;
    if (!view) return;
    if ([self isCancelled]) return;
    // Grow the allowed buffer size by one per fetch pass, clamped to the max.
    view->_incrBufferCount++;
    if (view->_incrBufferCount == 0) [view calcMaxBufferCount];
    if (view->_incrBufferCount > (NSInteger)view->_maxBufferCount) {
        view->_incrBufferCount = view->_maxBufferCount;
    }
    NSUInteger idx = _nextIndex;
    NSUInteger max = view->_incrBufferCount < 1 ? 1 : view->_incrBufferCount;
    NSUInteger total = view->_totalFrameCount;
    view = nil; // drop the strong ref so the view can deallocate mid-fetch
    
    for (int i = 0; i < max; i++, idx++) {
        @autoreleasepool { // bound peak memory while decoding many frames
            if (idx >= total) idx = 0; // wrap around the animation
            if ([self isCancelled]) break;
            __strong YYAnimatedImageView *view = _view; // re-acquire per iteration
            if (!view) break;
            LOCK_VIEW(BOOL miss = (view->_buffer[@(idx)] == nil));
            
            if (miss) {
                UIImage *img = [_curImage animatedImageFrameAtIndex:idx];
                img = img.yy_imageByDecoded;
                if ([self isCancelled]) break;
                // NSNull marks an index that was fetched but produced no image.
                LOCK_VIEW(view->_buffer[@(idx)] = img ? img : [NSNull null]);
                view = nil;
            }
        }
    }
}
@end
194 |
195 | @implementation YYAnimatedImageView
196 |
/// Designated and convenience initializers.
/// All share the same defaults: common run-loop mode and auto-play enabled.
/// Fix: each now guards against [super init…] returning nil — the original
/// wrote ivars directly (`_runloopMode = …`) on a possibly-nil self, which
/// dereferences a nil pointer and crashes.
- (instancetype)init {
    self = [super init];
    if (self) {
        _runloopMode = NSRunLoopCommonModes;
        _autoPlayAnimatedImage = YES;
    }
    return self;
}

- (instancetype)initWithFrame:(CGRect)frame {
    self = [super initWithFrame:frame];
    if (self) {
        _runloopMode = NSRunLoopCommonModes;
        _autoPlayAnimatedImage = YES;
    }
    return self;
}

- (instancetype)initWithImage:(UIImage *)image {
    self = [super init];
    if (self) {
        _runloopMode = NSRunLoopCommonModes;
        _autoPlayAnimatedImage = YES;
        // Size the view to the image, as UIImageView's own initializer would.
        self.frame = (CGRect) {CGPointZero, image.size };
        self.image = image;
    }
    return self;
}

- (instancetype)initWithImage:(UIImage *)image highlightedImage:(UIImage *)highlightedImage {
    self = [super init];
    if (self) {
        _runloopMode = NSRunLoopCommonModes;
        _autoPlayAnimatedImage = YES;
        CGSize size = image ? image.size : highlightedImage.size;
        self.frame = (CGRect) {CGPointZero, size };
        self.image = image;
        self.highlightedImage = highlightedImage;
    }
    return self;
}
230 |
// Lazily creates the playback machinery on first use, then resets all
// animation state (buffer, timing, loop counters, KVO'd frame index).
- (void)resetAnimated {
    if (!_link) {
        // First call: build the lock, buffer, serial decode queue and the
        // display link (via weak proxy, so the link does not retain self).
        _lock = dispatch_semaphore_create(1);
        _buffer = [NSMutableDictionary new];
        _requestQueue = [[NSOperationQueue alloc] init];
        _requestQueue.maxConcurrentOperationCount = 1;
        _link = [CADisplayLink displayLinkWithTarget:[_YYImageWeakProxy proxyWithTarget:self] selector:@selector(step:)];
        if (_runloopMode) {
            [_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
        }
        _link.paused = YES;
        
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didReceiveMemoryWarning:) name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
        [[NSNotificationCenter defaultCenter] addObserver:self selector:@selector(didEnterBackground:) name:UIApplicationDidEnterBackgroundNotification object:nil];
    }
    
    [_requestQueue cancelAllOperations];
    LOCK(
         if (_buffer.count) {
             NSMutableDictionary *holder = _buffer;
             _buffer = [NSMutableDictionary new];
             dispatch_async(dispatch_get_global_queue(DISPATCH_QUEUE_PRIORITY_LOW, 0), ^{
                 // Capture the dictionary to global queue,
                 // release these images in background to avoid blocking UI thread.
                 [holder class];
             });
         }
    );
    _link.paused = YES;
    _time = 0;
    if (_curIndex != 0) {
        // Manual KVO notifications; see +automaticallyNotifiesObserversForKey:.
        [self willChangeValueForKey:@"currentAnimatedImageIndex"];
        _curIndex = 0;
        [self didChangeValueForKey:@"currentAnimatedImageIndex"];
    }
    _curAnimatedImage = nil;
    _curFrame = nil;
    _curLoop = 0;
    _totalLoop = 0;
    _totalFrameCount = 1;
    _loopEnd = NO;
    _bufferMiss = NO;
    _incrBufferCount = 0;
}
276 |
// The four UIImageView image setters are overridden so that every image change
// is routed through -setImage:withType:, which resets playback state first.
// Identical images (pointer equality, as in UIKit) are ignored.

- (void)setImage:(UIImage *)image {
    if (self.image != image) {
        [self setImage:image withType:YYAnimatedImageTypeImage];
    }
}

- (void)setHighlightedImage:(UIImage *)highlightedImage {
    if (self.highlightedImage != highlightedImage) {
        [self setImage:highlightedImage withType:YYAnimatedImageTypeHighlightedImage];
    }
}

- (void)setAnimationImages:(NSArray *)animationImages {
    if (self.animationImages != animationImages) {
        [self setImage:animationImages withType:YYAnimatedImageTypeImages];
    }
}

- (void)setHighlightedAnimationImages:(NSArray *)highlightedAnimationImages {
    if (self.highlightedAnimationImages != highlightedAnimationImages) {
        [self setImage:highlightedAnimationImages withType:YYAnimatedImageTypeHighlightedImages];
    }
}
296 |
// Toggling the highlight may switch which image (normal vs highlighted) is
// visible, so playback state is reset and re-derived from the new state.
- (void)setHighlighted:(BOOL)highlighted {
    [super setHighlighted:highlighted];
    if (_link != nil) {
        [self resetAnimated];
    }
    [self imageChanged];
}
302 |
// Maps an image-slot type back to the stored image/array; nil for None.
- (id)imageForType:(YYAnimatedImageType)type {
    if (type == YYAnimatedImageTypeImage) return self.image;
    if (type == YYAnimatedImageTypeHighlightedImage) return self.highlightedImage;
    if (type == YYAnimatedImageTypeImages) return self.animationImages;
    if (type == YYAnimatedImageTypeHighlightedImages) return self.highlightedAnimationImages;
    return nil;
}
313 |
// Determines which image slot is currently visible. Highlighted slots win
// while the view is highlighted; image arrays win over single images.
- (YYAnimatedImageType)currentImageType {
    if (self.highlighted) {
        if (self.highlightedAnimationImages.count) return YYAnimatedImageTypeHighlightedImages;
        if (self.highlightedImage) return YYAnimatedImageTypeHighlightedImage;
    }
    if (self.animationImages.count) return YYAnimatedImageTypeImages;
    if (self.image) return YYAnimatedImageTypeImage;
    return YYAnimatedImageTypeNone;
}
326 |
// Central image-assignment path: stops playback, resets animation state,
// stores the image in the matching UIImageView slot, then re-derives playback.
- (void)setImage:(id)image withType:(YYAnimatedImageType)type {
    [self stopAnimating];
    if (_link) [self resetAnimated];
    _curFrame = nil;
    if (type == YYAnimatedImageTypeImage) {
        super.image = image;
    } else if (type == YYAnimatedImageTypeHighlightedImage) {
        super.highlightedImage = image;
    } else if (type == YYAnimatedImageTypeImages) {
        super.animationImages = image;
    } else if (type == YYAnimatedImageTypeHighlightedImages) {
        super.highlightedAnimationImages = image;
    }
    [self imageChanged];
}
340 |
// Called whenever any image slot changes: reconfigures sprite-sheet
// contentsRect handling and (re)initializes playback for multi-frame images.
- (void)imageChanged {
    YYAnimatedImageType newType = [self currentImageType];
    id newVisibleImage = [self imageForType:newType];
    NSUInteger newImageFrameCount = 0;
    BOOL hasContentsRect = NO;
    if ([newVisibleImage isKindOfClass:[UIImage class]] &&
        [newVisibleImage conformsToProtocol:@protocol(YYAnimatedImage)]) {
        newImageFrameCount = ((UIImage *) newVisibleImage).animatedImageFrameCount;
        if (newImageFrameCount > 1) {
            // contentsRect is optional in the YYAnimatedImage protocol.
            hasContentsRect = [((UIImage *) newVisibleImage) respondsToSelector:@selector(animatedImageContentsRectAtIndex:)];
        }
    }
    // The previous image used a sprite-sheet sub-rect but the new one doesn't:
    // restore the layer's default full rect without an implicit animation.
    if (!hasContentsRect && _curImageHasContentsRect) {
        if (!CGRectEqualToRect(self.layer.contentsRect, CGRectMake(0, 0, 1, 1)) ) {
            [CATransaction begin];
            [CATransaction setDisableActions:YES];
            self.layer.contentsRect = CGRectMake(0, 0, 1, 1);
            [CATransaction commit];
        }
    }
    _curImageHasContentsRect = hasContentsRect;
    if (hasContentsRect) {
        CGRect rect = [((UIImage *) newVisibleImage) animatedImageContentsRectAtIndex:0];
        [self setContentsRect:rect forImage:newVisibleImage];
    }
    
    if (newImageFrameCount > 1) {
        [self resetAnimated];
        _curAnimatedImage = newVisibleImage;
        _curFrame = newVisibleImage; // frame 0 is the image itself
        _totalLoop = _curAnimatedImage.animatedImageLoopCount;
        _totalFrameCount = _curAnimatedImage.animatedImageFrameCount;
        [self calcMaxBufferCount];
    }
    [self setNeedsDisplay];
    [self didMoved]; // start playback if visible and auto-play is enabled
}
378 |
// dynamically adjust buffer size for current memory.
// Budget: min(20% of total RAM, 60% of free RAM), never below BUFFER_SIZE,
// optionally capped by the user-set maxBufferSize; result clamped to 1..512 frames.
- (void)calcMaxBufferCount {
    int64_t bytes = (int64_t)_curAnimatedImage.animatedImageBytesPerFrame;
    if (bytes == 0) bytes = 1024; // guard against divide-by-zero below
    
    int64_t total = _YYDeviceMemoryTotal();
    int64_t free = _YYDeviceMemoryFree();
    int64_t max = MIN(total * 0.2, free * 0.6);
    max = MAX(max, BUFFER_SIZE);
    if (_maxBufferSize) max = max > _maxBufferSize ? _maxBufferSize : max;
    double maxBufferCount = (double)max / (double)bytes;
    if (maxBufferCount < 1) maxBufferCount = 1;
    else if (maxBufferCount > 512) maxBufferCount = 512;
    _maxBufferCount = maxBufferCount;
}
394 |
// Non-memory cleanup only (ARC handles the rest): stop pending decodes,
// remove notification observers, and invalidate the display link. The link
// targets a weak proxy so there is no retain cycle, but it still must be
// invalidated to unregister it from the run loop.
- (void)dealloc {
    [_requestQueue cancelAllOperations];
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidReceiveMemoryWarningNotification object:nil];
    [[NSNotificationCenter defaultCenter] removeObserver:self name:UIApplicationDidEnterBackgroundNotification object:nil];
    [_link invalidate];
}
401 |
// Reports playback state for both modes (system animationImages playback
// and YYAnimatedImage display-link playback).
- (BOOL)isAnimating {
    return self.currentIsPlayingAnimation;
}

- (void)stopAnimating {
    [super stopAnimating];
    [_requestQueue cancelAllOperations]; // drop in-flight frame decodes
    _link.paused = YES;
    self.currentIsPlayingAnimation = NO;
}

- (void)startAnimating {
    YYAnimatedImageType type = [self currentImageType];
    if (type == YYAnimatedImageTypeImages || type == YYAnimatedImageTypeHighlightedImages) {
        // Plain image arrays: defer to UIImageView's built-in animation.
        NSArray *images = [self imageForType:type];
        if (images.count > 0) {
            [super startAnimating];
            self.currentIsPlayingAnimation = YES;
        }
    } else {
        // YYAnimatedImage playback: resume the display link from the current frame.
        if (_curAnimatedImage && _link.paused) {
            _curLoop = 0;
            _loopEnd = NO;
            _link.paused = NO;
            self.currentIsPlayingAnimation = YES;
        }
    }
}
430 |
// On memory warning: cancel decodes, then — on the serial request queue so it
// cannot race a running fetch — evict every buffered frame except the next
// one, and push _incrBufferCount negative so the buffer regrows only after a
// random delay of display-link ticks.
- (void)didReceiveMemoryWarning:(NSNotification *)notification {
    [_requestQueue cancelAllOperations];
    [_requestQueue addOperationWithBlock: ^{
        _incrBufferCount = -60 - (int)(arc4random() % 120); // about 1~3 seconds to grow back..
        NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
        LOCK(
             NSArray * keys = _buffer.allKeys;
             for (NSNumber * key in keys) {
                 if (![key isEqualToNumber:next]) { // keep the next frame for smoothly animation
                     [_buffer removeObjectForKey:key];
                 }
             }
        )//LOCK
    }];
}

// On entering background: same eviction as above, but performed inline since
// no decode can be racing after cancelAllOperations on this (main) thread.
- (void)didEnterBackground:(NSNotification *)notification {
    [_requestQueue cancelAllOperations];
    NSNumber *next = @((_curIndex + 1) % _totalFrameCount);
    LOCK(
         NSArray * keys = _buffer.allKeys;
         for (NSNumber * key in keys) {
             if (![key isEqualToNumber:next]) { // keep the next frame for smoothly animation
                 [_buffer removeObjectForKey:key];
             }
         }
    )//LOCK
}
459 |
// CADisplayLink callback: accumulate elapsed time, advance to the next frame
// when its delay has elapsed and the frame is buffered (otherwise record a
// miss and retry next tick), then schedule a fetch to keep the buffer filled.
- (void)step:(CADisplayLink *)link {
    UIImage *image = _curAnimatedImage;
    NSMutableDictionary *buffer = _buffer;
    UIImage *bufferedImage = nil;
    NSUInteger nextIndex = (_curIndex + 1) % _totalFrameCount;
    BOOL bufferIsFull = NO;
    
    if (!image) return;
    if (_loopEnd) { // view will keep in last frame
        [self stopAnimating];
        return;
    }
    
    NSTimeInterval delay = 0;
    if (!_bufferMiss) {
        // After a miss, time is frozen: skip accumulation and retry the fetch.
        _time += link.duration;
        delay = [image animatedImageDurationAtIndex:_curIndex];
        if (_time < delay) return;
        _time -= delay;
        if (nextIndex == 0) { // wrapped around: one full loop finished
            _curLoop++;
            if (_curLoop >= _totalLoop && _totalLoop != 0) {
                _loopEnd = YES;
                [self stopAnimating];
                [self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
                return; // stop at last frame
            }
        }
        delay = [image animatedImageDurationAtIndex:nextIndex];
        if (_time > delay) _time = delay; // do not jump over frame
    }
    LOCK(
         bufferedImage = buffer[@(nextIndex)];
         if (bufferedImage) {
             if ((int)_incrBufferCount < _totalFrameCount) {
                 // Buffer may not hold every frame yet: consume as we display.
                 [buffer removeObjectForKey:@(nextIndex)];
             }
             // Manual KVO; see +automaticallyNotifiesObserversForKey:.
             [self willChangeValueForKey:@"currentAnimatedImageIndex"];
             _curIndex = nextIndex;
             [self didChangeValueForKey:@"currentAnimatedImageIndex"];
             _curFrame = bufferedImage == (id)[NSNull null] ? nil : bufferedImage;
             if (_curImageHasContentsRect) {
                 _curContentsRect = [image animatedImageContentsRectAtIndex:_curIndex];
                 [self setContentsRect:_curContentsRect forImage:_curFrame];
             }
             nextIndex = (_curIndex + 1) % _totalFrameCount;
             _bufferMiss = NO;
             if (buffer.count == _totalFrameCount) {
                 bufferIsFull = YES;
             }
         } else {
             _bufferMiss = YES; // frame not decoded yet; retry without advancing time
         }
    )//LOCK
    
    if (!_bufferMiss) {
        [self.layer setNeedsDisplay]; // let system call `displayLayer:` before runloop sleep
    }
    
    if (!bufferIsFull && _requestQueue.operationCount == 0) { // if some work not finished, wait for next opportunity
        _YYAnimatedImageViewFetchOperation *operation = [_YYAnimatedImageViewFetchOperation new];
        operation.view = self;
        operation.nextIndex = nextIndex;
        operation.curImage = image;
        [_requestQueue addOperation:operation];
    }
}
527 |
/// Renders the current animated frame into the backing layer.
/// Fix: the original did nothing when `_curFrame` was nil. On iOS 14+,
/// UIImageView routes its own rendering through displayLayer:, so returning
/// without drawing (or without forwarding to super) leaves the view blank for
/// static images — the well-known YYImage iOS 14 blank-image bug. Also sets
/// contentsScale so retina frames are not drawn at 1x.
- (void)displayLayer:(CALayer *)layer {
    UIImage *currentFrame = _curFrame;
    if (currentFrame) {
        layer.contentsScale = currentFrame.scale;
        layer.contents = (__bridge id)currentFrame.CGImage;
    } else {
        // Forward to super only where UIImageView actually implements it
        // (iOS 14+); on older systems the original no-op behavior is kept.
        if ([UIImageView instancesRespondToSelector:@selector(displayLayer:)]) {
            [super displayLayer:layer];
        }
    }
}
533 |
// Converts a sprite-sheet sub-rect from image coordinates into the layer's
// unit-coordinate contentsRect and applies it without implicit animation.
// Degenerate inputs (nil image, near-zero size, empty intersection) fall back
// to the full unit rect.
- (void)setContentsRect:(CGRect)rect forImage:(UIImage *)image{
    const CGRect unitRect = CGRectMake(0, 0, 1, 1);
    CGRect layerRect = unitRect;
    CGSize imageSize = image.size;
    if (image && imageSize.width > 0.01 && imageSize.height > 0.01) {
        layerRect = CGRectMake(rect.origin.x / imageSize.width,
                               rect.origin.y / imageSize.height,
                               rect.size.width / imageSize.width,
                               rect.size.height / imageSize.height);
        layerRect = CGRectIntersection(layerRect, unitRect);
        if (CGRectIsNull(layerRect) || CGRectIsEmpty(layerRect)) {
            layerRect = unitRect;
        }
    }
    [CATransaction begin];
    [CATransaction setDisableActions:YES];
    self.layer.contentsRect = layerRect;
    [CATransaction commit];
}
554 |
// Starts or stops playback according to current visibility (attached to both
// a superview and a window). No-op when auto-play is disabled.
- (void)didMoved {
    if (!self.autoPlayAnimatedImage) return;
    BOOL visible = (self.superview != nil && self.window != nil);
    if (visible) {
        [self startAnimating];
    } else {
        [self stopAnimating];
    }
}
564 |
// Visibility hooks: both hierarchy changes feed into -didMoved, which
// auto-plays or pauses based on whether the view is actually on screen.
- (void)didMoveToWindow {
    [super didMoveToWindow];
    [self didMoved];
}

- (void)didMoveToSuperview {
    [super didMoveToSuperview];
    [self didMoved];
}
574 |
/// Jumps playback to a specific frame, decoding it synchronously (on the main
/// thread) and clearing the buffer.
/// Fix: the block variable was declared `void (^block)()` — an empty C-style
/// parameter list, which is a deprecated prototype and warns/errors under
/// modern clang. Declared as `void (^block)(void)` instead.
- (void)setCurrentAnimatedImageIndex:(NSUInteger)currentAnimatedImageIndex {
    if (!_curAnimatedImage) return;
    if (currentAnimatedImageIndex >= _curAnimatedImage.animatedImageFrameCount) return;
    if (_curIndex == currentAnimatedImageIndex) return;
    
    void (^block)(void) = ^{
        LOCK(
        [_requestQueue cancelAllOperations];
        [_buffer removeAllObjects];
        // Manual KVO; see +automaticallyNotifiesObserversForKey:.
        [self willChangeValueForKey:@"currentAnimatedImageIndex"];
        _curIndex = currentAnimatedImageIndex;
        [self didChangeValueForKey:@"currentAnimatedImageIndex"];
        _curFrame = [_curAnimatedImage animatedImageFrameAtIndex:_curIndex];
        if (_curImageHasContentsRect) {
            _curContentsRect = [_curAnimatedImage animatedImageContentsRectAtIndex:_curIndex];
        }
        _time = 0;
        _loopEnd = NO;
        _bufferMiss = NO;
        [self.layer setNeedsDisplay];
        )//LOCK
    };
    
    // Must run on the main thread (touches the layer and KVO observers).
    if (pthread_main_np()) {
        block();
    } else {
        dispatch_async(dispatch_get_main_queue(), block);
    }
}

/// KVO-compliant getter for the manually-notified `currentAnimatedImageIndex`.
- (NSUInteger)currentAnimatedImageIndex {
    return _curIndex;
}
608 |
// Moves the display link to a different run-loop mode (e.g. default mode so
// animation pauses during scrolling). Safe to call before the link exists.
- (void)setRunloopMode:(NSString *)runloopMode {
    if ([_runloopMode isEqual:runloopMode]) return;
    if (_link) {
        if (_runloopMode) {
            [_link removeFromRunLoop:[NSRunLoop mainRunLoop] forMode:_runloopMode];
        }
        if (runloopMode.length) {
            [_link addToRunLoop:[NSRunLoop mainRunLoop] forMode:runloopMode];
        }
    }
    _runloopMode = [runloopMode copy];
}
621 |
622 | #pragma mark - Override NSObject(NSKeyValueObservingCustomization)
623 |
// currentAnimatedImageIndex posts manual will/didChangeValueForKey:
// notifications, so automatic KVO is disabled for that key only.
+ (BOOL)automaticallyNotifiesObserversForKey:(NSString *)key {
    BOOL isFrameIndexKey = [key isEqualToString:@"currentAnimatedImageIndex"];
    return isFrameIndexKey ? NO : [super automaticallyNotifiesObserversForKey:key];
}
630 |
631 | #pragma mark - NSCoding
632 |
/// Restores runloop mode, auto-play flag, and any archived multi-frame
/// animated images (stored under custom keys by -encodeWithCoder:).
/// Fix: guards against [super initWithCoder:] returning nil before writing
/// ivars directly — the original would crash on a nil self.
- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    self = [super initWithCoder:aDecoder];
    if (!self) return nil;
    _runloopMode = [aDecoder decodeObjectForKey:@"runloopMode"];
    if (_runloopMode.length == 0) _runloopMode = NSRunLoopCommonModes;
    if ([aDecoder containsValueForKey:@"autoPlayAnimatedImage"]) {
        _autoPlayAnimatedImage = [aDecoder decodeBoolForKey:@"autoPlayAnimatedImage"];
    } else {
        _autoPlayAnimatedImage = YES;
    }
    
    UIImage *image = [aDecoder decodeObjectForKey:@"YYAnimatedImage"];
    UIImage *highlightedImage = [aDecoder decodeObjectForKey:@"YYHighlightedAnimatedImage"];
    if (image) {
        self.image = image;
        [self setImage:image withType:YYAnimatedImageTypeImage];
    }
    if (highlightedImage) {
        self.highlightedImage = highlightedImage;
        [self setImage:highlightedImage withType:YYAnimatedImageTypeHighlightedImage];
    }
    return self;
}
655 |
// Archives runloop mode and auto-play flag. Only multi-frame
// YYAnimatedImage-conforming images are archived under the custom keys;
// plain images are already handled by [super encodeWithCoder:].
- (void)encodeWithCoder:(NSCoder *)aCoder {
    [super encodeWithCoder:aCoder];
    [aCoder encodeObject:_runloopMode forKey:@"runloopMode"];
    [aCoder encodeBool:_autoPlayAnimatedImage forKey:@"autoPlayAnimatedImage"];
    
    BOOL ani, multi;
    ani = [self.image conformsToProtocol:@protocol(YYAnimatedImage)];
    multi = (ani && ((UIImage *)self.image).animatedImageFrameCount > 1);
    if (multi) [aCoder encodeObject:self.image forKey:@"YYAnimatedImage"];
    
    ani = [self.highlightedImage conformsToProtocol:@protocol(YYAnimatedImage)];
    multi = (ani && ((UIImage *)self.highlightedImage).animatedImageFrameCount > 1);
    if (multi) [aCoder encodeObject:self.highlightedImage forKey:@"YYHighlightedAnimatedImage"];
}
670 |
671 | @end
672 |
--------------------------------------------------------------------------------
/YYImage/YYFrameImage.h:
--------------------------------------------------------------------------------
1 | //
2 | // YYFrameImage.h
3 | // YYImage
4 | //
5 | // Created by ibireme on 14/12/9.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
#import <UIKit/UIKit.h>

#if __has_include(<YYImage/YYAnimatedImageView.h>)
#import <YYImage/YYAnimatedImageView.h>
#elif __has_include(<YYWebImage/YYAnimatedImageView.h>)
#import <YYWebImage/YYAnimatedImageView.h>
#else
#import "YYAnimatedImageView.h"
#endif
21 |
22 | NS_ASSUME_NONNULL_BEGIN
23 |
24 | /**
25 | An image to display frame-based animation.
26 |
27 | @discussion It is a fully compatible `UIImage` subclass.
 It only supports system image formats such as png and jpeg.
29 | The animation can be played by YYAnimatedImageView.
30 |
31 | Sample Code:
32 |
33 | NSArray *paths = @[@"/ani/frame1.png", @"/ani/frame2.png", @"/ani/frame3.png"];
34 | NSArray *times = @[@0.1, @0.2, @0.1];
     YYFrameImage *image = [[YYFrameImage alloc] initWithImagePaths:paths frameDurations:times loopCount:0];
     YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
     [view addSubview:imageView];
38 | */
@interface YYFrameImage : UIImage

/**
 Create a frame animated image from files.
 
 @param paths            An array of NSString objects, contains the full or
                         partial path to each image file.
                         e.g. @[@"/ani/1.png",@"/ani/2.png",@"/ani/3.png"]
 
 @param oneFrameDuration The duration (in seconds) per frame.
 
 @param loopCount        The animation loop count, 0 means infinite.
 
 @return An initialized YYFrameImage object, or nil when an error occurs.
 */
- (nullable instancetype)initWithImagePaths:(NSArray *)paths
                           oneFrameDuration:(NSTimeInterval)oneFrameDuration
                                  loopCount:(NSUInteger)loopCount;

/**
 Create a frame animated image from files.
 
 @discussion `paths.count` must be nonzero and equal to `frameDurations.count`,
 otherwise nil is returned.
 
 @param paths          An array of NSString objects, contains the full or
                       partial path to each image file.
                       e.g. @[@"/ani/frame1.png",@"/ani/frame2.png",@"/ani/frame3.png"]
 
 @param frameDurations An array of NSNumber objects, contains the duration (in seconds) per frame.
                       e.g. @[@0.1, @0.2, @0.3];
 
 @param loopCount      The animation loop count, 0 means infinite.
 
 @return An initialized YYFrameImage object, or nil when an error occurs.
 */
- (nullable instancetype)initWithImagePaths:(NSArray *)paths
                             frameDurations:(NSArray *)frameDurations
                                  loopCount:(NSUInteger)loopCount;

/**
 Create a frame animated image from an array of data.
 
 @param dataArray        An array of NSData objects.
 
 @param oneFrameDuration The duration (in seconds) per frame.
 
 @param loopCount        The animation loop count, 0 means infinite.
 
 @return An initialized YYFrameImage object, or nil when an error occurs.
 */
- (nullable instancetype)initWithImageDataArray:(NSArray *)dataArray
                               oneFrameDuration:(NSTimeInterval)oneFrameDuration
                                      loopCount:(NSUInteger)loopCount;

/**
 Create a frame animated image from an array of data.
 
 @discussion `dataArray.count` must be nonzero and equal to
 `frameDurations.count`, otherwise nil is returned.
 
 @param dataArray      An array of NSData objects.
 
 @param frameDurations An array of NSNumber objects, contains the duration (in seconds) per frame.
                       e.g. @[@0.1, @0.2, @0.3];
 
 @param loopCount      The animation loop count, 0 means infinite.
 
 @return An initialized YYFrameImage object, or nil when an error occurs.
 */
- (nullable instancetype)initWithImageDataArray:(NSArray *)dataArray
                                 frameDurations:(NSArray *)frameDurations
                                      loopCount:(NSUInteger)loopCount;

@end
108 |
109 | NS_ASSUME_NONNULL_END
110 |
--------------------------------------------------------------------------------
/YYImage/YYFrameImage.m:
--------------------------------------------------------------------------------
1 | //
2 | // YYFrameImage.m
3 | // YYImage
4 | //
5 | // Created by ibireme on 14/12/9.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
12 | #import "YYFrameImage.h"
13 | #import "YYImageCoder.h"
14 |
15 |
16 | /**
17 | Return the path scale.
18 |
19 | e.g.
20 |
21 | Path | Scale |
22 | "icon.png" | 1 |
23 | "icon@2x.png" | 2 |
24 | "icon@2.5x.png" | 2.5 |
25 | "icon@2x" | 1 |
26 | "icon@2x..png" | 1 |
27 | "icon@2x.png/" | 1 |
28 |
29 | */
// Extracts the "@Nx" screen-scale suffix from a file path (e.g. "icon@2x.png"
// yields 2). Returns 1 for empty strings, directory-style paths, or names
// without a recognizable scale suffix.
static CGFloat _NSStringPathScale(NSString *string) {
    if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
    NSString *baseName = string.stringByDeletingPathExtension;
    __block CGFloat detectedScale = 1;
    
    NSRegularExpression *scaleRegex =
        [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$"
                                                  options:NSRegularExpressionAnchorsMatchLines
                                                    error:nil];
    [scaleRegex enumerateMatchesInString:baseName
                                 options:kNilOptions
                                   range:NSMakeRange(0, baseName.length)
                              usingBlock:^(NSTextCheckingResult *result, NSMatchingFlags flags, BOOL *stop) {
        // Require at least a 3-character prefix before "@Nx" so bare suffixes
        // are not treated as a scale. Strip the leading "@" and trailing "x".
        if (result.range.location >= 3) {
            NSRange numberRange = NSMakeRange(result.range.location + 1, result.range.length - 2);
            detectedScale = [string substringWithRange:numberRange].doubleValue;
        }
    }];
    
    return detectedScale;
}
44 |
45 |
46 |
@implementation YYFrameImage {
    NSUInteger _loopCount;      ///< animation loop count, 0 = infinite
    NSUInteger _oneFrameBytes;  ///< decoded size (bytes) of one frame, measured from the first frame
    NSArray *_imagePaths;       ///< frame file paths (mutually exclusive with _imageDatas)
    NSArray *_imageDatas;       ///< frame image datas (mutually exclusive with _imagePaths)
    NSArray *_frameDurations;   ///< NSNumber durations in seconds, parallel to the frames
}
54 |
/// Convenience: every frame shares the same `oneFrameDuration`.
- (instancetype)initWithImagePaths:(NSArray *)paths oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount {
    NSMutableArray *durations = [NSMutableArray new];
    for (int i = 0, max = (int)paths.count; i < max; i++) {
        [durations addObject:@(oneFrameDuration)];
    }
    return [self initWithImagePaths:paths frameDurations:durations loopCount:loopCount];
}

/// File-path-backed initializer. The first frame is decoded eagerly so the
/// object itself displays it and its byte size can be measured for buffering.
/// Returns nil on invalid input, matching the header's documented contract.
- (instancetype)initWithImagePaths:(NSArray *)paths frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount {
    if (paths.count == 0) return nil;
    if (paths.count != frameDurations.count) return nil;
    
    NSString *firstPath = paths[0];
    NSData *firstData = [NSData dataWithContentsOfFile:firstPath];
    CGFloat scale = _NSStringPathScale(firstPath);
    UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded];
    // Fix: a missing or undecodable first frame previously fell through to
    // -initWithCGImage:NULL, yielding a broken image instead of the
    // documented nil-on-error.
    if (!firstCG.CGImage) return nil;
    self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp];
    if (!self) return nil;
    long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage);
    _oneFrameBytes = (NSUInteger)frameByte;
    _imagePaths = paths.copy;
    _frameDurations = frameDurations.copy;
    _loopCount = loopCount;
    
    return self;
}
81 |
/// Convenience: every frame shares the same `oneFrameDuration`.
- (instancetype)initWithImageDataArray:(NSArray *)dataArray oneFrameDuration:(NSTimeInterval)oneFrameDuration loopCount:(NSUInteger)loopCount {
    NSMutableArray *durations = [NSMutableArray new];
    for (int i = 0, max = (int)dataArray.count; i < max; i++) {
        [durations addObject:@(oneFrameDuration)];
    }
    return [self initWithImageDataArray:dataArray frameDurations:durations loopCount:loopCount];
}

/// Data-backed initializer; frames use the main screen's scale.
/// Returns nil on invalid input, matching the header's documented contract.
- (instancetype)initWithImageDataArray:(NSArray *)dataArray frameDurations:(NSArray *)frameDurations loopCount:(NSUInteger)loopCount {
    if (dataArray.count == 0) return nil;
    if (dataArray.count != frameDurations.count) return nil;
    
    NSData *firstData = dataArray[0];
    CGFloat scale = [UIScreen mainScreen].scale;
    UIImage *firstCG = [[[UIImage alloc] initWithData:firstData] yy_imageByDecoded];
    // Fix: an undecodable first frame previously fell through to
    // -initWithCGImage:NULL instead of returning the documented nil.
    if (!firstCG.CGImage) return nil;
    self = [self initWithCGImage:firstCG.CGImage scale:scale orientation:UIImageOrientationUp];
    if (!self) return nil;
    long frameByte = CGImageGetBytesPerRow(firstCG.CGImage) * CGImageGetHeight(firstCG.CGImage);
    _oneFrameBytes = (NSUInteger)frameByte;
    _imageDatas = dataArray.copy;
    _frameDurations = frameDurations.copy;
    _loopCount = loopCount;
    
    return self;
}
107 |
#pragma mark - YYAnimatedImage
109 |
/// Total number of frames (YYAnimatedImage protocol): the path count in
/// path mode, the data-blob count in data mode, or 1 for a plain image.
- (NSUInteger)animatedImageFrameCount {
    if (_imagePaths) return _imagePaths.count;
    if (_imageDatas) return _imageDatas.count;
    return 1;
}
119 |
/// Animation loop count (YYAnimatedImage protocol); 0 means repeat forever.
- (NSUInteger)animatedImageLoopCount {
    return _loopCount;
}
123 |
/// Memory cost of one decoded frame in bytes (YYAnimatedImage protocol),
/// computed from the first frame's bitmap at init time.
- (NSUInteger)animatedImageBytesPerFrame {
    return _oneFrameBytes;
}
127 |
/// Loads and decodes the frame at `index` (YYAnimatedImage protocol).
/// Path mode reads the file lazily from disk; data mode decodes the stored
/// blob; a plain single image answers only index 0 with itself.
/// Returns nil for an out-of-range index.
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    if (_imagePaths) {
        if (index >= _imagePaths.count) return nil;
        NSString *framePath = _imagePaths[index];
        NSData *frameData = [NSData dataWithContentsOfFile:framePath];
        UIImage *frame = [UIImage imageWithData:frameData scale:_NSStringPathScale(framePath)];
        return [frame yy_imageByDecoded];
    }
    if (_imageDatas) {
        if (index >= _imageDatas.count) return nil;
        UIImage *frame = [UIImage imageWithData:_imageDatas[index] scale:[UIScreen mainScreen].scale];
        return [frame yy_imageByDecoded];
    }
    return index == 0 ? self : nil;
}
143 |
/// Duration in seconds of the frame at `index` (YYAnimatedImage protocol);
/// returns 0 for an out-of-range index.
- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    if (index >= _frameDurations.count) return 0;
    return ((NSNumber *)_frameDurations[index]).doubleValue;
}
149 |
150 | @end
151 |
--------------------------------------------------------------------------------
/YYImage/YYImage.h:
--------------------------------------------------------------------------------
1 | //
2 | // YYImage.h
3 | // YYImage
4 | //
5 | // Created by ibireme on 14/10/20.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
#import <UIKit/UIKit.h>
13 |
#if __has_include(<YYImage/YYImage.h>)
FOUNDATION_EXPORT double YYImageVersionNumber;
FOUNDATION_EXPORT const unsigned char YYImageVersionString[];
#import <YYImage/YYFrameImage.h>
#import <YYImage/YYSpriteSheetImage.h>
#import <YYImage/YYImageCoder.h>
#import <YYImage/YYAnimatedImageView.h>
#elif __has_include(<YYWebImage/YYImage.h>)
#import <YYWebImage/YYFrameImage.h>
#import <YYWebImage/YYSpriteSheetImage.h>
#import <YYWebImage/YYImageCoder.h>
#import <YYWebImage/YYAnimatedImageView.h>
26 | #else
27 | #import "YYFrameImage.h"
28 | #import "YYSpriteSheetImage.h"
29 | #import "YYImageCoder.h"
30 | #import "YYAnimatedImageView.h"
31 | #endif
32 |
33 | NS_ASSUME_NONNULL_BEGIN
34 |
35 |
36 | /**
37 | A YYImage object is a high-level way to display animated image data.
38 |
39 | @discussion It is a fully compatible `UIImage` subclass. It extends the UIImage
40 | to support animated WebP, APNG and GIF format image data decoding. It also
41 | support NSCoding protocol to archive and unarchive multi-frame image data.
42 |
43 | If the image is created from multi-frame image data, and you want to play the
44 | animation, try replace UIImageView with `YYAnimatedImageView`.
45 |
46 | Sample Code:
47 |
48 | // animation@3x.webp
49 | YYImage *image = [YYImage imageNamed:@"animation.webp"];
     YYAnimatedImageView *imageView = [[YYAnimatedImageView alloc] initWithImage:image];
     [view addSubview:imageView];
52 |
53 | */
54 | @interface YYImage : UIImage
55 |
56 | + (nullable YYImage *)imageNamed:(NSString *)name; // no cache!
57 | + (nullable YYImage *)imageWithContentsOfFile:(NSString *)path;
58 | + (nullable YYImage *)imageWithData:(NSData *)data;
59 | + (nullable YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale;
60 |
61 | /**
62 | If the image is created from data or file, then the value indicates the data type.
63 | */
64 | @property (nonatomic, readonly) YYImageType animatedImageType;
65 |
66 | /**
67 | If the image is created from animated image data (multi-frame GIF/APNG/WebP),
68 | this property stores the original image data.
69 | */
70 | @property (nullable, nonatomic, readonly) NSData *animatedImageData;
71 |
72 | /**
73 | The total memory usage (in bytes) if all frame images was loaded into memory.
74 | The value is 0 if the image is not created from a multi-frame image data.
75 | */
76 | @property (nonatomic, readonly) NSUInteger animatedImageMemorySize;
77 |
78 | /**
79 | Preload all frame image to memory.
80 |
81 | @discussion Set this property to `YES` will block the calling thread to decode
82 | all animation frame image to memory, set to `NO` will release the preloaded frames.
83 | If the image is shared by lots of image views (such as emoticon), preload all
84 | frames will reduce the CPU cost.
85 |
86 | See `animatedImageMemorySize` for memory cost.
87 | */
88 | @property (nonatomic) BOOL preloadAllAnimatedImageFrames;
89 |
90 | @end
91 |
92 | NS_ASSUME_NONNULL_END
93 |
--------------------------------------------------------------------------------
/YYImage/YYImage.m:
--------------------------------------------------------------------------------
1 | //
2 | // YYImage.m
3 | // YYImage
4 | //
5 | // Created by ibireme on 14/10/20.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
12 | #import "YYImage.h"
13 |
14 | /**
15 | An array of NSNumber objects, shows the best order for path scale search.
16 | e.g. iPhone3GS:@[@1,@2,@3] iPhone5:@[@2,@3,@1] iPhone6 Plus:@[@3,@2,@1]
17 | */
static NSArray *_NSBundlePreferredScales() {
    static NSArray *preferredScales;
    static dispatch_once_t once;
    dispatch_once(&once, ^{
        // Order the search so the scale matching the current screen is tried first.
        CGFloat deviceScale = [UIScreen mainScreen].scale;
        if (deviceScale <= 1) {
            preferredScales = @[@1, @2, @3];
        } else if (deviceScale <= 2) {
            preferredScales = @[@2, @3, @1];
        } else {
            preferredScales = @[@3, @2, @1];
        }
    });
    return preferredScales;
}
33 |
34 | /**
35 | Add scale modifier to the file name (without path extension),
36 | From @"name" to @"name@2x".
37 |
38 | e.g.
39 |
40 | Before | After(scale:2) |
41 | "icon" | "icon@2x" |
42 | "icon " | "icon @2x" |
43 | "icon.top" | "icon.top@2x" |
44 | "/p/name" | "/p/name@2x" |
45 | "/path/" | "/path/" |
46 |
47 |
48 | @param scale Resource scale.
49 | @return String by add scale modifier, or just return if it's not end with file name.
50 | */
static NSString *_NSStringByAppendingNameScale(NSString *string, CGFloat scale) {
    if (!string) return nil;
    // Leave scale-1 names, empty strings, and directory-like paths untouched.
    BOOL isScaleOne = fabs(scale - 1) <= __FLT_EPSILON__;
    if (isScaleOne || string.length == 0 || [string hasSuffix:@"/"]) {
        return string.copy;
    }
    return [NSString stringWithFormat:@"%@@%@x", string, @(scale)];
}
56 |
57 | /**
58 | Return the path scale.
59 |
60 | e.g.
61 |
62 | Path | Scale |
63 | "icon.png" | 1 |
64 | "icon@2x.png" | 2 |
65 | "icon@2.5x.png" | 2.5 |
66 | "icon@2x" | 1 |
67 | "icon@2x..png" | 1 |
68 | "icon@2x.png/" | 1 |
69 |
70 | */
static CGFloat _NSStringPathScale(NSString *string) {
    if (string.length == 0 || [string hasSuffix:@"/"]) return 1;
    NSString *baseName = string.stringByDeletingPathExtension;
    __block CGFloat pathScale = 1;
    
    // Look for a trailing "@<number>x" scale modifier on the extension-less name.
    NSRegularExpression *scaleSuffix = [NSRegularExpression regularExpressionWithPattern:@"@[0-9]+\\.?[0-9]*x$" options:NSRegularExpressionAnchorsMatchLines error:nil];
    [scaleSuffix enumerateMatchesInString:baseName options:kNilOptions range:NSMakeRange(0, baseName.length) usingBlock:^(NSTextCheckingResult *match, NSMatchingFlags flags, BOOL *stop) {
        // Only accept the modifier when at least 3 characters precede the '@'.
        if (match.range.location < 3) return;
        NSRange digits = NSMakeRange(match.range.location + 1, match.range.length - 2);
        pathScale = [string substringWithRange:digits].doubleValue;
    }];
    
    return pathScale;
}
85 |
86 |
@implementation YYImage {
    YYImageDecoder *_decoder;             ///< Decoder holding the original (animated) image data.
    NSArray *_preloadedFrames;            ///< Per-frame UIImage or NSNull; guarded by _preloadedLock.
    dispatch_semaphore_t _preloadedLock;  ///< Binary semaphore protecting _preloadedFrames.
    NSUInteger _bytesPerFrame;            ///< Memory cost of a single decoded frame, in bytes.
}

/// Searches the main bundle for `name`, trying the screen-preferred scales
/// first and, when `name` has no extension, a list of system-supported
/// extensions (same as UIImage). Returns a new image, or nil if no matching
/// file is found or the data is empty. Unlike +[UIImage imageNamed:], the
/// result is NOT cached.
+ (YYImage *)imageNamed:(NSString *)name {
    if (name.length == 0) return nil;
    if ([name hasSuffix:@"/"]) return nil; // a directory, not an image file
    
    NSString *res = name.stringByDeletingPathExtension;
    NSString *ext = name.pathExtension;
    NSString *path = nil;
    CGFloat scale = 1;
    
    // If no extension, guess by system supported (same as UIImage).
    NSArray *exts = ext.length > 0 ? @[ext] : @[@"", @"png", @"jpeg", @"jpg", @"gif", @"webp", @"apng"];
    NSArray *scales = _NSBundlePreferredScales();
    for (NSUInteger s = 0; s < scales.count; s++) {
        scale = ((NSNumber *)scales[s]).floatValue;
        NSString *scaledName = _NSStringByAppendingNameScale(res, scale);
        for (NSString *e in exts) {
            path = [[NSBundle mainBundle] pathForResource:scaledName ofType:e];
            if (path) break;
        }
        if (path) break;
    }
    if (path.length == 0) return nil;
    
    NSData *data = [NSData dataWithContentsOfFile:path];
    if (data.length == 0) return nil;
    
    return [[self alloc] initWithData:data scale:scale];
}

+ (YYImage *)imageWithContentsOfFile:(NSString *)path {
    return [[self alloc] initWithContentsOfFile:path];
}

+ (YYImage *)imageWithData:(NSData *)data {
    return [[self alloc] initWithData:data];
}

+ (YYImage *)imageWithData:(NSData *)data scale:(CGFloat)scale {
    return [[self alloc] initWithData:data scale:scale];
}

- (instancetype)initWithContentsOfFile:(NSString *)path {
    NSData *data = [NSData dataWithContentsOfFile:path];
    return [self initWithData:data scale:_NSStringPathScale(path)];
}

- (instancetype)initWithData:(NSData *)data {
    return [self initWithData:data scale:1];
}

/// Designated-style initializer: decodes the first frame for display and,
/// when the data contains more than one frame, keeps the decoder around for
/// lazy per-frame decoding. Returns nil for empty/undecodable data.
- (instancetype)initWithData:(NSData *)data scale:(CGFloat)scale {
    if (data.length == 0) return nil;
    if (scale <= 0) scale = [UIScreen mainScreen].scale;
    _preloadedLock = dispatch_semaphore_create(1);
    @autoreleasepool {
        YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:scale];
        YYImageFrame *frame = [decoder frameAtIndex:0 decodeForDisplay:YES];
        UIImage *image = frame.image;
        if (!image) return nil;
        self = [self initWithCGImage:image.CGImage scale:decoder.scale orientation:image.imageOrientation];
        if (!self) return nil;
        _animatedImageType = decoder.type;
        if (decoder.frameCount > 1) {
            // Only keep the decoder (and the original data it retains) for
            // multi-frame images; a still image needs neither.
            _decoder = decoder;
            _bytesPerFrame = CGImageGetBytesPerRow(image.CGImage) * CGImageGetHeight(image.CGImage);
            _animatedImageMemorySize = _bytesPerFrame * decoder.frameCount;
        }
        self.yy_isDecodedForDisplay = YES;
    }
    return self;
}

/// Original encoded data, or nil for single-frame images (no decoder kept).
- (NSData *)animatedImageData {
    return _decoder.data;
}

/// Decodes all frames into memory (blocking the caller) when set to YES;
/// releases them when set back to NO. See the header for the memory trade-off.
- (void)setPreloadAllAnimatedImageFrames:(BOOL)preloadAllAnimatedImageFrames {
    if (_preloadAllAnimatedImageFrames != preloadAllAnimatedImageFrames) {
        // Fix: store the new value. Without this the getter always returned NO
        // and setting the property back to NO could never release the frames.
        _preloadAllAnimatedImageFrames = preloadAllAnimatedImageFrames;
        if (preloadAllAnimatedImageFrames && _decoder.frameCount > 0) {
            NSMutableArray *frames = [NSMutableArray new];
            for (NSUInteger i = 0, max = _decoder.frameCount; i < max; i++) {
                UIImage *img = [self animatedImageFrameAtIndex:i];
                if (img) {
                    [frames addObject:img];
                } else {
                    // NSNull placeholder keeps frame indexes aligned.
                    [frames addObject:[NSNull null]];
                }
            }
            dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
            _preloadedFrames = frames;
            dispatch_semaphore_signal(_preloadedLock);
        } else {
            dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
            _preloadedFrames = nil;
            dispatch_semaphore_signal(_preloadedLock);
        }
    }
}

#pragma mark - protocol NSCoding

- (instancetype)initWithCoder:(NSCoder *)aDecoder {
    // Fix: use the NSSecureCoding-safe decode methods. This class returns YES
    // from +supportsSecureCoding, so plain -decodeObjectForKey: would raise
    // when the unarchiver has requiresSecureCoding enabled.
    NSNumber *scale = [aDecoder decodeObjectOfClass:[NSNumber class] forKey:@"YYImageScale"];
    NSData *data = [aDecoder decodeObjectOfClass:[NSData class] forKey:@"YYImageData"];
    if (data.length) {
        self = [self initWithData:data scale:scale.doubleValue];
    } else {
        self = [super initWithCoder:aDecoder];
    }
    return self;
}

- (void)encodeWithCoder:(NSCoder *)aCoder {
    if (_decoder.data.length) {
        // Archive the original animated data so all frames survive a round trip.
        [aCoder encodeObject:@(self.scale) forKey:@"YYImageScale"];
        [aCoder encodeObject:_decoder.data forKey:@"YYImageData"];
    } else {
        [super encodeWithCoder:aCoder]; // Apple use UIImagePNGRepresentation() to encode UIImage.
    }
}

+ (BOOL)supportsSecureCoding {
    return YES;
}

#pragma mark - protocol YYAnimatedImage

/// Frame count reported by the decoder; 0 for single-frame images (no decoder).
- (NSUInteger)animatedImageFrameCount {
    return _decoder.frameCount;
}

/// Loop count from the image data; 0 means infinite.
- (NSUInteger)animatedImageLoopCount {
    return _decoder.loopCount;
}

/// Memory cost of one decoded frame, computed at init time.
- (NSUInteger)animatedImageBytesPerFrame {
    return _bytesPerFrame;
}

/// Returns the decoded frame at `index`, serving from the preloaded cache when
/// available, otherwise decoding on demand. Returns nil when out of range or
/// when the frame failed to decode during preload (NSNull placeholder).
- (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
    if (index >= _decoder.frameCount) return nil;
    dispatch_semaphore_wait(_preloadedLock, DISPATCH_TIME_FOREVER);
    UIImage *image = _preloadedFrames[index]; // nil when nothing is preloaded
    dispatch_semaphore_signal(_preloadedLock);
    if (image) return image == (id)[NSNull null] ? nil : image;
    return [_decoder frameAtIndex:index decodeForDisplay:YES].image;
}

- (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
    NSTimeInterval duration = [_decoder frameDurationAtIndex:index];
    
    /*
     http://opensource.apple.com/source/WebCore/WebCore-7600.1.25/platform/graphics/cg/ImageSourceCG.cpp
     Many annoying ads specify a 0 duration to make an image flash as quickly as
     possible. We follow Safari and Firefox's behavior and use a duration of 100 ms
     for any frames that specify a duration of <= 10 ms.
     See <rdar://problem/7689300> and <http://webkit.org/b/36082> for more information.
     
     See also: http://nullsleep.tumblr.com/post/16524517190/animated-gif-minimum-frame-delay-browser.
     */
    if (duration < 0.011f) return 0.100f;
    return duration;
}

@end
259 |
--------------------------------------------------------------------------------
/YYImage/YYImageCoder.h:
--------------------------------------------------------------------------------
1 | //
2 | // YYImageCoder.h
3 | // YYImage
4 | //
5 | // Created by ibireme on 15/5/13.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
#import <UIKit/UIKit.h>
13 |
14 | NS_ASSUME_NONNULL_BEGIN
15 |
16 | /**
17 | Image file type.
18 | */
19 | typedef NS_ENUM(NSUInteger, YYImageType) {
20 | YYImageTypeUnknown = 0, ///< unknown
21 | YYImageTypeJPEG, ///< jpeg, jpg
22 | YYImageTypeJPEG2000, ///< jp2
23 | YYImageTypeTIFF, ///< tiff, tif
24 | YYImageTypeBMP, ///< bmp
25 | YYImageTypeICO, ///< ico
26 | YYImageTypeICNS, ///< icns
27 | YYImageTypeGIF, ///< gif
28 | YYImageTypePNG, ///< png
29 | YYImageTypeWebP, ///< webp
30 | YYImageTypeOther, ///< other image format
31 | };
32 |
33 |
34 | /**
35 | Dispose method specifies how the area used by the current frame is to be treated
36 | before rendering the next frame on the canvas.
37 | */
38 | typedef NS_ENUM(NSUInteger, YYImageDisposeMethod) {
39 |
40 | /**
41 | No disposal is done on this frame before rendering the next; the contents
42 | of the canvas are left as is.
43 | */
44 | YYImageDisposeNone = 0,
45 |
46 | /**
47 | The frame's region of the canvas is to be cleared to fully transparent black
48 | before rendering the next frame.
49 | */
50 | YYImageDisposeBackground,
51 |
52 | /**
53 | The frame's region of the canvas is to be reverted to the previous contents
54 | before rendering the next frame.
55 | */
56 | YYImageDisposePrevious,
57 | };
58 |
59 | /**
60 | Blend operation specifies how transparent pixels of the current frame are
61 | blended with those of the previous canvas.
62 | */
63 | typedef NS_ENUM(NSUInteger, YYImageBlendOperation) {
64 |
65 | /**
66 | All color components of the frame, including alpha, overwrite the current
67 | contents of the frame's canvas region.
68 | */
69 | YYImageBlendNone = 0,
70 |
71 | /**
72 | The frame should be composited onto the output buffer based on its alpha.
73 | */
74 | YYImageBlendOver,
75 | };
76 |
77 | /**
78 | An image frame object.
79 | */
80 | @interface YYImageFrame : NSObject
81 | @property (nonatomic) NSUInteger index; ///< Frame index (zero based)
82 | @property (nonatomic) NSUInteger width; ///< Frame width
83 | @property (nonatomic) NSUInteger height; ///< Frame height
84 | @property (nonatomic) NSUInteger offsetX; ///< Frame origin.x in canvas (left-bottom based)
85 | @property (nonatomic) NSUInteger offsetY; ///< Frame origin.y in canvas (left-bottom based)
86 | @property (nonatomic) NSTimeInterval duration; ///< Frame duration in seconds
87 | @property (nonatomic) YYImageDisposeMethod dispose; ///< Frame dispose method.
88 | @property (nonatomic) YYImageBlendOperation blend; ///< Frame blend operation.
89 | @property (nullable, nonatomic, strong) UIImage *image; ///< The image.
90 | + (instancetype)frameWithImage:(UIImage *)image;
91 | @end
92 |
93 |
94 | #pragma mark - Decoder
95 |
96 | /**
97 | An image decoder to decode image data.
98 |
99 | @discussion This class supports decoding animated WebP, APNG, GIF and system
100 | image format such as PNG, JPG, JP2, BMP, TIFF, PIC, ICNS and ICO. It can be used
101 | to decode complete image data, or to decode incremental image data during image
102 | download. This class is thread-safe.
103 |
104 | Example:
105 |
106 | // Decode single image:
     NSData *data = [NSData dataWithContentsOfFile:@"/tmp/image.webp"];
     YYImageDecoder *decoder = [YYImageDecoder decoderWithData:data scale:2.0];
     UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
110 |
111 | // Decode image during download:
112 | NSMutableData *data = [NSMutableData new];
113 | YYImageDecoder *decoder = [[YYImageDecoder alloc] initWithScale:2.0];
114 | while(newDataArrived) {
115 | [data appendData:newData];
116 | [decoder updateData:data final:NO];
117 | if (decoder.frameCount > 0) {
             UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
119 | // progressive display...
120 | }
121 | }
122 | [decoder updateData:data final:YES];
     UIImage *image = [decoder frameAtIndex:0 decodeForDisplay:YES].image;
124 | // final display...
125 |
126 | */
127 | @interface YYImageDecoder : NSObject
128 |
129 | @property (nullable, nonatomic, readonly) NSData *data; ///< Image data.
130 | @property (nonatomic, readonly) YYImageType type; ///< Image data type.
131 | @property (nonatomic, readonly) CGFloat scale; ///< Image scale.
132 | @property (nonatomic, readonly) NSUInteger frameCount; ///< Image frame count.
133 | @property (nonatomic, readonly) NSUInteger loopCount; ///< Image loop count, 0 means infinite.
134 | @property (nonatomic, readonly) NSUInteger width; ///< Image canvas width.
135 | @property (nonatomic, readonly) NSUInteger height; ///< Image canvas height.
136 | @property (nonatomic, readonly, getter=isFinalized) BOOL finalized;
137 |
138 | /**
139 | Creates an image decoder.
140 |
141 | @param scale Image's scale.
142 | @return An image decoder.
143 | */
144 | - (instancetype)initWithScale:(CGFloat)scale NS_DESIGNATED_INITIALIZER;
145 |
146 | /**
147 | Updates the incremental image with new data.
148 |
149 | @discussion You can use this method to decode progressive/interlaced/baseline
150 | image when you do not have the complete image data. The `data` was retained by
151 | decoder, you should not modify the data in other thread during decoding.
152 |
153 | @param data The data to add to the image decoder. Each time you call this
154 | function, the 'data' parameter must contain all of the image file data
155 | accumulated so far.
156 |
157 | @param final A value that specifies whether the data is the final set.
158 | Pass YES if it is, NO otherwise. When the data is already finalized, you can
159 | not update the data anymore.
160 |
161 | @return Whether succeed.
162 | */
163 | - (BOOL)updateData:(nullable NSData *)data final:(BOOL)final;
164 |
165 | /**
166 | Convenience method to create a decoder with specified data.
167 | @param data Image data.
168 | @param scale Image's scale.
169 | @return A new decoder, or nil if an error occurs.
170 | */
171 | + (nullable instancetype)decoderWithData:(NSData *)data scale:(CGFloat)scale;
172 |
173 | /**
174 | Decodes and returns a frame from a specified index.
175 | @param index Frame image index (zero-based).
176 | @param decodeForDisplay Whether decode the image to memory bitmap for display.
177 | If NO, it will try to returns the original frame data without blend.
178 | @return A new frame with image, or nil if an error occurs.
179 | */
180 | - (nullable YYImageFrame *)frameAtIndex:(NSUInteger)index decodeForDisplay:(BOOL)decodeForDisplay;
181 |
182 | /**
183 | Returns the frame duration from a specified index.
184 | @param index Frame image (zero-based).
185 | @return Duration in seconds.
186 | */
187 | - (NSTimeInterval)frameDurationAtIndex:(NSUInteger)index;
188 |
189 | /**
190 | Returns the frame's properties. See "CGImageProperties.h" in ImageIO.framework
191 | for more information.
192 |
193 | @param index Frame image index (zero-based).
194 | @return The ImageIO frame property.
195 | */
196 | - (nullable NSDictionary *)framePropertiesAtIndex:(NSUInteger)index;
197 |
198 | /**
199 | Returns the image's properties. See "CGImageProperties.h" in ImageIO.framework
200 | for more information.
201 | */
202 | - (nullable NSDictionary *)imageProperties;
203 |
204 | @end
205 |
206 |
207 |
208 | #pragma mark - Encoder
209 |
210 | /**
211 | An image encoder to encode image to data.
212 |
213 | @discussion It supports encoding single frame image with the type defined in YYImageType.
214 | It also supports encoding multi-frame image with GIF, APNG and WebP.
215 |
216 | Example:
217 |
218 | YYImageEncoder *jpegEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeJPEG];
219 | jpegEncoder.quality = 0.9;
220 | [jpegEncoder addImage:image duration:0];
     NSData *jpegData = [jpegEncoder encode];
222 |
223 | YYImageEncoder *gifEncoder = [[YYImageEncoder alloc] initWithType:YYImageTypeGIF];
224 | gifEncoder.loopCount = 5;
225 | [gifEncoder addImage:image0 duration:0.1];
226 | [gifEncoder addImage:image1 duration:0.15];
227 | [gifEncoder addImage:image2 duration:0.2];
     NSData *gifData = [gifEncoder encode];
229 |
230 | @warning It just pack the images together when encoding multi-frame image. If you
231 | want to reduce the image file size, try imagemagick/ffmpeg for GIF and WebP,
232 | and apngasm for APNG.
233 | */
234 | @interface YYImageEncoder : NSObject
235 |
236 | @property (nonatomic, readonly) YYImageType type; ///< Image type.
@property (nonatomic) NSUInteger loopCount;         ///< Loop count, 0 means infinite, only available for GIF/APNG/WebP.
238 | @property (nonatomic) BOOL lossless; ///< Lossless, only available for WebP.
239 | @property (nonatomic) CGFloat quality; ///< Compress quality, 0.0~1.0, only available for JPG/JP2/WebP.
240 |
241 | - (instancetype)init UNAVAILABLE_ATTRIBUTE;
242 | + (instancetype)new UNAVAILABLE_ATTRIBUTE;
243 |
244 | /**
245 | Create an image encoder with a specified type.
246 | @param type Image type.
247 | @return A new encoder, or nil if an error occurs.
248 | */
249 | - (nullable instancetype)initWithType:(YYImageType)type NS_DESIGNATED_INITIALIZER;
250 |
251 | /**
252 | Add an image to encoder.
253 | @param image Image.
254 | @param duration Image duration for animation. Pass 0 to ignore this parameter.
255 | */
256 | - (void)addImage:(UIImage *)image duration:(NSTimeInterval)duration;
257 |
258 | /**
259 | Add an image with image data to encoder.
260 | @param data Image data.
261 | @param duration Image duration for animation. Pass 0 to ignore this parameter.
262 | */
263 | - (void)addImageWithData:(NSData *)data duration:(NSTimeInterval)duration;
264 |
265 | /**
266 | Add an image from a file path to encoder.
267 | @param path Image file path.
268 | @param duration Image duration for animation. Pass 0 to ignore this parameter.
269 | */
270 | - (void)addImageWithFile:(NSString *)path duration:(NSTimeInterval)duration;
271 |
272 | /**
273 | Encodes the image and returns the image data.
274 | @return The image data, or nil if an error occurs.
275 | */
276 | - (nullable NSData *)encode;
277 |
278 | /**
279 | Encodes the image to a file.
280 | @param path The file path (overwrite if exist).
281 | @return Whether succeed.
282 | */
283 | - (BOOL)encodeToFile:(NSString *)path;
284 |
285 | /**
286 | Convenience method to encode single frame image.
287 | @param image The image.
288 | @param type The destination image type.
289 | @param quality Image quality, 0.0~1.0.
290 | @return The image data, or nil if an error occurs.
291 | */
292 | + (nullable NSData *)encodeImage:(UIImage *)image type:(YYImageType)type quality:(CGFloat)quality;
293 |
294 | /**
295 | Convenience method to encode image from a decoder.
296 | @param decoder The image decoder.
297 | @param type The destination image type;
298 | @param quality Image quality, 0.0~1.0.
299 | @return The image data, or nil if an error occurs.
300 | */
301 | + (nullable NSData *)encodeImageWithDecoder:(YYImageDecoder *)decoder type:(YYImageType)type quality:(CGFloat)quality;
302 |
303 | @end
304 |
305 |
306 | #pragma mark - UIImage
307 |
308 | @interface UIImage (YYImageCoder)
309 |
310 | /**
311 | Decompress this image to bitmap, so when the image is displayed on screen,
312 | the main thread won't be blocked by additional decode. If the image has already
313 | been decoded or unable to decode, it just returns itself.
314 |
315 | @return an image decoded, or just return itself if no needed.
316 | @see yy_isDecodedForDisplay
317 | */
318 | - (instancetype)yy_imageByDecoded;
319 |
320 | /**
 Whether the image can be displayed on screen without additional decoding.
 @warning It is just a hint for your code; changing it has no other effect.
323 | */
324 | @property (nonatomic) BOOL yy_isDecodedForDisplay;
325 |
326 | /**
327 | Saves this image to iOS Photos Album.
328 |
329 | @discussion This method attempts to save the original data to album if the
330 | image is created from an animated GIF/APNG, otherwise, it will save the image
331 | as JPEG or PNG (based on the alpha information).
332 |
333 | @param completionBlock The block invoked (in main thread) after the save operation completes.
334 | assetURL: An URL that identifies the saved image file. If the image is not saved, assetURL is nil.
335 | error: If the image is not saved, an error object that describes the reason for failure, otherwise nil.
336 | */
337 | - (void)yy_saveToAlbumWithCompletionBlock:(nullable void(^)(NSURL * _Nullable assetURL, NSError * _Nullable error))completionBlock;
338 |
339 | /**
340 | Return a 'best' data representation for this image.
341 |
 @discussion The conversion is based on these rules:
343 | 1. If the image is created from an animated GIF/APNG/WebP, it returns the original data.
344 | 2. It returns PNG or JPEG(0.9) representation based on the alpha information.
345 |
346 | @return Image data, or nil if an error occurs.
347 | */
348 | - (nullable NSData *)yy_imageDataRepresentation;
349 |
350 | @end
351 |
352 |
353 |
354 | #pragma mark - Helper
355 |
356 | /// Detect a data's image type by reading the data's header 16 bytes (very fast).
357 | CG_EXTERN YYImageType YYImageDetectType(CFDataRef data);
358 |
359 | /// Convert YYImageType to UTI (such as kUTTypeJPEG).
360 | CG_EXTERN CFStringRef _Nullable YYImageTypeToUTType(YYImageType type);
361 |
362 | /// Convert UTI (such as kUTTypeJPEG) to YYImageType.
363 | CG_EXTERN YYImageType YYImageTypeFromUTType(CFStringRef uti);
364 |
365 | /// Get image type's file extension (such as @"jpg").
366 | CG_EXTERN NSString *_Nullable YYImageTypeGetExtension(YYImageType type);
367 |
368 |
369 |
370 | /// Returns the shared DeviceRGB color space.
371 | CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceRGB();
372 |
373 | /// Returns the shared DeviceGray color space.
374 | CG_EXTERN CGColorSpaceRef YYCGColorSpaceGetDeviceGray();
375 |
376 | /// Returns whether a color space is DeviceRGB.
377 | CG_EXTERN BOOL YYCGColorSpaceIsDeviceRGB(CGColorSpaceRef space);
378 |
379 | /// Returns whether a color space is DeviceGray.
380 | CG_EXTERN BOOL YYCGColorSpaceIsDeviceGray(CGColorSpaceRef space);
381 |
382 |
383 |
384 | /// Convert EXIF orientation value to UIImageOrientation.
385 | CG_EXTERN UIImageOrientation YYUIImageOrientationFromEXIFValue(NSInteger value);
386 |
387 | /// Convert UIImageOrientation to EXIF orientation value.
388 | CG_EXTERN NSInteger YYUIImageOrientationToEXIFValue(UIImageOrientation orientation);
389 |
390 |
391 |
392 | /**
393 | Create a decoded image.
394 |
395 | @discussion If the source image is created from a compressed image data (such as
396 | PNG or JPEG), you can use this method to decode the image. After decoded, you can
397 | access the decoded bytes with CGImageGetDataProvider() and CGDataProviderCopyData()
398 | without additional decode process. If the image has already decoded, this method
399 | just copy the decoded bytes to the new image.
400 |
401 | @param imageRef The source image.
402 | @param decodeForDisplay If YES, this method will decode the image and convert
403 | it to BGRA8888 (premultiplied) or BGRX8888 format for CALayer display.
404 |
405 | @return A decoded image, or NULL if an error occurs.
406 | */
407 | CG_EXTERN CGImageRef _Nullable YYCGImageCreateDecodedCopy(CGImageRef imageRef, BOOL decodeForDisplay);
408 |
409 | /**
410 | Create an image copy with an orientation.
411 |
412 | @param imageRef Source image
413 | @param orientation Image orientation which will applied to the image.
 @param destBitmapInfo Destination image bitmap, only support 32bit format (such as ARGB8888).
415 | @return A new image, or NULL if an error occurs.
416 | */
417 | CG_EXTERN CGImageRef _Nullable YYCGImageCreateCopyWithOrientation(CGImageRef imageRef,
418 | UIImageOrientation orientation,
419 | CGBitmapInfo destBitmapInfo);
420 |
421 | /**
422 | Create an image copy with CGAffineTransform.
423 |
424 | @param imageRef Source image.
425 | @param transform Transform applied to image (left-bottom based coordinate system).
426 | @param destSize Destination image size
 @param destBitmapInfo Destination image bitmap, only support 32bit format (such as ARGB8888).
428 | @return A new image, or NULL if an error occurs.
429 | */
430 | CG_EXTERN CGImageRef _Nullable YYCGImageCreateAffineTransformCopy(CGImageRef imageRef,
431 | CGAffineTransform transform,
432 | CGSize destSize,
433 | CGBitmapInfo destBitmapInfo);
434 |
435 | /**
436 | Encode an image to data with CGImageDestination.
437 |
438 | @param imageRef The image.
439 | @param type The image destination data type.
440 | @param quality The quality (0.0~1.0)
441 | @return A new image data, or nil if an error occurs.
442 | */
443 | CG_EXTERN CFDataRef _Nullable YYCGImageCreateEncodedData(CGImageRef imageRef, YYImageType type, CGFloat quality);
444 |
445 |
446 | /**
447 | Whether WebP is available in YYImage.
448 | */
449 | CG_EXTERN BOOL YYImageWebPAvailable();
450 |
451 | /**
452 | Get a webp image frame count;
453 |
454 | @param webpData WebP data.
455 | @return Image frame count, or 0 if an error occurs.
456 | */
457 | CG_EXTERN NSUInteger YYImageGetWebPFrameCount(CFDataRef webpData);
458 |
459 | /**
460 | Decode an image from WebP data, returns NULL if an error occurs.
461 |
462 | @param webpData The WebP data.
463 | @param decodeForDisplay If YES, this method will decode the image and convert it
464 | to BGRA8888 (premultiplied) format for CALayer display.
465 | @param useThreads YES to enable multi-thread decode.
466 | (speed up, but cost more CPU)
467 | @param bypassFiltering YES to skip the in-loop filtering.
468 | (speed up, but may lose some smooth)
469 | @param noFancyUpsampling YES to use faster pointwise upsampler.
470 | (speed down, and may lose some details).
471 | @return The decoded image, or NULL if an error occurs.
472 | */
473 | CG_EXTERN CGImageRef _Nullable YYCGImageCreateWithWebPData(CFDataRef webpData,
474 | BOOL decodeForDisplay,
475 | BOOL useThreads,
476 | BOOL bypassFiltering,
477 | BOOL noFancyUpsampling);
478 |
479 | typedef NS_ENUM(NSUInteger, YYImagePreset) {
480 | YYImagePresetDefault = 0, ///< default preset.
481 | YYImagePresetPicture, ///< digital picture, like portrait, inner shot
482 | YYImagePresetPhoto, ///< outdoor photograph, with natural lighting
483 | YYImagePresetDrawing, ///< hand or line drawing, with high-contrast details
484 | YYImagePresetIcon, ///< small-sized colorful images
485 | YYImagePresetText ///< text-like
486 | };
487 |
488 | /**
489 | Encode a CGImage to WebP data
490 |
491 | @param imageRef image
492 | @param lossless YES=lossless (similar to PNG), NO=lossy (similar to JPEG)
493 | @param quality 0.0~1.0 (0=smallest file, 1.0=biggest file)
494 | For lossless image, try the value near 1.0; for lossy, try the value near 0.8.
495 | @param compressLevel 0~6 (0=fast, 6=slower-better). Default is 4.
496 | @param preset Preset for different image type, default is YYImagePresetDefault.
497 | @return WebP data, or nil if an error occurs.
498 | */
499 | CG_EXTERN CFDataRef _Nullable YYCGImageCreateEncodedWebPData(CGImageRef imageRef,
500 | BOOL lossless,
501 | CGFloat quality,
502 | int compressLevel,
503 | YYImagePreset preset);
504 |
505 | NS_ASSUME_NONNULL_END
506 |
--------------------------------------------------------------------------------
/YYImage/YYSpriteSheetImage.h:
--------------------------------------------------------------------------------
1 | //
2 | //  YYSpriteSheetImage.h
3 | // YYImage
4 | //
5 | // Created by ibireme on 15/4/21.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
12 | #import <UIKit/UIKit.h>
13 | 
14 | #if __has_include(<YYImage/YYAnimatedImageView.h>)
15 | #import <YYImage/YYAnimatedImageView.h>
16 | #elif __has_include(<YYWebImage/YYAnimatedImageView.h>)
17 | #import <YYWebImage/YYAnimatedImageView.h>
18 | #else
19 | #import "YYAnimatedImageView.h"
20 | #endif
21 |
22 | NS_ASSUME_NONNULL_BEGIN
23 |
24 | /**
25 | An image to display sprite sheet animation.
26 |
27 | @discussion It is a fully compatible `UIImage` subclass.
28 | The animation can be played by YYAnimatedImageView.
29 |
30 | Sample Code:
31 |
32 | // 8 * 12 sprites in a single sheet image
33 |     UIImage *img = [UIImage imageNamed:@"sprite-sheet"];
34 | NSMutableArray *contentRects = [NSMutableArray new];
35 | NSMutableArray *durations = [NSMutableArray new];
36 | for (int j = 0; j < 12; j++) {
37 | for (int i = 0; i < 8; i++) {
38 | CGRect rect;
39 | rect.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
40 | rect.origin.x = img.size.width / 8 * i;
41 | rect.origin.y = img.size.height / 12 * j;
42 | [contentRects addObject:[NSValue valueWithCGRect:rect]];
43 | [durations addObject:@(1 / 60.0)];
44 | }
45 | }
46 | YYSpriteSheetImage *sprite;
47 | sprite = [[YYSpriteSheetImage alloc] initWithSpriteSheetImage:img
48 | contentRects:contentRects
49 | frameDurations:durations
50 | loopCount:0];
51 | YYAnimatedImageView *imgView = [YYAnimatedImageView new];
52 | imgView.size = CGSizeMake(img.size.width / 8, img.size.height / 12);
53 | imgView.image = sprite;
54 |
55 |
56 |
57 | @discussion It can also be used to display single frame in sprite sheet image.
58 | Sample Code:
59 |
60 | YYSpriteSheetImage *sheet = ...;
61 | UIImageView *imageView = ...;
62 | imageView.image = sheet;
63 | imageView.layer.contentsRect = [sheet contentsRectForCALayerAtIndex:6];
64 |
65 | */
66 | @interface YYSpriteSheetImage : UIImage <YYAnimatedImage>
67 | 
68 | /**
69 |  Creates and returns an image object.
70 |  
71 |  @param image          The sprite sheet image (contains all frames).
72 |  
73 |  @param contentRects   The sprite sheet image frame rects in the image coordinates.
74 |      The rectangle should not outside the image's bounds. The objects in this array
75 |      should be created with [NSValue valueWithCGRect:].
76 |  
77 |  @param frameDurations The sprite sheet image frame's durations in seconds.
78 |      The objects in this array should be NSNumber.
79 |  
80 |  @param loopCount      Animation loop count, 0 means infinite looping.
81 |  
82 |  @return An image object, or nil if an error occurs.
83 |  */
84 | - (nullable instancetype)initWithSpriteSheetImage:(UIImage *)image
85 |                                      contentRects:(NSArray<NSValue *> *)contentRects
86 |                                    frameDurations:(NSArray<NSNumber *> *)frameDurations
87 |                                         loopCount:(NSUInteger)loopCount;
88 | 
89 | @property (nonatomic, readonly) NSArray<NSValue *> *contentRects;
90 | @property (nonatomic, readonly) NSArray<NSNumber *> *frameDurations;
91 | @property (nonatomic, readonly) NSUInteger loopCount;
92 | 
93 | /**
94 |  Get the contents rect for CALayer.
95 |  See "contentsRect" property in CALayer for more information.
96 |  
97 |  @param index Index of frame.
98 |  @return Contents Rect.
99 |  */
100 | - (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index;
101 | 
102 | @end
103 |
104 | NS_ASSUME_NONNULL_END
105 |
--------------------------------------------------------------------------------
/YYImage/YYSpriteSheetImage.m:
--------------------------------------------------------------------------------
1 | //
2 | //  YYSpriteSheetImage.m
3 | // YYImage
4 | //
5 | // Created by ibireme on 15/4/21.
6 | // Copyright (c) 2015 ibireme.
7 | //
8 | // This source code is licensed under the MIT-style license found in the
9 | // LICENSE file in the root directory of this source tree.
10 | //
11 |
12 | #import "YYSpriteSheetImage.h"
13 |
14 | @implementation YYSpriteSheetImage
15 | 
16 | - (instancetype)initWithSpriteSheetImage:(UIImage *)image
17 |                             contentRects:(NSArray *)contentRects
18 |                           frameDurations:(NSArray *)frameDurations
19 |                                loopCount:(NSUInteger)loopCount {
20 |     CGImageRef sheetBitmap = image.CGImage;
21 |     NSUInteger rectCount = contentRects.count;
22 |     // The sheet must carry a bitmap, and there must be exactly one duration per frame rect.
23 |     BOOL inputValid = (sheetBitmap != NULL) && rectCount >= 1 && rectCount == frameDurations.count;
24 |     if (!inputValid) return nil;
25 | 
26 |     self = [super initWithCGImage:sheetBitmap scale:image.scale orientation:image.imageOrientation];
27 |     if (self) {
28 |         _contentRects = [contentRects copy];
29 |         _frameDurations = [frameDurations copy];
30 |         _loopCount = loopCount;
31 |     }
32 |     return self;
33 | }
34 | 
35 | - (CGRect)contentsRectForCALayerAtIndex:(NSUInteger)index {
36 |     // CALayer.contentsRect works in the unit coordinate space.
37 |     CGRect unitRect = CGRectMake(0, 0, 1, 1);
38 |     if (index >= _contentRects.count) return unitRect;
39 | 
40 |     CGSize sheetSize = self.size;
41 |     if (sheetSize.width <= 0.01 || sheetSize.height <= 0.01) return unitRect;
42 | 
43 |     CGRect frameRect = [self animatedImageContentsRectAtIndex:index];
44 |     CGRect normalized = CGRectMake(frameRect.origin.x / sheetSize.width,
45 |                                    frameRect.origin.y / sheetSize.height,
46 |                                    frameRect.size.width / sheetSize.width,
47 |                                    frameRect.size.height / sheetSize.height);
48 |     // Clamp to the unit square; fall back to the whole image for out-of-bounds rects.
49 |     normalized = CGRectIntersection(normalized, unitRect);
50 |     if (CGRectIsNull(normalized) || CGRectIsEmpty(normalized)) normalized = unitRect;
51 |     return normalized;
52 | }
53 | 
54 | #pragma mark @protocol YYAnimatedImage
55 | 
56 | - (NSUInteger)animatedImageFrameCount {
57 |     return _contentRects.count;
58 | }
59 | 
60 | - (NSUInteger)animatedImageLoopCount {
61 |     return _loopCount;
62 | }
63 | 
64 | - (NSUInteger)animatedImageBytesPerFrame {
65 |     return 0; // frames share the sheet's bitmap; no extra memory per frame
66 | }
67 | 
68 | - (UIImage *)animatedImageFrameAtIndex:(NSUInteger)index {
69 |     return self; // every frame displays the sheet itself, cropped via contentsRect
70 | }
71 | 
72 | - (NSTimeInterval)animatedImageDurationAtIndex:(NSUInteger)index {
73 |     return index < _frameDurations.count ? ((NSNumber *)_frameDurations[index]).doubleValue : 0;
74 | }
75 | 
76 | - (CGRect)animatedImageContentsRectAtIndex:(NSUInteger)index {
77 |     return index < _contentRects.count ? ((NSValue *)_contentRects[index]).CGRectValue : CGRectZero;
78 | }
79 | 
80 | @end
81 |
--------------------------------------------------------------------------------
/YouTube.x:
--------------------------------------------------------------------------------
1 | #import "ShareImageViewController.h"
2 |
3 | %hook _ASDisplayView
4 | - (void)setKeepalive_node:(id)arg1 {
5 |     %orig;
6 |     // Avatar image cells are identified by the node-view's description string.
7 |     NSString *description = [self description];
8 |     if ([description containsString:@"ELMImageNode-View"] && [description containsString:@"eml.avatar"]) {
9 |         // Don't stack duplicate recognizers when the node is reassigned to this view.
10 |         for (UIGestureRecognizer *recognizer in self.gestureRecognizers) {
11 |             if ([recognizer.name isEqualToString:@"PFPViewerLongPress"]) return;
12 |         }
13 |         UILongPressGestureRecognizer *longPress = [[UILongPressGestureRecognizer alloc] initWithTarget:self action:@selector(savePFP:)];
14 |         longPress.minimumPressDuration = 0.3;
15 |         longPress.name = @"PFPViewerLongPress";
16 |         [self addGestureRecognizer:longPress];
17 |     }
18 | }
19 | 
20 | // Rewrites the avatar URL's "=s<size>-" token to request a 1024px image,
21 | // downloads it off the main thread, and presents the share sheet.
22 | %new
23 | - (void)savePFP:(UILongPressGestureRecognizer *)sender {
24 |     if (sender.state != UIGestureRecognizerStateBegan) return;
25 | 
26 |     NSString *URLString = self.keepalive_node.URL.absoluteString;
27 |     if (!URLString) return;
28 | 
29 |     NSRange sizeRange = [URLString rangeOfString:@"=s"];
30 |     if (sizeRange.location == NSNotFound) return;
31 |     NSRange searchRange = NSMakeRange(sizeRange.location, URLString.length - sizeRange.location);
32 |     NSRange dashRange = [URLString rangeOfString:@"-" options:0 range:searchRange];
33 |     if (dashRange.location == NSNotFound) return;
34 | 
35 |     NSRange digitsRange = NSMakeRange(sizeRange.location + 2, dashRange.location - sizeRange.location - 2);
36 |     NSString *newURLString = [URLString stringByReplacingCharactersInRange:digitsRange withString:@"1024"];
37 |     NSURL *PFPURL = [NSURL URLWithString:newURLString];
38 |     if (!PFPURL) return;
39 | 
40 |     // BUG FIX: the original fetched the image synchronously on the main thread,
41 |     // freezing the UI for the duration of the network request. Download in the
42 |     // background and hop back to the main queue only to present.
43 |     dispatch_async(dispatch_get_global_queue(QOS_CLASS_USER_INITIATED, 0), ^{
44 |         NSData *imageData = [NSData dataWithContentsOfURL:PFPURL];
45 |         UIImage *image = imageData ? [UIImage imageWithData:imageData] : nil;
46 |         if (!image) return; // download failed; nothing to show
47 |         dispatch_async(dispatch_get_main_queue(), ^{
48 |             ShareImageViewController *shareVC = [[ShareImageViewController alloc] init];
49 |             shareVC.imageToShare = image;
50 |             UINavigationController *navigationController = [[UINavigationController alloc] initWithRootViewController:shareVC];
51 |             navigationController.modalPresentationStyle = UIModalPresentationFormSheet;
52 |             UIViewController *currentController = [[UIApplication sharedApplication] delegate].window.rootViewController;
53 |             [currentController presentViewController:navigationController animated:YES completion:nil];
54 |         });
55 |     });
56 | }
57 | %end
--------------------------------------------------------------------------------
/control:
--------------------------------------------------------------------------------
1 | Package: com.dvntm.pfpviewer
2 | Name: PFPViewer
3 | Version: 0.0.6
4 | Architecture: iphoneos-arm
5 | Description: Tweak to view profile pictures
6 | Maintainer: dvntm
7 | Author: dvntm
8 | Section: Tweaks
9 | Depends: mobilesubstrate
10 |
--------------------------------------------------------------------------------