├── CHANGES.md
├── LICENCE.md
├── README.md
├── bin
└── ucss.bin.js
├── examples
└── config_ucss.js
├── fixtures
├── markup.html
├── markup2.html
└── rules.css
├── lib
├── css.js
├── helpers
│ ├── login.js
│ └── output.js
├── html.js
└── ucss.js
├── package.json
├── run-tests.js
└── test
├── crawler.js
├── general.js
├── http.js
├── login.js
└── selectors.js
/CHANGES.md:
--------------------------------------------------------------------------------
1 | ## 2016.10.16, Version 0.5.0
2 |
3 | * Features:
4 | - Make HTML optional, which allows finding duplicates in CSS (#33).
5 |
6 | * Bugs:
7 | - Strip URL fragments to avoid multiple visits to the same URL (#46).
8 | - Improve RegEx matching in excluded URLs.
9 |
10 | * Other:
11 | - Update dependencies.
12 | - Add RegEx example in example config.
13 | - Various refactorings.
14 |
15 | ## 2016.05.01, Version 0.4.8
16 |
17 | * Features:
18 | - Support for regular expressions in list of pages to exclude.
19 |
20 | * Other:
21 | - Update dependencies.
22 |
23 | ## 2015.08.28, Version 0.4.7-beta
24 |
25 | * Bugs:
26 | - #37: Fix bug where relative protocol URIs caused crawler to escape to
27 | other domains.
28 | - #40: Fix length check of array for load errors.
29 | - Fix bug where load error warnings were not shown for local files.
30 |
31 | * Other:
32 | - Update licence information.
33 |
34 | ## 2014.02.07, Version 0.4.6-beta
35 |
36 | * Bugs:
37 | - Fix bug where crawler reached maximum call stack size on large sites.
38 | - Fix so that links pointing to URLs with other protocols than http(s) are
39 | not followed.
40 |
41 | ## 2014.02.04, Version 0.4.4-beta
42 |
43 | * Features:
44 | - #30: Option for setting User agent header.
45 | Any header can now be set in configuration.
46 |
47 | * Bugs:
48 | - #32: Stops unexpectedly without any error message.
49 | - #29: Follows links to binaries.
50 | Now checks content type of response.
51 |
52 | * Other:
53 | - Better error handling and reporting (#9), including:
54 | - Warning at the end if any resources did not load.
55 | - Timeouts.
56 | - Increased default timeout from 4 seconds to 10 seconds.
57 | - "nosummary" option renamed to "silent".
58 |
59 | * Refactoring that has implications for customization:
60 | - Added error message parameter to logger.
61 | - Renamed output.standard to output.report, and changed/renamed the
62 | "summary" parameter to "silent".
63 | - Add load_error property to the result object, with a list of all errors
64 | on loading CSS and HTML.
65 |
66 | ## 2013.10.05, Version 0.4.3-beta
67 |
68 | * Bugs:
69 | - #26: Skip mailto: URLs
70 |
71 | ## 2013.08.28, Version 0.4.2-beta
72 |
73 | * Bugs:
74 | - Whitelisted rules not output correctly (#25).
75 |
76 | ## 2013.08.16, Version 0.4.1-beta
77 |
78 | * Bugs:
79 | - Updated help text, which had wrong name for the config file.
80 |
81 | ## 2013.08.16, Version 0.4.0-beta
82 |
83 | * Features:
84 | - Added option --full, to show full details for all rules.
85 | - Removed option --used (functionality is covered by --full).
86 | - New output report, with more information.
87 |
88 | * Refactoring:
89 | - Result object has new format. This has implications for custom
90 | output functions, as well as library use.
91 | - Added CSS selector position(s) in source file to result object.
92 |
93 | ## 2013.08.12, Version 0.3.2-beta
94 |
95 | * Features:
96 | - Added possibility to use custom logger and custom result handler.
97 |
98 | ## 2013.08.01, Version 0.3.1-beta
99 |
100 | * Bugs/issues fixed:
101 | - Parameters are now kept for links in pages.include (but removed from
102 | links found by the crawler).
103 | - Request pooling disabled, to avoid hangs
104 | (see https://github.com/mikeal/request/issues/465).
105 | - Added (configurable) timeout for requests.
106 | - Improved logging and error handling.
107 | - Handles links without href attributes.
108 |
109 | ## 2013.07.26, Version 0.3.0-beta
110 |
111 | * Features:
112 | - Follow links/crawl (#3).
113 | When running from command line, crawling is the new default.
114 |
115 | * Other:
116 | - Replace jsdom and jQuery with Cheerio.
117 | - Use request module instead of http/https (except in helpers/login.js).
118 | - Major refactoring, which includes introducing the promises pattern for
119 | managing callbacks (using Q).
120 |
121 | ## 2013.03.11, Version 0.2.2-beta
122 |
123 | * Bug fixes:
124 | - Make sure @keyframe and @font-face do not crash uCSS (issues #15, #16)
125 |
126 | ## 2012.11.23, Version 0.2.1-beta
127 |
128 | * Bug fixes:
129 | - Fix loading of config from relative paths.
130 |
131 | ## 2012.09.17, Version 0.2.0-beta
132 |
133 | * Changes:
134 | - Change config file format from JSON to Node module
135 | NOTE: This breaks backwards compatibility.
136 | * Bug fixes:
137 | - Fix custom login functions to work.
138 |
139 | ## 2012.09.10, Version 0.1.2-beta
140 |
141 | * Features:
142 | - Adds support for fetching CSS over HTTP.
143 | * Bug/stability fixes:
144 | - Now handles grouped selectors (issue #11)
145 | - Now tests selectors inside e.g. a media query (issue #10)
146 | - Now properly reads selectors after e.g. a media query (issue #10)
147 | * Dependency changes:
148 | - Adds cssom CSS parser, to properly extract selectors
149 |
150 | ## 2012.08.17, Version 0.1.1-beta
151 |
152 | * Stability fixes:
153 | - Ignore pseudo part of a selector.
154 | - Ignore at-rules (like @media and @font-face).
155 |
156 | ## 2012.08.10, Version 0.1.0-beta
157 |
158 | * First release.
--------------------------------------------------------------------------------
/LICENCE.md:
--------------------------------------------------------------------------------
1 | This code is offered under the Open Source [BSD license](http://www.opensource.org/licenses/bsd-license.php).
2 |
3 | # BSD License
4 |
5 | Copyright © 2015, Øyvind Håkestad
6 | Copyright © 2012, Opera Software
7 | All rights reserved.
8 |
9 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
10 |
11 | * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
12 | * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
13 | * Neither the name of Opera Software nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
14 |
15 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
16 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## uCSS (beta)
2 | uCSS is made for crawling (large) websites to find unused CSS selectors.
3 |
4 | ### Features
5 | Features of uCSS include:
6 | * Find unused CSS selectors, given an HTML code base.
7 | * Find duplicate CSS selectors (also when no HTML is given).
8 | * Count matches for each rule.
9 | * Follow links (crawl) within the given domain.
10 | * Give information about server responses, redirects, etc. while crawling, allowing you to find e.g. dead links.
11 |
12 | By setting up a config file, uCSS can also:
13 | * Log in, and visit pages both as a logged-in and a logged-out user.
14 | * Exclude specific pages and/or subfolders.
15 | * Visit specific pages instead of, or in addition to, crawling.
16 | * Whitelist CSS rules to be ignored (e.g. those toggled by JavaScript).
17 | * Customize output.
18 |
19 | uCSS is written for [Node](http://www.nodejs.org/). It can be used both as a
20 | library and as a command line tool.
21 |
22 | Want to contribute? Please see the section at the bottom.
23 |
24 | Please note: uCSS is currently in beta. There will be bugs, docs may be
25 | outdated, and functionality may change.
26 |
27 | #### What uCSS can't do
28 | uCSS does NOT:
29 | * Capture rules that are switched on using JavaScript after page load.
30 | * Look for style sheet URLs, internal style sheets, or inline styles in HTML
31 | code.
32 | * Remove unused CSS. If you need to automatically strip away unused CSS from your favourite CSS library, you may find [UnCSS](https://github.com/giakki/uncss) helpful.
33 |
34 | These features may, or may not, be added in the future.
35 |
36 | ### Installation
37 |
38 | `npm install ucss -g`
39 |
40 | If you use [Grunt](http://gruntjs.com/), check out [grunt-ucss](https://github.com/ullmark/grunt-ucss).
41 |
42 | ### Usage (command line)
43 |
44 | For basic usage, you can use the command line options:
45 | ```
46 | $ ucss --help
47 | Usage: ucss [OPTION]...
48 |
49 | Options:
50 | --help This help text.
51 | --html, -h HTML to load (local file or URL).
52 | --css, -c CSS to load (local file or URL).
53 | --config, -g Config file to use.
54 | --full, -f Show full report, with details for each rule.
55 | --silent, -s Only output list of rules. Nice if you need to pipe the output somewhere.
56 | --duplicates, -d Show duplicates. If only CSS is given, this is enabled by default.
57 |
58 | Either a config file or a CSS file is required. If no HTML is given, uCSS will only look for duplicate CSS.
59 | If no arguments are specified, uCSS will assume there is a config_ucss.js file in the current directory.
60 | ```
61 | So, to check a web page you could write
62 | ```
63 | $ ucss -h http://example.com/foo.html -c http://example.com/foo.css
64 | ```
65 |
66 | Note that if you use a selector both inside and outside a media query, it will
67 | be counted as a duplicate.
68 | To output a full report, with all found selectors and an overview of duplicates
69 | and ignored ones, you can do:
70 | ```
71 | $ ucss -f -h foo.html -c foo.css
72 | ```
73 | As you can see in the examples above, files can be stored locally as well as on
74 | the web.
75 |
76 | To use a config file ("config_ucss.js") that you have created in the current
77 | folder, simply run
78 | ```
79 | $ ucss
80 | ```
81 | or specify another file name using the -g option. For more info on the config
82 | file, see below.
83 |
84 | For advanced usage, please see the sections about config files.
85 |
86 | ### Usage (as library)
87 |
88 | ```
89 | // css can be an array of strings, file paths, or URLs
90 | var css = [".foo {} .bar {} .baz {}"];
91 |
92 | // html can be an array of strings, file paths, or URLs
93 | var html = ["<html><head></head><body class='foo'></body></html>"];
94 |
95 | var context = {
96 | whitelist: [".baz"], // CSS selectors to ignore
97 | auth: null, // For login (please see example elsewhere)
98 | timeout: 400 // Request timeout in ms (defaults to 10000 ms if not set)
99 | };
100 | var logger = null; // Function for logging HTTP requests
101 |
102 | // Do the magic
103 | ucss.analyze(html, css, context, logger, function(result) {
104 | // Do something to the result object
105 | console.log(result);
106 | });
107 | ```
108 |
109 | ### Setting up a config file
110 | There are several things you can do with a config file that you cannot do with
111 | command line arguments.
112 |
113 | uCSS follows links by default. But there may be specific files, or whole
114 | subfolders, that you don't want to check. These can be listed inside
115 | pages.exclude. If you want to exclude a whole subfolder, use a wildcard ("*")
116 | at the end of the URL (please see [example config
117 | file](https://github.com/operasoftware/ucss/blob/master/examples/config_ucss.js)).
118 | You may also use regular expressions.
119 |
120 | In other cases, you may want to visit just a single file, or there may be files that
121 | the crawler cannot reach (e.g. because they are not linked to). Those can be
122 | added to pages.include. Also, if you want to visit certain pages under an
123 | excluded subfolder, you can add those to pages.include. Note that pages.include
124 | does not support wildcards or regular expressions.
125 |
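Below is a minimal sketch of the "pages" part of a config file, using made-up
URLs: a wildcard excludes a whole subfolder, a regular expression excludes a
URL pattern, and "include" adds pages the crawler cannot reach on its own.
These keys go inside the object exported by the config file.

```
"pages": {
    "crawl": "http://example.com/",
    "exclude": [
        "http://example.com/archive/*",           // whole subfolder
        /http[s]?:\/\/example\.com\/(en|nb)\/.*/  // regular expression
    ],
    "include": [
        "http://example.com/unlinkedpage"         // not linked to from anywhere
    ]
}
```
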
126 | In addition to managing what pages to visit (and not to visit), you can check
127 | pages both as a regular visitor and as a logged in user. This is done by
128 | specifying a function that performs a log in, and then returns a session cookie
129 | for uCSS to use for identifying itself to the server. You can also set headers
130 | to send to the server.
131 |
132 | Furthermore, you can create a white list of selectors to be ignored. This is
133 | useful if you e.g. have classes toggled by JavaScript, or if you have special
134 | styling for various error situations that is tricky to trigger.
135 |
136 | As well as checking several HTML files, uCSS can also combine CSS from several
137 | files. You can specify a list of CSS files in your config file.
138 |
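As a sketch, combining several CSS files and whitelisting a couple of selectors
could look like this in the config (the file names and class names are
placeholders):

```
"css": ["base.css", "print.css"],           // checked as one combined set
"whitelist": [".js-toggled", ".error-box"]  // selectors to ignore
```
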
139 | If you name your config file "config_ucss.js", it will automatically be picked
140 | up by ucss. You can also name your config file something else, and use the -g
141 | option to point to it.
142 |
143 | Again, please see the [example config
144 | file](https://github.com/operasoftware/ucss/blob/master/examples/config_ucss.js).
145 | If you want to write a custom login function, please see below.
146 |
147 | #### Logging in
148 | Login requires you to set up a config file. In the config file, you can specify
149 | your own login function:
150 |
151 | ```
152 | module.exports = {
153 | ...,
154 | auth: {
155 | "username": "foo",
156 | "password": "bar",
157 | "loginUrl": "http://example.com/login/",
158 | "loginFunc": function(url, username, password, callback) {
159 | // Do login, get cookie
160 | var cookie = "sessionid:1234"
161 | callback(cookie);
162 | }
163 | },
164 | ...
165 | }
166 |
167 | ```
168 | If you use Django, you can use the supplied Django login helper (see [example
169 | config file](https://github.com/operasoftware/ucss/blob/master/examples/config_ucss.js)).
170 |
171 | ### Understanding the output
172 | While crawling, uCSS will output all URLs it visits, with response status code.
173 | If logged in, it will say so. It will also output a hash of the response body,
174 | which may be useful if you want to make sure that uCSS was successfully logged
175 | in (compare the hash of the logged in and the logged out visit).
176 |
177 | When the crawling is done, you will get a list of all unused selectors. If
178 | you've asked for a list of duplicates, that will be printed as well.
179 |
180 | Lastly, a summary will be printed. This contains the total number of CSS
181 | selectors found, and how many of them were used, unused, or duplicates.
182 |
183 | Using the --full option, you will get more detailed and colorful output: all
184 | rules are listed with their number of matches, along with the number of
185 | duplicates in the CSS and details about ignored and whitelisted rules.
186 |
187 | If the output doesn't suit your needs, and you know some JavaScript, you may
188 | customize it.
189 |
190 | ### Customizable output
191 | You can configure uCSS to do logging and handle the result differently from
192 | the default. Do this by adding an "output" property to the config, which can
193 | contain two functions, named "logger" and "result".
194 |
195 | ```
196 | module.exports = {
197 | ...,
198 | "output": {
199 | "logger": function(response, originalUrl, loggedIn) {
200 | // Do some logging here, e.g. using console.log.
201 | },
202 | "result": function(result) {
203 | // Do something with the result object, e.g. print every rule
204 | // found, together with positions in CSS file:
205 | for (var s in result.selectors) {
206 | // Only unused rules:
207 | if (result.selectors[s].matches_html === 0) {
208 | // Print position(s), given it's only one CSS file:
209 | var pos_css = result.selectors[s].pos_css;
210 | var key = Object.keys(pos_css)[0];
211 | console.log(s + ": " + pos_css[key]);
212 | }
213 | }
214 | }
215 | },
216 | ...
217 | }
218 |
219 | ```
220 |
221 | #### Logging
222 | The "logger" function is called every time there is a response to an HTTP
223 | request. It takes three parameters: "res" is a response object, as returned by
224 | [request](https://github.com/mikeal/request). "originalUrl" is a string that
225 | points to the HTML instance being visited. "loggedIn" is a boolean that is true
226 | if uCSS has sent an authentication cookie in the request header.
227 |
228 | This function is normally used for logging, but you can make it do whatever you
229 | want. Just note that it is triggered by an event, so uCSS will not wait for it
230 | to return.
231 |
232 | Setting "logger" to null will silence logging.
233 |
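As a sketch, a "logger" that only reports failed requests could look like the
following (it assumes the response object from the request module, as described
above, and that "res" may be missing when the request itself failed):

```
"logger": function (res, originalUrl, loggedIn) {
    if (res && res.statusCode >= 400) {
        console.log('Failed (' + res.statusCode + '): ' + originalUrl
            + (loggedIn ? ' (with login)' : ''));
    }
}
```
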
234 | #### Result
235 | The "result" function is called when uCSS is done. It receives the result
236 | object. Its "selectors" property maps each CSS selector to details such as the
237 | number of matches in the HTML ("matches_html"), the number of occurrences in
238 | the CSS ("occurences_css"), and whether the selector was ignored or
239 | whitelisted.
240 |
241 | This function can also do whatever you want, e.g. write the result to a file.
242 |
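For example, a sketch of a "result" function that writes the whole result
object to a JSON file (the file name is just an illustration):

```
"result": function (result) {
    // Dump the result object as pretty-printed JSON
    require('fs').writeFileSync('ucss-result.json',
        JSON.stringify(result, null, 2));
}
```
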
243 | ### Nice to know
244 |
245 | #### Some pages are not reachable by crawler
246 | Some pages are not accessible when crawling:
247 | * Pages that are only accessible by posting a form will not be checked. You may
248 | add them to pages.include if they are reachable without posting data.
249 | * All parameters in links are normally stripped away when crawling, which may
250 | have side effects for the rendering of some pages. If you want a URL to be
251 | visited with specific parameters, you have to include it in pages.include.
252 | * When crawling, pages that are not linked to in other pages will not be
253 | checked. You may add them to pages.include.
254 | * Style sheets are only fetched as a non-logged in user.
255 |
256 |
257 | #### At-rules
258 | All at-rules are ignored, except @media: All the content inside media queries
259 | is read as if there were no media query.
260 |
261 | ### I want to contribute!
262 |
263 | Great! Feel free to pick one of the issues, or submit a bug/feature you would
264 | want to work on.
265 |
266 | Please be tidy in your commits. Also, try to touch as little of the code
267 | as possible. This makes it easier to review and manage pull requests. Make sure
268 | your code is covered by tests, and write new ones if needed.
269 |
270 | If you plan to do big changes or refactoring, please notify me first, so that
271 | we can discuss this in advance.
272 |
--------------------------------------------------------------------------------
/bin/ucss.bin.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 | /* global module */
3 |
4 | var ucss = require('../lib/ucss');
5 | var optimist = require('optimist');
6 |
7 |
8 | /**
9 | * Read configuration file
10 | */
11 | function openConfig (filename) {
12 | var config;
13 | try {
14 | config = require(filename);
15 | } catch (e) {
16 | try {
17 | config = require(process.cwd() + '/' + filename);
18 | } catch (e) {
19 | console.log("Problems reading file '" + filename + "'.");
20 | console.log(e.name + ': ' + e.message);
21 | console.log('Please check that the file exists, and has the correct permissions.');
22 | process.exit(1);
23 | }
24 | }
25 |
26 | return config;
27 | }
28 |
29 |
30 | /**
31 | * Main.
32 | */
33 | function main () {
34 | var showHelp = function () {
35 | optimist.showHelp();
36 | console.log('Either a config file or a CSS file is required. '
37 | + 'If no HTML is given, uCSS will only look for duplicate CSS.'
38 | + '\nIf no arguments are specified, uCSS will assume there is a '
39 | + 'config_ucss.js file in the current directory.');
40 | };
41 |
42 | // Arguments parsing
43 | var argv = optimist.usage('Check if CSS selectors match anything in given HTML.\n'
44 | + 'Usage: $0 [OPTION]...')
45 | .options({
46 | help: {
47 | description: 'This help text.'
48 | },
49 | html: {
50 | alias: 'h',
51 | description: 'HTML to load (local file or URL).'
52 | },
53 | css: {
54 | alias: 'c',
55 | description: 'CSS to load (local file or URL).'
56 | },
57 | config: {
58 | alias: 'g',
59 | description: 'Config file to use.'
60 | },
61 | full: {
62 | alias: 'f',
63 | description:
64 | 'Show full report, with details for each rule.'
65 | },
66 | silent: {
67 | alias: 's',
68 | description: 'Only output list of rules. Nice if you need to pipe the output somewhere.'
69 | },
70 | duplicates: {
71 | alias: 'd',
72 | description: 'Show duplicates. If only CSS is given, this is enabled by default.'
73 | }
74 | }).argv;
75 |
76 | if (argv.help) {
77 | showHelp();
78 | process.exit(0);
79 | }
80 |
81 | if (undefined === argv.config) {
82 | argv.config = true;
83 | }
84 | if (undefined === argv.full) {
85 | argv.full = false;
86 | }
87 | if (undefined === argv.duplicates) {
88 | argv.duplicates = false;
89 | }
90 |
91 | var silent = true;
92 | if (undefined === argv.silent) {
93 | silent = false;
94 | }
95 |
96 | // CSS or config file is required
97 | var config = null;
98 | var cssIsSet = typeof argv.css === 'string';
99 | if (cssIsSet) {
100 | // Do stuff with html & css
101 | argv.config = false;
102 | } else if (typeof argv.config === 'string') {
103 | // Use config file
104 | config = openConfig(argv.config);
105 | } else if (argv.config === true) {
106 | // Search for config_ucss.js in the current directory
107 | config = openConfig(process.cwd() + '/config_ucss.js');
108 | } else {
109 | showHelp();
110 | }
111 |
112 | // Read from config, if it was found
113 | var css, pages, whitelist, auth, headers, timeout, logger, resultHandler;
114 | if (config) {
115 | css = config.css;
116 | pages = config.pages || null;
117 | whitelist = config.whitelist;
118 | auth = config.auth;
119 | timeout = config.timeout;
120 | headers = config.headers;
121 |
122 | if (config.output) {
123 | if (undefined !== config.output.logger) {
124 | logger = config.output.logger;
125 | }
126 | if (undefined !== config.output.result) {
127 | resultHandler = config.output.result;
128 | }
129 | }
130 | } else { // No config, using CSS and HTML arguments
131 | css = argv.css;
132 |
133 | if (argv.html) {
134 | pages = { 'crawl': argv.html };
135 | } else {
136 | pages = null;
137 | }
138 | }
139 |
140 | // Set up logger (custom, or default)
141 | if (typeof logger === 'undefined' && !silent) {
142 | logger = require('../lib/helpers/output').logger;
143 | }
144 |
145 | var done;
146 | if (typeof resultHandler === 'undefined') {
147 | done = function (result) {
148 | var gotHtml = pages ? true : false;
149 |
150 | require('../lib/helpers/output').report(
151 | result, argv.full, silent, argv.duplicates, gotHtml);
152 |
153 | process.exit(0);
154 | };
155 | } else {
156 | done = resultHandler;
157 | }
158 |
159 | var context = {
160 | whitelist: whitelist,
161 | auth: auth,
162 | headers: headers,
163 | timeout: timeout
164 | };
165 |
166 | ucss.analyze(pages, css, context, logger, done);
167 | }
168 |
169 | if (require.main === module) {
170 | main();
171 | }
172 |
--------------------------------------------------------------------------------
/examples/config_ucss.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | "pages": { // (Optional) Pages to check. Crawl or include is required.
3 | "crawl": "http://localhost/", // (Optional if "include" is given).
4 | // Starting point for crawler.
5 | "exclude": [ // (Optional) List of HTML files/URLs to skip.
6 | "http://localhost/some_page_to_exclude", // Exclude this specific page.
7 | "http://localhost/products/*", // Exclude all product pages. No
8 | // need to check lots of similar
9 | // pages. Add a few selected ones in
10 | // the 'include' list below instead.
11 | /http[s]*:\/\/localhost\/(en|nb)\/.*/ // Use regexps for exclude
12 | ],
13 | "include": [ // (Optional if "crawl" is given) List of HTML files/URLs
14 | // to check.
15 | "http://localhost/unlinkedpage",
16 | "http://localhost/products/foo" // Add product from excluded subfolder.
17 | ]
18 | },
19 | "headers": { "Accept-Language": "nb-no" }, // (Optional) Headers to send
20 | // to server.
21 | "css": [ // (Required) List of CSS files to check. If no HTML pages are given,
22 | // uCSS just reports duplicate rules.
23 | "base.css"
24 | ],
25 | "whitelist": [".foo", ".bar"], // (Optional) List of CSS rules to ignore,
26 | // e.g. ones added by JavaScript.
27 | "timeout": 10000, // (Optional) Timeout for HTTP requests (default is 10000 ms).
28 | "auth": { // (Optional) Authentication information. Please see docs.
29 | "username": "foo",
30 | "password": "bar",
31 | "loginUrl": "http://localhost:8000/accounts/login/",
32 | "loginFunc": "djangoLogin"
33 | },
34 | "output": { // (Optional) How to output information from uCSS
35 | "logger": function (res, originalUrl, loggedIn) {
36 | // (Optional) Function that is called for each visited URL.
37 | // Set it to null if you want it to be silent.
38 | console.log("Visited: ", originalUrl);
39 | },
40 | "result": function (result) { console.log(result); } // (Optional)
41 | }
42 | };
43 |
--------------------------------------------------------------------------------
/fixtures/markup.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/fixtures/markup2.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 |
14 |
15 |
--------------------------------------------------------------------------------
/fixtures/rules.css:
--------------------------------------------------------------------------------
1 | @charset "UTF-8";
2 | @font-face {font-family: 'MyWebFont'; src: url('webfont.eot'); src: url('webfont.eot?#iefix') format('embedded-opentype'), url('webfont.woff') format('woff'), url('webfont.ttf') format('truetype'), url('webfont.svg#svgFontName') format('svg');}
3 | @import url("print.css") print;
4 |
5 | /*
6 | TODO: @document
7 | TODO: @supports
8 | TODO: Conditionals
9 | */
10 |
11 | * { /* match */
12 | color: blue;
13 | }
14 | .foo { /* match */
15 | color: red;
16 | }
17 | .bar { /* match */
18 | background: blue;
19 | }
20 | .foo .bar { /* no match */
21 | float: left;
22 | }
23 | .bar #baz { /* match */
24 | }
25 | .qux { /* match */
26 | } /* comment here used to break things */
27 | .quux { /* no match */
28 | }
29 | .foo .qux .bar {
30 | }
31 | span[dir="ltr"] { /* match */
32 | float: left;
33 | }
34 | .bar span[dir="ltr"] { /* match */
35 | float: right;
36 | }
37 | .foo span[dir="ltr"] { /* no match */
38 | float: right;
39 | }
40 | .foo .qux .bar .baz {
41 | }
42 | @page {
43 | margin: 2in;
44 | }
45 | @-webkit-keyframes progress-bar-stripes {
46 | from { background-position: 40px 0 }
47 | to { background-position: 0 0 }
48 | }
49 | @-moz-keyframes progress-bar-stripes {
50 | from { background-position: 40px 0 }
51 | to { background-position: 0 0 }
52 | }
53 | @-ms-keyframes progress-bar-stripes {
54 | from { background-position: 40px 0 }
55 | to { background-position: 0 0 }
56 | }
57 | @-o-keyframes progress-bar-stripes {
58 | from { background-position: 0 0 }
59 | to { background-position: 40px 0 }
60 | }
61 | @keyframes progress-bar-stripes {
62 | from { background-position: 40px 0 }
63 | to { background-position: 0 0 }
64 | }
65 | @media all and (min-width: 500px) { .bar { background: blue; } }
66 | /* This comment should not make tests fail (it used to) */
67 |
--------------------------------------------------------------------------------
/lib/css.js:
--------------------------------------------------------------------------------
1 | var Q = require('q');
2 | var async = require('async');
3 | var fs = require('fs');
4 | var cssom = require('cssom');
5 | var request = require('request');
6 | var crypto = require('crypto');
7 | var events = require('events');
8 |
9 |
10 | module.exports = {
11 | // TODO: Make private, add documentation. Rename to newSelector?
12 | newRule: function () {
13 | return {
14 | ignored: false,
15 | whitelisted: false,
16 | matches_html: 0,
17 | matches_uris: [],
18 | occurences_css: 0,
19 | pos_css: {}
20 | };
21 | },
22 |
23 | _logger: null,
24 |
25 | // TODO: Document result object properly
26 | /**
27 | * Get selectors from CSS
28 | * @param {Array} css CSS to search through. This can be either an array
29 | * of Strings (containing CSS), an array of URIs to CSS files, or
30 | * an array of paths to CSS files.
31 | * @param {Array} whitelist List of rules to ignore
32 | * @param {Object} result Result object
33 | * @param {Object} headers Headers to send to server.
34 | * @param {Int} timeout Request timeout
35 | * @param {Function} logger Custom log function
36 | * @returns {Promise} Result object
37 | */
38 | getSelectors: function (css, whitelist, result, headers, timeout, logger) {
39 | this._logger = new events.EventEmitter();
40 | this._logger.on('request', logger);
41 |
42 | var self = this;
43 | var deferred = Q.defer();
44 |
45 | // Find all selectors
46 | async.forEach(css, function (item, forEachCallback) {
47 | var itemId = '';
48 | if (0 === item.indexOf('http')) { // From URI
49 | var uri = item;
50 | itemId = item;
51 |
52 | var options = { uri: uri,
53 | timeout: timeout || 10000,
54 | headers: headers,
55 | pool: false };
56 |
57 | request.get(options, function (error, res, data) {
58 | if (res && res.statusCode !== 200) {
59 | self._logger.emit('request', res, uri, false);
60 | result.load_errors.push({ uri: uri, error: res.statusCode });
61 | } else if (error) {
62 | if (error.toString().indexOf('TIMEDOUT') > -1) {
63 | self._logger.emit('request', null, uri, false, 'Timeout');
64 | } else {
65 | self._logger.emit('request', null, uri, false, error);
66 | }
67 | result.load_errors.push({ uri: uri, error: error });
68 | } else {
69 | self._logger.emit('request', res, uri, false);
70 | self._extractSelectorsFromString(
71 | itemId, data, whitelist, result);
72 | }
73 | forEachCallback();
74 | });
75 |
76 | return;
77 | } else if (-1 === item.indexOf('{')) { // From file
78 | itemId = item;
79 |
80 | try {
81 | item = fs.readFileSync(item).toString();
82 | } catch (error) {
83 | console.error('Unable to read %s: %s,', item, error.message);
84 | result.load_errors.push({ uri: itemId, error: error.message });
85 | }
86 | self._extractSelectorsFromString(itemId, item, whitelist, result);
87 | } else { // From string
88 | itemId = crypto.createHash('md5').update(item).digest('hex');
89 |
90 | self._extractSelectorsFromString(itemId, item, whitelist, result);
91 | }
92 |
93 | forEachCallback();
94 | }, function (err) {
95 | if (err) {
96 | // TODO: Error handling
97 | deferred.reject(new Error(err));
98 | } else {
99 | deferred.resolve(result);
100 | }
101 | });
102 |
103 | return deferred.promise;
104 | },
105 |
106 | // TODO: Document result object properly
107 | /**
108 | * Find selectors in CSS string
109 | *
110 | * @private
111 | * @param {String} css CSS code
112 | * @param {Object} selectors (optional) object to append found selectors
113 | * to. Also keeps count (e.g. {'.foo': 2})
114 | * @param {Array} whitelist List of rules to ignore
115 | * @param {Object} result Result object
116 | * @returns {Object} Result object
117 | */
118 | _extractSelectorsFromString: function (itemId, css, whitelist, result) {
119 | if (!css) {
120 | return result;
121 | }
122 |
123 | // Delete unsupported rules before CSSOM parsing, to avoid crash
124 | // TODO: Remove these, when/if they get supported by CSSOM
125 | var unsupported = [
126 | // "@supports { .foo { ... }}" or
127 | // "@-prefix-supports { .foo { ... }}"
128 | /@-*\w*-*supports\s.*?\}\s*?\}/g,
129 |
130 | // "@document url(http://example.com) { .foo { ... }}" or
131 | // "@-prefix-document url(http://example.com) { .foo { ... }}"
132 | /@-*\w*-*document\s.*?\}\s*?\}/g];
133 | for (var i = 0, l = unsupported.length; i < l; i++) {
134 | css = css.replace(unsupported[i], '');
135 | }
136 |
137 | var styles = cssom.parse(css);
138 | var rules = styles.cssRules;
139 | if (!rules) {
140 | return result;
141 | }
142 |
143 | this._getSelectorsFromRules(itemId, rules, whitelist, result);
144 |
145 | return result;
146 | },
147 |
148 | // TODO: Document result object properly
149 | /**
150 | * @private
151 | * @param {Object} rules Object as given by cssom.parse().cssRules.
152 | * @param {Object} selectors Already found selectors, with count.
153 | * @param {Array} whitelist List of rules to ignore
154 | * @param {Object} result Result object
155 | * @returns {Object} Result object
156 | */
157 | _getSelectorsFromRules: function (itemId, rules, whitelist, result) {
158 | for (var i = 0, l = rules.length; i < l; i++) {
159 | var rule = rules[i];
160 | var pos = rule.__starts;
161 |
162 | // @-rules are ignored, except media queries. For media queries,
163 | // child rules are handled. Other rules are handled as if they
164 | // have a selector text.
165 | //
166 | // @media:
167 | if (rule.media && rule.cssRules) {
168 | this._getSelectorsFromRules(itemId, rule.cssRules, whitelist, result);
169 |
170 | // Rules without selectorText are not processed (@-rules,
171 | // except @media)
172 | } else if (!rule.selectorText) {
173 | // Cleaning: Only want the first part (e.g. @font-face),
174 | // not full rule
175 | var sel = rule.cssText.split('{')[0].trim();
176 | result.selectors[sel] = this.newRule();
177 | result.selectors[sel].occurences_css++;
178 | result.total++;
179 |
180 | if (whitelist && whitelist.indexOf(sel) > -1) {
181 | result.selectors[sel].whitelisted = true;
182 | }
183 |
184 | if (undefined === result.selectors[sel].pos_css[itemId]) {
185 | result.selectors[sel].pos_css[itemId] = [pos];
186 | } else {
187 | result.selectors[sel].pos_css[itemId].push(pos);
188 | }
189 | // Other rules, containing selector(s)
190 | } else {
191 | var selectorGroup = rule.selectorText;
192 |
193 | // Several selectors can be grouped together, separated by
194 | // comma, e.g. ".foo, .bar":
195 | var selectorList = selectorGroup.split(',');
196 | var selectors = result.selectors;
197 |
198 | for (var j = 0, sl = selectorList.length; j < sl; j++) {
199 | var s = selectorList[j].trim();
200 |
201 | if (undefined === selectors[s]) {
202 | selectors[s] = this.newRule();
203 | selectors[s].occurences_css = 1;
204 | result.total++;
205 | } else {
206 | selectors[s].occurences_css++;
207 | }
208 |
209 | if (whitelist && whitelist.indexOf(s) > -1) {
210 | result.selectors[s].whitelisted = true;
211 | }
212 |
213 | if (undefined === selectors[s].pos_css[itemId]) {
214 | selectors[s].pos_css[itemId] = [pos];
215 | } else {
216 | selectors[s].pos_css[itemId].push(pos);
217 | }
218 | }
219 | }
220 | }
221 |
222 | return result;
223 | }
224 | };
225 |
--------------------------------------------------------------------------------
/lib/helpers/login.js:
--------------------------------------------------------------------------------
1 | /* global module */
2 |
3 | var cheerio = require('cheerio');
4 | var querystring = require('querystring');
5 |
6 | module.exports = {
7 | /**
8 | * Django login with CSRF handling. Uses login page with CSRF, username and
9 | * password fields.
10 | *
11 | * PLEASE NOTE:
12 | * Django does not give any indication whether login was successful or not,
13 | * so this code won't know.
14 | *
15 | * This code is rudimentary, and you may have to tweak it to make it work
16 | * for you. If so, you may want to copy it to "loginFunc" in your uCSS
17 | * config file.
18 | *
19 | * This code is not covered by any tests.
20 | *
21 | * @param {String} url Login URL
22 | * @param {String} username Username
23 | * @param {String} password Password
24 | * @param {Function} callback Callback that takes a cookie string ("key=value;") as its parameter.
25 | */
26 | djangoLogin: function (url, username, password, callback) {
27 | var parts = require('url').parse(url);
28 | var protocol = parts.protocol.replace(':', '');
29 |
30 | var host = parts.host.split(':')[0]; // Strip port from host
31 | var path = parts.path;
32 | var port = parts.port;
33 |
34 | // Set port
35 | if (!port) {
36 | if ('https' === protocol) {
37 | port = 443;
38 | }
39 | else {
40 | port = 80;
41 | }
42 | }
43 | var options = {
44 | host: host,
45 | port: port,
46 | path: path
47 | };
48 |
49 | // Create a cookie string from the set-cookie header
50 | var makeCookieString = function (setCookie) {
51 | // Make string from array parts
52 | var cp = setCookie.join(';');
53 | // Split string to get cookie parts
54 | cp = cp.split(';');
55 |
56 | var csrftoken, sessionid;
57 | for (var i = 0; i < cp.length; i++) {
58 | if (-1 < cp[i].indexOf('csrftoken')) {
59 | csrftoken = cp[i].split('=')[1].trim();
60 | }
61 | if (-1 < cp[i].indexOf('sessionid')) {
62 | sessionid = cp[i].split('=')[1].trim();
63 | }
64 | }
65 | var cookie = 'sessionid=' + sessionid;
66 | if (csrftoken) {
67 | cookie += ';csrftoken=' + csrftoken;
68 | }
69 |
70 | return cookie;
71 | };
72 |
73 | // Get login form, find csrf token, log in to get cookie.
74 | require(protocol).get(options, function (res) {
75 | var data = '';
76 | res.on('data', function (chunk) {
77 | data += chunk.toString();
78 | }).on('end', function () {
79 | var $ = cheerio.load(data);
80 |
81 | // Get token
82 | var token = $("input[name='csrfmiddlewaretoken']").attr('value').trim();
83 |
84 | // Find csrftoken and sessionid in set-cookie header
85 | var cookie = makeCookieString(res.headers['set-cookie']);
86 |
87 | // Log in
88 | var postData = querystring.stringify({
89 | 'username': username,
90 | 'password': password,
91 | 'csrfmiddlewaretoken': token
92 | });
93 |
94 | options.method = 'POST';
95 | options.headers = { 'Cookie': cookie,
96 | 'X-CSRFToken': token,
97 | 'Content-Length': postData.length,
98 | 'Content-Type': 'application/x-www-form-urlencoded',
99 | 'Referer': url
100 | };
101 | var postReq = require(protocol).request(options, function (res) {
102 | var cookie = makeCookieString(res.headers['set-cookie']);
103 | callback(cookie);
104 | });
105 |
106 | // post the data
107 | postReq.write(postData);
108 | postReq.end();
109 | });
110 | }).on('error', function (e) {
111 | console.log('Got error: ' + e.message);
112 | });
113 | }
114 | };
115 |
--------------------------------------------------------------------------------
/lib/helpers/output.js:
--------------------------------------------------------------------------------
1 | /* global module */
2 | var util = require('util');
3 | var crypto = require('crypto');
4 |
5 |
6 | // Colors
7 | var RED = '\033[31m';
8 | var GREEN = '\033[32m';
9 | var YELLOW = '\033[33m';
10 | var GREY = '\033[90m';
11 | var BOLD = '\x1B[1m';
12 | var RESET = '\033[0m';
13 |
14 |
15 | module.exports = {
16 | // TODO: Document result object properly
17 | /**
18 | * @param {Object} result Result object
19 | * @param {Boolean} showFull Show full report
20 | * @param {Boolean} silent Silent mode, only output rule list
21 | * @param {Boolean} showDuplicates Show duplicate rules
22 | * @param {Boolean} gotHtml Whether any HTML was given (if not, only duplicates are reported)
23 | */
24 | report: function (result, showFull, silent, showDuplicates, gotHtml) {
25 | var selectors = result.selectors;
26 |
27 | if (gotHtml) {
28 | if (!silent) {
29 | if (!showFull) {
30 | console.log('\nUnused rules:');
31 | }
32 | else {
33 | console.log('\nRules:');
34 | }
35 | console.log('-------------------------------------');
36 | }
37 |
38 | // Output unused rules:
39 | if (!showFull) {
40 | for (var s in selectors) {
41 | if (selectors[s].matches_html > 0 ||
42 | selectors[s].ignored ||
43 | selectors[s].whitelisted
44 | ) {
45 | continue;
46 | }
47 | console.log(s);
48 | }
49 | } else { // Output all rules, both used and unused
50 | for (var s in result.selectors) {
51 | var ignoredText = '';
52 | var duplicateText = '';
53 | var matchesText = '';
54 | var whitelistText = '';
55 |
56 | // Matches text
57 | if (selectors[s].matches_html === 0) {
58 | matchesText = s + ': ' + RED + selectors[s].matches_html
59 | + ' matches ' + RESET;
60 | } else {
61 | matchesText = s + ': ' + GREEN + selectors[s].matches_html
62 | + ' matches ' + RESET;
63 | }
64 |
65 | // Ignored text
66 | if (selectors[s].ignored) {
67 | ignoredText = RED + '[IGNORED] ' + RESET;
68 | }
69 |
70 | // Whitelisted text
71 | if (selectors[s].whitelisted) {
72 | whitelistText = YELLOW + '[WHITELIST] ' + RESET;
73 | }
74 |
75 | // Duplicates text
76 | if (selectors[s].occurences_css === 2) {
77 | duplicateText = YELLOW + '('
78 | + (selectors[s].occurences_css - 1)
79 | + ' duplicate)' + RESET;
80 | } else if (selectors[s].occurences_css > 2) {
81 | duplicateText = YELLOW + '('
82 | + (selectors[s].occurences_css - 1)
83 | + ' duplicates)' + RESET;
84 | }
85 |
86 | console.log(ignoredText + whitelistText + matchesText + duplicateText);
87 | }
88 | }
89 | }
90 |
91 | // Output duplicates:
92 | if (!gotHtml || showDuplicates) {
93 | if (!silent) {
94 | console.log('\nDuplicates:');
95 | console.log('-------------------------------------');
96 | } else {
97 | console.log(''); // Empty line, as separator
98 | }
99 | for (var s in result.selectors) {
100 | if (selectors[s].occurences_css > 1) {
101 | console.log(s + ': ' + selectors[s].occurences_css);
102 | }
103 | }
104 | }
105 |
106 | // Summary
107 | if (!silent) {
108 | var duplicateText = '';
109 | if (0 === result.total_duplicates) {
110 | duplicateText = ', no duplicates';
111 | } else {
112 | duplicateText = ', ' + result.total_duplicates;
113 | duplicateText += result.total_duplicates > 1
114 | ? ' duplicates' : ' duplicate';
115 | }
116 |
117 | var ignoredText = '';
118 | if (0 === result.total_ignored) {
119 | ignoredText = ', none ignored)';
120 | } else {
121 | ignoredText = ', ' + result.total_ignored + ' ignored)';
122 | }
123 |
124 | console.log(['-------------------------------------\n',
125 | 'Total: ', result.total,
126 | ' (', result.total_used, ' used, ',
127 | result.total_unused, ' unused',
128 | duplicateText, ignoredText
129 | ].join(''));
130 | if (result.load_errors.length > 0) {
131 | console.log('\n' + RED + BOLD + 'WARNING: ' + RESET
132 | + 'Some resources were not loaded successfully. This '
133 | + 'affects the result. Please see crawler log for '
134 | + 'more information.');
135 | }
136 | }
137 | },
138 |
139 | /**
140 | * Log to console for each visited URL.
141 | *
142 | * @param {Object} res Response object, as given by the request module.
143 | * @param {String} reqHref The href that was requested
144 | * @param {Boolean} loggedIn true if logged in, false if not.
145 | * @param {String} error Error message.
146 | */
147 | logger: function (res, reqHref, loggedIn, error) {
148 | var outputStr = '';
149 | var statusStr = '';
150 | var md5sum = '';
151 |
152 | if (error) {
153 | outputStr = RED + BOLD + error + ': ' + RESET + reqHref;
154 | } else {
155 | // Create a hash of content. Useful for checking if content of e.g. a
156 | // logged in and logged out version of a page is equal.
157 |
158 | if (res.body) {
159 | md5sum = crypto.createHash('md5').update(res.body).digest('hex');
160 | }
161 |
162 | // Status code, with color
163 | if (res.statusCode <= 300) {
164 | statusStr = GREEN + res.statusCode + RESET;
165 | } else if (res.statusCode >= 400) {
166 | statusStr = RED + BOLD + res.statusCode + RESET;
167 | }
168 |
169 | // Notify if redirect
170 | if (res.request.href === reqHref) {
171 | outputStr = util.format('HTTP %s %s', statusStr, res.request.href);
172 | } else {
173 | outputStr = util.format('HTTP %s %s ' + YELLOW + '=>' + RESET + ' %s',
174 | statusStr, reqHref, res.request.href);
175 | }
176 |
177 | }
178 |
179 | // Notify if logged in
180 | if (loggedIn) {
181 | outputStr += YELLOW + ' (with login)' + RESET;
182 | }
183 |
184 | // Log to console
185 | outputStr += ' ' + GREY + md5sum + RESET;
186 | console.log(outputStr);
187 | }
188 | };
189 |
--------------------------------------------------------------------------------
/lib/html.js:
--------------------------------------------------------------------------------
1 | var Q = require('q');
2 | var async = require('async');
3 | var url = require('url');
4 | var cheerio = require('cheerio');
5 | var fs = require('fs');
6 | var request = require('request');
7 | var events = require('events');
8 |
9 |
10 | // Colors
11 | var RED = '\033[31m';
12 | var RESET = '\033[0m';
13 |
14 |
15 | module.exports = {
16 | // TODO: Add documentation
17 | _logger: null,
18 |
19 | // TODO: Document result object properly
20 | /**
21 | * Iterates through a set of HTML resources, to check for matches of given
22 | * CSS selectors.
23 | *
24 | * @param {Object} pages
25 | * @param {String} pages.crawl URL to starting point of crawl.
26 | * @param {String} pages.include HTML instances to include, given
27 | * as a string of HTML, a URL or a path.
28 | * Useful for checking single files, in addition to
29 | * crawling (or if just a set of pages are to be
30 | * checked).
31 | * @param {String} pages.exclude Pages or subfolders to exclude.
32 | * @param {Object} result Result object
33 | * @param {Object} headers Headers to send to server.
34 | * @param {String} cookie Cookie to use for login, on the form
35 | * "sessionid=foo". Each uri in the html parameter will
36 | * be visited both with and without the cookie set.
37 | * @param {Int} timeout Request timeout
38 | * @param {Function} logger Custom log function
39 | * @returns {Promise} Result object
40 | */
41 | matchSelectors: function (pages, result, headers, cookie, timeout, logger) {
42 | if (!pages) return;
43 | if (!(pages.crawl || pages.exclude || pages.include)) return;
44 |
45 | this._logger = new events.EventEmitter();
46 | this._logger.on('request', logger);
47 |
48 | var deferred = Q.defer();
49 | var processed = []; // Array of processed items/pages
50 | var i, l;
51 |
52 | // Handle excludes
53 | var excludedUrls = [];
54 | if (pages.exclude) {
55 | for (i = 0, l = pages.exclude.length; i < l; i++) {
56 | var current = pages.exclude[i];
57 | if (current.test) {
58 | // regex detected
59 | excludedUrls.push(current);
60 | continue;
61 | } else if (current.indexOf('*') !== -1) {
62 | // folder globs
63 | var subfolder = current.substring(0, current.length - 1);
64 |
65 | excludedUrls.push(subfolder);
66 | continue;
67 | }
68 | excludedUrls.push(current.split('?')[0]);
69 | }
70 | }
71 |
72 | var queueItemHandler = this._queueItemHandler.bind(this);
73 | var queue = async.queue(function (item, queueCallback) {
74 | setImmediate(function () {
75 | queueItemHandler(item, processed, result, headers, timeout,
76 | cookie, excludedUrls, queue,
77 | queueCallback);
78 | });
79 | }, 8);
80 |
81 | queue.drain = function (err) {
82 | if (err) {
83 | deferred.reject(new Error(err));
84 | } else {
85 | deferred.resolve(result);
86 | }
87 | };
88 |
89 | // Crawl to find all HTML links
90 | if (pages.crawl) {
91 | for (i = 0, l = pages.crawl.length; i < l; i++) {
92 | var page = pages.crawl[i];
93 |
94 | // Strip away parameters and fragments when crawling
95 | page = page.split('#')[0];
96 | page = page.split('?')[0];
97 |
98 | queue.push({ page: page, followLinks: true });
99 | }
100 | }
101 |
102 | // Add pages included explicitly (e.g. in config) to queue
103 | if (pages.include) {
104 | for (i = 0, l = pages.include.length; i < l; i++) {
105 | // Add to queue, with parameters
106 | queue.push({ page: pages.include[i], followLinks: false });
107 | }
108 | }
109 |
110 | return deferred.promise;
111 | },
112 |
113 | /**
114 | * Handle an item in the queue of html documents to check.
115 | *
116 | * @param {Object} item HTML to process
117 | * @param {Object} processed Array of already processed items/pages
118 | * @param {Object} result Result object
119 | * @param {Object} headers Headers to send to server.
120 | * @param {String} cookie Cookie to use for login, on the form
121 | * "sessionid=foo". Each uri in the html parameter will
122 | * be visited both with and without the cookie set.
123 | * @param {Int} timeout Request timeout
124 | * @param {Array} excludedUrls List of sub domains to be excluded
125 | * when crawling.
126 | * @param {Object} queue Queue object from async, used to queue up new
127 | * items found during crawl.
128 | * @param {Function} queueCallback Callback to call when done.
129 | */
130 |
131 | _queueItemHandler: function (item, processed, result, headers, timeout,
132 | cookie, excludedUrls, queue,
133 | queueCallback) {
134 | // If in processed, skip (may have been in excluded list)
135 | if (-1 !== processed.indexOf(item.page)) {
136 | queueCallback();
137 | return;
138 | }
139 |
140 | var page = item.page;
141 | var uri, host;
142 |
143 | if (0 === page.indexOf('http')) {
144 | uri = page;
145 | host = url.parse(uri).host || '';
146 | }
147 |
148 | // Gather all html resources as strings
149 | var htmlToCheck = [];
150 | var html = this._getHtmlAsString(page, result, headers, null, timeout);
151 | if (html) {
152 | htmlToCheck.push(html); // regular visit
153 | }
154 | if (cookie) {
155 | html = this._getHtmlAsString(page, result, headers, cookie, timeout);
156 | if (html) {
157 | htmlToCheck.push(html); // logged in visit
158 | }
159 | }
160 | processed.push(page);
161 |
162 | // Q.spread waits for both promises (the regular visit and, if a cookie is
163 | // set, the logged in visit) and spreads the resolved HTML strings into the
164 | // callback arguments below.
163 | // Process html strings to match selectors
164 | var queueLinks = this._queueLinks.bind(this);
165 | var matchSelectorsInDocument = this._matchSelectorsInDocument.bind(this);
166 |
167 | Q.spread(htmlToCheck, function (regularResult, loggedInResult) {
168 | var context = {
169 | uri: uri,
170 | followLinks: item.followLinks,
171 | result: result,
172 | excludedUrls: excludedUrls
173 | };
174 |
175 | var document;
176 | if (regularResult) {
177 | document = cheerio.load(regularResult);
178 |
179 | matchSelectorsInDocument(document, context.uri, result);
180 | if (context.followLinks) { // look for links in document, add to queue
181 | queueLinks(document, queue, context);
182 | }
183 | }
184 |
185 | if (loggedInResult) {
186 | document = cheerio.load(loggedInResult);
187 |
188 | matchSelectorsInDocument(document, context.uri, result);
189 | if (context.followLinks) { // look for links in document, add to queue
190 | queueLinks(document, queue, context);
191 | }
192 | }
193 | }).fail(function (error) {
194 | console.error('Unable to read %s: %s', uri, error);
195 | console.log(error.stack);
196 | }).done(queueCallback);
197 | },
198 |
199 | /**
200 | * Finds links in a HTML string and adds them to queue of pages to
201 | * process.
202 | *
203 | * Will also find links and queue them for later processing (if
204 | * context.followLinks is true)
205 | *
206 | * @private
207 | * @param {Object} document HTML as Cheerio document.
208 | * @param {Object} queue Queue object, as returned from the Async library.
209 | * @param {Object} context An object containing the context information:
210 | * @param {String} context.uri Original URI to the html string
211 | * @param {Boolean} context.followLinks If links in html string should be followed
212 | * @param {Array} context.selectors List of selectors to match
213 | * @param {Array} context.excludedUrls List of subfolders to ignore
214 | * @param {Object} context.result Object to append results to
215 | * @param {Object} context.result.used E.g. {".foo": 1 }
216 | * @param {Object} context.result.ignored E.g. {"@font-face": 1}
217 | */
218 | _queueLinks: function (document, queue, context) {
219 | var uri = context.uri;
220 | var excludedUrls = context.excludedUrls;
221 | var followLinks = context.followLinks;
222 | var links = document('a');
223 |
224 | for (var i = 0, l = links.length; i < l; i++) {
225 | var link = links[i];
226 |
227 | if (!('href' in link.attribs)) {
228 | continue;
229 | }
230 |
231 | var href = link.attribs.href;
232 |
233 | var protocol = url.parse(href).protocol ? url.parse(href).protocol : 'http:';
234 |
235 | // If no href, URI is parameter only, or protocol is not http,
236 | // then skip.
237 | if (!href
238 | || href.indexOf('?') === 0
239 | || protocol.indexOf('http') === -1) {
240 | continue;
241 | }
242 |
243 | this._addLinkToQueue(link.attribs.href, queue, uri, followLinks, excludedUrls);
244 | }
245 | },
246 |
247 | /**
248 | * Add link to queue (but only if it's on the same host as parentUri).
249 | *
250 | * @private
251 | * @param {String} link Link to handle.
252 | * @param {Object} queue Queue object, as returned from the Async library.
253 | * @param {String} parentUri URL to document containing this link.
254 | * @param {Boolean} followLinks Should links in the document link refers to
255 | * be followed?
256 | */
257 | _addLinkToQueue: function (link, queue, parentUri, followLinks, excludedUrls) {
258 | var host = url.parse(parentUri).host;
259 |
260 | // Strip away parameters and fragments when crawling
261 | link = link.split('?')[0];
262 | link = link.split('#')[0];
263 |
264 | // Resolve link. This handles domain relative URLs, as well as protocol
265 | // relative URLs. If link and parentUri points to different domains,
266 | // link is left alone.
267 | link = url.resolve(parentUri, link);
268 |
269 | var handleThis = true;
270 |
271 | // If under excluded domain, skip
272 | if (url.parse(link).host !== host) handleThis = false;
273 | if (handleThis) {
274 | for (var j = 0; j < excludedUrls.length; j++) {
275 | var excluded = excludedUrls[j];
276 | if (excluded.test) {
277 | // is a regex
278 | if (excluded.test(link)) {
279 | handleThis = false;
280 | break;
281 | }
282 | } else if (link.indexOf(excluded) === 0 || url.parse(link).pathname.indexOf(excluded) === 0) {
283 | handleThis = false;
284 | break;
285 | }
286 | }
287 | }
288 |
289 | if (handleThis) {
290 | queue.push({ page: link, followLinks: followLinks });
291 | }
292 | },
293 |
294 | // TODO: Document result object properly
295 | /**
296 | * Match selectors loaded into a Cheerio HTML document.
297 | *
298 | * @private
299 | * @param {Object} document HTML as Cheerio document.
300 | * @param {string} uri Original URI to the html string.
301 | * @param {Object} result Result object.
302 | */
303 | _matchSelectorsInDocument: function (document, uri, result) {
304 | // Loop through selectors
305 | for (var selector in result.selectors) {
306 | // If current selector is whitelisted, skip.
307 | if (result.selectors[selector].whitelisted) {
308 | continue;
309 | }
310 |
311 | if (selector) {
312 | if (-1 < selector.indexOf('@')) {
313 | result.selectors[selector].ignored = true;
314 |
315 | continue;
316 | }
317 |
318 | var oSelector = selector;
319 | // Remove pseudo part of selector
320 | selector = selector.split(':')[0];
321 | // Check if selector is used
322 | try {
323 | var len = document(selector).length;
324 | if (len > 0) {
325 | // Increment total number of selectors used
326 | if (result.selectors[oSelector].matches_html == 0) {
327 | result.total_used++;
328 | }
329 |
330 | // Add number of matches for selector to total matches
331 | result.selectors[oSelector].matches_html = result.selectors[oSelector].matches_html + len;
332 | result.selectors[oSelector].matches_uris.push(uri);
333 | }
334 | } catch (error) {
335 | console.log(RED + 'Selector: "' +
336 | oSelector + '" has ' + error.name + '. ' + error.message + RESET);
337 | }
338 | }
339 | }
340 | return;
341 | },
342 |
343 | /**
344 | * Match selectors in a html string.
345 | *
346 | * @private
347 | * @param {String} page URL, path or HTML as string.
348 | * @param {Object} result Result object
349 | * @param {Object} headers Headers to send to server.
350 | * @param {Array} cookie Cookie to use for login, on the form
351 | * "sessionid=foo". Each uri in the html parameter will
352 | * be visited both with and without the cookie set.
353 | * @param {Int} timeout Request timeout
354 | * @returns {Promise} HTML as string
355 | */
356 | _getHtmlAsString: function (page, result, headers, cookie, timeout) {
357 | var deferred = Q.defer();
358 | var data;
359 |
360 | // Get page as raw html
361 | // If URI is given, fetch HTML
362 | //
363 | // Note: _addLinkToQueue adds host etc. to relative URLs when crawling.
364 | if (0 === page.indexOf('http')) { // From URI
365 | var uri = page;
366 |
367 | var loggedIn = false;
368 | if (cookie) {
369 | headers['Cookie'] = cookie;
370 | headers['Referer'] = uri;
371 |
372 | loggedIn = true;
373 | }
374 |
375 | var options = { uri: uri,
376 | headers: headers,
377 | timeout: timeout || 10000,
378 | pool: false };
379 |
380 | var self = this;
381 |
382 | request.get(options, function (error, res, data) {
383 | if (res && res.statusCode !== 200) {
384 | self._logger.emit('request', res, uri, loggedIn);
385 | result.load_errors.push({ uri: uri, error: res.statusCode });
386 | } else if (error) {
387 |                 if (error.toString().indexOf('TIMEDOUT') > -1) {
388 | self._logger.emit('request', null, uri, loggedIn, 'Timeout');
389 | } else {
390 | self._logger.emit('request', null, uri, loggedIn, error);
391 | }
392 | result.load_errors.push({ uri: uri, error: error });
393 | data = '';
394 | } else {
395 | // Check content type, ignore if not html (trust server).
396 | // TODO: Look into ways of improving this further, as
397 | // server may not be telling the truth.
398 |                 var contentType = res.headers['content-type'] || '';
399 | if (contentType.indexOf('text/html') === -1) {
400 | data = '';
401 | } else {
402 | self._logger.emit('request', res, uri, loggedIn);
403 | }
404 | }
405 |
406 | deferred.resolve(data);
407 | });
408 |         } else if (-1 === page.indexOf('<html>')) { // From file
409 | try {
410 | data = fs.readFileSync(page).toString();
411 | } catch (error) {
412 | console.error('Unable to read %s: %s', page, error.message);
413 |                 result.load_errors.push({ uri: page, error: error.message });
414 | }
415 | deferred.resolve(data);
416 | } else { // From string
417 | deferred.resolve(page);
418 | }
419 |
420 | return deferred.promise;
421 | }
422 | };
423 |
--------------------------------------------------------------------------------
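The exclusion loop in _addLinkToQueue above accepts both plain strings (treated as prefixes of the resolved URL or of its path) and regular expressions (detected via a .test property). Below is a minimal standalone sketch of that decision, using Node's legacy url module as the crawler does; the function name isExcluded is illustrative only and is not part of the library.

    var url = require('url');

    // True if `link` should be skipped, given a mixed list of string
    // prefixes and RegExp objects (mirrors the loop in _addLinkToQueue).
    function isExcluded(link, excludedUrls) {
        for (var i = 0; i < excludedUrls.length; i++) {
            var excluded = excludedUrls[i];
            if (excluded.test) { // RegExp
                if (excluded.test(link)) return true;
            } else if (link.indexOf(excluded) === 0 ||
                       url.parse(link).pathname.indexOf(excluded) === 0) {
                return true;
            }
        }
        return false;
    }

    // isExcluded('http://example.com/admin/page.html', ['/admin/']);   // true
    // isExcluded('http://example.com/page.html', [/relative[2-5]/]);   // false
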
/lib/ucss.js:
--------------------------------------------------------------------------------
1 | var Q = require('q');
2 |
3 |
4 | var ucss = {
5 | /**
6 | * @param {Object} pages
7 | * @param {String} pages.crawl URL to starting point of crawl.
8 | * @param {String} pages.include HTML instances to include, given
9 |            as a string of HTML, a URL or a path.
10 | * Useful for checking single files, in addition to
11 |            crawling (or if just a set of pages is to be
12 |            checked).
13 | * @param {String} pages.exclude Pages or subfolders to exclude.
14 | * @param {Array|String} css A CSS string, or an array of CSS
15 | * resources (strings, paths, or URLs).
16 | * @param {Object} context
17 | * @param {Array} context.whitelist List of selectors to ignore
18 |  * @param {String} context.cookie Cookie to use for login, of the form
19 |            "sessionid=foo". Each page will be visited both with
20 |            and without the cookie.
21 | * @param {Int} timeout Request timeout.
22 | * @param {Function} logger Function for handling logging output (see the
23 | * logger function in output.js for example).
24 | * @param {Function} doneCallback Function to execute when done (typically a
25 |            reporter). The result object built below (per-selector data
26 |            in result.selectors, plus the totals and load_errors) is
27 |            passed to it.
28 | */
29 | search: function (pages, css, context, timeout, logger, doneCallback) {
30 | var cssHandler = require('../lib/css');
31 | var htmlHandler = require('../lib/html');
32 | var cookie = context.cookie;
33 | var whitelist = context.whitelist;
34 | var headers = context.headers;
35 | var result = {
36 | selectors: {},
37 | total: 0,
38 | total_used: 0,
39 | total_unused: 0,
40 | total_duplicates: 0,
41 | total_ignored: 0,
42 | load_errors: []
43 | };
44 |
45 | Q.fcall(function () {
46 | return cssHandler.getSelectors(css, whitelist, result, headers, timeout, logger);
47 | })
48 | .then(function (result) {
49 | if (result.total === 0) {
50 |                 throw new Error('No CSS loaded, nothing to do.');
51 | }
52 | if (pages) {
53 | return htmlHandler.matchSelectors(pages, result, headers, cookie,
54 | timeout, logger);
55 | }
56 | return result;
57 | })
58 | .fail(function (error) {
59 | console.error(error);
60 | process.exit(1);
61 | })
62 | .done(function () {
63 | // Count unused and ignored
64 | for (var selector in result.selectors) {
65 | var sel = result.selectors[selector];
66 |
67 | // Unused: No matches in html, and not ignored
68 | if (sel.matches_html === 0 && !sel.ignored && !sel.whitelisted) {
69 | result.total_unused++;
70 | }
71 |
72 | // Ignored
73 | if (sel.ignored || sel.whitelisted) {
74 | result.total_ignored++;
75 | }
76 |
77 | // Duplicate
78 | if (sel.occurences_css > 1) {
79 | result.total_duplicates++;
80 | }
81 | }
82 |
83 | doneCallback(result);
84 | });
85 | }
86 | };
87 |
88 |
89 | module.exports = {
90 | /**
91 | * Matches selectors in a set of CSS files against a set of HTML resources
92 | * to find how many times each rule has been used, and if rules are unused.
93 | * Also finds duplicate CSS rules.
94 | *
95 | * @param {Object} [pages]
96 | * @param {String} [pages.crawl] URL to starting point of crawl.
97 | * @param {String} [pages.include] HTML instances to include, given
98 |            as a string of HTML, a URL or a path.
99 | * Useful for checking single files, in addition to
100 |            crawling (or if just a set of pages is to be
101 |            checked).
102 | * @param {String} [pages.exclude] Pages or subfolders to exclude.
103 | * @param {Array|String} css A CSS string, or an array of CSS
104 | * resources (strings, paths, or URLs).
105 | * @param {Object} context
106 | * @param {Array} context.whitelist List of selectors to ignore
107 | * @param {Object} context.headers Headers to send to server
108 | * @param {Object} context.auth Authentication information
109 | * @param {String} context.auth.username
110 | * @param {String} context.auth.password
111 | * @param {String} context.auth.loginUrl
112 |  * @param {String|Function} context.auth.loginFunc Login function, or name
113 | * of function in lib/helpers/login.js.
114 | * @param {Function} logger Function for handling logging output
115 | * (see the logger function in output.js for example).
116 | * @param {Function} doneCallback Function to execute when done (typically a
117 |            reporter). The result object (per-selector data in
118 |            result.selectors, plus the totals and load_errors) is
119 |            passed to it.
120 | */
121 | analyze: function (pages, css, context, logger, doneCallback) {
122 | // Ensure that doneCallback is callable:
123 | if (!doneCallback) {
124 | doneCallback = function (result) {
125 | console.log(result);
126 | };
127 | }
128 |
129 | // Are the needed resources available?
130 | if (!css) {
131 | console.warn('No CSS given, nothing to do.');
132 | doneCallback({});
133 | return null;
134 | }
135 |
136 |         // Make sure resources are given as lists
137 | if (pages && pages.include) {
138 | if (!(pages.include instanceof Array)) {
139 | pages.include = [pages.include];
140 | }
141 | }
142 | if (pages && pages.crawl) {
143 | if (!(pages.crawl instanceof Array)) {
144 | pages.crawl = [pages.crawl];
145 | }
146 | }
147 | if (pages && pages.exclude) {
148 | if (!(pages.exclude instanceof Array)) {
149 | pages.exclude = [pages.exclude];
150 | }
151 | }
152 | if (!(css instanceof Array)) {
153 | css = [css];
154 | }
155 |
156 | // Set up empty logger function, if no logger given.
157 | if (!logger) {
158 | logger = function () {};
159 | }
160 |
161 | // Set up context object, to reduce number of arguments
162 | if (!context) {
163 | context = {};
164 | }
165 |
166 | var timeout = context.timeout;
167 |
168 | var auth = context.auth ? context.auth : null;
169 |
170 | context.headers = context.headers ? context.headers : {};
171 |
172 | // If login info is given, do login.
173 | if (auth) {
174 | var loginFunc;
175 | var username = auth.username;
176 | var password = auth.password;
177 | var loginUrl = auth.loginUrl;
178 |
179 | if (auth.loginFunc instanceof Function) {
180 | loginFunc = auth.loginFunc;
181 | } else {
182 | loginFunc = require('./helpers/login')[auth.loginFunc];
183 | }
184 |
185 | loginFunc(loginUrl, username, password, function (cookie) {
186 | context.cookie = cookie;
187 | ucss.search(pages, css, context, timeout, logger, doneCallback);
188 | });
189 | } else {
190 | ucss.search(pages, css, context, timeout, logger, doneCallback);
191 | }
192 | }
193 | };
--------------------------------------------------------------------------------
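A minimal usage sketch of the library entry point documented above. The URLs, selectors and whitelist entry are made up for illustration; the option names (pages.crawl, pages.exclude, context.whitelist, context.timeout) and the result fields follow the code and tests in this repository.

    var ucss = require('ucss');

    var pages = {
        crawl: ['http://localhost:8000/index.html'],   // hypothetical start page
        exclude: ['/admin/', /\?print=1/]              // string prefix or RegExp
    };
    var css = ['http://localhost:8000/style.css'];     // strings, paths or URLs
    var context = { whitelist: ['.js-only'], timeout: 10000 };

    ucss.analyze(pages, css, context, null, function (result) {
        // result.selectors maps each selector to matches_html,
        // occurences_css, matches_uris, etc.; totals sit next to it.
        console.log('Used:', result.total_used,
                    'Unused:', result.total_unused,
                    'Duplicates:', result.total_duplicates);
    });
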
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "author": "Øyvind Håkestad (https://github.com/oyvindeh)",
3 | "contributors": [
4 | "Chris Adams (https://github.com/acdha)",
5 | "Hans S. Tømmerholt (https://github.com/hanssto)",
6 | "Crow Norlander (https://github.com/crowjonah)"
7 | ],
8 | "name": "ucss",
9 | "description": "Find unused CSS rules",
10 | "version": "0.5.0",
11 | "homepage": "https://github.com/oyvindeh/ucss",
12 | "repository": {
13 | "type": "git",
14 | "url": "git@github.com:oyvindeh/ucss"
15 | },
16 | "main": "./lib/ucss.js",
17 | "bin": {
18 | "ucss": "bin/ucss.bin.js"
19 | },
20 | "keywords": [
21 | "css",
22 | "unused"
23 | ],
24 | "dependencies": {
25 | "optimist": "0.6.x",
26 | "async": "2.1.x",
27 | "cssom": "0.3.x",
28 | "cheerio": "0.22.x",
29 | "request": "2.75.x",
30 | "q": "1.4.x"
31 | },
32 | "devDependencies": {
33 | "buster": "0.7.x",
34 | "sinon": "1.17.x"
35 | },
36 | "license": "BSD",
37 | "engines": {
38 | "node": ">=0.6"
39 | },
40 | "scripts": {
41 | "test": "node run-tests.js"
42 | }
43 | }
44 |
--------------------------------------------------------------------------------
/run-tests.js:
--------------------------------------------------------------------------------
1 | require('./test/selectors');
2 | require('./test/general');
3 | require('./test/crawler');
4 | require('./test/http');
5 | require('./test/login');
6 |
--------------------------------------------------------------------------------
/test/crawler.js:
--------------------------------------------------------------------------------
1 | /* global assert:true */
2 |
3 |
4 | if (typeof require !== 'undefined') {
5 | var buster = require('buster');
6 | var lib = require('../lib/ucss');
7 | }
8 |
9 | var assert = buster.referee.assert;
10 | var refute = buster.referee.refute;
11 |
12 |
13 | var pageSetOne = {
14 | '/rules1.css': '.foo {} .bar {}',
15 | '/rules2.css': '.foo {} .bar {} .baz {} .qux {}',
16 | '/rules3.css': '.foo {} .bar {} .baz {} .qux {} .quux {}',
17 | '/markup1.html': [
18 | '',
19 | ' ',
20 | ' ',
21 | " ",
22 | " markup2",
23 | " markup1",
24 | ' ',
25 | ''].join(''),
26 | '/markup2.html': [
27 | '',
28 | ' ',
29 | ' ',
30 | " ",
31 | " markup1",
32 | ' ',
33 | ''].join(''),
34 | '/markup3.html': [
35 | '',
36 | ' ',
37 | ' ',
38 | " ",
39 | " markup1",
40 | ' ',
41 | ''].join(''),
42 | '/external_links.html': [
43 | '',
44 | ' ',
45 | ' ',
46 | " ",
47 | " index.html",
48 | " markup1",
49 | " anotherpage.html",
50 | " markup3.html",
51 | " markup2",
52 | ' ',
53 | ''].join(''),
54 | '/path1/relative_paths.html': [
55 | '',
56 | ' ',
57 | ' ',
58 | ' ',
59 | " index.html",
60 | " markup1",
61 | " markup1",
62 | " markup1",
63 | ' ',
64 | ''].join(''),
65 | '/path1/relative1.html': [
66 | '',
67 | ' ',
68 | ' ',
69 | " ",
70 | ' ',
71 | ''].join(''),
72 | '/relative2.html': [
73 | '',
74 | ' ',
75 | ' ',
76 | " ",
77 | ' ',
78 | ''].join(''),
79 | '/relative3.html': [
80 | '',
81 | ' ',
82 | ' ',
83 | " ",
84 | ' ',
85 | ''].join(''),
86 | '/path2/relative4.html': [
87 | '',
88 | ' ',
89 | ' ',
90 | " ",
91 | ' ',
92 | ''].join(''),
93 | '/fragments.html': [
94 | '',
95 | ' ',
96 | ' ',
97 | ' ',
98 | " link with fragment",
99 | " link with another fragment",
100 | ' ',
101 | ''].join(''),
102 | '/has_no_links.html': [
103 | '',
104 | ' ',
105 | ' ',
106 | " ",
107 | ' ',
108 | ''].join(''),
109 | '/not_linked_to.html': [
110 | '',
111 | ' ',
112 | ' ',
113 | " ",
114 | ' ',
115 | ''].join(''),
116 | '/deadlink.html': [
117 | '',
118 | ' ',
119 | ' ',
120 | " ",
121 | " markup1.html",
122 | " not_existing.html",
123 | ' ',
124 | ''].join(''),
125 | '/subfolder_links.html': [
126 | '',
127 | ' ',
128 | ' ',
129 | " ",
130 | " no_links.html",
131 | " doc1.html",
132 | " doc3.html",
133 | ' ',
134 | ''].join(''),
135 | '/subfolder/doc1.html': [
136 | '',
137 | ' ',
138 | ' ',
139 | " ",
140 | ' ',
141 | ''].join(''),
142 | '/subfolder/doc2.html': [
143 | '',
144 | ' ',
145 | ' ',
146 | " ",
147 | ' ',
148 | ''].join(''),
149 | '/links_with_parameters.html': [
150 | '',
151 | ' ',
152 | ' ',
153 | " ",
154 | " markup1",
155 | " markup1",
156 | ' ',
157 | ''].join(''),
158 | '/non_http_links.html': [
159 | '',
160 | ' ',
161 | ' ',
162 | " ",
163 | " markup1",
164 | ' ',
165 | ''].join(''),
166 | '/no_links.html': [
167 | '',
168 | ' ',
169 | ' ',
170 | " ",
171 | ' ',
172 | ''].join(''),
173 | '/no_links.html?foo=1': [
174 | '',
175 | ' ',
176 | ' ',
177 | " ",
178 | ' ',
179 | ''].join(''),
180 | '/no_links.html?bar=1': [
181 | '',
182 | ' ',
183 | ' ',
184 | " ",
185 | ' ',
186 | ''].join(''),
187 | '/has_parameters.html?foo=1': [
188 | '',
189 | ' ',
190 | ' ',
191 | " ",
192 | ' ',
193 | ''].join('')
194 | };
195 |
196 |
197 | var pageSetTwo = {
198 | '/index.html': "",
199 | '/anotherpage.html': ""
200 | };
201 |
202 | /* buster.assertions.add("ding", {
203 | assert: function (result, expected) {
204 | result == expected;
205 | },
206 | assertMessage: "Expected ${1}, got ${0}."
207 | });*/
208 |
209 |
210 | buster.testCase('uCSS crawler', {
211 | setUp: function () {
212 | var http = require('http');
213 |
214 | this.server = http.createServer(function (req, res) {
215 | res.setHeader('content-type', 'text/html');
216 |
217 | if (req.url in pageSetOne) {
218 | res.end(pageSetOne[req.url]);
219 | } else {
220 | res.writeHead(404);
221 | res.end();
222 | }
223 | }).listen(9988, '0.0.0.0');
224 |
225 | this.anotherServer = http.createServer(function (req, res) {
226 | res.setHeader('content-type', 'text/html');
227 |
228 | if (req.url in pageSetTwo) {
229 | res.end(pageSetTwo[req.url]);
230 | } else {
231 | res.writeHead(404);
232 | res.end();
233 | }
234 | }).listen(9989, '0.0.0.0');
235 | },
236 |
237 | tearDown: function () {
238 | this.server.close();
239 | this.anotherServer.close();
240 | },
241 |
242 | 'can crawl webpages': function (done) {
243 | var pages = {
244 | crawl: ['http://127.0.0.1:9988/markup1.html']
245 | };
246 | var css = ['http://127.0.0.1:9988/rules1.css'];
247 |
248 | var expected = {
249 | selectors: {
250 | '.foo': {
251 | 'matches_html': 1, 'occurences_css': 1 },
252 | '.bar': {
253 | 'matches_html': 1, 'occurences_css': 1 }
254 | },
255 | total_used: 2,
256 | total_unused: 0,
257 | total_ignored: 0,
258 | total_duplicates: 0
259 | };
260 |
261 | lib.analyze(pages, css, null, null, function (result) {
262 | assert.match(result, expected);
263 | done();
264 | });
265 | },
266 |
267 | 'does not go outside given domain': function (done) {
268 | var pages = {
269 | crawl: ['http://127.0.0.1:9988/external_links.html']
270 | };
271 | var css = ['http://127.0.0.1:9988/rules2.css'];
272 |
273 | var expected = {
274 | selectors: {
275 | '.foo': {
276 | 'matches_html': 1, 'occurences_css': 1 },
277 | '.bar': {
278 | 'matches_html': 3, 'occurences_css': 1 },
279 | '.baz': {
280 | 'matches_html': 0, 'occurences_css': 1 },
281 | '.qux': {
282 | 'matches_html': 0, 'occurences_css': 1 }
283 |
284 | },
285 | total_used: 2,
286 | total_unused: 2,
287 | total_ignored: 0,
288 | total_duplicates: 0
289 | };
290 |
291 | lib.analyze(pages, css, null, null, function (result) {
292 | assert.match(result, expected);
293 | done();
294 | });
295 | },
296 |
297 | 'handles relative paths': function (done) {
298 | var pages = {
299 | crawl: ['http://127.0.0.1:9988/path1/relative_paths.html']
300 | };
301 | var css = ['http://127.0.0.1:9988/rules2.css'];
302 |
303 | var expected = {
304 | selectors: {
305 | '.foo': {
306 | 'matches_html': 1, 'occurences_css': 1 },
307 | '.bar': {
308 | 'matches_html': 1, 'occurences_css': 1 },
309 | '.baz': {
310 | 'matches_html': 1, 'occurences_css': 1 },
311 | '.qux': {
312 | 'matches_html': 1, 'occurences_css': 1 }
313 | },
314 | total_used: 4,
315 | total_unused: 0,
316 | total_ignored: 0,
317 | total_duplicates: 0
318 | };
319 |
320 | lib.analyze(pages, css, null, null, function (result) {
321 | assert.match(result, expected);
322 | done();
323 | });
324 | },
325 |
326 | 'handles includes': function (done) {
327 | var pages = {
328 | crawl: ['http://127.0.0.1:9988/path1/relative_paths.html'],
329 | include: ['http://127.0.0.1:9988/not_linked_to.html']
330 | };
331 | var css = ['http://127.0.0.1:9988/rules3.css'];
332 |
333 | var expected = {
334 | selectors: {
335 | '.foo': {
336 | 'matches_html': 1, 'occurences_css': 1 },
337 | '.bar': {
338 | 'matches_html': 1, 'occurences_css': 1 },
339 | '.baz': {
340 | 'matches_html': 1, 'occurences_css': 1 },
341 | '.qux': {
342 | 'matches_html': 1, 'occurences_css': 1 },
343 | '.quux': {
344 | 'matches_html': 1, 'occurences_css': 1 }
345 | },
346 | total_used: 5,
347 | total_unused: 0,
348 | total_ignored: 0,
349 | total_duplicates: 0
350 | };
351 |
352 | lib.analyze(pages, css, null, null, function (result) {
353 | assert.match(result, expected);
354 | done();
355 | });
356 | },
357 |
358 | 'handles excludes': function (done) {
359 | var pages = {
360 | crawl: ['http://127.0.0.1:9988/path1/relative_paths.html'],
361 | exclude: ['http://127.0.0.1:9988/path1/relative1.html']
362 | };
363 | var css = ['http://127.0.0.1:9988/rules3.css'];
364 |
365 | var expected = {
366 | selectors: {
367 | '.foo': {
368 | 'matches_html': 0, 'occurences_css': 1 },
369 | '.bar': {
370 | 'matches_html': 1, 'occurences_css': 1 },
371 | '.baz': {
372 | 'matches_html': 1, 'occurences_css': 1 },
373 | '.qux': {
374 | 'matches_html': 1, 'occurences_css': 1 },
375 | '.quux': {
376 | 'matches_html': 0, 'occurences_css': 1 }
377 | },
378 | total_used: 3,
379 | total_unused: 2,
380 | total_ignored: 0,
381 | total_duplicates: 0
382 | };
383 |
384 | lib.analyze(pages, css, null, null, function (result) {
385 | assert.match(result, expected);
386 | done();
387 | });
388 | },
389 |
390 | 'handles regex excludes in paths': function (done) {
391 | // TODO: Review if this test should be rewritten to be similar to the
392 | // other exclude tests, for consistency.
393 | var excludePattern = /http:\/\/127\.0\.0\.1:9988\/.+\/relative.*/;
394 | var pages = {
395 | crawl: ['http://127.0.0.1:9988/path1/relative_paths.html'],
396 | exclude: [excludePattern]
397 | };
398 | var css = ['http://127.0.0.1:9988/rules2.css'];
399 |
400 | // .foo {} .bar {} .baz {} .qux {}
401 | var actualVisitedPages = [];
402 | var logger = function (result, requestedPage) {
403 | actualVisitedPages.push(requestedPage);
404 | };
405 |
406 | var shouldBeVisitedPages = [
407 | 'http://127.0.0.1:9988/rules2.css',
408 | 'http://127.0.0.1:9988/path1/relative_paths.html',
409 | 'http://127.0.0.1:9988/relative2.html',
410 | 'http://127.0.0.1:9988/relative3.html'
411 | ];
412 |
413 | lib.analyze(pages, css, null, logger, function (result) {
414 |             // Only the non-excluded pages (and the CSS) should have been requested
415 | assert.equals(actualVisitedPages, shouldBeVisitedPages);
416 | done();
417 | });
418 | },
419 |
420 | 'handles regex excludes': function (done) {
421 | // TODO: Review if this test should be rewritten to be similar to the
422 | // other exclude tests, for consistency.
423 | var excludePattern = /relative[2-5]/;
424 | var pages = {
425 | crawl: ['http://127.0.0.1:9988/path1/relative_paths.html'],
426 | exclude: [excludePattern]
427 | };
428 | var css = ['http://127.0.0.1:9988/rules3.css'];
429 |
430 | var visitedPages = [];
431 | var logger = function (result, requestedPage) {
432 | visitedPages.push(requestedPage);
433 | };
434 |
435 | lib.analyze(pages, css, null, logger, function (result) {
436 | var matches = visitedPages.filter(function (url) {
437 | return excludePattern.test(url);
438 | });
439 | // make sure we have no matches
440 | assert.equals(matches.length, 0);
441 | done();
442 | });
443 | },
444 |
445 | 'handles dead links': function (done) {
446 | var pages = {
447 | crawl: ['http://127.0.0.1:9988/deadlink.html']
448 | };
449 | var css = ['http://127.0.0.1:9988/rules1.css'];
450 |
451 | var expected = {
452 | selectors: {
453 | '.foo': {
454 | 'matches_html': 2, 'occurences_css': 1 },
455 | '.bar': {
456 | 'matches_html': 1, 'occurences_css': 1 }
457 | },
458 | total_used: 2,
459 | total_unused: 0,
460 | total_ignored: 0,
461 | total_duplicates: 0
462 | };
463 |
464 | lib.analyze(pages, css, null, null, function (result) {
465 | assert.match(result, expected);
466 | done();
467 | });
468 | },
469 |
470 | 'handles exclude (given as string)': function (done) {
471 | var pages = {
472 | crawl: ['http://127.0.0.1:9988/path1/relative_paths.html'],
473 | exclude: 'http://127.0.0.1:9988/path1/relative1.html'
474 | };
475 | var css = ['http://127.0.0.1:9988/rules3.css'];
476 |
477 | var expected = {
478 | selectors: {
479 | '.foo': {
480 | 'matches_html': 0, 'occurences_css': 1 },
481 | '.bar': {
482 | 'matches_html': 1, 'occurences_css': 1 },
483 | '.baz': {
484 | 'matches_html': 1, 'occurences_css': 1 },
485 | '.qux': {
486 | 'matches_html': 1, 'occurences_css': 1 },
487 | '.quux': {
488 | 'matches_html': 0, 'occurences_css': 1 }
489 | },
490 | total_used: 3,
491 | total_unused: 2,
492 | total_ignored: 0,
493 | total_duplicates: 0
494 | };
495 |
496 | lib.analyze(pages, css, null, null, function (result) {
497 | assert.match(result, expected);
498 | done();
499 | });
500 | },
501 |
502 | 'handles exclude of subfolder using wildcard': function (done) {
503 | var pages = {
504 | crawl: ['http://127.0.0.1:9988/subfolder_links.html'],
505 | exclude: ['http://127.0.0.1:9988/subfolder/*']
506 | };
507 | var css = ['http://127.0.0.1:9988/rules1.css'];
508 |
509 | var expected = {
510 | selectors: {
511 | '.foo': {
512 | 'matches_html': 1, 'occurences_css': 1 },
513 | '.bar': {
514 | 'matches_html': 2, 'occurences_css': 1 }
515 | },
516 | total_used: 2,
517 | total_unused: 0,
518 | total_ignored: 0,
519 | total_duplicates: 0
520 | };
521 |
522 | lib.analyze(pages, css, null, null, function (result) {
523 | assert.match(result, expected);
524 | done();
525 | });
526 | },
527 |
528 | 'Does not follow links in includes': function (done) {
529 | var pages = {
530 | include: ['http://127.0.0.1:9988/path1/relative_paths.html']
531 | };
532 | var css = ['http://127.0.0.1:9988/rules3.css'];
533 |
534 | var expected = {
535 | selectors: {
536 | '.foo': {
537 | 'matches_html': 0, 'occurences_css': 1 },
538 | '.bar': {
539 | 'matches_html': 0, 'occurences_css': 1 },
540 | '.baz': {
541 | 'matches_html': 0, 'occurences_css': 1 },
542 | '.qux': {
543 | 'matches_html': 0, 'occurences_css': 1 },
544 | '.quux': {
545 | 'matches_html': 0, 'occurences_css': 1 }
546 | },
547 | total_used: 0,
548 | total_unused: 5,
549 | total_ignored: 0,
550 | total_duplicates: 0
551 | };
552 |
553 | lib.analyze(pages, css, null, null, function (result) {
554 | assert.match(result, expected);
555 | done();
556 | });
557 | },
558 |
559 |     'can crawl webpages that require login': function (done) {
560 | var pages = {
561 | crawl: ['http://127.0.0.1:9988/markup1.html']
562 | };
563 | var css = ['http://127.0.0.1:9988/rules1.css'];
564 |
565 | var context = {
566 | auth: {
567 | 'username': 'foo',
568 | 'password': 'bar',
569 | 'loginUrl': 'http://example.com/login/',
570 | 'loginFunc': function (url, username, password, callback) {
571 | callback('1234');
572 | }
573 | }
574 | };
575 |
576 | var expected = {
577 | selectors: {
578 | '.foo': {
579 | 'matches_html': 2, 'occurences_css': 1 },
580 | '.bar': {
581 | 'matches_html': 2, 'occurences_css': 1 }
582 | },
583 | total_used: 2,
584 | total_unused: 0,
585 | total_ignored: 0,
586 | total_duplicates: 0
587 | };
588 |
589 | lib.analyze(pages, css, context, null, function (result) {
590 | assert.match(result, expected);
591 | done();
592 | });
593 | },
594 |
595 | 'strips away parameters, and visits URL only once': function (done) {
596 | var pages = {
597 | crawl: ['http://127.0.0.1:9988/links_with_parameters.html']
598 | };
599 | var css = ['http://127.0.0.1:9988/rules1.css'];
600 |
601 | var expected = {
602 | selectors: {
603 | '.foo': {
604 | 'matches_html': 1, 'occurences_css': 1 },
605 | '.bar': {
606 | 'matches_html': 1, 'occurences_css': 1 }
607 | },
608 | total_used: 2,
609 | total_unused: 0,
610 | total_ignored: 0,
611 | total_duplicates: 0
612 | };
613 |
614 | lib.analyze(pages, css, null, null, function (result) {
615 | assert.match(result, expected);
616 | done();
617 | });
618 | },
619 |
620 | 'does not strip away parameters from links in include list': function (done) {
621 | var pages = {
622 | include: ['http://127.0.0.1:9988/has_parameters.html?foo=1']
623 | };
624 | var css = ['http://127.0.0.1:9988/rules1.css'];
625 |
626 | var expected = {
627 | selectors: {
628 | '.foo': {
629 | 'matches_html': 1, 'occurences_css': 1 },
630 | '.bar': {
631 | 'matches_html': 1, 'occurences_css': 1 }
632 | },
633 | total_used: 2,
634 | total_unused: 0,
635 | total_ignored: 0,
636 | total_duplicates: 0
637 | };
638 |
639 | lib.analyze(pages, css, null, null, function (result) {
640 | assert.match(result, expected);
641 | done();
642 | });
643 | },
644 |
645 | 'only loads html, not binaries': function (done) {
646 | var pages = {
647 | include: ['http://127.0.0.1:9988/markup1.html',
648 | 'http://127.0.0.1:9988/document.pdf']
649 | };
650 | var css = ['http://127.0.0.1:9988/rules1.css'];
651 |
652 | var expected = {
653 | selectors: {
654 | '.foo': {
655 | 'matches_html': 1, 'occurences_css': 1 },
656 | '.bar': {
657 | 'matches_html': 0, 'occurences_css': 1 }
658 | },
659 | total_used: 1,
660 | total_unused: 1,
661 | total_ignored: 0,
662 | total_duplicates: 0
663 | };
664 |
665 | lib.analyze(pages, css, null, null, function (result) {
666 | assert.match(result, expected);
667 | done();
668 | });
669 | },
670 |
671 | 'skips protocols that are not http(s)': function (done) {
672 | var pages = {
673 | crawl: ['http://127.0.0.1:9988/non_http_links.html']
674 | };
675 | var css = ['http://127.0.0.1:9988/rules1.css'];
676 |
677 | var expected = {
678 | selectors: {
679 | '.foo': {
680 | 'matches_html': 1, 'occurences_css': 1 },
681 | '.bar': {
682 | 'matches_html': 0, 'occurences_css': 1 }
683 | },
684 | total_used: 1,
685 | total_unused: 1,
686 | total_ignored: 0,
687 | total_duplicates: 0
688 | };
689 |
690 | var visitedPages = [];
691 | var logger = function (result, requestedPage) {
692 | visitedPages.push(requestedPage);
693 | };
694 |
695 | lib.analyze(pages, css, null, logger, function (result) {
696 | // Should count given html and css page, but not mailto link.
697 | assert.equals(visitedPages.length, 2);
698 | done();
699 | });
700 | },
701 |
702 | '// supports @import': function (done) {
703 | var pages = {
704 | include: ['http://127.0.0.1:9988/markup1.html',
705 | 'http://127.0.0.1:9988/markup2.html']
706 | };
707 | var css = ["@import url('http://127.0.0.1:9988/rules1.css');",
708 | "@import url('http://127.0.0.1:9988/rules2.css');"];
709 |
710 | var expected = {
711 | selectors: {
712 | '.foo': {
713 | 'matches_html': 1, 'occurences_css': 1 },
714 | '.bar': {
715 | 'matches_html': 1, 'occurences_css': 1 },
716 | '.baz': {
717 | 'matches_html': 0, 'occurences_css': 1 }
718 | },
719 | total_used: 2,
720 | total_unused: 1,
721 | total_ignored: 0,
722 | total_duplicates: 0
723 | };
724 |
725 | lib.analyze(pages, css, null, null, function (result) {
726 | assert.match(result, expected);
727 | done();
728 | });
729 | },
730 | 'removes URL fragments': function (done) {
731 | var pages = {
732 | crawl: ['http://127.0.0.1:9988/fragments.html']
733 | };
734 | var css = '.foo {}';
735 |
736 | var expected = {
737 | selectors: {
738 | '.foo': {
739 | 'matches_html': 1, 'occurences_css': 1 }
740 | }
741 | };
742 |
743 | lib.analyze(pages, css, null, null, function (result) {
744 | assert.match(result, expected);
745 | done();
746 | });
747 | }
748 | });
749 |
--------------------------------------------------------------------------------
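The crawler tests above pass a custom logger to record which resources get requested. The same pattern works outside the test harness; this sketch assumes a server like the one the tests start (address and pages are illustrative), and the logger signature (response, requestedPage, ...) matches the one used in the tests.

    var ucss = require('ucss');

    var visited = [];
    // Same logger shape as in the tests: (response, requestedPage, ...)
    var logger = function (result, requestedPage) {
        visited.push(requestedPage);
    };

    var pages = { crawl: ['http://127.0.0.1:9988/markup1.html'] };  // as in the tests
    var css = ['http://127.0.0.1:9988/rules1.css'];

    ucss.analyze(pages, css, null, logger, function () {
        console.log('Requested %d resources:', visited.length);
        visited.forEach(function (page) { console.log(' -', page); });
    });
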
/test/general.js:
--------------------------------------------------------------------------------
1 | /* global assert:true */
2 |
3 | var fs = require('fs');
4 |
5 | if (typeof require !== 'undefined') {
6 | var buster = require('buster');
7 | var lib = require('../lib/ucss');
8 | }
9 |
10 | var assert = buster.referee.assert;
11 | var refute = buster.referee.refute;
12 |
13 |
14 | buster.testCase('uCSS', {
15 | setUp: function () {
16 | },
17 |
18 | tearDown: function () {
19 | },
20 |
21 | 'handles no markup given': function (done) {
22 | var pages = {};
23 | var css = '.foo {}';
24 |
25 | var expected = {};
26 |
27 | lib.analyze(pages, css, null, null, function (result) {
28 | assert.match(result, expected);
29 | done();
30 | });
31 | },
32 |
33 | 'finds duplicate CSS rules when no markup given': function (done) {
34 | var css = '.foo {} .bar {} .foo {}';
35 |
36 | var expected = {
37 | selectors: {
38 | '.foo': {
39 | 'matches_html': 0, 'occurences_css': 2 },
40 | '.bar': {
41 | 'matches_html': 0, 'occurences_css': 1 }
42 | },
43 | total_used: 0,
44 | total_unused: 2,
45 | total_ignored: 0,
46 | total_duplicates: 1
47 | };
48 |
49 | lib.analyze(null, css, null, null, function (result) {
50 | assert.match(result, expected);
51 | done();
52 | });
53 | },
54 |
55 | 'handles no CSS given': function (done) {
56 | var pages = {
57 | include: ['']
58 | };
59 |
60 | var expected = {};
61 |
62 | lib.analyze(pages, null, null, null, function (result) {
63 | assert.equals(result, expected);
64 | done();
65 | });
66 | },
67 |     'handles empty CSS': function (done) {
68 |         var pages = {
69 |             include: ['']
70 |         };
71 |         var css = '';
72 |         var expected = {};
73 |
74 |         lib.analyze(pages, css, null, null, function (result) {
75 | assert.equals(result, expected);
76 | done();
77 | });
78 | },
79 |
80 | 'works with several css instances': function (done) {
81 | var pages = {
82 | include: [""]
83 | };
84 | var css = ['.foo {}', '.bar {}'];
85 |
86 | var expected = {
87 | selectors: {
88 | '.foo': {
89 | 'matches_html': 1, 'occurences_css': 1 },
90 | '.bar': {
91 | 'matches_html': 1, 'occurences_css': 1 }
92 | },
93 | total_used: 2,
94 | total_unused: 0,
95 | total_ignored: 0,
96 | total_duplicates: 0
97 | };
98 |
99 | lib.analyze(pages, css, null, null, function (result) {
100 | assert.match(result, expected);
101 | done();
102 | });
103 | },
104 |
105 | 'finds duplicates': function (done) {
106 | var pages = {
107 | include: [""]
108 | };
109 | var css = ['.foo {} .bar{} .foo{} .foo{}',
110 | '.bar{} .baz{}'];
111 |
112 | var expected = {
113 | selectors: {
114 | '.foo': {
115 | 'matches_html': 1, 'occurences_css': 3 },
116 | '.bar': {
117 | 'matches_html': 0, 'occurences_css': 2 },
118 | '.baz': {
119 | 'matches_html': 0, 'occurences_css': 1 }
120 | },
121 | total_used: 1,
122 | total_unused: 2,
123 | total_ignored: 0,
124 | total_duplicates: 2
125 | };
126 |
127 | lib.analyze(pages, css, null, null, function (result) {
128 | assert.match(result, expected);
129 | done();
130 | });
131 | },
132 |
133 | 'finds unused rules': function (done) {
134 | var pages = {
135 | include: [fs.readFileSync('fixtures/markup.html').toString()]
136 | };
137 | var css = fs.readFileSync('fixtures/rules.css').toString();
138 |
139 | var expected = {
140 | selectors: {
141 | '*': {
142 | 'matches_html': 9, 'occurences_css': 1 },
143 | '.foo': {
144 | 'matches_html': 1, 'occurences_css': 1 },
145 | '.bar': {
146 | 'matches_html': 1, 'occurences_css': 2 },
147 | '.foo .bar': {
148 | 'matches_html': 0, 'occurences_css': 1 },
149 | '.bar #baz': {
150 | 'matches_html': 1, 'occurences_css': 1 },
151 | '.qux': {
152 | 'matches_html': 1, 'occurences_css': 1 },
153 | '.quux': {
154 | 'matches_html': 0, 'occurences_css': 1 },
155 | 'span[dir="ltr"]': {
156 | 'matches_html': 1, 'occurences_css': 1 },
157 | '.bar span[dir="ltr"]': {
158 | 'matches_html': 1, 'occurences_css': 1 },
159 | '.foo span[dir="ltr"]': {
160 | 'matches_html': 0, 'occurences_css': 1 },
161 | '.foo .qux .bar': {
162 | 'matches_html': 0, 'occurences_css': 1 },
163 | '.foo .qux .bar .baz': {
164 | 'matches_html': 0, 'occurences_css': 1 }
165 | },
166 | total_used: 7,
167 | total_unused: 5,
168 | total_ignored: 8,
169 | total_duplicates: 1
170 | };
171 |
172 | lib.analyze(pages, css, null, null, function (result) {
173 | assert.match(result, expected);
174 | done();
175 | });
176 | },
177 |
178 | 'finds unused rules, with whitelist': function (done) {
179 | var pages = {
180 | include: [fs.readFileSync('fixtures/markup.html').toString()]
181 | };
182 | var css = fs.readFileSync('fixtures/rules.css').toString();
183 | var context = {
184 | whitelist: ['.foo .qux .bar', '.foo .qux .bar .baz']
185 | };
186 |
187 | var expected = {
188 | selectors: {
189 | '*': {
190 | 'matches_html': 9, 'occurences_css': 1, whitelisted: false },
191 | '.foo': {
192 | 'matches_html': 1, 'occurences_css': 1, whitelisted: false },
193 | '.bar': {
194 | 'matches_html': 1, 'occurences_css': 2, whitelisted: false },
195 | '.foo .bar': {
196 | 'matches_html': 0, 'occurences_css': 1, whitelisted: false },
197 | '.bar #baz': {
198 | 'matches_html': 1, 'occurences_css': 1, whitelisted: false },
199 | '.qux': {
200 | 'matches_html': 1, 'occurences_css': 1, whitelisted: false },
201 | '.quux': {
202 | 'matches_html': 0, 'occurences_css': 1, whitelisted: false },
203 | 'span[dir="ltr"]': {
204 | 'matches_html': 1, 'occurences_css': 1, whitelisted: false },
205 | '.bar span[dir="ltr"]': {
206 | 'matches_html': 1, 'occurences_css': 1, whitelisted: false },
207 | '.foo span[dir="ltr"]': {
208 | 'matches_html': 0, 'occurences_css': 1, whitelisted: false },
209 | '.foo .qux .bar': {
210 | 'matches_html': 0, 'occurences_css': 1, whitelisted: true },
211 | '.foo .qux .bar .baz': {
212 | 'matches_html': 0, 'occurences_css': 1, whitelisted: true }
213 | },
214 | total_used: 7,
215 | total_unused: 3,
216 | total_ignored: 10,
217 | total_duplicates: 1
218 | };
219 |
220 | lib.analyze(pages, css, context, null, function (result) {
221 | assert.match(result, expected);
222 | done();
223 | });
224 | },
225 |
226 | 'finds unused rules in several files': function (done) {
227 | var pages = {
228 | include: [fs.readFileSync('fixtures/markup.html').toString(),
229 | fs.readFileSync('fixtures/markup2.html').toString()]
230 | };
231 | var css = fs.readFileSync('fixtures/rules.css').toString();
232 |
233 | var expected = {
234 | selectors: {
235 | '*': {
236 | 'matches_html': 18, 'occurences_css': 1 },
237 | '.foo': {
238 | 'matches_html': 3, 'occurences_css': 1 },
239 | '.bar': {
240 | 'matches_html': 2, 'occurences_css': 2 },
241 | '.foo .bar': {
242 | 'matches_html': 0, 'occurences_css': 1 },
243 | '.bar #baz': {
244 | 'matches_html': 2, 'occurences_css': 1 },
245 | '.qux': {
246 | 'matches_html': 2, 'occurences_css': 1 },
247 | '.quux': {
248 | 'matches_html': 0, 'occurences_css': 1 },
249 | 'span[dir="ltr"]': {
250 | 'matches_html': 2, 'occurences_css': 1 },
251 | '.bar span[dir="ltr"]': {
252 | 'matches_html': 2, 'occurences_css': 1 },
253 | '.foo span[dir="ltr"]': {
254 | 'matches_html': 1, 'occurences_css': 1 },
255 | '.foo .qux .bar': {
256 | 'matches_html': 0, 'occurences_css': 1 },
257 | '.foo .qux .bar .baz': {
258 | 'matches_html': 0, 'occurences_css': 1 }
259 | },
260 | total_used: 8,
261 | total_unused: 4,
262 | total_ignored: 8,
263 | total_duplicates: 1
264 | };
265 |
266 | lib.analyze(pages, css, null, null, function (result) {
267 | assert.match(result, expected);
268 | done();
269 | });
270 | },
271 |
272 | 'checks that lists works as params': function (done) {
273 | var pages = {
274 | include: [""]
275 | };
276 | var css = ['.foo {}'];
277 |
278 | var expected = {
279 | selectors: {
280 | '.foo': {
281 | 'matches_html': 1, 'occurences_css': 1 }
282 | },
283 | total_used: 1,
284 | total_unused: 0,
285 | total_ignored: 0,
286 | total_duplicates: 0
287 | };
288 |
289 | lib.analyze(pages, css, null, null, function (result) {
290 | assert.match(result, expected);
291 | done();
292 | });
293 | },
294 |
295 | 'checks that strings works as params': function (done) {
296 | var pages = {
297 | include: ""
298 | };
299 | var css = '.foo {}';
300 |
301 | var expected = {
302 | selectors: {
303 | '.foo': {
304 | 'matches_html': 1, 'occurences_css': 1 }
305 | },
306 | total_used: 1,
307 | total_unused: 0,
308 | total_ignored: 0,
309 | total_duplicates: 0
310 | };
311 |
312 | lib.analyze(pages, css, null, null, function (result) {
313 | assert.match(result, expected);
314 | done();
315 | });
316 | }
317 | });
318 |
--------------------------------------------------------------------------------
/test/http.js:
--------------------------------------------------------------------------------
1 | /* global assert:true */
2 |
3 |
4 | if (typeof require !== 'undefined') {
5 | var buster = require('buster');
6 | var lib = require('../lib/ucss');
7 | }
8 |
9 | var assert = buster.referee.assert;
10 | var refute = buster.referee.refute;
11 |
12 |
13 | buster.testCase('uCSS (using http)', {
14 | setUp: function () {
15 | var http = require('http');
16 |
17 | this.server = http.createServer(function (req, res) {
18 | res.setHeader('content-type', 'text/html');
19 |
20 | if ('/markup1.html' === req.url) {
21 | res.end("");
22 | } else if ('/markup2.html' === req.url) {
23 | res.end("");
24 | } else if ('/rules1.css' === req.url) {
25 | res.end('.foo {} .bar {}');
26 | } else if ('/rules2.css' === req.url) {
27 | res.end('.baz {}');
28 | } else if ('/document.pdf' === req.url) {
29 | // HTML, but wrong content type
30 | res.setHeader('content-type', 'application/pdf');
31 | res.end("");
32 | } else if ('/hasHeader.html' === req.url) {
33 | if (req.headers['accept-language'] === 'nb-no') {
34 | res.end("");
35 | } else {
36 | res.end("");
37 | }
38 | } else if ('/hasHeader.css' === req.url) {
39 | if (req.headers['accept-language'] === 'nb-no') {
40 | res.end("[lang='nb'] {} .foo {}");
41 | } else {
42 | res.end("[lang='en'] {} .bar {}");
43 | }
44 | } else {
45 | res.writeHead(404);
46 | res.end();
47 | }
48 | }).listen(9988, '0.0.0.0');
49 | },
50 |
51 | tearDown: function () {
52 | this.server.close();
53 | },
54 |
55 | 'can load and process resources': function (done) {
56 | var pages = {
57 | include: ['http://127.0.0.1:9988/markup1.html',
58 | 'http://127.0.0.1:9988/markup2.html']
59 | };
60 | var css = ['http://127.0.0.1:9988/rules1.css',
61 | 'http://127.0.0.1:9988/rules2.css'];
62 |
63 | var expected = {
64 | selectors: {
65 | '.foo': {
66 | 'matches_html': 1, 'occurences_css': 1 },
67 | '.bar': {
68 | 'matches_html': 1, 'occurences_css': 1 },
69 | '.baz': {
70 | 'matches_html': 0, 'occurences_css': 1 }
71 | },
72 | total_used: 2,
73 | total_unused: 1,
74 | total_ignored: 0,
75 | total_duplicates: 0
76 | };
77 |
78 | lib.analyze(pages, css, null, null, function (result) {
79 | assert.match(result, expected);
80 | done();
81 | });
82 | },
83 |
84 |     // Doesn't do actual login, but checks that occurrences are doubled, since
85 | // every page is checked twice (once with cookie set, and once without).
86 | "finds unused rules in several files (with 'login')": function (done) {
87 | var pages = {
88 | include: ['http://127.0.0.1:9988/markup1.html',
89 | 'http://127.0.0.1:9988/markup2.html']
90 | };
91 | var css = ['http://127.0.0.1:9988/rules1.css',
92 | 'http://127.0.0.1:9988/rules2.css'];
93 |
94 | var context = {
95 | auth: {
96 | 'username': 'foo',
97 | 'password': 'bar',
98 | 'loginUrl': 'http://example.com/login/',
99 | 'loginFunc': function (url, username, password, callback) {
100 | callback('1234');
101 | }
102 | }
103 | };
104 |
105 | var expected = {
106 | selectors: {
107 | '.foo': {
108 | 'matches_html': 2, 'occurences_css': 1 },
109 | '.bar': {
110 | 'matches_html': 2, 'occurences_css': 1 },
111 | '.baz': {
112 | 'matches_html': 0, 'occurences_css': 1 }
113 | },
114 | total_used: 2,
115 | total_unused: 1,
116 | total_ignored: 0,
117 | total_duplicates: 0
118 | };
119 |
120 | lib.analyze(pages, css, context, null, function (result) {
121 | assert.match(result, expected);
122 | done();
123 | });
124 | },
125 |
126 | 'can send headers': function (done) {
127 | var pages = {
128 | include: ['http://127.0.0.1:9988/hasHeader.html']
129 | };
130 | var css = ['http://127.0.0.1:9988/hasHeader.css'];
131 |
132 | var context = {};
133 | context.headers = { 'accept-language': 'nb-no' };
134 |
135 | var expected = {
136 | selectors: {
137 | "[lang='nb']": {
138 | 'matches_html': 1, 'occurences_css': 1 },
139 | '.foo': {
140 | 'matches_html': 1, 'occurences_css': 1 }
141 | },
142 | total_used: 2,
143 | total_unused: 0,
144 | total_ignored: 0,
145 | total_duplicates: 0
146 | };
147 |
148 | lib.analyze(pages, css, context, null, function (result) {
149 | assert.match(result, expected);
150 | done();
151 | });
152 | }
153 | });
154 |
--------------------------------------------------------------------------------
/test/login.js:
--------------------------------------------------------------------------------
1 | /* global assert:true */
2 |
3 | /* PLEASE NOTE: There are tests elsewhere that also do login. */
4 |
5 |
6 | if (typeof require !== 'undefined') {
7 | var buster = require('buster');
8 | var lib = require('../lib/ucss');
9 | }
10 |
11 | var assert = buster.referee.assert;
12 | var refute = buster.referee.refute;
13 |
14 |
15 | var pageSet = {
16 | '/rules1.css': '.logged-in {} .logged-out {} .foo {}',
17 | '/markup1.html': {
18 | 'logged-out':
19 | ['',
20 | ' ',
21 | ' ',
22 | " ",
23 | " You are logged in
",
24 | ' ',
25 | ''].join(''),
26 | 'logged-in':
27 | ['',
28 | ' ',
29 | ' ',
30 | " ",
31 | " You are not logged in
",
32 | ' ',
33 | ''].join('')
34 | }
35 | };
36 |
37 | buster.testCase('uCSS', {
38 | setUp: function () {
39 | var http = require('http');
40 | this.server = http.createServer(function (req, res) {
41 | res.setHeader('content-type', 'text/html');
42 |
43 | if (req.url) {
44 | if (pageSet && req.url.indexOf('html') > -1) { // HTML
45 | if (req.headers['cookie']) { // Logged in
46 | res.end(pageSet[req.url]['logged-in']);
47 | } else { // Not logged in
48 | res.end(pageSet[req.url]['logged-out']);
49 | }
50 | } else { // CSS
51 | res.end(pageSet[req.url]);
52 | }
53 | } else {
54 | res.writeHead(404);
55 | res.end();
56 | }
57 | }).listen(9988, '0.0.0.0');
58 | },
59 |
60 | tearDown: function () {
61 | this.server.close();
62 | },
63 |
64 | 'finds different classes when logged in than when logged out (and vice versa)': function (done) {
65 | var pages = {
66 | crawl: ['http://127.0.0.1:9988/markup1.html']
67 | };
68 | var css = ['http://127.0.0.1:9988/rules1.css'];
69 |
70 | var context = {
71 | auth: {
72 | 'username': 'foo',
73 | 'password': 'bar',
74 | 'loginUrl': 'http://example.com/login/',
75 | 'loginFunc': function (url, username, password, callback) {
76 | callback('1234');
77 | }
78 | }
79 | };
80 |
81 | var expected = {
82 | selectors: {
83 | '.foo': {
84 | 'matches_html': 2, 'occurences_css': 1 },
85 | '.logged-in': {
86 | 'matches_html': 1, 'occurences_css': 1 },
87 | '.logged-out': {
88 | 'matches_html': 1, 'occurences_css': 1 }
89 | },
90 | total_used: 3,
91 | total_unused: 0,
92 | total_ignored: 0,
93 | total_duplicates: 0
94 | };
95 |
96 | lib.analyze(pages, css, context, null, function (result) {
97 | assert.match(result, expected);
98 | done();
99 | });
100 | },
101 |
102 | 'does not find classes for logged in pages when logged out': function (done) {
103 | var pages = {
104 | crawl: ['http://127.0.0.1:9988/markup1.html']
105 | };
106 | var css = ['http://127.0.0.1:9988/rules1.css'];
107 |
108 | var expected = {
109 | selectors: {
110 | '.foo': {
111 | 'matches_html': 1, 'occurences_css': 1 },
112 | '.logged-in': {
113 | 'matches_html': 0, 'occurences_css': 1 },
114 | '.logged-out': {
115 | 'matches_html': 1, 'occurences_css': 1 }
116 | },
117 | total_used: 2,
118 | total_unused: 1,
119 | total_ignored: 0,
120 | total_duplicates: 0
121 | };
122 |
123 | lib.analyze(pages, css, null, null, function (result) {
124 | assert.match(result, expected);
125 | done();
126 | });
127 | }
128 | });
129 |
--------------------------------------------------------------------------------
/test/selectors.js:
--------------------------------------------------------------------------------
1 | /* global assert:true */
2 |
3 | var fs = require('fs');
4 |
5 | if (typeof require !== 'undefined') {
6 | var buster = require('buster');
7 | var lib = require('../lib/ucss');
8 | }
9 |
10 | var assert = buster.referee.assert;
11 | var refute = buster.referee.refute;
12 |
13 |
14 | /**
15 | * Simple CSS selector tests.
16 | *
17 | * As uCSS does not have its own selector engine, this is not meant as a
18 | * complete CSS selector test suite.
19 | */
20 | buster.testCase('CSS Selectors:', {
21 | setUp: function () {
22 | },
23 |
24 | tearDown: function () {
25 | },
26 |
27 | 'Class': function (done) {
28 | var pages = {
29 | include: [""]
30 | };
31 | var css = '.foo {}';
32 |
33 | var expected = {
34 | selectors: {
35 | '.foo': {
36 | 'matches_html': 1, 'occurences_css': 1 }
37 | }
38 | };
39 |
40 | lib.analyze(pages, css, null, null, function (result) {
41 | assert.match(result, expected);
42 | done();
43 | });
44 | },
45 |
46 | 'Id': function (done) {
47 | var pages = {
48 | include: [""]
49 | };
50 | var css = '#foo {}';
51 |
52 | var expected = {
53 | selectors: {
54 | '#foo': {
55 | 'matches_html': 1, 'occurences_css': 1 }
56 | }
57 | };
58 |
59 |
60 | lib.analyze(pages, css, null, null, function (result) {
61 | assert.match(result, expected);
62 | done();
63 | });
64 | },
65 |
66 | 'All': function (done) {
67 | var pages = {
68 | include: ['']
69 | };
70 | var css = '* {}';
71 |
72 | var expected = {
73 | selectors: {
74 | '*': {
75 | 'matches_html': 4, 'occurences_css': 1 }
76 | }
77 | };
78 |
79 | lib.analyze(pages, css, null, null, function (result) {
80 | assert.match(result, expected);
81 | done();
82 | });
83 | },
84 |
85 | 'Element': function (done) {
86 | var pages = {
87 | include: ['']
88 | };
89 | var css = 'div {}';
90 |
91 | var expected = {
92 | selectors: {
93 | 'div': {
94 | 'matches_html': 1, 'occurences_css': 1 }
95 | }
96 | };
97 |
98 | lib.analyze(pages, css, null, null, function (result) {
99 | assert.match(result, expected);
100 | done();
101 | });
102 | },
103 |
104 | 'Element, element': function (done) {
105 | var pages = {
106 | include: [fs.readFileSync('fixtures/markup.html').toString()]
107 | };
108 | var css = '.foo, .bar { color: red; }';
109 |
110 |
111 | var expected = {
112 | selectors: {
113 | '.foo': {
114 | 'matches_html': 1, 'occurences_css': 1 },
115 | '.bar': {
116 | 'matches_html': 1, 'occurences_css': 1 }
117 |
118 | }
119 | };
120 |
121 | lib.analyze(pages, css, null, null, function (result) {
122 | assert.match(result, expected);
123 | done();
124 | });
125 | },
126 |
127 | 'Element + element': function (done) {
128 | var pages = {
129 | include: [fs.readFileSync('fixtures/markup.html').toString()]
130 | };
131 | var css = '.foo + .bar { color: red; }';
132 |
133 | var expected = {
134 | selectors: {
135 | '.foo + .bar': {
136 | 'matches_html': 1, 'occurences_css': 1 }
137 | }
138 | };
139 |
140 | lib.analyze(pages, css, null, null, function (result) {
141 | assert.match(result, expected);
142 | done();
143 | });
144 | },
145 |
146 | '[attribute=value]': function (done) {
147 | var pages = {
148 | include: [""]
149 | };
150 | var css = "div[dir='rtl'] {}";
151 |
152 | var expected = {
153 | selectors: {
154 | "div[dir='rtl']": {
155 | 'matches_html': 1, 'occurences_css': 1 }
156 | }
157 | };
158 |
159 | lib.analyze(pages, css, null, null, function (result) {
160 | assert.match(result, expected);
161 | done();
162 | });
163 | },
164 |
165 | 'Element1~element2': function (done) {
166 | var pages = {
167 |             include: ['']
168 | };
169 | var css = 'div~br {}';
170 |
171 | var expected = {
172 | selectors: {
173 | 'div~br': {
174 | 'matches_html': 2, 'occurences_css': 1 }
175 | }
176 | };
177 |
178 | lib.analyze(pages, css, null, null, function (result) {
179 | assert.match(result, expected);
180 | done();
181 | });
182 | },
183 |
184 |
185 | 'handles pseudo elements': function (done) {
186 | var pages = {
187 | include: [""]
188 | };
189 | var css = ['.foo::link{} .bar:lang(nb){} .foo::link{}',
190 | '.foo{} .foo{} .bar{} .baz:after{} input:invalid{}'].join('');
191 |
192 | var expected = {
193 | selectors: {
194 | '.bar': {
195 | 'matches_html': 0, 'occurences_css': 1 },
196 | '.bar:lang(nb)': {
197 | 'matches_html': 0, 'occurences_css': 1 },
198 | '.baz:after': {
199 | 'matches_html': 0, 'occurences_css': 1 },
200 | '.foo': {
201 | 'matches_html': 1, 'occurences_css': 2 },
202 | '.foo::link': {
203 | 'matches_html': 1, 'occurences_css': 2 },
204 | 'input:invalid': {
205 | 'matches_html': 0, 'occurences_css': 1 }
206 | }
207 | };
208 |
209 | lib.analyze(pages, css, null, null, function (result) {
210 | assert.match(result, expected);
211 | done();
212 | });
213 | }
214 | });
215 |
216 |
217 | /**
218 |  * Checks that @-rules are handled.
219 | *
220 | * The goal for now is to not crash when these are encountered.
221 | */
222 | buster.testCase('CSS @-rules:', {
223 | setUp: function () {
224 | },
225 |
226 | tearDown: function () {
227 | },
228 |
229 | 'Nested selectors (@media)': function (done) {
230 | var pages = {
231 | include: [fs.readFileSync('fixtures/markup.html').toString()]
232 | };
233 | var css = ['.foo { color: red; } ',
234 | '@media all and (min-width: 500px) {',
235 | '.bar { background: blue; }',
236 | ' }'].join('');
237 |
238 | var expected = {
239 | selectors: {
240 | '.foo': {
241 | 'matches_html': 1, 'occurences_css': 1 },
242 | '.bar': {
243 | 'matches_html': 1, 'occurences_css': 1 }
244 | }
245 | };
246 |
247 | lib.analyze(pages, css, null, null, function (result) {
248 | assert.match(result, expected);
249 | done();
250 | });
251 | },
252 |
253 |     'Selectors succeeding nested selectors (@media)': function (done) {
254 | var pages = {
255 | include: [fs.readFileSync('fixtures/markup.html').toString()]
256 | };
257 | var css = ['.foo { color: red; } ',
258 | '@media all and (min-width: 500px) ',
259 | '{ .bar { background: blue; } ',
260 | '} .qux { float: left; }'].join('');
261 |
262 |
263 | var expected = {
264 | selectors: {
265 | '.foo': {
266 | 'matches_html': 1, 'occurences_css': 1 },
267 | '.bar': {
268 | 'matches_html': 1, 'occurences_css': 1 },
269 | '.qux': {
270 | 'matches_html': 1, 'occurences_css': 1 }
271 | }
272 | };
273 |
274 | lib.analyze(pages, css, null, null, function (result) {
275 | assert.match(result, expected);
276 | done();
277 | });
278 | },
279 |
280 | 'Ignores @font-face': function (done) {
281 | var pages = {
282 | include: [""]
283 | };
284 | var css = ["@font-face {font-family: 'MyWebFont'; ",
285 | "src: url('webfont.eot'); src: url('webfont.eot?#iefix') ",
286 | "format('embedded-opentype'), url('webfont.woff') ",
287 | "format('woff'), url('webfont.ttf') format('truetype'), ",
288 | "url('webfont.svg#svgFontName') format('svg');}"].join('');
289 |
290 | var expected = {
291 | selectors: {
292 | '@font-face': {
293 | 'matches_html': 0, 'occurences_css': 1, ignored: true }
294 | }
295 | };
296 |
297 | lib.analyze(pages, css, null, null, function (result) {
298 | assert.match(result, expected);
299 | done();
300 | });
301 | },
302 |
303 | 'Ignores @keyframe': function (done) {
304 | var pages = {
305 | include: [""]
306 | };
307 | var css = ['@-webkit-keyframes progress-bar-stripes{',
308 | 'from{background-position:40px 0}',
309 | 'to{background-position:0 0}',
310 | '}@-moz-keyframes progress-bar-stripes{',
311 | 'from{background-position:40px 0}',
312 | 'to{background-position:0 0}',
313 | '}@-ms-keyframes progress-bar-stripes{',
314 | 'from{background-position:40px 0}',
315 | 'to{background-position:0 0}',
316 | '}@-o-keyframes progress-bar-stripes{',
317 | 'from{background-position:0 0}',
318 | 'to{background-position:40px 0}',
319 | '}@keyframes progress-bar-stripes{',
320 | 'from{background-position:40px 0}',
321 | 'to{background-position:0 0}}'].join('');
322 |
323 | var expected = {
324 | selectors: {
325 | '@-webkit-keyframes progress-bar-stripes': {
326 | 'matches_html': 0, 'occurences_css': 1, ignored: true },
327 | '@-moz-keyframes progress-bar-stripes': {
328 | 'matches_html': 0, 'occurences_css': 1, ignored: true },
329 | '@-ms-keyframes progress-bar-stripes': {
330 | 'matches_html': 0, 'occurences_css': 1, ignored: true },
331 | '@-o-keyframes progress-bar-stripes': {
332 | 'matches_html': 0, 'occurences_css': 1, ignored: true },
333 | '@keyframes progress-bar-stripes': {
334 | 'matches_html': 0, 'occurences_css': 1, ignored: true }
335 | },
336 | total_used: 0,
337 | total_ignored: 5
338 | };
339 |
340 | lib.analyze(pages, css, null, null, function (result) {
341 | assert.match(result, expected);
342 | done();
343 | });
344 | },
345 |
346 | 'Handles @supports': function (done) {
347 | var pages = {
348 | include: [""]
349 | };
350 | var css = ['.foo { background: blue } ',
351 | '@supports (box-shadow: 2px 2px 2px black) { ',
352 | '.bar { box-shadow: 2px 2px 2px black; }} ',
353 | '@-prefix-supports (box-shadow: 2px 2px 2px black) { ',
354 | '.bar { box-shadow: 2px 2px 2px black; }} ',
355 | '.baz { background: red }'].join('');
356 |
357 | var expected = {
358 | selectors: {
359 | '.foo': {
360 | 'matches_html': 1, 'occurences_css': 1 },
361 | '.baz': {
362 | 'matches_html': 1, 'occurences_css': 1 }
363 | }
364 | };
365 |
366 | lib.analyze(pages, css, null, null, function (result) {
367 | assert.match(result, expected);
368 | done();
369 | });
370 | },
371 |
372 | 'Handles @document': function (done) {
373 | var pages = {
374 | include: [""]
375 | };
376 | var css = ['.foo { background: blue } ',
377 | '@document url(http://www.example.com/), ',
378 | 'url-prefix(http://www.example.com/Style/), ',
379 | "domain(example.com), regexp('https:.*') { ",
380 | 'body { color: red; background: blue; }}',
381 | '@-prefix-document url(http://www.example.com/), ',
382 | 'url-prefix(http://www.example.com/Style/), ',
383 | "domain(example.com), regexp('https:.*') { ",
384 | 'body { color: red; background: blue; }}',
385 | '.baz { background: red }'].join('');
386 |
387 | var expected = {
388 | selectors: {
389 | '.foo': {
390 | 'matches_html': 1, 'occurences_css': 1 },
391 | '.baz': {
392 | 'matches_html': 1, 'occurences_css': 1 }
393 | }
394 | };
395 |
396 | lib.analyze(pages, css, null, null, function (result) {
397 | assert.match(result, expected);
398 | done();
399 | });
400 | }
401 | });
--------------------------------------------------------------------------------