├── .gitignore
├── app
│   ├── lib
│   │   ├── version.rb
│   │   ├── exception.rb
│   │   ├── cache.rb
│   │   ├── log_and_profile.rb
│   │   ├── liquid_ext.rb
│   │   ├── commands.rb
│   │   └── local_server.rb
│   ├── glim.gemspec
│   └── bin
│       └── glim
├── plugins
│   ├── glim-seo-tag
│   │   ├── glim-seo-tag.rb
│   │   └── glim-seo-tag.gemspec
│   ├── glim-feed
│   │   ├── glim-feed.rb
│   │   └── glim-feed.gemspec
│   ├── glim-encode-email
│   │   ├── glim-encode-email.rb
│   │   └── glim-encode-email.gemspec
│   ├── glim-edit-in-textmate
│   │   ├── glim-edit-in-textmate.gemspec
│   │   └── glim-edit-in-textmate.rb
│   ├── glim-haml-converter
│   │   ├── glim-haml-converter.gemspec
│   │   └── glim-haml-converter.rb
│   └── glim-sass-converter
│       ├── glim-sass-converter.gemspec
│       └── glim-sass-converter.rb
└── README.md
/.gitignore:
--------------------------------------------------------------------------------
1 | *.gem
2 |
--------------------------------------------------------------------------------
/app/lib/version.rb:
--------------------------------------------------------------------------------
1 | module Glim
2 | VERSION = '0.1.5'
3 | end
4 |
--------------------------------------------------------------------------------
/plugins/glim-seo-tag/glim-seo-tag.rb:
--------------------------------------------------------------------------------
1 | module GlimExtensions
2 | class MockSEOTag < Liquid::Tag
3 | def initialize(tag_name, markup, options)
4 | super
5 | end
6 |
7 | def render(context)
8 | ""
9 | end
10 | end
11 | end
12 |
13 | Liquid::Template.register_tag("seo", GlimExtensions::MockSEOTag)
14 |
--------------------------------------------------------------------------------
/plugins/glim-feed/glim-feed.rb:
--------------------------------------------------------------------------------
1 | module GlimExtensions
2 | class MockFeedMetaTag < Liquid::Tag
3 | def initialize(tag_name, markup, options)
4 | super
5 | end
6 |
7 | def render(context)
8 | ""
9 | end
10 | end
11 | end
12 |
13 | Liquid::Template.register_tag("feed_meta", GlimExtensions::MockFeedMetaTag)
14 |
--------------------------------------------------------------------------------
/plugins/glim-encode-email/glim-encode-email.rb:
--------------------------------------------------------------------------------
1 | module EncodeEmailFilter
2 | def encode_email(input)
3 | input.gsub(/(^mailto:)|\p{Alnum}+/) do |char|
4 | char.bytes.inject(String.new) do |result, byte|
5 | result << ($1 ? '&#%d;' : '%%%02X') % byte
6 | end
7 | end unless input.nil?
8 | end
9 | end
10 |
11 | Liquid::Template.register_filter(EncodeEmailFilter)
12 |
--------------------------------------------------------------------------------
/plugins/glim-feed/glim-feed.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |spec|
2 | spec.name = File.basename(Dir.pwd)
3 | spec.version = '0.1.1'
4 | spec.author = 'Allan Odgaard'
5 | spec.summary = 'A mock for jekyll-feed.'
6 | spec.homepage = 'https://macromates.com/glim/'
7 | spec.license = 'MIT'
8 |
9 | spec.files = Dir.glob('*.rb')
10 | spec.require_paths = ['.']
11 | end
12 |
--------------------------------------------------------------------------------
/plugins/glim-seo-tag/glim-seo-tag.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |spec|
2 | spec.name = File.basename(Dir.pwd)
3 | spec.version = '0.1.1'
4 | spec.author = 'Allan Odgaard'
5 | spec.summary = 'A mock for jekyll-seo-tag.'
6 | spec.homepage = 'https://macromates.com/glim/'
7 | spec.license = 'MIT'
8 |
9 | spec.files = Dir.glob('*.rb')
10 | spec.require_paths = ['.']
11 | end
12 |
--------------------------------------------------------------------------------
/app/lib/exception.rb:
--------------------------------------------------------------------------------
1 | module Glim
2 | class Error < ::RuntimeError
3 | attr_reader :message, :previous
4 |
5 | def initialize(message, previous = nil)
6 | @message, @previous = message, previous
7 | end
8 |
9 | def messages
10 | res = [ @message ]
11 | e = self
12 | while e.respond_to?(:previous) && (e = e.previous)
13 | res << e.message
14 | end
15 | res
16 | end
17 | end
18 | end
--------------------------------------------------------------------------------
/plugins/glim-encode-email/glim-encode-email.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |spec|
2 | spec.name = File.basename(Dir.pwd)
3 | spec.version = '0.1'
4 | spec.author = 'Allan Odgaard'
5 | spec.summary = 'Encode email addresses using HTML entities.'
6 | spec.homepage = 'https://macromates.com/glim/'
7 | spec.license = 'MIT'
8 |
9 | spec.files = Dir.glob('*.rb')
10 | spec.require_paths = ['.']
11 |
12 | spec.add_runtime_dependency 'glim', '~> 0.1'
13 | end
14 |
--------------------------------------------------------------------------------
/plugins/glim-edit-in-textmate/glim-edit-in-textmate.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |spec|
2 | spec.name = File.basename(Dir.pwd)
3 | spec.version = '0.1'
4 | spec.author = 'Allan Odgaard'
5 | spec.summary = 'Press E in your browser to edit the page in TextMate.'
6 | spec.homepage = 'https://macromates.com/glim/'
7 | spec.license = 'MIT'
8 |
9 | spec.files = Dir.glob('*.rb')
10 | spec.require_paths = ['.']
11 |
12 | spec.add_runtime_dependency 'glim', '~> 0.1'
13 | end
14 |
--------------------------------------------------------------------------------
/plugins/glim-haml-converter/glim-haml-converter.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |spec|
2 | spec.name = File.basename(Dir.pwd)
3 | spec.version = '0.1.1'
4 | spec.author = 'Allan Odgaard'
5 | spec.summary = 'Converts HAML files into HTML.'
6 | spec.homepage = 'https://macromates.com/glim/'
7 | spec.license = 'MIT'
8 |
9 | spec.files = Dir.glob('*.rb')
10 | spec.require_paths = ['.']
11 |
12 | spec.add_runtime_dependency 'glim', '~> 0.1', '>= 0.1.5'
13 | spec.add_runtime_dependency 'haml', '~> 5.0'
14 | end
15 |
--------------------------------------------------------------------------------
/plugins/glim-sass-converter/glim-sass-converter.gemspec:
--------------------------------------------------------------------------------
1 | Gem::Specification.new do |spec|
2 | spec.name = File.basename(Dir.pwd)
3 | spec.version = '0.1'
4 | spec.author = 'Allan Odgaard'
5 | spec.summary = 'Wrapper for jekyll-sass-converter.'
6 | spec.homepage = 'https://macromates.com/glim/'
7 | spec.license = 'MIT'
8 |
9 | spec.files = Dir.glob('*.rb')
10 | spec.require_paths = ['.']
11 |
12 | spec.add_runtime_dependency 'jekyll', '~> 3.8'
13 | spec.add_runtime_dependency 'jekyll-sass-converter', '~> 1.0'
14 | end
15 |
--------------------------------------------------------------------------------
/plugins/glim-sass-converter/glim-sass-converter.rb:
--------------------------------------------------------------------------------
1 | module Jekyll
2 | def self.sanitized_path(path, dir)
3 | File.expand_path(path, dir)
4 | end
5 | end
6 |
7 | module GlimExtensions
8 | class Sass < Glim::Filter
9 | transforms 'scss' => 'css', 'sass' => 'css'
10 |
11 | @@did_require_sass_converter = false
12 |
13 | def initialize(site)
14 | unless @@did_require_sass_converter
15 | @@did_require_sass_converter = true
16 | begin
17 | require 'jekyll-sass-converter'
18 | rescue LoadError => e
19 | STDERR << "Error loading ‘jekyll-sass-converter’: #{e}\n"
20 | end
21 | end
22 |
23 | @converters ||= Jekyll::Plugin.plugins_of_type(Jekyll::Converter).sort.map { |klass| klass.new(site.config) }
24 | end
25 |
26 | def transform(content, page, options)
27 | if converter = @converters.find { |c| c.matches(page.extname) }
28 | content = converter.convert(content)
29 | end
30 | content
31 | end
32 | end
33 | end
34 |
--------------------------------------------------------------------------------
/plugins/glim-edit-in-textmate/glim-edit-in-textmate.rb:
--------------------------------------------------------------------------------
1 | require 'cgi'
2 |
3 | module GlimExtensions
4 | class EditInTextMate < Glim::Filter
5 | transforms 'output.html' => 'output.html'
6 |
7 | def initialize(site)
8 | @enabled = site.config['environment'] == 'development'
9 | end
10 |
11 | def transform(content, page, options)
12 | if @enabled && page.path
13 | script_tag = edit_in_textmate_script(page.path)
14 | if content =~ /<\/body>/
15 | content = "#$`#{script_tag}#$'"
16 | elsif content =~ /<\/html>/
17 | content = "#$`#{script_tag}#$'"
18 | else
19 | content = script_tag + content
20 | end
21 | end
22 | content
23 | end
24 |
25 | def edit_in_textmate_script(path)
26 | <<~HTML
27 | <script type="text/javascript">
28 | /* Open the page’s source file in TextMate (txmt: URL scheme) when ‘E’ is pressed. */
29 | document.addEventListener('keydown', function (event) {
30 | if(event.key != 'e' && event.key != 'E') return;
31 | if(event.target instanceof HTMLInputElement || event.target instanceof HTMLTextAreaElement) return;
32 | window.location.href = 'txmt://open?url=file://#{CGI.escape(path)}';
33 | });
34 | </script>
35 |
36 | HTML
37 | end
38 | end
39 | end
40 |
--------------------------------------------------------------------------------
/app/glim.gemspec:
--------------------------------------------------------------------------------
1 | require_relative 'lib/version'
2 |
3 | Gem::Specification.new do |spec|
4 | spec.name = 'glim'
5 | spec.version = Glim::VERSION
6 | spec.author = 'Allan Odgaard'
7 | spec.summary = 'Static site generator inspired by Jekyll but a lot faster'
8 | spec.description = 'Generating output is done in parallel using multiple tasks, and lazy evaluation is used when serving pages locally for instant reloads when source content changes.'
9 |
10 | spec.license = 'MIT'
11 | spec.homepage = 'https://sigpipe.macromates.com/2018/creating-a-faster-jekyll/'
12 |
13 | spec.metadata = {
14 | 'homepage_uri' => 'https://sigpipe.macromates.com/2018/creating-a-faster-jekyll/',
15 | 'documentation_uri' => 'https://macromates.com/glim/',
16 | 'source_code_uri' => 'https://github.com/sorbits/glim/',
17 | 'mailing_list_uri' => 'https://lists.macromates.com/listinfo/glim',
18 | }
19 |
20 | spec.bindir = 'bin'
21 | spec.executables << 'glim'
22 |
23 | spec.files = Dir.glob('{bin/*,lib/*.rb}')
24 |
25 | spec.required_ruby_version = ">= 2.3.0"
26 |
27 | spec.add_runtime_dependency 'mercenary', '~> 0.3'
28 | spec.add_runtime_dependency 'liquid', '~> 4.0'
29 | spec.add_runtime_dependency 'kramdown', '~> 1.14'
30 | spec.add_runtime_dependency 'listen', '~> 3.0'
31 | spec.add_runtime_dependency 'websocket', '~> 1.2'
32 | spec.add_runtime_dependency 'mime-types', '~> 3.2'
33 | spec.add_runtime_dependency 'glim-sass-converter', '~> 0.1'
34 | spec.add_runtime_dependency 'glim-seo-tag', '~> 0.1'
35 | spec.add_runtime_dependency 'glim-feed', '~> 0.1'
36 | end
37 |
--------------------------------------------------------------------------------
/app/lib/cache.rb:
--------------------------------------------------------------------------------
1 | require 'fileutils'
2 |
3 | module Glim
4 | class Cache
5 | CACHE_PATH = '.cache/glim/data.bin'
6 |
7 | class << self
8 | def load
9 | cache
10 | end
11 |
12 | def save
13 | unless @cache.nil?
14 | FileUtils.mkdir_p(File.dirname(CACHE_PATH))
15 | open(CACHE_PATH, 'w') do |io|
16 | Marshal.dump(cache, io)
17 | end
18 | end
19 | end
20 |
21 | def track_updates=(flag)
22 | @updates = flag ? {} : nil
23 | end
24 |
25 | def updates
26 | @updates
27 | end
28 |
29 | def merge!(updates)
30 | updates.each do |group, paths|
31 | (cache[group] ||= {}).merge!(paths)
32 | end
33 | end
34 |
35 | def getset(path, group = :default)
36 | begin
37 | mtime = File.stat(path).mtime
38 | if record = cache.dig(group, path)
39 | if mtime == record['modified']
40 | return record['data']
41 | end
42 | end
43 |
44 | record = {
45 | 'modified' => mtime,
46 | 'data' => yield,
47 | }
48 |
49 | (cache[group] ||= {})[path] = record
50 | (@updates[group] ||= {})[path] = record if @updates
51 |
52 | record['data']
53 | rescue Errno::ENOENT
54 | $log.warn("File does not exist: #{path}")
55 | nil
56 | end
57 | end
58 |
59 | private
60 |
61 | def cache
62 | @cache ||= open(CACHE_PATH) { |io| Marshal.load(io) } rescue {}
63 | end
64 | end
65 | end
66 | end
67 |
--------------------------------------------------------------------------------
/plugins/glim-haml-converter/glim-haml-converter.rb:
--------------------------------------------------------------------------------
1 | require 'haml'
2 |
3 | module GlimHAMLSupport
4 | class ExposeLiquidFilters
5 | def initialize(site, page)
6 | @context = Liquid::Context.new({ 'site' => site, 'page' => page })
7 | end
8 |
9 | def method_missing(method, *args)
10 | @context.strainer.invoke(method, *args)
11 | end
12 | end
13 |
14 | class ExposeLiquidGetterAPI
15 | def initialize(obj)
16 | @obj = obj.to_liquid
17 | end
18 |
19 | def method_missing(method, *args)
20 | @obj.liquid_method_missing(method.to_s)
21 | end
22 | end
23 |
24 | class HAMLToHTML < Glim::Filter
25 | transforms 'haml' => 'html'
26 |
27 | def initialize(site)
28 | @site = site
29 | end
30 |
31 | def transform(content, page, options)
32 | engine = Haml::Engine.new(content, { :escape_attrs => :once })
33 | content = engine.render(ExposeLiquidFilters.new(@site, page), :content => content, :page => ExposeLiquidGetterAPI.new(page), :site => ExposeLiquidGetterAPI.new(@site))
34 | end
35 | end
36 |
37 | class HAMLLayout < Glim::Filter
38 | transforms '*' => 'output'
39 |
40 | def initialize(site)
41 | @site = site
42 | @cache = {}
43 | end
44 |
45 | def find_layout(name)
46 | unless name.nil? || @cache.has_key?(name)
47 | path = File.join(@site.layouts_dir, name + '.haml')
48 | @cache[name] = if File.exists?(path)
49 | Glim::FileItem.new(@site, path)
50 | end
51 | end
52 | @cache[name]
53 | end
54 |
55 | def transform(content, page, options)
56 | layout = page.data['layout']
57 | if find_layout(layout)
58 | while layout_file = find_layout(layout)
59 | engine = Haml::Engine.new(layout_file.content('liquid'), { :escape_attrs => :once })
60 | content = engine.render(ExposeLiquidFilters.new(@site, page), :content => content, :page => ExposeLiquidGetterAPI.new(page), :site => ExposeLiquidGetterAPI.new(@site))
61 | layout = layout_file.data['layout']
62 | end
63 | content
64 | else
65 | super
66 | end
67 | end
68 | end
69 | end
70 |
--------------------------------------------------------------------------------
/app/lib/log_and_profile.rb:
--------------------------------------------------------------------------------
1 | require 'logger'
2 |
3 | $log = Logger.new(STDERR)
4 | $log.formatter = proc do |severity, datetime, progname, msg|
5 | "[#{datetime.strftime('%Y-%m-%d %H:%M:%S.%3N')}] [#{Process.pid}] %7s #{msg}\n" % "[#{severity}]"
6 | end
7 |
8 | class Profiler
9 | @@instance = nil
10 |
11 | def initialize(format = "Program ran in %.3f seconds")
12 | @current = Entry.new(format)
13 | end
14 |
15 | def self.enabled=(flag)
16 | @@instance.dump if @@instance
17 | @@instance = flag ? Profiler.new : nil
18 | end
19 |
20 | def self.enabled
21 | @@instance ? true : false
22 | end
23 |
24 | def self.run(action, &block)
25 | if @@instance
26 | @@instance.profile("#{action} took %.3f seconds", &block)
27 | else
28 | block.call
29 | end
30 | end
31 |
32 | def self.group(group, &block)
33 | if @@instance
34 | @@instance.profile_group(group, &block)
35 | else
36 | block.call
37 | end
38 | end
39 |
40 | def profile(format)
41 | parent = @current
42 | parent.add_child(@current = Entry.new(format))
43 | res = yield
44 | @current.finished!
45 | @current = parent
46 | res
47 | end
48 |
49 | def profile_group(group)
50 | @current.group(group) do
51 | yield
52 | end
53 | end
54 |
55 | def dump
56 | @current.dump
57 | end
58 |
59 | class Entry
60 | attr_reader :duration
61 |
62 | def initialize(format)
63 | @format = format
64 | @start = Time.now
65 | end
66 |
67 | def group(name)
68 | @groups ||= {}
69 | @groups[name] ||= { :duration => 0, :count => 0 }
70 |
71 | previous_group, @current_group = @current_group, name
72 |
73 | start = Time.now
74 | res = yield
75 | @groups[name][:duration] += Time.now - start
76 | @groups[name][:count] += 1
77 |
78 | @groups[previous_group][:duration] -= Time.now - start if previous_group
79 | @current_group = previous_group
80 |
81 | res
82 | end
83 |
84 | def add_child(child)
85 | @children ||= []
86 | @children << child
87 | end
88 |
89 | def finished!
90 | @duration ||= Time.now - @start
91 | end
92 |
93 | def indent(level)
94 | ' ' * level
95 | end
96 |
97 | def dump(level = 0)
98 | self.finished!
99 |
100 | STDERR.puts indent(level) + (@format % @duration)
101 |
102 | if @groups
103 | @groups.sort_by { |group, info| info[:duration] }.reverse_each do |group, info|
104 | STDERR.puts indent(level+1) + "[#{group}: %.3f seconds, called #{info[:count]} time(s), %.3f seconds/time]" % [ info[:duration], info[:duration] / info[:count] ]
105 | end
106 | end
107 |
108 | if @children
109 | @children.sort_by { |child| child.duration }.reverse_each do |child|
110 | child.dump(level + 1)
111 | end
112 | end
113 | end
114 | end
115 | end
116 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Glim — Static Site Generator
2 |
3 | Glim is a static site generator which is semi-compatible with Jekyll but faster and with some additional features:
4 |
5 | * Running `serve` will generate content as the browser requests it (lazy evaluation); this allows instant previews, since the full site doesn’t have to be built first. It also means that if there is a (syntax) error while generating a page, the error is shown in your browser, and the error page even supports automatic reload. Another advantage of this approach is that nothing is written to disk while testing, so `_site` will always contain deployment-ready pages (no instances of `localhost:4000`, injected reload scripts, or unpublished drafts).
6 |
7 | * Running `build` will make use of multiple tasks to parallelize content generation.
8 |
9 | * Collections have been generalized so that they all support tags, categories, drafts, and arbitrary sorting (e.g. reverse chronological). There is nothing special about `_posts`.
10 |
11 | * Support for multiple domains has been added. This means generating content for `example.org` and `blog.example.org` can be done using the same project, so that resources can be shared and cross-linking is possible via the `link` tag.
12 |
13 | * Extensible render pipeline: Content is transformed using a pipeline to which it is trivial to add new filters. This makes it possible to add new converters, override the default converters, or simply pre/post-process content to support custom syntax, inject content, run the generated HTML through a validator, or similar (see the filter sketch after this list).
14 |
15 | * Introduced a `digest` variable which can be used in permalinks to ensure that a page’s URL will change when the content is updated (guaranteed cache invalidation useful for CSS and JavaScript).
16 |
17 | * Easy pagination of both collections and data structures.
18 |
19 | * Collections can have pages generated for tags and categories. Making this a built-in feature makes it possible to iterate over the generated pages and link to them using their `url` property, rather than making assumptions about where such pages end up in the file hierarchy.
20 |
21 | * Any change to a site file, be it a file under `_data` or even `_config.yml`, will trigger a browser reload that fetches the updated page. This is possible because we use lazy evaluation, so a file system change effectively just triggers a cache flush rather than a rebuild of the entire site.
22 |
23 | * Default values for pages can be set using file globs, making it easy to use the same set of values for a broad set of files, and default values for collection files can be set under the respective collection, which is extra useful when using cascading configuration files.
24 |
25 | * Introduced a `source_dir` setting to allow putting site content in a subdirectory, obviating the need for maintaining a list of excludes and/or prefixing non-publishable items with underscores.
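
As an illustration of the render pipeline, a filter is just a Ruby class deriving from `Glim::Filter`, exactly like the bundled plugins under `plugins/`. The sketch below is hypothetical (the class name and the injected comment are made up for illustration), but the `transforms` declaration and the `transform(content, page, options)` hook are the ones the bundled plugins use:

```ruby
# Hypothetical post-processing filter: append a generator comment to the final HTML.
module GlimExtensions
  class AppendGeneratorComment < Glim::Filter
    # Operate on the final HTML, after layouts have been applied
    # (the same stage glim-edit-in-textmate hooks into).
    transforms 'output.html' => 'output.html'

    def initialize(site)
      @site = site
    end

    def transform(content, page, options)
      content + "\n<!-- Generated by Glim #{Glim::VERSION} -->\n"
    end
  end
end
```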
26 |
27 | ## Installing
28 |
29 | Glim can be installed via `rubygems`:
30 |
31 | gem install glim
32 |
33 | ## Documentation
34 |
35 | Familiarity with Jekyll is assumed. The features that Glim adds are documented in the [Glim Manual](https://macromates.com/glim/).
36 |
--------------------------------------------------------------------------------
/app/lib/liquid_ext.rb:
--------------------------------------------------------------------------------
1 | require 'kramdown'
2 | require 'liquid'
3 |
4 | module Glim
5 | module LiquidFilters
6 | def markdownify(input)
7 | return if input.nil?
8 |
9 | Profiler.group('markdownify') do
10 | if defined?(MultiMarkdown)
11 | MultiMarkdown.new("\n" + input, 'snippet', 'no_metadata').to_html
12 | else
13 | options = @context['site']['kramdown'].map { |key, value| [ key.to_sym, value ] }.to_h
14 | document = Kramdown::Document.new(input, options)
15 | @context['warnings'].concat(document.warnings) if options[:show_warnings] && @context['warnings']
16 | document.to_html
17 | end
18 | end
19 | end
20 |
21 | def slugify(input)
22 | Util.slugify(input) unless input.nil?
23 | end
24 |
25 | def xml_escape(input)
26 | input.encode(:xml => :attr).gsub(/\A"|"\z/, '') unless input.nil?
27 | end
28 |
29 | def cgi_escape(input)
30 | CGI.escape(input) unless input.nil?
31 | end
32 |
33 | def absolute_url(path)
34 | return if path.nil?
35 |
36 | site, page = URI(@context['site']['url']), URI(@context['page']['url'])
37 | host, port = @context['site']['host'], @context['site']['port']
38 |
39 | if page.relative? || (site.host == host && site.port == port)
40 | site.merge(URI(path)).to_s
41 | else
42 | page.merge(URI(path)).to_s
43 | end
44 | end
45 |
46 | def relative_url(other)
47 | return if other.nil?
48 |
49 | site, page = URI(@context['site']['url']), URI(@context['page']['url'])
50 | host, port = @context['site']['host'], @context['site']['port']
51 |
52 | helper = lambda do |base, other|
53 | base_url, other_url = URI(base), URI(other)
54 | if other_url.absolute? && base_url.host == other_url.host
55 | other_url.path
56 | else
57 | other
58 | end
59 | end
60 |
61 | if page.relative? || (site.host == host && site.port == port)
62 | helper.call(@context['site']['url'], other)
63 | else
64 | helper.call(@context['page']['url'], other)
65 | end
66 | end
67 |
68 | def path_to_url(input)
69 | return if input.nil?
70 |
71 | if file = Jekyll.sites.last.links[input]
72 | file.url
73 | else
74 | raise Glim::Error.new("path_to_url: No file found for: #{input}")
75 | end
76 | end
77 |
78 | def date_to_xmlschema(input)
79 | Liquid::Utils.to_date(input).localtime.xmlschema unless input.nil?
80 | end
81 |
82 | def date_to_rfc822(input)
83 | Liquid::Utils.to_date(input).localtime.rfc822 unless input.nil?
84 | end
85 |
86 | def date_to_string(input)
87 | Liquid::Utils.to_date(input).localtime.strftime("%d %b %Y") unless input.nil?
88 | end
89 |
90 | def date_to_long_string(input)
91 | Liquid::Utils.to_date(input).localtime.strftime("%d %B %Y") unless input.nil?
92 | end
93 |
94 | def where(input, property, value)
95 | if input.respond_to?(:select) && property && value
96 | input = input.values if input.is_a?(Hash)
97 | input.select { |item| get_property(item, property) == value }
98 | else
99 | input
100 | end
101 | end
102 |
103 | def group_by(input, property)
104 | if input.respond_to?(:group_by) && property
105 | groups = input.group_by { |item| get_property(item, property) }
106 | groups.map { |key, value| { "name" => key, "items" => value, "size" => value.size } }
107 | else
108 | input
109 | end
110 | end
111 |
112 | def group_by_exp(input, variable, expression)
113 | return input unless input.respond_to?(:group_by)
114 |
115 | parsed_expr = Liquid::Variable.new(expression, Liquid::ParseContext.new)
116 | @context.stack do
117 | groups = input.group_by do |item|
118 | @context[variable] = item
119 | parsed_expr.render(@context)
120 | end
121 | groups.map { |key, value| { "name" => key, "items" => value, "size" => value.size } }
122 | end
123 | end
124 |
125 | private
126 |
127 | def get_property(obj, property)
128 | if obj.respond_to?(:to_liquid)
129 | property.to_s.split('.').reduce(obj.to_liquid) do |mem, key|
130 | mem[key]
131 | end
132 | elsif obj.respond_to?(:data)
133 | obj.data[property.to_s]
134 | else
135 | obj[property.to_s]
136 | end
137 | end
138 | end
139 |
140 | module LiquidTags
141 | class PostURL < Liquid::Tag
142 | def initialize(tag_name, markup, options)
143 | super
144 | @post_name = markup.strip
145 | end
146 |
147 | def render(context)
148 | if file = Jekyll.sites.last.post_links[@post_name]
149 | file.url
150 | else
151 | raise Glim::Error.new("post_url: No post found for: #{@post_name}")
152 | end
153 | end
154 | end
155 |
156 | class Link < Liquid::Tag
157 | def initialize(tag_name, markup, options)
158 | super
159 | @relative_path = markup.strip
160 | end
161 |
162 | def render(context)
163 | if file = Jekyll.sites.last.links[@relative_path]
164 | file.url
165 | else
166 | raise Glim::Error.new("link: No file found for: #{@relative_path}")
167 | end
168 | end
169 | end
170 |
171 | class HighlightBlock < Liquid::Block
172 | def initialize(tag_name, markup, tokens)
173 | super
174 |
175 | if markup =~ /^([a-zA-Z0-9.+#_-]+)((\s+\w+(=(\w+|"[^"]*"))?)*)\s*$/
176 | @language, @options = $1, $2.scan(/(\w+)(?:=(?:(\w+)|"([^"]*)"))?/).map do |key, value, list|
177 | [ key.to_sym, list ? list.split : (value || true) ]
178 | end.to_h
179 | else
180 | @language, @options = nil, {}
181 | $log.error("Unable to parse highlight tag: #{markup}") unless markup.strip.empty?
182 | end
183 |
184 | begin
185 | require 'rouge'
186 | rescue LoadError => e
187 | $log.warn("Unable to load the rouge gem required by the highlight tag: #{e}")
188 | end
189 | end
190 |
191 | def render(context)
192 | source = super.to_s.gsub(/\A[\r\n]+|[\r\n]+\z/, '')
193 |
194 | if defined?(Rouge)
195 | rouge_options = {
196 | :line_numbers => @options[:linenos] == true ? 'inline' : @options[:linenos],
197 | :wrap => false,
198 | :css_class => 'highlight',
199 | :gutter_class => 'gutter',
200 | :code_class => 'code'
201 | }.merge(@options)
202 |
203 | lexer = Rouge::Lexer.find_fancy(@language, source) || Rouge::Lexers::PlainText
204 | formatter = Rouge::Formatters::HTMLLegacy.new(rouge_options)
205 | source = formatter.format(lexer.lex(source))
206 |
207 | $log.warn("No language specified in highlight tag. Will use #{lexer.class.name} to parse the code.") if @language.nil?
208 | end
209 |
210 | code_attributes = @language ? " class=\"language-#{@language.tr('+', '-')}\" data-lang=\"#{@language}\"" : ""
211 | "#{source.chomp}
"
212 | end
213 | end
214 | end
215 |
216 | def self.preprocess_template(source)
217 | source = source.gsub(/({%-? include )([\w.\/-]+)(.*?)(-?%})/) do
218 | prefix, include, variables, suffix = $1, $2, $3, $4
219 | unless variables.strip.empty?
220 | variables = ', ' + variables.scan(/(\w+)=(.*?)(?=\s)/).map { |key, value| "include_#{key}: #{value}" }.join(', ') + ' '
221 | end
222 |
223 | "#{prefix}\"#{include}\"#{variables}#{suffix}"
224 | end
225 |
226 | source.gsub!(/({{-? include)\.(.*?}})/) { "#$1_#$2" }
227 | source.gsub!(/({%-? .+? include)\.(.*?%})/) { "#$1_#$2" }
228 |
229 | source
230 | end
231 |
232 | class LocalFileSystem
233 | def initialize(*paths)
234 | @paths = paths.reject { |path| path.nil? }
235 | end
236 |
237 | def read_template_file(name)
238 | @cache ||= {}
239 | unless @cache[name]
240 | paths = @paths.map { |path| File.join(path, name) }
241 | if file = paths.find { |path| File.exist?(path) }
242 | @cache[name] = Glim.preprocess_template(File.read(file))
243 | end
244 | end
245 | @cache[name]
246 | end
247 | end
248 | end
249 |
250 | Liquid::Template.register_filter(Glim::LiquidFilters)
251 | Liquid::Template.register_tag('post_url', Glim::LiquidTags::PostURL)
252 | Liquid::Template.register_tag('link', Glim::LiquidTags::Link)
253 | Liquid::Template.register_tag("highlight", Glim::LiquidTags::HighlightBlock)
254 |
--------------------------------------------------------------------------------
/app/lib/commands.rb:
--------------------------------------------------------------------------------
1 | module Glim
2 | module Commands
3 | def self.build(config)
4 | output_dir = File.expand_path(config['destination'])
5 | files = config.site.files_and_documents.select { |file| file.write? }
6 | symlinks = (config.site.symlinks || []).map { |link| [ File.expand_path(File.join(link[:data]['domain'] || '.', link[:name]), output_dir), link[:realpath] ] }.to_h
7 |
8 | output_paths = files.map { |file| file.output_path(output_dir) }
9 | output_paths.concat(symlinks.keys)
10 |
11 | delete_files, delete_dirs = items_in_directory(output_dir, skip: config['keep_files'])
12 | deleted = delete_items(delete_files, delete_dirs, keep: output_paths)
13 | created, updated, warnings, errors = *generate(output_dir, config['jobs'] || 7, files, backtrace: config['show_backtrace'])
14 |
15 | symlinks.each do |dest, path|
16 | FileUtils.mkdir_p(File.dirname(dest))
17 | begin
18 | File.symlink(path, dest)
19 | created << dest
20 | rescue Errno::EEXIST
21 | if File.readlink(dest) != path
22 | File.unlink(dest)
23 | File.symlink(path, dest)
24 | updated << dest
25 | end
26 | end
27 | end
28 |
29 | [ [ 'Created', created ], [ 'Deleted', deleted ], [ 'Updated', updated ] ].each do |label, files|
30 | unless files.empty?
31 | STDERR.puts "==> #{label} #{files.size} #{files.size == 1 ? 'File' : 'Files'}"
32 | STDERR.puts files.map { |path| Util.relative_path(path, output_dir) }.sort.join(', ')
33 | end
34 | end
35 |
36 | unless warnings.empty?
37 | STDERR.puts "==> #{warnings.size} #{warnings.size == 1 ? 'Warning' : 'Warnings'}"
38 | warnings.each do |message|
39 | STDERR.puts message
40 | end
41 | end
42 |
43 | unless errors.empty?
44 | STDERR.puts "==> Stopped After #{errors.size} #{errors.size == 1 ? 'Error' : 'Errors'}"
45 | errors.each do |arr|
46 | arr.each_with_index do |err, i|
47 | STDERR.puts err.gsub(/^/, ' '*i)
48 | end
49 | end
50 | end
51 | end
52 |
53 | def self.clean(config)
54 | files, dirs = items_in_directory(File.expand_path(config['destination']), skip: config['keep_files'])
55 |
56 | if config['dry_run']
57 | if files.empty?
58 | STDOUT.puts "No files to delete"
59 | else
60 | files.each do |file|
61 | STDOUT.puts "Delete #{Util.relative_path(file, File.expand_path(config['source']))}"
62 | end
63 | end
64 | else
65 | deleted = delete_items(files, dirs)
66 | STDOUT.puts "Deleted #{deleted.size} #{deleted.size == 1 ? 'File' : 'Files'}."
67 | end
68 | end
69 |
70 | def self.profile(config)
71 | Profiler.enabled = true
72 |
73 | site = Profiler.run("Setting up site") do
74 | config.site
75 | end
76 |
77 | Profiler.run("Loading cache") do
78 | Glim::Cache.load
79 | end
80 |
81 | files = []
82 |
83 | Profiler.run("Loading pages") do
84 | files.concat(site.files)
85 | end
86 |
87 | Profiler.run("Loading collections") do
88 | files.concat(site.documents)
89 | end
90 |
91 | Profiler.run("Generating virtual pages") do
92 | files.concat(site.generated_files)
93 | end
94 |
95 | files = files.select { |file| file.frontmatter? }
96 |
97 | Profiler.run("Expanding liquid tags") do
98 | files.each { |file| file.content('post-liquid') }
99 | end
100 |
101 | Profiler.run("Transforming pages") do
102 | files.each { |file| file.content('pre-output') }
103 | end
104 |
105 | Profiler.run("Creating final output (layout)") do
106 | files.each { |file| file.output }
107 | end
108 |
109 | Profiler.enabled = false
110 | end
111 |
112 | # ===========
113 | # = Private =
114 | # ===========
115 |
116 | def self.items_in_directory(dir, skip: [])
117 | files, dirs = [], []
118 |
119 | begin
120 | Find.find(dir) do |path|
121 | next if path == dir
122 | Find.prune if skip.include?(File.basename(path))
123 |
124 | if File.file?(path) || File.symlink?(path)
125 | files << path
126 | elsif File.directory?(path)
127 | dirs << path
128 | else
129 | $log.warn("Unknown entry: #{path}")
130 | end
131 | end
132 | rescue Errno::ENOENT
133 | end
134 |
135 | [ files, dirs ]
136 | end
137 |
138 | private_class_method :items_in_directory
139 |
140 | def self.delete_items(files, dirs, keep: [])
141 | res = []
142 |
143 | keep_files = Set.new(keep)
144 | files.each do |path|
145 | unless keep_files.include?(path)
146 | begin
147 | File.unlink(path)
148 | res << path
149 | rescue => e
150 | $log.error("Error unlinking ‘#{path}’: #{e}\n")
151 | end
152 | end
153 | end
154 |
155 | dirs.sort.reverse_each do |path|
156 | begin
157 | Dir.rmdir(path)
158 | rescue Errno::ENOTEMPTY => e
159 | # Ignore
160 | rescue => e
161 | $log.error("Error removing directory ‘#{path}’: #{e}\n")
162 | end
163 | end
164 |
165 | res
166 | end
167 |
168 | private_class_method :delete_items
169 |
170 | def self.generate(output_dir, number_of_jobs, files, backtrace: false)
171 | Profiler.run("Creating pages") do
172 | if number_of_jobs == 1
173 | generate_subset(output_dir, files, backtrace: backtrace)
174 | else
175 | generate_async(output_dir, files.shuffle, number_of_jobs, backtrace: backtrace)
176 | end
177 | end
178 | end
179 |
180 | private_class_method :generate
181 |
182 | def self.generate_async(output_dir, files, number_of_jobs, backtrace: false)
183 | total = files.size
184 | slices = number_of_jobs.times.map do |i|
185 | first = (total * i / number_of_jobs).ceil
186 | last = (total * (i+1) / number_of_jobs).ceil
187 | files.shift(last-first)
188 | end
189 |
190 | Glim::Cache.track_updates = true
191 | semaphore = Mutex.new
192 | created, updated, warnings, errors = [], [], [], []
193 |
194 | threads = slices.each_with_index.map do |files_slice, i|
195 | pipe_rd, pipe_wr = IO.pipe
196 | pid = fork do
197 | start = Time.now
198 | pipe_rd.close
199 | created, updated, warnings, errors = *generate_subset(output_dir, files_slice, backtrace: backtrace)
200 | pipe_wr << Marshal.dump({
201 | 'cache_updates' => Glim::Cache.updates,
202 | 'created' => created,
203 | 'updated' => updated,
204 | 'warnings' => warnings,
205 | 'errors' => errors,
206 | 'duration' => Time.now - start,
207 | 'id' => i,
208 | })
209 | pipe_wr.close
210 | end
211 |
212 | Process.detach(pid)
213 |
214 | Thread.new do
215 | pipe_wr.close
216 | res = Marshal.load(pipe_rd)
217 | semaphore.synchronize do
218 | Glim::Cache.merge!(res['cache_updates'])
219 | created += res['created']
220 | updated += res['updated']
221 | warnings += res['warnings']
222 | errors += res['errors']
223 | $log.debug("Wrote #{files_slice.size} pages in #{res['duration']} seconds (thread #{res['id']})") if Profiler.enabled
224 | end
225 | end
226 | end
227 |
228 | threads.each { |thread| thread.join }
229 |
230 | [ created, updated, warnings, errors ]
231 | end
232 |
233 | private_class_method :generate_async
234 |
235 | def self.generate_subset(output_dir, files, backtrace: false)
236 | created, updated, warnings, errors = [], [], [], []
237 |
238 | files.each do |file|
239 | dest = file.output_path(output_dir)
240 | file_exists = File.exists?(dest)
241 |
242 | FileUtils.mkdir_p(File.dirname(dest))
243 | if file.frontmatter?
244 | begin
245 | if !file_exists || File.read(dest) != file.output
246 | File.unlink(dest) if file_exists
247 | File.write(dest, file.output)
248 | (file_exists ? updated : created) << dest
249 | end
250 | warnings.concat(file.warnings.map { |warning| "#{file}: #{warning}" }) unless file.warnings.nil?
251 | rescue Glim::Error => e
252 | errors << [ "Unable to create output for: #{file}", *e.messages ]
253 | break
254 | rescue => e
255 | error = [ "Unable to create output for: #{file}", e.to_s ]
256 | error << e.backtrace.join("\n") if backtrace
257 | errors << error
258 | break
259 | end
260 | else
261 | unless File.file?(dest) && File.file?(file.path) && File.stat(dest).ino == File.stat(file.path).ino
262 | File.unlink(dest) if file_exists
263 | File.link(file.path, dest)
264 | end
265 | end
266 | end
267 |
268 | [ created, updated, warnings, errors ]
269 | end
270 |
271 | private_class_method :generate_subset
272 | end
273 | end
274 |
--------------------------------------------------------------------------------
/app/lib/local_server.rb:
--------------------------------------------------------------------------------
1 | require_relative '../lib/exception'
2 | require 'listen'
3 | require 'mime/types'
4 | require 'socket'
5 | require 'webrick'
6 | require 'websocket'
7 |
8 | module WebSocket
9 | class Connection
10 | attr_reader :socket
11 |
12 | def self.establish(socket)
13 | handshake = WebSocket::Handshake::Server.new
14 | handshake << socket.gets until handshake.finished?
15 |
16 | raise "Malformed handshake received from WebSocket client" unless handshake.valid?
17 |
18 | socket.puts(handshake.to_s)
19 | Connection.new(socket, handshake)
20 | end
21 |
22 | def initialize(socket, handshake)
23 | @socket = socket
24 | @handshake = handshake
25 | end
26 |
27 | def puts(message)
28 | frame = WebSocket::Frame::Outgoing::Server.new(version: @handshake.version, data: message, type: :text)
29 | @socket.puts(frame.to_s)
30 | end
31 |
32 | def each_message
33 | frame = WebSocket::Frame::Incoming::Server.new(version: @handshake.version)
34 | frame << @socket.read_nonblock(4096)
35 | while message = frame.next
36 | yield message
37 | end
38 | end
39 | end
40 |
41 | class Server
42 | def initialize(host: 'localhost', port: nil)
43 | @server, @rd_pipe, @wr_pipe = TCPServer.new(host, port), *IO.pipe
44 | end
45 |
46 | def broadcast(message)
47 | @wr_pipe.puts(message)
48 | end
49 |
50 | def shutdown
51 | broadcast('shutdown')
52 |
53 | @wr_pipe.close
54 | @wr_pipe = nil
55 |
56 | @thread.join
57 |
58 | @server.close
59 | @server = nil
60 | end
61 |
62 | def start
63 | @thread = Thread.new do
64 | connections = []
65 | running = true
66 | while running
67 | rs, _, _ = IO.select([ @server, @rd_pipe, *connections.map { |conn| conn.socket } ])
68 | rs.each do |socket|
69 | if socket == @server
70 | socket = @server.accept
71 | begin
72 | connections << Connection.establish(socket)
73 | rescue => e
74 | $log.warn("Failed to perform handshake with new WebSocket client: #{e}", e)
75 | socket.close
76 | end
77 | elsif socket == @rd_pipe
78 | message = @rd_pipe.gets.chomp
79 | if message == 'shutdown'
80 | running = false
81 | break
82 | end
83 | $log.debug("Send ‘#{message}’ to #{connections.size} WebSocket #{connections.size == 1 ? 'client' : 'clients'}") unless connections.empty?
84 | connections.each do |conn|
85 | begin
86 | conn.puts(message)
87 | rescue => e
88 | $log.warn("Error writing to WebSocket client socket: #{e}")
89 | end
90 | end
91 | else
92 | if conn = connections.find { |candidate| candidate.socket == socket }
93 | begin
94 | conn.each_message do |frame|
95 | $log.debug("Received #{frame.to_s.size} bytes from WebSocket client: #{frame}") unless frame.to_s.empty?
96 | end
97 | rescue IO::WaitReadable
98 | $log.warn("IO::WaitReadable exception while reading from WebSocket client")
99 | rescue EOFError
100 | conn.socket.close
101 | connections.delete(conn)
102 | end
103 | end
104 | end
105 | end
106 | end
107 | @rd_pipe.close
108 | @rd_pipe = nil
109 | end
110 | end
111 | end
112 | end
113 |
114 | module Glim
115 | module LocalServer
116 | class Servlet < WEBrick::HTTPServlet::AbstractServlet
117 | @@mutex = Mutex.new
118 |
119 | def initialize(server, config)
120 | @config = config
121 | end
122 |
123 | def do_GET(request, response)
124 | @@mutex.synchronize do
125 | do_GET_impl(request, response)
126 | end
127 | end
128 |
129 | def do_GET_impl(request, response)
130 | status, mime_type, body, file = 200, nil, nil, nil
131 |
132 | if request.path == '/.ws/script.js'
133 | mime_type, body = self.mime_type_for(request.path), self.websocket_script
134 | elsif page = self.find_page(request.path)
135 | file = page
136 | elsif dir = self.find_directory(request.path)
137 | if request.path.end_with?('/')
138 | if request.path == '/' || @config['show_dir_listing']
139 | mime_type, body = 'text/html', self.directory_index_for_path(dir)
140 | else
141 | $log.warn("Directory index forbidden for: #{request.path}")
142 | status = 403
143 | end
144 | else
145 | response['Location'] = "#{dir}/"
146 | status = 302
147 | end
148 | else
149 | $log.warn("No file for request: #{request.path}")
150 | status = 404
151 | end
152 |
153 | if status != 200 && body.nil? && file.nil?
154 | unless file = self.find_error_page(status, request.path)
155 | mime_type, body = 'text/html', self.error_page_for_status(status, request.path)
156 | end
157 | end
158 |
159 | mime_type ||= file ? self.mime_type_for(file.output_path('/')) : 'text/plain'
160 | body ||= content_for_file(file)
161 |
162 | response['Cache-Control'] = 'no-cache, no-store, must-revalidate'
163 | response['Pragma'] = 'no-cache'
164 | response['Expires'] = '0'
165 | response.status = status
166 | response.content_type = mime_type
167 | response.body = mime_type.start_with?('text/html') ? inject_reload_script(body) : body
168 | end
169 |
170 | def content_for_file(file)
171 | if file.frontmatter?
172 | begin
173 | file.output
174 | rescue Glim::Error => e
175 | content = "#{e.messages.join("\n")}"
176 | self.create_page("Error", "Exception raised for #{file}", content)
177 | rescue => e
178 | content = "#{e.to_s}"
179 | self.create_page("Error", "Exception raised for #{file}", content)
180 | end
181 | else
182 | File.read(file.path)
183 | end
184 | end
185 |
186 | def find_page(path)
187 | self.files.find do |file|
188 | candidate = file.output_path('/')
189 | if path == candidate || path + File.extname(candidate) == candidate
190 | true
191 | elsif path.end_with?('/')
192 | File.basename(candidate, '.*') == 'index' && path + File.basename(candidate) == candidate
193 | end
194 | end
195 | end
196 |
197 | def find_error_page(status, path)
198 | candidates = self.files.select do |file|
199 | file.basename == status.to_s && path_descends_from?(path, File.dirname(file.output_path('/')))
200 | end
201 | candidates.max { |lhs, rhs| lhs.output_path('/').size <=> rhs.output_path('/').size }
202 | end
203 |
204 | def find_directory(path)
205 | path = path.chomp('/') unless path == '/'
206 | self.files.map { |file| File.dirname(file.output_path('/')) }.find { |dir| path == dir }
207 | end
208 |
209 | def directory_index_for_path(path)
210 | candidates = self.files.map { |file| file.output_path('/') }
211 | candidates = candidates.select { |candidate| path_descends_from?(candidate, path) }
212 | candidates = candidates.map { |candidate| candidate.sub(/(^#{Regexp.escape(path.chomp('/'))}\/[^\/]+\/?).*/, '\1') }.sort.uniq
213 | candidates.unshift(path + '/..') if path != '/'
214 |
215 | heading = "Index of #{path}"
216 | content = candidates.map do |candidate|
217 | "#{candidate.sub(/.*?([^\/]+\/?)$/, '\1')}"
218 | end
219 |
220 | self.create_page("Directory Index", heading, "")
221 | end
222 |
223 | def error_page_for_status(status, path)
224 | case status
225 | when 302 then title, heading, content = "302 Redirecting…", "Redirecting…", "Your browser should have redirected you."
226 | when 403 then title, heading, content = "403 Forbidden", "Forbidden", "You don't have permission to access #{path} on this server."
227 | when 404 then title, heading, content = "404 Not Found", "Not Found", "The requested URL #{path} was not found on this server."
228 | else title, heading, content = "Error #{status}", "Error #{status}", "No detailed description of this error."
229 | end
230 | self.create_page(title, heading, content)
231 | end
232 |
233 | def websocket_script
234 | <<~JS
235 | const glim = {
236 | connect: function (host, port, should_retry, should_reload) {
237 | const server = host + ":" + port
238 | console.log("Connecting to Glim’s live reload server (" + server + ")…");
239 |
240 | const socket = new WebSocket("ws://" + server + "/socket");
241 |
242 | socket.onopen = () => {
243 | console.log("Established connection: Live reload enabled.")
244 | if(should_reload) {
245 | document.location.reload(true);
246 | }
247 | };
248 |
249 | socket.onmessage = (event) => {
250 | console.log("Message from live reload server: " + event.data);
251 |
252 | if(event.data == 'reload') {
253 | document.location.reload(true);
254 | }
255 | else if(event.data == 'close') {
256 | window.close();
257 | }
258 | };
259 |
260 | socket.onclose = () => {
261 | console.log("Lost connection: Live reload disabled.")
262 |
263 | if(should_retry) {
264 | window.setTimeout(() => this.connect(host, port, should_retry, true), 2500);
265 | }
266 | };
267 | },
268 | };
269 |
270 | glim.connect('#{@config['host']}', #{@config['livereload_port']}, true /* should_retry */, false /* should_reload */);
271 | JS
272 | end
273 |
274 | def path_descends_from?(path, parent)
275 | parent == '/' || path[parent.chomp('/').size] == '/' && path.start_with?(parent)
276 | end
277 |
278 | def create_page(title, heading, content)
279 | <<~HTML
280 | <!DOCTYPE html>
281 | <html>
282 | <head><title>#{title}</title></head>
283 | <body>
284 | <h1>#{heading}</h1>
285 | #{content}
286 | </body></html>
287 | HTML
288 | end
289 |
290 | def inject_reload_script(content)
291 | return content unless @config['livereload']
292 |
293 | script_tag = ""
294 | if content =~ //
295 | content = "#$`#${script_tag}#$'"
296 | elsif content =~ //
297 | content = "#$`#${script_tag}#$'"
298 | else
299 | content = script_tag + content
300 | end
301 | end
302 |
303 | def files
304 | @config.site.files_and_documents.select { |file| file.write? }
305 | end
306 |
307 | def mime_type_for(filename, encoding = nil)
308 | if type = MIME::Types.type_for(filename).shift
309 | if type.ascii? || type.media_type == 'text' || %w( ecmascript javascript ).include?(type.sub_type)
310 | "#{type.content_type}; charset=#{encoding || @config['encoding']}"
311 | else
312 | type.content_type
313 | end
314 | else
315 | 'application/octet-stream'
316 | end
317 | end
318 | end
319 |
320 | def self.start(config)
321 | config['url'] = "http://#{config['host']}:#{config['port']}"
322 | project_dir = config.site.project_dir
323 |
324 | websocket_server, listener = nil, nil
325 |
326 | if config['livereload']
327 | websocket_server = WebSocket::Server.new(host: config['host'], port: config['livereload_port'])
328 | websocket_server.start
329 | end
330 |
331 | server = WEBrick::HTTPServer.new(
332 | BindAddress: config['host'],
333 | Port: config['port'],
334 | Logger: WEBrick::Log.new('/dev/null'),
335 | AccessLog: [],
336 | )
337 |
338 | server.mount('/', Servlet, config)
339 |
340 | if config['watch'] || config['livereload']
341 | listener = Listen.to(project_dir) do |modified, added, removed|
342 | paths = [ *modified, *added, *removed ]
343 | $log.debug("File changes detected for: #{paths.select { |path| path.start_with?(project_dir) }.map { |path| Util.relative_path(path, project_dir) }.join(', ')}")
344 | config.reload
345 | websocket_server.broadcast('reload') if websocket_server
346 | end
347 | $log.debug("Watching #{project_dir} for changes")
348 | listener.start
349 | end
350 |
351 | trap("INT") do
352 | server.shutdown
353 | end
354 |
355 | if config['open_url'] && File.executable?('/usr/bin/open')
356 | page = config.site.links['.']
357 | system('/usr/bin/open', page ? page.url : config['url'])
358 | end
359 |
360 | $log.info("Starting server on #{config['url']}")
361 | server.start
362 | $log.info("Server shutting down…")
363 |
364 | listener.stop if listener
365 |
366 | if websocket_server
367 | if config['open_url'] && File.executable?('/usr/bin/open')
368 | websocket_server.broadcast('close')
369 | end
370 |
371 | websocket_server.shutdown
372 | end
373 | end
374 | end
375 | end
376 |
--------------------------------------------------------------------------------
/app/bin/glim:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env ruby
2 | require_relative '../lib/version'
3 | require_relative '../lib/exception'
4 | require_relative '../lib/log_and_profile'
5 | require_relative '../lib/cache'
6 | require_relative '../lib/commands'
7 | require_relative '../lib/liquid_ext'
8 | require 'csv'
9 | require 'digest'
10 | require 'enumerator'
11 | require 'fileutils'
12 | require 'find'
13 | require 'json'
14 | require 'pathname'
15 | require 'kramdown'
16 | require 'liquid'
17 | require 'mercenary'
18 |
19 | if File.exists?('Gemfile')
20 | begin
21 | require 'bundler/setup'
22 | rescue LoadError
23 | $log.warn("Unable to load Bundler: Ignoring ./Gemfile")
24 | end
25 | end
26 |
27 | module Util
28 | module_function
29 |
30 | def slugify(input, preserve_case: false)
31 | if input
32 | res = input.gsub(/[^[:alnum:]]+/, '-').gsub(/\A-|-\z/, '')
33 | preserve_case ? res : res.downcase
34 | end
35 | end
36 |
37 | def parse_date(maybe_a_date)
38 | if maybe_a_date.is_a?(Time)
39 | maybe_a_date
40 | elsif maybe_a_date.is_a?(Date)
41 | maybe_a_date.to_time
42 | elsif maybe_a_date.is_a?(String)
43 | begin
44 | Liquid::Utils.to_date(maybe_a_date)
45 | rescue => e
46 | $log.warn("Failed to parse date ‘#{maybe_a_date}’: #{e}")
47 | nil
48 | end
49 | end
50 | end
51 |
52 | def titlecase(input)
53 | unless input.nil?
54 | input.gsub(/[ _-]+/, ' ').gsub(/(\A)?(\w)(\w*)([^\s]*)(\z)?/) do |match|
55 | if $1 || $5 || $3.size > 2
56 | $2.upcase + $3 + $4
57 | else
58 | $&
59 | end
60 | end
61 | end
62 | end
63 |
64 | def relative_path(path, dir)
65 | Pathname.new(path).relative_path_from(Pathname.new(dir)).to_s
66 | end
67 |
68 | def deep_merge(old_hash, new_hash)
69 | old_hash.merge(new_hash) do |key, old_value, new_value|
70 | if old_value.is_a?(Hash) && new_value.is_a?(Hash)
71 | deep_merge(old_value, new_value)
72 | else
73 | new_hash.key?(key) ? new_value : old_value
74 | end
75 | end
76 | end
77 |
78 | def find_files(dir, glob)
79 | if File.directory?(dir)
80 | Find.find(dir).each do |path|
81 | if File.fnmatch?(glob, path, File::FNM_PATHNAME|File::FNM_CASEFOLD|File::FNM_EXTGLOB) && !File.directory?(path)
82 | yield(path)
83 | elsif File.basename(path).start_with?('.')
84 | Find.prune
85 | end
86 | end
87 | end
88 | end
89 | end
90 |
91 | module Jekyll
92 | class << self
93 | attr_accessor :sites
94 | end
95 |
96 | class Plugin
97 | PRIORITIES = {
98 | :lowest => -500,
99 | :lower => -250,
100 | :low => -100,
101 | :normal => 0,
102 | :high => 100,
103 | :higher => 250,
104 | :highest => 500,
105 | }
106 |
107 | def self.priority(priority = nil)
108 | if priority.is_a?(Symbol) && PRIORITIES.key?(priority)
109 | @priority = PRIORITIES[priority]
110 | elsif priority.is_a?(Numeric)
111 | @priority = priority
112 | end
113 | @priority || PRIORITIES[:normal]
114 | end
115 |
116 | def self.<=>(other)
117 | other.priority <=> self.priority
118 | end
119 |
120 | def self.safe(flag)
121 | end
122 |
123 | def initialize(config = {})
124 | @config = config
125 | end
126 |
127 | # ====================
128 | # = Track subclasses =
129 | # ====================
130 |
131 | @@plugins = []
132 |
133 | def self.inherited(subclass)
134 | @@plugins << subclass
135 | end
136 |
137 | def self.plugins_of_type(klass)
138 | @@plugins.select { |candidate| candidate < klass }
139 | end
140 | end
141 |
142 | Generator = Class.new(Plugin)
143 | Converter = Class.new(Plugin)
144 | Command = Class.new(Plugin)
145 |
146 | class Hooks
147 | def self.register(collection, event, &proc)
148 | @hooks ||= []
149 | @hooks << { :collection => collection, :event => event, :proc => proc }
150 | $log.debug("Register hook for event ‘#{event}’ in collection ‘#{collection}’")
151 | end
152 |
153 | def self.invoke(collection, event, file: nil, payload: nil)
154 | @hooks.select { |hook| hook[:event] == event && hook[:collection] == collection }.each do |hook|
155 | $log.debug("TODO Invoke #{hook[:proc]}")
156 | end
157 | end
158 | end
159 | end
160 |
161 | module Glim
162 | class Filter < Jekyll::Plugin
163 | class << self
164 | def transforms(hash = nil)
165 | @transforms = hash || @transforms
166 | end
167 |
168 | def extensions(hash = nil)
169 | @extensions = hash || @extensions
170 | end
171 | end
172 |
173 | def initialize(site)
174 | end
175 |
176 | def transform(content, page, options)
177 | if options[:filters].empty?
178 | content
179 | else
180 | options[:filters].shift.transform(content, page, options)
181 | end
182 | end
183 | end
184 |
185 | module Filters
186 | class Liquid < Glim::Filter
187 | transforms 'liquid' => '*'
188 | priority :lower
189 |
190 | def initialize(site)
191 | @site, @options = site, {
192 | :strict_variables => site.config['liquid']['strict_variables'],
193 | :strict_filters => site.config['liquid']['strict_filters'],
194 | }
195 | end
196 |
197 | def transform(content, page, options)
198 | begin
199 | template = ::Liquid::Template.parse(Glim.preprocess_template(content))
200 | template.render!({ 'site' => @site.to_liquid, 'page' => page.to_liquid }, @options)
201 | rescue ::Liquid::Error => e
202 | raise Glim::Error.new("While expanding liquid tags in: #{page}", e)
203 | end
204 | end
205 | end
206 |
207 | class Markdown < Glim::Filter
208 | transforms 'markdown' => 'html'
209 | priority :lower
210 |
211 | def initialize(site)
212 | self.class.extensions('markdown' => site.config['markdown_ext'].split(',').map { |ext| ext.strip }.reject { |e| e.empty? })
213 |
214 | legacy = {
215 | 'syntax_highlighter' => site.config['highlighter'],
216 | 'syntax_highlighter_opts' => {},
217 | }
218 |
219 | @options = legacy.merge(site.config['kramdown']).map { |key, value| [ key.to_sym, value ] }.to_h
220 | end
221 |
222 | def transform(content, page, options)
223 | document = Kramdown::Document.new(content, @options)
224 | options[:warnings].concat(document.warnings) if options[:warnings] && @options[:show_warnings]
225 | document.to_html
226 | end
227 | end
228 |
229 | class Layout < Glim::Filter
230 | transforms '*' => 'output'
231 | priority :lower
232 |
233 | def initialize(site)
234 | @site, @options = site, {
235 | :strict_variables => site.config['liquid']['strict_variables'],
236 | :strict_filters => site.config['liquid']['strict_filters'],
237 | }
238 | end
239 |
240 | def layouts
241 | unless @layouts
242 | @layouts = load_layouts(@site.layouts_dir)
243 | if dir = @site.theme_dir('_layouts')
244 | @layouts = load_layouts(dir, @site.theme_dir('..')).merge(@layouts)
245 | end
246 | end
247 | @layouts
248 | end
249 |
250 | def load_layouts(dir, parent = File.dirname(dir))
251 | layouts = {}
252 | Util.find_files(dir, '**/*.*') do |path|
253 | relative_basename = Util.relative_path(path, dir).chomp(File.extname(path))
254 | layout = Glim::FileItem.new(@site, path, directory: parent)
255 | layouts[relative_basename] = layout
256 | end
257 | layouts
258 | end
259 |
260 | def templates(name, file)
261 | @templates ||= {}
262 | @templates[name] ||= ::Liquid::Template.parse(Glim.preprocess_template(file.content('liquid')))
263 | end
264 |
265 | def transform(content, page, options)
266 | return content if %w( .sass .scss .coffee ).include?(page.extname)
267 |
268 | begin
269 | layout_data = {}
270 | layout_file = page
271 | layout_name = page.data['layout']
272 | Profiler.group(self.class.name + '::' + layout_file.data['layout']) do
273 | while layout_file = self.layouts[layout_name]
274 | layout_data.merge!(layout_file.data)
275 | template = templates(layout_name, layout_file)
276 | content = template.render!({ 'site' => @site.to_liquid, 'page' => page.to_liquid, 'layout' => HashDrop.new(layout_data), 'content' => content }, @options)
277 | layout_name = layout_file.data['layout']
278 | end
279 | end if self.layouts.has_key?(layout_name)
280 | rescue ::Liquid::Error => e
281 | raise Glim::Error.new("While using layout: #{layout_file}", e)
282 | end
283 | content
284 | end
285 | end
286 | end
287 |
288 | class AssociativeArrayDrop < Liquid::Drop
289 | include Enumerable
290 |
291 | def initialize(hash, method = :last)
292 | @hash, @values = hash, hash.to_a.sort { |a, b| a.first <=> b.first }.map { |a| a.send(method) }
293 | end
294 |
295 | def each(&block)
296 | @values.each(&block)
297 | end
298 |
299 | def liquid_method_missing(name)
300 | if @hash.key?(name)
301 | @hash[name]
302 | elsif name.is_a?(Numeric)
303 | @values[name]
304 | end
305 | end
306 | end
307 |
308 | class HashDrop < Liquid::Drop
309 | include Enumerable
310 |
311 | def each(&block)
312 | @hash.each(&block)
313 | end
314 |
315 | def liquid_method_missing(name)
316 | @hash[name]
317 | end
318 |
319 | def initialize(hash)
320 | @hash = hash
321 | end
322 | end
323 |
324 | class Drop < Liquid::Drop
325 | def liquid_method_missing(name)
326 | res = if @whitelist.include?(name.to_sym)
327 | @object.__send__(name.to_sym)
328 | elsif @hash && @hash.key?(name)
329 | @hash[name]
330 | elsif @proc
331 | @proc.call(name)
332 | end
333 |
334 | res.is_a?(Hash) ? HashDrop.new(res) : res
335 | end
336 |
337 | def initialize(object, whitelist, hash = {}, &proc)
338 | @object, @whitelist, @hash, @proc = object, whitelist, hash, proc
339 | end
340 | end
341 |
342 | # ============
343 | # = FileItem =
344 | # ============
345 |
346 | class FileItem
347 | attr_reader :path, :warnings
348 | attr_accessor :next, :previous, :collection_object
349 |
350 | def to_liquid
351 | whitelist = [ :url, :path, :relative_path, :name, :title, :next, :previous, :collection, :date, :basename, :extname, :output, :excerpt ]
352 | Drop.new(self, whitelist, self.data) do |key|
353 | case key
354 | when 'content' then self.content('pre-output')
355 | when 'markdown' then self.content('markdown')
356 | end
357 | end
358 | end
359 |
360 | def to_s
361 | self.link_path
362 | end
363 |
364 | def initialize(site, path = nil, directory: nil, content: nil, frontmatter: nil, defaults: {})
365 | @site = site
366 | @path = path
367 | @directory = directory
368 | @defaults = defaults
369 |
370 | @content = content
371 | @frontmatter = frontmatter
372 | @has_frontmatter = frontmatter || content ? true : nil
373 | end
374 |
375 | def title
376 | self.data['title'] || (@collection_object ? Util.titlecase(date_and_basename_without_ext.last) : nil)
377 | end
378 |
379 | def name
380 | File.basename(@path) unless @path.nil?
381 | end
382 |
383 | def basename
384 | File.basename(@path, '.*') unless @path.nil?
385 | end
386 |
387 | def extname
388 | File.extname(@path) unless @path.nil?
389 | end
390 |
391 | def directory
392 | @directory || (@collection_object ? @collection_object.directory : @site.source_dir) unless @path.nil?
393 | end
394 |
395 | def relative_path
396 | @path && @path.start_with?('/') ? Util.relative_path(@path, self.directory) : @path
397 | end
398 |
399 | def link_path
400 | Util.relative_path(@path, @collection_object ? File.dirname(@collection_object.directory) : self.directory) if @path
401 | end
402 |
403 | def collection
404 | @collection_object ? @collection_object.label : nil
405 | end
406 |
407 | def date
408 | @date ||= if date = Util.parse_date(self.data['date'])
409 | date
410 | elsif date = date_and_basename_without_ext.first
411 | Time.new(*date.split('-'))
412 | elsif @path && File.exist?(@path)
413 | File.mtime(@path)
414 | else
415 | Time.now
416 | end
417 | end
418 |
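    | # The page URL comes from the `permalink` front matter, falling back to a
    | # default that depends on whether the page is an index and whether it outputs
    | # HTML; the expanded path is then resolved against the site URL, honoring
    | # optional `domain` and `scheme` front matter overrides.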
419 | def url
420 | if @url.nil?
421 | permalink = self.data['permalink']
422 | if permalink.nil?
423 | if self.basename == 'index'
424 | permalink = '/:path/'
425 | elsif self.output_ext == '.html'
426 | permalink = '/:path/:basename'
427 | else
428 | permalink = '/:path/:basename:output_ext'
429 | end
430 | end
431 |
432 | base = URI(@site.url)
433 | scheme = self.data['scheme']
434 | domain = self.data['domain']
435 | path = expand_permalink(permalink)
436 |
437 | @url = if domain && base.host == @site.config['host'] && base.port == @site.config['port']
438 | base.merge('/' + domain + path).to_s
439 | elsif base.relative?
440 | path
441 | else
442 | base.hostname = domain unless domain.nil?
443 | base.scheme = scheme unless scheme.nil?
444 | base.merge(path).to_s
445 | end
446 | end
447 | @url
448 | end
449 |
450 | def output_path(output_dir)
451 | res = expand_permalink(self.data['permalink'] || '/:path/:basename:output_ext')
452 | if res.end_with?('/')
453 | res << 'index' << (self.output_ext || '.html')
454 | elsif File.extname(res).empty? && self.data['permalink'] && self.output_ext
455 | res << self.output_ext
456 | end
457 | File.expand_path(File.join(output_dir, self.data['domain'] || '.', res[1..-1]))
458 | end
459 |
460 | def output_ext
461 | frontmatter? && pipeline.output_ext(self.extname) || self.extname
462 | end
463 |
464 | # ==============
465 |
466 | def format
467 | self.data['format'] || pipeline.format_for_filename('liquid' + (self.extname || ''))
468 | end
469 |
470 | def frontmatter?
471 | load_frontmatter
472 | @has_frontmatter
473 | end
474 |
475 | def data
476 | @data ||= @defaults.merge(load_frontmatter)
477 | end
478 |
479 | def merge_data!(data, source: "YAML front matter")
480 | self.data.merge!(data)
481 | @pipeline = nil
482 | @excerpt = nil
483 | @data
484 | end
485 |
486 | def write?
487 | !@collection_object || @collection_object.write?
488 | end
489 |
490 | def content(format = 'post-liquid')
491 | pipeline.transform(page: self, content: load_content, from: self.format, to: format, options: { :warnings => @warnings ||= [] })
492 | end
493 |
494 | def excerpt
495 | @excerpt ||= self.data['excerpt']
496 | if @excerpt.nil?
497 | parts = content('post-liquid').split(@site.config['excerpt_separator'], 2)
498 | if parts.size == 2
499 | @excerpt = @site.create_pipeline.transform(page: self, content: parts.first, from: self.format, to: 'pre-output')
500 | else
501 | @excerpt = content('pre-output')
502 | end
503 | end
504 |
505 | @excerpt
506 | end
507 |
508 | def output
509 | content('output')
510 | end
511 |
512 | private
513 |
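    | # Front matter is only recognized when the file starts with a "---\n" line;
    | # the parsed hash is cached via Glim::Cache, keyed on the file's path.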
514 | def load_frontmatter
515 | if @has_frontmatter.nil? && @path
516 | @frontmatter = Glim::Cache.getset(@path, :frontmatter) do
517 | File.open(@path) do |io|
518 | if io.read(4) == "---\n"
519 | data = ''
520 | while line = io.gets
521 | break if line == "---\n"
522 | data << line
523 | end
524 | data.strip.empty? ? {} : YAML.load(data)
525 | else
526 | nil
527 | end
528 | end
529 | end
530 | @has_frontmatter = @frontmatter != nil
531 | end
532 | @frontmatter ||= {}
533 | end
534 |
535 | def load_content
536 | if @content.nil? && @path
537 | File.open(@path) do |io|
538 | @content = io.read
539 | @content = @content.split(/^---\n/, 3).last if @content.start_with?("---\n")
540 | end
541 | end
542 | @content ||= ''
543 | end
544 |
545 | def pipeline
546 | @pipeline ||= @site.create_pipeline
547 | end
548 |
549 | def date_and_basename_without_ext
550 | if self.basename && self.basename =~ /^(\d{2}(?:\d{2})?-\d{1,2}-\d{1,2}-)?(.+)$/
551 | Regexp.last_match.captures
552 | else
553 | []
554 | end
555 | end
556 |
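    | # Expand a permalink template: the named styles (date, pretty, ordinal, none)
    | # are translated to their templates first, then `:variable` and `{:variable}`
    | # placeholders (title, slug, basename, categories, date parts, etc.) are
    | # substituted, and duplicate slashes are collapsed.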
557 | def expand_permalink(permalink)
558 | permalink = case permalink
559 | when 'date' then '/:categories/:year/:month/:day/:title:output_ext'
560 | when 'pretty' then '/:categories/:year/:month/:day/:title/'
561 | when 'ordinal' then '/:categories/:year/:y_day/:title:output_ext'
562 | when 'none' then '/:categories/:title:output_ext'
563 | else permalink
564 | end
565 |
566 | permalink.gsub(/\{:(\w+)\}|:(\w+)/) do
567 | case $1 || $2
568 | when 'title' then !self.frontmatter? ? File.basename(@path) : self.data['slug'] || Util.slugify(date_and_basename_without_ext.last, preserve_case: true)
569 | when 'slug' then !self.frontmatter? ? File.basename(@path) : self.data['slug'] || Util.slugify(date_and_basename_without_ext.last)
570 | when 'name' then !self.frontmatter? ? File.basename(@path) : Util.slugify(date_and_basename_without_ext.last)
571 | when 'basename' then self.basename
572 |
573 | when 'collection' then self.collection
574 | when 'output_ext' then self.output_ext
575 |
576 | when 'num' then self.data['paginator'].index
577 |
578 | when 'digest' then Digest::MD5.hexdigest(self.output) rescue ''
579 |
580 | when 'year' then self.date.strftime("%Y")
581 | when 'month' then self.date.strftime("%m")
582 | when 'day' then self.date.strftime("%d")
583 | when 'hour' then self.date.strftime("%H")
584 | when 'minute' then self.date.strftime("%M")
585 | when 'second' then self.date.strftime("%S")
586 | when 'i_day' then self.date.strftime("%-d")
587 | when 'i_month' then self.date.strftime("%-m")
588 | when 'short_month' then self.date.strftime("%b")
589 | when 'short_year' then self.date.strftime("%y")
590 | when 'y_day' then self.date.strftime("%j")
591 |
592 | when 'categories' then
593 | items = self.data['categories'] || ''
594 | items = items.split(' ') if items.is_a?(String)
595 | items.map { |category| Util.slugify(category) }.join('/')
596 |
597 | when 'path' then
598 | path = File.dirname(@path)
599 | if path.start_with?('/')
600 | path = Util.relative_path(path, File.expand_path(self.data['base_dir'] || '.', self.directory))
601 | end
602 | path == '.' ? '' : path
603 |
604 | else
605 | $log.warn("#{self}: Unknown permalink variable: ‘#{ $1 || $2 }’")
606 | $&
607 | end
608 | end.gsub(%r{//+}, '/')
609 | end
610 | end
611 |
612 | # ==============
613 | # = Collection =
614 | # ==============
615 |
616 | class Collection
617 | attr_reader :label, :directory, :docs, :files, :docs_and_files
618 |
619 | def to_liquid
620 | whitelist = [ :label, :docs, :files, :relative_directory, :directory, :output ]
621 | Drop.new(self, whitelist) do |key|
622 | case key
623 | when 'categories' then AssociativeArrayDrop.new(categories, :first)
624 | when 'tags' then AssociativeArrayDrop.new(tags, :first)
625 | else @data[key]
626 | end
627 | end
628 | end
629 |
630 | def initialize(site, label, directory, docs_and_files, data)
631 | docs_and_files.each { |file| file.collection_object = self }
632 |
633 | published = lambda do |file|
634 | file.frontmatter? &&
635 | (site.config['show_drafts'] || file.data['draft'] != true) &&
636 | (site.config['unpublished'] || file.data['published'] != false) &&
637 | (site.config['future'] || file.date < Time.now)
638 | end
639 | docs = docs_and_files.select(&published)
640 | files = docs_and_files.reject(&published)
641 |
642 | sort_property = data['sort_by']
643 | docs = docs.sort_by { |file| (sort_property && file.respond_to?(sort_property)) ? file.send(sort_property) : file.basename }
644 | docs.each_cons(2) { |first, second| first.next, second.previous = second, first }
645 | docs.reverse! if data['sort_descending']
646 |
647 | @site = site
648 | @label = label
649 | @directory = directory
650 | @docs, @files = docs, files
651 | @docs_and_files = docs_and_files
652 | @data = data
653 | end
654 |
655 | def generated_files(site)
656 | res = []
657 | %w( categories tags ).each do |type|
658 | if permalink = @data.dig(type, 'permalink')
659 | self.send(type.to_sym).each do |name, hash|
660 | data = {}
661 | data.merge!(site.defaults_for("", "#{@label}.#{type}"))
662 | data.merge!(@data[type])
663 | data.merge!({
664 | 'title' => name,
665 | 'slug' => Util.slugify(name),
666 | 'permalink' => permalink,
667 | 'posts' => hash['posts'],
668 | })
669 |
670 | res << Glim::FileItem.new(site, frontmatter: data).tap { |page| hash['url'] = page.url }
671 | end
672 | end
673 | end
674 | res
675 | end
676 |
677 | def write?
678 | @data.has_key?('output') ? @data['output'] : %w( defaults categories ).any? { |key| @data.has_key?(key) }
679 | end
680 |
681 | def relative_directory
682 | Util.relative_path(@directory, @site.source_dir)
683 | end
684 |
685 | def categories
686 | @categories ||= harvest('category', 'categories')
687 | end
688 |
689 | def tags
690 | @tags ||= harvest('tags')
691 | end
692 |
693 | private
694 |
695 | def harvest(*fields)
696 | res = {}
697 | self.docs.each do |page|
698 | fields.each do |field|
699 | if values = page.data[field]
700 | values = values.split(' ') if values.is_a?(String)
701 | values.each do |value|
702 | (res[value] ||= []) << page
703 | end
704 | end
705 | end
706 | end
707 | res.map { |field_value, pages| [ field_value, { 'posts' => pages } ] }.to_h
708 | end
709 | end
710 |
711 | # =============
712 | # = Paginator =
713 | # =============
714 |
715 | class Paginator
716 | attr_reader :posts
717 | attr_accessor :pages, :index, :next, :previous, :first, :last
718 |
719 | def to_liquid
720 | whitelist = [ :posts, :pages, :index, :next, :previous, :first, :last ]
721 | Drop.new(self, whitelist)
722 | end
723 |
724 | def initialize(posts, index)
725 | @posts = posts
726 | @index = index
727 | end
728 | end
729 |
730 | # ========
731 | # = Site =
732 | # ========
733 |
734 | class Site
735 | attr_accessor :url, :time, :config
736 | attr_reader :project_dir
737 |
738 | class << self
739 | def dir_reader(*dirs)
740 | dirs.each do |dir|
741 | define_method(dir) do
742 | value = @config[dir.to_s] || '.'
743 | if value.is_a?(Array)
744 | value.map { |path| File.expand_path(path, self.project_dir) }
745 | else
746 | File.expand_path(value, self.project_dir)
747 | end
748 | end
749 | end
750 | end
751 | end
752 |
753 | dir_reader :source_dir, :collections_dir, :data_dir, :layouts_dir, :includes_dir, :plugins_dir
754 |
755 | def to_liquid
756 | whitelist = [ :url, :time, :data, :pages, :html_pages, :static_files, :html_files, :documents, :posts, :related_posts, :categories, :tags ]
757 | Drop.new(self, whitelist) do |key|
758 | if collections.key?(key)
759 | collections[key].docs
760 | elsif key == 'collections'
761 | AssociativeArrayDrop.new(collections)
762 | else
763 | @config[key]
764 | end
765 | end
766 | end
767 |
768 | def initialize(options = {})
769 | @config = options
770 |
771 | @project_dir = File.expand_path(@config['source'])
772 | @url = @config['url']
773 | @time = Time.now
774 |
775 | Liquid::Template.file_system = Glim::LocalFileSystem.new(self.includes_dir, self.theme_dir('_includes'))
776 | Liquid::Template.error_mode = @config['liquid']['error_mode'].to_sym
777 |
778 | Jekyll.sites = [ self ]
779 | load_plugins
780 | run_generators
781 | end
782 |
783 | def files
784 | unless @files
785 | @files = load_pages(self.source_dir, @config['include'], @config['exclude'])
786 | if asset_dir = self.theme_dir('assets')
787 | @files += load_pages(asset_dir, @config['include'], @config['exclude'], directory: File.dirname(asset_dir))
788 | end
789 | end
790 | @files
791 | end
792 |
793 | def collections
794 | @collections ||= load_collections(@config['collections'], self.collections_dir, @config['include'], @config['exclude'])
795 | end
796 |
797 | def data
798 | @data ||= load_data(self.data_dir)
799 | end
800 |
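    | # Liquid template rendered for each `redirect_from` stub page. The markup below
    | # is a minimal sketch of such a redirect page; it relies on the `redirect_to`
    | # value set in the stub's front matter (see generated_files further down).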
801 | REDIRECT_TEMPLATE = <<~HTML
802 | <!DOCTYPE html>
803 | <html lang="en-US">
804 | <meta charset="utf-8">
805 | <title>Redirecting…</title>
806 | <link rel="canonical" href="{{ page.redirect_to }}">
807 | <script>location = "{{ page.redirect_to }}"</script>
808 | <meta http-equiv="refresh" content="0; url={{ page.redirect_to }}">
809 | <meta name="robots" content="noindex">
810 | <h1>Redirecting…</h1>
811 | <a href="{{ page.redirect_to }}">Click here if you are not redirected.</a>
812 | </html>
813 | HTML
814 |
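    | # Pages synthesized at build time: category/tag index pages from collections,
    | # redirect stubs (plus a /redirects.json manifest) for `redirect_from` entries,
    | # and additional page clones for files that use `paginate`.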
815 | def generated_files
816 | res = self.collections.map { |_, collection| collection.generated_files(self) }.flatten
817 |
818 | domains = {}
819 |
820 | files = [ *self.files, *self.documents ]
821 | files.each do |file|
822 | if redirect_from = file.data['redirect_from']
823 | redirect_from = redirect_from.split(' ') if redirect_from.is_a?(String)
824 | redirect_from.each do |path|
825 | domain = file.data['domain'] || URI(self.url).host
826 | domains[domain] ||= {}
827 | domains[domain][path] = file.url
828 |
829 | if path.end_with?('/')
830 | path = path + file.name
831 | elsif File.extname(path).empty?
832 | path = path + file.extname
833 | end
834 | res << Glim::FileItem.new(self, frontmatter: { 'permalink' => path, 'domain' => file.data['domain'], 'redirect_to' => file.url }, content: REDIRECT_TEMPLATE)
835 | end
836 | $log.warn("Generated redirect to non-HTML file: #{file}") unless file.output_ext == '.html'
837 | end
838 | end
839 |
840 | unless domains.empty?
841 | res << Glim::FileItem.new(self, frontmatter: { 'permalink' => '/redirects.json' }, content: { 'domains' => domains }.to_json + "\n")
842 | end
843 |
844 | files.each do |file|
845 | if paginate = file.data['paginate']
846 | per_page = paginate['per_page'] || 25
847 | permalink = paginate['permalink']
848 |
849 | items = []
850 | if key = paginate['collection']
851 | $log.warn("No collection named #{key}") unless collections.key?(key)
852 | items = collections[key].docs if collections.key?(key)
853 | permalink ||= "/#{key}/page/:num/"
854 | elsif key = paginate['data']
855 | $log.warn("No data named #{key}") unless self.data.key?(key)
856 | items = self.data[key] if self.data.key?(key)
857 | permalink ||= "/#{key}/page/:num/"
858 | end
859 |
860 | if sort_property = paginate['sort_by']
861 | items = items.sort_by do |item|
862 | if item.is_a?(Hash)
863 | item[sort_property]
864 | elsif items.is_a?(Hash) && item.is_a?(Array) && item.last.is_a?(Hash)
865 | item.last[sort_property]
866 | elsif item.respond_to?(sort_property)
867 | item.send(sort_property)
868 | else
869 | raise "Pagination failed for #{key} in #{file}: Unknown sort property: #{sort_property}"
870 | end
871 | end
872 | elsif paginate['sort']
873 | items = items.sort
874 | end
875 | items = items.reverse if paginate['sort_descending']
876 |
877 | chunks = items.each_slice(per_page)
878 | pages = chunks.each_with_index.map do |posts, i|
879 | paginator = Paginator.new(posts, i + 1)
880 | if i.zero?
881 | file.merge_data!({ 'paginator' => paginator })
882 | file
883 | else
884 | clone = Glim::FileItem.new(self, file.path, defaults: file.data)
885 | clone.merge_data!({ 'paginator' => paginator, 'permalink' => permalink })
886 | clone
887 | end
888 | end
889 |
890 | pages.each { |page| page.data['paginator'].pages = pages }
891 |
892 | pages.each_cons(2) do |first, second|
893 | first.data['paginator'].next, second.data['paginator'].previous = second, first
894 | first.data['paginator'].last, second.data['paginator'].first = pages.last, pages.first
895 | end
896 |
897 | res += pages[1..-1] unless pages.empty?
898 | end
899 | end
900 |
901 | res
902 | end
903 |
904 | def files_and_documents
905 | @files_and_documents ||= [ *self.files, *self.documents, *self.generated_files ]
906 | end
907 |
908 | def symlinks
909 | files_and_documents # Trigger dir scan
910 | @symlinks
911 | end
912 |
913 | # ================================
914 | # = Return a subset of all files =
915 | # ================================
916 |
917 | def pages
918 | files.select { |file| file.frontmatter? }
919 | end
920 |
921 | def html_pages
922 | pages.select { |page| page.extname == '.html' }
923 | end
924 |
925 | def static_files
926 | files.reject { |file| file.frontmatter? }
927 | end
928 |
929 | def html_files
930 | static_files.select { |page| page.extname == '.html' }
931 | end
932 |
933 | def documents
934 | collections.map { |_, collection| collection.docs_and_files }.flatten
935 | end
936 |
937 | def posts
938 | collections['posts'].docs if collections.key?('posts')
939 | end
940 |
941 | def related_posts # TODO
942 | []
943 | end
944 |
945 | def categories
946 | collections['posts'].categories.map { |category, hash| [ category, hash['posts'] ] }.to_h if collections.key?('posts')
947 | end
948 |
949 | def tags
950 | collections['posts'].tags.map { |tag, hash| [ tag, hash['posts'] ] }.to_h if collections.key?('posts')
951 | end
952 |
953 | # ============================
954 | # = These are not public API =
955 | # ============================
956 |
957 | def links
958 | if @links.nil?
959 | transform = [
960 | [ /([^\/]+)\.\w+$/, '\1' ],
961 | [ /\/index\.\w+$/, '/' ],
962 | [ /^index\.\w+$/, '.' ],
963 | ]
964 |
965 | @links = [ *self.files, *self.documents ].map { |file| [ file.link_path, file ] }.to_h
966 | @links.keys.each do |path|
967 | transform.each do |search, replace|
968 | shorthand = path.sub(search, replace)
969 | @links[shorthand] = @links[path] unless @links.key?(shorthand)
970 | end
971 | end
972 | end
973 | @links
974 | end
975 |
976 | def post_links
977 | @post_links ||= self.posts.map { |file| [ file.basename, file ] }.to_h
978 | end
979 |
980 | def theme_dir(subdir = '.')
981 | unless @did_load_theme
982 | @did_load_theme = true
983 |
984 | if theme = @config['theme']
985 | begin
986 | if theme_gem = Gem::Specification.find_by_name(theme)
987 | @theme_dir = theme_gem.full_gem_path
988 |
989 | $log.debug("Theme dependencies: #{theme_gem.runtime_dependencies.join(', ')}")
990 | theme_gem.runtime_dependencies.each do |dep|
991 | Glim.require(dep.name)
992 | end
993 |
994 | if sass_dir = self.theme_dir('_sass')
995 | Glim.require 'sass' unless defined?(::Sass)
996 | Glim.require 'glim-sass-converter'
997 | Sass.load_paths << sass_dir unless Sass.load_paths.include?(sass_dir)
998 | end
999 | end
1000 | rescue Gem::LoadError => e
1001 | $log.warn("Unable to load the #{theme} theme: #{e}")
1002 | end
1003 | end
1004 | end
1005 |
1006 | if @theme_dir && File.directory?(File.join(@theme_dir, subdir))
1007 | File.join(@theme_dir, subdir)
1008 | else
1009 | nil
1010 | end
1011 | end
1012 |
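    | # Collect front matter defaults from the `defaults` config entries whose
    | # `match` globs or `scope` (path/type/glob) apply to the given path and type;
    | # later entries win because each matching entry's `values` are merged on top.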
1013 | def defaults_for(path, type)
1014 | data = {}
1015 | @config['defaults'].each do |defaults|
1016 | if match = defaults['match']
1017 | next unless Array(match).any? do |glob|
1018 | glob = glob + '**/{*,.*}' if glob.end_with?('/')
1019 | File.fnmatch?(glob, path, File::FNM_PATHNAME|File::FNM_CASEFOLD|File::FNM_EXTGLOB)
1020 | end
1021 | end
1022 |
1023 | if scope = defaults['scope']
1024 | next if scope.key?('path') && !path.start_with?(scope['path'])
1025 | next if scope.key?('type') && type != scope['type']
1026 |
1027 | if globs = scope['glob']
1028 | globs = [ globs ] if globs.is_a?(String)
1029 | next unless globs.any? do |glob|
1030 | glob = glob + '**/{*,.*}' if glob.end_with?('/')
1031 | File.fnmatch?(glob, path, File::FNM_PATHNAME|File::FNM_CASEFOLD|File::FNM_EXTGLOB)
1032 | end
1033 | end
1034 | end
1035 |
1036 | data.merge!(defaults['values'])
1037 | end
1038 | data
1039 | end
1040 |
1041 | def create_pipeline
1042 | @pipeline_builder ||= PipelineBuilder.new(self)
1043 | Pipeline.new(@pipeline_builder)
1044 | end
1045 |
1046 | private
1047 |
1048 | def matches?(path, globs)
1049 | globs.find do |glob|
1050 | File.fnmatch?(glob, path) || File.fnmatch?(glob, File.basename(path))
1051 | end
1052 | end
1053 |
1054 | def run_generators
1055 | Jekyll::Plugin.plugins_of_type(Jekyll::Generator).sort.each do |klass|
1056 | Profiler.run("Letting #{klass} generate pages") do
1057 | begin
1058 | klass.new(@config).generate(self)
1059 | rescue => e
1060 | $log.error("Error running #{klass}#generate: #{e}")
1061 | end
1062 | end
1063 | end
1064 | end
1065 |
1066 | def load_plugins
1067 | @config['plugins'].each do |name|
1068 | Profiler.run("Loading #{name} plugin") do
1069 | Glim.require(name)
1070 | end
1071 | end
1072 |
1073 | Array(self.plugins_dir).each do |dir|
1074 | Util.find_files(dir, '**/*.rb') do |path|
1075 | Profiler.run("Loading #{Util.relative_path(path, dir)} plugin") do
1076 | Glim.require(path)
1077 | end
1078 | end
1079 | end
1080 | end
1081 |
1082 | def load_pages(dir, include_globs, exclude_globs, relative_to = dir, collection = nil, defaults = {}, directory: nil)
1083 | pages = []
1084 | if File.directory?(dir)
1085 | Find.find(dir) do |path|
1086 | next if path == dir
1087 | name = File.basename(path)
1088 | relative_path = Util.relative_path(path, relative_to)
1089 | dir_suffix = File.directory?(path) ? '/' : ''
1090 |
1091 | Find.prune if name.start_with?('_')
1092 | unless matches?(relative_path + dir_suffix, include_globs)
1093 | Find.prune if name.start_with?('.') || matches?(relative_path + dir_suffix, exclude_globs)
1094 | end
1095 |
1096 | settings = defaults_for(relative_path, collection || 'pages').merge(defaults)
1097 |
1098 | if File.symlink?(path)
1099 | begin
1100 | realpath = Pathname.new(path).realpath.to_s
1101 |
1102 | if File.directory?(realpath)
1103 | relative_path = Util.relative_path(path, settings['base_dir']) if settings.key?('base_dir')
1104 | (@symlinks ||= []) << { :name => relative_path, :realpath => realpath, :data => settings }
1105 | end
1106 | rescue Errno::ENOENT
1107 | $log.warn("No target for symbolic link: #{relative_path} → #{File.readlink(path)}")
1108 | next
1109 | end
1110 | end
1111 |
1112 | pages << Glim::FileItem.new(self, path, directory: directory || dir, defaults: settings) if File.file?(path)
1113 | end
1114 | end
1115 | pages
1116 | end
1117 |
1118 | def load_collections(collections, dir, include_globs, exclude_globs)
1119 | res = {}
1120 | collections.each do |collection, data|
1121 | collection_dir = File.expand_path("_#{collection}", dir)
1122 | if File.directory?(collection_dir)
1123 | Profiler.run("Loading #{collection} collection") do
1124 | defaults = { 'permalink' => data['permalink'], 'base_dir' => collection_dir }
1125 | defaults.merge!(data['defaults']) if data.key?('defaults')
1126 | files = load_pages(collection_dir, include_globs, exclude_globs, dir, collection, defaults)
1127 | if data.key?('drafts_dir')
1128 | drafts_dir = File.expand_path(data['drafts_dir'], dir)
1129 | defaults.merge!({ 'base_dir' => drafts_dir, 'draft' => true })
1130 | files += load_pages(drafts_dir, include_globs, exclude_globs, dir, collection, defaults)
1131 | end
1132 | res[collection] = Collection.new(self, collection, collection_dir, files, data)
1133 | end
1134 | end
1135 | end
1136 | res
1137 | end
1138 |
1139 | def load_data(dir)
1140 | data = {}
1141 | Util.find_files(dir, '**/*.{yml,yaml,json,csv,tsv}') do |path|
1142 | relative_basename = Util.relative_path(path, dir).chomp(File.extname(path))
1143 | begin
1144 | res = case File.extname(path).downcase
1145 | when '.yaml', '.yml'
1146 | YAML.load_file(path)
1147 | when '.json'
1148 | JSON.parse(File.read(path))
1149 | when '.csv'
1150 | CSV.read(path, :headers => true).map(&:to_hash)
1151 | when '.tsv'
1152 | CSV.read(path, :headers => true, :col_sep => "\t").map(&:to_hash)
1153 | end
1154 |
1155 | *keys, last = relative_basename.split('/')
1156 | keys.inject(data) { |hash, key| hash[key] ||= {} }[last] = res
1157 | rescue => e
1158 | $log.error("Error loading data file ‘#{path}’: #{e}")
1159 | end
1160 | end
1161 | data
1162 | end
1163 |
1164 | class Pipeline
1165 | def initialize(builder)
1166 | @builder = builder
1167 | end
1168 |
1169 | def format_for_filename(filename)
1170 | input_ext = File.extname(filename)
1171 | from, _ = @builder.transformation_for_extension(input_ext)
1172 | from ? (filename.chomp(input_ext) + '.' + from) : filename
1173 | end
1174 |
1175 | def output_ext(input_ext)
1176 | _, to = @builder.transformation_for_extension(input_ext)
1177 | to ? ('.' + to) : nil
1178 | end
1179 |
1180 | def transform(page: nil, content: nil, from: 'liquid', to: 'output', options: {})
1181 | pipeline_for(from, to).inject(content) do |res, node|
1182 | node.transform(res, page, options)
1183 | end
1184 | end
1185 |
1186 | private
1187 |
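    | # Select which nodes to run for a target format: a plain target stops before
    | # the node that would transform away from it, `pre-<fmt>` stops before the
    | # node that produces <fmt>, and `post-<fmt>` stops right after the node that
    | # consumes it.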
1188 | def pipeline_for(from_format, to_format)
1189 | @pipeline ||= @builder.pipeline_for(from_format)
1190 |
1191 | is_pre_match, is_post_match = to_format.start_with?('pre-'), to_format.start_with?('post-')
1192 | is_full_match = !is_pre_match && !is_post_match
1193 |
1194 | nodes = []
1195 | @pipeline.each do |node|
1196 | break if is_full_match && node.from_format.start_with?(to_format) && !node.to_format.start_with?(to_format)
1197 | break if is_pre_match && node.to_format.start_with?(to_format[4..-1])
1198 | nodes << node
1199 | break if is_post_match && node.from_format.start_with?(to_format[5..-1]) && !node.to_format.start_with?(to_format[5..-1])
1200 | end
1201 |
1202 | nodes
1203 | end
1204 | end
1205 |
1206 | class PipelineBuilder
1207 | def initialize(site)
1208 | temp = []
1209 |
1210 | Jekyll::Plugin.plugins_of_type(Glim::Filter).each do |klass|
1211 | filter = klass.new(site)
1212 | filter.class.transforms.each do |from, to|
1213 | temp << [ filter, from, to ]
1214 | end
1215 | end
1216 |
1217 | @transformations = temp.sort_by do |filter, from, to|
1218 | [ (from == '*' ? +1 : -1), -filter.class.priority, (from.partition('.').first != to.partition('.').first ? +1 : -1) ]
1219 | end
1220 |
1221 | @cache = {}
1222 | end
1223 |
1224 | def transformation_for_extension(ext)
1225 | Jekyll::Plugin.plugins_of_type(Glim::Filter).sort.each do |klass|
1226 | klass.transforms.each do |from, to|
1227 | next if from == '*' || to == '*' || from.partition('.').first == to.partition('.').first
1228 |
1229 | input_extensions = if klass.extensions && klass.extensions.key?(from)
1230 | klass.extensions[from]
1231 | else
1232 | [ from ]
1233 | end
1234 |
1235 | input_extensions.each do |input_ext|
1236 | if ext == '.' + input_ext
1237 | return [ from, to ]
1238 | end
1239 | end
1240 | end if klass.transforms
1241 | end
1242 | nil
1243 | end
1244 |
1245 | def pipeline_for(from_format)
1246 | (@cache[from_format] ||= create_pipeline(from_format)).map { |node| node.dup }
1247 | end
1248 |
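    | # Build the chain of filter nodes for a source format by repeatedly picking
    | # the next applicable transformation until none applies; wildcard ('*')
    | # filters are interleaved, and filters converting between the same pair of
    | # base formats are grouped into a single node.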
1249 | def create_pipeline(format)
1250 | result_nodes, transformations = [], @transformations.dup
1251 | while transformation = transformations.find { |_, from, to| from == format || from == format.partition('.').first || (from == '*' && to.partition('.').first != format.partition('.').first) }
1252 | filter, from, to = *transformation
1253 |
1254 | format = if from == '*' && to == '*'
1255 | format
1256 | elsif from == '*'
1257 | "#{to}.#{format}"
1258 | elsif to == '*'
1259 | format.partition('.').last
1260 | else
1261 | to
1262 | end
1263 |
1264 | filters = [ filter ]
1265 |
1266 | if from.partition('.').first != to.partition('.').first
1267 | filters = transformations.select { |_, filter_from, filter_to| from == filter_from && to == filter_to }.map { |f, _, _| f }
1268 | transformations.reject! { |_, filter_from, filter_to| from == filter_from && to == filter_to }
1269 | else
1270 | transformations.delete(transformation)
1271 | end
1272 |
1273 | result_nodes << PipelineNode.new(filters, from, to)
1274 | end
1275 | result_nodes
1276 | end
1277 |
1278 | class PipelineNode
1279 | attr_reader :from_format, :to_format
1280 |
1281 | def initialize(filters, from_format, to_format)
1282 | @filters, @from_format, @to_format = filters, from_format, to_format
1283 | end
1284 |
1285 | def dup
1286 | PipelineNode.new(@filters, @from_format, @to_format)
1287 | end
1288 |
1289 | def transform(content, page, options)
1290 | if @cache.nil?
1291 | filter, *filters = @filters
1292 | Profiler.group(filter.class.name) do
1293 | @cache = filter.transform(content, page, options.merge({ :filters => filters })).freeze
1294 | end
1295 | end
1296 | @cache
1297 | end
1298 | end
1299 | end
1300 | end
1301 |
1302 | # ==========
1303 | # = Config =
1304 | # ==========
1305 |
1306 | class Config
1307 | def initialize(files: nil, defaults: nil, override: nil)
1308 | @files = files || %w( _config.yml )
1309 | @defaults = defaults || {}
1310 | @override = override || {}
1311 | end
1312 |
1313 | def site
1314 | @site ||= Glim::Site.new(self.to_h)
1315 | end
1316 |
1317 | def [](key)
1318 | self.to_h[key]
1319 | end
1320 |
1321 | def []=(key, value)
1322 | @override[key] = value
1323 | @loaded[key] = value if @loaded
1324 | end
1325 |
1326 | def reload
1327 | @loaded, @site = nil, nil
1328 | end
1329 |
1330 | def to_h
1331 | @loaded ||= load_config
1332 | end
1333 |
1334 | private
1335 |
1336 | DEFAULT_CONFIG = {
1337 | # Where things are
1338 | "source" => ".",
1339 | "source_dir" => ".",
1340 | "destination" => "_site",
1341 | "collections_dir" => ".",
1342 | "plugins_dir" => "_plugins",
1343 | "layouts_dir" => "_layouts",
1344 | "data_dir" => "_data",
1345 | "includes_dir" => "_includes",
1346 | "collections" => {
1347 | "posts" => {
1348 | "output" => true,
1349 | "sort_by" => "date",
1350 | "sort_descending" => true,
1351 | "drafts_dir" => "_drafts",
1352 | }
1353 | },
1354 |
1355 | # Handling Reading
1356 | "include" => [".htaccess"],
1357 | "exclude" => %w(
1358 | Gemfile Gemfile.lock node_modules vendor/bundle/ vendor/cache/ vendor/gems/
1359 | vendor/ruby/
1360 | ),
1361 | "keep_files" => [".git", ".svn"],
1362 | "encoding" => "utf-8",
1363 | "markdown_ext" => "markdown,mkdown,mkdn,mkd,md",
1364 |
1365 | # Filtering Content
1366 | "future" => false,
1367 | "unpublished" => false,
1368 |
1369 | # Plugins
1370 | "whitelist" => [],
1371 | "plugins" => [],
1372 |
1373 | # Conversion
1374 | "highlighter" => "rouge",
1375 | "excerpt_separator" => "\n\n",
1376 |
1377 | # Serving
1378 | "detach" => false, # default to not detaching the server
1379 | "port" => 4000,
1380 | "host" => "127.0.0.1",
1381 | "show_dir_listing" => true,
1382 | "livereload" => true,
1383 | "livereload_port" => 35729,
1384 |
1385 | # Output Configuration
1386 | "permalink" => "date",
1387 | "timezone" => nil, # use the local timezone
1388 |
1389 | "quiet" => false,
1390 | "verbose" => false,
1391 | "defaults" => [],
1392 |
1393 | "liquid" => {
1394 | "error_mode" => "warn",
1395 | "strict_filters" => false,
1396 | "strict_variables" => false,
1397 | },
1398 |
1399 | "kramdown" => {
1400 | "auto_ids" => true,
1401 | "toc_levels" => "1..6",
1402 | "entity_output" => "as_char",
1403 | "smart_quotes" => "lsquo,rsquo,ldquo,rdquo",
1404 | "input" => "GFM",
1405 | "hard_wrap" => false,
1406 | "footnote_nr" => 1,
1407 | "show_warnings" => false,
1408 | },
1409 | }
1410 |
1411 | def load_config
1412 | initial = Util.deep_merge(DEFAULT_CONFIG, @defaults)
1413 | initial = @files.inject(initial) do |mem, file|
1414 | begin
1415 | Util.deep_merge(mem, YAML.load_file(file))
1416 | rescue => e
1417 | raise "Unable to load #{file}: #{e}"
1418 | end
1419 | end
1420 | initial = Util.deep_merge(initial, @override)
1421 | initial['collections']['posts']['permalink'] ||= initial['permalink']
1422 | initial
1423 | end
1424 | end
1425 |
1426 | # ==================
1427 | # = Custom Require =
1428 | # ==================
1429 |
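    | # Jekyll plugin names that Glim either ignores ('jekyll' itself) or swaps for
    | # its bundled mock equivalents when a site's configured plugins or theme
    | # dependencies request them.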
1430 | REPLACEMENT_GEMS = {
1431 | 'jekyll' => nil,
1432 | 'jekyll-feed' => 'glim-feed',
1433 | 'jekyll-seo-tag' => 'glim-seo-tag',
1434 | }
1435 |
1436 | def self.require(name)
1437 | @@loaded_gems ||= {}
1438 |
1439 | unless @@loaded_gems.has_key?(name)
1440 | @@loaded_gems[name] = false
1441 |
1442 | begin
1443 | if REPLACEMENT_GEMS.key?(name)
1444 | if replacement = REPLACEMENT_GEMS[name]
1445 | $log.warn("Using #{replacement} instead of #{name}")
1446 | Kernel.require replacement
1447 | end
1448 | else
1449 | Kernel.require name
1450 | end
1451 | @@loaded_gems[name] = true
1452 | rescue LoadError => e
1453 | $log.warn("Error loading ‘#{name}’: #{e}")
1454 | end
1455 | end
1456 |
1457 | @@loaded_gems[name]
1458 | end
1459 | end
1460 |
1461 | def add_program_options(cmd)
1462 | cmd.option 'config', '--config CONFIG_FILE[,CONFIG_FILE2,...]', Array, 'Custom configuration file(s)'
1463 | cmd.option 'source', '-s', '--source SOURCE', 'Custom source directory'
1464 | cmd.option 'future', '--future', 'Publish posts with a future date'
1465 | cmd.option 'show_drafts', '-D', '--drafts', 'Publish posts in draft folders'
1466 | cmd.option 'unpublished', '--unpublished', 'Publish posts marked as unpublished'
1467 | cmd.option 'quiet', '-q', '--quiet', 'Silence output'
1468 | cmd.option 'verbose', '-V', '--verbose', 'Print verbose output'
1469 | cmd.option 'plugins_dir', '-p', '--plugins PLUGINS_DIR1[,PLUGINS_DIR2[,...]]', Array, 'Custom plugins directory'
1470 | cmd.option 'layouts_dir', '--layouts DIR', String, 'Custom layouts directory'
1471 | end
1472 |
1473 | Mercenary.program(:glim) do |p|
1474 | p.version Glim::VERSION
1475 | p.description 'Glim is a static site generator which is semi-compatible with Jekyll'
1476 | p.syntax 'glim [options]'
1477 |
1478 | p.command(:build) do |c|
1479 | c.syntax 'build'
1480 | c.description 'Build your site'
1481 | c.alias :b
1482 | c.option 'jobs', '-j', '--jobs N', Integer, 'Use N simultaneous jobs when building site'
1483 | c.option 'destination', '-d', '--destination DESTINATION', 'Set destination directory for generated files'
1484 | c.option 'profile', '--profile', 'Show timing information'
1485 | add_program_options(c)
1486 |
1487 | c.action do |args, options|
1488 | Profiler.enabled = options['profile']
1489 |
1490 | config = Glim::Config.new(files: options['config'], defaults: { 'environment' => 'production' }, override: options)
1491 | config['show_backtrace'] = true if c.trace
1492 | Glim::Commands.build(config)
1493 |
1494 | Profiler.run("Saving cache") do
1495 | Glim::Cache.save
1496 | end
1497 |
1498 | Profiler.enabled = false
1499 | end
1500 | end
1501 |
1502 | p.command(:clean) do |c|
1503 | c.syntax 'clean'
1504 | c.description 'Delete files in build directory'
1505 | c.option 'destination', '-d', '--destination DESTINATION', 'Set the directory to clean'
1506 | c.option 'dry_run', '--dry-run', 'Only show which files would be deleted'
1507 |
1508 | c.action do |args, options|
1509 | Glim::Commands.clean(Glim::Config.new(files: options['config'], defaults: { 'environment' => 'production' }, override: options))
1510 | end
1511 | end
1512 |
1513 | p.command(:serve) do |c|
1514 | c.syntax 'serve'
1515 | c.description 'Serve your site locally'
1516 | c.alias :s
1517 | c.option 'open_url', '-o', '--open-url', 'Launch your site in a browser'
1518 | c.option 'watch', '-w', '--[no-]watch', 'Watch for changes and rebuild'
1519 | c.option 'livereload', '-l', '--[no-]livereload', 'Automatically send reload to browser on changes'
1520 | c.option 'livereload_port', '--livereload-port [PORT]', Integer, 'Port to use for reload WebSocket server'
1521 | add_program_options(c)
1522 |
1523 | c.action do |args, options|
1524 | require_relative '../lib/local_server'
1525 | config = Glim::Config.new(files: options['config'], defaults: { 'environment' => 'development' }, override: options)
1526 | Glim::LocalServer.start(config)
1527 | end
1528 | end
1529 |
1530 | p.command(:profile) do |c|
1531 | c.syntax 'profile'
1532 | c.description 'Profile your site'
1533 | add_program_options(c)
1534 | c.action do |args, options|
1535 | Glim::Commands.profile(Glim::Config.new(files: options['config'], defaults: { 'environment' => 'production' }, override: options))
1536 | end
1537 | end
1538 |
1539 | Bundler.require(:glim_plugins) if defined?(Bundler) && File.exist?('Gemfile')
1540 |
1541 | Jekyll::Plugin.plugins_of_type(Jekyll::Command).sort.each do |klass|
1542 | klass.init_with_program(p)
1543 | end
1544 |
1545 | p.command(:help) do |c|
1546 | c.syntax 'help [subcommand]'
1547 | c.description 'Show this help message, optionally for a given subcommand'
1548 |
1549 | c.action do |args, _|
1550 | if cmd = args.shift
1551 | if p.has_command?(cmd.to_sym)
1552 | STDOUT.puts "#{p.commands[cmd.to_sym]}\n"
1553 | else
1554 | STDOUT.puts "Error: No subcommand named ‘#{cmd}’.\n"
1555 | STDOUT.puts "Valid commands are: #{p.commands.keys.join(', ')}\n"
1556 | end
1557 | else
1558 | STDOUT.puts "#{p}\n"
1559 | end
1560 | end
1561 | end
1562 |
1563 | p.default_command(:help)
1564 | end
1565 |
--------------------------------------------------------------------------------