forked from henrik/jekyll
/
site.rb
302 lines (267 loc) · 10.3 KB
/
site.rb
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
module Jekyll
  # Site drives a complete build: it reads configuration, loads layouts and
  # posts from +source+, renders everything, and writes the finished pages
  # into +dest+.
  class Site
    attr_accessor :config, :layouts, :posts, :collated_posts, :categories, :tags
    attr_accessor :source, :dest, :lsi, :pygments, :pygments_cache, :permalink_style, :permalink_date,
                  :sass, :post_defaults

    # Initialize the site
    #   +config+ is a Hash containing site configuration details
    #
    # Returns <Site>
    def initialize(config)
      # Clone so later mutation of the caller's hash cannot change the site.
      self.config          = config.clone
      self.source          = config['source']
      self.dest            = config['destination']
      self.lsi             = config['lsi']
      self.pygments        = config['pygments']
      self.pygments_cache  = config['pygments_cache']
      self.permalink_style = config['permalink'].to_sym
      # Normalize the optional permalink date format to exactly one leading
      # and one trailing slash. The capture is non-greedy so an existing
      # trailing slash is stripped rather than doubled
      # (previously "year/" became "/year//").
      self.permalink_date  = config['permalink_date'] &&
                             config['permalink_date'].sub(%r{\A/?(.*?)/?\Z}, '/\1/')
      self.post_defaults   = config['post_defaults'] || {}

      self.reset
      self.setup
    end

    # Clear all per-build state so the same Site instance can be processed
    # again from scratch.
    #
    # Returns nothing
    def reset
      self.layouts = {}
      self.posts = []
      # year -> month -> day -> [posts]. The block form gives every key its
      # own fresh nested hash/array (Hash.new(default) would share one
      # mutable default object across all keys).
      self.collated_posts = Hash.new { |years, y| years[y] = Hash.new { |months, m| months[m] = Hash.new { |days, d| days[d] = [] } } }
      self.categories = Hash.new { |hash, key| hash[key] = [] }
      self.tags = Hash.new { |hash, key| hash[key] = [] }
    end

    # Load optional dependencies (LSI, Sass, Haml, the Pygments cache) and
    # install the configured Markdown converter as the +markdown+ method.
    #
    # Returns nothing
    def setup
      # Check to see if LSI is enabled.
      require 'classifier' if self.lsi

      if self.config['sass']
        begin
          require 'sass'
          self.sass = true
          puts 'Using Sass for CSS generation'
        rescue LoadError
          # Fixed: this message previously (incorrectly) named the haml gem.
          puts 'You must have the sass gem installed first'
        end
      end

      if self.config['haml']
        begin
          require 'haml'
          require 'jekyll/haml_helpers'
          # Site-local Haml helpers are picked up from <source>/_helpers.rb.
          helpers = File.join(source, '_helpers.rb')
          require helpers if File.exist?(helpers)
          puts 'Enabled Haml'
        rescue LoadError
          puts 'You must have the haml gem installed first'
        end
      end

      if self.pygments_cache
        require 'fileutils'
        FileUtils.mkdir_p(pygments_cache)
        require 'digest/md5'
      end

      # Set the Markdown interpreter (and Maruku self.config, if necessary).
      # NOTE: `def markdown` below defines an instance method on Site when
      # setup runs, so only the selected engine's implementation exists.
      case self.config['markdown']
      when 'rdiscount'
        begin
          require 'rdiscount'

          def markdown(content)
            RDiscount.new(content).to_html
          end

          puts 'Using rdiscount for Markdown'
        rescue LoadError
          puts 'You must have the rdiscount gem installed first'
        end
      when 'maruku'
        begin
          require 'maruku'

          def markdown(content)
            Maruku.new(content).to_html
          end

          if self.config['maruku']['use_divs']
            require 'maruku/ext/div'
            puts 'Maruku: Using extended syntax for div elements.'
          end

          if self.config['maruku']['use_tex']
            require 'maruku/ext/math'
            puts "Maruku: Using LaTeX extension. Images in `#{self.config['maruku']['png_dir']}`."

            # Switch off MathML output
            MaRuKu::Globals[:html_math_output_mathml] = false
            MaRuKu::Globals[:html_math_engine] = 'none'

            # Turn on math to PNG support with blahtex
            # Resulting PNGs stored in `images/latex`
            MaRuKu::Globals[:html_math_output_png] = true
            MaRuKu::Globals[:html_png_engine] = self.config['maruku']['png_engine']
            MaRuKu::Globals[:html_png_dir] = self.config['maruku']['png_dir']
            MaRuKu::Globals[:html_png_url] = self.config['maruku']['png_url']
          end
        rescue LoadError
          puts "The maruku gem is required for markdown support!"
        end
      end
    end

    # Convert +content+ from Textile to HTML. Requires the RedCloth gem to
    # have been loaded by the caller's environment.
    def textile(content)
      RedCloth.new(content).to_html
    end

    # Do the actual work of processing the site and generating the
    # real deal.
    #
    # Returns nothing
    def process
      self.reset
      self.read_layouts
      # transform_pages also triggers read_posts for any _posts directories.
      self.transform_pages
      self.transform_sass if self.sass
      self.write_posts
    end

    # Read all the files in <source>/_layouts into memory for later use.
    #
    # Returns nothing
    def read_layouts
      base = File.join(self.source, "_layouts")
      entries = []
      Dir.chdir(base) { entries = filter_entries(Dir['*.*']) }

      entries.each do |f|
        # Layout name is the filename minus its final extension.
        name = f.split(".")[0..-2].join(".")
        self.layouts[name] = Layout.new(self, base, f)
      end
    rescue Errno::ENOENT => e
      # ignore missing layout dir
    end

    # Read all the files in <base>/_posts and create a new Post object with each one.
    #
    # Returns nothing
    def read_posts(dir)
      base = File.join(self.source, dir, '_posts')
      entries = []
      Dir.chdir(base) { entries = filter_entries(Dir['**/*']) }

      # first pass processes, but does not yet render post content
      entries.each do |f|
        if Post.valid?(f)
          post = Post.new(self, self.source, dir, f)
          if post.published
            self.posts << post
            post.categories.each { |c| self.categories[c] << post }
            post.tags.each { |t| self.tags[t] << post }
          end
        end
      end

      # second pass renders each post now that full site payload is available
      self.posts.each do |post|
        post.render(self.layouts, site_payload)
      end

      self.posts.sort!
      # unshift keeps each day's posts in reverse (newest-first) order.
      self.posts.each do |post|
        self.collated_posts[post.date.year][post.date.month][post.date.day].unshift(post)
      end
      # Sort category/tag lists newest-first, in place.
      self.categories.values.each { |ps| ps.sort! { |a, b| b <=> a } }
      self.tags.values.each { |ps| ps.sort! { |a, b| b <=> a } }
    rescue Errno::ENOENT => e
      # ignore missing _posts dir
    end

    # Write each post to <dest>/<year>/<month>/<day>/<slug>
    #
    # Returns nothing
    def write_posts
      self.posts.each do |post|
        post.write(self.dest)
      end
    end

    # Copy all regular files from <source> to <dest>/ ignoring
    # any files/directories that are hidden or backup files (start
    # with "." or "#" or end with "~") or contain site content (start with "_")
    # unless they are "_posts" directories or web server files such as
    # '.htaccess'
    # The +dir+ String is a relative path used to call this method
    # recursively as it descends through directories
    #
    # Returns nothing
    def transform_pages(dir = '')
      base = File.join(self.source, dir)
      entries = filter_entries(Dir.entries(base))
      directories = entries.select { |e| File.directory?(File.join(base, e)) }
      files = entries.reject { |e| File.directory?(File.join(base, e)) }

      # we need to make sure to process _posts *first* otherwise they
      # might not be available yet to other templates as {{ site.posts }}
      if directories.include?('_posts')
        directories.delete('_posts')
        read_posts(dir)
      end

      [directories, files].each do |items|
        items.each do |f|
          if File.directory?(File.join(base, f))
            # Never descend into the destination directory itself.
            next if self.dest.sub(/\/$/, '') == File.join(base, f)
            transform_pages(File.join(dir, f))
          else
            # Peek at the first three bytes; "---" marks a YAML front-matter
            # header (block form closes the file handle).
            first3 = File.open(File.join(self.source, dir, f)) { |fd| fd.read(3) }

            if first3 == "---"
              # file appears to have a YAML header so process it as a page
              page = Page.new(self, self.source, dir, f)
              page.render(self.layouts, site_payload)
              page.write(self.dest)
            else
              # otherwise copy the file without transforming it
              FileUtils.mkdir_p(File.join(self.dest, dir))
              FileUtils.cp(File.join(self.source, dir, f), File.join(self.dest, dir, f))
            end
          end
        end
      end
    end

    # Transform all *.sass files from <dest> to css with the same name
    # and delete source sass files.
    #
    # Returns nothing
    def transform_sass(dir = '')
      base = File.join(self.source, dir)
      entries = Dir.entries(base)
      entries = entries.reject { |e| ['.', '_'].include?(e[0..0]) }
      directories = entries.select { |e| File.directory?(File.join(base, e)) }
      directories.each { |d| transform_sass(File.join(dir, d)) }
      files = entries.reject { |e| File.directory?(File.join(base, e)) }
      files = files.select { |f| File.extname(File.join(base, f)) == ".sass" }
      files.each do |f|
        # File.read avoids the handle leak of File.open(...).read.
        input = File.read(File.join(base, f))
        result = Sass::Engine.new(input, :style => :compact, :load_paths => base).render
        FileUtils.mkdir_p(File.join(self.dest, dir))
        # Dot escaped so only a literal ".sass" suffix is replaced.
        File.open(File.join(self.dest, dir, f).sub(/\.sass\Z/, ".css"), "w") do |o|
          o.write(result)
        end
        # Remove the raw .sass file that transform_pages copied into dest.
        FileUtils.rm(File.join(self.dest, dir, f))
      end
    end

    # Constructs a hash map of Posts indexed by the specified Post attribute
    #
    # Returns {post_attr => [<Post>]}
    def post_attr_hash(post_attr)
      # Build a hash map based on the specified post attribute ( post attr => array of posts )
      # then sort each array in reverse (newest-first) order.
      hash = Hash.new { |h, key| h[key] = Array.new }
      self.posts.each { |p| p.send(post_attr.to_sym).each { |t| hash[t] << p } }
      hash.values.each { |posts| posts.sort! { |a, b| b <=> a } }
      return hash
    end

    # The Hash payload containing site-wide data
    #
    # Returns {"site" => {"time" => <Time>,
    #                     "posts" => [<Post>],
    #                     "categories" => [<Post>],
    #                     "tags" => [<Post>],
    #                     "topics" => [<Post>] }}
    def site_payload
      {"site" => {
        "time" => Time.now,
        "posts" => self.posts.sort { |a, b| b <=> a },
        "categories" => post_attr_hash('categories'),
        "tags" => post_attr_hash('tags'),
        "topics" => post_attr_hash('topics')
      }}
    end

    # Filter out any files/directories that are hidden or backup files (start
    # with "." or "#" or end with "~") or contain site content (start with "_")
    # unless they are "_posts" directories or web server files such as
    # '.htaccess'
    def filter_entries(entries)
      entries = entries.reject do |e|
        unless ['_posts', '.htaccess'].include?(e)
          # Reject backup/hidden
          ['.', '_', '#'].include?(e[0..0]) || e[-1..-1] == '~'
        end
      end
    end
  end
end