Permalink
Browse files

hugolib: Add a cache to GetPage

This looks slightly slower for the small number of section pages, but the cache clearly pays off for the 1000 regular pages.

```
benchmark                     old ns/op     new ns/op     delta
BenchmarkGetPage-4            97.7          145           +48.41%
BenchmarkGetPageRegular-4     7933          161           -97.97%

benchmark                     old allocs     new allocs     delta
BenchmarkGetPage-4            0              0              +0.00%
BenchmarkGetPageRegular-4     0              0              +0.00%

benchmark                     old bytes     new bytes     delta
BenchmarkGetPage-4            0             0             +0.00%
BenchmarkGetPageRegular-4     0             0             +0.00%
```
  • Loading branch information...
bep committed May 25, 2017
1 parent e0c2e79 commit 50d11138f3e18b545c15fadf52f7b0b744bf3e7c
Showing with 49 additions and 56 deletions.
  1. +3 −2 hugolib/page.go
  2. +46 −54 hugolib/page_collections.go
View
@@ -42,8 +42,9 @@ import (
)
var (
cjk = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
allKinds = []string{KindPage, KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm, kindRSS, kindSitemap, kindRobotsTXT, kind404}
cjk = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
allKindsInPages = []string{KindPage, KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm, kindRSS}
allKinds = append(allKindsInPages, []string{kindSitemap, kindRobotsTXT, kind404}...)
)
const (
View
@@ -15,6 +15,9 @@ package hugolib
import (
"path"
"path/filepath"
"github.com/spf13/hugo/cache"
)
// PageCollections contains the page collections for a site.
@@ -39,81 +42,70 @@ type PageCollections struct {
// Includes absolutely all pages (of all types), including drafts etc.
rawAllPages Pages
pageCache *cache.PartitionedLazyCache
}
func (c *PageCollections) refreshPageCaches() {
c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages)
c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages)
c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
}
func newPageCollections() *PageCollections {
return &PageCollections{}
}
func newPageCollectionsFromPages(pages Pages) *PageCollections {
return &PageCollections{rawAllPages: pages}
}
func (c *PageCollections) getFirstPageMatchIn(pages Pages, typ string, pathElements ...string) *Page {
if len(pages) == 0 {
return nil
}
var filename string
if typ == KindPage {
filename = path.Join(pathElements...)
}
for _, p := range pages {
if p.Kind != typ {
continue
}
if typ == KindHome {
return p
}
if typ == KindPage {
if p.Source.Path() == filename {
return p
cacheLoader := func(kind string) func() (map[string]interface{}, error) {
return func() (map[string]interface{}, error) {
cache := make(map[string]interface{})
switch kind {
case KindPage:
// Note that we deliberately use the pages from all sites
// in this cache, as we intend to use this in the ref and relref
// shortcodes. If the user says "sect/doc1.en.md", he/she knows
// what he/she is looking for.
for _, p := range c.AllRegularPages {
// TODO(bep) section
cache[filepath.ToSlash(p.Source.Path())] = p
}
default:
for _, p := range c.indexPages {
key := path.Join(p.sections...)
cache[key] = p
}
}
continue
}
match := false
for i := 0; i < len(pathElements); i++ {
if len(p.sections) > i && pathElements[i] == p.sections[i] {
match = true
} else {
match = false
break
}
}
if match {
return p
return cache, nil
}
}
return nil
var partitions []cache.Partition
}
for _, kind := range allKindsInPages {
partitions = append(partitions, cache.Partition{Key: kind, Load: cacheLoader(kind)})
}
func (c *PageCollections) getRegularPage(filename string) {
c.pageCache = cache.NewPartitionedLazyCache(partitions...)
}
// newPageCollections returns a fresh, empty PageCollections.
func newPageCollections() *PageCollections {
	return new(PageCollections)
}
func (c *PageCollections) getPage(typ string, path ...string) *Page {
var pages Pages
func newPageCollectionsFromPages(pages Pages) *PageCollections {
return &PageCollections{rawAllPages: pages}
}
if typ == KindPage {
pages = c.AllPages
func (c *PageCollections) getPage(typ string, sections ...string) *Page {
var key string
if len(sections) == 1 {
key = filepath.ToSlash(sections[0])
} else {
pages = c.indexPages
key = path.Join(sections...)
}
return c.getFirstPageMatchIn(pages, typ, path...)
// TODO(bep) section error
p, _ := c.pageCache.Get(typ, key)
if p == nil {
return nil
}
return p.(*Page)
}

0 comments on commit 50d1113

Please sign in to comment.