diff --git a/common/herrors/errors.go b/common/herrors/errors.go
index 1a61070501a..e484ecb8002 100644
--- a/common/herrors/errors.go
+++ b/common/herrors/errors.go
@@ -50,9 +50,10 @@ func FprintStackTrace(w io.Writer, err error) {
 // Recover is a helper function that can be used to capture panics.
 // Put this at the top of a method/function that crashes in a template:
 //     defer herrors.Recover()
-func Recover() {
+func Recover(args ...interface{}) {
 	if r := recover(); r != nil {
-		fmt.Println("stacktrace from panic: \n" + string(debug.Stack()))
+		args = append(args, "stacktrace from panic: \n"+string(debug.Stack()), "\n")
+		fmt.Println(args...)
 	}
 }
 
diff --git a/common/maps/maps_get.go b/common/maps/maps_get.go
new file mode 100644
index 00000000000..38a5f504af3
--- /dev/null
+++ b/common/maps/maps_get.go
@@ -0,0 +1,28 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package maps
+
+import (
+	"github.com/spf13/cast"
+)
+
+// GetString tries to get a value with key from map m and convert it to a string.
+// It will return an empty string if not found or if it cannot be converted to a string.
+func GetString(m map[string]interface{}, key string) string {
+	v, found := m[key]
+	if !found {
+		return ""
+	}
+	return cast.ToString(v)
+}
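Aside, not part of the patch: a throwaway, in-package test sketching the contract of the new GetString helper. The test name and sample values are invented; the behavior follows from the implementation above, with cast.ToString doing the conversion.

package maps

import "testing"

func TestGetStringSketch(t *testing.T) {
	m := map[string]interface{}{
		"plural": "tags",
		"weight": 42, // non-string values go through cast.ToString
	}

	if got := GetString(m, "plural"); got != "tags" {
		t.Fatalf("got %q", got)
	}
	if got := GetString(m, "weight"); got != "42" {
		t.Fatalf("got %q", got)
	}
	// A missing key yields the empty string rather than an error.
	if got := GetString(m, "missing"); got != "" {
		t.Fatalf("got %q", got)
	}
}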
diff --git a/go.mod b/go.mod
index 616dce102e0..6937eead72d 100644
--- a/go.mod
+++ b/go.mod
@@ -8,6 +8,7 @@ require (
 	github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38
 	github.com/alecthomas/chroma v0.6.4
 	github.com/alecthomas/repr v0.0.0-20181024024818-d37bc2a10ba1 // indirect
+	github.com/armon/go-radix v1.0.0
 	github.com/aws/aws-sdk-go v1.19.40
 	github.com/bep/debounce v1.2.0
 	github.com/bep/gitmap v1.1.0
diff --git a/go.sum b/go.sum
index 94249500f7c..3bcfdcb7b84 100644
--- a/go.sum
+++ b/go.sum
@@ -58,6 +58,8 @@ github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRF
 github.com/anacrolix/dms v0.0.0-20180117034613-8af4925bffb5/go.mod h1:DGqLjaZ3ziKKNRt+U5Q9PLWJ52Q/4rxfaaH/b3QYKaE=
 github.com/apache/thrift v0.12.0/go.mod h1:cp2SuWMxlEZw2r+iP2GNCdIi4C1qmUzdZFSVb+bacwQ=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
+github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
+github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/aws/aws-sdk-go v1.15.27/go.mod h1:mFuSZ37Z9YOHbQEwBWztmVzqXrEkub65tZoCYDt7FT0=
github.com/aws/aws-sdk-go v1.18.6/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
github.com/aws/aws-sdk-go v1.19.11/go.mod h1:KmX6BPdI08NWTb3/sm4ZGu5ShLoqVDhKgpiN924inxo=
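The new dependency pulled in above, github.com/armon/go-radix, supplies the radix tree that hugolib/pages_map.go (further down in this patch) builds its section buckets on. A minimal standalone sketch of the two operations the patch leans on: LongestPrefix, which finds a key's closest enclosing bucket, and WalkPrefix, which scans a subtree. Keys and values here are made up.

package main

import (
	"fmt"

	radix "github.com/armon/go-radix"
)

func main() {
	r := radix.New()
	r.Insert("/", "home")
	r.Insert("/blog", "section: blog")
	r.Insert("/blog/post1", "page")

	// LongestPrefix finds the closest enclosing bucket for a key,
	// which is how a regular page locates its section.
	if k, v, ok := r.LongestPrefix("/blog/post2"); ok {
		fmt.Println(k, "->", v) // /blog -> section: blog
	}

	// WalkPrefix visits a key and everything nested below it, which is
	// how an empty auto-created section checks for descendants.
	r.WalkPrefix("/blog", func(k string, v interface{}) bool {
		fmt.Println(k)
		return false // false = keep walking
	})
}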
diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go
index 6ad8715645b..f4fdcf4c530 100644
--- a/hugolib/hugo_sites.go
+++ b/hugolib/hugo_sites.go
@@ -14,9 +14,7 @@
 package hugolib
 
 import (
-	"fmt"
 	"io"
-	"path"
 	"path/filepath"
 	"sort"
 	"strings"
@@ -623,142 +621,14 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
 		s.siteCfg.sitemap.Filename, h.toSiteInfos(), smLayouts...)
 }
 
-// createMissingPages creates home page, taxonomies etc. that isnt't created as an
-// effect of having a content file.
-func (h *HugoSites) createMissingPages() error {
-
-	for _, s := range h.Sites {
-		if s.isEnabled(page.KindHome) {
-			// home pages
-			homes := s.findWorkPagesByKind(page.KindHome)
-			if len(homes) > 1 {
-				panic("Too many homes")
-			}
-			var home *pageState
-			if len(homes) == 0 {
-				home = s.newPage(page.KindHome)
-				s.workAllPages = append(s.workAllPages, home)
-			} else {
-				home = homes[0]
-			}
-
-			s.home = home
-		}
-
-		// Will create content-less root sections.
-		newSections := s.assembleSections()
-		s.workAllPages = append(s.workAllPages, newSections...)
-
-		taxonomyTermEnabled := s.isEnabled(page.KindTaxonomyTerm)
-		taxonomyEnabled := s.isEnabled(page.KindTaxonomy)
-
-		// taxonomy list and terms pages
-		taxonomies := s.Language().GetStringMapString("taxonomies")
-		if len(taxonomies) > 0 {
-			taxonomyPages := s.findWorkPagesByKind(page.KindTaxonomy)
-			taxonomyTermsPages := s.findWorkPagesByKind(page.KindTaxonomyTerm)
-
-			// Make them navigable from WeightedPage etc.
-			for _, p := range taxonomyPages {
-				ni := p.getTaxonomyNodeInfo()
-				if ni == nil {
-					// This can be nil for taxonomies, e.g. an author,
-					// with a content file, but no actual usage.
-					// Create one.
-					sections := p.SectionsEntries()
-					if len(sections) < 2 {
-						// Invalid state
-						panic(fmt.Sprintf("invalid taxonomy state for %q with sections %v", p.pathOrTitle(), sections))
-					}
-					ni = p.s.taxonomyNodes.GetOrAdd(sections[0], path.Join(sections[1:]...))
-				}
-				ni.TransferValues(p)
-			}
-			for _, p := range taxonomyTermsPages {
-				p.getTaxonomyNodeInfo().TransferValues(p)
-			}
-
-			for _, plural := range taxonomies {
-				if taxonomyTermEnabled {
-					foundTaxonomyTermsPage := false
-					for _, p := range taxonomyTermsPages {
-						if p.SectionsPath() == plural {
-							foundTaxonomyTermsPage = true
-							break
-						}
-					}
-
-					if !foundTaxonomyTermsPage {
-						n := s.newPage(page.KindTaxonomyTerm, plural)
-						n.getTaxonomyNodeInfo().TransferValues(n)
-						s.workAllPages = append(s.workAllPages, n)
-					}
-				}
-
-				if taxonomyEnabled {
-					for termKey := range s.Taxonomies[plural] {
-
-						foundTaxonomyPage := false
-
-						for _, p := range taxonomyPages {
-							sectionsPath := p.SectionsPath()
-
-							if !strings.HasPrefix(sectionsPath, plural) {
-								continue
-							}
-
-							singularKey := strings.TrimPrefix(sectionsPath, plural)
-							singularKey = strings.TrimPrefix(singularKey, "/")
-
-							if singularKey == termKey {
-								foundTaxonomyPage = true
-								break
-							}
-						}
-
-						if !foundTaxonomyPage {
-							info := s.taxonomyNodes.Get(plural, termKey)
-							if info == nil {
-								panic("no info found")
-							}
-
-							n := s.newTaxonomyPage(info.term, info.plural, info.termKey)
-							info.TransferValues(n)
-							s.workAllPages = append(s.workAllPages, n)
-						}
-					}
-				}
-			}
-		}
-	}
-
-	return nil
-}
-
 func (h *HugoSites) removePageByFilename(filename string) {
 	for _, s := range h.Sites {
 		s.removePageFilename(filename)
 	}
 }
 
+// TODO(bep) cm
 func (h *HugoSites) createPageCollections() error {
-	for _, s := range h.Sites {
-		for _, p := range s.rawAllPages {
-			if !s.isEnabled(p.Kind()) {
-				continue
-			}
-
-			shouldBuild := s.shouldBuild(p)
-			s.buildStats.update(p)
-			if shouldBuild {
-				if p.m.headless {
-					s.headlessPages = append(s.headlessPages, p)
-				} else {
-					s.workAllPages = append(s.workAllPages, p)
-				}
-			}
-		}
-	}
 
 	allPages := newLazyPagesFactory(func() page.Pages {
 		var pages page.Pages
diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go
index d20932599c3..fca14920d1e 100644
--- a/hugolib/hugo_sites_build.go
+++ b/hugolib/hugo_sites_build.go
@@ -18,7 +18,6 @@ import (
 	"context"
 	"fmt"
 	"runtime/trace"
-	"sort"
 
 	"github.com/gohugoio/hugo/output"
 
@@ -235,26 +234,41 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
 		}
 	}
 
-	if err := h.createPageCollections(); err != nil {
-		return err
-	}
-
 	if config.whatChanged.source {
-		for _, s := range h.Sites {
-			if err := s.assembleTaxonomies(); err != nil {
-				return err
-			}
-		}
+		//for _, s := range h.Sites {
+		// TODO(bep) cm
+		//if err := s.assembleTaxonomies(); err != nil {
+		//	return err
+		//}
+		//}
 	}
 
-	// Create pagexs for the section pages etc. without content file.
-	if err := h.createMissingPages(); err != nil {
+	// Create pages for the section pages etc. without content file.
+	// TODO(bep) cm
+	/*if err := h.createMissingPages(); err != nil {
 		return err
-	}
+	}*/
 
 	for _, s := range h.Sites {
-		s.setupSitePages()
-		sort.Stable(s.workAllPages)
+		if err := s.assemblePagesMap(s); err != nil {
+			return err
+		}
+
+		if err := s.pagesMap.assembleTaxonomies(s); err != nil {
+			return err
+		}
+
+		if err := s.createWorkAllPages(); err != nil {
+			return err
+		}
+
+		// TODO(bep) cm
+		//s.setupSitePages()
+		//sort.Stable(s.workAllPages)
+	}
+
+	if err := h.createPageCollections(); err != nil {
+		return err
 	}
 
 	return nil
diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go
index 876f21cfa6d..acb90a4a594 100644
--- a/hugolib/hugo_sites_build_test.go
+++ b/hugolib/hugo_sites_build_test.go
@@ -365,7 +365,6 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
 	require.NotNil(t, enTags["tag1"])
 	require.NotNil(t, frTags["FRtag1"])
 	b.AssertFileContent("public/fr/plaques/FRtag1/index.html", "FRtag1|Bonjour|http://example.com/blog/fr/plaques/FRtag1/")
-	b.AssertFileContent("public/en/tags/tag1/index.html", "tag1|Hello|http://example.com/blog/en/tags/tag1/")
 
 	// Check Blackfriday config
 	require.True(t, strings.Contains(content(doc1fr), "«"), content(doc1fr))
@@ -470,13 +469,6 @@ func TestMultiSitesRebuild(t *testing.T) {
 			func(t *testing.T) {
 				assert.Len(enSite.RegularPages(), 4, "1 en removed")
 
-				// Check build stats
-				require.Equal(t, 1, enSite.buildStats.draftCount, "Draft")
-				require.Equal(t, 1, enSite.buildStats.futureCount, "Future")
-				require.Equal(t, 1, enSite.buildStats.expiredCount, "Expired")
-				require.Equal(t, 0, frSite.buildStats.draftCount, "Draft")
-				require.Equal(t, 1, frSite.buildStats.futureCount, "Future")
-				require.Equal(t, 1, frSite.buildStats.expiredCount, "Expired")
 			},
 		},
 		{
@@ -775,13 +767,13 @@ END
 }
 
 func checkContent(s *sitesBuilder, filename string, matches ...string) {
+	s.T.Helper()
 	content := readDestination(s.T, s.Fs, filename)
 	for _, match := range matches {
 		if !strings.Contains(content, match) {
 			s.Fatalf("No match for %q in content for %s\n%q", match, filename, content)
 		}
 	}
-
 }
 
 func TestTranslationsFromContentToNonContent(t *testing.T) {
diff --git a/hugolib/hugo_sites_rebuild_test.go b/hugolib/hugo_sites_rebuild_test.go
index 4a81fe950f9..e36c1a1d4cb 100644
--- a/hugolib/hugo_sites_rebuild_test.go
+++ b/hugolib/hugo_sites_rebuild_test.go
@@ -54,7 +54,7 @@ Content.
 {{ range (.Paginate .Site.RegularPages).Pages }}
 * Page Paginate: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
 {{ end }}
-{{ range .Pages }}
+{{ range .Site.RegularPages }}
 * Page Pages: {{ .Title }}|Summary: {{ .Summary }}|Content: {{ .Content }}
 {{ end }}
 `)
diff --git a/hugolib/page.go b/hugolib/page.go
index 676cba762ac..c4cff86aef9 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -23,6 +23,8 @@ import (
 	"sort"
 	"strings"
 
+	"github.com/gohugoio/hugo/common/maps"
+
 	"github.com/gohugoio/hugo/hugofs/files"
 
 	"github.com/bep/gitmap"
@@ -121,31 +123,67 @@ func (p *pageState) MarshalJSON() ([]byte, error) {
 	return page.MarshalPageToJSON(p)
 }
 
-func (p *pageState) Pages() page.Pages {
-	p.pagesInit.Do(func() {
-		if p.pages != nil {
-			return
-		}
+func (p *pageState) getPages() page.Pages {
+	b := p.bucket
+	if b == nil {
+		return nil
+	}
+	return b.getPages()
+}
+
+func (p *pageState) getPagesAndSections() page.Pages {
+	b := p.bucket
+	if b == nil {
+		return nil
+	}
+	return b.getPagesAndSections()
+}
 
+// TODO(bep) cm add a test
+func (p *pageState) RegularPages() page.Pages {
+	p.regularPagesInit.Do(func() {
 		var pages page.Pages
 
 		switch p.Kind() {
 		case page.KindPage:
-		case page.KindHome:
-			pages = p.s.RegularPages()
+		case page.KindSection, page.KindHome, page.KindTaxonomyTerm:
+			pages = p.getPages()
 		case page.KindTaxonomy:
-			termInfo := p.getTaxonomyNodeInfo()
-			taxonomy := p.s.Taxonomies[termInfo.plural].Get(termInfo.termKey)
-			pages = taxonomy.Pages()
-		case page.KindTaxonomyTerm:
-			plural := p.getTaxonomyNodeInfo().plural
-			// A list of all page.KindTaxonomy pages with matching plural
-			for _, p := range p.s.findPagesByKind(page.KindTaxonomy) {
-				if p.SectionsEntries()[0] == plural {
+			all := p.Pages()
+			for _, p := range all {
+				if p.IsPage() {
 					pages = append(pages, p)
 				}
 			}
-		case kind404, kindSitemap, kindRobotsTXT:
+		default:
+			pages = p.s.RegularPages()
+		}
+
+		p.regularPages = pages
+
+	})
+
+	return p.regularPages
+}
+
+func (p *pageState) Pages() page.Pages {
+	p.pagesInit.Do(func() {
+		var pages page.Pages
+
+		switch p.Kind() {
+		case page.KindPage:
+		case page.KindSection, page.KindHome:
+			// TODO(bep) cm add page.RegularPages()
+			pages = p.getPagesAndSections()
+		case page.KindTaxonomy:
+			termInfo := p.bucket
+			plural := maps.GetString(termInfo.meta, "plural")
+			term := maps.GetString(termInfo.meta, "termKey")
+			taxonomy := p.s.Taxonomies[plural].Get(term)
+			pages = taxonomy.Pages()
+		case page.KindTaxonomyTerm:
+			pages = p.getPagesAndSections()
+		default:
 			pages = p.s.Pages()
 		}
@@ -296,9 +334,9 @@ func (p *pageState) getLayoutDescriptor() output.LayoutDescriptor {
 			section = sections[0]
 		}
 	case page.KindTaxonomyTerm:
-		section = p.getTaxonomyNodeInfo().singular
+		// TODO(bep) cm section = p.getTaxonomyNodeInfo().singular
 	case page.KindTaxonomy:
-		section = p.getTaxonomyNodeInfo().parent.singular
+		// TODO(bep) cm section = p.getTaxonomyNodeInfo().parent.singular
 	default:
 	}
 
@@ -359,11 +397,6 @@ func (p *pageState) initPage() error {
 	return nil
 }
 
-func (p *pageState) setPages(pages page.Pages) {
-	page.SortByDefault(pages)
-	p.pages = pages
-}
-
 func (p *pageState) renderResources() (err error) {
 	p.resourcesPublishInit.Do(func() {
 		var toBeDeleted []int
@@ -489,13 +522,6 @@ func (p *pageState) addResources(r ...resource.Resource) {
 	p.resources = append(p.resources, r...)
 }
 
-func (p *pageState) addSectionToParent() {
-	if p.parent == nil {
-		return
-	}
-	p.parent.subSections = append(p.parent.subSections, p)
-}
-
 func (p *pageState) mapContent(meta *pageMeta) error {
 	s := p.shortcodeState
@@ -743,27 +769,6 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
 	return nil
 }
 
-func (p *pageState) getTaxonomyNodeInfo() *taxonomyNodeInfo {
-	info := p.s.taxonomyNodes.Get(p.SectionsEntries()...)
-
-	if info == nil {
-		// There can be unused content pages for taxonomies (e.g. author that
-		// has not written anything, yet), and these will not have a taxonomy
-		// node created in the assemble taxonomies step.
-		return nil
-	}
-
-	return info
-
-}
-
-func (p *pageState) sortParentSections() {
-	if p.parent == nil {
-		return
-	}
-	page.SortByDefault(p.parent.subSections)
-}
-
 // sourceRef returns the reference used by GetPage and ref/relref shortcodes to refer to
 // this page. It is prefixed with a "/".
 //
diff --git a/hugolib/page__common.go b/hugolib/page__common.go
index f9ceee8c905..cf554bb40ad 100644
--- a/hugolib/page__common.go
+++ b/hugolib/page__common.go
@@ -30,6 +30,8 @@ type pageCommon struct {
 	s *Site
 	m *pageMeta
 
+	bucket *pagesMapBucket
+
 	// Laziliy initialized dependencies.
 	init *lazy.Init
 
@@ -101,17 +103,17 @@ type pageCommon struct {
 	translationKey     string
 	translationKeyInit sync.Once
 
-	// Will only be set for sections and regular pages.
+	// Will only be set for bundled pages.
 	parent *pageState
 
-	// Will only be set for section pages and the home page.
-	subSections page.Pages
-
 	// Set in fast render mode to force render a given page.
 	forceRender bool
 }
 
 type pagePages struct {
-	pages     page.Pages
 	pagesInit sync.Once
+	pages     page.Pages
+
+	regularPagesInit sync.Once
+	regularPages     page.Pages
 }
diff --git a/hugolib/page__data.go b/hugolib/page__data.go
index 79a64931b4a..8bc818a00a0 100644
--- a/hugolib/page__data.go
+++ b/hugolib/page__data.go
@@ -16,6 +16,8 @@ package hugolib
 import (
 	"sync"
 
+	"github.com/gohugoio/hugo/common/maps"
+
 	"github.com/gohugoio/hugo/resources/page"
 )
 
@@ -36,22 +38,22 @@ func (p *pageData) Data() interface{} {
 
 		switch p.Kind() {
 		case page.KindTaxonomy:
-			termInfo := p.getTaxonomyNodeInfo()
-			pluralInfo := termInfo.parent
+			bucket := p.bucket
+			meta := bucket.meta
+			plural := maps.GetString(meta, "plural")
+			singular := maps.GetString(meta, "singular")
 
-			singular := pluralInfo.singular
-			plural := pluralInfo.plural
-			term := termInfo.term
-			taxonomy := p.s.Taxonomies[plural].Get(termInfo.termKey)
+			taxonomy := p.s.Taxonomies[plural].Get(maps.GetString(meta, "termKey"))
 
 			p.data[singular] = taxonomy
-			p.data["Singular"] = singular
+			p.data["Singular"] = meta["singular"]
 			p.data["Plural"] = plural
-			p.data["Term"] = term
+			p.data["Term"] = meta["term"]
 		case page.KindTaxonomyTerm:
-			info := p.getTaxonomyNodeInfo()
-			plural := info.plural
-			singular := info.singular
+			bucket := p.bucket
+			meta := bucket.meta
+			plural := maps.GetString(meta, "plural")
+			singular := maps.GetString(meta, "singular")
 
 			p.data["Singular"] = singular
 			p.data["Plural"] = plural
diff --git a/hugolib/page__paginator.go b/hugolib/page__paginator.go
index 026546742c7..20476ecfa93 100644
--- a/hugolib/page__paginator.go
+++ b/hugolib/page__paginator.go
@@ -80,7 +80,17 @@ func (p *pagePaginator) Paginator(options ...interface{}) (*page.Pager, error) {
 
 		pd := p.source.targetPathDescriptor
 		pd.Type = p.source.outputFormat()
-		paginator, err := page.Paginate(pd, p.source.Pages(), pagerSize)
+
+		var pages page.Pages
+		if p.source.IsHome() {
+			// From Hugo 0.57 we made home.Pages() work like any other
+			// section. To avoid the default paginators for the home page
+			// changing in the wild, we make this a special case.
+			pages = p.source.s.RegularPages()
+		} else {
+			pages = p.source.Pages()
+		}
+		paginator, err := page.Paginate(pd, pages, pagerSize)
 		if err != nil {
 			initErr = err
 			return
diff --git a/hugolib/page__per_output.go b/hugolib/page__per_output.go
index 177e0420a8b..aa0fcd48835 100644
--- a/hugolib/page__per_output.go
+++ b/hugolib/page__per_output.go
@@ -27,9 +27,8 @@ import (
 	bp "github.com/gohugoio/hugo/bufferpool"
 	"github.com/gohugoio/hugo/tpl"
 
-	"github.com/gohugoio/hugo/output"
-
 	"github.com/gohugoio/hugo/helpers"
+	"github.com/gohugoio/hugo/output"
 	"github.com/gohugoio/hugo/resources/page"
 	"github.com/gohugoio/hugo/resources/resource"
 )
diff --git a/hugolib/page__tree.go b/hugolib/page__tree.go
index bddfde7c865..7bd2874bfa8 100644
--- a/hugolib/page__tree.go
+++ b/hugolib/page__tree.go
@@ -109,9 +109,21 @@ func (pt pageTree) Page() page.Page {
 }
 
 func (pt pageTree) Parent() page.Page {
-	return pt.p.parent
+	if pt.p.parent != nil {
+		return pt.p.parent
+	}
+
+	if pt.p.bucket == nil || pt.p.bucket.parent == nil {
+		return nil
+	}
+
+	return pt.p.bucket.parent.owner
 }
 
 func (pt pageTree) Sections() page.Pages {
-	return pt.p.subSections
+	if pt.p.bucket == nil {
+		return nil
+	}
+
+	return pt.p.bucket.getSections()
 }
diff --git a/hugolib/page_test.go b/hugolib/page_test.go
index 05dacbe0ac9..bb34ccfb848 100644
--- a/hugolib/page_test.go
+++ b/hugolib/page_test.go
@@ -531,7 +531,6 @@ date: 2018-01-15
 	assert.Equal(2017, s.getPage("/no-index").Date().Year())
 	assert.True(s.getPage("/with-index-no-date").Date().IsZero())
 	assert.Equal(2018, s.getPage("/with-index-date").Date().Year())
-
 }
 
 func TestCreateNewPage(t *testing.T) {
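Aside, a sitesBuilder-style sketch (mine, not from the patch) of the new contract the page.go and page__tree.go changes above add up to: for a section, .Pages now includes sub-sections, while .RegularPages filters them out. This assumes the test helpers behave as in the tests elsewhere in this patch.

package hugolib

import "testing"

// Sketch only: section s1 has one regular page and one sub-section, so
// .Pages sees two entries while .RegularPages sees one.
func TestPagesVsRegularPagesSketch(t *testing.T) {
	b := newTestSitesBuilder(t)
	b.WithContent(
		"s1/_index.md", "---\ntitle: S1\n---",
		"s1/p1.md", "---\ntitle: P1\n---",
		"s1/nested/_index.md", "---\ntitle: Nested\n---",
	)
	b.WithTemplates("_default/list.html",
		`Pages: {{ len .Pages }}|Regular: {{ len .RegularPages }}`)
	b.Build(BuildCfg{})

	b.AssertFileContent("public/s1/index.html", "Pages: 2|Regular: 1")
}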
diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go
index 5c21dc4725d..96bec3fdd07 100644
--- a/hugolib/pagebundler_test.go
+++ b/hugolib/pagebundler_test.go
@@ -1040,6 +1040,10 @@ slug: leaf
 	b.WithContent("sv/b1/data2.json", "sv: data2")
 	b.WithContent("nb/b1/data2.json", "nb: data2")
 
+	b.WithContent("en/b3/_index.md", createPage("en: branch"))
+	b.WithContent("en/b3/p1.md", createPage("en: page"))
+	b.WithContent("en/b3/data1.json", "en: data")
+
 	b.Build(BuildCfg{})
 
 	b.AssertFileContent("public/en/index.html",
@@ -1142,17 +1146,89 @@ Num Pages: {{ len .Site.Pages }}
 	b.Build(BuildCfg{})
 
 	b.AssertFileContent("public/nn/index.html",
-		"Num Pages: 6",
+		"Num Pages: 8",
 		"page|/nn/blog/sect1/b1/|Content: s1.b1.nn|Resources: R: data.json|s1.b1.data|",
 		"page|/nn/blog/sect2/b3/|Content: s2.b3|Resources: R: s2.b3.bundlecontent.nn|",
 		"page|/nn/blog/sect2/b4/|Content: s2.b4|Resources: R: s2.b4.bundlecontent.nn",
 	)
 
 	b.AssertFileContent("public/en/index.html",
-		"Num Pages: 6",
+		"Num Pages: 8",
 		"section|/en/blog/sect2/|Content: s2|Resources: R: data.json|s2.data|",
 		"page|/en/blog/sect2/b1/|Content: s2.b1|Resources: R: data.json|s2.b1.data|",
 		"page|/en/blog/sect2/b2/|Content: s2.b2|Resources: R: s2.b2.bundlecontent|",
 	)
 
 }
+
+func TestPageBundlerBasic(t *testing.T) {
+	b := newTestSitesBuilder(t)
+	b.WithConfigFile("toml", `
+baseURL = "https://example.org"
+[outputs]
+  home = ["HTML", "RSS"]
+  taxonomy = ["HTML", "RSS"]
+  taxonomyTerm = ["HTML", "RSS"]
+  section = ["HTML", "RSS"]
+
+`)
+	pageContent := func(w int) string {
+		return fmt.Sprintf(`
+---
+title: "Page"
+weight: %d
+---
+
+`, w)
+	}
+
+	pageContentTaxo := func(w int) string {
+		return fmt.Sprintf(`
+---
+title: "Page With Taxo"
+weight: %d
+categories: ["a", "b"]
+tags: ["blue", "green"]
+---
+
+`, w)
+	}
+
+	b.WithContent(
+		"_index.md", pageContent(70),
+		"r1/page1.md", pageContent(80),
+		"r1/index.md", pageContent(90),
+		"r1.md", pageContent(100),
+		"s1/page1.md", pageContent(120),
+		"s1/page2.md", pageContent(1),
+		"s1/_index.md", pageContent(300),
+		"s1/s1_s1/_index.md", pageContent(400),
+		"s1/s1_s1/page1.md", pageContent(500),
+		"s1/s1_s2/_index.md", pageContent(600),
+		"s1/s1_s2/page1.md", pageContent(700),
+		"s1/subfolder/page1.md", pageContentTaxo(800),
+		"categories/_index.md", pageContent(900),
+		"tags/_index.md", pageContent(1000),
+	)
+
+	b.WithTemplates("index.html", `
+{{ template "sect" (dict "page" . "level" 0) }}
+{{ define "sect" }}
+{{- $page := .page -}}
+{{- $level := .level -}}
+{{ range seq $level }} {{ end }} Sect:|{{ $page.Kind }}|{{ $page.Path }}
+{{ range $page.Pages }}
+{{ range seq $level }} {{ end }} Sect Page:|{{ .Kind }}|{{ .Path }}|{{ .Section }}|
+{{ end }}
+{{ range $page.Sections }}{{ template "sect" (dict "page" . "level" (add $level 1) ) }}{{ end }}
+{{ end }}
+`)
+
+	b.Build(BuildCfg{})
+
+	//s.pagesMap.dump()
+	// TODO(bep) cm remove? or improve.
+	b.AssertFileContent("public/index.html",
+		"Sect Page:|page|s1/page2.md|s1|")
+
+}
diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go
index aedcf40901e..2a74dbbb4fb 100644
--- a/hugolib/pagecollections.go
+++ b/hugolib/pagecollections.go
@@ -17,8 +17,12 @@ import (
 	"fmt"
 	"path"
 	"path/filepath"
+	"sort"
 	"strings"
 	"sync"
+	"time"
+
+	"github.com/gohugoio/hugo/resources/resource"
 
 	"github.com/pkg/errors"
 
@@ -32,6 +36,7 @@ var ambiguityFlag = &pageState{}
 
 // PageCollections contains the page collections for a site.
 type PageCollections struct {
+	pagesMap *pagesMap
 
 	// Includes absolute all pages (of all types), including drafts etc.
 	rawAllPages pageStatePages
@@ -340,15 +345,6 @@ func (*PageCollections) findPagesByKindInWorkPages(kind string, inPages pageStat
 	return pages
 }
 
-func (c *PageCollections) findFirstWorkPageByKindIn(kind string) *pageState {
-	for _, p := range c.workAllPages {
-		if p.Kind() == kind {
-			return p
-		}
-	}
-	return nil
-}
-
 func (c *PageCollections) addPage(page *pageState) {
 	c.rawAllPages = append(c.rawAllPages, page)
 }
@@ -389,3 +385,182 @@ func (c *PageCollections) clearResourceCacheForPage(page *pageState) {
 		page.s.ResourceSpec.DeleteCacheByPrefix(page.targetPaths().SubResourceBaseTarget)
 	}
 }
+
+func (c *PageCollections) assemblePagesMap(s *Site) error {
+	c.pagesMap = newPagesMap(s)
+
+	rootSections := make(map[string]bool)
+
+	// Add all branch nodes first.
+	for _, p := range c.rawAllPages {
+		rootSections[p.Section()] = true
+		if p.IsPage() {
+			continue
+		}
+		c.pagesMap.addPage(p)
+	}
+
+	// Create missing home page and the first level sections if no
+	// _index provided.
+	s.home = c.pagesMap.getOrCreateHome()
+	for k := range rootSections {
+		c.pagesMap.createSectionIfNotExists(k)
+	}
+
+	// Attach the regular pages to their section.
+	for _, p := range c.rawAllPages {
+		if p.IsNode() {
+			continue
+		}
+		c.pagesMap.addPage(p)
+	}
+
+	return nil
+}
+
+func (c *PageCollections) createWorkAllPages() error {
+	c.workAllPages = make(pageStatePages, 0, len(c.rawAllPages))
+
+	var (
+		homeDates    *resource.Dates
+		sectionDates *resource.Dates
+		siteLastmod  time.Time
+		siteLastDate time.Time
+
+		sectionsParamId      = "mainSections"
+		sectionsParamIdLower = strings.ToLower(sectionsParamId)
+	)
+
+	mainSections, mainSectionsFound := c.pagesMap.s.Info.Params()[sectionsParamIdLower]
+
+	var (
+		bucketsToRemove []string
+		rootBuckets     []*pagesMapBucket
+	)
+
+	c.pagesMap.r.Walk(func(s string, v interface{}) bool {
+		bucket := v.(*pagesMapBucket)
+		var parentBucket *pagesMapBucket
+
+		if s != "/" {
+			_, parentv, found := c.pagesMap.r.LongestPrefix(path.Dir(s))
+			if !found {
+				panic(fmt.Sprintf("[BUG] parent bucket not found for %q", s))
+			}
+			parentBucket = parentv.(*pagesMapBucket)
+
+			if !mainSectionsFound && strings.Count(s, "/") == 1 {
+				// Root section
+				rootBuckets = append(rootBuckets, bucket)
+			}
+		}
+
+		if bucket.owner.IsHome() {
+			if resource.IsZeroDates(bucket.owner) {
+				// Calculate dates from the page tree.
+				homeDates = &bucket.owner.m.Dates
+			}
+		}
+
+		sectionDates = nil
+		if resource.IsZeroDates(bucket.owner) {
+			sectionDates = &bucket.owner.m.Dates
+		}
+
+		if parentBucket != nil {
+			bucket.parent = parentBucket
+			if bucket.owner.IsSection() {
+				parentBucket.bucketSections = append(parentBucket.bucketSections, bucket)
+			}
+		}
+
+		tmp := bucket.pages[:0]
+		for _, x := range bucket.pages {
+			if c.pagesMap.s.shouldBuild(x) {
+				tmp = append(tmp, x)
+			}
+		}
+		bucket.pages = tmp
+
+		if bucket.isEmpty() {
+			if bucket.owner.IsSection() && bucket.owner.File().IsZero() {
+				// Check for any nested section.
+				var hasDescendant bool
+				c.pagesMap.r.WalkPrefix(s, func(ss string, v interface{}) bool {
+					if s != ss {
+						hasDescendant = true
+						return true
+					}
+					return false
+				})
+				if !hasDescendant {
+					// This is an auto-created section with, now, nothing in it.
+					bucketsToRemove = append(bucketsToRemove, s)
+					return false
+				}
+			}
+		}
+
+		c.workAllPages = append(c.workAllPages, bucket.owner)
+
+		if !bucket.view {
+			for _, p := range bucket.pages {
+				ps := p.(*pageState)
+				ps.parent = bucket.owner
+				c.workAllPages = append(c.workAllPages, ps)
+
+				if homeDates != nil {
+					homeDates.UpdateDateAndLastmodIfAfter(ps)
+				}
+
+				if sectionDates != nil {
+					sectionDates.UpdateDateAndLastmodIfAfter(ps)
+				}
+
+				if p.Lastmod().After(siteLastmod) {
+					siteLastmod = p.Lastmod()
+				}
+				if p.Date().After(siteLastDate) {
+					siteLastDate = p.Date()
+				}
+			}
+		}
+
+		return false
+	})
+
+	c.pagesMap.s.lastmod = siteLastmod
+
+	if !mainSectionsFound {
+
+		// Calculate main section
+		var (
+			maxRootBucketWeight int
+			maxRootBucket       *pagesMapBucket
+		)
+
+		for _, b := range rootBuckets {
+			weight := len(b.pages) + (len(b.bucketSections) * 5)
+			if weight >= maxRootBucketWeight {
+				maxRootBucket = b
+				maxRootBucketWeight = weight
+			}
+		}
+
+		if maxRootBucket != nil {
+			// Try to make this as backwards compatible as possible.
+			mainSections = []string{maxRootBucket.owner.Section()}
+		}
+	}
+
+	c.pagesMap.s.Info.Params()[sectionsParamId] = mainSections
+	c.pagesMap.s.Info.Params()[sectionsParamIdLower] = mainSections
+
+	for _, key := range bucketsToRemove {
+		c.pagesMap.r.Delete(key)
+	}
+
+	sort.Sort(c.workAllPages)
+
+	return nil
+}
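Aside, not part of the patch: when mainSections is not configured, the fallback above scores each root section as one point per page plus five per sub-section and keeps the highest score. A toy restatement with invented numbers:

package main

import "fmt"

// sectionWeight mirrors the heuristic in createWorkAllPages.
func sectionWeight(pages, subSections int) int {
	return pages + subSections*5
}

func main() {
	// "docs" wins despite having fewer pages, so it would become
	// the default "mainSections" value.
	fmt.Println("blog:", sectionWeight(12, 0)) // 12
	fmt.Println("docs:", sectionWeight(4, 3))  // 19
}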
diff --git a/hugolib/pages_capture.go b/hugolib/pages_capture.go
index 361b87e84c3..498939fa4b6 100644
--- a/hugolib/pages_capture.go
+++ b/hugolib/pages_capture.go
@@ -36,9 +36,8 @@ import (
 
 	"github.com/gohugoio/hugo/source"
 
-	"github.com/gohugoio/hugo/hugofs"
-
 	"github.com/gohugoio/hugo/common/loggers"
+	"github.com/gohugoio/hugo/hugofs"
 	"github.com/spf13/afero"
 )
 
@@ -125,7 +124,7 @@ func (c *pagesCollector) Collect() error {
 		dirs[contentDirKey{dir, filename, btype}] = true
 	}
 
-	for dir, _ := range dirs {
+	for dir := range dirs {
 		switch dir.tp {
 		case bundleLeaf, bundleBranch:
 			collectErr = c.collectDir(dir.dirname, true, nil)
diff --git a/hugolib/pages_map.go b/hugolib/pages_map.go
new file mode 100644
index 00000000000..ddfa05f13c5
--- /dev/null
+++ b/hugolib/pages_map.go
@@ -0,0 +1,339 @@
+// Copyright 2019 The Hugo Authors. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"path"
+	"path/filepath"
+	"strings"
+	"sync"
+
+	radix "github.com/armon/go-radix"
+	"github.com/spf13/cast"
+
+	"github.com/gohugoio/hugo/resources/page"
+)
+
+func newPagesMap(s *Site) *pagesMap {
+	return &pagesMap{
+		r: radix.New(),
+		s: s,
+	}
+}
+
+type pagesMap struct {
+	r *radix.Tree
+	s *Site
+}
+
+func (m *pagesMap) Get(key string) *pagesMapBucket {
+	key = m.cleanKey(key)
+	v, found := m.r.Get(key)
+	if !found {
+		return nil
+	}
+
+	return v.(*pagesMapBucket)
+}
+
+func (m *pagesMap) getKey(p *pageState) string {
+	if !p.File().IsZero() {
+		return m.cleanKey(filepath.ToSlash(p.File().Dir()))
+	}
+	return m.cleanKey(p.SectionsPath())
+}
+
+func (m *pagesMap) getOrCreateHome() *pageState {
+	var home *pageState
+	b, found := m.r.Get("/")
+	if !found {
+		home = m.s.newPage(page.KindHome)
+		m.addBucketFor("/", home, nil)
+	} else {
+		home = b.(*pagesMapBucket).owner
+	}
+
+	return home
+}
+
+func (m *pagesMap) createSectionIfNotExists(section string) {
+	key := m.cleanKey(section)
+	_, found := m.r.Get(key)
+	if !found {
+		kind := m.s.kindFromSectionPath(section)
+		p := m.s.newPage(kind, section)
+		m.addBucketFor(key, p, nil)
+	}
+}
+
+func (m *pagesMap) addBucket(p *pageState) {
+	key := m.getKey(p)
+
+	m.addBucketFor(key, p, nil)
+}
+
+func (m *pagesMap) addBucketFor(key string, p *pageState, meta map[string]interface{}) *pagesMapBucket {
+	var isView bool
+	switch p.Kind() {
+	case page.KindTaxonomy, page.KindTaxonomyTerm:
+		isView = true
+	}
+
+	bucket := &pagesMapBucket{owner: p, view: isView, meta: meta}
+	p.bucket = bucket
+
+	m.r.Insert(key, bucket)
+
+	return bucket
+}
+
+func (m *pagesMap) addPage(p *pageState) {
+	if !p.IsPage() {
+		m.addBucket(p)
+		return
+	}
+
+	key := m.getKey(p)
+
+	var bucket *pagesMapBucket
+
+	_, v, found := m.r.LongestPrefix(key)
+	if !found {
+		panic(fmt.Sprintf("[BUG] bucket with key %q not found", key))
+	}
+
+	bucket = v.(*pagesMapBucket)
+	p.bucket = bucket
+
+	bucket.pages = append(bucket.pages, p)
+}
+
+func (m *pagesMap) assembleTaxonomies(s *Site) error {
+	s.Taxonomies = make(TaxonomyList)
+
+	type bucketKey struct {
+		plural  string
+		termKey string
+	}
+
+	// Temporary cache.
+	taxonomyBuckets := make(map[bucketKey]*pagesMapBucket)
+
+	for singular, plural := range s.siteCfg.taxonomiesConfig {
+		s.Taxonomies[plural] = make(Taxonomy)
+		bkey := bucketKey{
+			plural: plural,
+		}
+
+		bucket := m.Get(plural)
+
+		if bucket == nil {
+			// Create the page and bucket
+			n := s.newPage(page.KindTaxonomyTerm, plural)
+			meta := map[string]interface{}{
+				"singular": singular,
+				"plural":   plural,
+			}
+
+			key := m.cleanKey(plural)
+			bucket = m.addBucketFor(key, n, meta)
+
+		}
+
+		// Add it to the temporary cache.
+		taxonomyBuckets[bkey] = bucket
+
+		// Taxonomy entries used in page front matter will be picked up later,
+		// but there may be some not yet in use.
+		pluralPrefix := m.cleanKey(plural) + "/"
+		m.r.WalkPrefix(pluralPrefix, func(k string, v interface{}) bool {
+			tb := v.(*pagesMapBucket)
+			// TODO(bep) cm Tag1 vs tags. Consider if we can normalize the
+			// path in the radix map.
+			// TODO(bep) cm meta
+			termKey := m.s.getTaxonomyKey(strings.TrimPrefix(k, pluralPrefix))
+			bucket.pages = append(bucket.pages, tb.owner)
+			bkey.termKey = termKey
+			taxonomyBuckets[bkey] = tb
+			return false
+		})
+
+	}
+
+	addTaxonomy := func(singular, plural, term string, weight int, p page.Page) {
+		bkey := bucketKey{
+			plural: plural,
+		}
+
+		termKey := s.getTaxonomyKey(term)
+
+		b1 := taxonomyBuckets[bkey]
+
+		var b2 *pagesMapBucket
+		bkey.termKey = termKey
+		b, found := taxonomyBuckets[bkey]
+		if found {
+			b2 = b
+		} else {
+
+			// Create the page and bucket
+			n := s.newTaxonomyPage(term, plural, termKey)
+			meta := map[string]interface{}{
+				"singular": singular,
+				"plural":   plural,
+				"term":     term,
+				"termKey":  termKey,
+			}
+
+			key := m.cleanKey(path.Join(plural, termKey))
+			b2 = m.addBucketFor(key, n, meta)
+			b1.pages = append(b1.pages, b2.owner)
+			taxonomyBuckets[bkey] = b2
+
+		}
+
+		w := page.NewWeightedPage(weight, p, b2.owner)
+
+		s.Taxonomies[plural].add(termKey, w)
+
+		b1.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
+		b2.owner.m.Dates.UpdateDateAndLastmodIfAfter(p)
+	}
+
+	m.r.Walk(func(k string, v interface{}) bool {
+		b := v.(*pagesMapBucket)
+		if b.view {
+			return false
+		}
+
+		for singular, plural := range s.siteCfg.taxonomiesConfig {
+			for _, p := range b.pages {
+
+				vals := getParam(p, plural, false)
+
+				w := getParamToLower(p, plural+"_weight")
+				weight, err := cast.ToIntE(w)
+				if err != nil {
+					m.s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.Path())
+					// weight will equal zero, so let the flow continue
+				}
+
+				if vals != nil {
+					if v, ok := vals.([]string); ok {
+						for _, idx := range v {
+							addTaxonomy(singular, plural, idx, weight, p)
+						}
+					} else if v, ok := vals.(string); ok {
+						addTaxonomy(singular, plural, v, weight, p)
+					} else {
+						m.s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.Path())
+					}
+				}
+
+			}
+		}
+		return false
+	})
+
+	for _, plural := range s.siteCfg.taxonomiesConfig {
+		for k := range s.Taxonomies[plural] {
+			s.Taxonomies[plural][k].Sort()
+		}
+	}
+
+	return nil
+}
+
+func (m *pagesMap) cleanKey(key string) string {
+	if !strings.HasPrefix(key, "/") {
+		key = "/" + key
+	}
+	if key != "/" {
+		key = strings.TrimRight(key, "/")
+	}
+
+	return key
+}
+
+func (m *pagesMap) dump() {
+	m.r.Walk(func(s string, v interface{}) bool {
+		b := v.(*pagesMapBucket)
+		if b.owner != nil {
+			fmt.Println("-------\n", s, ":", b.owner.Kind(), ":")
+			fmt.Println("Owner:", b.owner.Path())
+		}
+		for _, p := range b.pages {
+			fmt.Println(p.Path())
+		}
+		return false
+	})
+}
+
+type pagesMapBucket struct {
+	// Set if the pages in this bucket are also present in another bucket.
+	view bool
+
+	// Some additional metadata attached to this node.
+	meta map[string]interface{}
+
+	owner *pageState // The branch node
+
+	// Used to navigate the sections tree
+	parent         *pagesMapBucket
+	bucketSections []*pagesMapBucket
+
+	pagesInit sync.Once
+	pages     page.Pages
+
+	pagesAndSectionsInit sync.Once
+	pagesAndSections     page.Pages
+
+	sectionsInit sync.Once
+	sections     page.Pages
+}
+
+func (b *pagesMapBucket) isEmpty() bool {
+	return len(b.pages) == 0 && len(b.bucketSections) == 0
+}
+
+func (b *pagesMapBucket) getPages() page.Pages {
+	b.pagesInit.Do(func() {
+		page.SortByDefault(b.pages)
+	})
+	return b.pages
+}
+
+func (b *pagesMapBucket) getPagesAndSections() page.Pages {
+	b.pagesAndSectionsInit.Do(func() {
+		var pas page.Pages
+		pas = append(pas, b.pages...)
+		for _, p := range b.bucketSections {
+			pas = append(pas, p.owner)
+		}
+		b.pagesAndSections = pas
+		page.SortByDefault(b.pagesAndSections)
+	})
+	return b.pagesAndSections
+}
+
+func (b *pagesMapBucket) getSections() page.Pages {
+	b.sectionsInit.Do(func() {
+		for _, p := range b.bucketSections {
+			b.sections = append(b.sections, p.owner)
+		}
+		page.SortByDefault(b.sections)
+	})
+
+	return b.sections
+}
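Aside, a throwaway in-package test (mine, not from the patch) pinning down the cleanKey normalization the radix keys depend on: keys are rooted with a leading slash and stripped of trailing slashes, with "/" itself left alone.

package hugolib

import "testing"

func TestCleanKeySketch(t *testing.T) {
	m := newPagesMap(nil) // cleanKey only does string work; a nil *Site is fine here
	for in, want := range map[string]string{
		"blog":   "/blog", // keys get rooted...
		"/blog/": "/blog", // ...and trailing slashes trimmed,
		"/":      "/",     // except for the root itself.
	} {
		if got := m.cleanKey(in); got != want {
			t.Errorf("cleanKey(%q) = %q, want %q", in, got, want)
		}
	}
}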
diff --git a/hugolib/site.go b/hugolib/site.go
index 882874db947..04150f01eda 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -58,7 +58,6 @@ import (
 	"github.com/gohugoio/hugo/related"
 	"github.com/gohugoio/hugo/resources"
 	"github.com/gohugoio/hugo/resources/page/pagemeta"
-	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/gohugoio/hugo/source"
 	"github.com/gohugoio/hugo/tpl"
 
@@ -94,15 +93,11 @@ type Site struct {
 
 	Taxonomies TaxonomyList
 
-	taxonomyNodes *taxonomyNodeInfos
-
 	Sections Taxonomy
 	Info     SiteInfo
 
 	layoutHandler *output.LayoutHandler
 
-	buildStats *buildStats
-
 	language *langs.Language
 
 	siteCfg siteConfigHolder
@@ -221,7 +216,7 @@ func (s *Site) prepareInits() {
 				rootSection = append(rootSection, i)
 			}
 			if p1.IsSection() {
-				sectionPages := p1.Pages()
+				sectionPages := p1.RegularPages()
 				for i, p2 := range sectionPages {
 					p2s := p2.(*pageState)
 					if p2s.posNextPrevSection == nil {
@@ -263,28 +258,6 @@ func (s *Site) prepareInits() {
 
 }
 
-// Build stats for a given site.
-type buildStats struct {
-	draftCount   int
-	futureCount  int
-	expiredCount int
-}
-
-// TODO(bep) consolidate all site stats into this
-func (b *buildStats) update(p page.Page) {
-	if p.Draft() {
-		b.draftCount++
-	}
-
-	if resource.IsFuture(p) {
-		b.futureCount++
-	}
-
-	if resource.IsExpired(p) {
-		b.expiredCount++
-	}
-}
-
 type siteRenderingContext struct {
 	output.Format
 }
@@ -355,7 +328,6 @@ func (s *Site) reset() *Site {
 		publisher:              s.publisher,
 		siteConfigConfig:       s.siteConfigConfig,
 		enableInlineShortcodes: s.enableInlineShortcodes,
-		buildStats:             &buildStats{},
 		init:                   s.init,
 		PageCollections:        newPageCollections(),
 		siteCfg:                s.siteCfg,
@@ -453,7 +425,6 @@ func newSite(cfg deps.DepsCfg) (*Site, error) {
 		outputFormatsConfig:    siteOutputFormatsConfig,
 		mediaTypesConfig:       siteMediaTypesConfig,
 		frontmatterHandler:     frontMatterHandler,
-		buildStats:             &buildStats{},
 		enableInlineShortcodes: cfg.Language.GetBool("enableInlineShortcodes"),
 		siteCfg:                siteConfig,
 	}
@@ -1090,53 +1061,7 @@ func (s *Site) process(config BuildCfg) (err error) {
 
 }
 
-func (s *Site) setupSitePages() {
-	var homeDates *resource.Dates
-	if s.home != nil {
-		// If the home page has no dates set, we fall back to the site dates.
-		homeDates = &s.home.m.Dates
-	}
-
-	if !s.lastmod.IsZero() && (homeDates == nil || !resource.IsZeroDates(homeDates)) {
-		return
-	}
-
-	if homeDates != nil && !s.lastmod.IsZero() {
-		homeDates.FDate = s.lastmod
-		homeDates.FLastmod = s.lastmod
-		return
-
-	}
-
-	var siteLastmod time.Time
-	var siteLastDate time.Time
-
-	for _, page := range s.workAllPages {
-		if !page.IsPage() {
-			continue
-		}
-		// Determine Site.Info.LastChange
-		// Note that the logic to determine which date to use for Lastmod
-		// is already applied, so this is *the* date to use.
-		// We cannot just pick the last page in the default sort, because
-		// that may not be ordered by date.
-		// TODO(bep) check if this can be done earlier
-		if page.Lastmod().After(siteLastmod) {
-			siteLastmod = page.Lastmod()
-		}
-		if page.Date().After(siteLastDate) {
-			siteLastDate = page.Date()
-		}
-	}
-
-	s.lastmod = siteLastmod
-
-	if homeDates != nil && resource.IsZeroDates(homeDates) {
-		homeDates.FDate = siteLastDate
-		homeDates.FLastmod = s.lastmod
-	}
-
-}
+// TODO(bep) cm
 
 func (s *Site) render(ctx *siteRenderContext) (err error) {
 
@@ -1483,79 +1408,14 @@ func (s *Site) getTaxonomyKey(key string) string {
 	return strings.ToLower(s.PathSpec.MakePath(key))
 }
 
-func (s *Site) assembleTaxonomies() error {
-	s.Taxonomies = make(TaxonomyList)
-	taxonomies := s.siteCfg.taxonomiesConfig
-	for _, plural := range taxonomies {
-		s.Taxonomies[plural] = make(Taxonomy)
-	}
-
-	s.taxonomyNodes = &taxonomyNodeInfos{
-		m:      make(map[string]*taxonomyNodeInfo),
-		getKey: s.getTaxonomyKey,
-	}
-
-	s.Log.INFO.Printf("found taxonomies: %#v\n", taxonomies)
-
-	for singular, plural := range taxonomies {
-		parent := s.taxonomyNodes.GetOrCreate(plural, "")
-		parent.singular = singular
-
-		addTaxonomy := func(plural, term string, weight int, p page.Page) {
-			key := s.getTaxonomyKey(term)
-
-			n := s.taxonomyNodes.GetOrCreate(plural, term)
-			n.parent = parent
-
-			w := page.NewWeightedPage(weight, p, n.owner)
-
-			s.Taxonomies[plural].add(key, w)
-
-			n.UpdateFromPage(w.Page)
-			parent.UpdateFromPage(w.Page)
-		}
-
-		for _, p := range s.workAllPages {
-			vals := getParam(p, plural, false)
-
-			w := getParamToLower(p, plural+"_weight")
-			weight, err := cast.ToIntE(w)
-			if err != nil {
-				s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %q", w, p.pathOrTitle())
-				// weight will equal zero, so let the flow continue
-			}
-
-			if vals != nil {
-				if v, ok := vals.([]string); ok {
-					for _, idx := range v {
-						addTaxonomy(plural, idx, weight, p)
-					}
-				} else if v, ok := vals.(string); ok {
-					addTaxonomy(plural, v, weight, p)
-				} else {
-					s.Log.ERROR.Printf("Invalid %s in %q\n", plural, p.pathOrTitle())
-				}
-			}
-		}
-
-		for k := range s.Taxonomies[plural] {
-			s.Taxonomies[plural][k].Sort()
-		}
-	}
-
-	return nil
-}
-
 // Prepare site for a new full build.
 func (s *Site) resetBuildState() {
 	s.relatedDocsHandler = s.relatedDocsHandler.Clone()
 
 	s.PageCollections = newPageCollectionsFromPages(s.rawAllPages)
-	s.buildStats = &buildStats{}
 	s.init.Reset()
 
 	for _, p := range s.rawAllPages {
 		p.pagePages = &pagePages{}
-		p.subSections = page.Pages{}
 		p.parent = nil
 		p.Scratcher = maps.NewScratcher()
 	}
@@ -1759,8 +1619,11 @@ func (s *Site) kindFromSections(sections []string) string {
 		return page.KindHome
 	}
 
-	sectionPath := path.Join(sections...)
+	return s.kindFromSectionPath(path.Join(sections...))
+
+}
 
+func (s *Site) kindFromSectionPath(sectionPath string) string {
 	for _, plural := range s.siteCfg.taxonomiesConfig {
 		if plural == sectionPath {
 			return page.KindTaxonomyTerm
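Aside, not part of the patch: what the new kindFromSectionPath classification amounts to, restated as a standalone function. The hunk above ends at the taxonomy-term case; the taxonomy branch below is my assumption, carried over from the pre-existing kindFromSections logic, and the inlined taxonomy config is invented.

package main

import (
	"fmt"
	"strings"
)

// kindFromSectionPath, restated with the taxonomy config passed in.
func kindFromSectionPath(sectionPath string, taxonomies map[string]string) string {
	for _, plural := range taxonomies {
		if plural == sectionPath {
			return "taxonomyTerm"
		}
		if strings.HasPrefix(sectionPath, plural+"/") {
			return "taxonomy"
		}
	}
	return "section"
}

func main() {
	taxonomies := map[string]string{"tag": "tags", "category": "categories"}
	fmt.Println(kindFromSectionPath("tags", taxonomies))      // taxonomyTerm
	fmt.Println(kindFromSectionPath("tags/hugo", taxonomies)) // taxonomy
	fmt.Println(kindFromSectionPath("blog", taxonomies))      // section
}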
diff --git a/hugolib/site_sections.go b/hugolib/site_sections.go
index 8fce43471fc..ae343716eaa 100644
--- a/hugolib/site_sections.go
+++ b/hugolib/site_sections.go
@@ -14,14 +14,7 @@
 package hugolib
 
 import (
-	"path"
-	"strconv"
-	"strings"
-
 	"github.com/gohugoio/hugo/resources/page"
-	"github.com/gohugoio/hugo/resources/resource"
-
-	radix "github.com/hashicorp/go-immutable-radix"
 )
 
 // Sections returns the top level sections.
@@ -37,208 +30,3 @@ func (s *SiteInfo) Sections() page.Pages {
 func (s *SiteInfo) Home() (page.Page, error) {
 	return s.s.home, nil
 }
-
-func (s *Site) assembleSections() pageStatePages {
-	var newPages pageStatePages
-
-	if !s.isEnabled(page.KindSection) {
-		return newPages
-	}
-
-	// Maps section kind pages to their path, i.e. "my/section"
-	sectionPages := make(map[string]*pageState)
-
-	// The sections with content files will already have been created.
-	for _, sect := range s.findWorkPagesByKind(page.KindSection) {
-		sectionPages[sect.SectionsPath()] = sect
-	}
-
-	const (
-		sectKey     = "__hs"
-		sectSectKey = "_a" + sectKey
-		sectPageKey = "_b" + sectKey
-	)
-
-	var (
-		inPages    = radix.New().Txn()
-		inSections = radix.New().Txn()
-		undecided  pageStatePages
-	)
-
-	home := s.findFirstWorkPageByKindIn(page.KindHome)
-
-	for i, p := range s.workAllPages {
-
-		if p.Kind() != page.KindPage {
-			continue
-		}
-
-		sections := p.SectionsEntries()
-
-		if len(sections) == 0 {
-			// Root level pages. These will have the home page as their Parent.
-			p.parent = home
-			continue
-		}
-
-		sectionKey := p.SectionsPath()
-		_, found := sectionPages[sectionKey]
-
-		if !found && len(sections) == 1 {
-
-			// We only create content-file-less sections for the root sections.
-			n := s.newPage(page.KindSection, sections[0])
-
-			sectionPages[sectionKey] = n
-			newPages = append(newPages, n)
-			found = true
-		}
-
-		if len(sections) > 1 {
-			// Create the root section if not found.
-			_, rootFound := sectionPages[sections[0]]
-			if !rootFound {
-				sect := s.newPage(page.KindSection, sections[0])
-				sectionPages[sections[0]] = sect
-				newPages = append(newPages, sect)
-			}
-		}
-
-		if found {
-			pagePath := path.Join(sectionKey, sectPageKey, strconv.Itoa(i))
-			inPages.Insert([]byte(pagePath), p)
-		} else {
-			undecided = append(undecided, p)
-		}
-	}
-
-	// Create any missing sections in the tree.
-	// A sub-section needs a content file, but to create a navigational tree,
-	// given a content file in /content/a/b/c/_index.md, we cannot create just
-	// the c section.
-	for _, sect := range sectionPages {
-		sections := sect.SectionsEntries()
-		for i := len(sections); i > 0; i-- {
-			sectionPath := sections[:i]
-			sectionKey := path.Join(sectionPath...)
-			_, found := sectionPages[sectionKey]
-			if !found {
-				sect = s.newPage(page.KindSection, sectionPath[len(sectionPath)-1])
-				sect.m.sections = sectionPath
-				sectionPages[sectionKey] = sect
-				newPages = append(newPages, sect)
-			}
-		}
-	}
-
-	for k, sect := range sectionPages {
-		inPages.Insert([]byte(path.Join(k, sectSectKey)), sect)
-		inSections.Insert([]byte(k), sect)
-	}
-
-	var (
-		currentSection *pageState
-		children       page.Pages
-		dates          *resource.Dates
-		rootSections   = inSections.Commit().Root()
-	)
-
-	for i, p := range undecided {
-		// Now we can decide where to put this page into the tree.
-		sectionKey := p.SectionsPath()
-
-		_, v, _ := rootSections.LongestPrefix([]byte(sectionKey))
-		sect := v.(*pageState)
-		pagePath := path.Join(path.Join(sect.SectionsEntries()...), sectSectKey, "u", strconv.Itoa(i))
-		inPages.Insert([]byte(pagePath), p)
-	}
-
-	var rootPages = inPages.Commit().Root()
-
-	rootPages.Walk(func(path []byte, v interface{}) bool {
-		p := v.(*pageState)
-
-		if p.Kind() == page.KindSection {
-			if currentSection != nil {
-				// A new section
-				currentSection.setPages(children)
-				if dates != nil {
-					currentSection.m.Dates = *dates
-				}
-			}
-
-			currentSection = p
-			children = make(page.Pages, 0)
-			dates = nil
-			// Use section's dates from front matter if set.
-			if resource.IsZeroDates(currentSection) {
-				dates = &resource.Dates{}
-			}
-
-			return false
-
-		}
-
-		// Regular page
-		p.parent = currentSection
-		children = append(children, p)
-		if dates != nil {
-			dates.UpdateDateAndLastmodIfAfter(p)
-		}
-
-		return false
-	})
-
-	if currentSection != nil {
-		currentSection.setPages(children)
-		if dates != nil {
-			currentSection.m.Dates = *dates
-		}
-	}
-
-	// Build the sections hierarchy
-	for _, sect := range sectionPages {
-		sections := sect.SectionsEntries()
-		if len(sections) == 1 {
-			if home != nil {
-				sect.parent = home
-			}
-		} else {
-			parentSearchKey := path.Join(sect.SectionsEntries()[:len(sections)-1]...)
-			_, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey))
-			p := v.(*pageState)
-			sect.parent = p
-		}
-
-		sect.addSectionToParent()
-	}
-
-	var (
-		sectionsParamId      = "mainSections"
-		sectionsParamIdLower = strings.ToLower(sectionsParamId)
-		mainSections         interface{}
-		mainSectionsFound    bool
-		maxSectionWeight     int
-	)
-
-	mainSections, mainSectionsFound = s.Info.Params()[sectionsParamIdLower]
-
-	for _, sect := range sectionPages {
-		sect.sortParentSections()
-
-		if !mainSectionsFound {
-			weight := len(sect.Pages()) + (len(sect.Sections()) * 5)
-			if weight >= maxSectionWeight {
-				mainSections = []string{sect.Section()}
-				maxSectionWeight = weight
-			}
-		}
-	}
-
-	// Try to make this as backwards compatible as possible.
-	s.Info.Params()[sectionsParamId] = mainSections
-	s.Info.Params()[sectionsParamIdLower] = mainSections
-
-	return newPages
-
-}
diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go
index d4aa9d354b9..2e7ffdf0b03 100644
--- a/hugolib/site_sections_test.go
+++ b/hugolib/site_sections_test.go
@@ -137,21 +137,20 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 		}},
 		{"empty1", func(assert *require.Assertions, p page.Page) {
 			// > b,c
-			assert.NotNil(getPage(p, "/empty1/b"))
+			assert.Nil(getPage(p, "/empty1/b")) // No _index.md page.
 			assert.NotNil(getPage(p, "/empty1/b/c"))
 		}},
 		{"empty2", func(assert *require.Assertions, p page.Page) {
-			// > b,c,d where b and d have content files.
+			// > b,c,d where b and d have _index.md files.
 			b := getPage(p, "/empty2/b")
 			assert.NotNil(b)
 			assert.Equal("T40_-1", b.Title())
+
 			c := getPage(p, "/empty2/b/c")
+			assert.Nil(c) // No _index.md
 
-			assert.NotNil(c)
-			assert.Equal("Cs", c.Title())
 			d := getPage(p, "/empty2/b/c/d")
-			assert.NotNil(d)
+
 			assert.Equal("T41_-1", d.Title())
 
 		}},
@@ -163,9 +162,10 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 		{"empty3", func(assert *require.Assertions, p page.Page) {
 			// b,c,d with regular page in b
 			b := getPage(p, "/empty3/b")
-			assert.NotNil(b)
-			assert.Len(b.Pages(), 1)
-			assert.Equal("empty3.md", b.Pages()[0].File().LogicalName())
+			assert.Nil(b) // No _index.md
+			e3 := getPage(p, "/empty3/b/empty3")
+			assert.NotNil(e3)
+			assert.Equal("empty3.md", e3.File().LogicalName())
 
 		}},
 		{"empty3", func(assert *require.Assertions, p page.Page) {
@@ -188,19 +188,23 @@ PAG|{{ .Title }}|{{ $sect.InSection . }}
 		}},
 		{"l1", func(assert *require.Assertions, p page.Page) {
 			assert.Equal("L1s", p.Title())
-			assert.Len(p.Pages(), 2)
+			assert.Len(p.Pages(), 4) // 2 pages + 2 sections
 			assert.True(p.Parent().IsHome())
 			assert.Len(p.Sections(), 2)
 		}},
 		{"l1,l2", func(assert *require.Assertions, p page.Page) {
 			assert.Equal("T2_-1", p.Title())
-			assert.Len(p.Pages(), 3)
+			assert.Len(p.Pages(), 4) // 3 pages + 1 section
 			assert.Equal(p, p.Pages()[0].Parent())
 			assert.Equal("L1s", p.Parent().Title())
 			assert.Equal("/l1/l2/", p.RelPermalink())
 			assert.Len(p.Sections(), 1)
 
 			for _, child := range p.Pages() {
+				if child.IsSection() {
+					assert.Equal(child, child.CurrentSection())
+					continue
+				}
+
 				assert.Equal(p, child.CurrentSection())
 				active, err := child.InSection(p)
diff --git a/hugolib/taxonomy.go b/hugolib/taxonomy.go
index a7965ec26cb..e3f03310927 100644
--- a/hugolib/taxonomy.go
+++ b/hugolib/taxonomy.go
@@ -15,13 +15,11 @@ package hugolib
 
 import (
 	"fmt"
-	"path"
 	"sort"
 
 	"github.com/gohugoio/hugo/compare"
 
 	"github.com/gohugoio/hugo/resources/page"
-	"github.com/gohugoio/hugo/resources/resource"
 )
 
 // The TaxonomyList is a list of all taxonomies and their values
@@ -156,95 +154,3 @@ func (s *orderedTaxonomySorter) Swap(i, j int) {
 func (s *orderedTaxonomySorter) Less(i, j int) bool {
 	return s.by(&s.taxonomy[i], &s.taxonomy[j])
 }
-
-// taxonomyNodeInfo stores additional metadata about a taxonomy.
-type taxonomyNodeInfo struct {
-	plural string
-
-	// Maps "tags" to "tag".
-	singular string
-
-	// The term key as used in the taxonomy map, e.g "tag1".
-	// The value is normalized for paths, but may or not be lowercased
-	// depending on the disablePathToLower setting.
-	termKey string
-
-	// The original, unedited term name. Useful for titles etc.
-	term string
-
-	dates resource.Dates
-
-	parent *taxonomyNodeInfo
-
-	// Either of Kind taxonomyTerm (parent) or taxonomy
-	owner *page.PageWrapper
-}
-
-func (t *taxonomyNodeInfo) UpdateFromPage(p page.Page) {
-
-	// Select the latest dates
-	t.dates.UpdateDateAndLastmodIfAfter(p)
-}
-
-func (t *taxonomyNodeInfo) TransferValues(p *pageState) {
-	t.owner.Page = p
-	if p.Lastmod().IsZero() && p.Date().IsZero() {
-		p.m.Dates.UpdateDateAndLastmodIfAfter(t.dates)
-	}
-}
-
-// Maps either plural or plural/term to a taxonomy node.
-// TODO(bep) consolidate somehow with s.Taxonomies
-type taxonomyNodeInfos struct {
-	m      map[string]*taxonomyNodeInfo
-	getKey func(string) string
-}
-
-// map[string]*taxonomyNodeInfo
-func (t taxonomyNodeInfos) key(parts ...string) string {
-	return path.Join(parts...)
-}
-
-// GetOrAdd will get or create and add a new taxonomy node to the parent identified with plural.
-// It will panic if the parent does not exist.
-func (t taxonomyNodeInfos) GetOrAdd(plural, term string) *taxonomyNodeInfo {
-	parent := t.GetOrCreate(plural, "")
-	if parent == nil {
-		panic(fmt.Sprintf("no parent found with plural %q", plural))
-	}
-	child := t.GetOrCreate(plural, term)
-	child.parent = parent
-	return child
-}
-
-func (t taxonomyNodeInfos) GetOrCreate(plural, term string) *taxonomyNodeInfo {
-	termKey := t.getKey(term)
-	key := t.key(plural, termKey)
-
-	n, found := t.m[key]
-	if found {
-		return n
-	}
-
-	n = &taxonomyNodeInfo{
-		plural:  plural,
-		termKey: termKey,
-		term:    term,
-		owner:   &page.PageWrapper{}, // Page will be assigned later.
-	}
-
-	t.m[key] = n
-
-	return n
-}
-
-func (t taxonomyNodeInfos) Get(sections ...string) *taxonomyNodeInfo {
-	key := t.key(sections...)
-
-	n, found := t.m[key]
-	if found {
-		return n
-	}
-
-	return nil
-}
diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go
index 2edc36d63e1..21748d0bf71 100644
--- a/hugolib/taxonomy_test.go
+++ b/hugolib/taxonomy_test.go
@@ -168,7 +168,7 @@ permalinkeds:
 	for taxonomy, count := range taxonomyTermPageCounts {
 		term := s.getPage(page.KindTaxonomyTerm, taxonomy)
 		require.NotNil(t, term)
-		require.Len(t, term.Pages(), count)
+		require.Len(t, term.Pages(), count, taxonomy)
 
 		for _, p := range term.Pages() {
 			require.Equal(t, page.KindTaxonomy, p.Kind())
diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go
index ac511367d6e..d7e0d5c85c5 100644
--- a/hugolib/testhelpers_test.go
+++ b/hugolib/testhelpers_test.go
@@ -698,6 +698,7 @@ type testHelper struct {
 }
 
 func (th testHelper) assertFileContent(filename string, matches ...string) {
+	th.T.Helper()
 	filename = th.replaceDefaultContentLanguageValue(filename)
 	content := readDestination(th.T, th.Fs, filename)
 	for _, match := range matches {
diff --git a/resources/page/page.go b/resources/page/page.go
index 00b449607fc..3b43b0af3f1 100644
--- a/resources/page/page.go
+++ b/resources/page/page.go
@@ -57,6 +57,13 @@ type AuthorProvider interface {
 // ChildCareProvider provides accessors to child resources.
 type ChildCareProvider interface {
 	Pages() Pages
+
+	// RegularPages returns a list of pages of kind 'Page'.
+	// In Hugo 0.57 we changed the Pages method so it returns all page
+	// kinds, even sections. If you want the old behaviour, you can
+	// use RegularPages.
+	RegularPages() Pages
+
 	Resources() resource.Resources
 }
diff --git a/resources/page/page_nop.go b/resources/page/page_nop.go
index c3a4819f1f4..ea1a44d8f24 100644
--- a/resources/page/page_nop.go
+++ b/resources/page/page_nop.go
@@ -284,6 +284,10 @@ func (p *nopPage) Pages() Pages {
 	return nil
 }
 
+func (p *nopPage) RegularPages() Pages {
+	return nil
+}
+
 func (p *nopPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
 	return nil, nil
 }
diff --git a/resources/page/testhelpers_test.go b/resources/page/testhelpers_test.go
index 1a27985576d..e861c13751f 100644
--- a/resources/page/testhelpers_test.go
+++ b/resources/page/testhelpers_test.go
@@ -351,6 +351,10 @@ func (p *testPage) Pages() Pages {
 	panic("not implemented")
 }
 
+func (p *testPage) RegularPages() Pages {
+	panic("not implemented")
+}
+
 func (p *testPage) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
 	return nil, nil
 }
diff --git a/resources/page/weighted.go b/resources/page/weighted.go
index 3f75bcc3cfe..48ed736ce0f 100644
--- a/resources/page/weighted.go
+++ b/resources/page/weighted.go
@@ -42,7 +42,7 @@ func (p WeightedPages) Page() Page {
 		return nil
 	}
 
-	return first.owner.Page
+	return first.owner
 }
 
 // A WeightedPage is a Page with a weight.
@@ -54,15 +54,10 @@ type WeightedPage struct {
 	// manual .Site.GetPage lookups. It is implemented in this roundabout way
 	// because we cannot add additional state to the WeightedPages slice
 	// without breaking lots of templates in the wild.
-	owner *PageWrapper
+	owner Page
 }
 
-// PageWrapper wraps a Page.
-type PageWrapper struct {
-	Page
-}
-
-func NewWeightedPage(weight int, p Page, owner *PageWrapper) WeightedPage {
+func NewWeightedPage(weight int, p Page, owner Page) WeightedPage {
 	return WeightedPage{Weight: weight, Page: p, owner: owner}
 }
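Aside, a miniature model (my types, not Hugo's) of the weighted.go change above: with PageWrapper gone, each WeightedPage carries the owning term page directly, so Page() no longer needs the wrapper indirection that existed only because the owner was assigned after construction.

package main

import "fmt"

type page struct{ title string }

type weightedPage struct {
	weight int
	page   *page
	owner  *page // the owning taxonomy-term page
}

type weightedPages []weightedPage

// Page returns the term page that owns this list, as in the patched
// WeightedPages.Page above.
func (w weightedPages) Page() *page {
	if len(w) == 0 {
		return nil
	}
	return w[0].owner
}

func main() {
	term := &page{title: "tags/blue"}
	w := weightedPages{
		{weight: 10, page: &page{title: "post-1"}, owner: term},
		{weight: 20, page: &page{title: "post-2"}, owner: term},
	}
	fmt.Println(w.Page().title) // tags/blue
}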
diff --git a/tpl/tplimpl/embedded/templates.autogen.go b/tpl/tplimpl/embedded/templates.autogen.go
index e2d1d3c39bd..93713f00ce1 100644
--- a/tpl/tplimpl/embedded/templates.autogen.go
+++ b/tpl/tplimpl/embedded/templates.autogen.go
@@ -19,7 +19,8 @@ package embedded
 // EmbeddedTemplates represents all embedded templates.
 var EmbeddedTemplates = [][2]string{
 	{`_default/robots.txt`, `User-agent: *`},
-	{`_default/rss.xml`, `{{- $pages := .Data.Pages -}}
+	{`_default/rss.xml`, `{{- $pages := .Pages -}}
+{{- if .IsHome -}}{{- $pages = .Site.RegularPages -}}{{- end -}}
 {{- $limit := .Site.Config.Services.RSS.Limit -}}
 {{- if ge $limit 1 -}}
 {{- $pages = $pages | first $limit -}}
diff --git a/tpl/tplimpl/embedded/templates/_default/rss.xml b/tpl/tplimpl/embedded/templates/_default/rss.xml
index 675ecd43c3a..a3f58010e82 100644
--- a/tpl/tplimpl/embedded/templates/_default/rss.xml
+++ b/tpl/tplimpl/embedded/templates/_default/rss.xml
@@ -1,4 +1,5 @@
-{{- $pages := .Data.Pages -}}
+{{- $pages := .Pages -}}
+{{- if .IsHome -}}{{- $pages = .Site.RegularPages -}}{{- end -}}
 {{- $limit := .Site.Config.Services.RSS.Limit -}}
 {{- if ge $limit 1 -}}
 {{- $pages = $pages | first $limit -}}