From b8fb10e38a412265e78ca07ac08ec0d1ab8e1be7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Bj=C3=B8rn=20Erik=20Pedersen?= Date: Wed, 2 Jan 2019 12:33:26 +0100 Subject: [PATCH] Add a Page interface See #5074 --- hugolib/collections.go | 8 + hugolib/embedded_shortcodes_test.go | 6 +- hugolib/hugo_sites.go | 38 +++-- hugolib/hugo_sites_build.go | 15 +- hugolib/hugo_sites_build_test.go | 78 +++++----- hugolib/hugo_sites_multihost_test.go | 8 +- hugolib/language_content_dir_test.go | 17 ++- hugolib/menu_test.go | 2 +- hugolib/page.go | 174 ++++++++++++---------- hugolib/pageCache_test.go | 4 +- hugolib/pageGroup.go | 43 +++--- hugolib/pageGroup_test.go | 14 +- hugolib/pageSort.go | 79 ++++++---- hugolib/pageSort_test.go | 66 ++++---- hugolib/page_output.go | 8 +- hugolib/page_paths.go | 10 +- hugolib/page_resource.go | 7 +- hugolib/page_test.go | 72 ++++----- hugolib/page_time_integration_test.go | 18 +-- hugolib/pagebundler_handlers.go | 14 +- hugolib/pagebundler_test.go | 22 +-- hugolib/pagecollections.go | 36 +++-- hugolib/pagecollections_test.go | 2 +- hugolib/pagemeta/page_frontmatter.go | 8 +- hugolib/pagemeta/page_frontmatter_test.go | 32 ++-- hugolib/pagemeta/pagemeta.go | 25 +++- hugolib/pagesPrevNext.go | 12 +- hugolib/pagesPrevNext_test.go | 8 +- hugolib/pages_language_merge.go | 6 +- hugolib/pages_language_merge_test.go | 14 +- hugolib/pages_related.go | 2 +- hugolib/pages_related_test.go | 16 +- hugolib/pagination.go | 12 +- hugolib/pagination_test.go | 10 +- hugolib/permalinks.go | 14 +- hugolib/shortcode_test.go | 2 +- hugolib/site.go | 56 +++---- hugolib/site_render.go | 45 +++--- hugolib/site_sections.go | 75 ++++++---- hugolib/site_sections_test.go | 23 +-- hugolib/site_test.go | 62 ++++---- hugolib/taxonomy.go | 20 +-- hugolib/taxonomy_test.go | 2 +- hugolib/testhelpers_test.go | 2 +- hugolib/translations.go | 24 +-- related/inverted_index.go | 8 +- related/inverted_index_test.go | 2 +- resources/resource/page.go | 48 ++++++ tpl/collections/collections.go | 7 +- 49 files changed, 720 insertions(+), 556 deletions(-) create mode 100644 resources/resource/page.go diff --git a/hugolib/collections.go b/hugolib/collections.go index cf75d373221..09065b696ad 100644 --- a/hugolib/collections.go +++ b/hugolib/collections.go @@ -102,3 +102,11 @@ func (pages Pages) ToResources() resource.Resources { } return r } + +func (p Pages) Group(key interface{}, in interface{}) (interface{}, error) { + pages, err := toPages(in) + if err != nil { + return nil, err + } + return PageGroup{Key: key, Pages: pages}, nil +} diff --git a/hugolib/embedded_shortcodes_test.go b/hugolib/embedded_shortcodes_test.go index 3a6220b532f..116c07ac963 100644 --- a/hugolib/embedded_shortcodes_test.go +++ b/hugolib/embedded_shortcodes_test.go @@ -20,6 +20,8 @@ import ( "strings" "testing" + "github.com/spf13/cast" + "path/filepath" "github.com/gohugoio/hugo/deps" @@ -69,7 +71,9 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) { require.Len(t, s.RegularPages, 1) - output := string(s.RegularPages[0].content()) + content, err := s.RegularPages[0].(*Page).Content() + require.NoError(t, err) + output := cast.ToString(content) if !strings.Contains(output, expected) { t.Errorf("Got\n%q\nExpected\n%q", output, expected) diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 9ce1c438e75..7f1da148e8b 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -32,6 +32,7 @@ import ( "github.com/gohugoio/hugo/langs" "github.com/gohugoio/hugo/i18n" + "github.com/gohugoio/hugo/resources/resource" 
"github.com/gohugoio/hugo/tpl" "github.com/gohugoio/hugo/tpl/tplimpl" ) @@ -136,7 +137,7 @@ func (h *HugoSites) langSite() map[string]*Site { // GetContentPage finds a Page with content given the absolute filename. // Returns nil if none found. -func (h *HugoSites) GetContentPage(filename string) *Page { +func (h *HugoSites) GetContentPage(filename string) resource.Page { for _, s := range h.Sites { pos := s.rawAllPages.findPagePosByFilename(filename) if pos == -1 { @@ -495,11 +496,14 @@ func (h *HugoSites) assignMissingTranslations() error { for _, nodeType := range []string{KindHome, KindSection, KindTaxonomy, KindTaxonomyTerm} { nodes := h.findPagesByKindIn(nodeType, allPages) + // TODO(bep) page // Assign translations for _, t1 := range nodes { + t1p := t1.(*Page) for _, t2 := range nodes { - if t1.isNewTranslation(t2) { - t1.translations = append(t1.translations, t2) + t2p := t2.(*Page) + if t1p.isNewTranslation(t2p) { + t1p.translations = append(t1p.translations, t2p) } } } @@ -507,8 +511,10 @@ func (h *HugoSites) assignMissingTranslations() error { // Now we can sort the translations. for _, p := range allPages { - if len(p.translations) > 0 { - pageBy(languagePageSort).Sort(p.translations) + // TODO(bep) page + pp := p.(*Page) + if len(pp.translations) > 0 { + pageBy(languagePageSort).Sort(pp.translations) } } return nil @@ -548,7 +554,7 @@ func (h *HugoSites) createMissingPages() error { if s.isEnabled(KindTaxonomyTerm) { foundTaxonomyTermsPage := false for _, p := range taxonomyTermsPages { - if p.sectionsPath() == plural { + if p.(*Page).sectionsPath() == plural { foundTaxonomyTermsPage = true break } @@ -570,7 +576,7 @@ func (h *HugoSites) createMissingPages() error { key = s.PathSpec.MakePathSanitized(key) } for _, p := range taxonomyPages { - sectionsPath := p.sectionsPath() + sectionsPath := p.(*Page).sectionsPath() if !strings.HasPrefix(sectionsPath, plural) { continue @@ -631,18 +637,20 @@ func (h *HugoSites) removePageByFilename(filename string) { func (h *HugoSites) setupTranslations() { for _, s := range h.Sites { for _, p := range s.rawAllPages { - if p.Kind == kindUnknown { - p.Kind = p.kindFromSections() + // TODO(bep) page .(*Page) and all others + pp := p.(*Page) + if p.Kind() == kindUnknown { + pp.kind = pp.kindFromSections() } - if !p.s.isEnabled(p.Kind) { + if !pp.s.isEnabled(p.Kind()) { continue } - shouldBuild := p.shouldBuild() - s.updateBuildStats(p) + shouldBuild := pp.shouldBuild() + s.updateBuildStats(pp) if shouldBuild { - if p.headless { + if pp.headless { s.headlessPages = append(s.headlessPages, p) } else { s.Pages = append(s.Pages, p) @@ -676,13 +684,13 @@ func (h *HugoSites) setupTranslations() { func (s *Site) preparePagesForRender(start bool) error { for _, p := range s.Pages { - if err := p.prepareForRender(start); err != nil { + if err := p.(*Page).prepareForRender(start); err != nil { return err } } for _, p := range s.headlessPages { - if err := p.prepareForRender(start); err != nil { + if err := p.(*Page).prepareForRender(start); err != nil { return err } } diff --git a/hugolib/hugo_sites_build.go b/hugolib/hugo_sites_build.go index ec5070fa814..2acf2ea5063 100644 --- a/hugolib/hugo_sites_build.go +++ b/hugolib/hugo_sites_build.go @@ -237,19 +237,20 @@ func (h *HugoSites) assemble(config *BuildCfg) error { for _, pages := range []Pages{s.Pages, s.headlessPages} { for _, p := range pages { // May have been set in front matter - if len(p.outputFormats) == 0 { - p.outputFormats = s.outputFormats[p.Kind] + pp := p.(*Page) + if 
len(pp.outputFormats) == 0 { + pp.outputFormats = s.outputFormats[p.Kind()] } - if p.headless { + if pp.headless { // headless = 1 output format only - p.outputFormats = p.outputFormats[:1] + pp.outputFormats = pp.outputFormats[:1] } - for _, r := range p.Resources.ByType(pageResourceType) { - r.(*Page).outputFormats = p.outputFormats + for _, r := range p.Resources().ByType(pageResourceType) { + r.(*Page).outputFormats = pp.outputFormats } - if err := p.initPaths(); err != nil { + if err := p.(*Page).initPaths(); err != nil { return err } diff --git a/hugolib/hugo_sites_build_test.go b/hugolib/hugo_sites_build_test.go index f772ce1926d..4127c863526 100644 --- a/hugolib/hugo_sites_build_test.go +++ b/hugolib/hugo_sites_build_test.go @@ -234,7 +234,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { gp1 := b.H.GetContentPage(filepath.FromSlash("content/sect/doc1.en.md")) require.NotNil(t, gp1) - require.Equal(t, "doc1", gp1.title) + require.Equal(t, "doc1", gp1.Title()) gp2 := b.H.GetContentPage(filepath.FromSlash("content/dummysect/notfound.md")) require.Nil(t, gp2) @@ -247,12 +247,12 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { assert.Equal(5, len(enSite.RegularPages)) assert.Equal(32, len(enSite.AllPages)) - doc1en := enSite.RegularPages[0] + doc1en := enSite.RegularPages[0].(*Page) permalink := doc1en.Permalink() require.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink") require.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself") - doc2 := enSite.RegularPages[1] + doc2 := enSite.RegularPages[1].(*Page) permalink = doc2.Permalink() require.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink") @@ -263,11 +263,10 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { // and do no not do any language code prefixing. 
require.Equal(t, "http://example.com/blog/superbob/", permalink, "invalid doc3 permalink") - require.Equal(t, "/superbob", doc3.URL(), "invalid url, was specified on doc3") b.AssertFileContent("public/superbob/index.html", "doc3|Hello|en") require.Equal(t, doc2.PrevPage, doc3, "doc3 should follow doc2, in .PrevPage") - doc1fr := doc1en.Translations()[0] + doc1fr := doc1en.Translations()[0].(*Page) permalink = doc1fr.Permalink() require.Equal(t, "http://example.com/blog/fr/sect/doc1/", permalink, "invalid doc1fr permalink") @@ -275,10 +274,9 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Equal(t, doc1fr.Translations()[0], doc1en, "doc1-fr should have doc1-en as translation") require.Equal(t, "fr", doc1fr.Language().Lang) - doc4 := enSite.AllPages[4] + doc4 := enSite.AllPages[4].(*Page) permalink = doc4.Permalink() require.Equal(t, "http://example.com/blog/fr/sect/doc4/", permalink, "invalid doc4 permalink") - require.Equal(t, "/blog/fr/sect/doc4/", doc4.URL()) require.Len(t, doc4.Translations(), 0, "found translations for doc4") @@ -299,7 +297,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Len(t, frSite.AllPages, 32, "should have 32 total pages (including translations and nodes)") for _, frenchPage := range frSite.RegularPages { - require.Equal(t, "fr", frenchPage.Lang()) + p := frenchPage.(*Page) + require.Equal(t, "fr", p.Lang()) } // See https://github.com/gohugoio/hugo/issues/4285 @@ -331,32 +330,32 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { homeEn := enSite.getPage(KindHome) require.NotNil(t, homeEn) require.Len(t, homeEn.Translations(), 3) - require.Equal(t, "fr", homeEn.Translations()[0].Lang()) - require.Equal(t, "nn", homeEn.Translations()[1].Lang()) - require.Equal(t, "På nynorsk", homeEn.Translations()[1].title) - require.Equal(t, "nb", homeEn.Translations()[2].Lang()) - require.Equal(t, "På bokmål", homeEn.Translations()[2].title, configSuffix) - require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix) + require.Equal(t, "fr", homeEn.Translations()[0].(*Page).Lang()) + require.Equal(t, "nn", homeEn.Translations()[1].(*Page).Lang()) + require.Equal(t, "På nynorsk", homeEn.Translations()[1].Title()) + require.Equal(t, "nb", homeEn.Translations()[2].(*Page).Lang()) + require.Equal(t, "På bokmål", homeEn.Translations()[2].Title(), configSuffix) + require.Equal(t, "Bokmål", homeEn.Translations()[2].(*Page).Language().LanguageName, configSuffix) sectFr := frSite.getPage(KindSection, "sect") require.NotNil(t, sectFr) require.Equal(t, "fr", sectFr.Lang()) require.Len(t, sectFr.Translations(), 1) - require.Equal(t, "en", sectFr.Translations()[0].Lang()) - require.Equal(t, "Sects", sectFr.Translations()[0].title) + require.Equal(t, "en", sectFr.Translations()[0].(*Page).Lang()) + require.Equal(t, "Sects", sectFr.Translations()[0].Title()) nnSite := sites[2] require.Equal(t, "nn", nnSite.Language.Lang) taxNn := nnSite.getPage(KindTaxonomyTerm, "lag") require.NotNil(t, taxNn) require.Len(t, taxNn.Translations(), 1) - require.Equal(t, "nb", taxNn.Translations()[0].Lang()) + require.Equal(t, "nb", taxNn.Translations()[0].(*Page).Lang()) taxTermNn := nnSite.getPage(KindTaxonomy, "lag", "sogndal") require.NotNil(t, taxTermNn) require.Len(t, taxTermNn.Translations(), 1) - require.Equal(t, "nb", taxTermNn.Translations()[0].Lang()) + require.Equal(t, "nb", taxTermNn.Translations()[0].(*Page).Lang()) // Check sitemap(s) 
b.AssertFileContent("public/sitemap.xml", @@ -392,27 +391,27 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { require.Equal(t, "Heim", nnSite.Menus["main"].ByName()[0].Name) // Issue #1302 - require.Equal(t, template.URL(""), enSite.RegularPages[0].RSSLink()) + require.Equal(t, template.URL(""), enSite.RegularPages[0].(*Page).RSSLink()) // Issue #3108 - prevPage := enSite.RegularPages[0].PrevPage + prevPage := enSite.RegularPages[0].(*Page).PrevPage require.NotNil(t, prevPage) - require.Equal(t, KindPage, prevPage.Kind) + require.Equal(t, KindPage, prevPage.Kind()) for { if prevPage == nil { break } - require.Equal(t, KindPage, prevPage.Kind) - prevPage = prevPage.PrevPage + require.Equal(t, KindPage, prevPage.Kind()) + prevPage = prevPage.(*Page).PrevPage } // Check bundles bundleFr := frSite.getPage(KindPage, "bundles/b1/index.md") require.NotNil(t, bundleFr) require.Equal(t, "/blog/fr/bundles/b1/", bundleFr.RelPermalink()) - require.Equal(t, 1, len(bundleFr.Resources)) - logoFr := bundleFr.Resources.GetMatch("logo*") + require.Equal(t, 1, len(bundleFr.Resources())) + logoFr := bundleFr.Resources().GetMatch("logo*") require.NotNil(t, logoFr) require.Equal(t, "/blog/fr/bundles/b1/logo.png", logoFr.RelPermalink()) b.AssertFileContent("public/fr/bundles/b1/logo.png", "PNG Data") @@ -420,8 +419,8 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) { bundleEn := enSite.getPage(KindPage, "bundles/b1/index.en.md") require.NotNil(t, bundleEn) require.Equal(t, "/blog/en/bundles/b1/", bundleEn.RelPermalink()) - require.Equal(t, 1, len(bundleEn.Resources)) - logoEn := bundleEn.Resources.GetMatch("logo*") + require.Equal(t, 1, len(bundleEn.Resources())) + logoEn := bundleEn.Resources().GetMatch("logo*") require.NotNil(t, logoEn) require.Equal(t, "/blog/en/bundles/b1/logo.png", logoEn.RelPermalink()) b.AssertFileContent("public/en/bundles/b1/logo.png", "PNG Data") @@ -504,9 +503,9 @@ func TestMultiSitesRebuild(t *testing.T) { assert.Len(enSite.RegularPages, 6) assert.Len(enSite.AllPages, 34) assert.Len(frSite.RegularPages, 5) - require.Equal(t, "new_fr_1", frSite.RegularPages[3].title) - require.Equal(t, "new_en_2", enSite.RegularPages[0].title) - require.Equal(t, "new_en_1", enSite.RegularPages[1].title) + require.Equal(t, "new_fr_1", frSite.RegularPages[3].Title()) + require.Equal(t, "new_en_2", enSite.RegularPages[0].Title()) + require.Equal(t, "new_en_1", enSite.RegularPages[1].Title()) rendered := readDestination(t, fs, "public/en/new1/index.html") require.True(t, strings.Contains(rendered, "new_en_1"), rendered) @@ -540,7 +539,7 @@ func TestMultiSitesRebuild(t *testing.T) { }, func(t *testing.T) { assert.Len(enSite.RegularPages, 6, "Rename") - require.Equal(t, "new_en_1", enSite.RegularPages[1].title) + require.Equal(t, "new_en_1", enSite.RegularPages[1].Title()) rendered := readDestination(t, fs, "public/en/new1renamed/index.html") require.True(t, strings.Contains(rendered, "new_en_1"), rendered) }}, @@ -582,7 +581,7 @@ func TestMultiSitesRebuild(t *testing.T) { homeEn := enSite.getPage(KindHome) require.NotNil(t, homeEn) assert.Len(homeEn.Translations(), 3) - require.Equal(t, "fr", homeEn.Translations()[0].Lang()) + require.Equal(t, "fr", homeEn.Translations()[0].(*Page).Lang()) }, }, @@ -626,12 +625,13 @@ func assertShouldNotBuild(t *testing.T, sites *HugoSites) { s := sites.Sites[0] for _, p := range s.rawAllPages { + pp := p.(*Page) // No HTML when not processed - require.Equal(t, p.shouldBuild(), bytes.Contains(p.workContent, 
[]byte(" p2.Date.Unix() + return p1.Date().Unix() > p2.Date().Unix() } - if p2.Weight == 0 { + if p2.Weight() == 0 { return true } - if p1.Weight == 0 { + if p1.Weight() == 0 { return false } - return p1.Weight < p2.Weight + return p1.Weight() < p2.Weight() } -var languagePageSort = func(p1, p2 *Page) bool { +var languagePageSort = func(p11, p21 resource.Page) bool { + // TODO(bep) page + p1 := p11.(*Page) + p2 := p21.(*Page) + if p1.Language().Weight == p2.Language().Weight { - if p1.Date.Unix() == p2.Date.Unix() { + if p1.Date().Unix() == p2.Date().Unix() { if p1.LinkTitle() == p2.LinkTitle() { return (p1.FullFilePath() < p2.FullFilePath()) } return (p1.LinkTitle() < p2.LinkTitle()) } - return p1.Date.Unix() > p2.Date.Unix() + return p1.Date().Unix() > p2.Date().Unix() } if p2.Language().Weight == 0 { @@ -137,8 +143,8 @@ func (p Pages) ByTitle() Pages { const key = "pageSort.ByTitle" - title := func(p1, p2 *Page) bool { - return p1.title < p2.title + title := func(p1, p2 resource.Page) bool { + return p1.Title() < p2.Title() } pages, _ := spc.get(key, pageBy(title).Sort, p) @@ -154,7 +160,7 @@ func (p Pages) ByLinkTitle() Pages { const key = "pageSort.ByLinkTitle" - linkTitle := func(p1, p2 *Page) bool { + linkTitle := func(p1, p2 resource.Page) bool { return p1.LinkTitle() < p2.LinkTitle() } @@ -172,8 +178,8 @@ func (p Pages) ByDate() Pages { const key = "pageSort.ByDate" - date := func(p1, p2 *Page) bool { - return p1.Date.Unix() < p2.Date.Unix() + date := func(p1, p2 resource.Page) bool { + return p1.Date().Unix() < p2.Date().Unix() } pages, _ := spc.get(key, pageBy(date).Sort, p) @@ -190,8 +196,8 @@ func (p Pages) ByPublishDate() Pages { const key = "pageSort.ByPublishDate" - pubDate := func(p1, p2 *Page) bool { - return p1.PublishDate.Unix() < p2.PublishDate.Unix() + pubDate := func(p1, p2 resource.Page) bool { + return p1.PublishDate().Unix() < p2.PublishDate().Unix() } pages, _ := spc.get(key, pageBy(pubDate).Sort, p) @@ -208,8 +214,8 @@ func (p Pages) ByExpiryDate() Pages { const key = "pageSort.ByExpiryDate" - expDate := func(p1, p2 *Page) bool { - return p1.ExpiryDate.Unix() < p2.ExpiryDate.Unix() + expDate := func(p1, p2 resource.Page) bool { + return p1.ExpiryDate().Unix() < p2.ExpiryDate().Unix() } pages, _ := spc.get(key, pageBy(expDate).Sort, p) @@ -226,8 +232,8 @@ func (p Pages) ByLastmod() Pages { const key = "pageSort.ByLastmod" - date := func(p1, p2 *Page) bool { - return p1.Lastmod.Unix() < p2.Lastmod.Unix() + date := func(p1, p2 resource.Page) bool { + return p1.Lastmod().Unix() < p2.Lastmod().Unix() } pages, _ := spc.get(key, pageBy(date).Sort, p) @@ -244,8 +250,20 @@ func (p Pages) ByLength() Pages { const key = "pageSort.ByLength" - length := func(p1, p2 *Page) bool { - return len(p1.content()) < len(p2.content()) + length := func(p1, p2 resource.Page) bool { + + p1l, ok1 := p1.(resource.LengthProvider) + p2l, ok2 := p2.(resource.LengthProvider) + + if !ok1 { + return true + } + + if !ok2 { + return false + } + + return p1l.Len() < p2l.Len() } pages, _ := spc.get(key, pageBy(length).Sort, p) @@ -296,9 +314,10 @@ func (p Pages) ByParam(paramsKey interface{}) Pages { paramsKeyStr := cast.ToString(paramsKey) key := "pageSort.ByParam." 
+ paramsKeyStr - paramsKeyComparator := func(p1, p2 *Page) bool { - v1, _ := p1.Param(paramsKeyStr) - v2, _ := p2.Param(paramsKeyStr) + paramsKeyComparator := func(p1, p2 resource.Page) bool { + p1p, p2p := p1.(*Page), p2.(*Page) + v1, _ := p1p.Param(paramsKeyStr) + v2, _ := p2p.Param(paramsKeyStr) s1 := cast.ToString(v1) s2 := cast.ToString(v2) diff --git a/hugolib/pageSort_test.go b/hugolib/pageSort_test.go index 695045ff125..f844558d7fd 100644 --- a/hugolib/pageSort_test.go +++ b/hugolib/pageSort_test.go @@ -19,6 +19,8 @@ import ( "testing" "time" + "github.com/gohugoio/hugo/resources/resource" + "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) @@ -38,18 +40,18 @@ func TestDefaultSort(t *testing.T) { setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p) p.sort() - assert.Equal(t, 1, p[0].Weight) + assert.Equal(t, 1, p[0].Weight()) // Consider zero weight, issue #2673 setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "d", "c"}, [4]int{0, 0, 0, 1}, p) p.sort() - assert.Equal(t, 1, p[0].Weight) + assert.Equal(t, 1, p[0].Weight()) // next by date setSortVals([4]time.Time{d3, d4, d1, d2}, [4]string{"a", "b", "c", "d"}, [4]int{1, 1, 1, 1}, p) p.sort() - assert.Equal(t, d1, p[0].Date) + assert.Equal(t, d1, p[0].Date()) // finally by link title setSortVals([4]time.Time{d3, d3, d3, d3}, [4]string{"b", "c", "a", "d"}, [4]int{1, 1, 1, 1}, p) @@ -67,12 +69,13 @@ func TestSortByLinkTitle(t *testing.T) { pages := createSortTestPages(s, 6) for i, p := range pages { + pp := p.(*Page) if i < 5 { - p.title = fmt.Sprintf("title%d", i) + pp.title = fmt.Sprintf("title%d", i) } if i > 2 { - p.linkTitle = fmt.Sprintf("linkTitle%d", i) + pp.linkTitle = fmt.Sprintf("linkTitle%d", i) } } @@ -104,14 +107,14 @@ func TestSortByN(t *testing.T) { sortFunc func(p Pages) Pages assertFunc func(p Pages) bool }{ - {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight == 1 }}, - {(Pages).ByTitle, func(p Pages) bool { return p[0].title == "ab" }}, + {(Pages).ByWeight, func(p Pages) bool { return p[0].Weight() == 1 }}, + {(Pages).ByTitle, func(p Pages) bool { return p[0].Title() == "ab" }}, {(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }}, - {(Pages).ByDate, func(p Pages) bool { return p[0].Date == d4 }}, - {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate == d4 }}, - {(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate == d4 }}, - {(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod == d3 }}, - {(Pages).ByLength, func(p Pages) bool { return p[0].content() == "b_content" }}, + {(Pages).ByDate, func(p Pages) bool { return p[0].Date() == d4 }}, + {(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }}, + {(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }}, + {(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod() == d3 }}, + {(Pages).ByLength, func(p Pages) bool { return p[0].(resource.LengthProvider).Len() == len("b_content") }}, } { setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p) @@ -140,11 +143,11 @@ func TestPageSortReverse(t *testing.T) { t.Parallel() s := newTestSite(t) p1 := createSortTestPages(s, 10) - assert.Equal(t, 0, p1[0].fuzzyWordCount) - assert.Equal(t, 9, p1[9].fuzzyWordCount) + assert.Equal(t, 0, p1[0].(*Page).fuzzyWordCount) + assert.Equal(t, 9, p1[9].(*Page).fuzzyWordCount) p2 := p1.Reverse() - assert.Equal(t, 9, p2[0].fuzzyWordCount) - 
assert.Equal(t, 0, p2[9].fuzzyWordCount) + assert.Equal(t, 9, p2[0].(*Page).fuzzyWordCount) + assert.Equal(t, 0, p2[9].(*Page).fuzzyWordCount) // cached assert.True(t, pagesEqual(p2, p1.Reverse())) } @@ -155,7 +158,7 @@ func TestPageSortByParam(t *testing.T) { s := newTestSite(t) unsorted := createSortTestPages(s, 10) - delete(unsorted[9].params, "arbitrarily") + delete(unsorted[9].Params(), "arbitrarily") firstSetValue, _ := unsorted[0].Param(k) secondSetValue, _ := unsorted[1].Param(k) @@ -191,22 +194,25 @@ func BenchmarkSortByWeightAndReverse(b *testing.B) { func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) { for i := range dates { - pages[i].Date = dates[i] - pages[i].Lastmod = dates[i] - pages[i].Weight = weights[i] - pages[i].title = titles[i] + this := pages[i].(*Page) + other := pages[len(dates)-1-i].(*Page) + + this.DDate = dates[i] + this.DLastMod = dates[i] + this.weight = weights[i] + this.title = titles[i] // make sure we compare apples and ... apples ... - pages[len(dates)-1-i].linkTitle = pages[i].title + "l" - pages[len(dates)-1-i].PublishDate = dates[i] - pages[len(dates)-1-i].ExpiryDate = dates[i] - pages[len(dates)-1-i].workContent = []byte(titles[i] + "_content") + other.linkTitle = this.Title() + "l" + other.DPublishDate = dates[i] + other.DExpiryDate = dates[i] + other.workContent = []byte(titles[i] + "_content") } - lastLastMod := pages[2].Lastmod - pages[2].Lastmod = pages[1].Lastmod - pages[1].Lastmod = lastLastMod + lastLastMod := pages[2].Lastmod() + pages[2].(*Page).DLastMod = pages[1].Lastmod() + pages[1].(*Page).DLastMod = lastLastMod for _, p := range pages { - p.resetContent() + p.(*Page).resetContent() } } @@ -228,7 +234,7 @@ func createSortTestPages(s *Site, num int) Pages { w = 10 } p.fuzzyWordCount = i - p.Weight = w + p.weight = w p.Description = "initial" pages[i] = p diff --git a/hugolib/page_output.go b/hugolib/page_output.go index 0a3eef9a6a5..0506a041081 100644 --- a/hugolib/page_output.go +++ b/hugolib/page_output.go @@ -230,7 +230,7 @@ func (p *PageOutput) AlternativeOutputFormats() (OutputFormats, error) { // always be of the same length, but may contain different elements. func (p *PageOutput) deleteResource(i int) { p.resources = append(p.resources[:i], p.resources[i+1:]...) - p.Page.Resources = append(p.Page.Resources[:i], p.Page.Resources[i+1:]...) + p.Page.resources = append(p.Page.resources[:i], p.Page.resources[i+1:]...) } @@ -241,14 +241,14 @@ func (p *PageOutput) Resources() resource.Resources { // base folder. ff := p.outputFormats[0] if p.outputFormat.Path == ff.Path { - p.resources = p.Page.Resources + p.resources = p.Page.resources return } // Clone it with new base. - resources := make(resource.Resources, len(p.Page.Resources)) + resources := make(resource.Resources, len(p.Page.Resources())) - for i, r := range p.Page.Resources { + for i, r := range p.Page.Resources() { if c, ok := r.(resource.Cloner); ok { // Clone the same resource with a new target. resources[i] = c.WithNewBase(p.outputFormat.Path) diff --git a/hugolib/page_paths.go b/hugolib/page_paths.go index 9de7b0764fc..a115ccf57e2 100644 --- a/hugolib/page_paths.go +++ b/hugolib/page_paths.go @@ -74,7 +74,7 @@ type targetPathDescriptor struct { // and URLs for this Page. 
func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor, error) { if p.targetPathDescriptorPrototype == nil { - panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.title, p.Kind)) + panic(fmt.Sprintf("Must run initTargetPathDescriptor() for page %q, kind %q", p.Title(), p.Kind())) } d := *p.targetPathDescriptorPrototype d.Type = t @@ -84,7 +84,7 @@ func (p *Page) createTargetPathDescriptor(t output.Format) (targetPathDescriptor func (p *Page) initTargetPathDescriptor() error { d := &targetPathDescriptor{ PathSpec: p.s.PathSpec, - Kind: p.Kind, + Kind: p.Kind(), Sections: p.sections, UglyURLs: p.s.Info.uglyURLs(p), Dir: filepath.ToSlash(p.Dir()), @@ -107,7 +107,7 @@ func (p *Page) initTargetPathDescriptor() error { // the permalink configuration values are likely to be redundant, e.g. // naively expanding /category/:slug/ would give /category/categories/ for // the "categories" KindTaxonomyTerm. - if p.Kind == KindPage || p.Kind == KindTaxonomy { + if p.Kind() == KindPage || p.Kind() == KindTaxonomy { if override, ok := p.Site.Permalinks[p.Section()]; ok { opath, err := override.Expand(p) if err != nil { @@ -127,7 +127,7 @@ func (p *Page) initTargetPathDescriptor() error { func (p *Page) initURLs() error { if len(p.outputFormats) == 0 { - p.outputFormats = p.s.outputFormats[p.Kind] + p.outputFormats = p.s.outputFormats[p.Kind()] } target := filepath.ToSlash(p.createRelativeTargetPath()) rel := p.s.PathSpec.URLizeFilename(target) @@ -278,7 +278,7 @@ func createTargetPath(d targetPathDescriptor) string { func (p *Page) createRelativeTargetPath() string { if len(p.outputFormats) == 0 { - if p.Kind == kindUnknown { + if p.Kind() == kindUnknown { panic(fmt.Sprintf("Page %q has unknown kind", p.title)) } panic(fmt.Sprintf("Page %q missing output format(s)", p.title)) diff --git a/hugolib/page_resource.go b/hugolib/page_resource.go index 201076e8b0b..b6899bb5174 100644 --- a/hugolib/page_resource.go +++ b/hugolib/page_resource.go @@ -18,6 +18,9 @@ import ( ) var ( - _ resource.Resource = (*Page)(nil) - _ resource.Resource = (*PageOutput)(nil) + _ resource.Resource = (*Page)(nil) + _ resource.Page = (*Page)(nil) + _ resource.Resource = (*PageOutput)(nil) + _ resource.Page = (*PageOutput)(nil) + _ resource.LengthProvider = (*Page)(nil) ) diff --git a/hugolib/page_test.go b/hugolib/page_test.go index 9723b1426cf..a7420da8d2a 100644 --- a/hugolib/page_test.go +++ b/hugolib/page_test.go @@ -520,8 +520,8 @@ func checkPageType(t *testing.T, page *Page, pageType string) { } func checkPageDate(t *testing.T, page *Page, time time.Time) { - if page.Date != time { - t.Fatalf("Page date is: %s. Expected: %s", page.Date, time) + if page.Date() != time { + t.Fatalf("Page date is: %s. Expected: %s", page.Date(), time) } } @@ -624,7 +624,7 @@ func testAllMarkdownEnginesForPages(t *testing.T, func TestCreateNewPage(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) // issue #2290: Path is relative to the content dir and will continue to be so. require.Equal(t, filepath.FromSlash(fmt.Sprintf("p0.%s", ext)), p.Path()) @@ -646,7 +646,7 @@ func TestCreateNewPage(t *testing.T) { func TestPageWithDelimiter(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkPageTitle(t, p, "Simple") checkPageContent(t, p, normalizeExpected(ext, "
<p>Summary Next Line</p>\n\n<p>Some more text</p>
\n"), ext) checkPageSummary(t, p, normalizeExpected(ext, "
<p>Summary Next Line</p>
"), ext) @@ -668,7 +668,7 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) if p.Summary() != template.HTML( "
<p>The best static site generator.<sup class=\"footnote-ref\" id=\"fnref:1\"><a href=\"#fn:1\">1</a></sup></p>
") { @@ -694,7 +694,7 @@ weight: %d Simple Page With Some Date` hasDate := func(p *Page) bool { - return p.Date.Year() == 2017 + return p.Date().Year() == 2017 } datePage := func(field string, weight int) string { @@ -705,7 +705,7 @@ Simple Page With Some Date` assertFunc := func(t *testing.T, ext string, pages Pages) { assert.True(len(pages) > 0) for _, p := range pages { - assert.True(hasDate(p)) + assert.True(hasDate(p.(*Page))) } } @@ -734,7 +734,7 @@ title: Raw s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true}) require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) require.Equal(t, p.RawContent(), "**Raw**") @@ -743,7 +743,7 @@ title: Raw func TestPageWithShortCodeInSummary(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkPageTitle(t, p, "Simple") checkPageContent(t, p, normalizeExpected(ext, "
<p>Summary Next Line. <figure> <img src=\"/not/real\"/> </figure> . More text here.</p><p>Some more text</p>
")) checkPageSummary(t, p, "Summary Next Line. . More text here. Some more text") @@ -756,7 +756,7 @@ func TestPageWithShortCodeInSummary(t *testing.T) { func TestPageWithEmbeddedScriptTag(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if ext == "ad" || ext == "rst" { // TOD(bep) return @@ -777,7 +777,7 @@ func TestPageWithAdditionalExtension(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) checkPageContent(t, p, "
<p>first line.<br />\nsecond line.</p>\n\n<p>fourth line.</p>
\n") } @@ -792,7 +792,7 @@ func TestTableOfContents(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) checkPageContent(t, p, "\n\n
<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p>\n\n<h2 id=\"aa\">AA</h2>\n\n<p>I have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.</p>\n\n<h3 id=\"aaa\">AAA</h3>\n\n<p>I remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath–as swift as the passage of light–would leap after me from the pit about\nthe cylinder and strike me down. ## BB</p>\n\n<h3 id=\"bbb\">BBB</h3>\n\n<p>“You’re a great Granser,” he cried delightedly, “always making believe them little marks mean something.”</p>
\n") checkPageTOC(t, p, "") @@ -801,7 +801,7 @@ func TestTableOfContents(t *testing.T) { func TestPageWithMoreTag(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkPageTitle(t, p, "Simple") checkPageContent(t, p, normalizeExpected(ext, "
<p>Summary Same Line</p>\n\n<p>Some more text</p>
\n")) checkPageSummary(t, p, normalizeExpected(ext, "
<p>Summary Same Line</p>
")) @@ -815,7 +815,7 @@ func TestPageWithMoreTag(t *testing.T) { func TestPageWithMoreTagOnlySummary(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) checkTruncation(t, p, false, "page with summary delimiter at end") } @@ -826,7 +826,7 @@ func TestPageWithMoreTagOnlySummary(t *testing.T) { func TestSummaryWithHTMLTagsOnNextLine(t *testing.T) { assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) require.Contains(t, p.Summary(), "Happy new year everyone!") require.NotContains(t, p.Summary(), "User interface") } @@ -855,7 +855,7 @@ func TestPageWithDate(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z") checkPageDate(t, p, d) @@ -908,13 +908,13 @@ func TestPageWithLastmodFromGitInfo(t *testing.T) { assrt.Len(enSite.RegularPages, 1) // 2018-03-11 is the Git author date for testsite/content/first-post.md - assrt.Equal("2018-03-11", enSite.RegularPages[0].Lastmod.Format("2006-01-02")) + assrt.Equal("2018-03-11", enSite.RegularPages[0].Lastmod().Format("2006-01-02")) nnSite := h.Sites[1] assrt.Len(nnSite.RegularPages, 1) // 2018-08-11 is the Git author date for testsite/content_nn/first-post.md - assrt.Equal("2018-08-11", nnSite.RegularPages[0].Lastmod.Format("2006-01-02")) + assrt.Equal("2018-08-11", nnSite.RegularPages[0].Lastmod().Format("2006-01-02")) } @@ -955,22 +955,22 @@ Content assrt.Len(s.RegularPages, 2) - noSlug := s.RegularPages[0] - slug := s.RegularPages[1] + noSlug := s.RegularPages[0].(*Page) + slug := s.RegularPages[1].(*Page) - assrt.Equal(28, noSlug.Lastmod.Day()) + assrt.Equal(28, noSlug.Lastmod().Day()) switch strings.ToLower(dateHandler) { case ":filename": - assrt.False(noSlug.Date.IsZero()) - assrt.False(slug.Date.IsZero()) - assrt.Equal(2012, noSlug.Date.Year()) - assrt.Equal(2012, slug.Date.Year()) + assrt.False(noSlug.Date().IsZero()) + assrt.False(slug.Date().IsZero()) + assrt.Equal(2012, noSlug.Date().Year()) + assrt.Equal(2012, slug.Date().Year()) assrt.Equal("noslug", noSlug.Slug) assrt.Equal("aslug", slug.Slug) case ":filemodtime": - assrt.Equal(c1fi.ModTime().Year(), noSlug.Date.Year()) - assrt.Equal(c2fi.ModTime().Year(), slug.Date.Year()) + assrt.Equal(c1fi.ModTime().Year(), noSlug.Date().Year()) + assrt.Equal(c2fi.ModTime().Year(), slug.Date().Year()) fallthrough default: assrt.Equal("", noSlug.Slug) @@ -985,7 +985,7 @@ Content func TestWordCountWithAllCJKRunesWithoutHasCJKLanguage(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 8 { t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 8, p.WordCount()) } @@ -999,7 +999,7 @@ func TestWordCountWithAllCJKRunesHasCJKLanguage(t *testing.T) { settings := map[string]interface{}{"hasCJKLanguage": true} assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 15 { t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 15, p.WordCount()) } @@ -1012,7 +1012,7 @@ func TestWordCountWithMainEnglishWithCJKRunes(t *testing.T) { settings := map[string]interface{}{"hasCJKLanguage": true} assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 74 { t.Fatalf("[%s] incorrect word count for content '%s'. 
expected %v, got %v", ext, p.plain, 74, p.WordCount()) } @@ -1033,7 +1033,7 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) { } assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 75 { t.Fatalf("[%s] incorrect word count for content '%s'. expected %v, got %v", ext, p.plain, 74, p.WordCount()) } @@ -1051,7 +1051,7 @@ func TestWordCountWithIsCJKLanguageFalse(t *testing.T) { func TestWordCount(t *testing.T) { t.Parallel() assertFunc := func(t *testing.T, ext string, pages Pages) { - p := pages[0] + p := pages[0].(*Page) if p.WordCount() != 483 { t.Fatalf("[%s] incorrect word count. expected %v, got %v", ext, 483, p.WordCount()) } @@ -1473,8 +1473,8 @@ func TestTranslationKey(t *testing.T) { home, _ := s.Info.Home() assert.NotNil(home) assert.Equal("home", home.TranslationKey()) - assert.Equal("page/k1", s.RegularPages[0].TranslationKey()) - p2 := s.RegularPages[1] + assert.Equal("page/k1", s.RegularPages[0].(*Page).TranslationKey()) + p2 := s.RegularPages[1].(*Page) assert.Equal("page/sect/simple", p2.TranslationKey()) @@ -1492,7 +1492,7 @@ func TestChompBOM(t *testing.T) { require.Len(t, s.RegularPages, 1) - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) checkPageTitle(t, p, "Simple") } @@ -1786,7 +1786,7 @@ tags: } - p := s.RegularPages[0] + p := s.RegularPages[0].(*Page) if uglyURLs { require.Equal(t, "/post/test0.dot.html", p.RelPermalink()) } else { diff --git a/hugolib/page_time_integration_test.go b/hugolib/page_time_integration_test.go index f180afa5e2e..5e489373287 100644 --- a/hugolib/page_time_integration_test.go +++ b/hugolib/page_time_integration_test.go @@ -25,11 +25,6 @@ import ( ) const ( - pageWithInvalidDate = `--- -date: 2010-05-02_15:29:31+08:00 ---- -Page With Invalid Date (replace T with _ for RFC 3339)` - pageWithDateRFC3339 = `--- date: 2010-05-02T15:29:31+08:00 --- @@ -91,15 +86,6 @@ date: 02 May 2010 15:29 PST Page With Date HugoLong` ) -func TestDegenerateDateFrontMatter(t *testing.T) { - t.Parallel() - s := newTestSite(t) - p, _ := s.newPageFrom(strings.NewReader(pageWithInvalidDate), "page/with/invalid/date") - if p.Date != *new(time.Time) { - t.Fatalf("Date should be set to time.Time zero value. Got: %s", p.Date) - } -} - func TestParsingDateInFrontMatter(t *testing.T) { t.Parallel() s := newTestSite(t) @@ -142,8 +128,8 @@ func TestParsingDateInFrontMatter(t *testing.T) { if err != nil { t.Fatalf("Expected to be able to parse page.") } - if !dt.Equal(p.Date) { - t.Errorf("Date does not equal frontmatter:\n%s\nExpecting: %s\n Got: %s. Diff: %s\n internal: %#v\n %#v", test.buf, dt, p.Date, dt.Sub(p.Date), dt, p.Date) + if !dt.Equal(p.Date()) { + t.Errorf("Date does not equal frontmatter:\n%s\nExpecting: %s\n Got: %s. 
Diff: %s\n internal: %#v\n %#v", test.buf, dt, p.Date(), dt.Sub(p.Date()), dt, p.Date()) } } } diff --git a/hugolib/pagebundler_handlers.go b/hugolib/pagebundler_handlers.go index 2df1f87656f..b12ec8a3d73 100644 --- a/hugolib/pagebundler_handlers.go +++ b/hugolib/pagebundler_handlers.go @@ -231,17 +231,17 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler { pageResource.resourcePath = filepath.ToSlash(childCtx.target) pageResource.parent = p } - p.Resources = append(p.Resources, res.resource) + p.resources = append(p.resources, res.resource) } } - sort.SliceStable(p.Resources, func(i, j int) bool { - if p.Resources[i].ResourceType() < p.Resources[j].ResourceType() { + sort.SliceStable(p.Resources(), func(i, j int) bool { + if p.resources[i].ResourceType() < p.resources[j].ResourceType() { return true } - p1, ok1 := p.Resources[i].(*Page) - p2, ok2 := p.Resources[j].(*Page) + p1, ok1 := p.resources[i].(*Page) + p2, ok2 := p.resources[j].(*Page) if ok1 != ok2 { return ok2 @@ -251,12 +251,12 @@ func (c *contentHandlers) parsePage(h contentHandler) contentHandler { return defaultPageSort(p1, p2) } - return p.Resources[i].RelPermalink() < p.Resources[j].RelPermalink() + return p.resources[i].RelPermalink() < p.resources[j].RelPermalink() }) // Assign metadata from front matter if set if len(p.resourcesMetadata) > 0 { - resources.AssignMetadata(p.resourcesMetadata, p.Resources...) + resources.AssignMetadata(p.resourcesMetadata, p.Resources()...) } } diff --git a/hugolib/pagebundler_test.go b/hugolib/pagebundler_test.go index ab047205911..78edc57fe8d 100644 --- a/hugolib/pagebundler_test.go +++ b/hugolib/pagebundler_test.go @@ -138,13 +138,13 @@ func TestPageBundlerSiteRegular(t *testing.T) { unicodeBundle := s.getPage(KindPage, "c/bundle/index.md") assert.NotNil(unicodeBundle) - pageResources := leafBundle1.Resources.ByType(pageResourceType) + pageResources := leafBundle1.Resources().ByType(pageResourceType) assert.Len(pageResources, 2) firstPage := pageResources[0].(*Page) secondPage := pageResources[1].(*Page) assert.Equal(filepath.FromSlash("/work/base/b/my-bundle/1.md"), firstPage.pathOrTitle(), secondPage.pathOrTitle()) assert.Contains(firstPage.content(), "TheContent") - assert.Equal(6, len(leafBundle1.Resources)) + assert.Equal(6, len(leafBundle1.Resources())) // Verify shortcode in bundled page assert.Contains(secondPage.content(), filepath.FromSlash("MyShort in b/my-bundle/2.md")) @@ -157,7 +157,7 @@ func TestPageBundlerSiteRegular(t *testing.T) { assert.Equal(secondPage, pageResources.GetMatch("2*")) assert.Nil(pageResources.GetMatch("doesnotexist*")) - imageResources := leafBundle1.Resources.ByType("image") + imageResources := leafBundle1.Resources().ByType("image") assert.Equal(3, len(imageResources)) image := imageResources[0] @@ -170,7 +170,7 @@ func TestPageBundlerSiteRegular(t *testing.T) { th.assertFileContent(filepath.FromSlash("/work/public/cpath/2017/pageslug/c/logo.png"), "content") // Custom media type defined in site config. - assert.Len(leafBundle1.Resources.ByType("bepsays"), 1) + assert.Len(leafBundle1.Resources().ByType("bepsays"), 1) relPermalinker := func(s string) string { return fmt.Sprintf(s, relURLBase) @@ -286,9 +286,9 @@ func TestPageBundlerSiteMultilingual(t *testing.T) { // See https://github.com/gohugoio/hugo/issues/4295 // Every resource should have its Name prefixed with its base folder. 
- cBundleResources := bundleWithSubPath.Resources.Match("c/**") + cBundleResources := bundleWithSubPath.Resources().Match("c/**") assert.Equal(4, len(cBundleResources)) - bundlePage := bundleWithSubPath.Resources.GetMatch("c/page*") + bundlePage := bundleWithSubPath.Resources().GetMatch("c/page*") assert.NotNil(bundlePage) assert.IsType(&Page{}, bundlePage) @@ -334,10 +334,10 @@ func TestMultilingualDisableLanguage(t *testing.T) { // No nn pages assert.Equal(16, len(s.AllPages)) for _, p := range s.rawAllPages { - assert.True(p.Lang() != "nn") + assert.True(p.(*Page).Lang() != "nn") } for _, p := range s.AllPages { - assert.True(p.Lang() != "nn") + assert.True(p.(*Page).Lang() != "nn") } } @@ -361,8 +361,8 @@ func TestPageBundlerSiteWitSymbolicLinksInContent(t *testing.T) { assert.Equal(7, len(s.RegularPages)) a1Bundle := s.getPage(KindPage, "symbolic2/a1/index.md") assert.NotNil(a1Bundle) - assert.Equal(2, len(a1Bundle.Resources)) - assert.Equal(1, len(a1Bundle.Resources.ByType(pageResourceType))) + assert.Equal(2, len(a1Bundle.Resources())) + assert.Equal(1, len(a1Bundle.Resources().ByType(pageResourceType))) th.assertFileContent(filepath.FromSlash(workDir+"/public/a/page/index.html"), "TheContent") th.assertFileContent(filepath.FromSlash(workDir+"/public/symbolic1/s1/index.html"), "TheContent") @@ -430,7 +430,7 @@ HEADLESS {{< myShort >}} assert.Equal("", headless.Permalink()) assert.Contains(headless.content(), "HEADLESS SHORTCODE") - headlessResources := headless.Resources + headlessResources := headless.Resources() assert.Equal(3, len(headlessResources)) assert.Equal(2, len(headlessResources.Match("l*"))) pageResource := headlessResources.GetMatch("p*") diff --git a/hugolib/pagecollections.go b/hugolib/pagecollections.go index 78325344b60..e07adf59d41 100644 --- a/hugolib/pagecollections.go +++ b/hugolib/pagecollections.go @@ -21,6 +21,7 @@ import ( "github.com/gohugoio/hugo/cache" "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/resources/resource" ) // PageCollections contains the page collections for a site. @@ -71,7 +72,7 @@ func (c *PageCollections) getFromCache(ref string) (*Page, error) { return nil, fmt.Errorf("page reference %q is ambiguous", ref) } -var ambiguityFlag = &Page{Kind: kindUnknown, title: "ambiguity flag"} +var ambiguityFlag = &Page{kind: kindUnknown, title: "ambiguity flag"} func (c *PageCollections) refreshPageCaches() { c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages) @@ -81,7 +82,7 @@ func (c *PageCollections) refreshPageCaches() { indexLoader := func() (map[string]interface{}, error) { index := make(map[string]interface{}) - add := func(ref string, p *Page) { + add := func(ref string, p resource.Page) { existing := index[ref] if existing == nil { index[ref] = p @@ -92,7 +93,8 @@ func (c *PageCollections) refreshPageCaches() { for _, pageCollection := range []Pages{c.RegularPages, c.headlessPages} { for _, p := range pageCollection { - sourceRef := p.absoluteSourceRef() + pp := p.(*Page) + sourceRef := pp.absoluteSourceRef() if sourceRef != "" { // index the canonical ref @@ -101,9 +103,9 @@ func (c *PageCollections) refreshPageCaches() { } // Ref/Relref supports this potentially ambiguous lookup. 
- add(p.LogicalName(), p) + add(pp.LogicalName(), p) - translationBaseName := p.TranslationBaseName() + translationBaseName := pp.TranslationBaseName() dir, _ := path.Split(sourceRef) dir = strings.TrimSuffix(dir, "/") @@ -124,12 +126,13 @@ func (c *PageCollections) refreshPageCaches() { for _, p := range c.indexPages { // index the canonical, unambiguous ref for any backing file // e.g. /section/_index.md - sourceRef := p.absoluteSourceRef() + pp := p.(*Page) + sourceRef := pp.absoluteSourceRef() if sourceRef != "" { add(sourceRef, p) } - ref := path.Join(p.sections...) + ref := path.Join(p.(*Page).sections...) // index the canonical, unambiguous virtual ref // e.g. /section @@ -265,7 +268,7 @@ func (c *PageCollections) getPageNew(context *Page, ref string) (*Page, error) { func (*PageCollections) findPagesByKindIn(kind string, inPages Pages) Pages { var pages Pages for _, p := range inPages { - if p.Kind == kind { + if p.Kind() == kind { pages = append(pages, p) } } @@ -274,8 +277,8 @@ func (*PageCollections) findPagesByKindIn(kind string, inPages Pages) Pages { func (*PageCollections) findFirstPageByKindIn(kind string, inPages Pages) *Page { for _, p := range inPages { - if p.Kind == kind { - return p + if p.Kind() == kind { + return p.(*Page) } } return nil @@ -284,7 +287,7 @@ func (*PageCollections) findFirstPageByKindIn(kind string, inPages Pages) *Page func (*PageCollections) findPagesByKindNotIn(kind string, inPages Pages) Pages { var pages Pages for _, p := range inPages { - if p.Kind != kind { + if p.Kind() != kind { pages = append(pages, p) } } @@ -301,7 +304,7 @@ func (c *PageCollections) addPage(page *Page) { func (c *PageCollections) removePageFilename(filename string) { if i := c.rawAllPages.findPagePosByFilename(filename); i >= 0 { - c.clearResourceCacheForPage(c.rawAllPages[i]) + c.clearResourceCacheForPage(c.rawAllPages[i].(*Page)) c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...) } @@ -309,7 +312,7 @@ func (c *PageCollections) removePageFilename(filename string) { func (c *PageCollections) removePage(page *Page) { if i := c.rawAllPages.findPagePos(page); i >= 0 { - c.clearResourceCacheForPage(c.rawAllPages[i]) + c.clearResourceCacheForPage(c.rawAllPages[i].(*Page)) c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...) 
} @@ -319,8 +322,9 @@ func (c *PageCollections) findPagesByShortcode(shortcode string) Pages { var pages Pages for _, p := range c.rawAllPages { - if p.shortcodeState != nil { - if _, ok := p.shortcodeState.nameSet[shortcode]; ok { + pp := p.(*Page) + if pp.shortcodeState != nil { + if _, ok := pp.shortcodeState.nameSet[shortcode]; ok { pages = append(pages, p) } } @@ -335,7 +339,7 @@ func (c *PageCollections) replacePage(page *Page) { } func (c *PageCollections) clearResourceCacheForPage(page *Page) { - if len(page.Resources) > 0 { + if len(page.Resources()) > 0 { page.s.ResourceSpec.DeleteCacheByPrefix(page.relTargetPathBase) } } diff --git a/hugolib/pagecollections_test.go b/hugolib/pagecollections_test.go index 2f8b3149044..d2796d3a466 100644 --- a/hugolib/pagecollections_test.go +++ b/hugolib/pagecollections_test.go @@ -114,7 +114,7 @@ func (t *testCase) check(p *Page, err error, errorMsg string, assert *require.As default: assert.NoError(err, errorMsg) assert.NotNil(p, errorMsg) - assert.Equal(t.kind, p.Kind, errorMsg) + assert.Equal(t.kind, p.Kind(), errorMsg) assert.Equal(t.expectedTitle, p.title, errorMsg) } } diff --git a/hugolib/pagemeta/page_frontmatter.go b/hugolib/pagemeta/page_frontmatter.go index b67ffbc05a0..6a303906abe 100644 --- a/hugolib/pagemeta/page_frontmatter.go +++ b/hugolib/pagemeta/page_frontmatter.go @@ -300,7 +300,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.dateHandler, err = f.createDateHandler(f.fmConfig.date, func(d *FrontMatterDescriptor, t time.Time) { - d.Dates.Date = t + d.Dates.DDate = t setParamIfNotSet(fmDate, t, d) }); err != nil { return err @@ -309,7 +309,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.lastModHandler, err = f.createDateHandler(f.fmConfig.lastmod, func(d *FrontMatterDescriptor, t time.Time) { setParamIfNotSet(fmLastmod, t, d) - d.Dates.Lastmod = t + d.Dates.DLastMod = t }); err != nil { return err } @@ -317,7 +317,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.publishDate, func(d *FrontMatterDescriptor, t time.Time) { setParamIfNotSet(fmPubDate, t, d) - d.Dates.PublishDate = t + d.Dates.DPublishDate = t }); err != nil { return err } @@ -325,7 +325,7 @@ func (f *FrontMatterHandler) createHandlers() error { if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.expiryDate, func(d *FrontMatterDescriptor, t time.Time) { setParamIfNotSet(fmExpiryDate, t, d) - d.Dates.ExpiryDate = t + d.Dates.DExpiryDate = t }); err != nil { return err } diff --git a/hugolib/pagemeta/page_frontmatter_test.go b/hugolib/pagemeta/page_frontmatter_test.go index 03f4c2f84a4..c4f7d40038f 100644 --- a/hugolib/pagemeta/page_frontmatter_test.go +++ b/hugolib/pagemeta/page_frontmatter_test.go @@ -143,13 +143,13 @@ func TestFrontMatterDatesHandlers(t *testing.T) { } d.Frontmatter["date"] = d2 assert.NoError(handler.HandleDates(d)) - assert.Equal(d1, d.Dates.Date) + assert.Equal(d1, d.Dates.DDate) assert.Equal(d2, d.Params["date"]) d = newTestFd() d.Frontmatter["date"] = d2 assert.NoError(handler.HandleDates(d)) - assert.Equal(d2, d.Dates.Date) + assert.Equal(d2, d.Dates.DDate) assert.Equal(d2, d.Params["date"]) } @@ -186,15 +186,15 @@ func TestFrontMatterDatesCustomConfig(t *testing.T) { assert.NoError(handler.HandleDates(d)) - assert.Equal(1, d.Dates.Date.Day()) - assert.Equal(4, d.Dates.Lastmod.Day()) - assert.Equal(4, d.Dates.PublishDate.Day()) - assert.Equal(5, d.Dates.ExpiryDate.Day()) + assert.Equal(1, d.Dates.DDate.Day()) + assert.Equal(4, 
d.Dates.DLastMod.Day()) + assert.Equal(4, d.Dates.DPublishDate.Day()) + assert.Equal(5, d.Dates.DExpiryDate.Day()) - assert.Equal(d.Dates.Date, d.Params["date"]) - assert.Equal(d.Dates.Date, d.Params["mydate"]) - assert.Equal(d.Dates.PublishDate, d.Params["publishdate"]) - assert.Equal(d.Dates.ExpiryDate, d.Params["expirydate"]) + assert.Equal(d.Dates.DDate, d.Params["date"]) + assert.Equal(d.Dates.DDate, d.Params["mydate"]) + assert.Equal(d.Dates.DPublishDate, d.Params["publishdate"]) + assert.Equal(d.Dates.DExpiryDate, d.Params["expirydate"]) assert.False(handler.IsDateKey("date")) // This looks odd, but is configured like this. assert.True(handler.IsDateKey("mydate")) @@ -227,10 +227,10 @@ func TestFrontMatterDatesDefaultKeyword(t *testing.T) { assert.NoError(handler.HandleDates(d)) - assert.Equal(1, d.Dates.Date.Day()) - assert.Equal(2, d.Dates.Lastmod.Day()) - assert.Equal(4, d.Dates.PublishDate.Day()) - assert.True(d.Dates.ExpiryDate.IsZero()) + assert.Equal(1, d.Dates.DDate.Day()) + assert.Equal(2, d.Dates.DLastMod.Day()) + assert.Equal(4, d.Dates.DPublishDate.Day()) + assert.True(d.Dates.DExpiryDate.IsZero()) } @@ -252,10 +252,10 @@ func TestFrontMatterDateFieldHandler(t *testing.T) { fd := newTestFd() d, _ := time.Parse("2006-01-02", "2018-02-01") fd.Frontmatter["date"] = d - h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.Date = t }) + h := handlers.newDateFieldHandler("date", func(d *FrontMatterDescriptor, t time.Time) { d.Dates.DDate = t }) handled, err := h(fd) assert.True(handled) assert.NoError(err) - assert.Equal(d, fd.Dates.Date) + assert.Equal(d, fd.Dates.DDate) } diff --git a/hugolib/pagemeta/pagemeta.go b/hugolib/pagemeta/pagemeta.go index 93dc9a12f0b..6c92e02e465 100644 --- a/hugolib/pagemeta/pagemeta.go +++ b/hugolib/pagemeta/pagemeta.go @@ -24,9 +24,26 @@ type URLPath struct { Section string } +// TODO(bep) page type PageDates struct { - Date time.Time - Lastmod time.Time - PublishDate time.Time - ExpiryDate time.Time + DDate time.Time + DLastMod time.Time + DPublishDate time.Time + DExpiryDate time.Time +} + +func (p PageDates) Date() time.Time { + return p.DDate +} + +func (p PageDates) Lastmod() time.Time { + return p.DLastMod +} + +func (p PageDates) PublishDate() time.Time { + return p.DPublishDate +} + +func (p PageDates) ExpiryDate() time.Time { + return p.DExpiryDate } diff --git a/hugolib/pagesPrevNext.go b/hugolib/pagesPrevNext.go index 947a49b8581..717440f6583 100644 --- a/hugolib/pagesPrevNext.go +++ b/hugolib/pagesPrevNext.go @@ -13,10 +13,14 @@ package hugolib +import ( + "github.com/gohugoio/hugo/resources/resource" +) + // Prev returns the previous page reletive to the given page. -func (p Pages) Prev(cur *Page) *Page { +func (p Pages) Prev(cur resource.Page) resource.Page { for x, c := range p { - if c.Eq(cur) { + if c.(*Page).Eq(cur) { if x == 0 { // TODO(bep) consider return nil here to get it line with the other Prevs return p[len(p)-1] @@ -28,9 +32,9 @@ func (p Pages) Prev(cur *Page) *Page { } // Next returns the next page reletive to the given page. 
-func (p Pages) Next(cur *Page) *Page { +func (p Pages) Next(cur resource.Page) resource.Page { for x, c := range p { - if c.Eq(cur) { + if c.(*Page).Eq(cur) { if x < len(p)-1 { return p[x+1] } diff --git a/hugolib/pagesPrevNext_test.go b/hugolib/pagesPrevNext_test.go index 5945d8fe50b..0aa251e9831 100644 --- a/hugolib/pagesPrevNext_test.go +++ b/hugolib/pagesPrevNext_test.go @@ -59,10 +59,10 @@ func prepareWeightedPagesPrevNext(t *testing.T) WeightedPages { if err != nil { t.Fatalf("failed to prepare test page %s", src.path) } - p.Weight = src.weight - p.Date = cast.ToTime(src.date) - p.PublishDate = cast.ToTime(src.date) - w = append(w, WeightedPage{p.Weight, p}) + p.weight = src.weight + p.DDate = cast.ToTime(src.date) + p.DPublishDate = cast.ToTime(src.date) + w = append(w, WeightedPage{p.weight, p}) } w.Sort() diff --git a/hugolib/pages_language_merge.go b/hugolib/pages_language_merge.go index 8bbae9a1271..8dbaef7648f 100644 --- a/hugolib/pages_language_merge.go +++ b/hugolib/pages_language_merge.go @@ -33,11 +33,13 @@ func (p1 Pages) MergeByLanguage(p2 Pages) Pages { merge := func(pages *Pages) { m := make(map[string]bool) for _, p := range *pages { - m[p.TranslationKey()] = true + pp := p.(*Page) + m[pp.TranslationKey()] = true } for _, p := range p2 { - if _, found := m[p.TranslationKey()]; !found { + pp := p.(*Page) + if _, found := m[pp.TranslationKey()]; !found { *pages = append(*pages, p) } } diff --git a/hugolib/pages_language_merge_test.go b/hugolib/pages_language_merge_test.go index efcfbf04b34..e190859823f 100644 --- a/hugolib/pages_language_merge_test.go +++ b/hugolib/pages_language_merge_test.go @@ -48,7 +48,7 @@ func TestMergeLanguages(t *testing.T) { if i == 2 || i%3 == 0 || i == 31 { expectedLang = "nn" } - p := mergedNN[i-1] + p := mergedNN[i-1].(*Page) assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i)) } } @@ -60,24 +60,24 @@ func TestMergeLanguages(t *testing.T) { if i%5 == 0 { expectedLang = "fr" } - p := mergedFR[i-1] + p := mergedFR[i-1].(*Page) assert.Equal(expectedLang, p.Lang(), fmt.Sprintf("Test %d", i)) } - firstNN := nnSite.RegularPages[0] + firstNN := nnSite.RegularPages[0].(*Page) assert.Equal(4, len(firstNN.Sites())) assert.Equal("en", firstNN.Sites().First().Language().Lang) nnBundle := nnSite.getPage("page", "bundle") enBundle := enSite.getPage("page", "bundle") - assert.Equal(6, len(enBundle.Resources)) - assert.Equal(2, len(nnBundle.Resources)) + assert.Equal(6, len(enBundle.Resources())) + assert.Equal(2, len(nnBundle.Resources())) - var ri interface{} = nnBundle.Resources + var ri interface{} = nnBundle.Resources() // This looks less ugly in the templates ... 
- mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources) + mergedNNResources := ri.(resource.ResourcesLanguageMerger).MergeByLanguage(enBundle.Resources()) assert.Equal(6, len(mergedNNResources)) unchanged, err := nnSite.RegularPages.MergeByLanguageInterface(nil) diff --git a/hugolib/pages_related.go b/hugolib/pages_related.go index 2881a45e6e3..7bd4765e214 100644 --- a/hugolib/pages_related.go +++ b/hugolib/pages_related.go @@ -110,7 +110,7 @@ func (p Pages) withInvertedIndex(search func(idx *related.InvertedIndex) ([]rela return nil, nil } - cache := p[0].s.relatedDocsHandler + cache := p[0].(*Page).s.relatedDocsHandler searchIndex, err := cache.getOrCreateIndex(p) if err != nil { diff --git a/hugolib/pages_related_test.go b/hugolib/pages_related_test.go index ed8d9df9d6d..cfb2abab894 100644 --- a/hugolib/pages_related_test.go +++ b/hugolib/pages_related_test.go @@ -54,22 +54,22 @@ Content assert.NoError(err) assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 1", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 1", result[1].Title()) result, err = s.RegularPages.Related(s.RegularPages[0]) assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 3", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 3", result[1].Title()) result, err = s.RegularPages.RelatedIndices(s.RegularPages[0], "keywords") assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 3", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 3", result[1].Title()) result, err = s.RegularPages.RelatedTo(types.NewKeyValuesStrings("keywords", "bep", "rocks")) assert.NoError(err) assert.Len(result, 2) - assert.Equal("Page 2", result[0].title) - assert.Equal("Page 3", result[1].title) + assert.Equal("Page 2", result[0].Title()) + assert.Equal("Page 3", result[1].Title()) } diff --git a/hugolib/pagination.go b/hugolib/pagination.go index 05846a6bb35..cc3f3751cac 100644 --- a/hugolib/pagination.go +++ b/hugolib/pagination.go @@ -21,6 +21,8 @@ import ( "reflect" "strings" + "github.com/gohugoio/hugo/resources/resource" + "github.com/gohugoio/hugo/config" "github.com/spf13/cast" @@ -120,7 +122,7 @@ func (p *Pager) element() paginatedElement { } // page returns the Page with the given index -func (p *Pager) page(index int) (*Page, error) { +func (p *Pager) page(index int) (resource.Page, error) { if pages, ok := p.element().(Pages); ok { if pages != nil && len(pages) > index { @@ -221,7 +223,7 @@ func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement { type keyPage struct { key interface{} - page *Page + page resource.Page } var ( @@ -270,7 +272,7 @@ func (p *Page) Paginator(options ...interface{}) (*Pager, error) { // If it's not, one will be created with all pages in Data["Pages"]. func (p *PageOutput) Paginator(options ...interface{}) (*Pager, error) { if !p.IsNode() { - return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title) + return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind(), p.Title()) } pagerSize, err := resolvePagerSize(p.s.Cfg, options...) @@ -321,7 +323,7 @@ func (p *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error) // Note that repeated calls will return the same result, even if the sequence is different. 
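A paginator is ultimately just the page collection cut into fixed-size chunks, which is why Pager.page can hand back a resource.Page by index. A rough standalone illustration of that splitting step (not the actual splitPages code):

package main

import "fmt"

// splitInto chunks a slice of page-like items into pages of at most
// size n, the way a paginator's elements are built.
func splitInto(items []string, n int) [][]string {
	var out [][]string
	for low := 0; low < len(items); low += n {
		high := low + n
		if high > len(items) {
			high = len(items)
		}
		out = append(out, items[low:high])
	}
	return out
}

func main() {
	pages := []string{"p1", "p2", "p3", "p4", "p5"}
	fmt.Println(splitInto(pages, 2)) // [[p1 p2] [p3 p4] [p5]]
}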
func (p *PageOutput) Paginate(seq interface{}, options ...interface{}) (*Pager, error) { if !p.IsNode() { - return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind, p.title) + return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", p.Kind(), p.Title()) } pagerSize, err := resolvePagerSize(p.s.Cfg, options...) @@ -458,8 +460,6 @@ func toPages(seq interface{}) (Pages, error) { return v, nil case *Pages: return *(v), nil - case []*Page: - return Pages(v), nil case WeightedPages: return v.Pages(), nil case PageGroup: diff --git a/hugolib/pagination_test.go b/hugolib/pagination_test.go index 5dbef609bdc..473d5d4a1fa 100644 --- a/hugolib/pagination_test.go +++ b/hugolib/pagination_test.go @@ -59,7 +59,7 @@ func TestSplitPageGroups(t *testing.T) { // first group 10 in weight require.Equal(t, 10, pg.Key) for _, p := range pg.Pages { - require.True(t, p.fuzzyWordCount%2 == 0) // magic test + require.True(t, p.(*Page).fuzzyWordCount%2 == 0) // magic test } } } else { @@ -74,7 +74,7 @@ func TestSplitPageGroups(t *testing.T) { // last should have 5 in weight require.Equal(t, 5, pg.Key) for _, p := range pg.Pages { - require.True(t, p.fuzzyWordCount%2 != 0) // magic test + require.True(t, p.(*Page).fuzzyWordCount%2 != 0) // magic test } } } else { @@ -553,10 +553,10 @@ func TestPage(t *testing.T) { page21, _ := f2.page(1) page2Nil, _ := f2.page(3) - require.Equal(t, 3, page11.fuzzyWordCount) + require.Equal(t, 3, page11.(*Page).fuzzyWordCount) require.Nil(t, page1Nil) - require.Equal(t, 3, page21.fuzzyWordCount) + require.Equal(t, 3, page21.(*Page).fuzzyWordCount) require.Nil(t, page2Nil) } @@ -570,7 +570,7 @@ func createTestPages(s *Site, num int) Pages { w = 10 } p.fuzzyWordCount = i + 2 - p.Weight = w + p.weight = w pages[i] = p } diff --git a/hugolib/permalinks.go b/hugolib/permalinks.go index 3d261a113e3..1ad9dd0dc26 100644 --- a/hugolib/permalinks.go +++ b/hugolib/permalinks.go @@ -131,19 +131,19 @@ func pageToPermalinkDate(p *Page, dateField string) (string, error) { // a Page contains a Node which provides a field Date, time.Time switch dateField { case "year": - return strconv.Itoa(p.Date.Year()), nil + return strconv.Itoa(p.Date().Year()), nil case "month": - return fmt.Sprintf("%02d", int(p.Date.Month())), nil + return fmt.Sprintf("%02d", int(p.Date().Month())), nil case "monthname": - return p.Date.Month().String(), nil + return p.Date().Month().String(), nil case "day": - return fmt.Sprintf("%02d", p.Date.Day()), nil + return fmt.Sprintf("%02d", p.Date().Day()), nil case "weekday": - return strconv.Itoa(int(p.Date.Weekday())), nil + return strconv.Itoa(int(p.Date().Weekday())), nil case "weekdayname": - return p.Date.Weekday().String(), nil + return p.Date().Weekday().String(), nil case "yearday": - return strconv.Itoa(p.Date.YearDay()), nil + return strconv.Itoa(p.Date().YearDay()), nil } //TODO: support classic strftime escapes too // (and pass those through despite not being in the map) diff --git a/hugolib/shortcode_test.go b/hugolib/shortcode_test.go index 9545301ea80..0aa5d5f3712 100644 --- a/hugolib/shortcode_test.go +++ b/hugolib/shortcode_test.go @@ -87,7 +87,7 @@ title: "Title" require.Len(t, h.Sites[0].RegularPages, 1) - output := strings.TrimSpace(string(h.Sites[0].RegularPages[0].content())) + output := strings.TrimSpace(string(h.Sites[0].RegularPages[0].(*Page).content())) output = strings.TrimPrefix(output, "
<p>") output = strings.TrimSuffix(output, "</p>
") diff --git a/hugolib/site.go b/hugolib/site.go index 43b398b7059..910ca89398f 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -174,7 +174,8 @@ func (s *Site) initRenderFormats() { formatSet := make(map[string]bool) formats := output.Formats{} for _, p := range s.Pages { - for _, f := range p.outputFormats { + pp := p.(*Page) + for _, f := range pp.outputFormats { if !formatSet[f.Name] { formats = append(formats, f) formatSet[f.Name] = true @@ -860,7 +861,7 @@ func (s *Site) processPartial(events []fsnotify.Event) (whatChanged, error) { // pages that keeps a reference to the changed shortcode. pagesWithShortcode := h.findPagesByShortcode(shortcode) for _, p := range pagesWithShortcode { - contentFilesChanged = append(contentFilesChanged, p.File.Filename()) + contentFilesChanged = append(contentFilesChanged, p.(*Page).File.Filename()) } } @@ -1047,12 +1048,13 @@ func (s *Site) setupSitePages() { var siteLastChange time.Time for i, page := range s.RegularPages { + pagep := page.(*Page) if i > 0 { - page.NextPage = s.RegularPages[i-1] + pagep.NextPage = s.RegularPages[i-1] } if i < len(s.RegularPages)-1 { - page.PrevPage = s.RegularPages[i+1] + pagep.PrevPage = s.RegularPages[i+1] } // Determine Site.Info.LastChange @@ -1060,8 +1062,8 @@ func (s *Site) setupSitePages() { // is already applied, so this is *the* date to use. // We cannot just pick the last page in the default sort, because // that may not be ordered by date. - if page.Lastmod.After(siteLastChange) { - siteLastChange = page.Lastmod + if pagep.Lastmod().After(siteLastChange) { + siteLastChange = pagep.Lastmod() } } @@ -1360,7 +1362,7 @@ func (s *Site) buildSiteMeta() (err error) { for _, p := range s.AllPages { // this depends on taxonomies - p.setValuesForKind(s) + p.(*Page).setValuesForKind(s) } return @@ -1438,18 +1440,18 @@ func (s *Site) assembleMenus() { if sectionPagesMenu != "" { for _, p := range pages { - if p.Kind == KindSection { + if p.Kind() == KindSection { // From Hugo 0.22 we have nested sections, but until we get a // feel of how that would work in this setting, let us keep // this menu for the top level only. 
- id := p.Section() + id := p.(*Page).Section() if _, ok := flat[twoD{sectionPagesMenu, id}]; ok { continue } me := MenuEntry{Identifier: id, Name: p.LinkTitle(), - Weight: p.Weight, + Weight: p.Weight(), URL: p.RelPermalink()} flat[twoD{sectionPagesMenu, me.KeyName()}] = &me } @@ -1458,9 +1460,10 @@ func (s *Site) assembleMenus() { // Add menu entries provided by pages for _, p := range pages { - for name, me := range p.Menus() { + pp := p.(*Page) + for name, me := range pp.Menus() { if _, ok := flat[twoD{name, me.KeyName()}]; ok { - s.SendError(p.errWithFileContext(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))) + s.SendError(p.(*Page).errWithFileContext(errors.Errorf("duplicate menu entry with identifier %q in menu %q", me.KeyName(), name))) continue } flat[twoD{name, me.KeyName()}] = me @@ -1526,12 +1529,13 @@ func (s *Site) assembleTaxonomies() { s.taxonomiesPluralSingular[plural] = singular for _, p := range s.Pages { - vals := p.getParam(plural, !s.Info.preserveTaxonomyNames) + pp := p.(*Page) + vals := pp.getParam(plural, !s.Info.preserveTaxonomyNames) - w := p.getParamToLower(plural + "_weight") + w := pp.getParamToLower(plural + "_weight") weight, err := cast.ToIntE(w) if err != nil { - s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %s", w, p.File.Path()) + s.Log.ERROR.Printf("Unable to convert taxonomy weight %#v to int for %s", w, pp.File.Path()) // weight will equal zero, so let the flow continue } @@ -1553,7 +1557,7 @@ func (s *Site) assembleTaxonomies() { s.taxonomiesOrigKey[fmt.Sprintf("%s-%s", plural, s.PathSpec.MakePathSanitized(v))] = v } } else { - s.Log.ERROR.Printf("Invalid %s in %s\n", plural, p.File.Path()) + s.Log.ERROR.Printf("Invalid %s in %s\n", plural, pp.File.Path()) } } } @@ -1579,10 +1583,11 @@ func (s *Site) resetBuildState() { s.expiredCount = 0 for _, p := range s.rawAllPages { - p.subSections = Pages{} - p.parent = nil - p.scratch = maps.NewScratch() - p.mainPageOutput = nil + pp := p.(*Page) + pp.subSections = Pages{} + pp.parent = nil + pp.scratch = maps.NewScratch() + pp.mainPageOutput = nil } } @@ -1594,10 +1599,11 @@ func (s *Site) preparePages() error { var errors []error for _, p := range s.Pages { - if err := p.prepareLayouts(); err != nil { + pp := p.(*Page) + if err := pp.prepareLayouts(); err != nil { errors = append(errors, err) } - if err := p.prepareData(s); err != nil { + if err := pp.prepareData(s); err != nil { errors = append(errors, err) } } @@ -1688,7 +1694,7 @@ func (s *Site) renderAndWritePage(statCounter *uint64, name string, targetPath s renderBuffer := bp.GetBuffer() defer bp.PutBuffer(renderBuffer) - if err := s.renderForLayouts(p.Kind, p, renderBuffer, layouts...); err != nil { + if err := s.renderForLayouts(p.Kind(), p, renderBuffer, layouts...); err != nil { return err } @@ -1809,14 +1815,14 @@ func (s *Site) newNodePage(typ string, sections ...string) *Page { language: s.Language, pageInit: &pageInit{}, pageContentInit: &pageContentInit{}, - Kind: typ, + kind: typ, File: &source.FileInfo{}, data: make(map[string]interface{}), Site: &s.Info, sections: sections, s: s} - p.outputFormats = p.s.outputFormats[p.Kind] + p.outputFormats = p.s.outputFormats[p.Kind()] return p diff --git a/hugolib/site_render.go b/hugolib/site_render.go index 4ce2b4c53d1..7e4cfefcf31 100644 --- a/hugolib/site_render.go +++ b/hugolib/site_render.go @@ -49,8 +49,9 @@ func (s *Site) renderPages(cfg *BuildCfg) error { } for _, page := range s.Pages { - if cfg.shouldRender(page) { - pages <- 
page + pagep := page.(*Page) + if cfg.shouldRender(pagep) { + pages <- pagep } } @@ -70,14 +71,15 @@ func (s *Site) renderPages(cfg *BuildCfg) error { func headlessPagesPublisher(s *Site, wg *sync.WaitGroup) { defer wg.Done() for _, page := range s.headlessPages { - outFormat := page.outputFormats[0] // There is only one + pagep := page.(*Page) + outFormat := pagep.outputFormats[0] // There is only one if outFormat.Name != s.rc.Format.Name { // Avoid double work. continue } - pageOutput, err := newPageOutput(page, false, false, outFormat) + pageOutput, err := newPageOutput(pagep, false, false, outFormat) if err == nil { - page.mainPageOutput = pageOutput + page.(*Page).mainPageOutput = pageOutput err = pageOutput.renderResources() } @@ -164,7 +166,7 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa continue } - s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind, targetPath, layouts) + s.Log.DEBUG.Printf("Render %s to %q with layouts %q", pageOutput.Kind(), targetPath, layouts) if err := s.renderAndWritePage(&s.PathSpec.ProcessingStats.Pages, "page "+pageOutput.FullFilePath(), targetPath, pageOutput, layouts...); err != nil { results <- err @@ -219,8 +221,8 @@ func (s *Site) renderPaginator(p *PageOutput) error { pagerNode.paginator = pager if pager.TotalPages() > 0 { first, _ := pager.page(0) - pagerNode.Date = first.Date - pagerNode.Lastmod = first.Lastmod + pagerNode.DDate = first.Date() + pagerNode.DLastMod = first.Lastmod() } pageNumber := i + 1 @@ -337,16 +339,17 @@ func (s *Site) renderSitemap() error { // TODO(bep) this should be done somewhere else for _, page := range pages { - if page.Sitemap.ChangeFreq == "" { - page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq + pagep := page.(*Page) + if pagep.Sitemap.ChangeFreq == "" { + pagep.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq } - if page.Sitemap.Priority == -1 { - page.Sitemap.Priority = sitemapDefault.Priority + if pagep.Sitemap.Priority == -1 { + pagep.Sitemap.Priority = sitemapDefault.Priority } - if page.Sitemap.Filename == "" { - page.Sitemap.Filename = sitemapDefault.Filename + if pagep.Sitemap.Filename == "" { + pagep.Sitemap.Filename = sitemapDefault.Filename } } @@ -392,32 +395,34 @@ func (s *Site) renderRobotsTXT() error { // renderAliases renders shell pages that simply have a redirect in the header. func (s *Site) renderAliases() error { for _, p := range s.Pages { - if len(p.Aliases) == 0 { + pp := p.(*Page) + + if len(pp.Aliases) == 0 { continue } - for _, f := range p.outputFormats { + for _, f := range pp.outputFormats { if !f.IsHTML { continue } - o := newOutputFormat(p, f) + o := newOutputFormat(pp, f) plink := o.Permalink() - for _, a := range p.Aliases { + for _, a := range pp.Aliases { if f.Path != "" { // Make sure AMP and similar doesn't clash with regular aliases. a = path.Join(a, f.Path) } - lang := p.Lang() + lang := pp.Lang() if s.owner.multihost && !strings.HasPrefix(a, "/"+lang) { // These need to be in its language root. 
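In the multihost case an alias has to live under its language root, which reduces to a prefix check plus a path.Join before the code below runs. A standalone sketch of that rule (hypothetical helper name):

package main

import (
	"fmt"
	"path"
	"strings"
)

// languageRootAlias prefixes an alias with the language code unless
// it is already language-rooted.
func languageRootAlias(alias, lang string) string {
	if strings.HasPrefix(alias, "/"+lang) {
		return alias
	}
	return path.Join(lang, alias)
}

func main() {
	fmt.Println(languageRootAlias("/posts/old-url", "nn")) // nn/posts/old-url
	fmt.Println(languageRootAlias("/nn/other", "nn"))      // /nn/other
}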
a = path.Join(lang, a) } - if err := s.writeDestAlias(a, plink, f, p); err != nil { + if err := s.writeDestAlias(a, plink, f, pp); err != nil { return err } } diff --git a/hugolib/site_sections.go b/hugolib/site_sections.go index 38f6a3b6fce..15e7354a433 100644 --- a/hugolib/site_sections.go +++ b/hugolib/site_sections.go @@ -20,6 +20,7 @@ import ( "strings" "github.com/gohugoio/hugo/helpers" + "github.com/gohugoio/hugo/resources/resource" radix "github.com/hashicorp/go-immutable-radix" ) @@ -112,7 +113,7 @@ func (p *Page) IsDescendant(other interface{}) (bool, error) { return false, err } - if pp.Kind == KindPage && len(p.sections) == len(pp.sections) { + if pp.Kind() == KindPage && len(p.sections) == len(pp.sections) { // A regular page is never its section's descendant. return false, nil } @@ -131,7 +132,7 @@ func (p *Page) IsAncestor(other interface{}) (bool, error) { return false, err } - if p.Kind == KindPage && len(p.sections) == len(pp.sections) { + if p.Kind() == KindPage && len(p.sections) == len(pp.sections) { // A regular page is never its section's ancestor. return false, nil } @@ -180,11 +181,12 @@ func (s *Site) assembleSections() Pages { } // Maps section kind pages to their path, i.e. "my/section" - sectionPages := make(map[string]*Page) + sectionPages := make(map[string]resource.Page) // The sections with content files will already have been created. for _, sect := range s.findPagesByKind(KindSection) { - sectionPages[path.Join(sect.sections...)] = sect + sectp := sect.(*Page) + sectionPages[path.Join(sectp.sections...)] = sect } const ( @@ -202,33 +204,35 @@ func (s *Site) assembleSections() Pages { home := s.findFirstPageByKindIn(KindHome, s.Pages) for i, p := range s.Pages { - if p.Kind != KindPage { + if p.Kind() != KindPage { continue } - if len(p.sections) == 0 { + pp := p.(*Page) + + if len(pp.sections) == 0 { // Root level pages. These will have the home page as their Parent. - p.parent = home + pp.parent = home continue } - sectionKey := path.Join(p.sections...) + sectionKey := path.Join(pp.sections...) sect, found := sectionPages[sectionKey] - if !found && len(p.sections) == 1 { + if !found && len(pp.sections) == 1 { // We only create content-file-less sections for the root sections. - sect = s.newSectionPage(p.sections[0]) + sect = s.newSectionPage(pp.sections[0]) sectionPages[sectionKey] = sect newPages = append(newPages, sect) found = true } - if len(p.sections) > 1 { + if len(pp.sections) > 1 { // Create the root section if not found. - _, rootFound := sectionPages[p.sections[0]] + _, rootFound := sectionPages[pp.sections[0]] if !rootFound { - sect = s.newSectionPage(p.sections[0]) - sectionPages[p.sections[0]] = sect + sect = s.newSectionPage(pp.sections[0]) + sectionPages[pp.sections[0]] = sect newPages = append(newPages, sect) } } @@ -246,13 +250,14 @@ func (s *Site) assembleSections() Pages { // given a content file in /content/a/b/c/_index.md, we cannot create just // the c section. for _, sect := range sectionPages { - for i := len(sect.sections); i > 0; i-- { - sectionPath := sect.sections[:i] + sectp := sect.(*Page) + for i := len(sectp.sections); i > 0; i-- { + sectionPath := sectp.sections[:i] sectionKey := path.Join(sectionPath...) 
sect, found := sectionPages[sectionKey] if !found { sect = s.newSectionPage(sectionPath[len(sectionPath)-1]) - sect.sections = sectionPath + sectp.sections = sectionPath sectionPages[sectionKey] = sect newPages = append(newPages, sect) } @@ -271,8 +276,9 @@ func (s *Site) assembleSections() Pages { ) for i, p := range undecided { + pp := p.(*Page) // Now we can decide where to put this page into the tree. - sectionKey := path.Join(p.sections...) + sectionKey := path.Join(pp.sections...) _, v, _ := rootSections.LongestPrefix([]byte(sectionKey)) sect := v.(*Page) pagePath := path.Join(path.Join(sect.sections...), sectSectKey, "u", strconv.Itoa(i)) @@ -284,7 +290,7 @@ func (s *Site) assembleSections() Pages { rootPages.Walk(func(path []byte, v interface{}) bool { p := v.(*Page) - if p.Kind == KindSection { + if p.Kind() == KindSection { if currentSection != nil { // A new section currentSection.setPagePages(children) @@ -309,17 +315,18 @@ func (s *Site) assembleSections() Pages { // Build the sections hierarchy for _, sect := range sectionPages { - if len(sect.sections) == 1 { - sect.parent = home + sectp := sect.(*Page) + if len(sectp.sections) == 1 { + sectp.parent = home } else { - parentSearchKey := path.Join(sect.sections[:len(sect.sections)-1]...) + parentSearchKey := path.Join(sectp.sections[:len(sectp.sections)-1]...) _, v, _ := rootSections.LongestPrefix([]byte(parentSearchKey)) p := v.(*Page) - sect.parent = p + sectp.parent = p } - if sect.parent != nil { - sect.parent.subSections = append(sect.parent.subSections, sect) + if sectp.parent != nil { + sectp.parent.subSections = append(sectp.parent.subSections, sect) } } @@ -334,23 +341,25 @@ func (s *Site) assembleSections() Pages { mainSections, mainSectionsFound = s.Info.Params[sectionsParamIdLower] for _, sect := range sectionPages { - if sect.parent != nil { - sect.parent.subSections.sort() + sectp := sect.(*Page) + if sectp.parent != nil { + sectp.parent.subSections.sort() } - for i, p := range sect.Pages { + for i, p := range sectp.Pages { + pp := p.(*Page) if i > 0 { - p.NextInSection = sect.Pages[i-1] + pp.NextInSection = sectp.Pages[i-1] } - if i < len(sect.Pages)-1 { - p.PrevInSection = sect.Pages[i+1] + if i < len(sectp.Pages)-1 { + pp.PrevInSection = sectp.Pages[i+1] } } if !mainSectionsFound { - weight := len(sect.Pages) + (len(sect.Sections()) * 5) + weight := len(sectp.Pages) + (len(sectp.Sections()) * 5) if weight >= maxSectionWeight { - mainSections = []string{sect.Section()} + mainSections = []string{sectp.Section()} maxSectionWeight = weight } } diff --git a/hugolib/site_sections_test.go b/hugolib/site_sections_test.go index 1987d2bcb1e..acdcc00b193 100644 --- a/hugolib/site_sections_test.go +++ b/hugolib/site_sections_test.go @@ -126,13 +126,13 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"elsewhere", func(p *Page) { assert.Len(p.Pages, 1) for _, p := range p.Pages { - assert.Equal([]string{"elsewhere"}, p.sections) + assert.Equal([]string{"elsewhere"}, p.(*Page).sections) } }}, {"post", func(p *Page) { assert.Len(p.Pages, 2) for _, p := range p.Pages { - assert.Equal("post", p.Section()) + assert.Equal("post", p.(*Page).Section()) } }}, {"empty1", func(p *Page) { @@ -163,7 +163,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . 
}} b := p.s.getPage(KindSection, "empty3", "b") assert.NotNil(b) assert.Len(b.Pages, 1) - assert.Equal("empty3.md", b.Pages[0].File.LogicalName()) + assert.Equal("empty3.md", b.Pages[0].(*Page).File.LogicalName()) }}, {"empty3", func(p *Page) { @@ -174,8 +174,8 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"top", func(p *Page) { assert.Equal("Tops", p.title) assert.Len(p.Pages, 2) - assert.Equal("mypage2.md", p.Pages[0].LogicalName()) - assert.Equal("mypage3.md", p.Pages[1].LogicalName()) + assert.Equal("mypage2.md", p.Pages[0].(*Page).LogicalName()) + assert.Equal("mypage3.md", p.Pages[1].(*Page).LogicalName()) home := p.Parent() assert.True(home.IsHome()) assert.Len(p.Sections(), 0) @@ -194,15 +194,16 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"l1,l2", func(p *Page) { assert.Equal("T2_-1", p.title) assert.Len(p.Pages, 3) - assert.Equal(p, p.Pages[0].Parent()) + assert.Equal(p, p.Pages[0].(*Page).Parent()) assert.Equal("L1s", p.Parent().title) assert.Equal("/l1/l2/", p.URLPath.URL) assert.Equal("/l1/l2/", p.RelPermalink()) assert.Len(p.Sections(), 1) for _, child := range p.Pages { - assert.Equal(p, child.CurrentSection()) - active, err := child.InSection(p) + childp := child.(*Page) + assert.Equal(p, childp.CurrentSection()) + active, err := childp.InSection(p) assert.NoError(err) assert.True(active) active, err = p.InSection(child) @@ -215,14 +216,14 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} isAncestor, err := p.IsAncestor(child) assert.NoError(err) assert.True(isAncestor) - isAncestor, err = child.IsAncestor(p) + isAncestor, err = childp.IsAncestor(p) assert.NoError(err) assert.False(isAncestor) isDescendant, err := p.IsDescendant(child) assert.NoError(err) assert.False(isDescendant) - isDescendant, err = child.IsDescendant(p) + isDescendant, err = childp.IsDescendant(p) assert.NoError(err) assert.True(isDescendant) } @@ -233,7 +234,7 @@ PAG|{{ .Title }}|{{ $sect.InSection . }} {"l1,l2_2", func(p *Page) { assert.Equal("T22_-1", p.title) assert.Len(p.Pages, 2) - assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].Path()) + assert.Equal(filepath.FromSlash("l1/l2_2/page_2_2_1.md"), p.Pages[0].(*Page).Path()) assert.Equal("L1s", p.Parent().title) assert.Len(p.Sections(), 0) }}, diff --git a/hugolib/site_test.go b/hugolib/site_test.go index 4f8d43122ff..9d8d80ea168 100644 --- a/hugolib/site_test.go +++ b/hugolib/site_test.go @@ -138,7 +138,7 @@ func TestFutureExpirationRender(t *testing.T) { } } - if s.AllPages[0].title == "doc2" { + if s.AllPages[0].Title() == "doc2" { t.Fatal("Expired content published unexpectedly") } } @@ -336,7 +336,7 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) { } for _, p := range s.RegularPages { - assert.False(t, p.IsHome()) + assert.False(t, p.(*Page).IsHome()) } for _, test := range tests { @@ -611,40 +611,40 @@ func TestOrderedPages(t *testing.T) { s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{SkipRender: true}) - if s.getPage(KindSection, "sect").Pages[1].title != "Three" || s.getPage(KindSection, "sect").Pages[2].title != "Four" { + if s.getPage(KindSection, "sect").Pages[1].Title() != "Three" || s.getPage(KindSection, "sect").Pages[2].Title() != "Four" { t.Error("Pages in unexpected order.") } bydate := s.RegularPages.ByDate() - if bydate[0].title != "One" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].title) + if bydate[0].Title() != "One" { + t.Errorf("Pages in unexpected order. 
First should be '%s', got '%s'", "One", bydate[0].Title()) } rev := bydate.Reverse() - if rev[0].title != "Three" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].title) + if rev[0].Title() != "Three" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title()) } bypubdate := s.RegularPages.ByPublishDate() - if bypubdate[0].title != "One" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].title) + if bypubdate[0].Title() != "One" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title()) } rbypubdate := bypubdate.Reverse() - if rbypubdate[0].title != "Three" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].title) + if rbypubdate[0].Title() != "Three" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title()) } bylength := s.RegularPages.ByLength() - if bylength[0].title != "One" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].title) + if bylength[0].Title() != "One" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title()) } rbylength := bylength.Reverse() - if rbylength[0].title != "Four" { - t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].title) + if rbylength[0].Title() != "Four" { + t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Four", rbylength[0].Title()) } } @@ -683,8 +683,8 @@ func TestGroupedPages(t *testing.T) { if rbysection[2].Key != "sect1" { t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect1", rbysection[2].Key) } - if rbysection[0].Pages[0].title != "Four" { - t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].title) + if rbysection[0].Pages[0].Title() != "Four" { + t.Errorf("PageGroup has an unexpected page. First group's pages should have '%s', got '%s'", "Four", rbysection[0].Pages[0].Title()) } if len(rbysection[2].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages)) @@ -703,8 +703,8 @@ func TestGroupedPages(t *testing.T) { if bytype[2].Key != "sect3" { t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "sect3", bytype[2].Key) } - if bytype[2].Pages[0].title != "Four" { - t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].title) + if bytype[2].Pages[0].Title() != "Four" { + t.Errorf("PageGroup has an unexpected page. Third group's data should have '%s', got '%s'", "Four", bytype[0].Pages[0].Title()) } if len(bytype[0].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages)) @@ -731,8 +731,8 @@ func TestGroupedPages(t *testing.T) { if bypubdate[1].Key != "0001" { t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "0001", bypubdate[1].Key) } - if bypubdate[0].Pages[0].title != "Three" { - t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].title) + if bypubdate[0].Pages[0].Title() != "Three" { + t.Errorf("PageGroup has an unexpected page. 
Third group's pages should have '%s', got '%s'", "Three", bypubdate[0].Pages[0].Title()) } if len(bypubdate[0].Pages) != 3 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages)) @@ -751,8 +751,8 @@ func TestGroupedPages(t *testing.T) { if byparam[2].Key != "bar" { t.Errorf("PageGroup array in unexpected order. Third group key should be '%s', got '%s'", "bar", byparam[2].Key) } - if byparam[2].Pages[0].title != "Three" { - t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].title) + if byparam[2].Pages[0].Title() != "Three" { + t.Errorf("PageGroup has an unexpected page. Third group's pages should have '%s', got '%s'", "Three", byparam[2].Pages[0].Title()) } if len(byparam[0].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages)) @@ -784,8 +784,8 @@ func TestGroupedPages(t *testing.T) { if byParamDate[1].Key != "1979-05" { t.Errorf("PageGroup array in unexpected order. Second group key should be '%s', got '%s'", "1979-05", byParamDate[1].Key) } - if byParamDate[1].Pages[0].title != "One" { - t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].title) + if byParamDate[1].Pages[0].Title() != "One" { + t.Errorf("PageGroup has an unexpected page. Second group's pages should have '%s', got '%s'", "One", byParamDate[1].Pages[0].Title()) } if len(byParamDate[0].Pages) != 2 { t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byParamDate[2].Pages)) @@ -841,16 +841,16 @@ func TestWeightedTaxonomies(t *testing.T) { writeSourcesToSource(t, "content", fs, sources...) s := buildSingleSite(t, deps.DepsCfg{Fs: fs, Cfg: cfg}, BuildCfg{}) - if s.Taxonomies["tags"]["a"][0].Page.title != "foo" { - t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.title) + if s.Taxonomies["tags"]["a"][0].Page.Title() != "foo" { + t.Errorf("Pages in unexpected order, 'foo' expected first, got '%v'", s.Taxonomies["tags"]["a"][0].Page.Title()) } - if s.Taxonomies["categories"]["d"][0].Page.title != "bar" { - t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.title) + if s.Taxonomies["categories"]["d"][0].Page.Title() != "bar" { + t.Errorf("Pages in unexpected order, 'bar' expected first, got '%v'", s.Taxonomies["categories"]["d"][0].Page.Title()) } - if s.Taxonomies["categories"]["e"][0].Page.title != "bza" { - t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.title) + if s.Taxonomies["categories"]["e"][0].Page.Title() != "bza" { + t.Errorf("Pages in unexpected order, 'bza' expected first, got '%v'", s.Taxonomies["categories"]["e"][0].Page.Title()) } } diff --git a/hugolib/taxonomy.go b/hugolib/taxonomy.go index c8447d1bae0..eaafbdacdd9 100644 --- a/hugolib/taxonomy.go +++ b/hugolib/taxonomy.go @@ -16,6 +16,8 @@ package hugolib import ( "fmt" "sort" + + "github.com/gohugoio/hugo/resources/resource" ) // The TaxonomyList is a list of all taxonomies and their values @@ -39,11 +41,11 @@ type WeightedPages []WeightedPage // A WeightedPage is a Page with a weight. 
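Embedding the resource.Page interface value in WeightedPage (below) keeps the wrapper independent of *Page. Sorting such a slice goes weight first, then date, then title; note that the date tie-breaker has to pair index i with index j — the pre-existing line removed in the Less hunk further down compared wp[i].Page.Date with itself. A compact sketch of the corrected ordering with toy types:

package main

import (
	"fmt"
	"sort"
	"time"
)

type Page interface {
	Title() string
	Date() time.Time
}

type page struct {
	title string
	date  time.Time
}

func (p page) Title() string   { return p.title }
func (p page) Date() time.Time { return p.date }

// WeightedPage pairs a weight with an embedded Page interface value.
type WeightedPage struct {
	Weight int
	Page
}

type weightedPages []WeightedPage

func (wp weightedPages) Len() int      { return len(wp) }
func (wp weightedPages) Swap(i, j int) { wp[i], wp[j] = wp[j], wp[i] }
func (wp weightedPages) Less(i, j int) bool {
	if wp[i].Weight == wp[j].Weight {
		if wp[i].Date().Equal(wp[j].Date()) {
			return wp[i].Title() < wp[j].Title()
		}
		// Compare i against j; a date compared with itself is never After.
		return wp[i].Date().After(wp[j].Date())
	}
	return wp[i].Weight < wp[j].Weight
}

func main() {
	now := time.Now()
	wp := weightedPages{
		{2, page{"b", now}},
		{1, page{"a", now}},
	}
	sort.Sort(wp)
	fmt.Println(wp[0].Title()) // a
}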
type WeightedPage struct { Weight int - *Page + resource.Page } func (w WeightedPage) String() string { - return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.title) + return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title()) } // OrderedTaxonomy is another representation of a Taxonomy using an array rather than a map. @@ -176,9 +178,9 @@ func (wp WeightedPages) Pages() Pages { // Prev returns the previous Page relative to the given Page in // this weighted page set. -func (wp WeightedPages) Prev(cur *Page) *Page { +func (wp WeightedPages) Prev(cur resource.Page) resource.Page { for x, c := range wp { - if c.Page.UniqueID() == cur.UniqueID() { + if c.Page == cur { if x == 0 { return wp[len(wp)-1].Page } @@ -190,9 +192,9 @@ // Next returns the next Page relative to the given Page in // this weighted page set. -func (wp WeightedPages) Next(cur *Page) *Page { +func (wp WeightedPages) Next(cur resource.Page) resource.Page { for x, c := range wp { - if c.Page.UniqueID() == cur.UniqueID() { + if c.Page == cur { if x < len(wp)-1 { return wp[x+1].Page } @@ -213,10 +215,10 @@ func (wp WeightedPages) Count() int { return len(wp) } func (wp WeightedPages) Less(i, j int) bool { if wp[i].Weight == wp[j].Weight { - if wp[i].Page.Date.Equal(wp[j].Page.Date) { - return wp[i].Page.title < wp[j].Page.title + if wp[i].Page.Date().Equal(wp[j].Page.Date()) { + return wp[i].Page.Title() < wp[j].Page.Title() } - return wp[i].Page.Date.After(wp[i].Page.Date) + return wp[i].Page.Date().After(wp[j].Page.Date()) } return wp[i].Weight < wp[j].Weight } diff --git a/hugolib/taxonomy_test.go b/hugolib/taxonomy_test.go index 1ae9fae228f..6578698f952 100644 --- a/hugolib/taxonomy_test.go +++ b/hugolib/taxonomy_test.go @@ -186,7 +186,7 @@ permalinkeds: require.Len(t, term.Pages, count) for _, page := range term.Pages { - require.Equal(t, KindTaxonomy, page.Kind) + require.Equal(t, KindTaxonomy, page.Kind()) } } diff --git a/hugolib/testhelpers_test.go b/hugolib/testhelpers_test.go index 64d1ff96a23..e761a26dec2 100644 --- a/hugolib/testhelpers_test.go +++ b/hugolib/testhelpers_test.go @@ -700,7 +700,7 @@ func dumpPages(pages ...*Page) { for i, p := range pages { fmt.Printf("%d: Kind: %s Title: %-10s RelPermalink: %-10s Path: %-10s sections: %s Len Sections(): %d\n", i+1, - p.Kind, p.title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections())) + p.Kind(), p.title, p.RelPermalink(), p.Path(), p.sections, len(p.Sections())) } } diff --git a/hugolib/translations.go b/hugolib/translations.go index 2682363f003..97c5376a05f 100644 --- a/hugolib/translations.go +++ b/hugolib/translations.go @@ -13,23 +13,28 @@ package hugolib +import ( + "github.com/gohugoio/hugo/resources/resource" +) + // Translations represent the other translations for a given page. The // string here is the language code, as affected by the `post.LANG.md` // filename.
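The translations bookkeeping below is a two-level map, translation key → language code → page. A stripped-down sketch of how such a map gets assembled (toy types, not the hugolib ones):

package main

import "fmt"

type Page interface {
	TranslationKey() string
	Lang() string
}

type page struct{ key, lang string }

func (p page) TranslationKey() string { return p.key }
func (p page) Lang() string           { return p.lang }

// Translations maps a language code to the page in that language.
type Translations map[string]Page

// toTranslationsMap groups pages that share a translation key.
func toTranslationsMap(pages []Page) map[string]Translations {
	out := make(map[string]Translations)
	for _, p := range pages {
		base := p.TranslationKey()
		if out[base] == nil {
			out[base] = make(Translations)
		}
		out[base][p.Lang()] = p
	}
	return out
}

func main() {
	m := toTranslationsMap([]Page{
		page{"post/hello", "en"},
		page{"post/hello", "nn"},
	})
	fmt.Println(len(m["post/hello"])) // 2
}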
-type Translations map[string]*Page +type Translations map[string]resource.Page -func pagesToTranslationsMap(pages []*Page) map[string]Translations { +func pagesToTranslationsMap(pages Pages) map[string]Translations { out := make(map[string]Translations) for _, page := range pages { - base := page.TranslationKey() + pagep := page.(*Page) + base := pagep.TranslationKey() pageTranslation, present := out[base] if !present { pageTranslation = make(Translations) } - pageLang := page.Lang() + pageLang := pagep.Lang() if pageLang == "" { continue } @@ -41,19 +46,20 @@ func pagesToTranslationsMap(pages []*Page) map[string]Translations { return out } -func assignTranslationsToPages(allTranslations map[string]Translations, pages []*Page) { +func assignTranslationsToPages(allTranslations map[string]Translations, pages Pages) { for _, page := range pages { - page.translations = page.translations[:0] - base := page.TranslationKey() + pagep := page.(*Page) + pagep.translations = pagep.translations[:0] + base := pagep.TranslationKey() trans, exist := allTranslations[base] if !exist { continue } for _, translatedPage := range trans { - page.translations = append(page.translations, translatedPage) + pagep.translations = append(pagep.translations, translatedPage) } - pageBy(languagePageSort).Sort(page.translations) + pageBy(languagePageSort).Sort(pagep.translations) } } diff --git a/related/inverted_index.go b/related/inverted_index.go index 309eb4097d0..7dcf50e4b33 100644 --- a/related/inverted_index.go +++ b/related/inverted_index.go @@ -110,7 +110,7 @@ type Document interface { SearchKeywords(cfg IndexConfig) ([]Keyword, error) // When this document was or will be published. - PubDate() time.Time + PublishDate() time.Time } // InvertedIndex holds an inverted index, also sometimes named posting list, which @@ -211,7 +211,7 @@ func (r ranks) Len() int { return len(r) } func (r ranks) Swap(i, j int) { r[i], r[j] = r[j], r[i] } func (r ranks) Less(i, j int) bool { if r[i].Weight == r[j].Weight { - return r[i].Doc.PubDate().After(r[j].Doc.PubDate()) + return r[i].Doc.PublishDate().After(r[j].Doc.PublishDate()) } return r[i].Weight > r[j].Weight } @@ -250,7 +250,7 @@ func (idx *InvertedIndex) SearchDoc(doc Document, indices ...string) ([]Document } - return idx.searchDate(doc.PubDate(), q...) + return idx.searchDate(doc.PublishDate(), q...) } // ToKeywords returns a Keyword slice of the given input. @@ -344,7 +344,7 @@ func (idx *InvertedIndex) searchDate(upperDate time.Time, query ...queryElement) for _, doc := range docs { if applyDateFilter { // Exclude newer than the limit given - if doc.PubDate().After(upperDate) { + if doc.PublishDate().After(upperDate) { continue } } diff --git a/related/inverted_index_test.go b/related/inverted_index_test.go index 2e6b90bbf0b..eeba8111d09 100644 --- a/related/inverted_index_test.go +++ b/related/inverted_index_test.go @@ -72,7 +72,7 @@ func (d *testDoc) SearchKeywords(cfg IndexConfig) ([]Keyword, error) { return d.keywords[cfg.Name], nil } -func (d *testDoc) PubDate() time.Time { +func (d *testDoc) PublishDate() time.Time { return d.date } diff --git a/resources/resource/page.go b/resources/resource/page.go new file mode 100644 index 00000000000..fbe539fb730 --- /dev/null +++ b/resources/resource/page.go @@ -0,0 +1,48 @@ +// Copyright 2019 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package resource + +import ( + "time" + + "github.com/gohugoio/hugo/related" +) + +type Page interface { + Resource + Dates + + Kind() string + + Param(key interface{}) (interface{}, error) + + Weight() int + LinkTitle() string + + Resources() Resources + + // Make it indexable as a related.Document + SearchKeywords(cfg related.IndexConfig) ([]related.Keyword, error) +} + +type Dates interface { + Date() time.Time + Lastmod() time.Time + PublishDate() time.Time + ExpiryDate() time.Time +} + +type LengthProvider interface { + Len() int +} diff --git a/tpl/collections/collections.go b/tpl/collections/collections.go index bad65369fab..2353c206c86 100644 --- a/tpl/collections/collections.go +++ b/tpl/collections/collections.go @@ -329,13 +329,18 @@ func (ns *Namespace) Group(key interface{}, items interface{}) (interface{}, err return nil, errors.New("nil is not a valid key to group by") } + if g, ok := items.(collections.Grouper); ok { + return g.Group(key, items) + } + + // TODO(bep) page need below? in := newSliceElement(items) if g, ok := in.(collections.Grouper); ok { return g.Group(key, items) } - return nil, fmt.Errorf("grouping not supported for type %T", items) + return nil, fmt.Errorf("grouping not supported for type %T %T", items, in) } // IsSet returns whether a given array, channel, slice, or map has a key
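The tpl/collections change above makes Group ask the collection itself first, via the Grouper interface, before falling back to reflection over the slice element. A self-contained sketch of that dispatch order, assuming a Grouper with this shape:

package main

import "fmt"

// Grouper lets a collection type take over grouping, so the template
// function can delegate instead of reflecting over the slice.
type Grouper interface {
	Group(key interface{}, items interface{}) (interface{}, error)
}

// namedList is a toy collection that knows how to wrap itself in a keyed group.
type namedList []string

type keyedGroup struct {
	Key   interface{}
	Items namedList
}

func (l namedList) Group(key interface{}, items interface{}) (interface{}, error) {
	list, ok := items.(namedList)
	if !ok {
		return nil, fmt.Errorf("cannot group %T", items)
	}
	return keyedGroup{Key: key, Items: list}, nil
}

// group mirrors the new dispatch order: try the Grouper first.
func group(key interface{}, items interface{}) (interface{}, error) {
	if g, ok := items.(Grouper); ok {
		return g.Group(key, items)
	}
	return nil, fmt.Errorf("grouping not supported for type %T", items)
}

func main() {
	g, err := group("fruit", namedList{"apple", "pear"})
	fmt.Println(g, err) // {fruit [apple pear]} <nil>
}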