node to page: Make Nodes into Pages

* `.Site.Pages` now contains all page types, including sections etc.
* `.Data.Pages` will also contain "node type" pages where relevant.

Updates #2297
This commit is contained in:
Bjørn Erik Pedersen 2016-11-11 09:01:47 +01:00
parent 88972b0d5b
commit 063b78d2ec
15 changed files with 135 additions and 156 deletions

View file

@ -66,9 +66,9 @@ func doTestShortcodeCrossrefs(t *testing.T, relative bool) {
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, sites.Build(BuildCfg{})) require.NoError(t, sites.Build(BuildCfg{}))
require.Len(t, sites.Sites[0].Pages, 1) require.Len(t, sites.Sites[0].regularPages, 1)
output := string(sites.Sites[0].Pages[0].Content) output := string(sites.Sites[0].regularPages[0].Content)
if !strings.Contains(output, expected) { if !strings.Contains(output, expected) {
t.Errorf("Got\n%q\nExpected\n%q", output, expected) t.Errorf("Got\n%q\nExpected\n%q", output, expected)

View file

@ -196,9 +196,9 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
func (h *HugoSites) assignMissingTranslations() error { func (h *HugoSites) assignMissingTranslations() error {
// This looks heavy, but it should be a small number of nodes by now. // This looks heavy, but it should be a small number of nodes by now.
allNodes := h.findAllPagesByNodeTypeNotIn(NodePage) allPages := h.findAllPagesByNodeTypeNotIn(NodePage)
for _, nodeType := range []NodeType{NodeHome, NodeSection, NodeTaxonomy, NodeTaxonomyTerms} { for _, nodeType := range []NodeType{NodeHome, NodeSection, NodeTaxonomy, NodeTaxonomyTerms} {
nodes := h.findPagesByNodeTypeIn(nodeType, allNodes) nodes := h.findPagesByNodeTypeIn(nodeType, allPages)
// Assign translations // Assign translations
for _, t1 := range nodes { for _, t1 := range nodes {
@ -213,14 +213,12 @@ func (h *HugoSites) assignMissingTranslations() error {
} }
// createMissingNodes creates home page, taxonomies etc. that isnt't created as an // createMissingPages creates home page, taxonomies etc. that isnt't created as an
// effect of having a content file. // effect of having a content file.
func (h *HugoSites) createMissingNodes() error { func (h *HugoSites) createMissingPages() error {
// TODO(bep) np revisit this on languages -- as this is currently run after the page language distribution (due to taxonomies)
// TODO(bep) np re above, Pages vs.
// TODO(bep) np check node title etc. // TODO(bep) np check node title etc.
var newNodes Pages var newPages Pages
for _, s := range h.Sites { for _, s := range h.Sites {
@ -231,8 +229,8 @@ func (h *HugoSites) createMissingNodes() error {
} }
if len(home) == 0 { if len(home) == 0 {
n := s.newHomePage() n := s.newHomePage()
s.Nodes = append(s.Nodes, n) s.Pages = append(s.Pages, n)
newNodes = append(newNodes, n) newPages = append(newPages, n)
} }
// taxonomy list and terms pages // taxonomy list and terms pages
@ -259,15 +257,15 @@ func (h *HugoSites) createMissingNodes() error {
} }
if !foundTaxonomyPage { if !foundTaxonomyPage {
n := s.newTaxonomyPage(plural, key) n := s.newTaxonomyPage(plural, key)
s.Nodes = append(s.Nodes, n) s.Pages = append(s.Pages, n)
newNodes = append(newNodes, n) newPages = append(newPages, n)
} }
if !foundTaxonomyTermsPage { if !foundTaxonomyTermsPage {
foundTaxonomyTermsPage = true foundTaxonomyTermsPage = true
n := s.newTaxonomyTermsPage(plural) n := s.newTaxonomyTermsPage(plural)
s.Nodes = append(s.Nodes, n) s.Pages = append(s.Pages, n)
newNodes = append(newNodes, n) newPages = append(newPages, n)
} }
} }
} }
@ -292,18 +290,18 @@ func (h *HugoSites) createMissingNodes() error {
} }
if !foundSection { if !foundSection {
n := s.newSectionPage(name, section) n := s.newSectionPage(name, section)
s.Nodes = append(s.Nodes, n) s.Pages = append(s.Pages, n)
newNodes = append(newNodes, n) newPages = append(newPages, n)
} }
} }
} }
} }
if len(newNodes) > 0 { if len(newPages) > 0 {
first := h.Sites[0] first := h.Sites[0]
first.AllNodes = append(first.AllNodes, newNodes...) first.AllPages = append(first.AllPages, newPages...)
for i := 1; i < len(h.Sites); i++ { for i := 1; i < len(h.Sites); i++ {
h.Sites[i].AllNodes = first.AllNodes h.Sites[i].AllPages = first.AllPages
} }
} }
return nil return nil
@ -404,7 +402,7 @@ func (h *HugoSites) setupTranslations() {
if strings.HasPrefix(site.Language.Lang, p.Lang()) { if strings.HasPrefix(site.Language.Lang, p.Lang()) {
site.updateBuildStats(p) site.updateBuildStats(p)
if shouldBuild { if shouldBuild {
site.Nodes = append(site.Nodes, p) site.Pages = append(site.Pages, p)
p.Site = &site.Info p.Site = &site.Info
} }
} }
@ -414,7 +412,7 @@ func (h *HugoSites) setupTranslations() {
} }
if i == 0 { if i == 0 {
site.AllNodes = append(site.AllNodes, p) site.AllPages = append(site.AllPages, p)
} }
} }
@ -422,12 +420,12 @@ func (h *HugoSites) setupTranslations() {
// Pull over the collections from the master site // Pull over the collections from the master site
for i := 1; i < len(h.Sites); i++ { for i := 1; i < len(h.Sites); i++ {
h.Sites[i].AllNodes = h.Sites[0].AllNodes h.Sites[i].AllPages = h.Sites[0].AllPages
h.Sites[i].Data = h.Sites[0].Data h.Sites[i].Data = h.Sites[0].Data
} }
if len(h.Sites) > 1 { if len(h.Sites) > 1 {
pages := h.Sites[0].AllNodes pages := h.Sites[0].AllPages
allTranslations := pagesToTranslationsMap(h.multilingual, pages) allTranslations := pagesToTranslationsMap(h.multilingual, pages)
assignTranslationsToPages(allTranslations, pages) assignTranslationsToPages(allTranslations, pages)
} }
@ -524,7 +522,7 @@ func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
}(pageChan, wg) }(pageChan, wg)
} }
for _, p := range s.Nodes { for _, p := range s.Pages {
pageChan <- p pageChan <- p
} }
@ -586,7 +584,7 @@ func (h *HugoSites) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
} }
func (h *HugoSites) findAllPagesByNodeTypeNotIn(n NodeType) Pages { func (h *HugoSites) findAllPagesByNodeTypeNotIn(n NodeType) Pages {
return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllNodes) return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllPages)
} }
// Convenience func used in tests to build a single site/language excluding render phase. // Convenience func used in tests to build a single site/language excluding render phase.

View file

@ -156,7 +156,7 @@ func (h *HugoSites) assemble(config *BuildCfg) error {
} }
} }
if err := h.createMissingNodes(); err != nil { if err := h.createMissingPages(); err != nil {
return err return err
} }

View file

@ -84,8 +84,8 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
require.Equal(t, "/blog/en/foo", enSite.Info.pathSpec.RelURL("foo", true)) require.Equal(t, "/blog/en/foo", enSite.Info.pathSpec.RelURL("foo", true))
doc1en := enSite.Pages[0] doc1en := enSite.regularPages[0]
doc1fr := frSite.Pages[0] doc1fr := frSite.regularPages[0]
enPerm, _ := doc1en.Permalink() enPerm, _ := doc1en.Permalink()
enRelPerm, _ := doc1en.RelPermalink() enRelPerm, _ := doc1en.RelPermalink()
@ -217,24 +217,24 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
assert.Equal(t, "en", enSite.Language.Lang) assert.Equal(t, "en", enSite.Language.Lang)
if len(enSite.Pages) != 4 { if len(enSite.regularPages) != 4 {
t.Fatal("Expected 4 english pages") t.Fatal("Expected 4 english pages")
} }
assert.Len(t, enSite.Source.Files(), 14, "should have 13 source files") assert.Len(t, enSite.Source.Files(), 14, "should have 13 source files")
assert.Len(t, enSite.AllPages, 9, "should have 8 total pages (including translations)") assert.Len(t, enSite.AllPages, 28, "should have 28 total pages (including translations and index types)")
doc1en := enSite.Pages[0] doc1en := enSite.regularPages[0]
permalink, err := doc1en.Permalink() permalink, err := doc1en.Permalink()
assert.NoError(t, err, "permalink call failed") assert.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink") assert.Equal(t, "http://example.com/blog/en/sect/doc1-slug/", permalink, "invalid doc1.en permalink")
assert.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself") assert.Len(t, doc1en.Translations(), 1, "doc1-en should have one translation, excluding itself")
doc2 := enSite.Pages[1] doc2 := enSite.regularPages[1]
permalink, err = doc2.Permalink() permalink, err = doc2.Permalink()
assert.NoError(t, err, "permalink call failed") assert.NoError(t, err, "permalink call failed")
assert.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink") assert.Equal(t, "http://example.com/blog/en/sect/doc2/", permalink, "invalid doc2 permalink")
doc3 := enSite.Pages[2] doc3 := enSite.regularPages[2]
permalink, err = doc3.Permalink() permalink, err = doc3.Permalink()
assert.NoError(t, err, "permalink call failed") assert.NoError(t, err, "permalink call failed")
// Note that /superbob is a custom URL set in frontmatter. // Note that /superbob is a custom URL set in frontmatter.
@ -277,10 +277,10 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
frSite := sites.Sites[1] frSite := sites.Sites[1]
assert.Equal(t, "fr", frSite.Language.Lang) assert.Equal(t, "fr", frSite.Language.Lang)
assert.Len(t, frSite.Pages, 3, "should have 3 pages") assert.Len(t, frSite.regularPages, 3, "should have 3 pages")
assert.Len(t, frSite.AllPages, 9, "should have 8 total pages (including translations)") assert.Len(t, frSite.AllPages, 28, "should have 28 total pages (including translations and nodes)")
for _, frenchPage := range frSite.Pages { for _, frenchPage := range frSite.regularPages {
assert.Equal(t, "fr", frenchPage.Lang()) assert.Equal(t, "fr", frenchPage.Lang())
} }
@ -387,8 +387,8 @@ func TestMultiSitesRebuild(t *testing.T) {
enSite := sites.Sites[0] enSite := sites.Sites[0]
frSite := sites.Sites[1] frSite := sites.Sites[1]
require.Len(t, enSite.Pages, 4) require.Len(t, enSite.regularPages, 4)
require.Len(t, frSite.Pages, 3) require.Len(t, frSite.regularPages, 3)
// Verify translations // Verify translations
assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Hello") assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Hello")
@ -414,7 +414,7 @@ func TestMultiSitesRebuild(t *testing.T) {
nil, nil,
[]fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}}, []fsnotify.Event{{Name: "content/sect/doc2.en.md", Op: fsnotify.Remove}},
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 3, "1 en removed") require.Len(t, enSite.regularPages, 3, "1 en removed")
// Check build stats // Check build stats
require.Equal(t, 1, enSite.draftCount, "Draft") require.Equal(t, 1, enSite.draftCount, "Draft")
@ -437,12 +437,12 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: "content/new1.fr.md", Op: fsnotify.Create}, {Name: "content/new1.fr.md", Op: fsnotify.Create},
}, },
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 5) require.Len(t, enSite.regularPages, 5)
require.Len(t, enSite.AllPages, 11) require.Len(t, enSite.AllPages, 30)
require.Len(t, frSite.Pages, 4) require.Len(t, frSite.regularPages, 4)
require.Equal(t, "new_fr_1", frSite.Pages[3].Title) require.Equal(t, "new_fr_1", frSite.regularPages[3].Title)
require.Equal(t, "new_en_2", enSite.Pages[0].Title) require.Equal(t, "new_en_2", enSite.regularPages[0].Title)
require.Equal(t, "new_en_1", enSite.Pages[1].Title) require.Equal(t, "new_en_1", enSite.regularPages[1].Title)
rendered := readDestination(t, "public/en/new1/index.html") rendered := readDestination(t, "public/en/new1/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered) require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
@ -457,7 +457,7 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}}, []fsnotify.Event{{Name: "content/sect/doc1.en.md", Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 5) require.Len(t, enSite.regularPages, 5)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html") doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "CHANGED"), doc1) require.True(t, strings.Contains(doc1, "CHANGED"), doc1)
@ -475,8 +475,8 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: "content/new1.en.md", Op: fsnotify.Rename}, {Name: "content/new1.en.md", Op: fsnotify.Rename},
}, },
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 5, "Rename") require.Len(t, enSite.regularPages, 5, "Rename")
require.Equal(t, "new_en_1", enSite.Pages[1].Title) require.Equal(t, "new_en_1", enSite.regularPages[1].Title)
rendered := readDestination(t, "public/en/new1renamed/index.html") rendered := readDestination(t, "public/en/new1renamed/index.html")
require.True(t, strings.Contains(rendered, "new_en_1"), rendered) require.True(t, strings.Contains(rendered, "new_en_1"), rendered)
}}, }},
@ -490,9 +490,9 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}}, []fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 5) require.Len(t, enSite.regularPages, 5)
require.Len(t, enSite.AllPages, 11) require.Len(t, enSite.AllPages, 30)
require.Len(t, frSite.Pages, 4) require.Len(t, frSite.regularPages, 4)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html") doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(doc1, "Template Changed"), doc1) require.True(t, strings.Contains(doc1, "Template Changed"), doc1)
}, },
@ -507,9 +507,9 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}}, []fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 5) require.Len(t, enSite.regularPages, 5)
require.Len(t, enSite.AllPages, 11) require.Len(t, enSite.AllPages, 30)
require.Len(t, frSite.Pages, 4) require.Len(t, frSite.regularPages, 4)
docEn := readDestination(t, "public/en/sect/doc1-slug/index.html") docEn := readDestination(t, "public/en/sect/doc1-slug/index.html")
require.True(t, strings.Contains(docEn, "Hello"), "No Hello") require.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, "public/fr/sect/doc1/index.html") docFr := readDestination(t, "public/fr/sect/doc1/index.html")
@ -531,9 +531,9 @@ func TestMultiSitesRebuild(t *testing.T) {
{Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write}, {Name: "layouts/shortcodes/shortcode.html", Op: fsnotify.Write},
}, },
func(t *testing.T) { func(t *testing.T) {
require.Len(t, enSite.Pages, 5) require.Len(t, enSite.regularPages, 5)
require.Len(t, enSite.AllPages, 11) require.Len(t, enSite.AllPages, 30)
require.Len(t, frSite.Pages, 4) require.Len(t, frSite.regularPages, 4)
assertFileContent(t, "public/fr/sect/doc1/index.html", true, "Single", "Modified Shortcode: Salut") assertFileContent(t, "public/fr/sect/doc1/index.html", true, "Single", "Modified Shortcode: Salut")
assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Single", "Modified Shortcode: Hello") assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Single", "Modified Shortcode: Hello")
}, },
@ -627,12 +627,12 @@ title = "Svenska"
require.Len(t, homeEn.Translations(), 4) require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang()) require.Equal(t, "sv", homeEn.Translations()[0].Lang())
require.Len(t, enSite.Pages, 4) require.Len(t, enSite.regularPages, 4)
require.Len(t, frSite.Pages, 3) require.Len(t, frSite.regularPages, 3)
// Verify Swedish site // Verify Swedish site
require.Len(t, svSite.Pages, 1) require.Len(t, svSite.regularPages, 1)
svPage := svSite.Pages[0] svPage := svSite.regularPages[0]
require.Equal(t, "Swedish Contentfile", svPage.Title) require.Equal(t, "Swedish Contentfile", svPage.Title)
require.Equal(t, "sv", svPage.Lang()) require.Equal(t, "sv", svPage.Lang())
require.Len(t, svPage.Translations(), 2) require.Len(t, svPage.Translations(), 2)

View file

@ -208,7 +208,7 @@ func doTestPageMenuWithIdentifier(t *testing.T, menuPageSources []source.ByteSou
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
assert.Equal(t, 3, len(s.Pages), "Not enough pages") assert.Equal(t, 3, len(s.regularPages), "Not enough pages")
me1 := findTestMenuEntryByID(s, "m1", "i1") me1 := findTestMenuEntryByID(s, "m1", "i1")
me2 := findTestMenuEntryByID(s, "m1", "i2") me2 := findTestMenuEntryByID(s, "m1", "i2")
@ -246,7 +246,7 @@ func doTestPageMenuWithDuplicateName(t *testing.T, menuPageSources []source.Byte
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
assert.Equal(t, 3, len(s.Pages), "Not enough pages") assert.Equal(t, 3, len(s.regularPages), "Not enough pages")
me1 := findTestMenuEntryByName(s, "m1", "n1") me1 := findTestMenuEntryByName(s, "m1", "n1")
me2 := findTestMenuEntryByName(s, "m1", "n2") me2 := findTestMenuEntryByName(s, "m1", "n2")
@ -264,13 +264,13 @@ func TestPageMenu(t *testing.T) {
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
if len(s.Pages) != 3 { if len(s.regularPages) != 3 {
t.Fatalf("Posts not created, expected 3 got %d", len(s.Pages)) t.Fatalf("Posts not created, expected 3 got %d", len(s.regularPages))
} }
first := s.Pages[0] first := s.regularPages[0]
second := s.Pages[1] second := s.regularPages[1]
third := s.Pages[2] third := s.regularPages[2]
pOne := findTestMenuEntryByName(s, "p_one", "One") pOne := findTestMenuEntryByName(s, "p_one", "One")
pTwo := findTestMenuEntryByID(s, "p_two", "Two") pTwo := findTestMenuEntryByID(s, "p_two", "Two")
@ -358,9 +358,9 @@ Yaml Front Matter with Menu Pages`)
{Name: filepath.FromSlash("sect/yaml1.md"), Content: ps1}, {Name: filepath.FromSlash("sect/yaml1.md"), Content: ps1},
{Name: filepath.FromSlash("sect/yaml2.md"), Content: ps2}}) {Name: filepath.FromSlash("sect/yaml2.md"), Content: ps2}})
p1 := s.Pages[0] p1 := s.regularPages[0]
assert.Len(t, p1.Menus(), 2, "List YAML") assert.Len(t, p1.Menus(), 2, "List YAML")
p2 := s.Pages[1] p2 := s.regularPages[1]
assert.Len(t, p2.Menus(), 2, "Map YAML") assert.Len(t, p2.Menus(), 2, "Map YAML")
} }

View file

@ -103,21 +103,6 @@ type Node struct {
lang string lang string
} }
// The Nodes type is temporary until we get https://github.com/spf13/hugo/issues/2297 fixed.
type Nodes []*Node
func (n Nodes) Len() int {
return len(n)
}
func (n Nodes) Less(i, j int) bool {
return n[i].language.Weight < n[j].language.Weight
}
func (n Nodes) Swap(i, j int) {
n[i], n[j] = n[j], n[i]
}
func (n *Node) Now() time.Time { func (n *Node) Now() time.Time {
return time.Now() return time.Now()
} }

View file

@ -79,7 +79,7 @@ Content Page %02d
assertFileContent(t, filepath.Join("public", "index.html"), false, assertFileContent(t, filepath.Join("public", "index.html"), false,
"Index Title: Home Sweet Home!", "Index Title: Home Sweet Home!",
"Home <strong>Content!</strong>", "Home <strong>Content!</strong>",
"# Pages: 4") "# Pages: 9")
assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01") assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
@ -170,7 +170,7 @@ func TestNodesWithNoContentFile(t *testing.T) {
require.Len(t, homePages, 1) require.Len(t, homePages, 1)
homePage := homePages[0] homePage := homePages[0]
require.Len(t, homePage.Data["Pages"], 4) require.Len(t, homePage.Data["Pages"], 9)
assertFileContent(t, filepath.Join("public", "index.html"), false, assertFileContent(t, filepath.Join("public", "index.html"), false,
"Index Title: Hugo Rocks!") "Index Title: Hugo Rocks!")

View file

@ -1277,7 +1277,7 @@ func (p *Page) prepareData(s *Site) error {
switch p.NodeType { switch p.NodeType {
case NodePage: case NodePage:
case NodeHome: case NodeHome:
p.Data["Pages"] = s.Pages p.Data["Pages"] = s.findPagesByNodeTypeNotIn(NodeHome, s.Pages)
case NodeSection: case NodeSection:
sectionData, ok := s.Sections[p.sections[0]] sectionData, ok := s.Sections[p.sections[0]]
if !ok { if !ok {

View file

@ -17,38 +17,34 @@ import (
"fmt" "fmt"
) )
// TODO(bep) np pages names // PageCollections contains the page collections for a site.
// TODO(bep) np this is a somewhat breaking change and should be doc. + release notes: See AllPages vs. "this language only". Looks like it is like this already, check.
type PageCollections struct { type PageCollections struct {
// Includes only pages of NodePage type, and only pages in the current language. // Includes only pages of all types, and only pages in the current language.
Pages Pages Pages Pages
// Includes all pages in all languages, including the current one. // Includes all pages in all languages, including the current one.
// Only pages of NodePage type. // Includes pages of all types.
AllPages Pages AllPages Pages
// Includes pages of all types, but only pages in the current language. // A convenience cache for the traditional index types, taxonomies, home page etc.
Nodes Pages
// Includes all pages in all languages, including the current one.
// Includes pages of all types.
AllNodes Pages
// A convenience cache for the traditional node types, taxonomies, home page etc.
// This is for the current language only. // This is for the current language only.
indexNodes Pages indexPages Pages
// A convenience cache for the regular pages.
// This is for the current language only.
// TODO(bep) np consider exporting this
regularPages Pages
// Includes absolute all pages (of all types), including drafts etc. // Includes absolute all pages (of all types), including drafts etc.
rawAllPages Pages rawAllPages Pages
} }
func (c *PageCollections) refreshPageCaches() { func (c *PageCollections) refreshPageCaches() {
// All pages are stored in AllNodes and Nodes. Filter from those. c.indexPages = c.findPagesByNodeTypeNotIn(NodePage, c.Pages)
c.Pages = c.findPagesByNodeTypeIn(NodePage, c.Nodes) c.regularPages = c.findPagesByNodeTypeIn(NodePage, c.Pages)
c.indexNodes = c.findPagesByNodeTypeNotIn(NodePage, c.Nodes)
c.AllPages = c.findPagesByNodeTypeIn(NodePage, c.AllNodes)
for _, n := range c.Nodes { // TODO(bep) np remove eventually
for _, n := range c.Pages {
if n.NodeType == NodeUnknown { if n.NodeType == NodeUnknown {
panic(fmt.Sprintf("Got unknown type %s", n.Title)) panic(fmt.Sprintf("Got unknown type %s", n.Title))
} }
@ -66,11 +62,11 @@ func newPageCollectionsFromPages(pages Pages) *PageCollections {
// TODO(bep) np clean and remove finders // TODO(bep) np clean and remove finders
func (c *PageCollections) findPagesByNodeType(n NodeType) Pages { func (c *PageCollections) findPagesByNodeType(n NodeType) Pages {
return c.findPagesByNodeTypeIn(n, c.Nodes) return c.findPagesByNodeTypeIn(n, c.Pages)
} }
func (c *PageCollections) getPage(n NodeType, path ...string) *Page { func (c *PageCollections) getPage(n NodeType, path ...string) *Page {
pages := c.findPagesByNodeTypeIn(n, c.Nodes) pages := c.findPagesByNodeTypeIn(n, c.Pages)
if len(pages) == 0 { if len(pages) == 0 {
return nil return nil
@ -99,7 +95,7 @@ func (c *PageCollections) getPage(n NodeType, path ...string) *Page {
} }
func (c *PageCollections) findIndexNodesByNodeType(n NodeType) Pages { func (c *PageCollections) findIndexNodesByNodeType(n NodeType) Pages {
return c.findPagesByNodeTypeIn(n, c.indexNodes) return c.findPagesByNodeTypeIn(n, c.indexPages)
} }
func (*PageCollections) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages { func (*PageCollections) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
@ -123,7 +119,7 @@ func (*PageCollections) findPagesByNodeTypeNotIn(n NodeType, inPages Pages) Page
} }
func (c *PageCollections) findAllPagesByNodeType(n NodeType) Pages { func (c *PageCollections) findAllPagesByNodeType(n NodeType) Pages {
return c.findPagesByNodeTypeIn(n, c.Nodes) return c.findPagesByNodeTypeIn(n, c.Pages)
} }
func (c *PageCollections) findRawAllPagesByNodeType(n NodeType) Pages { func (c *PageCollections) findRawAllPagesByNodeType(n NodeType) Pages {

View file

@ -630,9 +630,9 @@ func testAllMarkdownEnginesForPages(t *testing.T,
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, len(pageSources)) require.Len(t, s.regularPages, len(pageSources))
assertFunc(t, e.ext, s.Pages) assertFunc(t, e.ext, s.regularPages)
} }
@ -740,9 +740,9 @@ func TestPageWithDelimiterForMarkdownThatCrossesBorder(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, 1) require.Len(t, s.regularPages, 1)
p := s.Pages[0] p := s.regularPages[0]
if p.Summary != template.HTML("<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a rel=\"footnote\" href=\"#fn:1\">1</a></sup>\n</p>") { if p.Summary != template.HTML("<p>The <a href=\"http://gohugo.io/\">best static site generator</a>.<sup class=\"footnote-ref\" id=\"fnref:1\"><a rel=\"footnote\" href=\"#fn:1\">1</a></sup>\n</p>") {
t.Fatalf("Got summary:\n%q", p.Summary) t.Fatalf("Got summary:\n%q", p.Summary)
@ -788,9 +788,9 @@ func TestPageWithAdditionalExtension(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, 1) require.Len(t, s.regularPages, 1)
p := s.Pages[0] p := s.regularPages[0]
checkPageContent(t, p, "<p>first line.<br />\nsecond line.</p>\n\n<p>fourth line.</p>\n") checkPageContent(t, p, "<p>first line.<br />\nsecond line.</p>\n\n<p>fourth line.</p>\n")
} }
@ -802,9 +802,9 @@ func TestTableOfContents(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, 1) require.Len(t, s.regularPages, 1)
p := s.Pages[0] p := s.regularPages[0]
checkPageContent(t, p, "\n\n<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p>\n\n<h2 id=\"aa\">AA</h2>\n\n<p>I have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.</p>\n\n<h3 id=\"aaa\">AAA</h3>\n\n<p>I remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath&ndash;as swift as the passage of light&ndash;would leap after me from the pit about\nthe cylinder and strike me down. ## BB</p>\n\n<h3 id=\"bbb\">BBB</h3>\n\n<p>&ldquo;You&rsquo;re a great Granser,&rdquo; he cried delightedly, &ldquo;always making believe them little marks mean something.&rdquo;</p>\n") checkPageContent(t, p, "\n\n<p>For some moments the old man did not reply. He stood with bowed head, buried in deep thought. But at last he spoke.</p>\n\n<h2 id=\"aa\">AA</h2>\n\n<p>I have no idea, of course, how long it took me to reach the limit of the plain,\nbut at last I entered the foothills, following a pretty little canyon upward\ntoward the mountains. Beside me frolicked a laughing brooklet, hurrying upon\nits noisy way down to the silent sea. In its quieter pools I discovered many\nsmall fish, of four-or five-pound weight I should imagine. 
In appearance,\nexcept as to size and color, they were not unlike the whale of our own seas. As\nI watched them playing about I discovered, not only that they suckled their\nyoung, but that at intervals they rose to the surface to breathe as well as to\nfeed upon certain grasses and a strange, scarlet lichen which grew upon the\nrocks just above the water line.</p>\n\n<h3 id=\"aaa\">AAA</h3>\n\n<p>I remember I felt an extraordinary persuasion that I was being played with,\nthat presently, when I was upon the very verge of safety, this mysterious\ndeath&ndash;as swift as the passage of light&ndash;would leap after me from the pit about\nthe cylinder and strike me down. ## BB</p>\n\n<h3 id=\"bbb\">BBB</h3>\n\n<p>&ldquo;You&rsquo;re a great Granser,&rdquo; he cried delightedly, &ldquo;always making believe them little marks mean something.&rdquo;</p>\n")
checkPageTOC(t, p, "<nav id=\"TableOfContents\">\n<ul>\n<li>\n<ul>\n<li><a href=\"#aa\">AA</a>\n<ul>\n<li><a href=\"#aaa\">AAA</a></li>\n<li><a href=\"#bbb\">BBB</a></li>\n</ul></li>\n</ul></li>\n</ul>\n</nav>") checkPageTOC(t, p, "<nav id=\"TableOfContents\">\n<ul>\n<li>\n<ul>\n<li><a href=\"#aa\">AA</a>\n<ul>\n<li><a href=\"#aaa\">AAA</a></li>\n<li><a href=\"#bbb\">BBB</a></li>\n</ul></li>\n</ul></li>\n</ul>\n</nav>")
@ -832,9 +832,9 @@ func TestPageWithDate(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, 1) require.Len(t, s.regularPages, 1)
p := s.Pages[0] p := s.regularPages[0]
d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z") d, _ := time.Parse(time.RFC3339, "2013-05-17T16:59:30Z")
checkPageDate(t, p, d) checkPageDate(t, p, d)
@ -1266,9 +1266,9 @@ func TestChompBOM(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, 1) require.Len(t, s.regularPages, 1)
p := s.Pages[0] p := s.regularPages[0]
checkPageTitle(t, p, "Simple") checkPageTitle(t, p, "Simple")
} }

View file

@ -69,9 +69,9 @@ title: "Title"
t.Fatalf("No error from shortcode") t.Fatalf("No error from shortcode")
} }
require.Len(t, h.Sites[0].Pages, 1) require.Len(t, h.Sites[0].regularPages, 1)
output := strings.TrimSpace(string(h.Sites[0].Pages[0].Content)) output := strings.TrimSpace(string(h.Sites[0].regularPages[0].Content))
if strings.HasPrefix(output, "<p>") { if strings.HasPrefix(output, "<p>") {
output = output[3:] output = output[3:]
} }

View file

@ -1327,14 +1327,14 @@ func (s *Site) buildSiteMeta() (err error) {
s.assembleMenus() s.assembleMenus()
if len(s.Nodes) == 0 { if len(s.Pages) == 0 {
return return
} }
s.assembleTaxonomies() s.assembleTaxonomies()
// TODO(bep) np // TODO(bep) np
for _, p := range s.AllNodes { for _, p := range s.AllPages {
// setNodeTypeVars needs taxonomies // setNodeTypeVars needs taxonomies
p.setNodeTypeVars(s) p.setNodeTypeVars(s)
} }
@ -1343,7 +1343,7 @@ func (s *Site) buildSiteMeta() (err error) {
s.assembleSections() s.assembleSections()
// TODO(bep) np Site.LastMod // TODO(bep) np Site.LastMod
pages := s.Nodes pages := s.Pages
s.Info.LastChange = pages[0].Lastmod s.Info.LastChange = pages[0].Lastmod
return return
@ -1418,7 +1418,7 @@ func (s *Site) assembleMenus() {
sectionPagesMenu := s.Language.GetString("SectionPagesMenu") sectionPagesMenu := s.Language.GetString("SectionPagesMenu")
sectionPagesMenus := make(map[string]interface{}) sectionPagesMenus := make(map[string]interface{})
//creating flat hash //creating flat hash
pages := s.Nodes pages := s.Pages
for _, p := range pages { for _, p := range pages {
if sectionPagesMenu != "" { if sectionPagesMenu != "" {
@ -1486,7 +1486,7 @@ func (s *Site) assembleTaxonomies() {
s.Taxonomies[plural] = make(Taxonomy) s.Taxonomies[plural] = make(Taxonomy)
s.taxonomiesPluralSingular[plural] = singular s.taxonomiesPluralSingular[plural] = singular
for _, p := range s.Nodes { for _, p := range s.Pages {
vals := p.getParam(plural, !s.Info.preserveTaxonomyNames) vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
weight := p.GetParam(plural + "_weight") weight := p.GetParam(plural + "_weight")
if weight == nil { if weight == nil {
@ -1574,7 +1574,7 @@ func (s *Site) nodeTypeFromSections(sections []string) NodeType {
func (s *Site) preparePages() error { func (s *Site) preparePages() error {
var errors []error var errors []error
for _, p := range s.Nodes { for _, p := range s.Pages {
if err := p.prepareLayouts(); err != nil { if err := p.prepareLayouts(); err != nil {
errors = append(errors, err) errors = append(errors, err)
} }

View file

@ -30,7 +30,7 @@ func TestEncodePage(t *testing.T) {
_, err := json.Marshal(s) _, err := json.Marshal(s)
check(t, err) check(t, err)
_, err = json.Marshal(s.Pages[0]) _, err = json.Marshal(s.regularPages[0])
check(t, err) check(t, err)
} }

View file

@ -45,7 +45,7 @@ func (s *Site) renderPages() error {
go pageRenderer(s, pages, results, wg) go pageRenderer(s, pages, results, wg)
} }
for _, page := range s.Nodes { for _, page := range s.Pages {
pages <- page pages <- page
} }
@ -183,7 +183,7 @@ func (s *Site) renderSitemap() error {
n := s.newNodePage(NodeSitemap) n := s.newNodePage(NodeSitemap)
// Include all pages (regular, home page, taxonomies etc.) // Include all pages (regular, home page, taxonomies etc.)
pages := s.Nodes pages := s.Pages
page := s.newNodePage(NodeSitemap) page := s.newNodePage(NodeSitemap)
page.URLPath.URL = "" page.URLPath.URL = ""
@ -239,7 +239,7 @@ func (s *Site) renderRobotsTXT() error {
// renderAliases renders shell pages that simply have a redirect in the header. // renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error { func (s *Site) renderAliases() error {
for _, p := range s.Nodes { for _, p := range s.Pages {
if len(p.Aliases) == 0 { if len(p.Aliases) == 0 {
continue continue
} }

View file

@ -91,9 +91,9 @@ func TestDegenerateRenderThingMissingTemplate(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
require.Len(t, s.Pages, 1) require.Len(t, s.regularPages, 1)
p := s.Pages[0] p := s.regularPages[0]
err := s.renderThing(p, "foobar", nil) err := s.renderThing(p, "foobar", nil)
if err == nil { if err == nil {
@ -142,14 +142,14 @@ func TestDraftAndFutureRender(t *testing.T) {
// Testing Defaults.. Only draft:true and publishDate in the past should be rendered // Testing Defaults.. Only draft:true and publishDate in the past should be rendered
s := siteSetup(t) s := siteSetup(t)
if len(s.AllPages) != 1 { if len(s.regularPages) != 1 {
t.Fatal("Draft or Future dated content published unexpectedly") t.Fatal("Draft or Future dated content published unexpectedly")
} }
// only publishDate in the past should be rendered // only publishDate in the past should be rendered
viper.Set("buildDrafts", true) viper.Set("buildDrafts", true)
s = siteSetup(t) s = siteSetup(t)
if len(s.AllPages) != 2 { if len(s.regularPages) != 2 {
t.Fatal("Future Dated Posts published unexpectedly") t.Fatal("Future Dated Posts published unexpectedly")
} }
@ -157,7 +157,7 @@ func TestDraftAndFutureRender(t *testing.T) {
viper.Set("buildDrafts", false) viper.Set("buildDrafts", false)
viper.Set("buildFuture", true) viper.Set("buildFuture", true)
s = siteSetup(t) s = siteSetup(t)
if len(s.AllPages) != 2 { if len(s.regularPages) != 2 {
t.Fatal("Draft posts published unexpectedly") t.Fatal("Draft posts published unexpectedly")
} }
@ -165,7 +165,7 @@ func TestDraftAndFutureRender(t *testing.T) {
viper.Set("buildDrafts", true) viper.Set("buildDrafts", true)
viper.Set("buildFuture", true) viper.Set("buildFuture", true)
s = siteSetup(t) s = siteSetup(t)
if len(s.AllPages) != 4 { if len(s.regularPages) != 4 {
t.Fatal("Drafts or Future posts not included as expected") t.Fatal("Drafts or Future posts not included as expected")
} }
@ -201,11 +201,11 @@ func TestFutureExpirationRender(t *testing.T) {
s := siteSetup(t) s := siteSetup(t)
if len(s.AllPages) != 1 { if len(s.AllPages) != 1 {
if len(s.AllPages) > 1 { if len(s.regularPages) > 1 {
t.Fatal("Expired content published unexpectedly") t.Fatal("Expired content published unexpectedly")
} }
if len(s.AllPages) < 1 { if len(s.regularPages) < 1 {
t.Fatal("Valid content expired unexpectedly") t.Fatal("Valid content expired unexpectedly")
} }
} }
@ -285,7 +285,7 @@ THE END.`, refShortcode)),
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
if len(s.AllPages) != 3 { if len(s.regularPages) != 3 {
t.Fatalf("Expected 3 got %d pages", len(s.AllPages)) t.Fatalf("Expected 3 got %d pages", len(s.AllPages))
} }
@ -377,7 +377,7 @@ func doTestShouldAlwaysHaveUglyURLs(t *testing.T, uglyURLs bool) {
{filepath.FromSlash("public/ugly.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>doc2 <em>content</em></p>\n"}, {filepath.FromSlash("public/ugly.html"), "\n\n<h1 id=\"title\">title</h1>\n\n<p>doc2 <em>content</em></p>\n"},
} }
for _, p := range s.Pages { for _, p := range s.regularPages {
assert.False(t, p.IsHome()) assert.False(t, p.IsHome())
} }
@ -649,7 +649,7 @@ func TestOrderedPages(t *testing.T) {
t.Errorf("Pages in unexpected order. Second should be '%s', got '%s'", "Three", s.Sections["sect"][1].Page.Title) t.Errorf("Pages in unexpected order. Second should be '%s', got '%s'", "Three", s.Sections["sect"][1].Page.Title)
} }
bydate := s.Pages.ByDate() bydate := s.regularPages.ByDate()
if bydate[0].Title != "One" { if bydate[0].Title != "One" {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title) t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bydate[0].Title)
@ -660,7 +660,7 @@ func TestOrderedPages(t *testing.T) {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title) t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rev[0].Title)
} }
bypubdate := s.Pages.ByPublishDate() bypubdate := s.regularPages.ByPublishDate()
if bypubdate[0].Title != "One" { if bypubdate[0].Title != "One" {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title) t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bypubdate[0].Title)
@ -671,7 +671,7 @@ func TestOrderedPages(t *testing.T) {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title) t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "Three", rbypubdate[0].Title)
} }
bylength := s.Pages.ByLength() bylength := s.regularPages.ByLength()
if bylength[0].Title != "One" { if bylength[0].Title != "One" {
t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title) t.Errorf("Pages in unexpected order. First should be '%s', got '%s'", "One", bylength[0].Title)
} }
@ -710,7 +710,7 @@ func TestGroupedPages(t *testing.T) {
t.Fatalf("Failed to build site: %s", err) t.Fatalf("Failed to build site: %s", err)
} }
rbysection, err := s.Pages.GroupBy("Section", "desc") rbysection, err := s.regularPages.GroupBy("Section", "desc")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }
@ -730,7 +730,7 @@ func TestGroupedPages(t *testing.T) {
t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages)) t.Errorf("PageGroup has unexpected number of pages. Third group should have '%d' pages, got '%d' pages", 2, len(rbysection[2].Pages))
} }
bytype, err := s.Pages.GroupBy("Type", "asc") bytype, err := s.regularPages.GroupBy("Type", "asc")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }
@ -750,7 +750,7 @@ func TestGroupedPages(t *testing.T) {
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages)) t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bytype[2].Pages))
} }
bydate, err := s.Pages.GroupByDate("2006-01", "asc") bydate, err := s.regularPages.GroupByDate("2006-01", "asc")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }
@ -770,7 +770,7 @@ func TestGroupedPages(t *testing.T) {
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bydate[2].Pages)) t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(bydate[2].Pages))
} }
bypubdate, err := s.Pages.GroupByPublishDate("2006") bypubdate, err := s.regularPages.GroupByPublishDate("2006")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }
@ -787,7 +787,7 @@ func TestGroupedPages(t *testing.T) {
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages)) t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 3, len(bypubdate[0].Pages))
} }
byparam, err := s.Pages.GroupByParam("my_param", "desc") byparam, err := s.regularPages.GroupByParam("my_param", "desc")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }
@ -807,12 +807,12 @@ func TestGroupedPages(t *testing.T) {
t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages)) t.Errorf("PageGroup has unexpected number of pages. First group should have '%d' pages, got '%d' pages", 2, len(byparam[0].Pages))
} }
_, err = s.Pages.GroupByParam("not_exist") _, err = s.regularPages.GroupByParam("not_exist")
if err == nil { if err == nil {
t.Errorf("GroupByParam didn't return an expected error") t.Errorf("GroupByParam didn't return an expected error")
} }
byOnlyOneParam, err := s.Pages.GroupByParam("only_one") byOnlyOneParam, err := s.regularPages.GroupByParam("only_one")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }
@ -823,7 +823,7 @@ func TestGroupedPages(t *testing.T) {
t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "yes", byOnlyOneParam[0].Key) t.Errorf("PageGroup array in unexpected order. First group key should be '%s', got '%s'", "yes", byOnlyOneParam[0].Key)
} }
byParamDate, err := s.Pages.GroupByParamDate("my_date", "2006-01") byParamDate, err := s.regularPages.GroupByParamDate("my_date", "2006-01")
if err != nil { if err != nil {
t.Fatalf("Unable to make PageGroup array: %s", err) t.Fatalf("Unable to make PageGroup array: %s", err)
} }