node to page: Add Pages to Page

As an alias to .Data.Pages for home page etc.

Also rename NodeType to PageType and make it a string so it can be used in `where`.

Updates #2297
This commit is contained in:
Bjørn Erik Pedersen 2016-11-11 09:19:16 +01:00
parent 063b78d2ec
commit c80308e6b3
13 changed files with 204 additions and 209 deletions

View file

@ -196,8 +196,8 @@ func (h *HugoSites) renderCrossSitesArtifacts() error {
func (h *HugoSites) assignMissingTranslations() error {
// This looks heavy, but it should be a small number of nodes by now.
allPages := h.findAllPagesByNodeTypeNotIn(NodePage)
for _, nodeType := range []NodeType{NodeHome, NodeSection, NodeTaxonomy, NodeTaxonomyTerms} {
allPages := h.findAllPagesByNodeTypeNotIn(PagePage)
for _, nodeType := range []PageType{PageHome, PageSection, PageTaxonomy, PageTaxonomyTerm} {
nodes := h.findPagesByNodeTypeIn(nodeType, allPages)
// Assign translations
@ -223,7 +223,7 @@ func (h *HugoSites) createMissingPages() error {
for _, s := range h.Sites {
// home pages
home := s.findPagesByNodeType(NodeHome)
home := s.findPagesByNodeType(PageHome)
if len(home) > 1 {
panic("Too many homes")
}
@ -236,8 +236,8 @@ func (h *HugoSites) createMissingPages() error {
// taxonomy list and terms pages
taxonomies := s.Language.GetStringMapString("taxonomies")
if len(taxonomies) > 0 {
taxonomyPages := s.findPagesByNodeType(NodeTaxonomy)
taxonomyTermsPages := s.findPagesByNodeType(NodeTaxonomyTerms)
taxonomyPages := s.findPagesByNodeType(PageTaxonomy)
taxonomyTermsPages := s.findPagesByNodeType(PageTaxonomyTerm)
for _, plural := range taxonomies {
tax := s.Taxonomies[plural]
foundTaxonomyPage := false
@ -271,7 +271,7 @@ func (h *HugoSites) createMissingPages() error {
}
}
sectionPages := s.findPagesByNodeType(NodeSection)
sectionPages := s.findPagesByNodeType(PageSection)
if len(sectionPages) < len(s.Sections) {
for name, section := range s.Sections {
// A section may be created for the root content folder if a
@ -309,12 +309,12 @@ func (h *HugoSites) createMissingPages() error {
// TODO(bep) np move
// Move the new* methods after cleanup in site.go
func (s *Site) newNodePage(typ NodeType) *Page {
func (s *Site) newNodePage(typ PageType) *Page {
return &Page{
PageType: typ,
Node: Node{
Date: s.Info.LastChange,
Lastmod: s.Info.LastChange,
NodeType: typ,
Data: make(map[string]interface{}),
Site: &s.Info,
language: s.Language,
@ -322,9 +322,11 @@ func (s *Site) newNodePage(typ NodeType) *Page {
}
func (s *Site) newHomePage() *Page {
p := s.newNodePage(NodeHome)
p := s.newNodePage(PageHome)
p.Title = s.Info.Title
p.Data["Pages"] = Pages{}
pages := Pages{}
p.Data["Pages"] = pages
p.Pages = pages
s.setPageURLs(p, "/")
// TODO(bep) np check Data pages
// TODO(bep) np check setURLs
@ -339,7 +341,7 @@ func (s *Site) setPageURLs(p *Page, in string) {
func (s *Site) newTaxonomyPage(plural, key string) *Page {
p := s.newNodePage(NodeTaxonomy)
p := s.newNodePage(PageTaxonomy)
p.sections = []string{plural, key}
@ -361,7 +363,7 @@ func (s *Site) newTaxonomyPage(plural, key string) *Page {
func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
p := s.newNodePage(NodeSection)
p := s.newNodePage(PageSection)
p.sections = []string{name}
sectionName := name
@ -380,7 +382,7 @@ func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
}
func (s *Site) newTaxonomyTermsPage(plural string) *Page {
p := s.newNodePage(NodeTaxonomyTerms)
p := s.newNodePage(PageTaxonomyTerm)
p.sections = []string{plural}
p.Title = strings.Title(plural)
s.setPageURLs(p, plural)
@ -571,19 +573,19 @@ func (s *Site) updateBuildStats(page *Page) {
}
// TODO(bep) np remove
func (h *HugoSites) findAllPagesByNodeType(n NodeType) Pages {
func (h *HugoSites) findAllPagesByNodeType(n PageType) Pages {
return h.Sites[0].findAllPagesByNodeType(n)
}
func (h *HugoSites) findPagesByNodeTypeNotIn(n NodeType, inPages Pages) Pages {
func (h *HugoSites) findPagesByNodeTypeNotIn(n PageType, inPages Pages) Pages {
return h.Sites[0].findPagesByNodeTypeNotIn(n, inPages)
}
func (h *HugoSites) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
func (h *HugoSites) findPagesByNodeTypeIn(n PageType, inPages Pages) Pages {
return h.Sites[0].findPagesByNodeTypeIn(n, inPages)
}
func (h *HugoSites) findAllPagesByNodeTypeNotIn(n NodeType) Pages {
func (h *HugoSites) findAllPagesByNodeTypeNotIn(n PageType) Pages {
return h.findPagesByNodeTypeNotIn(n, h.Sites[0].AllPages)
}

View file

@ -297,7 +297,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
assertFileContent(t, "public/en/sect/doc1-slug/index.html", true, "Single", "Shortcode: Hello")
// Check node translations
homeEn := enSite.getPage(NodeHome)
homeEn := enSite.getPage(PageHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
@ -307,7 +307,7 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
require.Equal(t, "På bokmål", homeEn.Translations()[2].Title, configSuffix)
require.Equal(t, "Bokmål", homeEn.Translations()[2].Language().LanguageName, configSuffix)
sectFr := frSite.getPage(NodeSection, "sect")
sectFr := frSite.getPage(PageSection, "sect")
require.NotNil(t, sectFr)
require.Equal(t, "fr", sectFr.Lang())
@ -317,12 +317,12 @@ func doTestMultiSitesBuild(t *testing.T, configTemplate, configSuffix string) {
nnSite := sites.Sites[2]
require.Equal(t, "nn", nnSite.Language.Lang)
taxNn := nnSite.getPage(NodeTaxonomyTerms, "lag")
taxNn := nnSite.getPage(PageTaxonomyTerm, "lag")
require.NotNil(t, taxNn)
require.Len(t, taxNn.Translations(), 1)
require.Equal(t, "nb", taxNn.Translations()[0].Lang())
taxTermNn := nnSite.getPage(NodeTaxonomy, "lag", "sogndal")
taxTermNn := nnSite.getPage(PageTaxonomy, "lag", "sogndal")
require.NotNil(t, taxTermNn)
require.Len(t, taxTermNn.Translations(), 1)
require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
@ -515,7 +515,7 @@ func TestMultiSitesRebuild(t *testing.T) {
docFr := readDestination(t, "public/fr/sect/doc1/index.html")
require.True(t, strings.Contains(docFr, "Salut"), "No Salut")
homeEn := enSite.getPage(NodeHome)
homeEn := enSite.getPage(PageHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
@ -622,7 +622,7 @@ title = "Svenska"
require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
homeEn := enSite.getPage(NodeHome)
homeEn := enSite.getPage(PageHome)
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang())

View file

@ -415,11 +415,11 @@ func doTestSectionPagesMenu(canonifyURLs bool, t *testing.T) {
fishySectionPages := s.Sections["fish-and-chips"]
assert.Equal(t, 1, len(fishySectionPages))
nodeFirst := s.getPage(NodeSection, "first")
nodeFirst := s.getPage(PageSection, "first")
require.NotNil(t, nodeFirst)
nodeSecond := s.getPage(NodeSection, "second-section")
nodeSecond := s.getPage(PageSection, "second-section")
require.NotNil(t, nodeSecond)
nodeFishy := s.getPage(NodeSection, "fish-and-chips")
nodeFishy := s.getPage(PageSection, "fish-and-chips")
require.Equal(t, "fish-and-chips", nodeFishy.sections[0])
firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
@ -555,7 +555,7 @@ func TestHomeNodeMenu(t *testing.T) {
s := setupMenuTests(t, menuPageSources)
home := s.getPage(NodeHome)
home := s.getPage(PageHome)
homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}

View file

@ -29,59 +29,7 @@ import (
// TODO(bep) np clean up node vs page
type NodeType int
const (
NodePage NodeType = iota
// Temporary state.
NodeUnknown
// The rest are node types; home page, sections etc.
NodeHome
NodeSection
NodeTaxonomy
NodeTaxonomyTerms
// The following are (currently) temporary nodes,
// i.e. nodes we create just to render in isolation.
NodeSitemap
NodeRobotsTXT
Node404
)
func (p NodeType) String() string {
switch p {
case NodePage:
return "page"
case NodeHome:
return "home page"
case NodeSection:
return "section list"
case NodeTaxonomy:
return "taxonomy list"
case NodeTaxonomyTerms:
return "taxonomy terms"
case NodeSitemap:
return "sitemap"
case NodeRobotsTXT:
return "robots.txt"
case Node404:
return "404 Not Found"
case NodeUnknown:
return "unknown"
default:
return "invalid value"
}
}
func (p NodeType) IsNode() bool {
return p >= NodeHome
}
type Node struct {
NodeType NodeType
RSSLink template.HTML
Site *SiteInfo `json:"-"`
// layout string
@ -181,18 +129,6 @@ func (n *Node) RSSlink() template.HTML {
return n.RSSLink
}
func (n *Node) IsNode() bool {
return n.NodeType.IsNode()
}
func (n *Node) IsHome() bool {
return n.NodeType == NodeHome
}
func (n *Node) IsPage() bool {
return n.NodeType == NodePage
}
func (n *Node) Ref(ref string) (string, error) {
return n.Site.Ref(ref, nil)
}
@ -240,7 +176,7 @@ func (n *Node) Lang() string {
}
func (p *Page) isTranslation(candidate *Page) bool {
if p == candidate || p.NodeType != candidate.NodeType {
if p == candidate || p.PageType != candidate.PageType {
return false
}
@ -248,7 +184,7 @@ func (p *Page) isTranslation(candidate *Page) bool {
return false
}
if p.NodeType == NodePage || p.NodeType == NodeUnknown {
if p.PageType == PagePage || p.PageType == pageUnknown {
panic("Node type not currently supported for this op")
}
@ -355,41 +291,41 @@ func sectionsFromFilename(filename string) []string {
}
// TODO(bep) np node identifier
func nodeTypeFromFilename(filename string) NodeType {
func nodeTypeFromFilename(filename string) PageType {
if !strings.Contains(filename, "_index") {
return NodePage
return PagePage
}
if strings.HasPrefix(filename, "_index") {
return NodeHome
return PageHome
}
// We don't know enough yet to determine the type.
return NodeUnknown
return pageUnknown
}
func (p *Page) setNodeTypeVars(s *Site) {
// TODO(bep) np taxonomies etc.
if p.NodeType == NodeUnknown {
if p.PageType == pageUnknown {
// This is either a taxonomy list, taxonomy term or a section
nodeType := s.nodeTypeFromSections(p.sections)
if nodeType == NodeUnknown {
if nodeType == pageUnknown {
panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
}
p.NodeType = nodeType
p.PageType = nodeType
}
// TODO(bep) np node URL
// Set Node URL
switch p.NodeType {
case NodeHome:
switch p.PageType {
case PageHome:
p.URLPath.URL = ""
case NodeSection:
case PageSection:
p.URLPath.URL = p.sections[0]
case NodeTaxonomy:
case PageTaxonomy:
p.URLPath.URL = path.Join(p.sections...)
case NodeTaxonomyTerms:
case PageTaxonomyTerm:
p.URLPath.URL = path.Join(p.sections...)
}

View file

@ -84,7 +84,7 @@ Content Page %02d
assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01")
h := s.owner
nodes := h.findAllPagesByNodeType(NodeHome)
nodes := h.findAllPagesByNodeType(PageHome)
require.Len(t, nodes, 1)
home := nodes[0]
@ -93,7 +93,7 @@ Content Page %02d
require.True(t, home.IsNode())
require.False(t, home.IsPage())
pages := h.findAllPagesByNodeType(NodePage)
pages := h.findAllPagesByNodeType(PagePage)
require.Len(t, pages, 4)
first := pages[0]
@ -115,7 +115,7 @@ Content Page %02d
assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false,
"Pag: Page 02")
sections := h.findAllPagesByNodeType(NodeSection)
sections := h.findAllPagesByNodeType(PageSection)
require.Len(t, sections, 2)
@ -166,11 +166,12 @@ func TestNodesWithNoContentFile(t *testing.T) {
}
// Home page
homePages := s.findIndexNodesByNodeType(NodeHome)
homePages := s.findIndexNodesByNodeType(PageHome)
require.Len(t, homePages, 1)
homePage := homePages[0]
require.Len(t, homePage.Data["Pages"], 9)
require.Len(t, homePage.Pages, 9) // Alias
assertFileContent(t, filepath.Join("public", "index.html"), false,
"Index Title: Hugo Rocks!")

View file

@ -1,40 +0,0 @@
// Copyright 2016-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"testing"
"time"
)
func TestNodeSimpleMethods(t *testing.T) {
for i, this := range []struct {
assertFunc func(n *Node) bool
}{
{func(n *Node) bool { return n.IsNode() }},
{func(n *Node) bool { return !n.IsPage() }},
{func(n *Node) bool { return n.RSSlink() == "rssLink" }},
{func(n *Node) bool { return n.Scratch() != nil }},
{func(n *Node) bool { return n.Hugo() != nil }},
{func(n *Node) bool { return n.Now().Unix() == time.Now().Unix() }},
} {
n := &Node{NodeType: NodeHome}
n.RSSLink = "rssLink"
if !this.assertFunc(n) {
t.Errorf("[%d] Node method error", i)
}
}
}

View file

@ -49,7 +49,46 @@ var (
cjk = regexp.MustCompile(`\p{Han}|\p{Hangul}|\p{Hiragana}|\p{Katakana}`)
)
// PageType is the discriminator that identifies the different page types
// in the different page collections. This can, as an example, be used
// to filter regular pages, find sections etc.
// NOTE: The exported constants below are used to filter pages from
// templates in the wild, so do not change the values!
type PageType string
const (
PagePage PageType = "page"
// The rest are node types; home page, sections etc.
PageHome PageType = "home"
PageSection PageType = "section"
PageTaxonomy PageType = "taxonomy"
PageTaxonomyTerm PageType = "taxonomyTerm"
// Temporary state.
pageUnknown PageType = "unknown"
// The following are (currently) temporary nodes,
// i.e. nodes we create just to render in isolation.
pageSitemap PageType = "sitemap"
pageRobotsTXT PageType = "robotsTXT"
page404 PageType = "404"
)
func (p PageType) IsNode() bool {
return p != PagePage
}
type Page struct {
PageType PageType
// Since Hugo 0.18 we got rid of the Node type. So now all pages are ...
// pages (regular pages, home page, sections etc.).
// Sections etc. will have child pages. These were earlier placed in .Data.Pages,
// but can now also be fetched more intuitively directly from .Pages.
// This collection will be nil for regular pages.
Pages Pages
Params map[string]interface{}
Content template.HTML
Summary template.HTML
@ -146,6 +185,18 @@ func (*PageMeta) ReadingTime() int {
return 0
}
func (p *Page) IsNode() bool {
return p.PageType.IsNode()
}
func (p *Page) IsHome() bool {
return p.PageType == PageHome
}
func (p *Page) IsPage() bool {
return p.PageType == PagePage
}
type Position struct {
Prev *Page
Next *Page
@ -430,9 +481,11 @@ func (p *Page) getRenderingConfig() *helpers.Blackfriday {
}
func newPage(filename string) *Page {
page := Page{contentType: "",
page := Page{
PageType: nodeTypeFromFilename(filename),
contentType: "",
Source: Source{File: *source.NewFile(filename)},
Node: Node{NodeType: nodeTypeFromFilename(filename), Keywords: []string{}, Sitemap: Sitemap{Priority: -1}},
Node: Node{Keywords: []string{}, Sitemap: Sitemap{Priority: -1}},
Params: make(map[string]interface{}),
translations: make(Pages, 0),
sections: sectionsFromFilename(filename),
@ -468,16 +521,16 @@ func (p *Page) layouts(l ...string) []string {
}
// TODO(bep) np taxonomy etc.
switch p.NodeType {
case NodeHome:
switch p.PageType {
case PageHome:
return []string{"index.html", "_default/list.html"}
case NodeSection:
case PageSection:
section := p.sections[0]
return []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}
case NodeTaxonomy:
case PageTaxonomy:
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"}
case NodeTaxonomyTerms:
case PageTaxonomyTerm:
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
return []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
}
@ -502,18 +555,18 @@ func (p *Page) layouts(l ...string) []string {
// rssLayouts returns RSS layouts to use for the RSS version of this page, nil
// if no RSS should be rendered.
func (p *Page) rssLayouts() []string {
switch p.NodeType {
case NodeHome:
switch p.PageType {
case PageHome:
return []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
case NodeSection:
case PageSection:
section := p.sections[0]
return []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
case NodeTaxonomy:
case PageTaxonomy:
singular := p.site.taxonomiesPluralSingular[p.sections[0]]
return []string{"taxonomy/" + singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
case NodeTaxonomyTerms:
case PageTaxonomyTerm:
// No RSS for taxonomy terms
case NodePage:
case PagePage:
// No RSS for regular pages
}
@ -723,7 +776,7 @@ func (p *Page) IsExpired() bool {
func (p *Page) Permalink() (string, error) {
// TODO(bep) np permalink
if p.NodeType.IsNode() {
if p.PageType.IsNode() {
return p.Node.Permalink(), nil
}
link, err := p.permalink()
@ -961,7 +1014,7 @@ func (p *Page) getParam(key string, stringToLower bool) interface{} {
func (p *Page) HasMenuCurrent(menu string, me *MenuEntry) bool {
// TODO(bep) np menu
if p.NodeType.IsNode() {
if p.PageType.IsNode() {
return p.Node.HasMenuCurrent(menu, me)
}
menus := p.Menus()
@ -991,7 +1044,7 @@ func (p *Page) HasMenuCurrent(menu string, me *MenuEntry) bool {
func (p *Page) IsMenuCurrent(menu string, inme *MenuEntry) bool {
// TODO(bep) np menu
if p.NodeType.IsNode() {
if p.PageType.IsNode() {
return p.Node.IsMenuCurrent(menu, inme)
}
menus := p.Menus()
@ -1200,14 +1253,14 @@ func (p *Page) FullFilePath() string {
func (p *Page) TargetPath() (outfile string) {
// TODO(bep) np
switch p.NodeType {
case NodeHome:
switch p.PageType {
case PageHome:
return p.addLangFilepathPrefix(helpers.FilePathSeparator)
case NodeSection:
case PageSection:
return p.addLangFilepathPrefix(p.sections[0])
case NodeTaxonomy:
case PageTaxonomy:
return p.addLangFilepathPrefix(filepath.Join(p.sections...))
case NodeTaxonomyTerms:
case PageTaxonomyTerm:
return p.addLangFilepathPrefix(filepath.Join(p.sections...))
}
@ -1252,7 +1305,7 @@ func (p *Page) TargetPath() (outfile string) {
func (p *Page) prepareLayouts() error {
// TODO(bep): Check the IsRenderable logic.
if p.NodeType == NodePage {
if p.PageType == PagePage {
var layouts []string
if !p.IsRenderable() {
self := "__" + p.TargetPath()
@ -1273,18 +1326,20 @@ func (p *Page) prepareLayouts() error {
// TODO(bep) np naming, move some
func (p *Page) prepareData(s *Site) error {
var pages Pages
p.Data = make(map[string]interface{})
switch p.NodeType {
case NodePage:
case NodeHome:
p.Data["Pages"] = s.findPagesByNodeTypeNotIn(NodeHome, s.Pages)
case NodeSection:
switch p.PageType {
case PagePage:
case PageHome:
pages = s.findPagesByNodeTypeNotIn(PageHome, s.Pages)
case PageSection:
sectionData, ok := s.Sections[p.sections[0]]
if !ok {
return fmt.Errorf("Data for section %s not found", p.Section())
}
p.Data["Pages"] = sectionData.Pages()
case NodeTaxonomy:
pages = sectionData.Pages()
case PageTaxonomy:
plural := p.sections[0]
term := p.sections[1]
@ -1294,8 +1349,8 @@ func (p *Page) prepareData(s *Site) error {
p.Data[singular] = taxonomy
p.Data["Singular"] = singular
p.Data["Plural"] = plural
p.Data["Pages"] = taxonomy.Pages()
case NodeTaxonomyTerms:
pages = taxonomy.Pages()
case PageTaxonomyTerm:
plural := p.sections[0]
singular := s.taxonomiesPluralSingular[plural]
@ -1307,6 +1362,9 @@ func (p *Page) prepareData(s *Site) error {
p.Data["Index"] = p.Data["Terms"]
}
p.Data["Pages"] = pages
p.Pages = pages
return nil
}
@ -1315,7 +1373,7 @@ func (p *Page) prepareData(s *Site) error {
// the paginators etc., we do it manually here.
// TODO(bep) np do better
func (p *Page) copy() *Page {
c := &Page{Node: Node{NodeType: p.NodeType, Site: p.Site}}
c := &Page{PageType: p.PageType, Node: Node{Site: p.Site}}
c.Title = p.Title
c.Data = p.Data
c.Date = p.Date

View file

@ -40,12 +40,12 @@ type PageCollections struct {
}
func (c *PageCollections) refreshPageCaches() {
c.indexPages = c.findPagesByNodeTypeNotIn(NodePage, c.Pages)
c.regularPages = c.findPagesByNodeTypeIn(NodePage, c.Pages)
c.indexPages = c.findPagesByNodeTypeNotIn(PagePage, c.Pages)
c.regularPages = c.findPagesByNodeTypeIn(PagePage, c.Pages)
// TODO(bep) np remove eventually
for _, n := range c.Pages {
if n.NodeType == NodeUnknown {
if n.PageType == pageUnknown {
panic(fmt.Sprintf("Got unknown type %s", n.Title))
}
}
@ -61,11 +61,11 @@ func newPageCollectionsFromPages(pages Pages) *PageCollections {
// TODO(bep) np clean and remove finders
func (c *PageCollections) findPagesByNodeType(n NodeType) Pages {
func (c *PageCollections) findPagesByNodeType(n PageType) Pages {
return c.findPagesByNodeTypeIn(n, c.Pages)
}
func (c *PageCollections) getPage(n NodeType, path ...string) *Page {
func (c *PageCollections) getPage(n PageType, path ...string) *Page {
pages := c.findPagesByNodeTypeIn(n, c.Pages)
if len(pages) == 0 {
@ -94,35 +94,35 @@ func (c *PageCollections) getPage(n NodeType, path ...string) *Page {
return nil
}
func (c *PageCollections) findIndexNodesByNodeType(n NodeType) Pages {
func (c *PageCollections) findIndexNodesByNodeType(n PageType) Pages {
return c.findPagesByNodeTypeIn(n, c.indexPages)
}
func (*PageCollections) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
func (*PageCollections) findPagesByNodeTypeIn(n PageType, inPages Pages) Pages {
var pages Pages
for _, p := range inPages {
if p.NodeType == n {
if p.PageType == n {
pages = append(pages, p)
}
}
return pages
}
func (*PageCollections) findPagesByNodeTypeNotIn(n NodeType, inPages Pages) Pages {
func (*PageCollections) findPagesByNodeTypeNotIn(n PageType, inPages Pages) Pages {
var pages Pages
for _, p := range inPages {
if p.NodeType != n {
if p.PageType != n {
pages = append(pages, p)
}
}
return pages
}
func (c *PageCollections) findAllPagesByNodeType(n NodeType) Pages {
func (c *PageCollections) findAllPagesByNodeType(n PageType) Pages {
return c.findPagesByNodeTypeIn(n, c.Pages)
}
func (c *PageCollections) findRawAllPagesByNodeType(n NodeType) Pages {
func (c *PageCollections) findRawAllPagesByNodeType(n PageType) Pages {
return c.findPagesByNodeTypeIn(n, c.rawAllPages)
}

View file

@ -66,6 +66,7 @@ func TestPermalink(t *testing.T) {
info := newSiteInfo(siteBuilderCfg{baseURL: string(test.base), language: helpers.NewDefaultLanguage()})
p := &Page{
PageType: PagePage,
Node: Node{
URLPath: URLPath{
Section: "z",

View file

@ -1257,6 +1257,40 @@ func TestPageSimpleMethods(t *testing.T) {
}
}
func TestIndexPageSimpleMethods(t *testing.T) {
for i, this := range []struct {
assertFunc func(n *Page) bool
}{
{func(n *Page) bool { return n.IsNode() }},
{func(n *Page) bool { return !n.IsPage() }},
{func(n *Page) bool { return n.RSSlink() == "rssLink" }},
{func(n *Page) bool { return n.Scratch() != nil }},
{func(n *Page) bool { return n.Hugo() != nil }},
{func(n *Page) bool { return n.Now().Unix() == time.Now().Unix() }},
} {
n := &Page{PageType: PageHome}
n.RSSLink = "rssLink"
if !this.assertFunc(n) {
t.Errorf("[%d] Node method error", i)
}
}
}
func TestPageType(t *testing.T) {
// Add tests for these constants to make sure they don't change
require.Equal(t, PageType("page"), PagePage)
require.Equal(t, PageType("home"), PageHome)
require.Equal(t, PageType("section"), PageSection)
require.Equal(t, PageType("taxonomy"), PageTaxonomy)
require.Equal(t, PageType("taxonomyTerm"), PageTaxonomyTerm)
require.False(t, PagePage.IsNode())
require.True(t, PageHome.IsNode())
}
func TestChompBOM(t *testing.T) {
const utf8BOM = "\xef\xbb\xbf"

View file

@ -260,8 +260,8 @@ func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
// Paginator gets this Node's paginator if it's already created.
// If it's not, one will be created with all pages in Data["Pages"].
func (n *Page) Paginator(options ...interface{}) (*Pager, error) {
if !n.NodeType.IsNode() {
return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.NodeType, n.Title)
if !n.PageType.IsNode() {
return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.PageType, n.Title)
}
pagerSize, err := resolvePagerSize(options...)
@ -303,8 +303,8 @@ func (n *Page) Paginator(options ...interface{}) (*Pager, error) {
// If it's not, one will be created with the qiven sequence.
// Note that repeated calls will return the same result, even if the sequence is different.
func (n *Page) Paginate(seq interface{}, options ...interface{}) (*Pager, error) {
if !n.NodeType.IsNode() {
return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.NodeType, n.Title)
if !n.PageType.IsNode() {
return nil, fmt.Errorf("Paginators not supported for pages of type %q (%q)", n.PageType, n.Title)
}
pagerSize, err := resolvePagerSize(options...)

View file

@ -1533,8 +1533,8 @@ func (s *Site) assembleSections() {
s.Sections = make(Taxonomy)
s.Info.Sections = s.Sections
// TODO(bep) np check these vs the caches
regularPages := s.findPagesByNodeType(NodePage)
sectionPages := s.findPagesByNodeType(NodeSection)
regularPages := s.findPagesByNodeType(PagePage)
sectionPages := s.findPagesByNodeType(PageSection)
for i, p := range regularPages {
s.Sections.add(p.Section(), WeightedPage{regularPages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
@ -1561,14 +1561,14 @@ func (s *Site) assembleSections() {
}
}
func (s *Site) nodeTypeFromSections(sections []string) NodeType {
func (s *Site) nodeTypeFromSections(sections []string) PageType {
if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
if len(sections) == 1 {
return NodeTaxonomyTerms
return PageTaxonomyTerm
}
return NodeTaxonomy
return PageTaxonomy
}
return NodeSection
return PageSection
}
func (s *Site) preparePages() error {

View file

@ -67,14 +67,14 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
for p := range pages {
targetPath := p.TargetPath()
layouts := p.layouts()
jww.DEBUG.Printf("Render %s to %q with layouts %q", p.NodeType, targetPath, layouts)
jww.DEBUG.Printf("Render %s to %q with layouts %q", p.PageType, targetPath, layouts)
if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil {
results <- err
}
// Taxonomy terms have no page set to paginate, so skip that for now.
if p.NodeType.IsNode() && p.NodeType != NodeTaxonomyTerms {
if p.PageType.IsNode() && p.PageType != PageTaxonomyTerm {
if err := s.renderPaginator(p); err != nil {
results <- err
}
@ -160,9 +160,10 @@ func (s *Site) render404() error {
return nil
}
p := s.newNodePage(Node404)
p := s.newNodePage(page404)
p.Title = "404 Page not found"
p.Data["Pages"] = s.Pages
p.Pages = s.Pages
s.setPageURLs(p, "404.html")
nfLayouts := []string{"404.html"}
@ -180,18 +181,19 @@ func (s *Site) renderSitemap() error {
sitemapDefault := parseSitemap(viper.GetStringMap("sitemap"))
n := s.newNodePage(NodeSitemap)
n := s.newNodePage(pageSitemap)
// Include all pages (regular, home page, taxonomies etc.)
pages := s.Pages
page := s.newNodePage(NodeSitemap)
page := s.newNodePage(pageSitemap)
page.URLPath.URL = ""
page.Sitemap.ChangeFreq = sitemapDefault.ChangeFreq
page.Sitemap.Priority = sitemapDefault.Priority
page.Sitemap.Filename = sitemapDefault.Filename
n.Data["Pages"] = pages
n.Pages = pages
// TODO(bep) this should be done somewhere else
for _, page := range pages {
@ -222,8 +224,9 @@ func (s *Site) renderRobotsTXT() error {
return nil
}
n := s.newNodePage(NodeRobotsTXT)
n := s.newNodePage(pageRobotsTXT)
n.Data["Pages"] = s.Pages
n.Pages = s.Pages
rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
outBuffer := bp.GetBuffer()