node to page: Fix the page collections

Updates #2297
This commit is contained in:
Bjørn Erik Pedersen 2016-11-04 00:34:25 +01:00
parent c175407fa4
commit 698b994f71
8 changed files with 206 additions and 106 deletions

View file

@ -52,6 +52,7 @@ func (h *HugoSites) assembleGitInfo() {
s := h.Sites[0] s := h.Sites[0]
for _, p := range s.AllPages { for _, p := range s.AllPages {
// TODO(bep) np consider other nodes
// Git normalizes file paths on this form: // Git normalizes file paths on this form:
filename := path.Join(contentRoot, contentDir, filepath.ToSlash(p.Path())) filename := path.Join(contentRoot, contentDir, filepath.ToSlash(p.Path()))
g, ok := gitMap[filename] g, ok := gitMap[filename]

View file

@ -238,10 +238,16 @@ func (h *HugoSites) Build(config BuildCfg) error {
} }
} }
// TODO(bep) np createMissingNodes needs taxonomies and sections
if err := h.createMissingNodes(); err != nil { if err := h.createMissingNodes(); err != nil {
return err return err
} }
for _, s := range h.Sites {
// Needed by all who use .Pages, .AllPages, .indexPages
s.refreshPageCaches()
}
if err := h.preRender(config, whatChanged{source: true, other: true}); err != nil { if err := h.preRender(config, whatChanged{source: true, other: true}); err != nil {
return err return err
} }
@ -314,10 +320,10 @@ func (h *HugoSites) Rebuild(config BuildCfg, events ...fsnotify.Event) error {
return err return err
} }
} }
}
if err := h.createMissingNodes(); err != nil { if err := h.createMissingNodes(); err != nil {
return err return err
}
} }
if err := h.preRender(config, changed); err != nil { if err := h.preRender(config, changed); err != nil {
@ -391,16 +397,11 @@ func (h *HugoSites) createMissingNodes() error {
// TODO(bep) np check node title etc. // TODO(bep) np check node title etc.
s := h.Sites[0] s := h.Sites[0]
// TODO(bep) np
for _, p := range s.Pages {
p.setNodeTypeVars(s)
}
home := s.findPagesByNodeType(NodeHome) home := s.findPagesByNodeType(NodeHome)
// home page // home page
if len(home) == 0 { if len(home) == 0 {
s.Pages = append(s.Pages, s.newHomePage()) s.Nodes = append(s.Nodes, s.newHomePage())
} }
// taxonomy list and terms pages // taxonomy list and terms pages
@ -426,11 +427,11 @@ func (h *HugoSites) createMissingNodes() error {
} }
} }
if !foundTaxonomyPage { if !foundTaxonomyPage {
s.Pages = append(s.Pages, s.newTaxonomyPage(plural, key)) s.Nodes = append(s.Nodes, s.newTaxonomyPage(plural, key))
} }
if !foundTaxonomyTermsPage { if !foundTaxonomyTermsPage {
s.Pages = append(s.Pages, s.newTaxonomyTermsPage(plural)) s.Nodes = append(s.Nodes, s.newTaxonomyTermsPage(plural))
} }
} }
@ -449,7 +450,7 @@ func (h *HugoSites) createMissingNodes() error {
} }
} }
if !foundSection { if !foundSection {
s.Pages = append(s.Pages, s.newSectionPage(name, section)) s.Nodes = append(s.Nodes, s.newSectionPage(name, section))
} }
} }
} }
@ -542,7 +543,7 @@ func (h *HugoSites) setupTranslations() {
if strings.HasPrefix(site.Language.Lang, p.Lang()) { if strings.HasPrefix(site.Language.Lang, p.Lang()) {
site.updateBuildStats(p) site.updateBuildStats(p)
if shouldBuild { if shouldBuild {
site.Pages = append(site.Pages, p) site.Nodes = append(site.Nodes, p)
p.Site = &site.Info p.Site = &site.Info
} }
} }
@ -552,7 +553,7 @@ func (h *HugoSites) setupTranslations() {
} }
if i == 0 { if i == 0 {
site.AllPages = append(site.AllPages, p) site.AllNodes = append(site.AllNodes, p)
} }
} }
@ -560,12 +561,12 @@ func (h *HugoSites) setupTranslations() {
// Pull over the collections from the master site // Pull over the collections from the master site
for i := 1; i < len(h.Sites); i++ { for i := 1; i < len(h.Sites); i++ {
h.Sites[i].AllPages = h.Sites[0].AllPages h.Sites[i].AllNodes = h.Sites[0].AllNodes
h.Sites[i].Data = h.Sites[0].Data h.Sites[i].Data = h.Sites[0].Data
} }
if len(h.Sites) > 1 { if len(h.Sites) > 1 {
pages := h.Sites[0].AllPages pages := h.Sites[0].AllNodes
allTranslations := pagesToTranslationsMap(h.multilingual, pages) allTranslations := pagesToTranslationsMap(h.multilingual, pages)
assignTranslationsToPages(allTranslations, pages) assignTranslationsToPages(allTranslations, pages)
} }
@ -574,12 +575,14 @@ func (h *HugoSites) setupTranslations() {
// preRender performs build tasks that need to be done as late as possible. // preRender performs build tasks that need to be done as late as possible.
// Shortcode handling is the main task in here. // Shortcode handling is the main task in here.
// TODO(bep) We need to look at the whole handler-chain construct with he below in mind. // TODO(bep) We need to look at the whole handler-chain construct with he below in mind.
// TODO(bep) np clean
func (h *HugoSites) preRender(cfg BuildCfg, changed whatChanged) error { func (h *HugoSites) preRender(cfg BuildCfg, changed whatChanged) error {
for _, s := range h.Sites { for _, s := range h.Sites {
if err := s.setCurrentLanguageConfig(); err != nil { if err := s.setCurrentLanguageConfig(); err != nil {
return err return err
} }
// Run "render prepare" // Run "render prepare"
if err := s.renderHomePage(true); err != nil { if err := s.renderHomePage(true); err != nil {
return err return err
@ -680,7 +683,7 @@ func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) {
}(pageChan, wg) }(pageChan, wg)
} }
for _, p := range s.Pages { for _, p := range s.Nodes {
pageChan <- p pageChan <- p
} }
@ -728,9 +731,10 @@ func (s *Site) updateBuildStats(page *Page) {
} }
} }
// TODO(bep) np remove
func (h *HugoSites) findAllPagesByNodeType(n NodeType) Pages { func (h *HugoSites) findAllPagesByNodeType(n NodeType) Pages {
var pages Pages var pages Pages
for _, p := range h.Sites[0].AllPages { for _, p := range h.Sites[0].AllNodes {
if p.NodeType == n { if p.NodeType == n {
pages = append(pages, p) pages = append(pages, p)
} }
@ -750,6 +754,9 @@ func buildAndRenderSite(s *Site, additionalTemplates ...string) error {
// Convenience func used in tests to build a single site/language. // Convenience func used in tests to build a single site/language.
func doBuildSite(s *Site, render bool, additionalTemplates ...string) error { func doBuildSite(s *Site, render bool, additionalTemplates ...string) error {
if s.PageCollections == nil {
s.PageCollections = newPageCollections()
}
sites, err := newHugoSites(s) sites, err := newHugoSites(s)
if err != nil { if err != nil {
return err return err

View file

@ -395,7 +395,6 @@ func (p *Page) setNodeTypeVars(s *Site) {
} }
p.NodeType = nodeType p.NodeType = nodeType
} }
// TODO(bep) np node URL // TODO(bep) np node URL
// Set Node URL // Set Node URL

View file

@ -31,8 +31,8 @@ import (
*/ */
func TestNodesAsPage(t *testing.T) { func TestNodesAsPage(t *testing.T) {
jww.SetStdoutThreshold(jww.LevelDebug) //jww.SetStdoutThreshold(jww.LevelDebug)
//jww.SetStdoutThreshold(jww.LevelFatal) jww.SetStdoutThreshold(jww.LevelFatal)
nodePageFeatureFlag = true nodePageFeatureFlag = true
defer toggleNodePageFeatureFlag() defer toggleNodePageFeatureFlag()
@ -184,8 +184,8 @@ Content Page %02d
} }
func TestNodesWithNoContentFile(t *testing.T) { func TestNodesWithNoContentFile(t *testing.T) {
jww.SetStdoutThreshold(jww.LevelDebug) //jww.SetStdoutThreshold(jww.LevelDebug)
//jww.SetStdoutThreshold(jww.LevelFatal) jww.SetStdoutThreshold(jww.LevelFatal)
nodePageFeatureFlag = true nodePageFeatureFlag = true
defer toggleNodePageFeatureFlag() defer toggleNodePageFeatureFlag()
@ -221,7 +221,7 @@ Content Page %02d
} }
// Home page // Home page
homePages := s.findPagesByNodeType(NodeHome) homePages := s.findIndexNodesByNodeType(NodeHome)
require.Len(t, homePages, 1) require.Len(t, homePages, 1)
homePage := homePages[0] homePage := homePages[0]

View file

@ -1260,12 +1260,12 @@ func (p *Page) prepareLayouts() error {
// TODO(bep) np naming, move some // TODO(bep) np naming, move some
func (p *Page) prepareData(s *Site) error { func (p *Page) prepareData(s *Site) error {
p.Data = make(map[string]interface{}) p.Data = make(map[string]interface{})
switch p.NodeType { switch p.NodeType {
case NodePage: case NodePage:
case NodeHome: case NodeHome:
// TODO(bep) np cache the below p.Data["Pages"] = s.Pages
p.Data["Pages"] = s.owner.findAllPagesByNodeType(NodePage)
case NodeSection: case NodeSection:
sectionData, ok := s.Sections[p.sections[0]] sectionData, ok := s.Sections[p.sections[0]]
if !ok { if !ok {

122
hugolib/page_collections.go Normal file
View file

@ -0,0 +1,122 @@
// Copyright 2016 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"fmt"
)
// TODO(bep) np pages names
// TODO(bep) np this is a somewhat breaking change and should be doc. + release notes: See AllPages vs. "this language only". Looks like it is like this alread, check.
// PageCollections groups the page lists for a site. Nodes/AllNodes are the
// authoritative collections; Pages, AllPages and indexNodes are caches
// derived from them (see refreshPageCaches).
type PageCollections struct {
	// Includes only pages of NodePage type, and only pages in the current language.
	// Derived cache; rebuilt by refreshPageCaches from Nodes.
	Pages Pages

	// Includes all pages in all languages, including the current one.
	// Only pages of NodePage type.
	// Derived cache; rebuilt by refreshPageCaches from AllNodes.
	AllPages Pages

	// Includes pages of all types (regular pages, home, sections,
	// taxonomies, ...), but only pages in the current language.
	Nodes Pages

	// Includes all pages in all languages, including the current one.
	// Includes pages of all types.
	AllNodes Pages

	// A convenience cache for the traditional node types, taxonomies, home page etc.
	// This is for the current language only.
	// Derived cache: the non-NodePage subset of Nodes.
	indexNodes Pages

	// Includes absolute all pages (of all types), including drafts etc.
	rawAllPages Pages
}
// refreshPageCaches rebuilds the derived collections (Pages, indexNodes,
// AllPages) from the authoritative Nodes and AllNodes slices. It must be
// called after Nodes/AllNodes change and before anyone reads the caches.
func (c *PageCollections) refreshPageCaches() {
	// All pages are stored in AllNodes and Nodes. Filter from those.
	c.Pages = c.findPagesByNodeTypeIn(NodePage, c.Nodes)
	c.indexNodes = c.findPagesByNodeTypeNotIn(NodePage, c.Nodes)
	c.AllPages = c.findPagesByNodeTypeIn(NodePage, c.AllNodes)
	// Sanity check: by this point every node should have been classified
	// (setNodeTypeVars must have run); an unknown type is a programmer bug.
	for _, n := range c.Nodes {
		if n.NodeType == NodeUnknown {
			panic(fmt.Sprintf("Got unknown type %s", n.Title))
		}
	}
}
// newPageCollections returns an empty PageCollections ready for use.
func newPageCollections() *PageCollections {
	c := new(PageCollections)
	return c
}
// newPageCollectionsFromPages returns a PageCollections seeded with the given
// raw page set; the derived collections stay empty until refreshed.
func newPageCollectionsFromPages(pages Pages) *PageCollections {
	c := new(PageCollections)
	c.rawAllPages = pages
	return c
}
// findPagesByNodeType returns the pages of the given type in the current
// language (filtered from Nodes, which holds pages of all types).
func (c *PageCollections) findPagesByNodeType(n NodeType) Pages {
	return c.findPagesByNodeTypeIn(n, c.Nodes)
}
// findIndexNodesByNodeType returns the index nodes (home, sections,
// taxonomies etc.) of the given type, current language only.
// Relies on the indexNodes cache, so refreshPageCaches must have run.
func (c *PageCollections) findIndexNodesByNodeType(n NodeType) Pages {
	return c.findPagesByNodeTypeIn(n, c.indexNodes)
}
// findPagesByNodeTypeIn returns the subset of inPages whose NodeType equals n.
// A nil slice is returned when nothing matches.
func (*PageCollections) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
	var matched Pages
	for i := range inPages {
		if inPages[i].NodeType == n {
			matched = append(matched, inPages[i])
		}
	}
	return matched
}
// findPagesByNodeTypeNotIn returns the subset of inPages whose NodeType
// differs from n. A nil slice is returned when nothing matches.
func (*PageCollections) findPagesByNodeTypeNotIn(n NodeType, inPages Pages) Pages {
	var matched Pages
	for i := range inPages {
		if inPages[i].NodeType != n {
			matched = append(matched, inPages[i])
		}
	}
	return matched
}
// findAllPagesByNodeType returns all pages of the given type from the raw
// page set. It is currently identical to findRawAllPagesByNodeType and
// delegates to it (the two names were duplicate implementations; kept as
// separate entry points for call-site compatibility).
func (c *PageCollections) findAllPagesByNodeType(n NodeType) Pages {
	return c.findRawAllPagesByNodeType(n)
}

// findRawAllPagesByNodeType returns the pages of the given type from the
// raw, unfiltered page set (rawAllPages includes drafts etc.).
func (c *PageCollections) findRawAllPagesByNodeType(n NodeType) Pages {
	return c.findPagesByNodeTypeIn(n, c.rawAllPages)
}
// addPage appends a page to the raw page set. The derived collections
// (Pages, AllPages, indexNodes) are not touched here.
func (c *PageCollections) addPage(page *Page) {
	c.rawAllPages = append(c.rawAllPages, page)
}
// removePageByPath removes the page whose source file path matches path from
// the raw page set; it is a no-op when no such page exists.
func (c *PageCollections) removePageByPath(path string) {
	i := c.rawAllPages.FindPagePosByFilePath(path)
	if i < 0 {
		return
	}
	c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
// removePage removes the entry matching page (per Pages.FindPagePos) from
// the raw page set; it is a no-op when no match is found.
func (c *PageCollections) removePage(page *Page) {
	i := c.rawAllPages.FindPagePos(page)
	if i < 0 {
		return
	}
	c.rawAllPages = append(c.rawAllPages[:i], c.rawAllPages[i+1:]...)
}
// replacePage replaces an existing entry in the raw page set with page.
// NOTE(review): the match is done via Pages.FindPagePos — presumably by
// source file path, but confirm against FindPagePos; the replacement is
// appended at the end, so ordering within rawAllPages is not preserved.
func (c *PageCollections) replacePage(page *Page) {
	// will find existing page that matches filepath and remove it
	c.removePage(page)
	c.addPage(page)
}

View file

@ -91,11 +91,10 @@ type Site struct {
nodeCache *nodeCache nodeCache *nodeCache
nodeCacheInit sync.Once nodeCacheInit sync.Once
Pages Pages *PageCollections
AllPages Pages
rawAllPages Pages Files []*source.File
Files []*source.File Taxonomies TaxonomyList
Taxonomies TaxonomyList
// Plural is what we get in the folder, so keep track of this mapping // Plural is what we get in the folder, so keep track of this mapping
// to get the singular form from that value. // to get the singular form from that value.
@ -122,7 +121,8 @@ func (s *Site) reset() *Site {
// newSite creates a new site in the given language. // newSite creates a new site in the given language.
func newSite(lang *helpers.Language) *Site { func newSite(lang *helpers.Language) *Site {
return &Site{Language: lang, Info: newSiteInfo(siteBuilderCfg{language: lang})} c := newPageCollections()
return &Site{Language: lang, PageCollections: c, Info: newSiteInfo(siteBuilderCfg{pageCollections: c, language: lang})}
} }
@ -172,14 +172,12 @@ type SiteInfo struct {
_ [4]byte _ [4]byte
paginationPageCount uint64 paginationPageCount uint64
BaseURL template.URL BaseURL template.URL
Taxonomies TaxonomyList Taxonomies TaxonomyList
Authors AuthorList Authors AuthorList
Social SiteSocial Social SiteSocial
Sections Taxonomy Sections Taxonomy
Pages *Pages // Includes only pages in this language *PageCollections
AllPages *Pages // Includes other translated pages, excluding those in this language.
rawAllPages *Pages // Includes absolute all pages, including drafts etc.
Files *[]*source.File Files *[]*source.File
Menus *Menus Menus *Menus
Hugo *HugoInfo Hugo *HugoInfo
@ -211,18 +209,19 @@ type SiteInfo struct {
// Used in tests. // Used in tests.
type siteBuilderCfg struct { type siteBuilderCfg struct {
language *helpers.Language language *helpers.Language
baseURL string pageCollections *PageCollections
baseURL string
pages *Pages pages Pages
} }
func newSiteInfo(cfg siteBuilderCfg) SiteInfo { func newSiteInfo(cfg siteBuilderCfg) SiteInfo {
return SiteInfo{ return SiteInfo{
BaseURL: template.URL(cfg.baseURL), BaseURL: template.URL(cfg.baseURL),
rawAllPages: cfg.pages, pathSpec: helpers.NewPathSpecFromConfig(cfg.language),
pathSpec: helpers.NewPathSpecFromConfig(cfg.language), multilingual: newMultiLingualForLanguage(cfg.language),
multilingual: newMultiLingualForLanguage(cfg.language), PageCollections: cfg.pageCollections,
} }
} }
@ -297,7 +296,8 @@ func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error
var link string var link string
if refURL.Path != "" { if refURL.Path != "" {
for _, page := range []*Page(*s.AllPages) { // TODO(bep) np relRef
for _, page := range s.AllPages {
refPath := filepath.FromSlash(refURL.Path) refPath := filepath.FromSlash(refURL.Path)
if page.Source.Path() == refPath || page.Source.LogicalName() == refPath { if page.Source.Path() == refPath || page.Source.LogicalName() == refPath {
target = page target = page
@ -372,7 +372,8 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, er
} }
} }
for _, page := range []*Page(*s.AllPages) { // TODO(bep) np sourceRelativeLink
for _, page := range s.AllPages {
if page.Source.Path() == refPath { if page.Source.Path() == refPath {
target = page target = page
break break
@ -381,14 +382,14 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, er
// need to exhaust the test, then try with the others :/ // need to exhaust the test, then try with the others :/
// if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md` // if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md" mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
for _, page := range []*Page(*s.AllPages) { for _, page := range s.AllPages {
if page.Source.Path() == mdPath { if page.Source.Path() == mdPath {
target = page target = page
break break
} }
} }
indexPath := filepath.Join(refPath, "index.md") indexPath := filepath.Join(refPath, "index.md")
for _, page := range []*Page(*s.AllPages) { for _, page := range s.AllPages {
if page.Source.Path() == indexPath { if page.Source.Path() == indexPath {
target = page target = page
break break
@ -806,6 +807,7 @@ func (s *Site) postProcess() (err error) {
if err = s.buildSiteMeta(); err != nil { if err = s.buildSiteMeta(); err != nil {
return return
} }
s.timerStep("build taxonomies") s.timerStep("build taxonomies")
return return
} }
@ -975,9 +977,7 @@ func (s *Site) initializeSiteInfo() {
BuildDrafts: viper.GetBool("buildDrafts"), BuildDrafts: viper.GetBool("buildDrafts"),
canonifyURLs: viper.GetBool("canonifyURLs"), canonifyURLs: viper.GetBool("canonifyURLs"),
preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"), preserveTaxonomyNames: lang.GetBool("preserveTaxonomyNames"),
AllPages: &s.AllPages, PageCollections: s.PageCollections,
Pages: &s.Pages,
rawAllPages: &s.rawAllPages,
Files: &s.Files, Files: &s.Files,
Menus: &s.Menus, Menus: &s.Menus,
Params: params, Params: params,
@ -1293,28 +1293,6 @@ func converterCollator(s *Site, results <-chan HandledResult, errs chan<- error)
errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n")) errs <- fmt.Errorf("Errors rendering pages: %s", strings.Join(errMsgs, "\n"))
} }
func (s *Site) addPage(page *Page) {
s.rawAllPages = append(s.rawAllPages, page)
}
func (s *Site) removePageByPath(path string) {
if i := s.rawAllPages.FindPagePosByFilePath(path); i >= 0 {
s.rawAllPages = append(s.rawAllPages[:i], s.rawAllPages[i+1:]...)
}
}
func (s *Site) removePage(page *Page) {
if i := s.rawAllPages.FindPagePos(page); i >= 0 {
s.rawAllPages = append(s.rawAllPages[:i], s.rawAllPages[i+1:]...)
}
}
func (s *Site) replacePage(page *Page) {
// will find existing page that matches filepath and remove it
s.removePage(page)
s.addPage(page)
}
func (s *Site) replaceFile(sf *source.File) { func (s *Site) replaceFile(sf *source.File) {
for i, f := range s.Files { for i, f := range s.Files {
if f.Path() == sf.Path() { if f.Path() == sf.Path() {
@ -1379,15 +1357,29 @@ func readCollator(s *Site, results <-chan HandledResult, errs chan<- error) {
} }
func (s *Site) buildSiteMeta() (err error) { func (s *Site) buildSiteMeta() (err error) {
s.assembleMenus() s.assembleMenus()
if len(s.Pages) == 0 { if len(s.Nodes) == 0 {
return return
} }
// TODO(bep) np order
// assembleTaxonomies: Needs pages (temp lookup) (maybe later nodes)
s.assembleTaxonomies() s.assembleTaxonomies()
// TODO(bep) np
for _, p := range s.AllNodes {
// setNodeTypeVars needs taxonomies
p.setNodeTypeVars(s)
}
// assembleSections: Needs pages (temp lookup)
s.assembleSections() s.assembleSections()
s.Info.LastChange = s.Pages[0].Lastmod
// TODO(bep) np
pages := s.findPagesByNodeType(NodePage)
s.Info.LastChange = pages[0].Lastmod
return return
} }
@ -1527,7 +1519,8 @@ func (s *Site) assembleTaxonomies() {
for singular, plural := range taxonomies { for singular, plural := range taxonomies {
s.Taxonomies[plural] = make(Taxonomy) s.Taxonomies[plural] = make(Taxonomy)
s.taxonomiesPluralSingular[plural] = singular s.taxonomiesPluralSingular[plural] = singular
for _, p := range s.Pages { // TODO(np) tax other nodes
for _, p := range s.findPagesByNodeType(NodePage) {
vals := p.getParam(plural, !s.Info.preserveTaxonomyNames) vals := p.getParam(plural, !s.Info.preserveTaxonomyNames)
weight := p.GetParam(plural + "_weight") weight := p.GetParam(plural + "_weight")
if weight == nil { if weight == nil {
@ -1560,8 +1553,7 @@ func (s *Site) resetBuildState() {
s.nodeCache.reset() s.nodeCache.reset()
s.Pages = make(Pages, 0) s.PageCollections = newPageCollections()
s.AllPages = make(Pages, 0)
s.Info.paginationPageCount = 0 s.Info.paginationPageCount = 0
s.draftCount = 0 s.draftCount = 0
@ -1578,7 +1570,7 @@ func (s *Site) assembleSections() {
s.Info.Sections = s.Sections s.Info.Sections = s.Sections
regularPages := s.findPagesByNodeType(NodePage) regularPages := s.findPagesByNodeType(NodePage)
for i, p := range regularPages { for i, p := range regularPages {
s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames) s.Sections.add(p.Section(), WeightedPage{regularPages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
} }
for k := range s.Sections { for k := range s.Sections {
@ -1605,28 +1597,6 @@ func (s *Site) nodeTypeFromSections(sections []string) NodeType {
return NodeSection return NodeSection
} }
func (s *Site) findPagesByNodeType(n NodeType) Pages {
return s.findPagesByNodeTypeIn(n, s.Pages)
}
func (s *Site) findPagesByNodeTypeIn(n NodeType, inPages Pages) Pages {
var pages Pages
for _, p := range inPages {
if p.NodeType == n {
pages = append(pages, p)
}
}
return pages
}
func (s *Site) findAllPagesByNodeType(n NodeType) Pages {
return s.findPagesByNodeTypeIn(n, s.rawAllPages)
}
func (s *Site) findRawAllPagesByNodeType(n NodeType) Pages {
return s.findPagesByNodeTypeIn(n, s.rawAllPages)
}
// renderAliases renders shell pages that simply have a redirect in the header. // renderAliases renders shell pages that simply have a redirect in the header.
func (s *Site) renderAliases() error { func (s *Site) renderAliases() error {
for _, p := range s.Pages { for _, p := range s.Pages {
@ -1668,7 +1638,7 @@ func (s *Site) renderAliases() error {
func (s *Site) preparePages() error { func (s *Site) preparePages() error {
var errors []error var errors []error
for _, p := range s.Pages { for _, p := range s.Nodes {
if err := p.prepareLayouts(); err != nil { if err := p.prepareLayouts(); err != nil {
errors = append(errors, err) errors = append(errors, err)
} }

View file

@ -43,7 +43,7 @@ func (s *Site) renderPages() error {
go pageRenderer(s, pages, results, wg) go pageRenderer(s, pages, results, wg)
} }
for _, page := range s.Pages { for _, page := range s.Nodes {
pages <- page pages <- page
} }
@ -135,6 +135,7 @@ func (s *Site) renderRSS(p *Page) error {
} }
// TODO(bep) np check RSS titles // TODO(bep) np check RSS titles
// TODO(bep) np check RSS page limit, 50?
rssNode := p.copy() rssNode := p.copy()
// TODO(bep) np todelido URL // TODO(bep) np todelido URL