node to page: Final TODO-fixes

Updates #2297
Bjørn Erik Pedersen 2016-11-16 21:06:10 +01:00
parent 99d11386a7
commit a843d5d3bb
5 changed files with 99 additions and 116 deletions

View file

@@ -15,9 +15,7 @@ package hugolib
 import (
     "fmt"
-    "html/template"
     "os"
-    "path"
     "strings"
     "sync"
@@ -25,7 +23,6 @@ import (
     "github.com/spf13/viper"
-    "github.com/bep/inflect"
     "github.com/spf13/hugo/source"
     "github.com/spf13/hugo/tpl"
     jww "github.com/spf13/jwalterweatherman"
@@ -300,84 +297,6 @@ func (h *HugoSites) createMissingPages() error {
     return nil
 }
-
-// TODO(bep) np move
-// Move the new* methods after cleanup in site.go
-func (s *Site) newNodePage(typ string) *Page {
-    return &Page{
-        pageInit: &pageInit{},
-        Kind:     typ,
-        Data:     make(map[string]interface{}),
-        Site:     &s.Info,
-        language: s.Language,
-        site:     s}
-}
-
-func (s *Site) newHomePage() *Page {
-    p := s.newNodePage(KindHome)
-    p.Title = s.Info.Title
-    pages := Pages{}
-    p.Data["Pages"] = pages
-    p.Pages = pages
-    s.setPageURLs(p, "/")
-    return p
-}
-
-func (s *Site) setPageURLs(p *Page, in string) {
-    p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
-    p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
-    p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
-}
-
-func (s *Site) newTaxonomyPage(plural, key string) *Page {
-    p := s.newNodePage(KindTaxonomy)
-    p.sections = []string{plural, key}
-
-    if s.Info.preserveTaxonomyNames {
-        key = s.Info.pathSpec.MakePathSanitized(key)
-    }
-
-    if s.Info.preserveTaxonomyNames {
-        // keep as is in the title
-        p.Title = key
-    } else {
-        p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
-    }
-
-    s.setPageURLs(p, path.Join(plural, key))
-
-    return p
-}
-
-func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
-    p := s.newNodePage(KindSection)
-    p.sections = []string{name}
-
-    sectionName := name
-    if !s.Info.preserveTaxonomyNames && len(section) > 0 {
-        sectionName = section[0].Page.Section()
-    }
-
-    sectionName = helpers.FirstUpper(sectionName)
-    if viper.GetBool("pluralizeListTitles") {
-        p.Title = inflect.Pluralize(sectionName)
-    } else {
-        p.Title = sectionName
-    }
-
-    s.setPageURLs(p, name)
-
-    return p
-}
-
-func (s *Site) newTaxonomyTermsPage(plural string) *Page {
-    p := s.newNodePage(KindTaxonomyTerm)
-    p.sections = []string{plural}
-    p.Title = strings.Title(plural)
-    s.setPageURLs(p, plural)
-    return p
-}
-
 func (h *HugoSites) setupTranslations() {
     master := h.Sites[0]

View file

@@ -175,7 +175,7 @@ type Page struct {
     site *Site

-    // Pulled over from Node. TODO(bep) np reorg and group (embed)
+    // Pulled over from old Node. TODO(bep) reorg and group (embed)
     Site *SiteInfo `json:"-"`
@@ -1458,7 +1458,6 @@ func (p *Page) prepareLayouts() error {
     return nil
 }

-// TODO(bep) np naming, move some
 func (p *Page) prepareData(s *Site) error {
     var pages Pages
@@ -1507,7 +1506,7 @@ func (p *Page) prepareData(s *Site) error {
 }

 func (p *Page) updatePageDates() {
-    // TODO(bep) np there is a potential issue with page sorting for home pages
+    // TODO(bep) there is a potential issue with page sorting for home pages
     // etc. without front matter dates set, but let us wrap the head around
     // that in another time.
     if !p.IsNode() {
@@ -1553,8 +1552,6 @@ func (p *Page) copy() *Page {
     return &c
 }

-// TODO(bep) np these are pulled over from Node. Needs regrouping / embed
-
 func (p *Page) Now() time.Time {
     return time.Now()
 }

View file

@@ -13,10 +13,6 @@
 package hugolib

-import (
-    "fmt"
-)
-
 // PageCollections contains the page collections for a site.
 type PageCollections struct {
     // Includes only pages of all types, and only pages in the current language.
@@ -34,6 +30,9 @@ type PageCollections struct {
     // This is for the current language only.
     RegularPages Pages

+    // A convenience cache for the all the regular pages.
+    AllRegularPages Pages
+
     // Includes absolute all pages (of all types), including drafts etc.
     rawAllPages Pages
 }
@@ -41,13 +40,7 @@ type PageCollections struct {
 func (c *PageCollections) refreshPageCaches() {
     c.indexPages = c.findPagesByKindNotIn(KindPage, c.Pages)
     c.RegularPages = c.findPagesByKindIn(KindPage, c.Pages)
-
-    // TODO(bep) np remove eventually
-    for _, n := range c.Pages {
-        if n.Kind == kindUnknown {
-            panic(fmt.Sprintf("Got unknown type %s", n.Title))
-        }
-    }
+    c.AllRegularPages = c.findPagesByKindIn(KindPage, c.AllPages)
 }

 func newPageCollections() *PageCollections {
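
A note on the new cache: AllRegularPages is what the ref/relref lookups in site.go (below) switch to, so they no longer scan home, section, and taxonomy pages. A minimal sketch of the kind-filtering it relies on — the Page and Pages stand-ins are simplified, and the helper body is an assumption about what hugolib's findPagesByKindIn does; refreshPageCaches derives RegularPages from Pages and AllRegularPages from AllPages with exactly this kind of filter:

package main

import "fmt"

// Simplified stand-ins; hugolib's real Page carries far more state.
type Page struct {
    Kind  string
    Title string
}

type Pages []*Page

// findPagesByKindIn keeps only pages whose Kind matches.
func findPagesByKindIn(kind string, pages Pages) Pages {
    var out Pages
    for _, p := range pages {
        if p.Kind == kind {
            out = append(out, p)
        }
    }
    return out
}

func main() {
    all := Pages{
        {Kind: "home", Title: "Home"},
        {Kind: "page", Title: "First Post"},
        {Kind: "section", Title: "Posts"},
        {Kind: "page", Title: "Second Post"},
    }
    for _, p := range findPagesByKindIn("page", all) {
        fmt.Println(p.Title) // First Post, Second Post
    }
}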

View file

@@ -27,6 +27,8 @@ import (
     "sync"
     "time"

+    "github.com/bep/inflect"
+
     "sync/atomic"

     "github.com/fsnotify/fsnotify"
@@ -285,8 +287,7 @@ func (s *SiteInfo) refLink(ref string, page *Page, relative bool) (string, error
     var link string

     if refURL.Path != "" {
-        // TODO(bep) np relRef
-        for _, page := range s.AllPages {
+        for _, page := range s.AllRegularPages {
             refPath := filepath.FromSlash(refURL.Path)
             if page.Source.Path() == refPath || page.Source.LogicalName() == refPath {
                 target = page
@@ -357,8 +358,7 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, er
         }
     }

-    // TODO(bep) np sourceRelativeLink
-    for _, page := range s.AllPages {
+    for _, page := range s.AllRegularPages {
         if page.Source.Path() == refPath {
             target = page
             break
@@ -367,14 +367,14 @@ func (s *SiteInfo) SourceRelativeLink(ref string, currentPage *Page) (string, er
         // need to exhaust the test, then try with the others :/
         // if the refPath doesn't end in a filename with extension `.md`, then try with `.md` , and then `/index.md`
         mdPath := strings.TrimSuffix(refPath, string(os.PathSeparator)) + ".md"
-        for _, page := range s.AllPages {
+        for _, page := range s.AllRegularPages {
             if page.Source.Path() == mdPath {
                 target = page
                 break
             }
         }
         indexPath := filepath.Join(refPath, "index.md")
-        for _, page := range s.AllPages {
+        for _, page := range s.AllRegularPages {
             if page.Source.Path() == indexPath {
                 target = page
                 break
@@ -1525,7 +1525,7 @@ func (s *Site) resetBuildState() {
 func (s *Site) assembleSections() {
     s.Sections = make(Taxonomy)
     s.Info.Sections = s.Sections
-    // TODO(bep) np check these vs the caches
+
     regularPages := s.findPagesByKind(KindPage)
     sectionPages := s.findPagesByKind(KindSection)
@@ -1716,7 +1716,6 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou
     var pageTarget target.Output

-    // TODO(bep) np ugly urls vs frontmatter
     if p, ok := d.(*Page); ok && p.IsPage() && path.Ext(p.URLPath.URL) != "" {
         // user has explicitly set a URL with extension for this page
         // make sure it sticks even if "ugly URLs" are turned off.
@@ -1989,3 +1988,79 @@ func getGoMaxProcs() int {
     }
     return 1
 }
+
+func (s *Site) newNodePage(typ string) *Page {
+    return &Page{
+        pageInit: &pageInit{},
+        Kind:     typ,
+        Data:     make(map[string]interface{}),
+        Site:     &s.Info,
+        language: s.Language,
+        site:     s}
+}
+
+func (s *Site) newHomePage() *Page {
+    p := s.newNodePage(KindHome)
+    p.Title = s.Info.Title
+    pages := Pages{}
+    p.Data["Pages"] = pages
+    p.Pages = pages
+    s.setPageURLs(p, "/")
+    return p
+}
+
+func (s *Site) setPageURLs(p *Page, in string) {
+    p.URLPath.URL = s.Info.pathSpec.URLizeAndPrep(in)
+    p.URLPath.Permalink = s.Info.permalink(p.URLPath.URL)
+    p.RSSLink = template.HTML(s.Info.permalink(in + ".xml"))
+}
+
+func (s *Site) newTaxonomyPage(plural, key string) *Page {
+    p := s.newNodePage(KindTaxonomy)
+    p.sections = []string{plural, key}
+
+    if s.Info.preserveTaxonomyNames {
+        key = s.Info.pathSpec.MakePathSanitized(key)
+    }
+
+    if s.Info.preserveTaxonomyNames {
+        // keep as is in the title
+        p.Title = key
+    } else {
+        p.Title = strings.Replace(strings.Title(key), "-", " ", -1)
+    }
+
+    s.setPageURLs(p, path.Join(plural, key))
+
+    return p
+}
+
+func (s *Site) newSectionPage(name string, section WeightedPages) *Page {
+    p := s.newNodePage(KindSection)
+    p.sections = []string{name}
+
+    sectionName := name
+    if !s.Info.preserveTaxonomyNames && len(section) > 0 {
+        sectionName = section[0].Page.Section()
+    }
+
+    sectionName = helpers.FirstUpper(sectionName)
+    if viper.GetBool("pluralizeListTitles") {
+        p.Title = inflect.Pluralize(sectionName)
+    } else {
+        p.Title = sectionName
+    }
+
+    s.setPageURLs(p, name)
+
+    return p
+}
+
+func (s *Site) newTaxonomyTermsPage(plural string) *Page {
+    p := s.newNodePage(KindTaxonomyTerm)
+    p.sections = []string{plural}
+    p.Title = strings.Title(plural)
+    s.setPageURLs(p, plural)
+    return p
+}
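
The taxonomy title rule above (moved here from hugo_sites.go) either keeps the key verbatim when preserveTaxonomyNames is set, or title-cases it with dashes turned into spaces. A stand-alone sketch of just that branch, using only the standard library; taxonomyTitle is a hypothetical helper for illustration, and strings.Title, the idiomatic choice in 2016, is deprecated in current Go:

package main

import (
    "fmt"
    "strings"
)

// taxonomyTitle mirrors the two branches in newTaxonomyPage.
func taxonomyTitle(key string, preserveTaxonomyNames bool) string {
    if preserveTaxonomyNames {
        // keep as is in the title
        return key
    }
    return strings.Replace(strings.Title(key), "-", " ", -1)
}

func main() {
    fmt.Println(taxonomyTitle("hugo-tips", false)) // Hugo Tips
    fmt.Println(taxonomyTitle("hugo-tips", true))  // hugo-tips
}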

View file

@@ -87,7 +87,6 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
 }

 // renderPaginator must be run after the owning Page has been rendered.
-// TODO(bep) np
 func (s *Site) renderPaginator(p *Page) error {
     if p.paginator != nil {
         jww.DEBUG.Printf("Render paginator for page %q", p.Path())
@@ -95,10 +94,8 @@ func (s *Site) renderPaginator(p *Page) error {
         // write alias for page 1
         // TODO(bep) ml all of these n.addLang ... fix.
-        // TODO(bep) np URL
         aliasPath := p.addLangPathPrefix(helpers.PaginateAliasPath(path.Join(p.sections...), 1))
-        //TODO(bep) np node.permalink
         link := p.Permalink()
         s.writeDestAlias(aliasPath, link, nil)
@@ -141,16 +138,18 @@ func (s *Site) renderRSS(p *Page) error {
         return nil
     }

-    // TODO(bep) np check RSS titles
-    // TODO(bep) np check RSS page limit, 50?
-    rssNode := p.copy()
-    rssNode.Kind = kindRSS
+    rssPage := p.copy()
+    rssPage.Kind = kindRSS
+    high := 50
+    if len(rssPage.Pages) > high {
+        rssPage.Pages = rssPage.Pages[:high]
+        rssPage.Data["Pages"] = rssPage.Pages
+    }

-    // TODO(bep) np todelido URL
     rssURI := s.Language.GetString("rssURI")
-    rssNode.URLPath.URL = path.Join(rssNode.URLPath.URL, rssURI)
+    rssPath := path.Join(rssPage.URLPath.URL, rssURI)
+    s.setPageURLs(rssPage, rssPath)

-    if err := s.renderAndWriteXML(rssNode.Title, rssNode.addLangFilepathPrefix(rssNode.URLPath.URL), rssNode, s.appendThemeTemplates(layouts)...); err != nil {
+    if err := s.renderAndWriteXML(rssPage.Title, rssPage.addLangFilepathPrefix(rssPath), rssPage, s.appendThemeTemplates(layouts)...); err != nil {
         return err
     }
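
The rewritten renderRSS resolves two of the removed TODOs at once: the RSS page now gets its URL, permalink, and RSS link through the same setPageURLs helper as every other node page, and the open question "RSS page limit, 50?" becomes a hard cap of 50 entries. The truncation behaves like this stand-alone sketch (plain ints stand in for Pages; capFeed is a hypothetical name):

package main

import "fmt"

// capFeed mirrors the new logic in renderRSS: keep at most high entries,
// pass shorter feeds through untouched.
func capFeed(pages []int, high int) []int {
    if len(pages) > high {
        pages = pages[:high]
    }
    return pages
}

func main() {
    fmt.Println(len(capFeed(make([]int, 120), 50))) // 50
    fmt.Println(len(capFeed(make([]int, 10), 50)))  // 10
}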