diff --git a/hugolib/hugo_sites.go b/hugolib/hugo_sites.go index 5f0168d7a..785fc847f 100644 --- a/hugolib/hugo_sites.go +++ b/hugolib/hugo_sites.go @@ -464,6 +464,8 @@ func (s *Site) preparePagesForRender(cfg BuildCfg, changed whatChanged) { continue } + p.setNodeTypeVars(s) + // If we got this far it means that this is either a new Page pointer // or a template or similar has changed so wee need to do a rerendering // of the shortcodes etc. @@ -572,7 +574,7 @@ func (s *Site) updateBuildStats(page *Page) { } } -func (h *HugoSites) findPagesByNodeType(n NodeType) Pages { +func (h *HugoSites) findAllPagesByNodeType(n NodeType) Pages { var pages Pages for _, p := range h.Sites[0].AllPages { if p.NodeType == n { diff --git a/hugolib/node.go b/hugolib/node.go index c7c620930..070290521 100644 --- a/hugolib/node.go +++ b/hugolib/node.go @@ -32,9 +32,13 @@ type NodeType int const ( NodePage NodeType = iota + // Temporary state. + NodeUnknown + // The rest are node types; home page, sections etc. NodeHome NodeSection + NodeTaxonomy ) func (p NodeType) IsNode() bool { @@ -337,8 +341,14 @@ func (n *Node) addLangFilepathPrefix(outfile string) string { return helpers.FilePathSeparator + filepath.Join(n.Lang(), outfile) } +func sectionsFromFilename(filename string) []string { + dir, _ := filepath.Split(filename) + return strings.Split(dir, helpers.FilePathSeparator) +} + +// TODO(bep) np node identificator func nodeTypeFromFilename(filename string) NodeType { - // TODO(bep) np + if !strings.Contains(filename, "_node") { return NodePage } @@ -347,5 +357,32 @@ func nodeTypeFromFilename(filename string) NodeType { return NodeHome } - return NodeSection + // We don't know enough yet to determine the type. + return NodeUnknown +} + +func (p *Page) setNodeTypeVars(s *Site) { + // TODO(bep) np taxonomies etc. 
+ if p.NodeType == NodeUnknown { + // This is either a taxonomy or a section + if s.isTaxonomy(p.Section()) { + p.NodeType = NodeTaxonomy + } else { + p.NodeType = NodeSection + } + + } + // TODO(bep) np node URL + // Set Node URL + switch p.NodeType { + case NodeHome: + p.URLPath.URL = "" + case NodeSection: + p.URLPath.URL = p.Section() + case NodeTaxonomy: + p.URLPath.URL = path.Join(p.sections...) + } + + p.site = s + } diff --git a/hugolib/node_as_page_test.go b/hugolib/node_as_page_test.go index 6d5c2fdb2..8527030bc 100644 --- a/hugolib/node_as_page_test.go +++ b/hugolib/node_as_page_test.go @@ -31,8 +31,8 @@ import ( */ func TestNodesAsPage(t *testing.T) { - //jww.SetStdoutThreshold(jww.LevelDebug) - jww.SetStdoutThreshold(jww.LevelFatal) + jww.SetStdoutThreshold(jww.LevelDebug) + //jww.SetStdoutThreshold(jww.LevelFatal) nodePageFeatureFlag = true defer toggleNodePageFeatureFlag() @@ -65,6 +65,12 @@ Section1 **Content!** title: Section2 --- Section2 **Content!** +`) + + writeSource(t, filepath.Join("content", "categories", "hugo", "_node.md"), `--- +title: Taxonomy Hugo +--- +Taxonomy Hugo **Content!** `) writeSource(t, filepath.Join("layouts", "index.html"), ` @@ -88,6 +94,15 @@ Section Content: {{ .Content }} {{ range .Paginator.Pages }} Pag: {{ .Title }} {{ end }} +`) + + writeSource(t, filepath.Join("layouts", "_default", "taxonomy.html"), ` +Taxonomy Title: {{ .Title }} +Taxonomy Content: {{ .Content }} +# Pages: {{ len .Data.Pages }} +{{ range .Paginator.Pages }} + Pag: {{ .Title }} +{{ end }} `) // Add some regular pages @@ -120,7 +135,7 @@ Content Page %02d assertFileContent(t, filepath.Join("public", "sect1", "regular1", "index.html"), false, "Single Title: Page 01", "Content Page 01") h := s.owner - nodes := h.findPagesByNodeType(NodeHome) + nodes := h.findAllPagesByNodeType(NodeHome) require.Len(t, nodes, 1) home := nodes[0] @@ -129,7 +144,7 @@ Content Page %02d require.True(t, home.IsNode()) require.False(t, home.IsPage()) - pages := 
h.findPagesByNodeType(NodePage) + pages := h.findAllPagesByNodeType(NodePage) require.Len(t, pages, 4) first := pages[0] @@ -151,7 +166,16 @@ Content Page %02d assertFileContent(t, filepath.Join("public", "sect1", "page", "2", "index.html"), false, "Pag: Page 02") - sections := h.findPagesByNodeType(NodeSection) + sections := h.findAllPagesByNodeType(NodeSection) require.Len(t, sections, 2) + // Check taxonomy list + assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false, + "Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo Content!") + + // Check taxonomy list paginator + assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false, + "Taxonomy Title: Taxonomy Hugo", + "Pag: Page 02") + } diff --git a/hugolib/page.go b/hugolib/page.go index 60fddf55c..9f7fb6655 100644 --- a/hugolib/page.go +++ b/hugolib/page.go @@ -98,6 +98,21 @@ type Page struct { Node GitInfo *gitmap.GitInfo + + // This was added as part of getting the Nodes (taxonomies etc.) to work as + // Pages in Hugo 0.18. + // It is deliberately named similar to Section, but not exported (for now). + // We currently have only one level of section in Hugo, but the page can live + // any number of levels down the file path. + // To support taxonomies like /categories/hugo etc. we will need to keep track + // of that information in a general way. + // So, sections represents the path to the content, i.e. a content file or a + // virtual content file in the situations where a taxonomy or a section etc. + // isn't accomanied by one. + sections []string + + // TODO(bep) np Site added to page, keep? 
+ site *Site } type Source struct { @@ -418,6 +433,7 @@ func newPage(filename string) *Page { Node: Node{NodeType: nodeTypeFromFilename(filename), Keywords: []string{}, Sitemap: Sitemap{Priority: -1}}, Params: make(map[string]interface{}), translations: make(Pages, 0), + sections: sectionsFromFilename(filename), } jww.DEBUG.Println("Reading from", page.File.Path()) @@ -449,15 +465,20 @@ func (p *Page) layouts(l ...string) []string { return p.layoutsCalculated } - // TODO(bep) np + // TODO(bep) np taxonomy etc. switch p.NodeType { case NodeHome: return []string{"index.html", "_default/list.html"} case NodeSection: section := p.Section() return []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"} + case NodeTaxonomy: + singular := p.site.taxonomiesPluralSingular[p.sections[0]] + return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"} } + // Regular Page handled below + if p.Layout != "" { return layouts(p.Type(), p.Layout) } @@ -862,15 +883,6 @@ func (p *Page) update(f interface{}) error { } } - // TODO(bep) np node URL - // Set Node URL - switch p.NodeType { - case NodeHome: - p.URLPath.URL = "" - case NodeSection: - p.URLPath.URL = p.Section() - } - return nil } @@ -1153,6 +1165,8 @@ func (p *Page) TargetPath() (outfile string) { return "index.html" case NodeSection: return filepath.Join(p.Section(), "index.html") + case NodeTaxonomy: + return filepath.Join(append(p.sections, "index.html")...) 
} // Always use URL if it's specified @@ -1214,67 +1228,37 @@ func (p *Page) prepareLayouts() error { return nil } +// TODO(bep) np naming, move some func (p *Page) prepareData(s *Site) error { p.Data = make(map[string]interface{}) switch p.NodeType { case NodePage: case NodeHome: // TODO(bep) np cache the below - // TODO(bep) np - p.Data["Pages"] = s.owner.findPagesByNodeType(NodePage) + p.Data["Pages"] = s.owner.findAllPagesByNodeType(NodePage) case NodeSection: sectionData, ok := s.Sections[p.Section()] if !ok { return fmt.Errorf("Data for section %s not found", p.Section()) } p.Data["Pages"] = sectionData + case NodeTaxonomy: + plural := p.sections[0] + term := p.sections[1] + + singular := s.taxonomiesPluralSingular[plural] + taxonomy := s.Taxonomies[plural].Get(term) + + p.Data[singular] = taxonomy + p.Data["Singular"] = singular + p.Data["Plural"] = plural + p.Data["Pages"] = taxonomy.Pages() + } return nil } -// renderPaginator must be run after the owning Page has been rendered. -// TODO(bep) np -func (p *Page) renderPaginator(s *Site) error { - if p.paginator != nil { - paginatePath := helpers.Config().GetString("paginatePath") - - // write alias for page 1 - // TODO(bep) ml all of these n.addLang ... fix. 
- //permaLink, _ := p.Permalink() - // TODO(bep) np fix - //s.writeDestAlias(p.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), permaLink, nil) - - pagers := p.paginator.Pagers() - - for i, pager := range pagers { - if i == 0 { - // already created - continue - } - - pagerNode := p.copy() - - pagerNode.paginator = pager - if pager.TotalPages() > 0 { - first, _ := pager.page(0) - pagerNode.Date = first.Date - pagerNode.Lastmod = first.Lastmod - } - - pageNumber := i + 1 - htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber)) - htmlBase = p.addLangPathPrefix(htmlBase) - if err := s.renderAndWritePage(pagerNode.Title, - filepath.FromSlash(htmlBase), pagerNode, p.layouts()...); err != nil { - return err - } - - } - } - return nil -} - // Page constains some sync.Once which have a mutex, so we cannot just // copy the Page by value. So for the situations where we need a copy, // the paginators etc., we do it manually here. diff --git a/hugolib/site.go b/hugolib/site.go index 55bf1dc99..ad6625388 100644 --- a/hugolib/site.go +++ b/hugolib/site.go @@ -91,11 +91,16 @@ type Site struct { nodeCache *nodeCache nodeCacheInit sync.Once - Pages Pages - AllPages Pages - rawAllPages Pages - Files []*source.File - Taxonomies TaxonomyList + Pages Pages + AllPages Pages + rawAllPages Pages + Files []*source.File + Taxonomies TaxonomyList + + // Plural is what we get in the folder, so keep track of this mapping + // to get the singular form from that value. 
+ taxonomiesPluralSingular map[string]string + Source source.Input Sections Taxonomy Info SiteInfo @@ -1514,12 +1519,14 @@ func (s *Site) assembleMenus() { func (s *Site) assembleTaxonomies() { s.Taxonomies = make(TaxonomyList) + s.taxonomiesPluralSingular = make(map[string]string) taxonomies := s.Language.GetStringMapString("Taxonomies") jww.INFO.Printf("found taxonomies: %#v\n", taxonomies) - for _, plural := range taxonomies { + for singular, plural := range taxonomies { s.Taxonomies[plural] = make(Taxonomy) + s.taxonomiesPluralSingular[plural] = singular for _, p := range s.Pages { vals := p.getParam(plural, !s.Info.preserveTaxonomyNames) weight := p.GetParam(plural + "_weight") @@ -1569,9 +1576,13 @@ func (s *Site) resetBuildState() { func (s *Site) assembleSections() { s.Sections = make(Taxonomy) s.Info.Sections = s.Sections - - for i, p := range s.Pages { - s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, s.Pages[i]}, s.Info.preserveTaxonomyNames) + regularPages := s.findPagesByNodeType(NodePage) + for i, p := range regularPages { + section := p.Section() + if s.isTaxonomy(section) { + continue + } + s.Sections.add(section, WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames) } for k := range s.Sections { @@ -1588,6 +1599,23 @@ func (s *Site) assembleSections() { } } +func (s *Site) isTaxonomy(section string) bool { + if _, isTaxonomy := s.Taxonomies[section]; isTaxonomy { + return true + } + return false +} + +func (s *Site) findPagesByNodeType(n NodeType) Pages { + var pages Pages + for _, p := range s.Pages { + if p.NodeType == n { + pages = append(pages, p) + } + } + return pages +} + // renderAliases renders shell pages that simply have a redirect in the header. func (s *Site) renderAliases() error { for _, p := range s.Pages { @@ -1645,59 +1673,6 @@ func (s *Site) preparePages() error { return nil } -// renderPages renders pages each corresponding to a markdown file. 
-func (s *Site) renderPages() error { - - results := make(chan error) - pages := make(chan *Page) - errs := make(chan error) - - go errorCollator(results, errs) - - procs := getGoMaxProcs() - - wg := &sync.WaitGroup{} - - for i := 0; i < procs*4; i++ { - wg.Add(1) - go pageRenderer(s, pages, results, wg) - } - - for _, page := range s.Pages { - pages <- page - } - - close(pages) - - wg.Wait() - - close(results) - - err := <-errs - if err != nil { - return fmt.Errorf("Error(s) rendering pages: %s", err) - } - return nil -} - -func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) { - defer wg.Done() - for p := range pages { - targetPath := p.TargetPath() - layouts := p.layouts() - jww.DEBUG.Printf("Render Page to %q with layouts %q", targetPath, layouts) - if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil { - results <- err - } - - if p.NodeType.IsNode() { - if err := p.renderPaginator(s); err != nil { - results <- err - } - } - } -} - func errorCollator(results <-chan error, errs chan<- error) { errMsgs := []string{} for err := range results { @@ -1753,6 +1728,9 @@ type taxRenderInfo struct { // renderTaxonomiesLists renders the listing pages based on the meta data // each unique term within a taxonomy will have a page created func (s *Site) renderTaxonomiesLists(prepare bool) error { + if nodePageFeatureFlag { + return nil + } wg := &sync.WaitGroup{} taxes := make(chan taxRenderInfo) diff --git a/hugolib/site_render.go b/hugolib/site_render.go new file mode 100644 index 000000000..2076e46ae --- /dev/null +++ b/hugolib/site_render.go @@ -0,0 +1,122 @@ +// Copyright 2016 The Hugo Authors. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+package hugolib
+
+import (
+	"fmt"
+	"path"
+	"path/filepath"
+	"sync"
+
+	"github.com/spf13/hugo/helpers"
+
+	jww "github.com/spf13/jwalterweatherman"
+)
+
+// renderPages renders pages each corresponding to a markdown file.
+// TODO(bep) np doc
+func (s *Site) renderPages() error {
+
+	results := make(chan error)
+	pages := make(chan *Page)
+	errs := make(chan error)
+
+	go errorCollator(results, errs)
+
+	procs := getGoMaxProcs()
+
+	wg := &sync.WaitGroup{}
+
+	for i := 0; i < procs*4; i++ {
+		wg.Add(1)
+		go pageRenderer(s, pages, results, wg)
+	}
+
+	for _, page := range s.Pages {
+		pages <- page
+	}
+
+	close(pages)
+
+	wg.Wait()
+
+	close(results)
+
+	err := <-errs
+	if err != nil {
+		return fmt.Errorf("Error(s) rendering pages: %s", err)
+	}
+	return nil
+}
+
+func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.WaitGroup) {
+	defer wg.Done()
+	for p := range pages {
+		targetPath := p.TargetPath()
+		layouts := p.layouts()
+		jww.DEBUG.Printf("Render Page to %q with layouts %q", targetPath, layouts)
+		if err := s.renderAndWritePage("page "+p.FullFilePath(), targetPath, p, s.appendThemeTemplates(layouts)...); err != nil {
+			results <- err
+		}
+
+		if p.NodeType.IsNode() {
+			if err := s.renderPaginator(p); err != nil {
+				results <- err
+			}
+		}
+	}
+}
+
+// renderPaginator must be run after the owning Page has been rendered.
+// TODO(bep) np
+func (s *Site) renderPaginator(p *Page) error {
+	if p.paginator != nil {
+		jww.DEBUG.Printf("Render paginator for page %q", p.Path())
+		paginatePath := helpers.Config().GetString("paginatePath")
+
+		// write alias for page 1
+		// TODO(bep) ml all of these n.addLang ... fix.
+		//permaLink, _ := p.Permalink()
+		// TODO(bep) np fix
+		//s.writeDestAlias(p.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), permaLink, nil)
+
+		pagers := p.paginator.Pagers()
+
+		for i, pager := range pagers {
+			if i == 0 {
+				// already created
+				continue
+			}
+
+			pagerNode := p.copy()
+
+			pagerNode.paginator = pager
+			if pager.TotalPages() > 0 {
+				first, _ := pager.page(0)
+				pagerNode.Date = first.Date
+				pagerNode.Lastmod = first.Lastmod
+			}
+
+			pageNumber := i + 1
+			htmlBase := path.Join(p.URLPath.URL, fmt.Sprintf("/%s/%d", paginatePath, pageNumber))
+			htmlBase = p.addLangPathPrefix(htmlBase)
+			if err := s.renderAndWritePage(pagerNode.Title,
+				filepath.FromSlash(htmlBase), pagerNode, p.layouts()...); err != nil {
+				return err
+			}
+
+		}
+	}
+	return nil
+}