diff --git a/hugolib/node.go b/hugolib/node.go
index 070290521..13e124608 100644
--- a/hugolib/node.go
+++ b/hugolib/node.go
@@ -14,6 +14,7 @@
 package hugolib
 
 import (
+	"fmt"
 	"html/template"
 	"path"
 	"path/filepath"
@@ -39,6 +40,7 @@ const (
 	NodeHome
 	NodeSection
 	NodeTaxonomy
+	NodeTaxonomyTerms
 )
 
 func (p NodeType) IsNode() bool {
@@ -343,7 +345,9 @@ func (n *Node) addLangFilepathPrefix(outfile string) string {
 
 func sectionsFromFilename(filename string) []string {
 	dir, _ := filepath.Split(filename)
-	return strings.Split(dir, helpers.FilePathSeparator)
+	dir = strings.TrimSuffix(dir, helpers.FilePathSeparator)
+	sections := strings.Split(dir, helpers.FilePathSeparator)
+	return sections
 }
 
 // TODO(bep) np node identificator
@@ -364,13 +368,15 @@ func nodeTypeFromFilename(filename string) NodeType {
 func (p *Page) setNodeTypeVars(s *Site) {
 	// TODO(bep) np taxonomies etc.
 	if p.NodeType == NodeUnknown {
-		// This is either a taxonomy or a section
-		if s.isTaxonomy(p.Section()) {
-			p.NodeType = NodeTaxonomy
-		} else {
-			p.NodeType = NodeSection
+		// This is either a taxonomy list, taxonomy term or a section
+		nodeType := s.nodeTypeFromSections(p.sections)
+
+		if nodeType == NodeUnknown {
+			panic(fmt.Sprintf("Unable to determine node type from %q", p.sections))
 		}
+		p.NodeType = nodeType
+
 	}
 
 	// TODO(bep) np node URL
 	// Set Node URL
@@ -381,6 +387,8 @@ func (p *Page) setNodeTypeVars(s *Site) {
 		p.URLPath.URL = p.Section()
 	case NodeTaxonomy:
 		p.URLPath.URL = path.Join(p.sections...)
+	case NodeTaxonomyTerms:
+		p.URLPath.URL = path.Join(p.sections...)
 	}
 
 	p.site = s
diff --git a/hugolib/node_as_page_test.go b/hugolib/node_as_page_test.go
index 8527030bc..090323cec 100644
--- a/hugolib/node_as_page_test.go
+++ b/hugolib/node_as_page_test.go
@@ -31,8 +31,8 @@ import (
 */
 
 func TestNodesAsPage(t *testing.T) {
-	jww.SetStdoutThreshold(jww.LevelDebug)
-	//jww.SetStdoutThreshold(jww.LevelFatal)
+	//jww.SetStdoutThreshold(jww.LevelDebug)
+	jww.SetStdoutThreshold(jww.LevelFatal)
 
 	nodePageFeatureFlag = true
 	defer toggleNodePageFeatureFlag()
@@ -71,6 +71,18 @@ Section2 **Content!**
 title: Taxonomy Hugo
 ---
 Taxonomy Hugo **Content!**
+`)
+
+	writeSource(t, filepath.Join("content", "categories", "web", "_node.md"), `---
+title: Taxonomy Web
+---
+Taxonomy Web **Content!**
+`)
+
+	writeSource(t, filepath.Join("content", "categories", "_node.md"), `---
+title: Taxonomy Term Categories
+---
+Taxonomy Term Categories **Content!**
 `)
 
 	writeSource(t, filepath.Join("layouts", "index.html"), `
@@ -96,6 +108,7 @@ Section Content: {{ .Content }}
 {{ end }}
 `)
 
+	// Taxonomy lists
 	writeSource(t, filepath.Join("layouts", "_default", "taxonomy.html"), `
 Taxonomy Title: {{ .Title }}
 Taxonomy Content: {{ .Content }}
@@ -103,6 +116,15 @@ Taxonomy Content: {{ .Content }}
 {{ range .Paginator.Pages }}
 	Pag: {{ .Title }}
 {{ end }}
+`)
+
+	// Taxonomy terms
+	writeSource(t, filepath.Join("layouts", "_default", "terms.html"), `
+Taxonomy Terms Title: {{ .Title }}
+Taxonomy Terms Content: {{ .Content }}
+{{ range $key, $value := .Data.Terms }}
+	k/v: {{ $key }} / {{ printf "%=v" $value }}
+{{ end }}
 `)
 
 	// Add some regular pages
@@ -113,7 +135,10 @@ Taxonomy Content: {{ .Content }}
 		}
 		writeSource(t, filepath.Join("content", sect, fmt.Sprintf("regular%d.md", i)), fmt.Sprintf(`---
 title: Page %02d
-categories: Hugo
+categories: [
+  "Hugo",
+  "Web"
+]
 ---
 Content Page %02d
 `, i, i))
 	}
@@ -169,13 +194,22 @@ Content Page %02d
 	sections := h.findAllPagesByNodeType(NodeSection)
 	require.Len(t, sections, 2)
 
-	// Check taxonomy list
+	// Check taxonomy lists
 	assertFileContent(t, filepath.Join("public", "categories", "hugo", "index.html"), false,
 		"Taxonomy Title: Taxonomy Hugo", "Taxonomy Hugo Content!")
 
+	assertFileContent(t, filepath.Join("public", "categories", "web", "index.html"), false,
+		"Taxonomy Title: Taxonomy Web", "Taxonomy Web Content!")
+
 	// Check taxonomy list paginator
 	assertFileContent(t, filepath.Join("public", "categories", "hugo", "page", "2", "index.html"), false,
 		"Taxonomy Title: Taxonomy Hugo", "Pag: Page 02")
 
+	// Check taxonomy terms
+	assertFileContent(t, filepath.Join("public", "categories", "index.html"), false,
+		"Taxonomy Terms Title: Taxonomy Term Categories", "Taxonomy Term Categories Content!", "k/v: hugo")
+
+	// There are no pages to paginate over in the taxonomy terms.
+
 }
diff --git a/hugolib/page.go b/hugolib/page.go
index 9f7fb6655..c8ce5ca4a 100644
--- a/hugolib/page.go
+++ b/hugolib/page.go
@@ -475,6 +475,9 @@ func (p *Page) layouts(l ...string) []string {
 	case NodeTaxonomy:
 		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
 		return []string{"taxonomy/" + singular + ".html", "indexes/" + singular + ".html", "_default/taxonomy.html", "_default/list.html"}
+	case NodeTaxonomyTerms:
+		singular := p.site.taxonomiesPluralSingular[p.sections[0]]
+		return []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
 	}
 
 	// Regular Page handled below
@@ -1167,6 +1170,8 @@ func (p *Page) TargetPath() (outfile string) {
 		return filepath.Join(p.Section(), "index.html")
 	case NodeTaxonomy:
 		return filepath.Join(append(p.sections, "index.html")...)
+	case NodeTaxonomyTerms:
+		return filepath.Join(append(p.sections, "index.html")...)
 	}
 
 	// Always use URL if it's specified
@@ -1253,7 +1258,16 @@ func (p *Page) prepareData(s *Site) error {
 		p.Data["Singular"] = singular
 		p.Data["Plural"] = plural
 		p.Data["Pages"] = taxonomy.Pages()
+	case NodeTaxonomyTerms:
+		plural := p.sections[0]
+		singular := s.taxonomiesPluralSingular[plural]
+		p.Data["Singular"] = singular
+		p.Data["Plural"] = plural
+		p.Data["Terms"] = s.Taxonomies[plural]
+		// keep the following just for legacy reasons
+		p.Data["OrderedIndex"] = p.Data["Terms"]
+		p.Data["Index"] = p.Data["Terms"]
 	}
 
 	return nil
diff --git a/hugolib/site.go b/hugolib/site.go
index ad6625388..8eef37b0b 100644
--- a/hugolib/site.go
+++ b/hugolib/site.go
@@ -1578,11 +1578,7 @@ func (s *Site) assembleSections() {
 	s.Info.Sections = s.Sections
 	regularPages := s.findPagesByNodeType(NodePage)
 	for i, p := range regularPages {
-		section := p.Section()
-		if s.isTaxonomy(section) {
-			continue
-		}
-		s.Sections.add(section, WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
+		s.Sections.add(p.Section(), WeightedPage{s.Pages[i].Weight, regularPages[i]}, s.Info.preserveTaxonomyNames)
 	}
 
 	for k := range s.Sections {
@@ -1599,11 +1595,14 @@ func (s *Site) assembleSections() {
 	}
 }
 
-func (s *Site) isTaxonomy(section string) bool {
-	if _, isTaxonomy := s.Taxonomies[section]; isTaxonomy {
-		return true
+func (s *Site) nodeTypeFromSections(sections []string) NodeType {
+	if _, isTaxonomy := s.Taxonomies[sections[0]]; isTaxonomy {
+		if len(sections) == 1 {
+			return NodeTaxonomyTerms
+		}
+		return NodeTaxonomy
 	}
-	return false
+	return NodeSection
 }
 
 func (s *Site) findPagesByNodeType(n NodeType) Pages {
@@ -1891,6 +1890,9 @@ func taxonomyRenderer(prepare bool, s *Site, taxes <-chan taxRenderInfo, results
 
 // renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
 func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
+	if nodePageFeatureFlag {
+		return nil
+	}
 	taxonomies := s.Language.GetStringMapString("Taxonomies")
 	for singular, plural := range taxonomies {
 		n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)
diff --git a/hugolib/site_render.go b/hugolib/site_render.go
index 2076e46ae..c91a78413 100644
--- a/hugolib/site_render.go
+++ b/hugolib/site_render.go
@@ -70,7 +70,8 @@ func pageRenderer(s *Site, pages <-chan *Page, results chan<- error, wg *sync.Wa
 			results <- err
 		}
 
-		if p.NodeType.IsNode() {
+		// Taxonomy terms have no page set to paginate, so skip that for now.
+		if p.NodeType.IsNode() && p.NodeType != NodeTaxonomyTerms {
 			if err := s.renderPaginator(p); err != nil {
 				results <- err
 			}
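
Usage note (not part of the patch): with the NodeTaxonomyTerms layout lookup in page.go and the Data keys set in prepareData ("Terms", "Singular", "Plural"), a site-level layouts/_default/terms.html can render the terms list. The markup below is only an illustrative sketch under those assumptions; the anchor URL scheme and the use of urlize/len are assumptions, not taken from the patch.

<h1>{{ .Title }}</h1>
{{ .Content }}
<ul>
{{ range $key, $value := .Data.Terms }}
  <!-- $key is the term, $value its weighted pages; link follows the /<plural>/<term>/ layout used in the test -->
  <li><a href="/{{ $.Data.Plural }}/{{ $key | urlize }}/">{{ $key }}</a> ({{ len $value }} pages)</li>
{{ end }}
</ul>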