Create a Node map to get proper node translations

In a multi-language setup, before this commit the Node's Translations() method
would return some "dummy nodes" that would point to the correct page (Permalink),
but would not be the same as the node it points to -- it would not have the translated
title etc.

The node creation is, however, so mingled with rendering, which is too early to have any global state,
so the nodes have to be split into a prepare and a render phase. This commit does that with as small
a change as possible. This implementation is a temp solution until we fix #2297.

Updates #2309
This commit is contained in:
Bjørn Erik Pedersen 2016-08-09 20:06:15 +02:00
parent d1212307f2
commit a07293cf97
6 changed files with 369 additions and 120 deletions

View file

@ -39,6 +39,10 @@ type HugoSites struct {
runMode runmode runMode runmode
multilingual *Multilingual multilingual *Multilingual
// Maps internalID to a set of nodes.
nodeMap map[string]Nodes
nodeMapMu sync.Mutex
} }
// NewHugoSites creates a new collection of sites given the input sites, building // NewHugoSites creates a new collection of sites given the input sites, building
@ -50,7 +54,7 @@ func newHugoSites(sites ...*Site) (*HugoSites, error) {
return nil, err return nil, err
} }
h := &HugoSites{multilingual: langConfig, Sites: sites} h := &HugoSites{multilingual: langConfig, Sites: sites, nodeMap: make(map[string]Nodes)}
for _, s := range sites { for _, s := range sites {
s.owner = h s.owner = h
@ -92,14 +96,39 @@ func createSitesFromConfig() ([]*Site, error) {
return sites, nil return sites, nil
} }
func (h *HugoSites) addNode(nodeID string, node *Node) {
h.nodeMapMu.Lock()
if nodes, ok := h.nodeMap[nodeID]; ok {
h.nodeMap[nodeID] = append(nodes, node)
} else {
h.nodeMap[nodeID] = Nodes{node}
}
h.nodeMapMu.Unlock()
}
func (h *HugoSites) getNodes(nodeID string) Nodes {
// At this point it is read only, so no need to lock.
if nodeID != "" {
if nodes, ok := h.nodeMap[nodeID]; ok {
return nodes
}
}
// Paginator pages will not have related nodes.
return Nodes{}
}
// Reset resets the sites, making it ready for a full rebuild. // Reset resets the sites, making it ready for a full rebuild.
func (h *HugoSites) reset() { func (h *HugoSites) reset() {
h.nodeMap = make(map[string]Nodes)
for i, s := range h.Sites { for i, s := range h.Sites {
h.Sites[i] = s.reset() h.Sites[i] = s.reset()
} }
} }
func (h *HugoSites) reCreateFromConfig() error { func (h *HugoSites) reCreateFromConfig() error {
h.nodeMap = make(map[string]Nodes)
sites, err := createSitesFromConfig() sites, err := createSitesFromConfig()
if err != nil { if err != nil {
@ -236,6 +265,7 @@ func (h *HugoSites) Rebuild(config BuildCfg, events ...fsnotify.Event) error {
firstSite := h.Sites[0] firstSite := h.Sites[0]
h.nodeMap = make(map[string]Nodes)
for _, s := range h.Sites { for _, s := range h.Sites {
s.resetBuildState() s.resetBuildState()
} }
@ -359,6 +389,23 @@ func (h *HugoSites) setupTranslations(master *Site) {
// Shortcode handling is the main task in here. // Shortcode handling is the main task in here.
// TODO(bep) We need to look at the whole handler-chain construct with the below in mind. // TODO(bep) We need to look at the whole handler-chain construct with the below in mind.
func (h *HugoSites) preRender() error { func (h *HugoSites) preRender() error {
for _, s := range h.Sites {
// Run "render prepare"
if err := s.renderHomePage(true); err != nil {
return err
}
if err := s.renderTaxonomiesLists(true); err != nil {
return err
}
if err := s.renderListsOfTaxonomyTerms(true); err != nil {
return err
}
if err := s.renderSectionLists(true); err != nil {
return err
}
}
pageChan := make(chan *Page) pageChan := make(chan *Page)
wg := &sync.WaitGroup{} wg := &sync.WaitGroup{}
@ -418,7 +465,7 @@ func (h *HugoSites) preRender() error {
} }
// Pages returns all pages for all sites. // Pages returns all pages for all sites.
func (h HugoSites) Pages() Pages { func (h *HugoSites) Pages() Pages {
return h.Sites[0].AllPages return h.Sites[0].AllPages
} }

View file

@ -59,7 +59,7 @@ func doTestMultiSitesMainLangInRoot(t *testing.T, defaultInSubDir bool) {
t.Fatalf("Failed to build sites: %s", err) t.Fatalf("Failed to build sites: %s", err)
} }
require.Len(t, sites.Sites, 2) require.Len(t, sites.Sites, 4)
enSite := sites.Sites[0] enSite := sites.Sites[0]
frSite := sites.Sites[1] frSite := sites.Sites[1]
@ -177,8 +177,8 @@ func TestMultiSitesBuild(t *testing.T) {
if len(enSite.Pages) != 3 { if len(enSite.Pages) != 3 {
t.Fatal("Expected 3 english pages") t.Fatal("Expected 3 english pages")
} }
assert.Len(t, enSite.Source.Files(), 11, "should have 11 source files") assert.Len(t, enSite.Source.Files(), 13, "should have 13 source files")
assert.Len(t, enSite.AllPages, 6, "should have 6 total pages (including translations)") assert.Len(t, enSite.AllPages, 8, "should have 8 total pages (including translations)")
doc1en := enSite.Pages[0] doc1en := enSite.Pages[0]
permalink, err := doc1en.Permalink() permalink, err := doc1en.Permalink()
@ -230,7 +230,7 @@ func TestMultiSitesBuild(t *testing.T) {
assert.Equal(t, "fr", frSite.Language.Lang) assert.Equal(t, "fr", frSite.Language.Lang)
assert.Len(t, frSite.Pages, 3, "should have 3 pages") assert.Len(t, frSite.Pages, 3, "should have 3 pages")
assert.Len(t, frSite.AllPages, 6, "should have 6 total pages (including translations)") assert.Len(t, frSite.AllPages, 8, "should have 8 total pages (including translations)")
for _, frenchPage := range frSite.Pages { for _, frenchPage := range frSite.Pages {
assert.Equal(t, "fr", frenchPage.Lang()) assert.Equal(t, "fr", frenchPage.Lang())
@ -240,6 +240,36 @@ func TestMultiSitesBuild(t *testing.T) {
languageRedirect := readDestination(t, "public/index.html") languageRedirect := readDestination(t, "public/index.html")
require.True(t, strings.Contains(languageRedirect, "0; url=http://example.com/blog/fr"), languageRedirect) require.True(t, strings.Contains(languageRedirect, "0; url=http://example.com/blog/fr"), languageRedirect)
// Check node translations
homeEn := enSite.getNode("home-0")
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
require.Equal(t, "nn", homeEn.Translations()[1].Lang())
require.Equal(t, "Nynorsk", homeEn.Translations()[1].Title)
require.Equal(t, "nb", homeEn.Translations()[2].Lang())
require.Equal(t, "Bokmål", homeEn.Translations()[2].Title)
sectFr := frSite.getNode("sect-sect-0")
require.NotNil(t, sectFr)
require.Equal(t, "fr", sectFr.Lang())
require.Len(t, sectFr.Translations(), 1)
require.Equal(t, "en", sectFr.Translations()[0].Lang())
require.Equal(t, "Sects", sectFr.Translations()[0].Title)
nnSite := sites.Sites[2]
require.Equal(t, "nn", nnSite.Language.Lang)
taxNn := nnSite.getNode("taxlist-lag-0")
require.NotNil(t, taxNn)
require.Len(t, taxNn.Translations(), 1)
require.Equal(t, "nb", taxNn.Translations()[0].Lang())
taxTermNn := nnSite.getNode("tax-lag-sogndal-0")
require.NotNil(t, taxTermNn)
require.Len(t, taxTermNn.Translations(), 1)
require.Equal(t, "nb", taxTermNn.Translations()[0].Lang())
// Check sitemap(s) // Check sitemap(s)
sitemapIndex := readDestination(t, "public/sitemap.xml") sitemapIndex := readDestination(t, "public/sitemap.xml")
require.True(t, strings.Contains(sitemapIndex, "<loc>http:/example.com/blog/en/sitemap.xml</loc>"), sitemapIndex) require.True(t, strings.Contains(sitemapIndex, "<loc>http:/example.com/blog/en/sitemap.xml</loc>"), sitemapIndex)
@ -338,7 +368,7 @@ func TestMultiSitesRebuild(t *testing.T) {
}, },
func(t *testing.T) { func(t *testing.T) {
assert.Len(t, enSite.Pages, 4) assert.Len(t, enSite.Pages, 4)
assert.Len(t, enSite.AllPages, 8) assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4) assert.Len(t, frSite.Pages, 4)
assert.Equal(t, "new_fr_1", frSite.Pages[3].Title) assert.Equal(t, "new_fr_1", frSite.Pages[3].Title)
assert.Equal(t, "new_en_2", enSite.Pages[0].Title) assert.Equal(t, "new_en_2", enSite.Pages[0].Title)
@ -391,7 +421,7 @@ func TestMultiSitesRebuild(t *testing.T) {
[]fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}}, []fsnotify.Event{{Name: "layouts/_default/single.html", Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
assert.Len(t, enSite.Pages, 4) assert.Len(t, enSite.Pages, 4)
assert.Len(t, enSite.AllPages, 8) assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4) assert.Len(t, frSite.Pages, 4)
doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html") doc1 := readDestination(t, "public/en/sect/doc1-slug/index.html")
assert.True(t, strings.Contains(doc1, "Template Changed"), doc1) assert.True(t, strings.Contains(doc1, "Template Changed"), doc1)
@ -408,12 +438,18 @@ func TestMultiSitesRebuild(t *testing.T) {
[]fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}}, []fsnotify.Event{{Name: "i18n/fr.yaml", Op: fsnotify.Write}},
func(t *testing.T) { func(t *testing.T) {
assert.Len(t, enSite.Pages, 4) assert.Len(t, enSite.Pages, 4)
assert.Len(t, enSite.AllPages, 8) assert.Len(t, enSite.AllPages, 10)
assert.Len(t, frSite.Pages, 4) assert.Len(t, frSite.Pages, 4)
docEn := readDestination(t, "public/en/sect/doc1-slug/index.html") docEn := readDestination(t, "public/en/sect/doc1-slug/index.html")
assert.True(t, strings.Contains(docEn, "Hello"), "No Hello") assert.True(t, strings.Contains(docEn, "Hello"), "No Hello")
docFr := readDestination(t, "public/fr/sect/doc1/index.html") docFr := readDestination(t, "public/fr/sect/doc1/index.html")
assert.True(t, strings.Contains(docFr, "Salut"), "No Salut") assert.True(t, strings.Contains(docFr, "Salut"), "No Salut")
homeEn := enSite.getNode("home-0")
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 3)
require.Equal(t, "fr", homeEn.Translations()[0].Lang())
}, },
}, },
} { } {
@ -469,12 +505,12 @@ func TestAddNewLanguage(t *testing.T) {
newConfig := multiSiteTomlConfig + ` newConfig := multiSiteTomlConfig + `
[Languages.no] [Languages.sv]
weight = 15 weight = 15
title = "Norsk" title = "Svenska"
` `
writeNewContentFile(t, "Norwegian Contentfile", "2016-01-01", "content/sect/doc1.no.md", 10) writeNewContentFile(t, "Swedish Contentfile", "2016-01-01", "content/sect/doc1.sv.md", 10)
// replace the config // replace the config
writeSource(t, "multilangconfig.toml", newConfig) writeSource(t, "multilangconfig.toml", newConfig)
@ -486,27 +522,32 @@ title = "Norsk"
t.Fatalf("Failed to rebuild sites: %s", err) t.Fatalf("Failed to rebuild sites: %s", err)
} }
require.Len(t, sites.Sites, 3, fmt.Sprintf("Len %d", len(sites.Sites))) require.Len(t, sites.Sites, 5, fmt.Sprintf("Len %d", len(sites.Sites)))
// The Norwegian site should be put in the middle (language weight=15) // The Swedish site should be put in the middle (language weight=15)
enSite := sites.Sites[0] enSite := sites.Sites[0]
noSite := sites.Sites[1] svSite := sites.Sites[1]
frSite := sites.Sites[2] frSite := sites.Sites[2]
require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang) require.True(t, enSite.Language.Lang == "en", enSite.Language.Lang)
require.True(t, noSite.Language.Lang == "no", noSite.Language.Lang) require.True(t, svSite.Language.Lang == "sv", svSite.Language.Lang)
require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang) require.True(t, frSite.Language.Lang == "fr", frSite.Language.Lang)
homeEn := enSite.getNode("home-0")
require.NotNil(t, homeEn)
require.Len(t, homeEn.Translations(), 4)
require.Equal(t, "sv", homeEn.Translations()[0].Lang())
require.Len(t, enSite.Pages, 3) require.Len(t, enSite.Pages, 3)
require.Len(t, frSite.Pages, 3) require.Len(t, frSite.Pages, 3)
// Verify Norwegian site // Verify Swedish site
require.Len(t, noSite.Pages, 1) require.Len(t, svSite.Pages, 1)
noPage := noSite.Pages[0] svPage := svSite.Pages[0]
require.Equal(t, "Norwegian Contentfile", noPage.Title) require.Equal(t, "Swedish Contentfile", svPage.Title)
require.Equal(t, "no", noPage.Lang()) require.Equal(t, "sv", svPage.Lang())
require.Len(t, noPage.Translations(), 2) require.Len(t, svPage.Translations(), 2)
require.Len(t, noPage.AllTranslations(), 3) require.Len(t, svPage.AllTranslations(), 3)
require.Equal(t, "en", noPage.Translations()[0].Lang()) require.Equal(t, "en", svPage.Translations()[0].Lang())
//noFile := readDestination(t, "/public/no/doc1/index.html") //noFile := readDestination(t, "/public/no/doc1/index.html")
//require.True(t, strings.Contains("foo", noFile), noFile) //require.True(t, strings.Contains("foo", noFile), noFile)
@ -543,6 +584,18 @@ weight = 20
title = "Français" title = "Français"
[Languages.fr.Taxonomies] [Languages.fr.Taxonomies]
plaque = "plaques" plaque = "plaques"
[Languages.nn]
weight = 30
title = "Nynorsk"
[Languages.nn.Taxonomies]
lag = "lag"
[Languages.nb]
weight = 40
title = "Bokmål"
[Languages.nb.Taxonomies]
lag = "lag"
` `
func createMultiTestSites(t *testing.T, tomlConfig string) *HugoSites { func createMultiTestSites(t *testing.T, tomlConfig string) *HugoSites {
@ -686,6 +739,24 @@ publishdate: "2000-01-06"
draft: true draft: true
--- ---
# Draft # Draft
`)},
{filepath.FromSlash("stats/tax.nn.md"), []byte(`---
title: Tax NN
publishdate: "2000-01-06"
weight: 1001
lag:
- Sogndal
---
# Tax NN
`)},
{filepath.FromSlash("stats/tax.nb.md"), []byte(`---
title: Tax NB
publishdate: "2000-01-06"
weight: 1002
lag:
- Sogndal
---
# Tax NB
`)}, `)},
} }
@ -713,7 +784,7 @@ draft: true
t.Fatalf("Failed to create sites: %s", err) t.Fatalf("Failed to create sites: %s", err)
} }
if len(sites.Sites) != 2 { if len(sites.Sites) != 4 {
t.Fatalf("Got %d sites", len(sites.Sites)) t.Fatalf("Got %d sites", len(sites.Sites))
} }

View file

@ -27,6 +27,7 @@ import (
"github.com/spf13/hugo/source" "github.com/spf13/hugo/source"
"github.com/spf13/viper" "github.com/spf13/viper"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
) )
const ( const (
@ -414,9 +415,13 @@ func doTestSectionPagesMenu(canonifyUrls bool, t *testing.T) {
fishySectionPages := s.Sections["fish-and-chips"] fishySectionPages := s.Sections["fish-and-chips"]
assert.Equal(t, 1, len(fishySectionPages)) assert.Equal(t, 1, len(fishySectionPages))
nodeFirst := s.newSectionListNode("First", "first", firstSectionPages) nodeFirst := s.getNode("sect-first-0")
nodeSecond := s.newSectionListNode("Second Section", "second-section", secondSectionPages) require.NotNil(t, nodeFirst)
nodeFishy := s.newSectionListNode("Fish and Chips", "fish-and-chips", fishySectionPages) nodeSecond := s.getNode("sect-second-section-0")
require.NotNil(t, nodeSecond)
nodeFishy := s.getNode("sect-Fish and Chips-0")
require.NotNil(t, nodeFishy)
firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first") firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section") secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips") fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
@ -472,7 +477,7 @@ func TestTaxonomyNodeMenu(t *testing.T) {
&MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false}, &MenuEntry{Name: "Somewhere else", URL: "/somewhereelse"}, false, false},
} { } {
n, _ := s.newTaxonomyNode(this.taxInfo) n, _ := s.newTaxonomyNode(true, this.taxInfo, i)
isMenuCurrent := n.IsMenuCurrent(this.menu, this.menuItem) isMenuCurrent := n.IsMenuCurrent(this.menu, this.menuItem)
hasMenuCurrent := n.HasMenuCurrent(this.menu, this.menuItem) hasMenuCurrent := n.HasMenuCurrent(this.menu, this.menuItem)
@ -544,7 +549,8 @@ func TestHomeNodeMenu(t *testing.T) {
s := setupMenuTests(t, menuPageSources) s := setupMenuTests(t, menuPageSources)
home := s.newHomeNode() home := s.getNode("home-0")
homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()} homeMenuEntry := &MenuEntry{Name: home.Title, URL: home.URL()}
for i, this := range []struct { for i, this := range []struct {

View file

@ -17,7 +17,7 @@ import (
"html/template" "html/template"
"path" "path"
"path/filepath" "path/filepath"
"sort" //"sort"
"strings" "strings"
"sync" "sync"
"time" "time"
@ -30,6 +30,11 @@ import (
) )
type Node struct { type Node struct {
// a natural key that should be unique for this site
// for the home page this will typically be "home", but it can anything
// as long as it is the same for repeated builds.
nodeID string
RSSLink template.HTML RSSLink template.HTML
Site *SiteInfo `json:"-"` Site *SiteInfo `json:"-"`
// layout string // layout string
@ -283,23 +288,8 @@ func (n *Node) IsTranslated() bool {
func (n *Node) initTranslations() { func (n *Node) initTranslations() {
n.translationsInit.Do(func() { n.translationsInit.Do(func() {
if n.translations != nil { n.translations = n.Site.owner.getNodes(n.nodeID)
return //sort.Sort(n.translations)
}
n.translations = make(Nodes, 0)
for _, l := range n.Site.Languages {
if l == n.language {
n.translations = append(n.translations, n)
continue
}
translation := *n
translation.language = l
translation.translations = n.translations
n.translations = append(n.translations, &translation)
}
sort.Sort(n.translations)
}) })
} }

View file

@ -224,8 +224,8 @@ func doTestPaginator(t *testing.T, useViper bool) {
} }
pages := createTestPages(12) pages := createTestPages(12)
s := newSiteDefaultLang() s := newSiteDefaultLang()
n1 := s.newHomeNode() n1 := s.newHomeNode(true, 0)
n2 := s.newHomeNode() n2 := s.newHomeNode(true, 1)
n1.Data["Pages"] = pages n1.Data["Pages"] = pages
var paginator1 *Pager var paginator1 *Pager
@ -261,7 +261,7 @@ func TestPaginatorWithNegativePaginate(t *testing.T) {
viper.Set("paginate", -1) viper.Set("paginate", -1)
s := newSiteDefaultLang() s := newSiteDefaultLang()
_, err := s.newHomeNode().Paginator() _, err := s.newHomeNode(true, 0).Paginator()
assert.NotNil(t, err) assert.NotNil(t, err)
} }
@ -283,8 +283,8 @@ func doTestPaginate(t *testing.T, useViper bool) {
pages := createTestPages(6) pages := createTestPages(6)
s := newSiteDefaultLang() s := newSiteDefaultLang()
n1 := s.newHomeNode() n1 := s.newHomeNode(true, 0)
n2 := s.newHomeNode() n2 := s.newHomeNode(true, 1)
var paginator1, paginator2 *Pager var paginator1, paginator2 *Pager
var err error var err error
@ -316,7 +316,7 @@ func doTestPaginate(t *testing.T, useViper bool) {
func TestInvalidOptions(t *testing.T) { func TestInvalidOptions(t *testing.T) {
s := newSiteDefaultLang() s := newSiteDefaultLang()
n1 := s.newHomeNode() n1 := s.newHomeNode(true, 0)
_, err := n1.Paginate(createTestPages(1), 1, 2) _, err := n1.Paginate(createTestPages(1), 1, 2)
assert.NotNil(t, err) assert.NotNil(t, err)
_, err = n1.Paginator(1, 2) _, err = n1.Paginator(1, 2)
@ -330,7 +330,7 @@ func TestPaginateWithNegativePaginate(t *testing.T) {
viper.Set("paginate", -1) viper.Set("paginate", -1)
s := newSiteDefaultLang() s := newSiteDefaultLang()
_, err := s.newHomeNode().Paginate(createTestPages(2)) _, err := s.newHomeNode(true, 0).Paginate(createTestPages(2))
assert.NotNil(t, err) assert.NotNil(t, err)
} }
@ -352,8 +352,8 @@ func TestPaginatorFollowedByPaginateShouldFail(t *testing.T) {
viper.Set("paginate", 10) viper.Set("paginate", 10)
s := newSiteDefaultLang() s := newSiteDefaultLang()
n1 := s.newHomeNode() n1 := s.newHomeNode(true, 0)
n2 := s.newHomeNode() n2 := s.newHomeNode(true, 1)
_, err := n1.Paginator() _, err := n1.Paginator()
assert.Nil(t, err) assert.Nil(t, err)
@ -370,8 +370,8 @@ func TestPaginateFollowedByDifferentPaginateShouldFail(t *testing.T) {
viper.Set("paginate", 10) viper.Set("paginate", 10)
s := newSiteDefaultLang() s := newSiteDefaultLang()
n1 := s.newHomeNode() n1 := s.newHomeNode(true, 0)
n2 := s.newHomeNode() n2 := s.newHomeNode(true, 1)
p1 := createTestPages(2) p1 := createTestPages(2)
p2 := createTestPages(10) p2 := createTestPages(10)

View file

@ -58,6 +58,15 @@ var (
distinctFeedbackLogger = helpers.NewDistinctFeedbackLogger() distinctFeedbackLogger = helpers.NewDistinctFeedbackLogger()
) )
type nodeCache struct {
m map[string]*Node
sync.RWMutex
}
func (c *nodeCache) reset() {
c.m = make(map[string]*Node)
}
// Site contains all the information relevant for constructing a static // Site contains all the information relevant for constructing a static
// site. The basic flow of information is as follows: // site. The basic flow of information is as follows:
// //
@ -76,7 +85,12 @@ var (
// //
// 5. The entire collection of files is written to disk. // 5. The entire collection of files is written to disk.
type Site struct { type Site struct {
owner *HugoSites owner *HugoSites
// Used internally to discover duplicates.
nodeCache *nodeCache
nodeCacheInit sync.Once
Pages Pages Pages Pages
AllPages Pages AllPages Pages
rawAllPages Pages rawAllPages Pages
@ -167,6 +181,7 @@ type SiteInfo struct {
paginationPageCount uint64 paginationPageCount uint64
Data *map[string]interface{} Data *map[string]interface{}
owner *HugoSites
multilingual *Multilingual multilingual *Multilingual
Language *helpers.Language Language *helpers.Language
LanguagePrefix string LanguagePrefix string
@ -773,13 +788,15 @@ func (s *Site) render() (err error) {
return return
} }
s.timerStep("render and write aliases") s.timerStep("render and write aliases")
if err = s.renderTaxonomiesLists(); err != nil { if err = s.renderTaxonomiesLists(false); err != nil {
return return
} }
s.timerStep("render and write taxonomies") s.timerStep("render and write taxonomies")
s.renderListsOfTaxonomyTerms() if err = s.renderListsOfTaxonomyTerms(false); err != nil {
return
}
s.timerStep("render & write taxonomy lists") s.timerStep("render & write taxonomy lists")
if err = s.renderSectionLists(); err != nil { if err = s.renderSectionLists(false); err != nil {
return return
} }
s.timerStep("render and write lists") s.timerStep("render and write lists")
@ -787,7 +804,7 @@ func (s *Site) render() (err error) {
return return
} }
s.timerStep("render and write pages") s.timerStep("render and write pages")
if err = s.renderHomePage(); err != nil { if err = s.renderHomePage(false); err != nil {
return return
} }
s.timerStep("render and write homepage") s.timerStep("render and write homepage")
@ -904,6 +921,7 @@ func (s *Site) initializeSiteInfo() {
Params: params, Params: params,
Permalinks: permalinks, Permalinks: permalinks,
Data: &s.Data, Data: &s.Data,
owner: s.owner,
} }
} }
@ -1372,6 +1390,8 @@ func (s *Site) assembleTaxonomies() {
// Prepare site for a new full build. // Prepare site for a new full build.
func (s *Site) resetBuildState() { func (s *Site) resetBuildState() {
s.nodeCache.reset()
s.Pages = make(Pages, 0) s.Pages = make(Pages, 0)
s.AllPages = make(Pages, 0) s.AllPages = make(Pages, 0)
@ -1557,7 +1577,7 @@ type taxRenderInfo struct {
// renderTaxonomiesLists renders the listing pages based on the meta data // renderTaxonomiesLists renders the listing pages based on the meta data
// each unique term within a taxonomy will have a page created // each unique term within a taxonomy will have a page created
func (s *Site) renderTaxonomiesLists() error { func (s *Site) renderTaxonomiesLists(prepare bool) error {
wg := &sync.WaitGroup{} wg := &sync.WaitGroup{}
taxes := make(chan taxRenderInfo) taxes := make(chan taxRenderInfo)
@ -1567,7 +1587,7 @@ func (s *Site) renderTaxonomiesLists() error {
for i := 0; i < procs*4; i++ { for i := 0; i < procs*4; i++ {
wg.Add(1) wg.Add(1)
go taxonomyRenderer(s, taxes, results, wg) go taxonomyRenderer(prepare, s, taxes, results, wg)
} }
errs := make(chan error) errs := make(chan error)
@ -1593,9 +1613,19 @@ func (s *Site) renderTaxonomiesLists() error {
return nil return nil
} }
func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) { func (s *Site) newTaxonomyNode(prepare bool, t taxRenderInfo, counter int) (*Node, string) {
key := t.key key := t.key
n := s.newNode() n := s.nodeLookup(fmt.Sprintf("tax-%s-%s", t.plural, key), counter, prepare)
if s.Info.preserveTaxonomyNames {
key = helpers.MakePathSanitized(key)
}
base := t.plural + "/" + key
if !prepare {
return n, base
}
if s.Info.preserveTaxonomyNames { if s.Info.preserveTaxonomyNames {
key = helpers.MakePathSanitized(key) key = helpers.MakePathSanitized(key)
// keep as is in the title // keep as is in the title
@ -1603,7 +1633,6 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
} else { } else {
n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1) n.Title = strings.Replace(strings.Title(t.key), "-", " ", -1)
} }
base := t.plural + "/" + key
s.setURLs(n, base) s.setURLs(n, base)
if len(t.pages) > 0 { if len(t.pages) > 0 {
n.Date = t.pages[0].Page.Date n.Date = t.pages[0].Page.Date
@ -1616,19 +1645,30 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
return n, base return n, base
} }
func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) { func taxonomyRenderer(prepare bool, s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
defer wg.Done() defer wg.Done()
var n *Node var n *Node
for t := range taxes { for t := range taxes {
var base string var (
layouts := s.appendThemeTemplates( base string
[]string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"}) baseWithLanguagePrefix string
paginatePath string
layouts []string
)
n, base = s.newTaxonomyNode(t) n, base = s.newTaxonomyNode(prepare, t, 0)
baseWithLanguagePrefix := n.addLangPathPrefix(base)
if prepare {
continue
}
baseWithLanguagePrefix = n.addLangPathPrefix(base)
layouts = s.appendThemeTemplates(
[]string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
dest := base dest := base
if viper.GetBool("UglyURLs") { if viper.GetBool("UglyURLs") {
@ -1644,7 +1684,7 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
if n.paginator != nil { if n.paginator != nil {
paginatePath := viper.GetString("paginatePath") paginatePath = viper.GetString("paginatePath")
// write alias for page 1 // write alias for page 1
s.writeDestAlias(helpers.PaginateAliasPath(baseWithLanguagePrefix, 1), n.Permalink()) s.writeDestAlias(helpers.PaginateAliasPath(baseWithLanguagePrefix, 1), n.Permalink())
@ -1657,13 +1697,15 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
continue continue
} }
taxonomyPagerNode, _ := s.newTaxonomyNode(t) taxonomyPagerNode, _ := s.newTaxonomyNode(true, t, i)
taxonomyPagerNode.paginator = pager taxonomyPagerNode.paginator = pager
if pager.TotalPages() > 0 { if pager.TotalPages() > 0 {
first, _ := pager.page(0) first, _ := pager.page(0)
taxonomyPagerNode.Date = first.Date taxonomyPagerNode.Date = first.Date
taxonomyPagerNode.Lastmod = first.Lastmod taxonomyPagerNode.Lastmod = first.Lastmod
} }
pageNumber := i + 1 pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", baseWithLanguagePrefix, paginatePath, pageNumber) htmlBase := fmt.Sprintf("/%s/%s/%d", baseWithLanguagePrefix, paginatePath, pageNumber)
if err := s.renderAndWritePage(fmt.Sprintf("taxonomy %s", t.singular), htmlBase, taxonomyPagerNode, layouts...); err != nil { if err := s.renderAndWritePage(fmt.Sprintf("taxonomy %s", t.singular), htmlBase, taxonomyPagerNode, layouts...); err != nil {
@ -1673,13 +1715,21 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
} }
} }
if prepare {
continue
}
if !viper.GetBool("DisableRSS") { if !viper.GetBool("DisableRSS") {
// XML Feed // XML Feed
c := *n
rssNode := &c
rssNode.nodeID = ""
rssuri := viper.GetString("RSSUri") rssuri := viper.GetString("RSSUri")
s.setURLs(n, base+"/"+rssuri) s.setURLs(rssNode, base+"/"+rssuri)
rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"} rssLayouts := []string{"taxonomy/" + t.singular + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, n, s.appendThemeTemplates(rssLayouts)...); err != nil { if err := s.renderAndWriteXML("taxonomy "+t.singular+" rss", baseWithLanguagePrefix+"/"+rssuri, rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
results <- err results <- err
continue continue
} }
@ -1688,18 +1738,24 @@ func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error,
} }
// renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy // renderListsOfTaxonomyTerms renders a page per taxonomy that lists the terms for that taxonomy
func (s *Site) renderListsOfTaxonomyTerms() (err error) { func (s *Site) renderListsOfTaxonomyTerms(prepare bool) (err error) {
taxonomies := s.Language.GetStringMapString("Taxonomies") taxonomies := s.Language.GetStringMapString("Taxonomies")
for singular, plural := range taxonomies { for singular, plural := range taxonomies {
n := s.newNode() n := s.nodeLookup(fmt.Sprintf("taxlist-%s", plural), 0, prepare)
n.Title = strings.Title(plural)
s.setURLs(n, plural) if prepare {
n.Data["Singular"] = singular n.Title = strings.Title(plural)
n.Data["Plural"] = plural s.setURLs(n, plural)
n.Data["Terms"] = s.Taxonomies[plural] n.Data["Singular"] = singular
// keep the following just for legacy reasons n.Data["Plural"] = plural
n.Data["OrderedIndex"] = n.Data["Terms"] n.Data["Terms"] = s.Taxonomies[plural]
n.Data["Index"] = n.Data["Terms"] // keep the following just for legacy reasons
n.Data["OrderedIndex"] = n.Data["Terms"]
n.Data["Index"] = n.Data["Terms"]
continue
}
layouts := []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"} layouts := []string{"taxonomy/" + singular + ".terms.html", "_default/terms.html", "indexes/indexes.html"}
layouts = s.appendThemeTemplates(layouts) layouts = s.appendThemeTemplates(layouts)
if s.layoutExists(layouts...) { if s.layoutExists(layouts...) {
@ -1712,8 +1768,13 @@ func (s *Site) renderListsOfTaxonomyTerms() (err error) {
return return
} }
func (s *Site) newSectionListNode(sectionName, section string, data WeightedPages) *Node { func (s *Site) newSectionListNode(prepare bool, sectionName, section string, data WeightedPages, counter int) *Node {
n := s.newNode() n := s.nodeLookup(fmt.Sprintf("sect-%s", sectionName), counter, prepare)
if !prepare {
return n
}
sectionName = helpers.FirstUpper(sectionName) sectionName = helpers.FirstUpper(sectionName)
if viper.GetBool("PluralizeListTitles") { if viper.GetBool("PluralizeListTitles") {
n.Title = inflect.Pluralize(sectionName) n.Title = inflect.Pluralize(sectionName)
@ -1729,7 +1790,7 @@ func (s *Site) newSectionListNode(sectionName, section string, data WeightedPage
} }
// renderSectionLists renders a page for each section // renderSectionLists renders a page for each section
func (s *Site) renderSectionLists() error { func (s *Site) renderSectionLists(prepare bool) error {
for section, data := range s.Sections { for section, data := range s.Sections {
// section keys can be lower case (depending on site.pathifyTaxonomyKeys) // section keys can be lower case (depending on site.pathifyTaxonomyKeys)
// extract the original casing from the first page to get sensible titles. // extract the original casing from the first page to get sensible titles.
@ -1737,6 +1798,13 @@ func (s *Site) renderSectionLists() error {
if !s.Info.preserveTaxonomyNames && len(data) > 0 { if !s.Info.preserveTaxonomyNames && len(data) > 0 {
sectionName = data[0].Page.Section() sectionName = data[0].Page.Section()
} }
n := s.newSectionListNode(prepare, sectionName, section, data, 0)
if prepare {
continue
}
layouts := s.appendThemeTemplates( layouts := s.appendThemeTemplates(
[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}) []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
@ -1744,7 +1812,6 @@ func (s *Site) renderSectionLists() error {
section = helpers.MakePathSanitized(section) section = helpers.MakePathSanitized(section)
} }
n := s.newSectionListNode(sectionName, section, data)
base := n.addLangPathPrefix(section) base := n.addLangPathPrefix(section)
if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), base, n, s.appendThemeTemplates(layouts)...); err != nil { if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), base, n, s.appendThemeTemplates(layouts)...); err != nil {
@ -1766,7 +1833,7 @@ func (s *Site) renderSectionLists() error {
continue continue
} }
sectionPagerNode := s.newSectionListNode(sectionName, section, data) sectionPagerNode := s.newSectionListNode(true, sectionName, section, data, i)
sectionPagerNode.paginator = pager sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 { if pager.TotalPages() > 0 {
first, _ := pager.page(0) first, _ := pager.page(0)
@ -1781,6 +1848,10 @@ func (s *Site) renderSectionLists() error {
} }
} }
if prepare {
return nil
}
if !viper.GetBool("DisableRSS") && section != "" { if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed // XML Feed
rssuri := viper.GetString("RSSUri") rssuri := viper.GetString("RSSUri")
@ -1794,9 +1865,12 @@ func (s *Site) renderSectionLists() error {
return nil return nil
} }
func (s *Site) renderHomePage() error { func (s *Site) renderHomePage(prepare bool) error {
n := s.newHomeNode() n := s.newHomeNode(prepare, 0)
if prepare {
return nil
}
layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html"}) layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html"})
base := n.addLangFilepathPrefix("") base := n.addLangFilepathPrefix("")
@ -1807,9 +1881,11 @@ func (s *Site) renderHomePage() error {
if n.paginator != nil { if n.paginator != nil {
paginatePath := viper.GetString("paginatePath") paginatePath := viper.GetString("paginatePath")
// write alias for page 1 {
// TODO(bep) ml all of these n.addLang ... fix. // write alias for page 1
s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink()) // TODO(bep) ml all of these n.addLang ... fix.
s.writeDestAlias(n.addLangPathPrefix(helpers.PaginateAliasPath("", 1)), n.Permalink())
}
pagers := n.paginator.Pagers() pagers := n.paginator.Pagers()
@ -1819,39 +1895,44 @@ func (s *Site) renderHomePage() error {
continue continue
} }
homePagerNode := s.newHomeNode() homePagerNode := s.newHomeNode(true, i)
homePagerNode.paginator = pager homePagerNode.paginator = pager
if pager.TotalPages() > 0 { if pager.TotalPages() > 0 {
first, _ := pager.page(0) first, _ := pager.page(0)
homePagerNode.Date = first.Date homePagerNode.Date = first.Date
homePagerNode.Lastmod = first.Lastmod homePagerNode.Lastmod = first.Lastmod
} }
pageNumber := i + 1 pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber) htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
htmlBase = n.addLangPathPrefix(htmlBase) htmlBase = n.addLangPathPrefix(htmlBase)
if err := s.renderAndWritePage(fmt.Sprintf("homepage"), filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil { if err := s.renderAndWritePage(fmt.Sprintf("homepage"),
filepath.FromSlash(htmlBase), homePagerNode, layouts...); err != nil {
return err return err
} }
} }
} }
if !viper.GetBool("DisableRSS") { if !viper.GetBool("DisableRSS") {
// XML Feed // XML Feed
s.setURLs(n, viper.GetString("RSSUri")) rssNode := s.newNode("rss-home")
n.Title = "" s.setURLs(rssNode, viper.GetString("RSSUri"))
rssNode.Title = ""
high := 50 high := 50
if len(s.Pages) < high { if len(s.Pages) < high {
high = len(s.Pages) high = len(s.Pages)
} }
n.Data["Pages"] = s.Pages[:high] rssNode.Data["Pages"] = s.Pages[:high]
if len(s.Pages) > 0 { if len(s.Pages) > 0 {
n.Date = s.Pages[0].Date rssNode.Date = s.Pages[0].Date
n.Lastmod = s.Pages[0].Lastmod rssNode.Lastmod = s.Pages[0].Lastmod
} }
rssLayouts := []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"} rssLayouts := []string{"rss.xml", "_default/rss.xml", "_internal/_default/rss.xml"}
if err := s.renderAndWriteXML("homepage rss", n.addLangPathPrefix(viper.GetString("RSSUri")), n, s.appendThemeTemplates(rssLayouts)...); err != nil { if err := s.renderAndWriteXML("homepage rss", rssNode.addLangPathPrefix(viper.GetString("RSSUri")), rssNode, s.appendThemeTemplates(rssLayouts)...); err != nil {
return err return err
} }
} }
@ -1860,23 +1941,20 @@ func (s *Site) renderHomePage() error {
return nil return nil
} }
// TODO(bep) reusing the Home Node smells trouble node404 := s.newNode("404")
n.URLPath.URL = helpers.URLize("404.html") node404.Title = "404 Page not found"
n.IsHome = false s.setURLs(node404, "404.html")
n.Title = "404 Page not found"
n.URLPath.Permalink = permalink("404.html")
n.scratch = newScratch()
nfLayouts := []string{"404.html"} nfLayouts := []string{"404.html"}
if nfErr := s.renderAndWritePage("404 page", "404.html", n, s.appendThemeTemplates(nfLayouts)...); nfErr != nil { if nfErr := s.renderAndWritePage("404 page", "404.html", node404, s.appendThemeTemplates(nfLayouts)...); nfErr != nil {
return nfErr return nfErr
} }
return nil return nil
} }
func (s *Site) newHomeNode() *Node { func (s *Site) newHomeNode(prepare bool, counter int) *Node {
n := s.newNode() n := s.nodeLookup("home", counter, prepare)
n.Title = n.Site.Title n.Title = n.Site.Title
n.IsHome = true n.IsHome = true
s.setURLs(n, "/") s.setURLs(n, "/")
@ -1895,7 +1973,7 @@ func (s *Site) renderSitemap() error {
sitemapDefault := parseSitemap(viper.GetStringMap("Sitemap")) sitemapDefault := parseSitemap(viper.GetStringMap("Sitemap"))
n := s.newNode() n := s.newNode("sitemap")
// Prepend homepage to the list of pages // Prepend homepage to the list of pages
pages := make(Pages, 0) pages := make(Pages, 0)
@ -1942,7 +2020,7 @@ func (s *Site) renderRobotsTXT() error {
return nil return nil
} }
n := s.newNode() n := s.newNode("robots")
n.Data["Pages"] = s.Pages n.Data["Pages"] = s.Pages
rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"} rLayouts := []string{"robots.txt", "_default/robots.txt", "_internal/_default/robots.txt"}
@ -1988,13 +2066,70 @@ func permalink(plink string) string {
func permalinkStr(plink string) string { func permalinkStr(plink string) string {
return helpers.MakePermalink(viper.GetString("BaseURL"), helpers.URLizeAndPrep(plink)).String() return helpers.MakePermalink(viper.GetString("BaseURL"), helpers.URLizeAndPrep(plink)).String()
} }
func (s *Site) newNode(nodeID string) *Node {
return s.nodeLookup(nodeID, 0, true)
}
func (s *Site) newNode() *Node { func (s *Site) getNode(nodeID string) *Node {
return &Node{ return s.getOrAddNode(nodeID, false)
}
func (s *Site) getOrAddNode(nodeID string, add bool) *Node {
s.nodeCacheInit.Do(func() {
s.nodeCache = &nodeCache{m: make(map[string]*Node)}
})
s.nodeCache.RLock()
if n, ok := s.nodeCache.m[nodeID]; ok {
s.nodeCache.RUnlock()
if !add {
return n
}
panic(fmt.Sprintf("Node with ID %q in use", nodeID))
}
s.nodeCache.RUnlock()
s.nodeCache.Lock()
if !add {
// this is a test type error, print the keys
for k, _ := range s.nodeCache.m {
fmt.Println("Node:", k)
}
return nil
}
// Double check
if _, ok := s.nodeCache.m[nodeID]; ok {
s.nodeCache.Unlock()
panic(fmt.Sprintf("Node with ID %q in use", nodeID))
}
n := &Node{
nodeID: nodeID,
Data: make(map[string]interface{}), Data: make(map[string]interface{}),
Site: &s.Info, Site: &s.Info,
language: s.Language, language: s.Language,
} }
s.nodeCache.m[nodeID] = n
s.nodeCache.Unlock()
return n
}
func (s *Site) nodeLookup(nodeIDPrefix string, counter int, add bool) *Node {
nodeID := fmt.Sprintf("%s-%d", nodeIDPrefix, counter)
n := s.getOrAddNode(nodeID, add)
// Paginator nodes (counter > 0) gets created during rendering and cannot take part in any
// global translations mapping
if add && s.owner != nil && counter == 0 {
s.owner.addNode(nodeID, n)
}
return n
} }
func (s *Site) layoutExists(layouts ...string) bool { func (s *Site) layoutExists(layouts ...string) bool {