Support Fish and Chips section

Section names are also used as the titles of the list pages, but naming section folders `Fish and Chips` and the like didn't work very well.

This commit fixes that.

This commit also changes the title casing of section titles. Some may argue that this is a breaking change, but the old behaviour was also pretty broken, even for languages that use title capitalization, as it didn't follow any particular style guide: `fish and chips` became `Fish And Chips`, etc.

Now it just turns the first letter into upper case, so `Fish and Chips` will be left as `Fish and Chips`.
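
For illustration, a minimal, self-contained sketch of the difference (`FirstUpper` is copied from the helper added in this commit, see the diff below; `strings.Title` stands in for the old behaviour):

```go
package main

import (
	"fmt"
	"strings"
	"unicode"
	"unicode/utf8"
)

// FirstUpper upper-cases only the first rune of s; copied from the
// helper added in this commit.
func FirstUpper(s string) string {
	if s == "" {
		return ""
	}
	r, n := utf8.DecodeRuneInString(s)
	return string(unicode.ToUpper(r)) + s[n:]
}

func main() {
	fmt.Println(FirstUpper("fish and chips"))    // Fish and chips (new)
	fmt.Println(FirstUpper("Fish and Chips"))    // Fish and Chips (left as-is)
	fmt.Println(strings.Title("fish and chips")) // Fish And Chips (old behaviour)
}
```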

People wanting the good old behaviour can use the `title` template func.
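
For example, a section list template can pipe the title through `title` to get the old casing back. A stand-alone sketch using Go's text/template, with `strings.Title` as a stand-in for Hugo's `title` func (the template name and data here are hypothetical):

```go
package main

import (
	"os"
	"strings"
	"text/template"
)

func main() {
	// Stand-in for Hugo's `title` template func, which title-cases a string.
	funcs := template.FuncMap{"title": strings.Title}

	// A hypothetical _default/list.html that restores the old casing.
	tmpl := template.Must(template.New("list").Funcs(funcs).
		Parse("<h1>{{ title .Title }}</h1>\n"))

	// With the new behaviour the node title is e.g. "Fish and chips"...
	data := struct{ Title string }{Title: "Fish and chips"}
	// ...and piping it through `title` prints "<h1>Fish And Chips</h1>".
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}
```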

Fixes #1176
bep 2015-05-28 23:05:13 +02:00
parent be535832f7
commit be7404e337
8 changed files with 187 additions and 42 deletions


@@ -25,6 +25,8 @@ import (
"reflect"
"strings"
"sync"
"unicode"
"unicode/utf8"
"github.com/spf13/cast"
bp "github.com/spf13/hugo/bufferpool"
@@ -78,6 +80,15 @@ func GuessType(in string) string {
return "unknown"
}
// FirstUpper returns a string with the first character as upper case.
func FirstUpper(s string) string {
if s == "" {
return ""
}
r, n := utf8.DecodeRuneInString(s)
return string(unicode.ToUpper(r)) + s[n:]
}
// ReaderToBytes takes an io.Reader argument, reads from it
// and returns bytes.
func ReaderToBytes(lines io.Reader) []byte {


@@ -33,6 +33,24 @@ func TestGuessType(t *testing.T) {
}
}
func TestFirstUpper(t *testing.T) {
for i, this := range []struct {
in string
expect string
}{
{"foo", "Foo"},
{"foo bar", "Foo bar"},
{"Foo Bar", "Foo Bar"},
{"", ""},
{"å", "Å"},
} {
result := FirstUpper(this.in)
if result != this.expect {
t.Errorf("[%d] got %s but expected %s", i, result, this.expect)
}
}
}
func TestBytesToReader(t *testing.T) {
asBytes := ReaderToBytes(strings.NewReader("Hello World!"))
asReader := BytesToReader(asBytes)


@@ -92,7 +92,7 @@ func UnicodeSanitize(s string) string {
target := make([]rune, 0, len(source))
for _, r := range source {
if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '.' || r == '/' || r == '_' || r == '-' || r == '#' {
if unicode.IsLetter(r) || unicode.IsDigit(r) || r == '.' || r == '/' || r == '\\' || r == '_' || r == '-' || r == '#' {
target = append(target, r)
}
}
@@ -332,8 +332,8 @@ func GetRelativePath(path, base string) (final string, err error) {
return "", err
}
if strings.HasSuffix(path, "/") && !strings.HasSuffix(name, "/") {
name += "/"
if strings.HasSuffix(filepath.FromSlash(path), FilePathSeparator) && !strings.HasSuffix(name, FilePathSeparator) {
name += FilePathSeparator
}
return name, nil
}


@@ -92,6 +92,16 @@ weight = 3
+++
Front Matter with Menu Pages`)
var MENU_PAGE_4 = []byte(`+++
title = "Four"
weight = 4
[menu]
[menu.p_two]
Name = "Four"
Parent = "Three"
+++
Front Matter with Menu Pages`)
var MENU_PAGE_SOURCES = []source.ByteSource{
{filepath.FromSlash("sect/doc1.md"), MENU_PAGE_1},
{filepath.FromSlash("sect/doc2.md"), MENU_PAGE_2},
@@ -102,6 +112,7 @@ var MENU_PAGE_SECTIONS_SOURCES = []source.ByteSource{
{filepath.FromSlash("first/doc1.md"), MENU_PAGE_1},
{filepath.FromSlash("first/doc2.md"), MENU_PAGE_2},
{filepath.FromSlash("second-section/doc3.md"), MENU_PAGE_3},
{filepath.FromSlash("Fish and Chips/doc4.md"), MENU_PAGE_4},
}
func tstCreateMenuPageWithNameTOML(title, menu, name string) []byte {
@@ -332,48 +343,64 @@ func doTestMenuWithUnicodeURLs(t *testing.T, canonifyURLs, uglyURLs bool) {
// Issue #1114
func TestSectionPagesMenu(t *testing.T) {
viper.Reset()
defer viper.Reset()
viper.Set("SectionPagesMenu", "spm")
doTestSectionPagesMenu(true, t)
doTestSectionPagesMenu(false, t)
}
func doTestSectionPagesMenu(canonifyUrls bool, t *testing.T) {
viper.Reset()
defer viper.Reset()
viper.Set("SectionPagesMenu", "spm")
viper.Set("CanonifyURLs", canonifyUrls)
s := setupMenuTests(t, MENU_PAGE_SECTIONS_SOURCES)
assert.Equal(t, 2, len(s.Sections))
assert.Equal(t, 3, len(s.Sections))
firstSectionPages := s.Sections["first"]
assert.Equal(t, 2, len(firstSectionPages))
secondSectionPages := s.Sections["second-section"]
assert.Equal(t, 1, len(secondSectionPages))
fishySectionPages := s.Sections["fish-and-chips"]
assert.Equal(t, 1, len(fishySectionPages))
nodeFirst := s.newSectionListNode("first", firstSectionPages)
nodeSecond := s.newSectionListNode("second-section", secondSectionPages)
nodeFirst := s.newSectionListNode("First", "first", firstSectionPages)
nodeSecond := s.newSectionListNode("Second Section", "second-section", secondSectionPages)
nodeFishy := s.newSectionListNode("Fish and Chips", "fish-and-chips", fishySectionPages)
firstSectionMenuEntry := findTestMenuEntryByID(s, "spm", "first")
secondSectionMenuEntry := findTestMenuEntryByID(s, "spm", "second-section")
fishySectionMenuEntry := findTestMenuEntryByID(s, "spm", "Fish and Chips")
assert.NotNil(t, firstSectionMenuEntry)
assert.NotNil(t, secondSectionMenuEntry)
assert.NotNil(t, nodeFirst)
assert.NotNil(t, nodeSecond)
assert.NotNil(t, fishySectionMenuEntry)
assert.NotNil(t, nodeFishy)
assert.True(t, nodeFirst.IsMenuCurrent("spm", firstSectionMenuEntry))
assert.False(t, nodeFirst.IsMenuCurrent("spm", secondSectionMenuEntry))
assert.False(t, nodeFirst.IsMenuCurrent("spm", fishySectionMenuEntry))
assert.True(t, nodeFishy.IsMenuCurrent("spm", fishySectionMenuEntry))
assert.Equal(t, "Fish and Chips", fishySectionMenuEntry.Name)
for _, p := range firstSectionPages {
assert.True(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
assert.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
assert.True(t, nodeFirst.IsMenuCurrent("spm", firstSectionMenuEntry))
assert.False(t, nodeFirst.IsMenuCurrent("spm", secondSectionMenuEntry))
}
for _, p := range secondSectionPages {
assert.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
assert.True(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
}
for _, p := range fishySectionPages {
assert.False(t, p.Page.HasMenuCurrent("spm", firstSectionMenuEntry))
assert.False(t, p.Page.HasMenuCurrent("spm", secondSectionMenuEntry))
assert.True(t, p.Page.HasMenuCurrent("spm", fishySectionMenuEntry))
}
}
func TestTaxonomyNodeMenu(t *testing.T) {


@@ -346,7 +346,7 @@ func (p *Page) analyzePage() {
func (p *Page) permalink() (*url.URL, error) {
baseURL := string(p.Site.BaseURL)
dir := strings.TrimSpace(filepath.ToSlash(p.Source.Dir()))
dir := strings.TrimSpace(helpers.MakePath(filepath.ToSlash(strings.ToLower(p.Source.Dir()))))
pSlug := strings.TrimSpace(helpers.URLize(p.Slug))
pURL := strings.TrimSpace(helpers.URLize(p.URL))
var permalink string
@@ -837,5 +837,5 @@ func (p *Page) TargetPath() (outfile string) {
outfile = helpers.ReplaceExtension(p.Source.LogicalName(), p.Extension())
}
return filepath.Join(p.Source.Dir(), strings.TrimSpace(outfile))
return filepath.Join(strings.ToLower(helpers.MakePath(p.Source.Dir())), strings.TrimSpace(outfile))
}


@@ -15,7 +15,6 @@ func TestPermalink(t *testing.T) {
tests := []struct {
file string
dir string
base template.URL
slug string
url string
@@ -24,27 +23,27 @@ func TestPermalink(t *testing.T) {
expectedAbs string
expectedRel string
}{
{"x/y/z/boofar.md", "x/y/z", "", "", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z/", "", "", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "", "", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "", "", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
// Issue #1174
{"x/y/z/boofar.md", "x/y/z", "http://gopher.com/", "", "", false, true, "http://gopher.com/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z/", "http://gopher.com/", "", "", true, true, "http://gopher.com/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "", "boofar", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z", "http://barnew/", "", "", false, false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/", "boofar", "", false, false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z", "", "", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "", "", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "", "boofar", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z", "http://barnew/", "", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/", "boofar", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", true, false, "http://barnew/boo/x/y/z/boofar.html", "/boo/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", false, true, "http://barnew/boo/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", false, false, "http://barnew/boo/x/y/z/boofar/", "/boo/x/y/z/boofar/"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "x/y/z/", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://gopher.com/", "", "", false, true, "http://gopher.com/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "http://gopher.com/", "", "", true, true, "http://gopher.com/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "", "boofar", "", false, false, "/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "http://barnew/", "", "", false, false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "http://barnew/", "boofar", "", false, false, "http://barnew/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "", "", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "", "", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "", "boofar", "", true, false, "/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/", "", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/", "boofar", "", true, false, "http://barnew/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", true, false, "http://barnew/boo/x/y/z/boofar.html", "/boo/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", false, true, "http://barnew/boo/x/y/z/boofar/", "/x/y/z/boofar/"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", false, false, "http://barnew/boo/x/y/z/boofar/", "/boo/x/y/z/boofar/"},
{"x/y/z/boofar.md", "http://barnew/boo/", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
{"x/y/z/boofar.md", "http://barnew/boo", "boofar", "", true, true, "http://barnew/boo/x/y/z/boofar.html", "/x/y/z/boofar.html"},
// test URL overrides
{"x/y/z/boofar.md", "x/y/z", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
{"x/y/z/boofar.md", "", "", "/z/y/q/", false, false, "/z/y/q/", "/z/y/q/"},
}
viper.Set("DefaultExtension", "html")


@@ -774,7 +774,7 @@ func (s *Site) assembleMenus() {
if sectionPagesMenu != "" {
if _, ok := sectionPagesMenus[p.Section()]; !ok {
if p.Section() != "" {
me := MenuEntry{Identifier: p.Section(), Name: helpers.MakeTitle(p.Section()), URL: s.Info.createNodeMenuEntryURL("/" + p.Section())}
me := MenuEntry{Identifier: p.Section(), Name: helpers.MakeTitle(helpers.FirstUpper(p.Section())), URL: s.Info.createNodeMenuEntryURL("/" + p.Section())}
if _, ok := flat[twoD{sectionPagesMenu, me.KeyName()}]; ok {
// menu with same id defined in config, let that one win
continue
@@ -1160,12 +1160,13 @@ func (s *Site) RenderListsOfTaxonomyTerms() (err error) {
return
}
func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
func (s *Site) newSectionListNode(sectionName, section string, data WeightedPages) *Node {
n := s.NewNode()
sectionName = helpers.FirstUpper(sectionName)
if viper.GetBool("PluralizeListTitles") {
n.Title = strings.Title(inflect.Pluralize(section))
n.Title = inflect.Pluralize(sectionName)
} else {
n.Title = strings.Title(section)
n.Title = sectionName
}
s.setURLs(n, section)
n.Date = data[0].Page.Date
@@ -1179,12 +1180,17 @@ func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
func (s *Site) RenderSectionLists() error {
for section, data := range s.Sections {
// section keys are lower case
// extract the original casing from the first page to get sensible titles.
sectionName := section
if len(data) > 0 {
sectionName = data[0].Page.Section()
}
layouts := s.appendThemeTemplates(
[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
n := s.newSectionListNode(section, data)
if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), fmt.Sprintf("/%s", section), n, s.appendThemeTemplates(layouts)...); err != nil {
n := s.newSectionListNode(sectionName, section, data)
if err := s.renderAndWritePage(fmt.Sprintf("section %s", section), section, n, s.appendThemeTemplates(layouts)...); err != nil {
return err
}
@@ -1203,7 +1209,7 @@ func (s *Site) RenderSectionLists() error {
continue
}
sectionPagerNode := s.newSectionListNode(section, data)
sectionPagerNode := s.newSectionListNode(sectionName, section, data)
sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 {
sectionPagerNode.Date = pager.Pages()[0].Date


@@ -1,6 +1,7 @@
package hugolib
import (
"bitbucket.org/pkg/inflect"
"bytes"
"fmt"
"html/template"
@@ -277,6 +278,7 @@ func TestDraftAndFutureRender(t *testing.T) {
// Issue #957
func TestCrossrefs(t *testing.T) {
hugofs.DestinationFS = new(afero.MemMapFs)
for _, uglyUrls := range []bool{true, false} {
for _, relative := range []bool{true, false} {
doTestCrossrefs(t, relative, uglyUrls)
@@ -360,6 +362,7 @@ func doTestCrossrefs(t *testing.T, relative, uglyUrls bool) {
// Issue #939
func Test404ShouldAlwaysHaveUglyUrls(t *testing.T) {
hugofs.DestinationFS = new(afero.MemMapFs)
for _, uglyURLs := range []bool{true, false} {
doTest404ShouldAlwaysHaveUglyUrls(t, uglyURLs)
}
@@ -439,6 +442,87 @@ func doTest404ShouldAlwaysHaveUglyUrls(t *testing.T, uglyURLs bool) {
}
// Issue #1176
func TestSectionNaming(t *testing.T) {
for _, canonify := range []bool{true, false} {
for _, uglify := range []bool{true, false} {
for _, pluralize := range []bool{true, false} {
doTestSectionNaming(t, canonify, uglify, pluralize)
}
}
}
}
func doTestSectionNaming(t *testing.T, canonify, uglify, pluralize bool) {
hugofs.DestinationFS = new(afero.MemMapFs)
viper.Reset()
defer viper.Reset()
viper.Set("baseurl", "http://auth/sub/")
viper.Set("DefaultExtension", "html")
viper.Set("UglyURLs", uglify)
viper.Set("PluralizeListTitles", pluralize)
viper.Set("CanonifyURLs", canonify)
var expectedPathSuffix string
if uglify {
expectedPathSuffix = ".html"
} else {
expectedPathSuffix = "/index.html"
}
sources := []source.ByteSource{
{filepath.FromSlash("sect/doc1.html"), []byte("doc1")},
{filepath.FromSlash("Fish and Chips/doc2.html"), []byte("doc2")},
{filepath.FromSlash("ラーメン/doc3.html"), []byte("doc3")},
}
s := &Site{
Source: &source.InMemorySource{ByteSource: sources},
Targets: targetList{Page: &target.PagePub{UglyURLs: uglify}},
}
s.initializeSiteInfo()
templatePrep(s)
must(s.addTemplate("_default/single.html", "{{.Content}}"))
must(s.addTemplate("_default/list.html", "{{ .Title }}"))
createAndRenderPages(t, s)
s.RenderSectionLists()
tests := []struct {
doc string
pluralAware bool
expected string
}{
{filepath.FromSlash(fmt.Sprintf("sect/doc1%s", expectedPathSuffix)), false, "doc1"},
{filepath.FromSlash(fmt.Sprintf("sect%s", expectedPathSuffix)), true, "Sect"},
{filepath.FromSlash(fmt.Sprintf("fish-and-chips/doc2%s", expectedPathSuffix)), false, "doc2"},
{filepath.FromSlash(fmt.Sprintf("fish-and-chips%s", expectedPathSuffix)), true, "Fish and Chips"},
{filepath.FromSlash(fmt.Sprintf("ラーメン/doc3%s", expectedPathSuffix)), false, "doc3"},
{filepath.FromSlash(fmt.Sprintf("ラーメン%s", expectedPathSuffix)), true, "ラーメン"},
}
for _, test := range tests {
file, err := hugofs.DestinationFS.Open(test.doc)
if err != nil {
t.Fatalf("Did not find %s in target: %s", test.doc, err)
}
content := helpers.ReaderToString(file)
if test.pluralAware && pluralize {
test.expected = inflect.Pluralize(test.expected)
}
if content != test.expected {
t.Errorf("%s content expected:\n%q\ngot:\n%q", test.doc, test.expected, content)
}
}
}
func TestSkipRender(t *testing.T) {
viper.Reset()
defer viper.Reset()