Add pagination support for home page, sections and taxonomies

Two new configuration properties, `Paginate` (default `0`) and `PaginatePath` (default `page`) are added.

Setting `paginate` to a positive value will split the list pages for the home page, sections and taxonomies into chunks of the size given by the `paginate` property.

A `.Paginator` is provided to help building a pager menu.

There are two ways to configure a `.Paginator`:

1. The simplest way is just to call `.Paginator.Pages` from a template. It will contain the pages for "that page" (`.Data.Pages` will (like today) contain all the pages).
2. Select a sub-set of the pages with the available template functions and pass the slice to `.Paginate` : `{{ range (.Paginate (where .Data.Pages "Type" "post")).Pages }}`

**NOTE:** For a given Node, it's one of the options above. It's perfectly legitimate to iterate over the same pager more than once, but it's static and cannot change.

The `.Paginator` contains enough information to build a full-blown paginator interface.

The pages are built on the following form (note: BLANK means no value, i.e. the home page):

```
[SECTION/TAXONOMY/BLANK]/index.html
[SECTION/TAXONOMY/BLANK]/page/1/index.html => redirect to  [SECTION/TAXONOMY/BLANK]/index.html
[SECTION/TAXONOMY/BLANK]/page/2/index.html
....
```

Fixes #96
This commit is contained in:
bep 2014-12-27 14:11:19 +01:00
parent 407e80a9ab
commit 37445bc6aa
7 changed files with 554 additions and 59 deletions

View file

@ -136,6 +136,8 @@ func InitializeConfig() {
viper.SetDefault("FootnoteAnchorPrefix", "")
viper.SetDefault("FootnoteReturnLinkContents", "")
viper.SetDefault("NewContentEditor", "")
viper.SetDefault("Paginate", 0)
viper.SetDefault("PaginatePath", "page")
viper.SetDefault("Blackfriday", new(helpers.Blackfriday))
if hugoCmdV.PersistentFlags().Lookup("buildDrafts").Changed {

View file

@ -16,6 +16,7 @@ package helpers
import (
"fmt"
"github.com/PuerkitoBio/purell"
"github.com/spf13/viper"
"net/url"
"path"
"strings"
@ -97,6 +98,10 @@ func AddContextRoot(baseUrl, relativePath string) string {
return newPath
}
func UrlizeAndPrep(in string) string {
return UrlPrep(viper.GetBool("UglyUrls"), Urlize(in))
}
func UrlPrep(ugly bool, in string) string {
if ugly {
x := Uglify(SanitizeUrl(in))

View file

@ -15,6 +15,7 @@ package hugolib
import (
"html/template"
"sync"
"time"
)
@ -30,6 +31,8 @@ type Node struct {
Date time.Time
Sitemap Sitemap
UrlPath
paginator *pager
paginatorInit sync.Once
}
func (n *Node) Now() time.Time {

269
hugolib/pagination.go Normal file
View file

@ -0,0 +1,269 @@
// Copyright © 2013-14 Steve Francia <spf@spf13.com>.
//
// Licensed under the Simple Public License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://opensource.org/licenses/Simple-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hugolib
import (
"errors"
"fmt"
"github.com/spf13/hugo/helpers"
"github.com/spf13/viper"
"html/template"
"math"
"path"
)
// pager represents a single page in a paginated sequence. It embeds the
// shared paginator so every pager can reach its siblings.
type pager struct {
// 1-based page number within the paginator
number int
*paginator
}
// pagers is the ordered list of all pages produced by one paginator.
type pagers []*pager
// paginator holds the immutable result of splitting a page list into
// fixed-size chunks.
type paginator struct {
// one Pages chunk per pager, in order
paginatedPages []Pages
pagers
paginationUrlFactory
// total number of elements across all chunks
total int
// configured chunk size
size int
}
// paginationUrlFactory builds the relative URL for a given page number.
type paginationUrlFactory func(int) string
// PageNumber reports this pager's 1-based position in the sequence.
func (p *pager) PageNumber() int {
	return p.number
}
// Url renders the relative URL for this page via the paginator's URL factory.
func (p *pager) Url() template.HTML {
	rel := p.paginationUrlFactory(p.PageNumber())
	return template.HTML(rel)
}
// Pages returns the slice of elements belonging to this page.
func (p *pager) Pages() Pages {
	idx := p.PageNumber() - 1
	return p.paginatedPages[idx]
}
// NumberOfElements reports how many elements this page holds; only the
// final page may hold fewer than the paginator's size.
func (p *pager) NumberOfElements() int {
	return len(p.Pages())
}
// HasPrev reports whether a page exists before this one, i.e. this is
// not page 1.
func (p *pager) HasPrev() bool {
	return p.PageNumber() > 1
}
// Prev returns the pager immediately before this one, or nil when on the
// first page.
func (p *pager) Prev() *pager {
	if p.HasPrev() {
		return p.pagers[p.PageNumber()-2]
	}
	return nil
}
// HasNext reports whether a page exists after this one.
func (p *pager) HasNext() bool {
	return p.PageNumber() < len(p.paginatedPages)
}
// Next returns the pager immediately after this one, or nil when on the
// last page. Note: PageNumber is 1-based, so it doubles as the slice
// index of the following pager.
func (p *pager) Next() *pager {
	if p.HasNext() {
		return p.pagers[p.PageNumber()]
	}
	return nil
}
// First returns the pager for page 1, or nil for an empty paginator.
func (p *pager) First() *pager {
	if p.TotalPages() > 0 {
		return p.pagers[0]
	}
	return nil
}
// Last returns the pager for the final page, or nil for an empty paginator.
func (p *pager) Last() *pager {
	if p.TotalPages() > 0 {
		return p.pagers[len(p.pagers)-1]
	}
	return nil
}
// Pagers exposes the full list of pagers, typically ranged over in a
// template to build a pagination menu.
func (p *paginator) Pagers() pagers {
	return p.pagers
}
// PageSize reports the configured number of elements per page.
func (p *paginator) PageSize() int {
	return p.size
}
// TotalPages reports how many pages this paginator was split into.
func (p *paginator) TotalPages() int {
	return len(p.paginatedPages)
}
// TotalNumberOfElements reports the element count summed over all pages.
func (p *paginator) TotalNumberOfElements() int {
	return p.total
}
// splitPages partitions pages into consecutive chunks of at most size
// elements; the final chunk may be shorter. A nil or empty input yields
// a nil result.
func splitPages(pages Pages, size int) []Pages {
	var chunks []Pages
	total := len(pages)
	for start := 0; start < total; start += size {
		// NOTE(review): integer min computed via a float64 round-trip;
		// exact for realistic page counts, but a plain int comparison
		// would be cleaner.
		end := int(math.Min(float64(start+size), float64(total)))
		chunks = append(chunks, pages[start:end])
	}
	return chunks
}
// Paginator gets this Node's paginator if it's already created.
// If it's not, one will be created with all pages in Data["Pages"].
// Not supported for regular content pages. The paginator is built at
// most once per Node (guarded by paginatorInit).
func (n *Node) Paginator() (*pager, error) {
if n.IsPage() {
return nil, errors.New("Paginators isn't supported for content pages.")
}
var initError error
n.paginatorInit.Do(func() {
// A paginator may already have been attached by the site renderer
// (pager nodes beyond page 1 are created that way); keep it.
if n.paginator != nil {
return
}
pagers, err := paginatePages(n.Data["Pages"], n.Url)
if err != nil {
initError = err
}
if len(pagers) > 0 {
// the rest of the nodes will be created later
n.paginator = pagers[0]
n.Site.addToPaginationPageCount(uint64(n.paginator.TotalPages()))
}
})
if initError != nil {
return nil, initError
}
return n.paginator, nil
}
// Paginate gets this Node's paginator if it's already created.
// If it's not, one will be created with the given sequence.
// Note that repeated calls will return the same result, even if the sequence is different:
// the sync.Once guard means only the first call's sequence is used.
func (n *Node) Paginate(seq interface{}) (*pager, error) {
if n.IsPage() {
return nil, errors.New("Paginators isn't supported for content pages.")
}
var initError error
n.paginatorInit.Do(func() {
// A paginator may already have been attached by the site renderer
// (pager nodes beyond page 1 are created that way); keep it.
if n.paginator != nil {
return
}
pagers, err := paginatePages(seq, n.Url)
if err != nil {
initError = err
}
if len(pagers) > 0 {
// the rest of the nodes will be created later
n.paginator = pagers[0]
n.Site.addToPaginationPageCount(uint64(n.paginator.TotalPages()))
}
})
if initError != nil {
return nil, initError
}
return n.paginator, nil
}
// paginatePages splits seq into pagers of the configured "paginate" size,
// with URLs rooted at section. seq may be Pages, *Pages, WeightedPages or
// PageGroup. Returns an error when pagination is disabled (paginate <= 0)
// or the sequence type is unsupported.
func paginatePages(seq interface{}, section string) (pagers, error) {
	paginateSize := viper.GetInt("paginate")
	if paginateSize <= 0 {
		return nil, errors.New("'paginate' configuration setting must be positive to paginate")
	}
	var pages Pages
	// Bind the concrete value in the type switch instead of re-asserting
	// it in every case.
	switch v := seq.(type) {
	case Pages:
		pages = v
	case *Pages:
		pages = *v
	case WeightedPages:
		pages = v.Pages()
	case PageGroup:
		pages = v.Pages
	default:
		// fmt.Errorf replaces errors.New(fmt.Sprintf(...)) — same message,
		// idiomatic form (staticcheck S1028).
		return nil, fmt.Errorf("unsupported type in paginate, got %T", seq)
	}
	urlFactory := newPaginationUrlFactory(section)
	paginator := newPaginator(pages, paginateSize, urlFactory)
	return paginator.Pagers(), nil
}
// newPaginator builds a paginator over pages with the given chunk size
// and URL factory. Panics when size is not positive; callers are expected
// to have validated the configured size already.
func newPaginator(pages Pages, size int, urlFactory paginationUrlFactory) *paginator {
	if size <= 0 {
		panic("Paginator size must be positive")
	}
	chunks := splitPages(pages, size)
	p := &paginator{
		total:                len(pages),
		paginatedPages:       chunks,
		size:                 size,
		paginationUrlFactory: urlFactory,
	}
	// One pager per chunk, numbered from 1.
	pagerList := make(pagers, len(chunks))
	for i := range chunks {
		pagerList[i] = &pager{number: i + 1, paginator: p}
	}
	p.pagers = pagerList
	return p
}
// newPaginationUrlFactory returns a function mapping a page number to a
// relative URL beneath the joined pathElements. Page 1 maps to the base
// path itself; later pages get "/<base>/<paginatePath>/<n>/".
func newPaginationUrlFactory(pathElements ...string) paginationUrlFactory {
	paginatePath := viper.GetString("paginatePath")
	// The joined base never changes, so compute it once up front.
	basePath := path.Join(pathElements...)
	return func(page int) string {
		var rel string
		if page > 1 {
			rel = fmt.Sprintf("/%s/%s/%d/", basePath, paginatePath, page)
		} else {
			rel = fmt.Sprintf("/%s/", basePath)
		}
		return helpers.UrlizeAndPrep(rel)
	}
}

View file

@ -0,0 +1,95 @@
package hugolib
import (
"fmt"
"github.com/spf13/hugo/source"
"github.com/spf13/viper"
"github.com/stretchr/testify/assert"
"path/filepath"
"testing"
)
// TestSplitPages verifies chunking: 21 pages at size 5 yield four full
// chunks plus a final chunk of one.
func TestSplitPages(t *testing.T) {
	chunks := splitPages(createTestPages(21), 5)
	assert.Equal(t, 5, len(chunks))
	for _, chunk := range chunks[:4] {
		assert.Equal(t, 5, len(chunk))
	}
	assert.Equal(t, 1, len(chunks[4]))
}
// TestPaginator exercises the full pager API over 21 pages split 5 per page.
func TestPaginator(t *testing.T) {
	pages := createTestPages(21)
	urlFactory := func(page int) string {
		return fmt.Sprintf("page/%d/", page)
	}
	paginator := newPaginator(pages, 5, urlFactory)
	pagerList := paginator.Pagers()
	assert.Equal(t, 5, len(pagerList))
	assert.Equal(t, 21, paginator.TotalNumberOfElements())
	assert.Equal(t, 5, paginator.PageSize())
	assert.Equal(t, 5, paginator.TotalPages())
	// First page: no predecessor, full chunk.
	first := pagerList[0]
	assert.Equal(t, "page/1/", first.Url())
	assert.Equal(t, first, first.First())
	assert.Equal(t, true, first.HasNext())
	assert.Equal(t, false, first.HasPrev())
	assert.Equal(t, 5, first.NumberOfElements())
	assert.Equal(t, 1, first.PageNumber())
	// A middle page has neighbours on both sides.
	third := pagerList[2]
	assert.Equal(t, true, third.HasNext())
	assert.Equal(t, true, third.HasPrev())
	// Last page: no successor, holds the 21st element alone.
	last := pagerList[4]
	assert.Equal(t, "page/5/", last.Url())
	assert.Equal(t, last, last.Last())
	assert.Equal(t, false, last.HasNext())
	assert.Equal(t, true, last.HasPrev())
	assert.Equal(t, 1, last.NumberOfElements())
	assert.Equal(t, 5, last.PageNumber())
}
// TestPaginationUrlFactory checks URL building for single/multi path
// elements and non-ASCII (percent-encoded) sections.
// NOTE(review): mutates the global viper "PaginatePath" setting, which can
// leak into other tests — confirm test ordering doesn't depend on it.
func TestPaginationUrlFactory(t *testing.T) {
viper.Set("PaginatePath", "zoo")
unicode := newPaginationUrlFactory("новости проекта")
fooBar := newPaginationUrlFactory("foo", "bar")
// Page 1 has no pagination path segment.
assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/", unicode(1))
assert.Equal(t, "/foo/bar/", fooBar(1))
// Later pages append "<paginatePath>/<n>/".
assert.Equal(t, "/%D0%BD%D0%BE%D0%B2%D0%BE%D1%81%D1%82%D0%B8-%D0%BF%D1%80%D0%BE%D0%B5%D0%BA%D1%82%D0%B0/zoo/4/", unicode(4))
assert.Equal(t, "/foo/bar/zoo/12345/", fooBar(12345))
}
// createTestPages builds num in-memory pages with distinct URLs and source
// paths for use in pagination tests.
func createTestPages(num int) Pages {
	pages := make(Pages, num)
	for i := 0; i < num; i++ {
		pages[i] = &Page{
			Node: Node{
				UrlPath: UrlPath{
					Section: "z",
					// Bug fix: use the loop index i, not num, so every
					// page gets a unique URL rather than all sharing one.
					Url: fmt.Sprintf("http://base/x/y/p%d.html", i),
				},
				Site: &SiteInfo{
					BaseUrl: "http://base/",
				},
			},
			// Same fix for the source file path: p0.md, p1.md, ...
			Source: Source{File: *source.NewFile(filepath.FromSlash(fmt.Sprintf("/x/y/p%d.md", i)))},
		}
	}
	return pages
}

View file

@ -21,6 +21,7 @@ import (
"io"
"net/url"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
@ -37,6 +38,7 @@ import (
jww "github.com/spf13/jwalterweatherman"
"github.com/spf13/nitro"
"github.com/spf13/viper"
"sync/atomic"
)
var _ = transform.AbsURL
@ -86,27 +88,28 @@ type targetList struct {
}
type SiteInfo struct {
BaseUrl template.URL
Taxonomies TaxonomyList
Authors AuthorList
Social SiteSocial
Indexes *TaxonomyList // legacy, should be identical to Taxonomies
Sections Taxonomy
Pages *Pages
Files []*source.File
Recent *Pages // legacy, should be identical to Pages
Menus *Menus
Hugo *HugoInfo
Title string
Author map[string]interface{}
LanguageCode string
DisqusShortname string
Copyright string
LastChange time.Time
Permalinks PermalinkOverrides
Params map[string]interface{}
BuildDrafts bool
canonifyUrls bool
BaseUrl template.URL
Taxonomies TaxonomyList
Authors AuthorList
Social SiteSocial
Indexes *TaxonomyList // legacy, should be identical to Taxonomies
Sections Taxonomy
Pages *Pages
Files []*source.File
Recent *Pages // legacy, should be identical to Pages
Menus *Menus
Hugo *HugoInfo
Title string
Author map[string]interface{}
LanguageCode string
DisqusShortname string
Copyright string
LastChange time.Time
Permalinks PermalinkOverrides
Params map[string]interface{}
BuildDrafts bool
canonifyUrls bool
paginationPageCount uint64
}
// SiteSocial is a place to put social details on a site level. These are the
@ -205,6 +208,10 @@ func (s *SiteInfo) RelRef(ref string, page *Page) (string, error) {
return s.refLink(ref, page, true)
}
func (s *SiteInfo) addToPaginationPageCount(cnt uint64) {
atomic.AddUint64(&s.paginationPageCount, cnt)
}
type runmode struct {
Watching bool
}
@ -614,7 +621,7 @@ func (s *Site) getMenusFromConfig() Menus {
if strings.HasPrefix(menuEntry.Url, "/") {
// make it match the nodes
menuEntryUrl := menuEntry.Url
menuEntryUrl = s.prepUrl(menuEntryUrl)
menuEntryUrl = helpers.UrlizeAndPrep(menuEntryUrl)
if !s.Info.canonifyUrls {
menuEntryUrl = helpers.AddContextRoot(string(s.Info.BaseUrl), menuEntryUrl)
}
@ -943,17 +950,63 @@ func (s *Site) newTaxonomyNode(t taxRenderInfo) (*Node, string) {
func taxonomyRenderer(s *Site, taxes <-chan taxRenderInfo, results chan<- error, wg *sync.WaitGroup) {
defer wg.Done()
var n *Node
for t := range taxes {
n, base := s.newTaxonomyNode(t)
layouts := []string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"}
b, err := s.renderPage("taxononomy "+t.singular, n, s.appendThemeTemplates(layouts)...)
var base string
layouts := s.appendThemeTemplates(
[]string{"taxonomy/" + t.singular + ".html", "indexes/" + t.singular + ".html", "_default/taxonomy.html", "_default/list.html"})
n, base = s.newTaxonomyNode(t)
b, err := s.renderPage("taxononomy "+t.singular, n, layouts...)
if err != nil {
results <- err
continue
} else {
err := s.WriteDestPage(base+".html", b)
if err != nil {
results <- err
}
err = s.WriteDestPage(base, b)
if err != nil {
results <- err
continue
}
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
// write alias for page 1
s.WriteDestAlias(fmt.Sprintf("%s/%s/%d/index.html", base, paginatePath, 1), s.permalink(base))
pagers := n.paginator.Pagers()
for i, pager := range pagers {
if i == 0 {
// already created
continue
}
taxonomyPagerNode, _ := s.newTaxonomyNode(t)
taxonomyPagerNode.paginator = pager
if pager.TotalPages() > 0 {
taxonomyPagerNode.Date = pager.Pages()[0].Date
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", base, paginatePath, pageNumber)
b, err := s.renderPage(fmt.Sprintf("taxononomy_%s_%d", t.singular, pageNumber), taxonomyPagerNode, layouts...)
if err != nil {
results <- err
continue
}
err = s.WriteDestPage(htmlBase, b)
if err != nil {
results <- err
continue
}
}
}
@ -1005,34 +1058,76 @@ func (s *Site) RenderListsOfTaxonomyTerms() (err error) {
return
}
// newSectionListNode builds the list Node for a section: title from the
// (optionally pluralized) section name, URLs set from the section path,
// date from the first weighted page, and all section pages in Data["Pages"].
func (s *Site) newSectionListNode(section string, data WeightedPages) *Node {
n := s.NewNode()
if viper.GetBool("PluralizeListTitles") {
n.Title = strings.Title(inflect.Pluralize(section))
} else {
n.Title = strings.Title(section)
}
s.setUrls(n, section)
// NOTE(review): assumes data is non-empty; data[0] panics on an empty
// section — confirm callers only pass populated sections.
n.Date = data[0].Page.Date
n.Data["Pages"] = data.Pages()
return n
}
// Render a page for each section
func (s *Site) RenderSectionLists() error {
for section, data := range s.Sections {
n := s.NewNode()
if viper.GetBool("PluralizeListTitles") {
n.Title = strings.Title(inflect.Pluralize(section))
} else {
n.Title = strings.Title(section)
}
s.setUrls(n, section)
n.Date = data[0].Page.Date
n.Data["Pages"] = data.Pages()
layouts := []string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"}
b, err := s.renderPage("section "+section, n, s.appendThemeTemplates(layouts)...)
layouts := s.appendThemeTemplates(
[]string{"section/" + section + ".html", "_default/section.html", "_default/list.html", "indexes/" + section + ".html", "_default/indexes.html"})
n := s.newSectionListNode(section, data)
b, err := s.renderPage(fmt.Sprintf("section%s_%d", section, 1), n, s.appendThemeTemplates(layouts)...)
if err != nil {
return err
}
if err := s.WriteDestPage(section, b); err != nil {
if err := s.WriteDestPage(fmt.Sprintf("/%s", section), b); err != nil {
return err
}
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
// write alias for page 1
s.WriteDestAlias(filepath.FromSlash(fmt.Sprintf("/%s/%s/%d", section, paginatePath, 1)), s.permalink(section))
pagers := n.paginator.Pagers()
for i, pager := range pagers {
if i == 0 {
// already created
continue
}
sectionPagerNode := s.newSectionListNode(section, data)
sectionPagerNode.paginator = pager
if pager.TotalPages() > 0 {
sectionPagerNode.Date = pager.Pages()[0].Date
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%s/%d", section, paginatePath, pageNumber)
b, err := s.renderPage(fmt.Sprintf("section_%s_%d", section, pageNumber), sectionPagerNode, layouts...)
if err != nil {
return err
}
if err := s.WriteDestPage(filepath.FromSlash(htmlBase), b); err != nil {
return err
}
}
}
if !viper.GetBool("DisableRSS") && section != "" {
// XML Feed
n.Url = s.permalinkStr(section + "/index.xml")
n.Permalink = s.permalink(section)
rssLayouts := []string{"section/" + section + ".rss.xml", "_default/rss.xml", "rss.xml", "_internal/_default/rss.xml"}
b, err = s.renderXML("section "+section+" rss", n, s.appendThemeTemplates(rssLayouts)...)
b, err := s.renderXML("section "+section+" rss", n, s.appendThemeTemplates(rssLayouts)...)
if err != nil {
return err
}
@ -1054,8 +1149,10 @@ func (s *Site) newHomeNode() *Node {
func (s *Site) RenderHomePage() error {
n := s.newHomeNode()
layouts := []string{"index.html", "_default/list.html", "_default/single.html"}
b, err := s.renderPage("homepage", n, s.appendThemeTemplates(layouts)...)
layouts := s.appendThemeTemplates([]string{"index.html", "_default/list.html", "_default/single.html"})
b, err := s.renderPage("homepage", n, layouts...)
if err != nil {
return err
}
@ -1063,6 +1160,39 @@ func (s *Site) RenderHomePage() error {
return err
}
if n.paginator != nil {
paginatePath := viper.GetString("paginatePath")
// write alias for page 1
s.WriteDestAlias(filepath.FromSlash(fmt.Sprintf("/%s/%d", paginatePath, 1)), s.permalink("/"))
pagers := n.paginator.Pagers()
for i, pager := range pagers {
if i == 0 {
// already created
continue
}
homePagerNode := s.newHomeNode()
homePagerNode.paginator = pager
if pager.TotalPages() > 0 {
homePagerNode.Date = pager.Pages()[0].Date
}
pageNumber := i + 1
htmlBase := fmt.Sprintf("/%s/%d", paginatePath, pageNumber)
b, err := s.renderPage(fmt.Sprintf("homepage_%d", pageNumber), homePagerNode, layouts...)
if err != nil {
return err
}
if err := s.WriteDestPage(filepath.FromSlash(htmlBase), b); err != nil {
return err
}
}
}
if !viper.GetBool("DisableRSS") {
// XML Feed
n.Url = s.permalinkStr("index.xml")
@ -1163,7 +1293,9 @@ func (s *Site) Stats() {
jww.FEEDBACK.Println(s.draftStats())
jww.FEEDBACK.Println(s.futureStats())
jww.FEEDBACK.Printf("%d pages created \n", len(s.Pages))
if viper.GetInt("paginate") > 0 {
jww.FEEDBACK.Printf("%d paginator pages created \n", s.Info.paginationPageCount)
}
taxonomies := viper.GetStringMapString("Taxonomies")
for _, pl := range taxonomies {
@ -1172,7 +1304,7 @@ func (s *Site) Stats() {
}
func (s *Site) setUrls(n *Node, in string) {
n.Url = s.prepUrl(in)
n.Url = helpers.UrlizeAndPrep(in)
n.Permalink = s.permalink(n.Url)
n.RSSLink = s.permalink(in + ".xml")
}
@ -1182,19 +1314,7 @@ func (s *Site) permalink(plink string) template.HTML {
}
func (s *Site) permalinkStr(plink string) string {
return helpers.MakePermalink(string(viper.GetString("BaseUrl")), s.prepUrl(plink)).String()
}
func (s *Site) prepUrl(in string) string {
return s.PrettifyUrl(helpers.Urlize(in))
}
func (s *Site) PrettifyUrl(in string) string {
return helpers.UrlPrep(viper.GetBool("UglyUrls"), in)
}
func (s *Site) PrettifyPath(in string) string {
return helpers.PathPrep(viper.GetBool("UglyUrls"), in)
return helpers.MakePermalink(string(viper.GetString("BaseUrl")), helpers.UrlizeAndPrep(plink)).String()
}
func (s *Site) NewNode() *Node {

View file

@ -56,6 +56,7 @@ func TestPageCount(t *testing.T) {
hugofs.DestinationFS = new(afero.MemMapFs)
viper.Set("uglyurls", false)
viper.Set("paginate", 10)
s := &Site{
Source: &source.InMemorySource{ByteSource: urlFakeSource},
}