Default is now pretty urls (without .html)

Adding --uglyurls option to retain previous behavior
spf13 2013-07-19 00:10:42 -07:00
parent dcd8ff716a
commit 8fae5f0dd6
5 changed files with 172 additions and 139 deletions
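
To illustrate the behavior change, here is a minimal, standalone Go sketch of the new output-path rule. The outFile helper and main wrapper are hypothetical and only for illustration; in the diff below the real logic lives in Site.setOutFile and is driven by the new UglyUrls config flag.

package main

import "fmt"

// outFile is a hypothetical helper mirroring the rule introduced by this
// commit: pretty URLs write slug/index.html (served as /slug/), while
// --uglyurls keeps the old slug.html output.
func outFile(slug, extension string, uglyUrls bool) string {
    if uglyUrls {
        return slug + "." + extension // e.g. about.html, served as /about.html
    }
    return slug + "/index.html" // e.g. about/index.html, served as /about/
}

func main() {
    fmt.Println(outFile("about", "html", false)) // about/index.html
    fmt.Println(outFile("about", "html", true))  // about.html
}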

@@ -1,28 +1,28 @@
 <ul class="nav nav-list">
-<li> <a href="/">Home</a> </li>
+<li> <a href="/">Home</a></li>
 <li class="divider"></li>
 <li class="nav-header">Getting Started</li>
-<li> <a href="/doc/installing.html">Installing Hugo</a> </li>
-<li> <a href="/doc/usage.html">Usage</a> </li>
-<li> <a href="/doc/configuration.html">Configuration</a> </li>
-<li> <a href="/doc/source-directory.html">Input Directory Layout</a> </li>
+<li> <a href="/doc/installing">Installing Hugo</a></li>
+<li> <a href="/doc/usage">Usage</a> </li>
+<li> <a href="/doc/configuration">Configuration</a></li>
+<li> <a href="/doc/source-directory">Input Directory Layout</a></li>
 <li class="divider"></li>
 <li class="nav-header">Layout</li>
-<li> <a href="/doc/templates.html">Templates</a> </li>
-<li> <a href="/doc/variables.html">Variables</a> </li>
+<li> <a href="/doc/templates">Templates</a></li>
+<li> <a href="/doc/variables">Variables</a></li>
 <li class="divider"></li>
 <li class="nav-header">Content</li>
-<li> <a href="/doc/organization.html">Organization</a> </li>
-<li> <a href="/doc/front-matter.html">Front Matter</a> </li>
-<li> <a href="/doc/example.html">Example</a> </li>
+<li> <a href="/doc/organization">Organization</a></li>
+<li> <a href="/doc/front-matter">Front Matter</a></li>
+<li> <a href="/doc/example">Example</a></li>
 <li class="divider"></li>
 <li class="nav-header">Extras</li>
-<li> <a href="/doc/shortcodes.html">ShortCodes</a> </li>
+<li> <a href="/doc/shortcodes">ShortCodes</a></li>
 <li class="divider"></li>
 <li class="nav-header">Meta</li>
-<li> <a href="/doc/release-notes.html">Release Notes</a> </li>
-<li> <a href="/doc/roadmap.html">Roadmap</a> </li>
-<li> <a href="/doc/contributing.html">Contributing</a> </li>
-<li> <a href="/doc/contributors.html">Contributors</a> </li>
-<li> <a href="/doc/license.html">License</a> </li>
+<li> <a href="/doc/release-notes">Release Notes</a></li>
+<li> <a href="/doc/roadmap">Roadmap</a> </li>
+<li> <a href="/doc/contributing">Contributing</a></li>
+<li> <a href="/doc/contributors">Contributors</a></li>
+<li> <a href="/doc/license">License</a></li>
 </ul>

@@ -14,11 +14,11 @@
 package hugolib
 import (
-    "launchpad.net/goyaml"
-    "github.com/BurntSushi/toml"
     "encoding/json"
     "fmt"
+    "github.com/BurntSushi/toml"
     "io/ioutil"
+    "launchpad.net/goyaml"
     "os"
     "path"
     "path/filepath"
@@ -33,7 +33,7 @@ type Config struct {
     Title string
     Indexes map[string]string // singular, plural
     ProcessFilters map[string][]string
-    BuildDrafts bool
+    BuildDrafts, UglyUrls, Verbose bool
 }
 var c Config
@@ -42,7 +42,7 @@ var c Config
 func SetupConfig(cfgfile *string, path *string) *Config {
     c.setPath(*path)
-    cfg , err := c.findConfigFile(*cfgfile)
+    cfg, err := c.findConfigFile(*cfgfile)
     c.ConfigFile = cfg
     if err != nil {
@@ -57,6 +57,8 @@ func SetupConfig(cfgfile *string, path *string) *Config {
     c.StaticDir = "static"
     c.DefaultLayout = "post"
     c.BuildDrafts = false
+    c.UglyUrls = false
+    c.Verbose = false
     c.readInConfig()

@@ -254,7 +254,7 @@ func (page *Page) handleYamlMetaData(datum []byte) (interface{}, error) {
     return m, nil
 }
-func (page *Page) handleJsonMetaData(datum []byte) ( interface{}, error ) {
+func (page *Page) handleJsonMetaData(datum []byte) (interface{}, error) {
     var f interface{}
     if err := json.Unmarshal(datum, &f); err != nil {
         return f, fmt.Errorf("Invalid JSON in %v \nError parsing page meta data: %s", page.FileName, err)
@@ -385,10 +385,6 @@ func (page *Page) buildPageFromFile() error {
         return err
     }
-    if err := page.setOutFile(); err != nil {
-        return err
-    }
     switch page.Markup {
     case "md":
         page.convertMarkdown(content)
@@ -398,22 +394,6 @@ func (page *Page) buildPageFromFile() error {
     return nil
 }
-func (p *Page) setOutFile() error {
-    if len(strings.TrimSpace(p.Slug)) > 0 {
-        // Use Slug if provided
-        p.OutFile = strings.TrimSpace(p.Slug + "." + p.Extension)
-    } else if len(strings.TrimSpace(p.Url)) > 2 {
-        // Use Url if provided & Slug missing
-        p.OutFile = strings.TrimSpace(p.Url)
-    } else {
-        // Fall back to filename
-        _, t := filepath.Split(p.FileName)
-        p.OutFile = replaceExtension(strings.TrimSpace(t), p.Extension)
-    }
-    return nil
-}
 func (page *Page) convertMarkdown(lines []string) {
     page.RawMarkdown = strings.Join(lines, "\n")
@@ -437,7 +417,7 @@ func (page *Page) convertRestructuredText(lines []string) {
     rstLines := strings.Split(out.String(), "\n")
     for i, line := range rstLines {
         if strings.HasPrefix(line, "<body>") {
-            rstLines = (rstLines[i+1:len(rstLines)-3])
+            rstLines = (rstLines[i+1 : len(rstLines)-3])
         }
     }
     content := strings.Join(rstLines, "\n")

@@ -27,6 +27,8 @@ import (
     //"sync"
 )
+const slash = string(os.PathSeparator)
 type Site struct {
     c Config
     Pages Pages
@@ -201,6 +203,7 @@ func (s *Site) CreatePages() {
         page := NewPage(fileName)
         page.Site = s.Info
         page.Tmpl = s.Tmpl
+        s.setOutFile(page)
         if s.c.BuildDrafts || !page.Draft {
             s.Pages = append(s.Pages, page)
         }
@@ -251,7 +254,30 @@ func (s *Site) RenderPages() {
 func (s *Site) WritePages() {
     for _, p := range s.Pages {
-        s.WritePublic(p.Section, p.OutFile, p.RenderedContent.Bytes())
+        s.WritePublic(p.Section+slash+p.OutFile, p.RenderedContent.Bytes())
+    }
+}
+func (s *Site) setOutFile(p *Page) {
+    if len(strings.TrimSpace(p.Slug)) > 0 {
+        // Use Slug if provided
+        if s.c.UglyUrls {
+            p.OutFile = strings.TrimSpace(p.Slug + "." + p.Extension)
+        } else {
+            p.OutFile = strings.TrimSpace(p.Slug + "/index.html")
+        }
+    } else if len(strings.TrimSpace(p.Url)) > 2 {
+        // Use Url if provided & Slug missing
+        p.OutFile = strings.TrimSpace(p.Url)
+    } else {
+        // Fall back to filename
+        _, t := filepath.Split(p.FileName)
+        if s.c.UglyUrls {
+            p.OutFile = replaceExtension(strings.TrimSpace(t), p.Extension)
+        } else {
+            file, _ := fileExt(strings.TrimSpace(t))
+            p.OutFile = file + "/index." + p.Extension
+        }
     }
 }
@@ -261,24 +287,42 @@ func (s *Site) RenderIndexes() {
         n := s.NewNode()
         n.Title = strings.Title(k)
         url := Urlize(plural + "/" + k)
-        n.Url = url + ".html"
-        n.Permalink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(n.Url)))
+        plink := url
+        if s.c.UglyUrls {
+            n.Url = url + ".html"
+            plink = n.Url
+        } else {
+            n.Url = url + "/index.html"
+        }
+        n.Permalink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(plink)))
         n.RSSlink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(url+".xml")))
         n.Date = o[0].Date
         n.Data[singular] = o
         n.Data["Pages"] = o
-        layout := "indexes/" + singular + ".html"
+        layout := "indexes" + slash + singular + ".html"
         x := s.RenderThing(n, layout)
-        s.WritePublic(plural, k+".html", x.Bytes())
+        var base string
+        if s.c.UglyUrls {
+            base = plural + slash + k
+        } else {
+            base = plural + slash + k + slash + "index"
+        }
+        s.WritePublic(base+".html", x.Bytes())
         if a := s.Tmpl.Lookup("rss.xml"); a != nil {
             // XML Feed
             y := s.NewXMLBuffer()
-            n.Url = Urlize(plural + "/" + k + ".xml")
-            n.Permalink = template.HTML(string(n.Site.BaseUrl) + plural + "/" + k + ".xml")
+            if s.c.UglyUrls {
+                n.Url = Urlize(plural + "/" + k + ".xml")
+            } else {
+                n.Url = Urlize(plural + "/" + k + "/index.xml")
+            }
+            n.Permalink = template.HTML(string(n.Site.BaseUrl) + n.Url)
             s.Tmpl.ExecuteTemplate(y, "rss.xml", n)
-            s.WritePublic(plural, k+".xml", y.Bytes())
+            s.WritePublic(base+".xml", y.Bytes())
         }
     }
 }
@@ -290,21 +334,21 @@ func (s *Site) RenderLists() {
         n.Title = strings.Title(inflect.Pluralize(section))
         n.Url = Urlize(section + "/index.html")
         n.Permalink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(n.Url)))
-        n.RSSlink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(section+"/index.xml")))
+        n.RSSlink = template.HTML(MakePermalink(string(n.Site.BaseUrl), string(section+".xml")))
         n.Date = data[0].Date
         n.Data["Pages"] = data
         layout := "indexes/" + section + ".html"
         x := s.RenderThing(n, layout)
-        s.WritePublic(section, "index.html", x.Bytes())
+        s.WritePublic(section+slash+"index.html", x.Bytes())
         if a := s.Tmpl.Lookup("rss.xml"); a != nil {
             // XML Feed
-            n.Url = Urlize(section + "/index.xml")
-            n.Permalink = template.HTML(string(n.Site.BaseUrl) + section + "/index.xml")
+            n.Url = Urlize(section + ".xml")
+            n.Permalink = template.HTML(string(n.Site.BaseUrl) + n.Url)
             y := s.NewXMLBuffer()
             s.Tmpl.ExecuteTemplate(y, "rss.xml", n)
-            s.WritePublic(section, "index.xml", y.Bytes())
+            s.WritePublic(section+slash+"index.xml", y.Bytes())
         }
     }
 }
@@ -322,7 +366,7 @@ func (s *Site) RenderHomePage() {
         n.Data["Pages"] = s.Pages[:9]
     }
     x := s.RenderThing(n, "index.html")
-    s.WritePublic("", "index.html", x.Bytes())
+    s.WritePublic("index.html", x.Bytes())
     if a := s.Tmpl.Lookup("rss.xml"); a != nil {
         // XML Feed
@@ -331,7 +375,7 @@ func (s *Site) RenderHomePage() {
         n.Permalink = template.HTML(string(n.Site.BaseUrl) + "index.xml")
         y := s.NewXMLBuffer()
         s.Tmpl.ExecuteTemplate(y, "rss.xml", n)
-        s.WritePublic("", "index.xml", y.Bytes())
+        s.WritePublic("index.xml", y.Bytes())
     }
 }
@@ -361,17 +405,18 @@ func (s *Site) NewXMLBuffer() *bytes.Buffer {
     return bytes.NewBufferString(header)
 }
-func (s *Site) WritePublic(path string, filename string, content []byte) {
-    AbsPath := ""
-    if path != "" {
-        // TODO double check the following line.. calling GetAbsPath 2x seems wrong
-        mkdirIf(s.c.GetAbsPath(filepath.Join(s.c.GetAbsPath(s.c.PublishDir), path)))
-        AbsPath = filepath.Join(s.c.GetAbsPath(s.c.PublishDir), path, filename)
-    } else {
-        AbsPath = filepath.Join(s.c.GetAbsPath(s.c.PublishDir), filename)
+func (s *Site) WritePublic(path string, content []byte) {
+    if s.c.Verbose {
+        fmt.Println(path)
     }
-    file, _ := os.Create(AbsPath)
+    path, filename := filepath.Split(path)
+    path = filepath.FromSlash(s.c.GetAbsPath(filepath.Join(s.c.PublishDir, path)))
+    mkdirIf(path)
+    file, _ := os.Create(filepath.Join(path, filename))
     defer file.Close()
     file.Write(content)

main.go

@@ -14,10 +14,10 @@
 package main
 import (
-    "github.com/spf13/hugo/hugolib"
     "flag"
     "fmt"
    "github.com/howeyc/fsnotify"
+    "github.com/spf13/hugo/hugolib"
     "net/http"
     "os"
     "path/filepath"
@@ -39,6 +39,7 @@ var (
     watchMode = flag.Bool("w", false, "watch filesystem for changes and recreate as needed")
     server = flag.Bool("s", false, "run a (very) simple web server")
     port = flag.String("port", "1313", "port to run web server on, default :1313")
+    uglyUrls = flag.Bool("uglyurls", false, "use /filename.html instead of /filename/ ")
 )
 func usage() {
@@ -58,6 +59,8 @@ func main() {
     config := hugolib.SetupConfig(cfgfile, path)
     config.BuildDrafts = *draft
+    config.UglyUrls = *uglyUrls
+    config.Verbose = *verbose
     if *baseUrl != "" {
         config.BaseUrl = *baseUrl
@@ -119,7 +122,7 @@ func buildSite(config *hugolib.Config) *hugolib.Site {
     site := hugolib.NewSite(config)
     site.Build()
     site.Stats()
-    fmt.Printf("in %v ms\n", int(1000 * time.Since(startTime).Seconds()))
+    fmt.Printf("in %v ms\n", int(1000*time.Since(startTime).Seconds()))
     return site
 }
@@ -145,6 +148,9 @@ func NewWatcher(c *hugolib.Config, port string, server bool) error {
     select {
     case ev := <-watcher.Event:
         var _ = ev
+        if c.Verbose {
+            fmt.Println(ev)
+        }
         watchChange(c)
         // TODO add newly created directories to the watch list
     case err := <-watcher.Error: