Add support for URLs relative to context root

Setting `RelativeURLs` to `true` will make all relative URLs in the site *really* relative.

And will do so with speed.

So:

In `/post/myblogpost.html`:

`/mycss.css` becomes `../mycss.css`

The same in `/index.html` will become:

`./mycss.css` etc.

Note that absolute URLs will not be touched (either external resources, or URLs constructed with `BaseURL`).

Performance is about the same as before (a few percent slower, per the benchmarks below):

```
benchmark                    old ns/op     new ns/op     delta
BenchmarkAbsURL              17462         18164         +4.02%
BenchmarkAbsURLSrcset        18842         19632         +4.19%
BenchmarkXMLAbsURLSrcset     18643         19313         +3.59%
BenchmarkXMLAbsURL           9283          9656          +4.02%

benchmark                    old allocs     new allocs     delta
BenchmarkAbsURL              24             28             +16.67%
BenchmarkAbsURLSrcset        29             32             +10.34%
BenchmarkXMLAbsURLSrcset     27             30             +11.11%
BenchmarkXMLAbsURL           12             14             +16.67%

benchmark                    old bytes     new bytes     delta
BenchmarkAbsURL              3154          3404          +7.93%
BenchmarkAbsURLSrcset        2376          2573          +8.29%
BenchmarkXMLAbsURLSrcset     2569          2763          +7.55%
BenchmarkXMLAbsURL           1888          1998          +5.83%

```

Fixes #1104
Fixes #622
Fixes #937
Fixes #157
This commit is contained in:
bep 2015-05-16 00:11:39 +02:00
parent e522e5f415
commit beaa8b1bca
10 changed files with 181 additions and 110 deletions

View file

@ -139,6 +139,7 @@ func InitializeConfig() {
viper.SetDefault("Verbose", false)
viper.SetDefault("IgnoreCache", false)
viper.SetDefault("CanonifyURLs", false)
viper.SetDefault("RelativeURLs", false)
viper.SetDefault("Taxonomies", map[string]string{"tag": "tags", "category": "categories"})
viper.SetDefault("Permalinks", make(hugolib.PermalinkOverrides, 0))
viper.SetDefault("Sitemap", hugolib.Sitemap{Priority: -1})

View file

@ -232,6 +232,41 @@ func MakePathRelative(inPath string, possibleDirectories ...string) (string, err
return inPath, errors.New("Can't extract relative path, unknown prefix")
}
// Should be good enough for Hugo.
var isFileRe = regexp.MustCompile(`.*\..{1,6}$`)

// GetDottedRelativePath expects a relative path starting after the content
// directory. It returns a dotted path prefix ("./", "../", "../../", ...)
// that climbs from the directory containing inPath back up to the root.
func GetDottedRelativePath(inPath string) string {
	inPath = filepath.Clean(filepath.FromSlash(inPath))

	if inPath == "." {
		return "./"
	}

	// A path whose last element does not look like a file (no short
	// extension) is treated as a directory; make sure it ends in a
	// separator so filepath.Split keeps it in the dir part.
	if !isFileRe.MatchString(inPath) && !strings.HasSuffix(inPath, FilePathSeparator) {
		inPath += FilePathSeparator
	}

	if !strings.HasPrefix(inPath, FilePathSeparator) {
		inPath = FilePathSeparator + inPath
	}

	dir, _ := filepath.Split(inPath)

	sectionCount := strings.Count(dir, FilePathSeparator)

	if sectionCount == 0 || dir == FilePathSeparator {
		return "./"
	}

	// One "../" per directory level below the root. strings.Repeat avoids
	// the quadratic allocation pattern of += in a loop.
	return strings.Repeat("../", sectionCount-1)
}
// Filename takes a path, strips out the extension,
// and returns the name of the file.
func Filename(in string) (name string) {

View file

@ -112,6 +112,45 @@ func TestMakePathRelative(t *testing.T) {
}
}
// TestGetDottedRelativePath exercises GetDottedRelativePath with both
// native (filepath.FromSlash) and untouched slash-separated inputs; on
// Windows these differ, elsewhere they are identical.
func TestGetDottedRelativePath(t *testing.T) {
	identity := func(s string) string { return s }
	for _, fixer := range []func(string) string{filepath.FromSlash, identity} {
		doTestGetDottedRelativePath(fixer, t)
	}
}
// doTestGetDottedRelativePath runs the table of dotted-path cases with the
// given path fixer applied to the OS-sensitive inputs.
func doTestGetDottedRelativePath(urlFixer func(string) string, t *testing.T) {
	cases := []struct {
		input, expected string
	}{
		{"", "./"},
		{urlFixer("/"), "./"},
		{urlFixer("post"), "../"},
		{urlFixer("/post"), "../"},
		{urlFixer("post/"), "../"},
		{urlFixer("tags/foo.html"), "../"},
		{urlFixer("/tags/foo.html"), "../"},
		{urlFixer("/post/"), "../"},
		{urlFixer("////post/////"), "../"},
		{urlFixer("/foo/bar/index.html"), "../../"},
		{urlFixer("/foo/bar/foo/"), "../../../"},
		{urlFixer("/foo/bar/foo"), "../../../"},
		{urlFixer("foo/bar/foo/"), "../../../"},
		{urlFixer("foo/bar/foo/bar"), "../../../../"},
		{"404.html", "./"},
		{"404.xml", "./"},
		{"/404.html", "./"},
	}

	for i, c := range cases {
		if got := GetDottedRelativePath(c.input); got != c.expected {
			t.Errorf("Test %d failed. Expected %q got %q", i, c.expected, got)
		}
	}
}
func TestMakeTitle(t *testing.T) {
type test struct {
input, expected string

View file

@ -1397,16 +1397,21 @@ func (s *Site) renderAndWriteXML(name string, dest string, d interface{}, layout
err := s.render(name, d, renderBuffer, layouts...)
absURLInXML, err := transform.AbsURLInXML()
if err != nil {
return err
}
outBuffer := bp.GetBuffer()
defer bp.PutBuffer(outBuffer)
transformer := transform.NewChain(absURLInXML...)
transformer.Apply(outBuffer, renderBuffer)
var path []byte
if viper.GetBool("RelativeURLs") {
path = []byte(helpers.GetDottedRelativePath(dest))
} else {
s := viper.GetString("BaseURL")
if !strings.HasSuffix(s, "/") {
s += "/"
}
path = []byte(s)
}
transformer := transform.NewChain(transform.AbsURLInXML)
transformer.Apply(outBuffer, renderBuffer, path)
if err == nil {
err = s.WriteDestFile(dest, outBuffer)
@ -1426,20 +1431,32 @@ func (s *Site) renderAndWritePage(name string, dest string, d interface{}, layou
transformLinks := transform.NewEmptyTransforms()
if viper.GetBool("CanonifyURLs") {
absURL, err := transform.AbsURL()
if err != nil {
return err
}
transformLinks = append(transformLinks, absURL...)
if viper.GetBool("RelativeURLs") || viper.GetBool("CanonifyURLs") {
transformLinks = append(transformLinks, transform.AbsURL)
}
if viper.GetBool("watch") && !viper.GetBool("DisableLiveReload") {
transformLinks = append(transformLinks, transform.LiveReloadInject)
}
var path []byte
if viper.GetBool("RelativeURLs") {
translated, err := s.PageTarget().(target.OptionalTranslator).TranslateRelative(dest)
if err != nil {
return err
}
path = []byte(helpers.GetDottedRelativePath(translated))
} else if viper.GetBool("CanonifyURLs") {
s := viper.GetString("BaseURL")
if !strings.HasSuffix(s, "/") {
s += "/"
}
path = []byte(s)
}
transformer := transform.NewChain(transformLinks...)
transformer.Apply(outBuffer, renderBuffer)
transformer.Apply(outBuffer, renderBuffer, path)
if err == nil {
if err = s.WriteDestPage(dest, outBuffer); err != nil {

View file

@ -16,6 +16,11 @@ type Translator interface {
Translate(string) (string, error)
}
// TODO(bep) consider other ways to solve this.

// OptionalTranslator is implemented by output targets that can translate a
// source path to a destination path without the publish-dir prefix applied.
type OptionalTranslator interface {
	TranslateRelative(string) (string, error)
}
type Output interface {
Publisher
Translator

View file

@ -32,10 +32,18 @@ func (pp *PagePub) Publish(path string, r io.Reader) (err error) {
}
// Translate maps a source path to its final destination path, prefixing
// the configured publish directory when one is set.
func (pp *PagePub) Translate(src string) (dest string, err error) {
	rel, err := pp.TranslateRelative(src)
	if err != nil {
		return rel, err
	}
	if pp.PublishDir == "" {
		return rel, nil
	}
	return filepath.Join(pp.PublishDir, rel), nil
}
func (pp *PagePub) TranslateRelative(src string) (dest string, err error) {
if src == helpers.FilePathSeparator {
if pp.PublishDir != "" {
return filepath.Join(pp.PublishDir, "index.html"), nil
}
return "index.html", nil
}
@ -43,9 +51,6 @@ func (pp *PagePub) Translate(src string) (dest string, err error) {
isRoot := dir == ""
ext := pp.extension(filepath.Ext(file))
name := filename(file)
if pp.PublishDir != "" {
dir = filepath.Join(pp.PublishDir, dir)
}
if pp.UglyURLs || file == "index.html" || (isRoot && file == "404.html") {
return filepath.Join(dir, fmt.Sprintf("%s%s", name, ext)), nil

View file

@ -1,58 +1,11 @@
package transform
import (
"github.com/spf13/viper"
"sync"
)
var ar *absURLReplacer = newAbsURLReplacer()
// to be used in tests; the live site will get its value from Viper.
var AbsBaseUrl string
var absURLInit sync.Once
var ar *absURLReplacer
func AbsURL() (trs []link, err error) {
initAbsURLReplacer()
return absURLFromReplacer(ar)
// AbsURL is a link transform that rewrites root-relative URLs in HTML
// content using the path supplied by the content transformer.
var AbsURL = func(ct contentTransformer) {
	ar.replaceInHTML(ct)
}
func absURLFromURL(URL string) (trs []link, err error) {
return absURLFromReplacer(newAbsURLReplacer(URL))
}
func absURLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
trs = append(trs, func(ct contentTransformer) {
ar.replaceInHTML(ct)
})
return
}
func AbsURLInXML() (trs []link, err error) {
initAbsURLReplacer()
return absURLInXMLFromReplacer(ar)
}
func absURLInXMLFromURL(URL string) (trs []link, err error) {
return absURLInXMLFromReplacer(newAbsURLReplacer(URL))
}
func absURLInXMLFromReplacer(ar *absURLReplacer) (trs []link, err error) {
trs = append(trs, func(ct contentTransformer) {
ar.replaceInXML(ct)
})
return
}
func initAbsURLReplacer() {
absURLInit.Do(func() {
var url string
if AbsBaseUrl != "" {
url = AbsBaseUrl
} else {
url = viper.GetString("BaseURL")
}
ar = newAbsURLReplacer(url)
})
// AbsURLInXML is the XML counterpart of AbsURL, using XML-escaped quote
// matchers when rewriting root-relative URLs.
var AbsURLInXML = func(ct contentTransformer) {
	ar.replaceInXML(ct)
}

View file

@ -3,8 +3,6 @@ package transform
import (
"bytes"
"io"
"net/url"
"strings"
"unicode/utf8"
)
@ -23,6 +21,9 @@ type absurllexer struct {
// the target for the new absurlified content
w io.Writer
// path may be set to a "." relative path
path []byte
pos int // input position
start int // item start position
width int // width of last element
@ -54,9 +55,8 @@ var prefixes = []*prefix{
}
type absURLMatcher struct {
match []byte
quote []byte
replacementURL []byte
match []byte
quote []byte
}
// match check rune inside word. Will be != ' '.
@ -147,7 +147,7 @@ func checkCandidateBase(l *absurllexer) {
}
l.pos += len(m.match)
l.w.Write(m.quote)
l.w.Write(m.replacementURL)
l.w.Write(l.path)
l.start = l.pos
}
}
@ -188,7 +188,7 @@ func checkCandidateSrcset(l *absurllexer) {
l.w.Write([]byte(m.quote))
for i, f := range fields {
if f[0] == '/' {
l.w.Write(m.replacementURL)
l.w.Write(l.path)
l.w.Write(f[1:])
} else {
@ -252,9 +252,11 @@ func (l *absurllexer) replace() {
}
func doReplace(ct contentTransformer, matchers []absURLMatcher) {
lexer := &absurllexer{
content: ct.Content(),
w: ct,
path: ct.Path(),
matchers: matchers}
lexer.replace()
@ -265,9 +267,7 @@ type absURLReplacer struct {
xmlMatchers []absURLMatcher
}
func newAbsURLReplacer(baseURL string) *absURLReplacer {
u, _ := url.Parse(baseURL)
base := []byte(strings.TrimRight(u.String(), "/") + "/")
func newAbsURLReplacer() *absURLReplacer {
// HTML
dqHTMLMatch := []byte("\"/")
@ -285,14 +285,13 @@ func newAbsURLReplacer(baseURL string) *absURLReplacer {
return &absURLReplacer{
htmlMatchers: []absURLMatcher{
{dqHTMLMatch, dqHTML, base},
{sqHTMLMatch, sqHTML, base},
{dqHTMLMatch, dqHTML},
{sqHTMLMatch, sqHTML},
},
xmlMatchers: []absURLMatcher{
{dqXMLMatch, dqXML, base},
{sqXMLMatch, sqXML, base},
{dqXMLMatch, dqXML},
{sqXMLMatch, sqXML},
}}
}
func (au *absURLReplacer) replaceInHTML(ct contentTransformer) {

View file

@ -23,6 +23,7 @@ func NewEmptyTransforms() []link {
// contentTransformer is an interface that enables rotation of pooled buffers
// in the transformer chain.
type contentTransformer interface {
Path() []byte
Content() []byte
io.Writer
}
@ -30,10 +31,15 @@ type contentTransformer interface {
// Implements contentTransformer
// Content is read from the from-buffer and rewritten to to the to-buffer.
type fromToBuffer struct {
path []byte
from *bytes.Buffer
to *bytes.Buffer
}
// Path returns the path/URL prefix that was passed to chain.Apply.
func (ft fromToBuffer) Path() []byte {
	return ft.path
}
// Write appends p to the destination buffer, satisfying io.Writer.
func (ft fromToBuffer) Write(p []byte) (n int, err error) {
	return ft.to.Write(p)
}
@ -42,7 +48,7 @@ func (ft fromToBuffer) Content() []byte {
return ft.from.Bytes()
}
func (c *chain) Apply(w io.Writer, r io.Reader) error {
func (c *chain) Apply(w io.Writer, r io.Reader, p []byte) error {
b1 := bp.GetBuffer()
defer bp.PutBuffer(b1)
@ -57,7 +63,7 @@ func (c *chain) Apply(w io.Writer, r io.Reader) error {
b2 := bp.GetBuffer()
defer bp.PutBuffer(b2)
fb := &fromToBuffer{from: b1, to: b2}
fb := &fromToBuffer{path: p, from: b1, to: b2}
for i, tr := range *c {
if i > 0 {

View file

@ -3,6 +3,7 @@ package transform
import (
"bytes"
"github.com/spf13/hugo/helpers"
"path/filepath"
"strings"
"testing"
)
@ -62,6 +63,11 @@ schemaless: &lt;img srcset=&#39;//img.jpg&#39; src=&#39;//basic.jpg&#39;&gt;
schemaless2: &lt;img srcset=&quot;//img.jpg&quot; src=&quot;//basic.jpg2&gt; POST
`
const REL_PATH_VARIATIONS = `PRE. a href="/img/small.jpg" POST.`
const REL_PATH_VARIATIONS_CORRECT = `PRE. a href="../../img/small.jpg" POST.`
const testBaseURL = "http://base/"
var abs_url_bench_tests = []test{
{H5_JS_CONTENT_DOUBLE_QUOTE, CORRECT_OUTPUT_SRC_HREF_DQ},
{H5_JS_CONTENT_SINGLE_QUOTE, CORRECT_OUTPUT_SRC_HREF_SQ},
@ -85,11 +91,13 @@ var srcset_xml_tests = []test{
{SRCSET_XML_SINGLE_QUOTE, SRCSET_XML_SINGLE_QUOTE_CORRECT},
{SRCSET_XML_VARIATIONS, SRCSET_XML_VARIATIONS_CORRECT}}
var relurl_tests = []test{{REL_PATH_VARIATIONS, REL_PATH_VARIATIONS_CORRECT}}
func TestChainZeroTransformers(t *testing.T) {
tr := NewChain()
in := new(bytes.Buffer)
out := new(bytes.Buffer)
if err := tr.Apply(in, out); err != nil {
if err := tr.Apply(in, out, []byte("")); err != nil {
t.Errorf("A zero transformer chain returned an error.")
}
}
@ -112,7 +120,7 @@ func TestChaingMultipleTransformers(t *testing.T) {
tr := NewChain(f1, f2, f3, f4)
out := new(bytes.Buffer)
if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End.")); err != nil {
if err := tr.Apply(out, helpers.StringToReader("Test: f4 f3 f1 f2 f1 The End."), []byte("")); err != nil {
t.Errorf("Multi transformer chain returned an error: %s", err)
}
@ -124,8 +132,7 @@ func TestChaingMultipleTransformers(t *testing.T) {
}
func BenchmarkAbsURL(b *testing.B) {
absURL, _ := absURLFromURL("http://base")
tr := NewChain(absURL...)
tr := NewChain(AbsURL)
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -134,8 +141,7 @@ func BenchmarkAbsURL(b *testing.B) {
}
func BenchmarkAbsURLSrcset(b *testing.B) {
absURL, _ := absURLFromURL("http://base")
tr := NewChain(absURL...)
tr := NewChain(AbsURL)
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -144,8 +150,7 @@ func BenchmarkAbsURLSrcset(b *testing.B) {
}
func BenchmarkXMLAbsURLSrcset(b *testing.B) {
absXMLURL, _ := absURLInXMLFromURL("http://base")
tr := NewChain(absXMLURL...)
tr := NewChain(AbsURLInXML)
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -154,31 +159,33 @@ func BenchmarkXMLAbsURLSrcset(b *testing.B) {
}
func TestAbsURL(t *testing.T) {
absURL, _ := absURLFromURL("http://base")
tr := NewChain(absURL...)
tr := NewChain(AbsURL)
apply(t.Errorf, tr, abs_url_tests)
}
func TestRelativeURL(t *testing.T) {
tr := NewChain(AbsURL)
applyWithPath(t.Errorf, tr, relurl_tests, helpers.GetDottedRelativePath(filepath.FromSlash("/post/sub/")))
}
func TestAbsURLSrcSet(t *testing.T) {
absURL, _ := absURLFromURL("http://base")
tr := NewChain(absURL...)
tr := NewChain(AbsURL)
apply(t.Errorf, tr, srcset_tests)
}
func TestAbsXMLURLSrcSet(t *testing.T) {
absURLInXML, _ := absURLInXMLFromURL("http://base")
tr := NewChain(absURLInXML...)
tr := NewChain(AbsURLInXML)
apply(t.Errorf, tr, srcset_xml_tests)
}
func BenchmarkXMLAbsURL(b *testing.B) {
absURLInXML, _ := absURLInXMLFromURL("http://base")
tr := NewChain(absURLInXML...)
tr := NewChain(AbsURLInXML)
b.ResetTimer()
for i := 0; i < b.N; i++ {
@ -187,17 +194,17 @@ func BenchmarkXMLAbsURL(b *testing.B) {
}
func TestXMLAbsURL(t *testing.T) {
absURLInXML, _ := absURLInXMLFromURL("http://base")
tr := NewChain(absURLInXML...)
tr := NewChain(AbsURLInXML)
apply(t.Errorf, tr, xml_abs_url_tests)
}
type errorf func(string, ...interface{})
func apply(ef errorf, tr chain, tests []test) {
func applyWithPath(ef errorf, tr chain, tests []test, path string) {
for _, test := range tests {
out := new(bytes.Buffer)
err := tr.Apply(out, strings.NewReader(test.content))
var err error
err = tr.Apply(out, strings.NewReader(test.content), []byte(path))
if err != nil {
ef("Unexpected error: %s", err)
}
@ -207,6 +214,10 @@ func apply(ef errorf, tr chain, tests []test) {
}
}
func apply(ef errorf, tr chain, tests []test) {
applyWithPath(ef, tr, tests, testBaseURL)
}
type test struct {
content string
expected string